Repository: cloudant/python-cloudant Branch: master Commit: 1fb0bb577e8f Files: 93 Total size: 964.7 KB Directory structure: gitextract_yhao7vs7/ ├── .github/ │ ├── ISSUE_TEMPLATE.md │ └── PULL_REQUEST_TEMPLATE.md ├── .gitignore ├── .travis.yml ├── CHANGES.md ├── CONTRIBUTING.md ├── DCO1.1.txt ├── Jenkinsfile ├── LICENSE ├── MANIFEST.in ├── MIGRATION.md ├── README.md ├── VERSION ├── docs/ │ ├── Makefile │ ├── adapters.rst │ ├── client.rst │ ├── cloudant.rst │ ├── compatibility.rst │ ├── conf.py │ ├── database.rst │ ├── design_document.rst │ ├── document.rst │ ├── error.rst │ ├── feed.rst │ ├── getting_started.rst │ ├── index.rst │ ├── make.bat │ ├── module_index.rst │ ├── modules.rst │ ├── query.rst │ ├── replicator.rst │ ├── result.rst │ ├── security_document.rst │ └── view.rst ├── pylintrc ├── requirements.txt ├── setup.py ├── src/ │ └── cloudant/ │ ├── _2to3.py │ ├── __init__.py │ ├── _client_session.py │ ├── _common_util.py │ ├── _messages.py │ ├── adapters.py │ ├── client.py │ ├── database.py │ ├── design_document.py │ ├── document.py │ ├── error.py │ ├── feed.py │ ├── index.py │ ├── query.py │ ├── replicator.py │ ├── result.py │ ├── scheduler.py │ ├── security_document.py │ └── view.py ├── test-requirements.txt └── tests/ ├── __init__.py ├── credentials.py ├── integration/ │ ├── __init__.py │ ├── changes_test.py │ ├── document_test.py │ ├── end_to_end_example_test.py │ ├── iter_test.py │ └── replicator_test.py └── unit/ ├── __init__.py ├── _test_util.py ├── adapter_tests.py ├── auth_renewal_tests.py ├── changes_tests.py ├── client_tests.py ├── cloud_foundry_tests.py ├── database_partition_tests.py ├── database_tests.py ├── db_updates_tests.py ├── design_document_tests.py ├── document_tests.py ├── document_validation_tests.py ├── fixtures/ │ └── __init__.py ├── iam_auth_tests.py ├── index_tests.py ├── infinite_feed_tests.py ├── param_translation_tests.py ├── query_result_tests.py ├── query_tests.py ├── replicator_mock_tests.py ├── replicator_tests.py ├── 
result_tests.py ├── scheduler_tests.py ├── security_document_tests.py ├── unit_t_db_base.py ├── view_execution_tests.py └── view_tests.py ================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/ISSUE_TEMPLATE.md ================================================ Please [read these guidelines](http://ibm.biz/cdt-issue-guide) before opening an issue. ## Bug Description ### 1. Steps to reproduce and the simplest code sample possible to demonstrate the issue ### 2. What you expected to happen ### 3. What actually happened ## Environment details ================================================ FILE: .github/PULL_REQUEST_TEMPLATE.md ================================================ ## Checklist - [ ] Tick to sign-off your agreement to the [Developer Certificate of Origin (DCO) 1.1](../blob/master/DCO1.1.txt) - [ ] Added tests for code changes _or_ test/build only changes - [ ] Updated the change log file (`CHANGES.md`) _or_ test/build only changes - [ ] Completed the PR template below: ## Description ## Approach ## Schema & API Changes ## Security and Privacy ## Testing ## Monitoring and Logging ================================================ FILE: .gitignore ================================================ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Emacs temp files *~ \#* \.\#* # Distribution / packaging .Python env/ bin/ build/ develop-eggs/ dist/ eggs/ lib/ lib64/ parts/ sdist/ var/ venv/ venv.*/ *.egg-info/ .installed.cfg *.egg # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .cache nosetests.xml coverage.xml # Translations *.mo # Mr Developer .mr.developer.cfg .project .pydevproject # Rope .ropeproject # Django stuff: *.log *.pot # Sphinx documentation docs/_build/ ================================================ FILE: .travis.yml 
================================================ sudo: required language: python python: - "3.8" env: - ADMIN_PARTY=true COUCHDB_VERSION=2.3.1 - ADMIN_PARTY=false COUCHDB_VERSION=2.3.1 services: - docker before_install: - docker pull couchdb:$COUCHDB_VERSION - docker run -d -p 5984:5984 couchdb:$COUCHDB_VERSION install: "pip install -r requirements.txt && pip install -r test-requirements.txt" before_script: # Make sure CouchDB is up - while [ $? -ne 0 ]; do sleep 1 && curl -v http://localhost:5984; done - curl -X PUT http://localhost:5984/_users - curl -X PUT http://localhost:5984/_replicator # command to run tests script: - pylint ./src/cloudant - nosetests -A 'not db or ((db == "couch" or "couch" in db) and (not couchapi or couchapi <='${COUCHDB_VERSION:0:1}'))' -w ./tests/unit notifications: email: false ================================================ FILE: CHANGES.md ================================================ # UNRELEASED - [DEPRECATED] This library is end-of-life and no longer supported. # 2.15.0 (2021-08-26) - [NEW] Override `dict.get` method for `CouchDatabase` to add `remote` parameter allowing it to retrieve a remote document if specified. - [FIXED] Fixed the documentation for `bookmarks`. - [FIXED] Also exit `follow_replication` for `failed` state. - [FIXED] Fixed result paging for grouped view queries. - [FIXED] Incorrect use of username as account name in `Cloudant.bluemix()`. - [FIXED] Use custom encoder (if provided) for all view `key` params not just `keys`. - [FIXED] Support boolean type for `key`, `endkey`, and `startkey` in view requests. - [DEPRECATED] This library is now deprecated and will be EOL on Dec 31 2021. - [REMOVED] Removed Python 2 compatibility from the supported environments. - [IMPROVED] Documented use of `None` account name and url override for `Cloudant.iam()`. - [IMPROVED] - Document IDs and attachment names are now rejected if they could cause an unexpected Cloudant request. 
We have seen that some applications pass unsantized document IDs to SDK functions (e.g. direct from user requests). In response to this we have updated many functions to reject obviously invalid paths. However, for complete safety applications must still validate that document IDs and attachment names match expected patterns. # 2.14.0 (2020-08-17) - [FIXED] Set default value for `partitioned` parameter to false when creating a design document. - [FIXED] Corrected setting of `partitioned` flag for `create_query_index` requests. - [FIXED] Added a workaround for installation on Python 2. # 2.13.0 (2020-04-16) - [FIXED] Correctly raise exceptions from `create_database` calls. - [FIXED] Fix `DeprecationWarning` from `collections`. # 2.12.0 (2019-03-28) - [NEW] Added partitioned database support. - [FIXED] Bug where document context manager performed remote save despite uncaught exceptions being raised inside `with` block. - [FIXED] Fixed parameter type of `selector` in docstring. - [FIXED] Removed internal `Document._document_id` property to allow a safe use of dict's methods. - [IMPROVED] Performance of `Result` iteration by releasing result objects immediately after they are returned to the client. - [IMPROVED] Updated `Getting started` section with a `get_query_result` example. - [IMPROVED] Updated `Result` iteration by paginating with views' `startkey` and queries' `bookmark`. # 2.11.0 (2019-01-21) - [NEW] Added option for client to authenticate with IAM token server. - [FIXED] Updated the default IAM token server URL. # 2.10.2 (2018-12-19) - [FIXED] A performance regression deserializing JSON in version 2.10.1. # 2.10.1 (2018-11-16) - [FIXED] Unexpected keyword argument errors when using the library with the `simplejson` module present in the environment caused by `requests` preferentially loading it over the system `json` module. # 2.10.0 (2018-09-19) - [NEW] Add custom JSON encoder/decoder option to `Document` constructor. 
- [NEW] Add new view parameters, `stable` and `update`, as keyword arguments to `get_view_result`. - [NEW] Allow arbitrary query parameters to be passed to custom changes filters. - [FIXED] Case where an exception was raised after successful retry when using `doc.update_field`. - [FIXED] Removed unnecessary request when retrieving a Result collection that is less than the `page_size` value. # 2.9.0 (2018-06-13) - [NEW] Added functionality to test if a key is in a database as in `key in db`, overriding dict `__contains__` and checking in the remote database. - [NEW] Moved `create_query_index` and other query related methods to `CouchDatabase` as the `_index`/`_find` API is available in CouchDB 2.x. - [NEW] Support IAM authentication in replication documents. - [FIXED] Case where `Document` context manager would throw instead of creating a new document if no `_id` was provided. - [IMPROVED] Added support for IAM API key in `cloudant_bluemix` method. - [IMPROVED] Shortened length of client URLs by removing username and password. - [IMPROVED] Verified library operation on Python 3.6.3. # 2.8.1 (2018-02-16) - [FIXED] Installation failures of 2.8.0 caused by missing VERSION file in distribution. # 2.8.0 (2018-02-15) - [NEW] Added support for `/_search_disk_size` endpoint which retrieves disk size information for a specific search index. - [FIXED] Updated default IBM Cloud Identity and Access Management token URL. - [REMOVED] Removed broken source and target parameters that constantly threw `AttributeError` when creating a replication document. # 2.7.0 (2017-10-31) - [NEW] Added API for upcoming Bluemix Identity and Access Management support for Cloudant on Bluemix. Note: IAM API key support is not yet enabled in the service. - [NEW] Added HTTP basic authentication support. - [NEW] Added `Result.all()` convenience method. - [NEW] Allow `service_name` to be specified when instantiating from a Bluemix VCAP_SERVICES environment variable. 
- [IMPROVED] Updated `posixpath.join` references to use `'/'.join` when concatenating URL parts. - [IMPROVED] Updated documentation by replacing deprecated Cloudant links with the latest Bluemix links. # 2.6.0 (2017-08-10) - [NEW] Added `Cloudant.bluemix()` class method to the Cloudant client allowing service credentials to be passed using the CloudFoundry VCAP_SERVICES environment variable. - [FIXED] Fixed client construction in `cloudant_bluemix` context manager. - [FIXED] Fixed validation for feed options to accept zero as a valid value. # 2.5.0 (2017-07-06) - [FIXED] Fixed crash caused by non-UTF8 chars in design documents. - [FIXED] Fixed `TypeError` when setting revision limits on Python>=3.6. - [FIXED] Fixed the `exists()` double check on `client.py` and `database.py`. - [FIXED] Fixed Cloudant exception code 409 with 412 when creating a database that already exists. - [FIXED] Catch error if `throw_on_exists` flag is `False` for creating a document. - [FIXED] Fixed /_all_docs call where `keys` is an empty list. - [FIXED] Issue where docs with IDs that sorted lower than 0 were not returned when iterating through _all_docs. # 2.4.0 (2017-02-14) - [NEW] Added `timeout` option to the client constructor for setting a timeout on an HTTP connection or a response. - [NEW] Added `cloudant_bluemix` method to the Cloudant client allowing service credentials to be passed using the CloudFoundry VCAP_SERVICES environment variable. - [IMPROVED] Updated non-response related errors with additional status code and improved error message for easier debugging. All non-response errors are handled using either CloudantException or CloudantArgumentError. - [FIXED] Support `long` type argument when executing in Python 2. # 2.3.1 (2016-11-30) - [FIXED] Resolved issue where generated UUIDs for replication documents would not be converted to strings. - [FIXED] Resolved issue where CouchDatabase.infinite_changes() method can cause a stack overflow. 
# 2.3.0 (2016-11-02) - [FIXED] Resolved issue where the custom JSON encoder was at times not used when transforming data. - [NEW] Added support for managing the database security document through the SecurityDocument class and CouchDatabase convenience method `get_security_document`. - [NEW] Added `auto_renewal` option to the client constructor to handle the automatic renewal of an expired session cookie auth. # 2.2.0 (2016-10-20) - [NEW] Added auto connect feature to the client constructor. - [FIXED] Requests session is no longer valid after disconnect. # 2.1.1 (2016-10-03) - [FIXED] HTTPError is now raised when 4xx or 5xx codes are encountered. # 2.1.0 (2016-08-31) - [NEW] Added support for Cloudant Search execution. - [NEW] Added support for Cloudant Search index management. - [NEW] Added support for managing and querying list functions. - [NEW] Added support for managing and querying show functions. - [NEW] Added support for querying update handlers. - [NEW] Added `rewrites` accessor property for URL rewriting. - [NEW] Added `st_indexes` accessor property for Cloudant Geospatial indexes. - [NEW] Added support for DesignDocument `_info` and `_search_info` endpoints. - [NEW] Added `validate_doc_update` accessor property for update validators. - [NEW] Added support for a custom `requests.HTTPAdapter` to be configured using an optional `adapter` arg e.g. `Cloudant(USERNAME, PASSWORD, account=ACCOUNT_NAME, adapter=Replay429Adapter())`. - [IMPROVED] Made the 429 response code backoff optional and configurable. To enable the backoff add an `adapter` arg of a `Replay429Adapter` with the desired number of retries and initial backoff. To replicate the 2.0.0 behaviour use: `adapter=Replay429Adapter(retries=10, initialBackoff=0.25)`. If `retries` or `initialBackoff` are not specified they will default to 3 retries and a 0.25 s initial backoff. - [IMPROVED] Additional error reason details appended to HTTP response message errors. 
- [FIX] `415 Client Error: Unsupported Media Type` when using keys with `db.all_docs`. - [FIX] Allowed strings as well as lists for search `group_sort` arguments. # 2.0.3 (2016-06-03) - [FIX] Fixed the python-cloudant readthedocs documentation home page to resolve correctly. # 2.0.2 (2016-06-02) - [IMPROVED] Updated documentation links from python-cloudant.readthedocs.org to python-cloudant.readthedocs.io. - [FIX] Fixed issue with Windows platform compatibility, replaced usage of os.uname for the user-agent string. - [FIX] Fixed readthedocs link in README.rst to resolve to documentation home page. # 2.0.1 (2016-06-02) - [IMPROVED] Updated documentation links from python-cloudant.readthedocs.org to python-cloudant.readthedocs.io. - [FIX] Fixed issue with Windows platform compatibility, replaced usage of os.uname for the user-agent string. - [FIX] Fixed readthedocs link in README.rst to resolve to documentation home page. # 2.0.0 (2016-05-02) - [BREAKING] Renamed modules account.py, errors.py, indexes.py, views.py, to client.py, error.py, index.py, and view.py. - [BREAKING] Removed the `make_result` method from `View` and `Query` classes. If you need to make a query or view result, use `CloudantDatabase.get_query_result`, `CouchDatabase.get_view_result`, or the `View.custom_result` context manager. Additionally, the `Result` and `QueryResult` classes can be called directly to construct a result object. - [BREAKING] Refactored the `SearchIndex` class to now be the `TextIndex` class. Also renamed the `CloudantDatabase` convenience methods of `get_all_indexes`, `create_index`, and `delete_index` as `get_query_indexes`, `create_query_index`, and `delete_query_index` respectively. These changes were made to clarify that the changed class and the changed methods were specific to query index processing only. - [BREAKING] Replace "session" and "url" feed constructor arguments with "source" which can be either a client or a database object. 
Changes also made to the client `db_updates` method signature and the database `changes` method signature. - [BREAKING] Fixed `CloudantDatabase.share_database` to accept all valid permission roles. Changed the method signature to accept roles as a list argument. - [BREAKING] Removed credentials module from the API and moved it to the tests folder since the functionality is outside of the scope of this library but is still useful in unit/integration tests. - [IMPROVED] Changed the handling of queries using the keys argument to issue an HTTP POST request instead of an HTTP GET request so that the request is no longer bound by any URL length limitation. - [IMPROVED] Added support for Result/QueryResult data access via index value and added validation logic to `Result.__getitem__()`. - [IMPROVED] Updated feed functionality to process `_changes` and `_db_updates` with their supported options. Also added an infinite feed option. - [NEW] Handled HTTP status code `429 Too Many Requests` with blocking backoff and retries. - [NEW] Added support for CouchDB Admin Party mode. This library can now be used with CouchDB instances where everyone is Admin. - [FIX] Fixed `Document.get_attachment` method to successfully create text and binary files based on http response Content-Type. The method also returns text, binary, and json content based on http response Content-Type. - [FIX] Added validation to `Cloudant.bill`, `Cloudant.volume_usage`, and `Cloudant.requests_usage` methods to ensure that a valid year/month combination or neither are used as arguments. - [FIX] Fixed the handling of empty views in the DesignDocument. - [FIX] The `CouchDatabase.create_document` method now handles documents and design documents correctly. If the document created is a design document then the locally cached object will be a DesignDocument otherwise it will be a Document. 
- [CHANGE] Moved internal `Code` class, functions like `python_to_couch` and `type_or_none`, and constants into a _common_util module. - [CHANGE] Updated User-Agent header format to be `python-cloudant//Python///`. - [CHANGE] Completed the addition of unit tests that target a database server. Removed all mocked unit tests. # 2.0.0b2 (2016-02-24) - [FIX] Remove the fields parameter from required Query parameters. - [NEW] Add Python 3 support. # 2.0.0b1 (2016-01-11) - [NEW] Added support for Cloudant Query execution. - [NEW] Added support for Cloudant Query index management. - [FIX] DesignDocument content is no longer limited to just views. - [FIX] Document url encoding is now enforced. - [FIX] Database iterator now yields Document/DesignDocument objects with valid document urls. # 2.0.0a4 (2015-12-03) - [FIX] Fixed incorrect readme reference to current library being Alpha 2. # 2.0.0a3 (2015-12-03) - [NEW] Added API documentation hosted on readthedocs.org. # 2.0.0a2 (2015-11-19) - [NEW] Added unit tests targeting CouchDB and Cloudant databases. - [FIX] Fixed bug in database create validation check to work if response code is either 201 (created) or 202 (accepted). - [FIX] Fixed database iterator infinite loop problem and to now yield a Document object. - [BREAKING] Removed previous bulk_docs method from the CouchDatabase class and renamed the previous bulk_insert method as bulk_docs. The previous bulk_docs functionality is available through the all_docs method using the "keys" parameter. - [FIX] Made missing_revisions, revisions_diff, get_revision_limit, set_revision_limit, and view_cleanup API methods available for CouchDB as well as Cloudant. - [BREAKING] Moved the db_update method to the account module. - [FIX] Fixed missing_revisions to key on 'missing_revs'. - [FIX] Fixed set_revision_limit to encode the request data payload correctly. - [FIX] `Document.create()` will no longer update an existing document. 
- [BREAKING] Renamed Document `field_append` method to `list_field_append`. - [BREAKING] Renamed Document `field_remove` method to `list_field_remove`. - [BREAKING] Renamed Document `field_replace` method to `field_set`. - [FIX] The Document local dictionary `_id` key is now synched with `_document_id` private attribute. - [FIX] The Document local dictionary is now refreshed after an add/update/delete of an attachment. - [FIX] The Document `fetch()` method now refreshes the Document local dictionary content correctly. - [BREAKING] Replace the ReplicatorDatabase class with the Replicator class. A Replicator object has a database attribute that represents the _replicator database. This allows the Replicator to work for both a CloudantDatabase and a CouchDatabase. - [REMOVED] Removed "not implemented" methods from the DesignDocument. - [FIX] Add implicit "_design/" prefix for DesignDocument document ids. # 2.0.0a1 (2015-10-13) - Initial release (2.0.0a1). ================================================ FILE: CONTRIBUTING.md ================================================ # Contributing ## Issues Please [read these guidelines](http://ibm.biz/cdt-issue-guide) before opening an issue. If you still need to open an issue then we ask that you complete the template as fully as possible. ## Pull requests We welcome pull requests, but ask contributors to keep in mind the following: * Only PRs with the template completed will be accepted * We will not accept PRs for user specific functionality ### Developer Certificate of Origin In order for us to accept pull-requests, the contributor must sign-off a [Developer Certificate of Origin (DCO)](DCO1.1.txt). This clarifies the intellectual property license granted with any contribution. It is for your protection as a Contributor as well as the protection of IBM and its customers; it does not change your rights to use your own Contributions for any other purpose. 
Please read the agreement and acknowledge it by ticking the appropriate box in the PR text, for example: - [x] Tick to sign-off your agreement to the Developer Certificate of Origin (DCO) 1.1 ## General information Python-Cloudant Client Library is written in Python. ## Requirements - Python - pip It is recommended to use a [virtual environment](https://virtualenv.pypa.io/en/latest) during development. The python-cloudant dependencies can be installed via the `requirements.txt` file using pip. For example to create a virtualenv and install requirements: ```sh virtualenv . ./bin/activate pip install -r requirements.txt pip install -r test-requirements.txt ``` ## Testing The tests need an Apache CouchDB or Cloudant service to run against. The tests create databases in your CouchDB instance, these are `db-`. They also create and delete documents in the `_replicator` database. The tests are run with the `nosetests` runner. In this example the `ADMIN_PARTY` environment variable is used to tell the tests not to use any authentication. See below for the full set of variables that can be used. ```sh $ ADMIN_PARTY=true nosetests -w ./tests/unit ``` There are several environment variables which affect test behaviour: - `RUN_CLOUDANT_TESTS`: set this to run the tests that use Cloudant-specific features. If you set this, you must set one of the following combinations of other variables: - `DB_URL`, `DB_USER` and `DB_PASSWORD`. - `CLOUDANT_ACCOUNT`, `DB_USER` and `DB_PASSWORD`. - If you set both `DB_URL` and `CLOUDANT_ACCOUNT`, `DB_URL` is used as the URL to make requests to and `CLOUDANT_ACCOUNT` is inserted into the `X-Cloudant-User` header. - Without `RUN_CLOUDANT_TESTS`, the following environment variables have an effect: - Set `DB_URL` to set the root URL of the CouchDB/Cloudant instance. It defaults to `http://localhost:5984`. - Set `ADMIN_PARTY` to `true` to not use any authentication details. 
- Without `ADMIN_PARTY`, set `DB_USER` and `DB_PASSWORD` to use those credentials to access the database. - Without `ADMIN_PARTY` and `DB_USER`, the tests assume CouchDB is in admin party mode, but create a user via `_config` to run tests as. This user is deleted at the end of the test run, but beware it'll break other applications using the CouchDB instance that rely on admin party mode being in effect while the tests are running. ### Test attributes Database tests also have node attributes. Currently there are these attributes: `db` - `cloudant` and/or `couch` `couchapi` - Apache CouchDB major version number (i.e. API level) e.g. `2` Example to run database tests that require CouchDB version 1 API and no Cloudant features: `nosetests -A 'db and ((db is "couch" or "couch" in db) and (not couchapi or couchapi <=1))' -w ./tests/unit` ================================================ FILE: DCO1.1.txt ================================================ Developer Certificate of Origin Version 1.1 Copyright (C) 2004, 2006 The Linux Foundation and its contributors. 1 Letterman Drive Suite D4700 San Francisco, CA, 94129 Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. 
Developer's Certificate of Origin 1.1 By making a contribution to this project, I certify that: (a) The contribution was created in whole or in part by me and I have the right to submit it under the open source license indicated in the file; or (b) The contribution is based upon previous work that, to the best of my knowledge, is covered under an appropriate open source license and I have the right under that license to submit that work with modifications, whether created in whole or in part by me, under the same open source license (unless I am permitted to submit under a different license), as indicated in the file; or (c) The contribution was provided directly to me by some other person who certified (a), (b) or (c) and I have not modified it. (d) I understand and agree that this project and the contribution are public and that a record of the contribution (including all personal information I submit with it, including my sign-off) is maintained indefinitely and may be redistributed consistent with this project or the open source license(s) involved. ================================================ FILE: Jenkinsfile ================================================ def getEnvForSuite(suiteName) { // Base environment variables def envVars = [ "DB_URL=${SDKS_TEST_SERVER_URL}", "RUN_CLOUDANT_TESTS=1", "SKIP_DB_UPDATES=1" // Disable pending resolution of case 71610 ] // Add test suite specific environment variables switch(suiteName) { case 'basic': envVars.add("RUN_BASIC_AUTH_TESTS=1") break case 'iam': // Setting IAM_API_KEY forces tests to run using an IAM enabled client. 
envVars.add("IAM_API_KEY=$DB_IAM_API_KEY") envVars.add("IAM_TOKEN_URL=$SDKS_TEST_IAM_URL") break case 'cookie': case 'simplejson': break default: error("Unknown test suite environment ${suiteName}") } return envVars } def setupPythonAndTest(pythonVersion, testSuite) { node('sdks-executor') { // Unstash the source on this node unstash name: 'source' // Set up the environment and test withCredentials([usernamePassword(credentialsId: 'testServerLegacy', usernameVariable: 'DB_USER', passwordVariable: 'DB_PASSWORD'), string(credentialsId: 'testServerIamApiKey', variable: 'DB_IAM_API_KEY')]) { withEnv(getEnvForSuite("${testSuite}")) { try { sh """ virtualenv tmp -p ${pythonVersion.startsWith('3') ? "python3" : "python"} . ./tmp/bin/activate python --version pip install -r requirements.txt pip install -r test-requirements.txt ${'simplejson'.equals(testSuite) ? 'pip install simplejson' : ''} pylint ./src/cloudant nosetests -A 'not db or (db == "cloudant" or "cloudant" in db)' -w ./tests/unit --with-xunit """ } finally { // Load the test results junit 'nosetests.xml' } } } } } // Start of build stage('Checkout'){ // Checkout and stash the source node{ checkout scm stash name: 'source' } } stage('Test'){ def py3 = '3' def axes = [:] [py3].each { version -> ['basic','cookie','iam'].each { auth -> axes.put("Python${version}-${auth}", {setupPythonAndTest(version, auth)}) } } axes.put("Python${py3}-simplejson", {setupPythonAndTest(py3, 'simplejson')}) parallel(axes) } stage('Publish') { gitTagAndPublish { isDraft=true releaseApiUrl='https://api.github.com/repos/cloudant/python-cloudant/releases' } } ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. 
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: MANIFEST.in ================================================ include requirements.txt VERSION LICENSE ================================================ FILE: MIGRATION.md ================================================ # Migrating to the `cloudant-python-sdk` library This document is to assist in migrating from the `python-cloudant` (module: `cloudant`) to the newly supported [`cloudant-python-sdk`](https://github.com/IBM/cloudant-python-sdk) (module: `ibmcloudant`). ## Initializing the client connection There are several ways to create a client connection in `cloudant-python-sdk`: 1. [Environment variables](https://github.com/IBM/cloudant-python-sdk#authentication-with-environment-variables) 2. [External configuration file](https://github.com/IBM/cloudant-python-sdk#authentication-with-external-configuration) 3. 
[Programmatically](https://github.com/IBM/cloudant-python-sdk#programmatic-authentication) [See the README](https://github.com/IBM/cloudant-python-sdk#code-examples) for code examples on using environment variables. ## Other differences 1. The `cloudant-python-sdk` library does not support local dictionary caching of database and document objects. 1. There are no context managers in `cloudant-python-sdk`. To reproduce the behaviour of the `python-cloudant` context managers in `cloudant-python-sdk` users need to explicitly call the specific operations against the remote HTTP API. For example, in the case of the document context manager, this would mean doing both a `get_document` to fetch and a `put_document` to save. 1. In `cloudant-python-sdk` View, Search, and Query (aka `_find` endpoint) operation responses contain raw JSON content like using `raw_result=True` in `python-cloudant`. 1. Replay adapters are replaced by the [automatic retries](https://github.com/IBM/ibm-cloud-sdk-common/#automatic-retries) feature for failed requests. 1. Error handling is not transferable from `python-cloudant` to `cloudant-python-sdk`. For more information go to the [Error handling section](https://cloud.ibm.com/apidocs/cloudant?code=python#error-handling) in our API docs. 1. Custom HTTP client configurations in `python-cloudant` can be set differently in `cloudant-python-sdk`. For more information go to the [Configuring the HTTP client section](https://github.com/IBM/ibm-cloud-sdk-common/#configuring-the-http-client) in the IBM Cloud SDK Common README. ### Troubleshooting 1. Authentication errors occur during service instantiation. For example, the code `service = CloudantV1.new_instance(service_name="EXAMPLE")` will fail with `ValueError: At least one of iam_profile_name or iam_profile_id must be specified.` if required environment variables prefixed with `EXAMPLE` are not set. 1. Server errors occur when running a request against the service. 
We suggest to check server errors with [`getServerInformation`](https://cloud.ibm.com/apidocs/cloudant?code=python#getserverinformation) which is the new alternative of `metadata()`. ## Request mapping Here's a list of the top 5 most frequently used `python-cloudant` operations and the `cloudant-python-sdk` equivalent API operation documentation link: | `python-cloudant` operation | `cloudant-python-sdk` API operation documentation link | |---------------------------------------|---------------------------------| |`Document('db_name', 'docid').fetch()` |[`getDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#getdocument)| |`db.get_view_result()` |[`postView`](https://cloud.ibm.com/apidocs/cloudant?code=python#postview)| |`db.get_query_result()` |[`postFind`](https://cloud.ibm.com/apidocs/cloudant?code=python#postfind)| | `doc.exists()` |[`headDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#headdocument)| |`Document('db_name', 'docid').save()` |[`putDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#putdocument)| [A table](#reference-table) with the whole list of operations is provided at the end of this guide. The `cloudant-python-sdk` library is generated from a more complete API spec and provides a significant number of operations that do not exist in `python-cloudant`. See [the IBM Cloud API Documentation](https://cloud.ibm.com/apidocs/cloudant) to review request parameter and body options, code examples, and additional details for every endpoint. ## Known Issues There's an [outline of known issues](https://github.com/IBM/cloudant-python-sdk/blob/master/KNOWN_ISSUES.md) in the `cloudant-python-sdk` repository. ## Reference table The table below contains a list of `python-cloudant` functions and the `cloudant-python-sdk` equivalent API operation documentation link. The `cloudant-python-sdk` operation documentation link will contain the new function in a code sample e.g. 
`getServerInformation` link will contain a code example with `get_server_information()`. **Note:** There are many API operations included in the new `cloudant-python-sdk` that are not available in the `python-cloudant` library. The [API documentation](https://cloud.ibm.com/apidocs/cloudant?code=python) contains the full list of operations. | `python-cloudant` function | `cloudant-python-sdk` API operation documentation link | |-----------------|---------------------| |`metadata()`|[`getServerInformation`](https://cloud.ibm.com/apidocs/cloudant?code=python#getserverinformation)| |`all_dbs()`|[`getAllDbs`](https://cloud.ibm.com/apidocs/cloudant?code=python#getalldbs)| |`db_updates()/infinite_db_updates()`|[`getDbUpdates`](https://cloud.ibm.com/apidocs/cloudant?code=python#getdbupdates)| |`Replicator.stop_replication()`|[`deleteReplicationDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#deletereplicationdocument)| |`Replicator.replication_state()`|[`getReplicationDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#getreplicationdocument)| |`Replicator.create_replication()`|[`putReplicationDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#putreplicationdocument)| |`Scheduler.get_doc()`|[`getSchedulerDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#getschedulerdocument)| |`Scheduler.list_docs()`|[`getSchedulerDocs`](https://cloud.ibm.com/apidocs/cloudant?code=python#getschedulerdocs)| |`Scheduler.list_jobs()`|[`getSchedulerJobs`](https://cloud.ibm.com/apidocs/cloudant?code=python#getschedulerjobs)| |`session()`|[`getSessionInformation`](https://cloud.ibm.com/apidocs/cloudant?code=python#getsessioninformation)| |`uuids()`|[`getUuids`](https://cloud.ibm.com/apidocs/cloudant?code=python#getuuids)| |`db.delete()`|[`deleteDatabase`](https://cloud.ibm.com/apidocs/cloudant?code=python#deletedatabase)| |`db.metadata()`|[`getDatabaseInformation`](https://cloud.ibm.com/apidocs/cloudant?code=python#getdatabaseinformation)| 
|`db.create_document()`|[`postDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#postdocument)| |`db.create()`|[`putDatabase`](https://cloud.ibm.com/apidocs/cloudant?code=python#putdatabase)| |`db.all_docs()/db.keys()`|[`postAllDocs`](https://cloud.ibm.com/apidocs/cloudant?code=python#postalldocs)| |`db.bulk_docs()`|[`postBulkDocs`](https://cloud.ibm.com/apidocs/cloudant?code=python#postbulkdocs)| |`db.changes()/db.infinite_changes()`|[`postChanges`](https://cloud.ibm.com/apidocs/cloudant?code=python#postchanges-databases)| |`DesignDocument(db, '_design/doc').delete()`|[`deleteDesignDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#deletedesigndocument)| |`db.get_design_document()/DesignDocument(db, '_design/doc').fetch()`|[`getDesignDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#getdesigndocument)| |`DesignDocument(db, '_design/doc').save()`|[`putDesignDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#putdesigndocument)| |`DesignDocument(db, '_design/doc').info()`|[`getDesignDocumentInformation`](https://cloud.ibm.com/apidocs/cloudant?code=python#getdesigndocumentinformation)| |`db.get_search_result()`|[`postSearch`](https://cloud.ibm.com/apidocs/cloudant?code=python#postsearch)| |`db.get_view_result()`|[`postView`](https://cloud.ibm.com/apidocs/cloudant?code=python#postview)| |`db.list_design_documents()`|[`postDesignDocs`](https://cloud.ibm.com/apidocs/cloudant?code=python#postdesigndocs)| |`db.get_query_result()`|[`postFind`](https://cloud.ibm.com/apidocs/cloudant?code=python#postfind)| |`db.get_query_indexes()`|[`getIndexesInformation`](https://cloud.ibm.com/apidocs/cloudant?code=python#getindexesinformation)| |`db.create_query_index()`|[`postIndex`](https://cloud.ibm.com/apidocs/cloudant?code=python#postindex)| |`db.delete_query_index()`|[`deleteIndex`](https://cloud.ibm.com/apidocs/cloudant?code=python#deleteindex)| |`Document(db, 
'_local/docid').fetch()`|[`getLocalDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#getlocaldocument)| |`Document(db, '_local/docid').save()`|[`putLocalDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#putlocaldocument)| |`Document(db, '_local/docid').delete()`|[`deleteLocalDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#deletelocaldocument)| |`db.missing_revisions()/db.revisions_diff()`|[`postRevsDiff`](https://cloud.ibm.com/apidocs/cloudant?code=python#postrevsdiff)| |`db.partition_metadata()`|[`getPartitionInformation`](https://cloud.ibm.com/apidocs/cloudant?code=python#getpartitioninformation)| |`db.partitioned_all_docs()`|[`postPartitionAllDocs`](https://cloud.ibm.com/apidocs/cloudant?code=python#postpartitionalldocs)| |`db.get_partitioned_search_result()`|[`postPartitionSearch`](https://cloud.ibm.com/apidocs/cloudant?code=python#postpartitionsearch)| |`db.get_partitioned_view_result()`|[`postPartitionView`](https://cloud.ibm.com/apidocs/cloudant?code=python#postpartitionview)| |`db.get_partitioned_query_result()`|[`postPartitionFind`](https://cloud.ibm.com/apidocs/cloudant?code=python#postpartitionfind-partitioned-databases)| |`db.get_security_document()/db.security_document()`|[`getSecurity`](https://cloud.ibm.com/apidocs/cloudant?code=python#getsecurity)| |`db.share_database()`|[`putSecurity`](https://cloud.ibm.com/apidocs/cloudant?code=python#putsecurity)| |`db.shards()`|[`getShardsInformation`](https://cloud.ibm.com/apidocs/cloudant?code=python#getshardsinformation)| |`Document(db, 'docid').delete()`|[`deleteDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#deletedocument)| |`Document(db, 'docid').fetch()`|[`getDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#getdocument)| |`Document(db, 'docid').exists()`|[`headDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#headdocument)| |`Document(db, 
'docid').save()`|[`putDocument`](https://cloud.ibm.com/apidocs/cloudant?code=python#putdocument)| |`Document(db, 'docid').delete_attachment()`|[`deleteAttachment`](https://cloud.ibm.com/apidocs/cloudant?code=python#deleteattachment)| |`Document(db, 'docid').get_attachment()`|[`getAttachment`](https://cloud.ibm.com/apidocs/cloudant?code=python#getattachment)| |`Document(db, 'docid').put_attachment()`|[`putAttachment`](https://cloud.ibm.com/apidocs/cloudant?code=python#putattachment)| |`generate_api_key()`|[`postApiKeys`](https://cloud.ibm.com/apidocs/cloudant?code=python#postapikeys)| |`SecurityDocument().save()`|[`putCloudantSecurityConfiguration`](https://cloud.ibm.com/apidocs/cloudant?code=python#putcloudantsecurity)| |`cors_configuration()/cors_origin()`|[`getCorsInformation`](https://cloud.ibm.com/apidocs/cloudant?code=python#getcorsinformation)| |`update_cors_configuration()`|[`putCorsConfiguration`](https://cloud.ibm.com/apidocs/cloudant?code=python#putcorsconfiguration)| ================================================ FILE: README.md ================================================ # :warning: NO LONGER MAINTAINED :warning: **This library is end-of-life and no longer supported.** This repository will not be updated. The repository will be kept available in read-only mode. Please see the [Migration Guide](./MIGRATION.md) for advice about migrating to our replacement library [cloudant-python-sdk](https://github.com/IBM/cloudant-python-sdk). For FAQs and additional information please refer to the [Cloudant blog](https://blog.cloudant.com/2021/06/30/Cloudant-SDK-Transition.html). 
# Cloudant Python Client [![Build Status](https://travis-ci.org/cloudant/python-cloudant.svg?branch=master)](https://travis-ci.org/cloudant/python-cloudant) [![Readthedocs](https://readthedocs.org/projects/pip/badge/)](http://python-cloudant.readthedocs.io) [![Compatibility](https://img.shields.io/badge/python-3.5-blue.svg)](http://python-cloudant.readthedocs.io/en/latest/compatibility.html) [![pypi](https://img.shields.io/pypi/v/cloudant.svg)](https://pypi.python.org/pypi/cloudant) This is the official Cloudant library for Python. * [Installation and Usage](#installation-and-usage) * [Getting Started](#getting-started) * [API Reference](http://python-cloudant.readthedocs.io/en/latest/cloudant.html) * [Related Documentation](#related-documentation) * [Development](#development) * [Contributing](CONTRIBUTING.md) * [Test Suite](CONTRIBUTING.md#running-the-tests) * [Using in Other Projects](#using-in-other-projects) * [License](#license) * [Issues](#issues) * [Migrating to `cloudant-python-sdk` library](#migrating-to-cloudant-python-sdk-library) ## Installation and Usage Released versions of this library are [hosted on PyPI](https://pypi.python.org/pypi/cloudant) and can be installed with `pip`. 
In order to install the latest version, execute pip install cloudant ## Getting started See [Getting started (readthedocs.io)](http://python-cloudant.readthedocs.io/en/latest/getting_started.html) ## API Reference See [API reference docs (readthedocs.io)](http://python-cloudant.readthedocs.io/en/latest/cloudant.html) ## Related Documentation * [Cloudant Python client library docs (readthedocs.io)](http://python-cloudant.readthedocs.io) * [Cloudant documentation](https://console.bluemix.net/docs/services/Cloudant/cloudant.html#overview) * [Cloudant Learning Center](https://developer.ibm.com/clouddataservices/cloudant-learning-center/) * [Tutorial for creating and populating a database on IBM Cloud](https://console.bluemix.net/docs/services/Cloudant/tutorials/create_database.html#creating-and-populating-a-simple-cloudant-nosql-db-database-on-ibm-cloud) ## Development See [CONTRIBUTING.md](https://github.com/cloudant/python-cloudant/blob/master/CONTRIBUTING.md) ## Using in other projects The preferred approach for using `python-cloudant` in other projects is to use the PyPI as described above. ### Examples in open source projects [Getting Started with Python Flask on IBM Cloud](https://github.com/IBM-Cloud/get-started-python) [Movie Recommender Demo](https://github.com/snowch/movie-recommender-demo): - [Update and check if documents exist](https://github.com/snowch/movie-recommender-demo/blob/master/web_app/app/dao.py#L162-L168) - [Connect to Cloudant using 429 backoff with 10 retries](https://github.com/snowch/movie-recommender-demo/blob/master/web_app/app/cloudant_db.py#L17-L18) [Watson Recipe Bot](https://github.com/ibm-watson-data-lab/watson-recipe-bot-python-cloudant): - [Use Cloudant Query to find design docs](https://github.com/ibm-watson-data-lab/watson-recipe-bot-python-cloudant/blob/master/souschef/cloudant_recipe_store.py#L33-L77) ## License Copyright © 2015 IBM. All rights reserved. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ## Issues Before opening a new issue please consider the following: * Only the latest release is supported. If at all possible please try to reproduce the issue using the latest version. * Please check the [existing issues](https://github.com/cloudant/python-cloudant/issues) to see if the problem has already been reported. Note that the default search includes only open issues, but it may already have been closed. * Cloudant customers should contact Cloudant support for urgent issues. * When opening a new issue [here in github](../../issues) please complete the template fully. ## Migrating to `cloudant-python-sdk` library We have a newly supported Cloudant Python SDK named [cloudant-python-sdk](https://github.com/IBM/cloudant-python-sdk). For advice on migrating from this module see [MIGRATION.md](MIGRATION.md). ================================================ FILE: VERSION ================================================ 2.15.1-SNAPSHOT ================================================ FILE: docs/Makefile ================================================ # Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. 
Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " applehelp to make an Apple Help Book" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest 
to run all doctests embedded in the documentation (if enabled)" @echo " coverage to run coverage check of the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/CloudantPythonclientlibrary.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/CloudantPythonclientlibrary.qhc" applehelp: $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp @echo @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." @echo "N.B. You won't be able to view it unless you put it in" \ "~/Library/Documentation/Help or install it in your application" \ "bundle." devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." 
@echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/CloudantPythonclientlibrary" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/CloudantPythonclientlibrary" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 
changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage @echo "Testing of coverage in the sources finished, look at the " \ "results in $(BUILDDIR)/coverage/python.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." ================================================ FILE: docs/adapters.rst ================================================ adapters ======== .. automodule:: cloudant.adapters :members: :undoc-members: :special-members: __getitem__, __iter__ :show-inheritance: ================================================ FILE: docs/client.rst ================================================ client ====== .. automodule:: cloudant.client :members: :undoc-members: :special-members: __getitem__, __delitem__, __setitem__ :show-inheritance: ================================================ FILE: docs/cloudant.rst ================================================ Cloudant client library API =========================== .. automodule:: cloudant :members: :undoc-members: :show-inheritance: .. 
toctree:: :maxdepth: 3 modules ================================================ FILE: docs/compatibility.rst ================================================ Compatibility ============= This library can be used with the following databases * `IBM Cloudant® Database-as-a-Service `_ * `IBM Cloudant® Data Layer Local Edition (Cloudant Local) `_ * `Apache CouchDB™ `_ Note that some features are Cloudant specific. This library has been tested with the following versions of Python * `Python™ 3.5 `_ ================================================ FILE: docs/conf.py ================================================ # -*- coding: utf-8 -*- # # Cloudant Python client library documentation build configuration file, created by # sphinx-quickstart on Thu Nov 19 15:15:05 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os import shlex # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) sys.path.insert(0, os.path.abspath('../src')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx' ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'python-cloudant' copyright = u'2015, IBM' author = u'IBM' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '2.15.1-SNAPSHOT' # The full version, including alpha/beta/rc tags. release = '2.15.1-SNAPSHOT' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. 
#modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [] # otherwise, readthedocs.org uses their theme by default, # so no need to specify it # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
#html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. 
htmlhelp_basename = 'CloudantPythonclientlibrarydoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'CloudantPythonclientlibrary.tex', u'python-cloudant documentation', u'IBM', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'pythoncloudantclientlibrary', u'python-cloudant documentation', [author], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'PythonCloudantclientlibrary', u'python-cloudant documentation', author, 'PythonCloudantclientlibrary', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'https://docs.python.org/': None} ================================================ FILE: docs/database.rst ================================================ database ======== .. automodule:: cloudant.database :members: :undoc-members: :special-members: __getitem__, __iter__ :show-inheritance: ================================================ FILE: docs/design_document.rst ================================================ design_document =============== .. automodule:: cloudant.design_document :members: :exclude-members: info :undoc-members: :show-inheritance: ================================================ FILE: docs/document.rst ================================================ document ======== .. automodule:: cloudant.document :members: :undoc-members: :show-inheritance: ================================================ FILE: docs/error.rst ================================================ error ===== .. automodule:: cloudant.error :members: :undoc-members: :show-inheritance: ================================================ FILE: docs/feed.rst ================================================ feed ==== .. 
automodule:: cloudant.feed :members: :undoc-members: :show-inheritance: ================================================ FILE: docs/getting_started.rst ================================================ ############### Getting started ############### Now it's time to begin doing some work with Cloudant and Python. For working code samples of any of the API's please go to our test suite. .. toctree:: :maxdepth: 2 *********** Connections *********** In order to manage a connection you must first initialize the connection by constructing either a ``Cloudant`` or ``CouchDB`` client. Since connecting to the Cloudant managed service provides extra end points as compared to a CouchDB server, we provide the two different client implementations in order to connect to the desired database service. Once the client is constructed, you follow that up by connecting to the server, performing your tasks, and then disconnecting from the server. Later in the `Context managers`_ section we will see how to simplify this process through the use of the Python *with* statement. Note: If you require retrying requests after an HTTP 429 error, the ``Replay429Adapter`` can be added when constructing a ``Cloudant`` client and configured with an initial back off and retry count. Note: Currently, the connect and read timeout will wait forever for a HTTP connection or a response on all requests. A timeout can be set using the ``timeout`` argument when constructing a client. Connecting with a client ======================== .. 
code-block:: python # Use CouchDB to create a CouchDB client # from cloudant.client import CouchDB # client = CouchDB(USERNAME, PASSWORD, url='http://127.0.0.1:5984', connect=True) # Use Cloudant to create a Cloudant client using account from cloudant.client import Cloudant client = Cloudant(USERNAME, PASSWORD, account=ACCOUNT_NAME, connect=True) # or using url # client = Cloudant(USERNAME, PASSWORD, url='https://acct.cloudant.com') # or with a 429 replay adapter that includes configured retries and initial backoff # client = Cloudant(USERNAME, PASSWORD, account=ACCOUNT_NAME, # adapter=Replay429Adapter(retries=10, initialBackoff=0.01)) # or with a connect and read timeout of 5 minutes # client = Cloudant(USERNAME, PASSWORD, account=ACCOUNT_NAME, # timeout=300) # Perform client tasks... session = client.session() print('Username: {0}'.format(session['userCtx']['name'])) print('Databases: {0}'.format(client.all_dbs())) # Disconnect from the server client.disconnect() ************** Authentication ************** When constructing a ``Cloudant`` client, you can authenticate using the `cookie authentication `_ functionality. The server will always attempt to automatically renew the cookie shortly before its expiry. However, if the client does not send a request to the server during this renewal window and ``auto_renew=False`` then the cookie is not renewed. Using ``auto_renew=True`` will attempt to renew the cookie at any point during the lifetime of the session when either of the following statements hold true: - The server returns a ``credentials_expired`` error message. - The server returns a ``401 Unauthorized`` status code. - The server returns a ``403 Forbidden`` status code. .. 
code-block:: python # Create client using auto_renew to automatically renew expired cookie auth client = Cloudant(USERNAME, PASSWORD, url='https://acct.cloudant.com', connect=True, auto_renew=True) ************************************ Identity and Access Management (IAM) ************************************ IBM Cloud Identity & Access Management enables you to securely authenticate users and control access to all cloud resources consistently in the IBM Bluemix Cloud Platform. See `IBM Cloud Identity and Access Management `_ for more information. The production IAM token service at *https://iam.cloud.ibm.com/identity/token* is used by default. You can set an ``IAM_TOKEN_URL`` environment variable to override this. You can easily connect to your Cloudant account using an IAM API key: .. code-block:: python # Authenticate using an IAM API key client = Cloudant.iam(ACCOUNT_NAME, API_KEY, connect=True) If you need to authenticate to a server outside of the `cloudant.com` domain, you can use the `url` parameter: .. code-block:: python # Authenticate using an IAM API key to an account outside of the cloudant.com domain client = Cloudant.iam(None, API_KEY, url='https://private.endpoint.example', connect=True) **************** Resource sharing **************** The ``Cloudant`` or ``CouchDB`` client objects make HTTP calls using the ``requests`` library. ``requests`` uses the `urllib3 `_ library which features connection pooling and thread safety. Connection pools can be managed by using the ``requests`` library's `HTTPAdapter `_ when constructing a ``Cloudant`` or ``CouchDB`` client instance. The default number set by the ``urllib3`` library for cached connection pools is 10. Use the ``HTTPAdapter`` argument ``pool_connections`` to set the number of urllib3 connection pools to cache, and the ``pool_maxsize`` argument to set the maximum number of connections to save in the pool.
Although the ``client`` session is documented as thread safe and it's possible for a static ``client`` to be accessible by multiple threads, there are still cases that do not guarantee thread safe execution. It's recommended to use one ``client`` object per thread. .. code-block:: python # Create client with 15 cached pool connections and a max pool size of 100 httpAdapter = HTTPAdapter(pool_connections=15, pool_maxsize=100) client = Cloudant(USERNAME, PASSWORD, url='https://acct.cloudant.com', connect=True, adapter=httpAdapter) Note: Idle connections within the pool may be terminated by the server, so will not remain open indefinitely meaning that this will not completely remove the overhead of creating new connections. Using library in app server environment ======================================= This library can be used in an app server, and the example below shows how to use ``client`` in a ``flask`` app server. .. code-block:: python from flask import Flask import atexit app = Flask(__name__) @app.route('/') def hello_world(): # Cookie authentication can be renewed automatically using ``auto_renew=True`` # which is typically what you would require when running in an application # server where the connection may stay open for a long period of time # Note: Each time you instantiate an instance of the Cloudant client, an # authentication request will be made to Cloudant to retrieve the session cookie. # If the performance overhead of this call is a concern for you, consider # using vanilla python requests with a custom subclass of HTTPAdapter that # performs the authentication call to Cloudant when it establishes the http # connection during the creation of the connection pool. client = Cloudant(USERNAME, PASSWORD, url='https://acct.cloudant.com', connect=True, auto_renew=True) # do something with client return 'Hello World!'
# When shutting down the app server, use ``client.disconnect()`` to properly # logout and end the ``client`` session @atexit.register def shutdown(): client.disconnect() ********* Databases ********* Once a connection is established you can then create a database, open an existing database, or delete a database. The following examples assume a client connection has already been established. Creating a database =================== .. code-block:: python # Create a database using an initialized client # The result is a new CloudantDatabase or CouchDatabase based on the client my_database = client.create_database('my_database') # You can check that the database exists if my_database.exists(): print('SUCCESS!!') Opening a database ================== Opening an existing database is done by supplying the name of an existing database to the client. Since the ``Cloudant`` and ``CouchDB`` classes are sub-classes of ``dict``, this can be accomplished through standard Python ``dict`` notation. .. code-block:: python # Open an existing database my_database = client['my_database'] Deleting a database =================== .. code-block:: python # Delete a database using an initialized client client.delete_database('my_database') Partitioned Databases ===================== Partitioned databases introduce the ability for a user to create logical groups of documents called partitions by providing a partition key with each document. .. warning:: Your Cloudant cluster must have the ``partitions`` feature enabled. A full list of enabled features can be retrieved by calling the client :func:`~cloudant.client.CouchDB.metadata` method. Creating a partitioned database ------------------------------- .. code-block:: python db = client.create_database('mydb', partitioned=True) Handling documents ------------------ The document ID contains both the partition key and document key in the form ``:`` where: - Partition Key *(string)*. Must be non-empty. 
Must not contain colons (as this is the partition key delimiter) or begin with an underscore. - Document Key *(string)*. Must be non-empty. Must not begin with an underscore. Be aware that ``_design`` documents and ``_local`` documents must not contain a partition key as they are global definitions. **Create a document** .. code-block:: python partition_key = 'Year2' document_key = 'julia30' db.create_document({ '_id': ':'.join((partition_key, document_key)), 'name': 'Jules', 'age': 6 }) **Get a document** .. code-block:: python doc = db[':'.join((partition_key, document_key))] Creating design documents ------------------------- To define partitioned indexes you must set the ``partitioned=True`` optional when constructing the new ``DesignDocument`` class. .. code-block:: python ddoc = DesignDocument(db, document_id='view', partitioned=True) ddoc.add_view('myview','function(doc) { emit(doc.foo, doc.bar); }') ddoc.save() To define a partitioned Cloudant Query index you may set the ``partitioned=True`` optional, but it is not required as the index will be partitioned by default in a partitioned database. Conversely, you must set the ``partitioned=False`` optional if you wish to create a global (non-partitioned) index in a partitioned database. .. code-block:: python index = db.create_query_index( design_document_id='query', index_name='foo-index', fields=['foo'], partitioned=True ) index.create() Querying data ------------- A partition key can be specified when querying data so that results can be constrained to a specific database partition. .. warning:: To run partitioned queries the database itself must be partitioned. **Query** .. code-block:: python results = self.db.get_partitioned_query_result( partition_key, selector={'foo': {'$eq': 'bar'}}) for result in results: ... See :func:`~cloudant.database.CouchDatabase.get_partitioned_query_result` for a full list of supported parameters. **Search** .. 
code-block:: python results = self.db.get_partitioned_search_result( partition_key, search_ddoc['_id'], 'search1', query='*:*') for result in results['rows']: .... See :func:`~cloudant.database.CloudantDatabase.get_partitioned_search_result` for a full list of supported parameters. **Views (MapReduce)** .. code-block:: python results = self.db.get_partitioned_view_result( partition_key, view_ddoc['_id'], 'view1') for result in results: .... See :func:`~cloudant.database.CouchDatabase.get_partitioned_view_result` for a full list of supported parameters. ********* Documents ********* Working with documents using this library is handled through the use of Document objects and Database API methods. A document context manager is also provided to simplify the process. This is discussed later in the `Context managers`_ section. The examples that follow demonstrate how to create, read, update, and delete a document. These examples assume that either a CloudantDatabase or a CouchDatabase object already exists. Creating a document =================== .. code-block:: python # Create document content data data = { '_id': 'julia30', # Setting _id is optional 'name': 'Julia', 'age': 30, 'pets': ['cat', 'dog', 'frog'] } # Create a document using the Database API my_document = my_database.create_document(data) # Check that the document exists in the database if my_document.exists(): print('SUCCESS!!') Retrieving a document ===================== Accessing a document from a database is done by supplying the document identifier of an existing document to either a ``CloudantDatabase`` or a ``CouchDatabase`` object. Since the ``CloudantDatabase`` and ``CouchDatabase`` classes are sub-classes of ``dict``, this is accomplished through standard ``dict`` notation. .. 
code-block:: python my_document = my_database['julia30'] # Display the document print(my_document) Checking if a document exists ============================= You can check if a document exists in a database the same way you would check if a ``dict`` has a key-value pair by key. .. code-block:: python doc_exists = 'julia30' in my_database if doc_exists: print('document with _id julia30 exists') Retrieve all documents ====================== You can also iterate over a ``CloudantDatabase`` or a ``CouchDatabase`` object to retrieve all documents in a database. .. code-block:: python # Get all of the documents from my_database for document in my_database: print(document) Update a document ================= .. code-block:: python # First retrieve the document my_document = my_database['julia30'] # Update the document content # This can be done as you would any other dictionary my_document['name'] = 'Jules' my_document['age'] = 6 # You must save the document in order to update it on the database my_document.save() Delete a document ================= .. code-block:: python # First retrieve the document my_document = my_database['julia30'] # Delete the document my_document.delete() ******************** Dealing with results ******************** If you want to get Pythonic with your returned data content, we've added a ``Result`` class that provides a key accessible, sliceable, and iterable interface to result collections. To use it, construct a ``Result`` object passing in a reference to a raw data callable such as the ``all_docs`` method from a database object or a ``view`` object itself, which happens to be defined as callable and then access the data as you would using standard Python key access, slicing, and iteration techniques. The following set of examples illustrate ``Result`` key access, slicing and iteration over a result collection in action. It assumes that either a ``CloudantDatabase`` or a ``CouchDatabase`` object already exists. .. 
code-block:: python from cloudant.result import Result, ResultByKey # Retrieve Result wrapped document content. # Note: The include_docs parameter is optional and is used to illustrate that view query # parameters can be used to customize the result collection. result_collection = Result(my_database.all_docs, include_docs=True) # Get the result at a given location in the result collection # Note: Valid result collection indexing starts at 0 result = result_collection[0] # result is the 1st in the collection result = result_collection[9] # result is the 10th in the collection # Get the result for matching a key result = result_collection['julia30'] # result is all that match key 'julia30' # If your key is an integer then use the ResultByKey class to differentiate your integer # key from an indexed location within the result collection which is also an integer. result = result_collection[ResultByKey(9)] # result is all that match key 9 # Slice by key values result = result_collection['julia30': 'ruby99'] # result is between and including keys result = result_collection['julia30': ] # result is after and including key result = result_collection[: 'ruby99'] # result is up to and including key # Slice by index values result = result_collection[100: 200] # result is between 100 to 200, including 200th result = result_collection[: 200] # result is up to and including the 200th result = result_collection[100: ] # result is after the 100th # Iterate over the result collection for result in result_collection: print(result) This example retrieves the query result from the specified database based on the query parameters provided, updates the document, and saves the document in the remote database. By default, the result is returned as a ``QueryResult`` which uses the skip and limit query parameters internally to handle slicing and iteration through the query result collection. 
For more detail on slicing and iteration, refer to the :class:`~cloudant.result.QueryResult` documentation. .. code-block:: python # Retrieve documents where the name field is 'foo' selector = {'name': {'$eq': 'foo'}} docs = my_database.get_query_result(selector) for doc in docs: # Create Document object from dict updated_doc = Document(my_database, doc['_id']) updated_doc.update(doc) # Update document field updated_doc['name'] = 'new_name' # Save document updated_doc.save() **************** Context managers **************** Now that we've gone through the basics, let's take a look at how to simplify the process of connection, database acquisition, and document management through the use of Python *with* blocks and this library's context managers. Handling your business using *with* blocks saves you from having to connect and disconnect your client as well as saves you from having to perform a lot of fetch and save operations as the context managers handle these operations for you. This example uses the ``cloudant`` context helper to illustrate the process but identical functionality exists for CouchDB through the ``couchdb`` and ``couchdb_admin_party`` context helpers. .. code-block:: python from cloudant import cloudant # ...or use CouchDB variant # from cloudant import couchdb # Perform a connect upon entry and a disconnect upon exit of the block with cloudant(USERNAME, PASSWORD, account=ACCOUNT_NAME) as client: # ...or use CouchDB variant # with couchdb(USERNAME, PASSWORD, url=COUCHDB_URL) as client: # Perform client tasks... session = client.session() print('Username: {0}'.format(session['userCtx']['name'])) print('Databases: {0}'.format(client.all_dbs())) # Create a database my_database = client.create_database('my_database') if my_database.exists(): print('SUCCESS!!') # You can open an existing database del my_database my_database = client['my_database'] The following example uses the ``Document`` context manager. 
Here we make multiple updates to a single document. Note that we don't save to the server after each update. We only save once to the server upon exiting the ``Document`` context manager. .. warning:: Uncaught exceptions inside the ``with`` block will prevent your document changes being saved to the remote server. However, changes will still be applied to your local document object. .. code-block:: python from cloudant import cloudant from cloudant.document import Document with cloudant(USERNAME, PASSWORD, account=ACCOUNT_NAME) as client: my_database = client.create_database('my_database') # Upon entry into the document context, fetches the document from the # remote database, if it exists. Upon exit from the context, saves the # document to the remote database with changes made within the context # or creates a new document. with Document(my_database, 'julia006') as document: # If document exists, it's fetched from the remote database # Changes are made locally document['name'] = 'Julia' document['age'] = 6 # The document is saved to the remote database # Display a Document print(my_database['julia006']) # Delete the database client.delete_database('my_database') print('Databases: {0}'.format(client.all_dbs())) Always use the ``_deleted`` document property to delete a document from within a ``Document`` context manager. For example: .. code-block:: python with Document(my_database, 'julia30') as doc: doc['_deleted'] = True *You can also delete non underscore prefixed document keys to reduce the size of the request.* .. warning:: Don't use the ``doc.delete()`` method inside your ``Document`` context manager. This method immediately deletes the document on the server and clears the local document dictionary. A new, empty document is still saved to the server upon exiting the context manager.
**************** Endpoint access **************** If for some reason you need to call a Cloudant/CouchDB endpoint directly rather using the API you can still benefit from the Cloudant/CouchDB client's authentication and session usage by directly accessing its underlying Requests_ session object. Access the session object using the ``r_session`` attribute on your client object. From there, use the session to make requests as the user the client is set up with. The following example shows a ``GET`` to the ``_all_docs`` endpoint, but obviously you can use this for any HTTP request to the Cloudant/CouchDB server. This example assumes that either a ``Cloudant`` or a ``CouchDB`` client object already exists. .. _Requests: http://docs.python-requests.org/en/latest/ .. code-block:: python # Define the end point and parameters end_point = '{0}/{1}'.format(client.server_url, 'my_database/_all_docs') params = {'include_docs': 'true'} # Issue the request response = client.r_session.get(end_point, params=params) # Display the response content print(response.json()) *************** TLS 1.2 Support *************** The TLS protocol is used to encrypt communications across a network to ensure that transmitted data remains private. There are three released versions of TLS: 1.0, 1.1, and 1.2. All HTTPS connections use TLS. If your server enforces the use of TLS 1.2 then the python-cloudant client will continue to work as expected (assuming you're running a version of Python/OpenSSL that supports TLS 1.2). ================================================ FILE: docs/index.rst ================================================ This library is end-of-life and no longer supported. ================================================ FILE: docs/make.bat ================================================ @ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 
set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled echo. coverage to run coverage check of the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) REM Check if sphinx-build is available and fallback to Python version if any %SPHINXBUILD% 2> nul if errorlevel 9009 goto sphinx_python goto sphinx_ok :sphinx_python set SPHINXBUILD=python -m sphinx.__init__ %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. 
echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) :sphinx_ok if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\CloudantPythonclientlibrary.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\CloudantPythonclientlibrary.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. 
The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. 
echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "coverage" ( %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage if errorlevel 1 exit /b 1 echo. echo.Testing of coverage in the sources finished, look at the ^ results in %BUILDDIR%/coverage/python.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end ================================================ FILE: docs/module_index.rst ================================================ index ===== .. automodule:: cloudant.index :members: :undoc-members: :show-inheritance: ================================================ FILE: docs/modules.rst ================================================ Modules ======= .. toctree:: :maxdepth: 2 client database document design_document security_document view query module_index result replicator feed error adapters ================================================ FILE: docs/query.rst ================================================ query ===== .. automodule:: cloudant.query :members: :undoc-members: :special-members: __call__ :show-inheritance: ================================================ FILE: docs/replicator.rst ================================================ replicator ========== .. automodule:: cloudant.replicator :members: :undoc-members: :show-inheritance: ================================================ FILE: docs/result.rst ================================================ result ====== .. 
automodule:: cloudant.result :members: :undoc-members: :special-members: __getitem__, __iter__ :exclude-members: type_or_none :show-inheritance: ================================================ FILE: docs/security_document.rst ================================================ security_document ================= .. automodule:: cloudant.security_document :members: :undoc-members: :show-inheritance: ================================================ FILE: docs/view.rst ================================================ view ==== .. automodule:: cloudant.view :members: :undoc-members: :special-members: __call__ :show-inheritance: ================================================ FILE: pylintrc ================================================ [MASTER] # Specify a configuration file. rcfile=pylintrc # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Profiled execution. profile=no # Add files or directories to the blacklist. They should be base names, not # paths. ignore=CVS # Pickle collected data for later comparisons. persistent=yes # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. load-plugins= # Use multiple processes to speed up Pylint. jobs=1 # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code extension-pkg-whitelist= # Allow optimization of some AST trees. This will activate a peephole AST # optimizer, which will apply various small optimizations. For instance, it can # be used to obtain the result of joining multiple strings with the addition # operator. Joining a lot of strings can lead to a maximum recursion error in # Pylint and this flag can prevent that. 
It has one side effect, the resulting # AST will be different than the one from reality. optimize-ast=no [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED confidence= # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time. See also the "--disable" option for examples. #enable= # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once).You can also use "--disable=all" to # disable everything first and then reenable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" # Disable "redefined-variable-type" refactor warning messages # Disable "too-many-..." and "too-few-..." refactor warning messages # Disable "locally-disabled" message # Disable Python 3 "useless-object-inheritance" message disable=R0204,R0901,R0902,R0903,R0904,R0913,R0914,R0915,locally-disabled,keyword-arg-before-vararg,useless-object-inheritance [REPORTS] # Set the output format. Available formats are text, parseable, colorized, msvs # (visual studio) and html. You can also give a reporter class, eg # mypackage.mymodule.MyReporterClass. output-format=text # Put messages in a separate file for each module / package specified on the # command line instead of printing them on stdout. Reports (if any) will be # written in a file name "pylint_global.[txt|html]". 
files-output=no # Tells whether to display a full report or only the messages reports=yes # Python expression which should return a note less than 10 (10 is the highest # note). You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. This is used by the global evaluation report # (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Add a comment according to your evaluation note. This is used by the global # evaluation report (RP0004). comment=no # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details #msg-template= [BASIC] # Required attributes for module, separated by a comma required-attributes= # List of builtins function names that should not be used, separated by a comma bad-functions=map,filter,input # Good variable names which should always be accepted, separated by a comma good-names=i,j,k,ex,Run,_,db # Bad variable names which should always be refused, separated by a comma bad-names=foo,bar,baz,toto,tutu,tata # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. 
name-group= # Include a hint for the correct naming format with invalid-name include-naming-hint=no # Regular expression matching correct function names function-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for function names function-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct variable names variable-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for variable names variable-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct constant names const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Naming hint for constant names const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Regular expression matching correct attribute names attr-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for attribute names attr-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct argument names argument-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for argument names argument-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct class attribute names class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ # Naming hint for class attribute names class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ # Regular expression matching correct inline iteration names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Naming hint for inline iteration names inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ # Regular expression matching correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ # Naming hint for class names class-name-hint=[A-Z_][a-zA-Z0-9]+$ # Regular expression matching correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Naming hint for module names module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Regular expression matching correct method names method-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for method names method-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression which should only match function or class names that do # not require a docstring. 
no-docstring-rgx=__.*__ # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=-1 [FORMAT] # Maximum number of characters on a single line. max-line-length=100 # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no # List of optional constructs for which whitespace checking is disabled no-space-check=trailing-comma,dict-separator # Maximum number of lines in a module max-module-lines=2000 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= [LOGGING] # Logging modules to check that the string format arguments are in logging # function parameter format logging-modules=logging [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME,XXX,TODO [SIMILARITIES] # Minimum lines number of a similarity. min-similarity-lines=5 # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=no [SPELLING] # Spelling dictionary name. Available dictionaries: none. To make it working # install python-enchant package. spelling-dict= # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to indicated private dictionary in # --spelling-private-dict-file option instead of raising a message. 
spelling-store-unknown-words=no [TYPECHECK] # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis ignored-modules= # List of classes names for which member attributes should not be checked # (useful for classes with attributes dynamically set). ignored-classes=SQLObject # When zope mode is activated, add a predefined set of Zope acquired attributes # to generated-members. zope=no # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E0201 when accessed. Python regular # expressions are accepted. generated-members=REQUEST,acl_users,aq_parent [VARIABLES] # Tells whether we should check for unused import in __init__ files. init-import=no # A regular expression matching the name of dummy variables (i.e. expectedly # not used). dummy-variables-rgx=_$|dummy # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. additional-builtins= # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_,_cb [CLASSES] # List of interface methods to ignore, separated by a comma. This is used for # instance to not check methods defines in Zope's Interface base class. ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by # List of method names used to declare (i.e. assign) instance attributes. 
defining-attr-methods=__init__,__new__,setUp # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=mcs # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict,_fields,_replace,_source,_make [DESIGN] # Maximum number of arguments for function / method max-args=5 # Argument names that match this expression will be ignored. Default to name # with leading underscore ignored-argument-names=_.* # Maximum number of locals for function / method body max-locals=15 # Maximum number of return / yield for function / method body max-returns=6 # Maximum number of branch for function / method body max-branches=12 # Maximum number of statements in function / method body max-statements=50 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Minimum number of public methods for a class (see R0903). min-public-methods=2 # Maximum number of public methods for a class (see R0904). max-public-methods=20 [IMPORTS] # Deprecated modules which should not be used, separated by a comma deprecated-modules=regsub,TERMIOS,Bastion,rexec # Create a graph of every (i.e. internal and external) dependencies in the # given file (report RP0402 must not be disabled) import-graph= # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled) ext-import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled) int-import-graph= [EXCEPTIONS] # Exceptions that will emit a warning when being caught. 
Defaults to # "Exception" overgeneral-exceptions=Exception ================================================ FILE: requirements.txt ================================================ requests >=2.7.0, <3.0.0 ================================================ FILE: setup.py ================================================ #!/usr/bin/env python # Copyright (c) 2015 IBM. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ _setup.py_ Cloudant / CouchDB Client Library """ from io import open from os import path from setuptools import setup, find_packages requirements_file = open('requirements.txt') requirements = requirements_file.read().strip().split('\n') requirements_file.close() version_file = open('VERSION') version = version_file.read().strip() version_file.close() this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup_args = { 'description': 'Cloudant / CouchDB Client Library', 'long_description': long_description, 'long_description_content_type': 'text/markdown', 'include_package_data': True, 'install_requires': requirements, 'name': 'cloudant', 'version': version, 'author': 'IBM', 'author_email': 'alfinkel@us.ibm.com', 'url': 'https://github.com/cloudant/python-cloudant', 'packages': find_packages('./src'), 'provides': find_packages('./src'), 'package_dir': {'': 'src'}, 'classifiers': [ 'Intended Audience :: Developers', 'Natural Language :: English', 
# Copyright (c) 2016, 2017 IBM. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Python 2 to 3 compatibility methods

The philosophy employed here is to treat py2 as the special case vs. py3 as
future Python releases presumably will retain new semantics in py3.
"""
import sys

# True when running under a Python 2 interpreter.
PY2 = sys.version_info[0] < 3
ENCODING = 'utf-8'

NONETYPE = type(None)
# The py2-only names below are undefined on py3, but the conditional
# expressions only evaluate them when PY2 is true, so this parses and runs
# safely on both major versions.
STRTYPE = basestring if PY2 else str  # pylint: disable=undefined-variable
UNITYPE = unicode if PY2 else str  # pylint: disable=undefined-variable
LONGTYPE = long if PY2 else int  # pylint: disable=undefined-variable
UNICHR = unichr if PY2 else chr  # pylint: disable=undefined-variable

if PY2:
    # pylint: disable=wrong-import-position,no-name-in-module,import-error,unused-import
    from urllib import quote as url_quote, quote_plus as url_quote_plus
    from urlparse import urlparse as url_parse
    from urlparse import urljoin as url_join
    from ConfigParser import RawConfigParser
    from cookielib import Cookie

    def iteritems_(adict):
        """
        iterate dict key, value tuples in a py2 and 3 compatible way

        :param dict adict:
        :return: iterator of (key, value) tuples
        """
        return adict.iteritems()

    def next_(itr):
        """
        return next item from an iterable in a py2 and 3 compatible way

        :param Iterable itr:
        :return: the next item in itr
        """
        return itr.next()
else:
    from urllib.parse import urlparse as url_parse  # pylint: disable=wrong-import-position,no-name-in-module,import-error,ungrouped-imports
    from urllib.parse import urljoin as url_join  # pylint: disable=wrong-import-position,no-name-in-module,import-error,ungrouped-imports
    from urllib.parse import quote as url_quote  # pylint: disable=wrong-import-position,no-name-in-module,import-error,ungrouped-imports
    from urllib.parse import quote_plus as url_quote_plus  # pylint: disable=wrong-import-position,no-name-in-module,import-error,ungrouped-imports
    from configparser import RawConfigParser  # pylint: disable=wrong-import-position,no-name-in-module,import-error
    from http.cookiejar import Cookie  # pylint: disable=wrong-import-position,no-name-in-module,import-error

    def iteritems_(adict):
        """
        iterate dict key, value tuples in a py2 and 3 compatible way

        :param dict adict:
        :return: iterator of (key, value) tuples
        """
        return adict.items()

    def next_(itr):
        """
        return the next item in an iterable in a py2 and 3 compatible way

        :param Iterable itr:
        :return: the next item in itr
        """
        return next(itr)


def bytes_(astr):
    """
    return a bytes representation of astr in a py2 and 3 compatible way

    Values that lack an ``encode`` method (already bytes) pass through
    unchanged.

    :param str astr:
    :return: bytes object
    """
    return astr.encode(ENCODING) if hasattr(astr, 'encode') else astr


def unicode_(astr):
    """
    return a unicode string representation of astr in a py2 and 3 compatible
    way

    Values that lack a ``decode`` method (already unicode/str) pass through
    unchanged.

    :param bytes astr:
    :return: unicode string
    """
    return astr.decode(ENCODING) if hasattr(astr, 'decode') else astr
@contextlib.contextmanager
def cloudant_iam(account_name, api_key, **kwargs):
    """
    Provides a context manager to create a Cloudant session using IAM
    authentication and provide access to databases, docs etc.

    :param account_name: Cloudant account name.
    :param api_key: IAM authentication API key.

    For example:

    .. code-block:: python

        # cloudant_iam context manager
        from cloudant import cloudant_iam

        with cloudant_iam(ACCOUNT_NAME, API_KEY) as client:
            # Context handles connect() and disconnect() for you.
            # Perform library operations within this context.  Such as:
            print(client.all_dbs())
            # ...

    """
    cloudant_session = Cloudant.iam(account_name, api_key, **kwargs)
    cloudant_session.connect()
    try:
        yield cloudant_session
    finally:
        # Ensure the session is torn down even if the caller's block raises.
        cloudant_session.disconnect()


@contextlib.contextmanager
def cloudant_bluemix(vcap_services, instance_name=None, service_name=None, **kwargs):
    """
    Provides a context manager to create a Cloudant session and provide
    access to databases, docs etc.

    :param vcap_services: VCAP_SERVICES environment variable
    :type vcap_services: dict or str
    :param str instance_name: Optional Bluemix instance name. Only required
        if multiple Cloudant instances are available.
    :param str service_name: Optional Bluemix service name.
    :param str encoder: Optional json Encoder object used to encode
        documents for storage. Defaults to json.JSONEncoder.

    Loads all configuration from the specified VCAP_SERVICES Cloud Foundry
    environment variable. The VCAP_SERVICES variable contains connection
    information to access a service instance. For example:

    .. code-block:: json

        {
            "VCAP_SERVICES": {
                "cloudantNoSQLDB": [
                    {
                        "credentials": {
                            "apikey": "some123api456key",
                            "username": "example",
                            "password": "xxxxxxx",
                            "host": "example.cloudant.com",
                            "port": 443,
                            "url": "https://example:xxxxxxx@example.cloudant.com"
                        },
                        "syslog_drain_url": null,
                        "label": "cloudantNoSQLDB",
                        "provider": null,
                        "plan": "Lite",
                        "name": "Cloudant NoSQL DB"
                    }
                ]
            }
        }

    See `Cloud Foundry Environment Variables
    <https://docs.cloudfoundry.org/devguide/deploy-apps/environment-variable.html#VCAP-SERVICES>`_.

    Example usage:

    .. code-block:: python

        import os

        # cloudant_bluemix context manager
        from cloudant import cloudant_bluemix

        with cloudant_bluemix(os.getenv('VCAP_SERVICES'), 'Cloudant NoSQL DB') as client:
            # Context handles connect() and disconnect() for you.
            # Perform library operations within this context.  Such as:
            print(client.all_dbs())
            # ...

    """
    cloudant_session = Cloudant.bluemix(
        vcap_services,
        instance_name=instance_name,
        service_name=service_name,
        **kwargs
    )
    cloudant_session.connect()
    try:
        yield cloudant_session
    finally:
        # Ensure the session is torn down even if the caller's block raises.
        cloudant_session.disconnect()


@contextlib.contextmanager
def couchdb(user, passwd, **kwargs):
    """
    Provides a context manager to create a CouchDB session and provide access
    to databases, docs etc.

    :param str user: Username used to connect to CouchDB.
    :param str passwd: Passcode used to connect to CouchDB.
    :param str url: URL for CouchDB server.
    :param str encoder: Optional json Encoder object used to encode
        documents for storage. Defaults to json.JSONEncoder.

    For example:

    .. code-block:: python

        # couchdb context manager
        from cloudant import couchdb

        with couchdb(USERNAME, PASSWORD, url=COUCHDB_URL) as client:
            # Context handles connect() and disconnect() for you.
            # Perform library operations within this context.  Such as:
            print(client.all_dbs())
            # ...

    """
    couchdb_session = CouchDB(user, passwd, **kwargs)
    couchdb_session.connect()
    try:
        yield couchdb_session
    finally:
        # Ensure the session is torn down even if the caller's block raises.
        couchdb_session.disconnect()


@contextlib.contextmanager
def couchdb_admin_party(**kwargs):
    """
    Provides a context manager to create a CouchDB session in Admin Party
    mode and provide access to databases, docs etc.

    :param str url: URL for CouchDB server.
    :param str encoder: Optional json Encoder object used to encode
        documents for storage. Defaults to json.JSONEncoder.

    For example:

    .. code-block:: python

        # couchdb_admin_party context manager
        from cloudant import couchdb_admin_party

        with couchdb_admin_party(url=COUCHDB_URL) as client:
            # Context handles connect() and disconnect() for you.
            # Perform library operations within this context.  Such as:
            print(client.all_dbs())
            # ...

    """
    # Third positional argument enables Admin Party (no credentials) mode.
    couchdb_session = CouchDB(None, None, True, **kwargs)
    couchdb_session.connect()
    try:
        yield couchdb_session
    finally:
        # Ensure the session is torn down even if the caller's block raises.
        couchdb_session.disconnect()
class BasicSession(ClientSession):
    """
    ClientSession subclass that applies HTTP basic access authentication to
    every request, provided both a username and password are configured.
    """

    def __init__(self, username, password, server_url, **kwargs):
        super(BasicSession, self).__init__(
            username=username,
            password=password,
            session_url=url_join(server_url, '_session'),
            **kwargs)

    def request(self, method, url, **kwargs):
        """
        Overrides ``requests.Session.request``, injecting the stored
        credentials as basic access authentication when both are present.
        """
        if self._username is None or self._password is None:
            credentials = None
        else:
            credentials = (self._username, self._password)
        return super(BasicSession, self).request(
            method, url, auth=credentials, **kwargs)


class CookieSession(ClientSession):
    """
    ClientSession subclass that authenticates via a server session cookie
    obtained from the ``_session`` endpoint.
    """

    def __init__(self, username, password, server_url, **kwargs):
        super(CookieSession, self).__init__(
            username=username,
            password=password,
            session_url=url_join(server_url, '_session'),
            **kwargs)

    def login(self):
        """
        Perform cookie based user login.
        """
        login_resp = super(CookieSession, self).request(
            'POST',
            self._session_url,
            data={'name': self._username, 'password': self._password},
        )
        login_resp.raise_for_status()

    def logout(self):
        """
        Logout cookie based user.
        """
        logout_resp = super(CookieSession, self).request(
            'DELETE', self._session_url)
        logout_resp.raise_for_status()

    def request(self, method, url, **kwargs):
        """
        Overrides ``requests.Session.request``; when auto renewal is enabled
        and the session cookie has expired, logs in again and replays the
        original request once.
        """
        response = super(CookieSession, self).request(method, url, **kwargs)

        if not self._auto_renew:
            return response

        # A 401, or a 403 whose body reports expired credentials, indicates
        # the session cookie needs renewing.
        if response.status_code == 401 or (
                response.status_code == 403 and
                response_to_json_dict(response).get('error') ==
                'credentials_expired'):
            self.login()
            response = super(CookieSession, self).request(
                method, url, **kwargs)

        return response
""" access_token = self._get_access_token() try: super(IAMSession, self).request( 'POST', self._session_url, headers={'Content-Type': 'application/json'}, data=json.dumps({'access_token': access_token}) ).raise_for_status() except RequestException: raise CloudantException( 'Failed to exchange IAM token with Cloudant') def logout(self): """ Logout IAM cookie based user. """ self.cookies.clear() def request(self, method, url, **kwargs): """ Overrides ``requests.Session.request`` to renew the IAM cookie and then retry the original request (if required). """ # The CookieJar API prevents callers from getting an individual Cookie # object by name. # We are forced to use the only exposed method of discarding expired # cookies from the CookieJar. Internally this involves iterating over # the entire CookieJar and calling `.is_expired()` on each Cookie # object. self.cookies.clear_expired_cookies() if self._auto_renew and 'IAMSession' not in self.cookies.keys(): self.login() resp = super(IAMSession, self).request(method, url, **kwargs) if not self._auto_renew: return resp if resp.status_code == 401: self.login() resp = super(IAMSession, self).request(method, url, **kwargs) return resp # pylint: disable=arguments-differ, unused-argument def set_credentials(self, username, api_key): """ Set a new IAM API key. :param str username: Username parameter is unused. :param str api_key: New IAM API key. """ if api_key is not None: self._api_key = api_key def _get_access_token(self): """ Get IAM access token using API key. 
""" err = 'Failed to contact IAM token service' try: resp = super(IAMSession, self).request( 'POST', self._token_url, auth=self._token_auth, headers={'Accepts': 'application/json'}, data={ 'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'response_type': 'cloud_iam', 'apikey': self._api_key } ) err = response_to_json_dict(resp).get('errorMessage', err) resp.raise_for_status() return response_to_json_dict(resp)['access_token'] except KeyError: raise CloudantException('Invalid response from IAM token service') except RequestException: raise CloudantException(err) ================================================ FILE: src/cloudant/_common_util.py ================================================ #!/usr/bin/env python # Copyright © 2015, 2021 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Module containing miscellaneous classes, functions, and constants used throughout the library. 
""" import sys import platform import json from ._2to3 import LONGTYPE, STRTYPE, NONETYPE, UNITYPE, iteritems_ from .error import CloudantArgumentError, CloudantException, CloudantClientException try: from collections.abc import Sequence except ImportError: from collections import Sequence # Library Constants DESIGN_PREFIX = '_design/' LOCAL_PREFIX = '_local/' USER_AGENT = '/'.join([ 'python-cloudant', sys.modules['cloudant'].__version__, 'Python', '{0}.{1}.{2}'.format( sys.version_info[0], sys.version_info[1], sys.version_info[2]), platform.system(), platform.machine() ]) QUERY_LANGUAGE = 'query' # Index Types JSON_INDEX_TYPE = 'json' TEXT_INDEX_TYPE = 'text' SPECIAL_INDEX_TYPE = 'special' # Argument Types ANY_ARG = object() ANY_TYPE = object() RESULT_ARG_TYPES = { 'descending': (bool,), 'endkey': (int, LONGTYPE, STRTYPE, Sequence, bool,), 'endkey_docid': (STRTYPE,), 'group': (bool,), 'group_level': (int, LONGTYPE, NONETYPE,), 'include_docs': (bool,), 'inclusive_end': (bool,), 'key': (int, LONGTYPE, STRTYPE, Sequence, bool,), 'keys': (list,), 'limit': (int, LONGTYPE, NONETYPE,), 'reduce': (bool,), 'skip': (int, LONGTYPE, NONETYPE,), 'stable': (bool,), 'stale': (STRTYPE,), 'startkey': (int, LONGTYPE, STRTYPE, Sequence, bool,), 'startkey_docid': (STRTYPE,), 'update': (STRTYPE,), } # pylint: disable=unnecessary-lambda TYPE_CONVERTERS = { STRTYPE: lambda x: json.dumps(x), str: lambda x: json.dumps(x), UNITYPE: lambda x: json.dumps(x), Sequence: lambda x: json.dumps(list(x)), list: lambda x: json.dumps(x), tuple: lambda x: json.dumps(list(x)), int: lambda x: x, LONGTYPE: lambda x: x, bool: lambda x: 'true' if x else 'false', NONETYPE: lambda x: x } _COUCH_DB_UPDATES_ARG_TYPES = { 'feed': (STRTYPE,), 'heartbeat': (bool,), 'timeout': (int, LONGTYPE, NONETYPE,), } _DB_UPDATES_ARG_TYPES = { 'descending': (bool,), 'limit': (int, LONGTYPE, NONETYPE,), 'since': (int, LONGTYPE, STRTYPE,), } _DB_UPDATES_ARG_TYPES.update(_COUCH_DB_UPDATES_ARG_TYPES) 
_DB_UPDATES_ARG_TYPES['heartbeat'] = (int, LONGTYPE, NONETYPE,) _CHANGES_ARG_TYPES = { 'conflicts': (bool,), 'doc_ids': (list,), 'filter': (STRTYPE,), 'include_docs': (bool,), 'style': (STRTYPE,), ANY_ARG: ANY_TYPE # pass arbitrary query parameters to a custom filter } _CHANGES_ARG_TYPES.update(_DB_UPDATES_ARG_TYPES) QUERY_ARG_TYPES = { 'selector': dict, 'limit': (int, LONGTYPE, NONETYPE), 'skip': (int, LONGTYPE, NONETYPE), 'sort': list, 'fields': list, 'r': (int, LONGTYPE, NONETYPE), 'bookmark': STRTYPE, 'use_index': STRTYPE } TEXT_INDEX_ARGS = {'fields': list, 'default_field': dict, 'selector': dict} SEARCH_INDEX_ARGS = { 'bookmark': STRTYPE, 'counts': list, 'drilldown': list, 'group_field': STRTYPE, 'group_limit': (int, NONETYPE), 'group_sort': (STRTYPE, list), 'include_docs': bool, 'limit': (int, NONETYPE), 'query': (STRTYPE, int, LONGTYPE), 'q': (STRTYPE, int, LONGTYPE), 'ranges': dict, 'sort': (STRTYPE, list), 'stale': STRTYPE, 'highlight_fields': list, 'highlight_pre_tag': STRTYPE, 'highlight_post_tag': STRTYPE, 'highlight_number': (int, LONGTYPE, NONETYPE), 'highlight_size': (int, LONGTYPE, NONETYPE), 'include_fields': list, 'partition': STRTYPE } # Functions def feed_arg_types(feed_type): """ Return the appropriate argument type dictionary based on the type of feed. """ if feed_type == 'Cloudant': return _DB_UPDATES_ARG_TYPES if feed_type == 'CouchDB': return _COUCH_DB_UPDATES_ARG_TYPES return _CHANGES_ARG_TYPES def python_to_couch(options, encoder=None): """ Translates query options from python style options into CouchDB/Cloudant query options. For example ``{'include_docs': True}`` will translate to ``{'include_docs': 'true'}``. Primarily meant for use by code that formulates a query to retrieve results data from the remote database, such as the database API convenience method :func:`~cloudant.database.CouchDatabase.all_docs` or the View :func:`~cloudant.view.View.__call__` callable, both used to retrieve data. 
:param dict options: Python style parameters to be translated. :param encoder: Custom encoder, defaults to None :returns: Dictionary of translated CouchDB/Cloudant query parameters """ translation = dict() for key, val in iteritems_(options): py_to_couch_validate(key, val) translation.update(_py_to_couch_translate(key, val, encoder)) return translation def py_to_couch_validate(key, val): """ Validates the individual parameter key and value. """ if key not in RESULT_ARG_TYPES: raise CloudantArgumentError(116, key) # pylint: disable=unidiomatic-typecheck # Validate argument values and ensure that a boolean is not passed in # if an integer is expected if (not isinstance(val, RESULT_ARG_TYPES[key]) or (type(val) is bool and bool not in RESULT_ARG_TYPES[key] and int in RESULT_ARG_TYPES[key])): raise CloudantArgumentError(117, key, RESULT_ARG_TYPES[key]) if key == 'keys': for key_list_val in val: if (not isinstance(key_list_val, RESULT_ARG_TYPES['key']) or isinstance(key_list_val, bool)): raise CloudantArgumentError(134, RESULT_ARG_TYPES['key']) if key == 'stale': if val not in ('ok', 'update_after'): raise CloudantArgumentError(135, val) def _py_to_couch_translate(key, val, encoder=None): """ Performs the conversion of the Python parameter value to its CouchDB equivalent. """ try: if key in ['keys', 'endkey_docid', 'startkey_docid', 'stale', 'update']: return {key: val} if key in ['endkey', 'key', 'startkey']: return {key: json.dumps(val, cls=encoder)} if val is None: return {key: None} arg_converter = TYPE_CONVERTERS.get(type(val)) return {key: arg_converter(val)} except Exception as ex: raise CloudantArgumentError(136, key, ex) def type_or_none(typerefs, value): """ Provides a helper function to check that a value is of the types passed or None. """ return isinstance(value, typerefs) or value is None def codify(code_or_str): """ Provides a helper to rationalize code content. 
""" if code_or_str is None: return None if not isinstance(code_or_str, _Code): return _Code(code_or_str) return code_or_str def get_docs(r_session, url, encoder=None, headers=None, **params): """ Provides a helper for functions that require GET or POST requests with a JSON, text, or raw response containing documents. :param r_session: Authentication session from the client :param str url: URL containing the endpoint :param JSONEncoder encoder: Custom encoder from the client :param dict headers: Optional HTTP Headers to send with the request :returns: Raw response content from the specified endpoint """ keys_list = params.pop('keys', None) keys = None if keys_list is not None: keys = json.dumps({'keys': keys_list}, cls=encoder) f_params = python_to_couch(params, encoder) resp = None if keys is not None: # If we're using POST we are sending JSON so add the header if headers is None: headers = {} headers['Content-Type'] = 'application/json' resp = r_session.post(url, headers=headers, params=f_params, data=keys) else: resp = r_session.get(url, headers=headers, params=f_params) resp.raise_for_status() return resp #pylint: disable=unused-argument def append_response_error_content(response, **kwargs): """ Provides a helper to act as callback function for the response event hook and add a HTTP response error with reason message to ``response.reason``. The ``response`` and ``**kwargs`` are necessary for this function to properly operate as the callback. :param response: HTTP response object :param kwargs: HTTP request parameters """ if response.status_code >= 400: try: resp_dict = response_to_json_dict(response) error = resp_dict.get('error', '') reason = resp_dict.get('reason', '') # Append to the existing response's reason response.reason += ' {0} {1}'.format(error, reason) except ValueError: pass return response def response_to_json_dict(response, **kwargs): """ Standard place to convert responses to JSON. 
:param response: requests response object :param **kwargs: arguments accepted by json.loads :returns: dict of JSON response """ if response.encoding is None: response.encoding = 'utf-8' return json.loads(response.text, **kwargs) def assert_document_type_id(docid): """ Validate the document ID. Raises an error if the ID is an `_` prefixed name that isn't either `_design` or `_local`. :return: """ invalid = False if docid.startswith('_'): if docid.startswith(DESIGN_PREFIX) and DESIGN_PREFIX != docid: invalid = False elif docid.startswith(LOCAL_PREFIX) and LOCAL_PREFIX != docid: invalid = False else: invalid = True if invalid: raise CloudantArgumentError(137, docid) def assert_attachment_name(attname): """ Validate the document attachment's name. Raises an error if `_` prefixed name exists. :return: """ if attname.startswith('_'): raise CloudantArgumentError(138, attname) # Classes class _Code(str): """ Wraps a ``str`` object as a _Code object providing the means to handle Javascript blob content. Used internally by the View object when codifying map and reduce Javascript content. """ def __new__(cls, code): if type(code).__name__ == 'unicode': return str.__new__(cls, code.encode('utf8')) return str.__new__(cls, code) class CloudFoundryService(object): """ Manages Cloud Foundry service configuration. 
""" def __init__(self, vcap_services, instance_name=None, service_name=None): try: services = vcap_services if not isinstance(vcap_services, dict): services = json.loads(vcap_services) cloudant_services = services.get(service_name, []) # use first service if no name given and only one service present use_first = instance_name is None and len(cloudant_services) == 1 for service in cloudant_services: if use_first or service.get('name') == instance_name: credentials = service['credentials'] self._host = credentials['host'] self._name = service.get('name') self._port = credentials.get('port', 443) self._username = credentials['username'] if 'apikey' in credentials: self._iam_api_key = credentials['apikey'] elif 'username' in credentials and 'password' in credentials: self._password = credentials['password'] else: raise CloudantClientException(103) break else: raise CloudantException('Missing service in VCAP_SERVICES') except KeyError as ex: raise CloudantException( "Invalid service: '{0}' missing".format(ex.args[0]) ) except TypeError: raise CloudantException( 'Failed to decode VCAP_SERVICES service credentials' ) except ValueError: raise CloudantException('Failed to decode VCAP_SERVICES JSON') @property def host(self): """ Return service host. """ return self._host @property def name(self): """ Return service name. """ return self._name @property def password(self): """ Return service password. """ return self._password @property def port(self): """ Return service port. """ return str(self._port) @property def url(self): """ Return service url. """ return 'https://{0}:{1}'.format(self._host, self._port) @property def username(self): """ Return service username. """ return self._username @property def iam_api_key(self): """ Return service IAM API key. """ return self._iam_api_key ================================================ FILE: src/cloudant/_messages.py ================================================ #!/usr/bin/env python # Copyright © 2016, 2021 IBM Corp. 
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module that contains exception messages for the Cloudant Python client
library.

Each table maps a numeric message code to a format string; codes at 400
and above mirror the corresponding HTTP status code.
"""

ARGUMENT_ERROR = {
    100: 'A general Cloudant argument error was raised.',
    # Client
    101: 'Invalid year and/or month supplied.  Found: year - {0}, month - {1}',
    # Database
    102: 'Invalid role(s) provided: {0}.  Valid roles are: {1}',
    103: 'Invalid index type: {0}.  Index type must be '
         'either \"json\" or \"text\".',
    104: 'A single query/q parameter is required.  Found: {0}',
    105: 'Invalid argument: {0}',
    106: 'Argument {0} is not an instance of expected type: {1}',
    # Design document
    107: 'View {0} already exists in this design doc.',
    108: 'An index with name {0} already exists in this design doc.',
    109: 'A list with name {0} already exists in this design doc.',
    110: 'A show function with name {0} already exists in this design doc.',
    111: 'View {0} does not exist in this design doc.',
    112: 'An index with name {0} does not exist in this design doc.',
    113: 'A list with name {0} does not exist in this design doc.',
    114: 'A show function with name {0} does not exist in this design doc.',
    # Feed
    115: 'Error converting argument {0}: {1}',
    116: 'Invalid argument {0}',
    117: 'Argument {0} not instance of expected type: {1}',
    118: 'Argument {0} must be > 0.  Found: {1}',
    119: 'Invalid value ({0}) for feed option.  Must be one of {1}',
    120: 'Invalid value ({0}) for style option.  Must be main_only, '
         'or all_docs.',
    121: 'Invalid infinite feed option: {0}.  Must be set to continuous.',
    # Index
    122: 'The design document id: {0} is not a string.',
    123: 'The index name: {0} is not a string.',
    124: '{0} provided as argument(s).  A JSON index requires that '
         'only a \'fields\' argument is provided.',
    125: 'Deleting an index requires a design document id be provided.',
    126: 'Deleting an index requires an index name be provided.',
    127: 'Invalid argument: {0}',
    128: 'Argument {0} is not an instance of expected type: {1}',
    # Query
    129: 'Invalid argument: {0}',
    130: 'Argument {0} is not an instance of expected type: {1}',
    131: 'No selector in the query or the selector was empty.  '
         'Add a selector to define the query and retry.',
    # View
    132: 'The map property must be a dictionary.',
    133: 'The reduce property must be a string.',
    # Common_util
    134: 'Key list element not of expected type: {0}',
    135: 'Invalid value for stale option {0} must be ok or update_after.',
    136: 'Error converting argument {0}: {1}',
    137: 'Invalid document ID: {0}',
    138: 'Invalid attachment name: {0}'
}

CLIENT = {
    100: 'A general Cloudant client exception was raised.',
    101: 'Value must be set to a Database object.  Found type: {0}',
    102: 'You must provide a url or an account.',
    103: 'Invalid service: IAM API key or username/password credentials are required.',
    404: 'Database {0} does not exist.  Verify that the client is valid and try again.',
    412: 'Database {0} already exists.'
}

DATABASE = {
    100: 'A general Cloudant database exception was raised.',
    101: 'Unexpected index type.  Found: {0}',
    400: 'Invalid database name during creation.  Found: {0}',
    401: 'Unauthorized to create database {0}',
    409: 'Document with id {0} already exists.',
    412: 'Database {0} already exists.'
}

DESIGN_DOCUMENT = {
    100: 'A general Cloudant design document exception was raised.',
    101: 'Cannot add a MapReduce view to a design document for query indexes.',
    102: 'Cannot update a query index view using this method.',
    103: 'Cannot delete a query index view using this method.',
    104: 'View {0} must be of type View.',
    105: 'View {0} must be of type QueryIndexView.',
    106: 'Function for search index {0} must be of type string.',
    107: 'Definition for query text index {0} must be of type dict.'
}

DOCUMENT = {
    100: 'A general Cloudant document exception was raised.',
    101: 'A document id is required to fetch document contents.  '
         'Add an _id key and value to the document and re-try.',
    102: 'The field {0} is not a list.',
    103: 'Attempting to delete a doc with no _rev.  Try running .fetch and re-try.'
}

FEED = {
    100: 'A general Cloudant feed exception was raised.',
    101: 'Infinite _db_updates feed not supported for CouchDB.'
}

INDEX = {
    100: 'A general Cloudant index exception was raised.',
    101: 'Creating the \"special\" index is not allowed.',
    102: 'Deleting the \"special\" index is not allowed.'
}

REPLICATOR = {
    100: 'A general Cloudant replicator exception was raised.',
    101: 'You must specify either a source_db Database object or a manually composed'
         ' \'source\' string/dict.',
    102: 'You must specify either a target_db Database object or a manually composed'
         ' \'target\' string/dict.',
    404: 'Replication with id {0} not found.'
}

RESULT = {
    100: 'A general result exception was raised.',
    101: 'Failed to interpret the argument {0} as a valid key value or as a valid slice.',
    102: 'Cannot use {0} when performing key access or key slicing.  Found {1}',
    103: 'Cannot use {0} for iteration.  Found {1}',
    104: 'Invalid page_size: {0}'
}

VIEW = {
    100: 'A general view exception was raised.',
    101: 'A QueryIndexView is not callable.  If you wish to execute a query '
         'use the database \'get_query_result\' convenience method.',
    102: 'Cannot create a custom result context manager using a '
         'QueryIndexView.  If you wish to execute a query use the '
         'database \'get_query_result\' convenience method instead.'
}

================================================
FILE: src/cloudant/adapters.py
================================================

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright © 2016 IBM Corp. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module that contains default transport adapters for use with requests.
"""
from requests.adapters import HTTPAdapter
from requests.packages import urllib3


class Replay429Adapter(HTTPAdapter):
    """
    A requests TransportAdapter that extends the default HTTPAdapter with
    configuration to replay requests that receive a 429 Too Many Requests
    response from the server.  The duration of the sleep between requests
    will be doubled for each 429 response received.

    Parameters can be passed in to control behavior:

    :param int retries: the number of times the request can be replayed
        before failing.
    :param float initialBackoff: time in seconds for the first backoff.
""" def __init__(self, retries=3, initialBackoff=0.25): super(Replay429Adapter, self).__init__(max_retries=urllib3.util.Retry( # Configure the number of retries for status codes total=retries, # No retries for connect|read errors connect=0, read=0, # Allow retries for all the CouchDB HTTP method types method_whitelist=frozenset(['GET', 'HEAD', 'PUT', 'POST', 'DELETE', 'COPY']), # Only retry for a 429 too many requests status code status_forcelist=[429], # Configure the start value of the doubling backoff backoff_factor=initialBackoff)) ================================================ FILE: src/cloudant/client.py ================================================ #!/usr/bin/env python # Copyright (c) 2015, 2021 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Top level API module that maps to a Cloudant or CouchDB client connection instance. """ import json from ._2to3 import url_parse from ._client_session import ( BasicSession, ClientSession, CookieSession, IAMSession ) from .database import CloudantDatabase, CouchDatabase from .feed import Feed, InfiniteFeed from .error import ( CloudantArgumentError, CloudantClientException, CloudantDatabaseException, CloudantException) from ._common_util import ( USER_AGENT, append_response_error_content, CloudFoundryService, response_to_json_dict, ) class CouchDB(dict): """ Encapsulates a CouchDB client, handling top level user API calls having to do with session and database management. 
Maintains a requests.Session for working with the instance specified in the constructor. Parameters can be passed in to control behavior: :param str user: Username used to connect to CouchDB. :param str auth_token: Authentication token used to connect to CouchDB. :param bool admin_party: Setting to allow the use of Admin Party mode in CouchDB. Defaults to ``False``. :param str url: URL for CouchDB server. :param str encoder: Optional json Encoder object used to encode documents for storage. Defaults to json.JSONEncoder. :param requests.HTTPAdapter adapter: Optional adapter to use for configuring requests. :param bool connect: Keyword argument, if set to True performs the call to connect as part of client construction. Default is False. :param bool auto_renew: Keyword argument, if set to True performs automatic renewal of expired session authentication settings. Default is False. :param float timeout: Timeout in seconds (use float for milliseconds, for example 0.1 for 100 ms) for connecting to and reading bytes from the server. If a single value is provided it will be applied to both the connect and read timeouts. To specify different values for each timeout use a tuple. For example, a 10 second connect timeout and a 1 minute read timeout would be (10, 60). This follows the same behaviour as the `Requests library timeout argument `_. but will apply to every request made using this client. :param bool use_basic_auth: Keyword argument, if set to True performs basic access authentication with server. Default is False. :param bool use_iam: Keyword argument, if set to True performs IAM authentication with server. Default is False. Use :func:`~cloudant.client.CouchDB.iam` to construct an IAM authenticated client. :param string iam_client_id: Keyword argument, client ID to use when authenticating with the IAM token server. Default is ``None``. :param string iam_client_secret: Keyword argument, client secret to use when authenticating with the IAM token server. 
Default is ``None``. """ _DATABASE_CLASS = CouchDatabase def __init__(self, user, auth_token, admin_party=False, **kwargs): super(CouchDB, self).__init__() self._user = user self._auth_token = auth_token self.server_url = kwargs.get('url') self._client_user_header = None self.admin_party = admin_party self.encoder = kwargs.get('encoder') or json.JSONEncoder self.adapter = kwargs.get('adapter') self._timeout = kwargs.get('timeout', None) self.r_session = None self._auto_renew = kwargs.get('auto_renew', False) self._use_basic_auth = kwargs.get('use_basic_auth', False) self._use_iam = kwargs.get('use_iam', False) self._iam_client_id = kwargs.get('iam_client_id', None) self._iam_client_secret = kwargs.get('iam_client_secret', None) # If user/pass exist in URL, remove and set variables if not self._use_basic_auth and self.server_url: parsed_url = url_parse(kwargs.get('url')) # Note: To prevent conflicts with field names, the method # and attribute names of `url_parse` start with an underscore if parsed_url.port is None: self.server_url = parsed_url._replace( netloc="{}".format(parsed_url.hostname)).geturl() else: self.server_url = parsed_url._replace( netloc="{}:{}".format(parsed_url.hostname, parsed_url.port)).geturl() if (not user and not auth_token) and (parsed_url.username and parsed_url.password): self._user = parsed_url.username self._auth_token = parsed_url.password self._features = None connect_to_couch = kwargs.get('connect', False) if connect_to_couch and self._DATABASE_CLASS == CouchDatabase: self.connect() @property def is_iam_authenticated(self): """ Show if a client has authenticated using an IAM API key. :return: True if client is IAM authenticated. False otherwise. 
""" return self._use_iam def features(self): """ lazy fetch and cache features """ if self._features is None: metadata = self.metadata() if "features" in metadata: self._features = metadata["features"] else: self._features = [] return self._features def connect(self): """ Starts up an authentication session for the client using cookie authentication if necessary. """ if self.r_session: self.session_logout() if self.admin_party: self._use_iam = False self.r_session = ClientSession( timeout=self._timeout ) elif self._use_basic_auth: self._use_iam = False self.r_session = BasicSession( self._user, self._auth_token, self.server_url, timeout=self._timeout ) elif self._use_iam: self.r_session = IAMSession( self._auth_token, self.server_url, auto_renew=self._auto_renew, client_id=self._iam_client_id, client_secret=self._iam_client_secret, timeout=self._timeout ) else: self.r_session = CookieSession( self._user, self._auth_token, self.server_url, auto_renew=self._auto_renew, timeout=self._timeout ) # If a Transport Adapter was supplied add it to the session if self.adapter is not None: self.r_session.mount(self.server_url, self.adapter) if self._client_user_header is not None: self.r_session.headers.update(self._client_user_header) self.session_login() # Utilize an event hook to append to the response message # using :func:`~cloudant.common_util.append_response_error_content` self.r_session.hooks['response'].append(append_response_error_content) def disconnect(self): """ Ends a client authentication session, performs a logout and a clean up. """ if self.r_session: self.session_logout() self.r_session = None self.clear() def session(self): """ Retrieves information about the current login session to verify data related to sign in. :returns: Dictionary of session info for the current session. """ return self.r_session.info() def session_cookie(self): """ Retrieves the current session cookie. 
:returns: Session cookie for the current session """ return self.r_session.cookies.get('AuthSession') def session_login(self, user=None, passwd=None): """ Performs a session login by posting the auth information to the _session endpoint. :param str user: Username used to connect to server. :param str auth_token: Authentication token used to connect to server. """ self.change_credentials(user=user, auth_token=passwd) def change_credentials(self, user=None, auth_token=None): """ Change login credentials. :param str user: Username used to connect to server. :param str auth_token: Authentication token used to connect to server. """ self.r_session.set_credentials(user, auth_token) self.r_session.login() def session_logout(self): """ Performs a session logout and clears the current session by sending a delete request to the _session endpoint. """ self.r_session.logout() def basic_auth_str(self): """ Composes a basic http auth string, suitable for use with the _replicator database, and other places that need it. :returns: Basic http authentication string """ return self.r_session.base64_user_pass() def all_dbs(self): """ Retrieves a list of all database names for the current client. :returns: List of database names for the client """ url = '/'.join((self.server_url, '_all_dbs')) resp = self.r_session.get(url) resp.raise_for_status() return response_to_json_dict(resp) def create_database(self, dbname, partitioned=False, **kwargs): """ Creates a new database on the remote server with the name provided and adds the new database object to the client's locally cached dictionary before returning it to the caller. The method will optionally throw a CloudantClientException if the database exists remotely. :param str dbname: Name used to create the database. :param bool throw_on_exists: Boolean flag dictating whether or not to throw a CloudantClientException when attempting to create a database that already exists. :param bool partitioned: Create as a partitioned database. 
            Defaults to ``False``.

        :returns: The newly created database object
        """
        new_db = self._DATABASE_CLASS(self, dbname, partitioned=partitioned)
        try:
            new_db.create(kwargs.get('throw_on_exists', False))
        except CloudantDatabaseException as ex:
            # Translate the database-level 412 (already exists) into the
            # client-level exception; everything else propagates unchanged.
            if ex.status_code == 412:
                raise CloudantClientException(412, dbname)
            # NOTE(review): bare ``raise`` would preserve the original
            # traceback more cleanly than ``raise ex``.
            raise ex
        super(CouchDB, self).__setitem__(dbname, new_db)
        return new_db

    def delete_database(self, dbname):
        """
        Removes the named database remotely and locally. The method will
        throw a CloudantClientException if the database does not exist.

        :param str dbname: Name of the database to delete.
        """
        db = self._DATABASE_CLASS(self, dbname)
        if not db.exists():
            raise CloudantClientException(404, dbname)
        db.delete()
        # Also drop the locally cached object, if present.
        if dbname in list(self.keys()):
            super(CouchDB, self).__delitem__(dbname)

    def db_updates(self, raw_data=False, **kwargs):
        """
        Returns the ``_db_updates`` feed iterator.  While iterating over the
        feed, if necessary, the iteration can be stopped by issuing a call to
        the ``stop()`` method on the returned iterator object.

        For example:

        .. code-block:: python

            # Iterate over a "longpoll" _db_updates feed
            db_updates = client.db_updates()
            for db_update in db_updates:
                if some_condition:
                    db_updates.stop()
                print(db_update)

            # Iterate over a "continuous" _db_updates feed with additional options
            db_updates = client.db_updates(feed='continuous', heartbeat=False)
            for db_update in db_updates:
                if some_condition:
                    db_updates.stop()
                print(db_update)

        :param bool raw_data: If set to True then the raw response data will
            be streamed otherwise if set to False then JSON formatted data
            will be streamed.  Default is False.
        :param str feed: Type of feed.  Valid values are ``continuous``, and
            ``longpoll``.  Default is ``longpoll``.
        :param bool heartbeat: Whether CouchDB will send a newline character
            on timeout.  Default is True.
        :param int timeout: Number of seconds to wait for data before
            terminating the response.
        :param int chunk_size: The HTTP response stream chunk size.  Defaults
            to 512.

        :returns: Feed object that can be iterated over as a ``_db_updates``
            feed.
        """
        return Feed(self, raw_data, **kwargs)

    def metadata(self):
        """
        Retrieves the remote server metadata dictionary.

        :returns: Dictionary containing server metadata details
        """
        resp = self.r_session.get(self.server_url)
        resp.raise_for_status()
        return response_to_json_dict(resp)

    def keys(self, remote=False):
        """
        Returns the database names for this client.  Default is to return
        only the locally cached database names, specify ``remote=True`` to
        make a remote request to include all databases.

        :param bool remote: Dictates whether the list of locally cached
            database names are returned or a remote request is made to
            include an up to date list of databases from the server.
            Defaults to False.

        :returns: List of database names
        """
        if not remote:
            return list(super(CouchDB, self).keys())
        return self.all_dbs()

    def __getitem__(self, key):
        """
        Overrides dictionary __getitem__ behavior to provide a database
        instance for the specified key.

        If the database instance does not exist locally, then a remote
        request is made and the database is subsequently added to the local
        cache and returned to the caller.

        If the database instance already exists locally then it is returned
        and a remote request is not performed.

        A KeyError will result if the database does not exist locally or on
        the server.

        :param str key: Database name used to retrieve the database object.

        :returns: Database object
        """
        if key in list(self.keys()):
            return super(CouchDB, self).__getitem__(key)
        db = self._DATABASE_CLASS(self, key)
        if db.exists():
            # Cache the remotely verified database locally before returning.
            super(CouchDB, self).__setitem__(key, db)
        else:
            raise KeyError(key)
        return db

    def __delitem__(self, key, remote=False):
        """
        Overrides dictionary __delitem__ behavior to make deleting the
        database key a proxy for deleting the database.  If remote=True then
        it will delete the database on the remote server, otherwise only the
        local cached object will be removed.
        :param str key: Database name of the database to be deleted.
        :param bool remote: Dictates whether the locally cached
            database is deleted or a remote request is made to delete
            the database from the server.  Defaults to False.
        """
        super(CouchDB, self).__delitem__(key)
        if remote:
            self.delete_database(key)

    def get(self, key, default=None, remote=False):
        """
        Overrides dictionary get behavior to retrieve database objects with
        support for returning a default.  If remote=True then a remote
        request is made to retrieve the database from the remote server,
        otherwise the client's locally cached database object is returned.

        :param str key: Database name used to retrieve the database object.
        :param default: Value returned when the database does not exist.
            Defaults to None.
        :param bool remote: Dictates whether the locally cached
            database is returned or a remote request is made to retrieve
            the database from the server.  Defaults to False.

        :returns: Database object
        """
        if not remote:
            return super(CouchDB, self).get(key, default)
        db = self._DATABASE_CLASS(self, key)
        if db.exists():
            # Cache the remotely verified database locally before returning.
            super(CouchDB, self).__setitem__(key, db)
            return db
        return default

    def __setitem__(self, key, value, remote=False):
        """
        Override dictionary __setitem__ behavior to verify that only
        database instances are added as keys.  If remote=True then also
        create the database remotely if the database does not exist.

        Note:  The only way to override the default for the ``remote``
        argument setting it to True is to call __setitem__ directly.  A much
        simpler approach is to use
        :func:`~cloudant.client.CouchDB.create_database` instead, if your
        intention is to create a database remotely.

        :param str key: Database name to be used as the key for the database
            in the locally cached dictionary.
        :param value: Database object to be used in the locally cached
            dictionary.
        :param bool remote: Dictates whether the method will attempt to
            create the database remotely or not.  Defaults to False.
        """
        if not isinstance(value, self._DATABASE_CLASS):
            raise CloudantClientException(101, type(value).__name__)
        if remote and not value.exists():
            value.create()
        super(CouchDB, self).__setitem__(key, value)

class Cloudant(CouchDB):
    """
    Encapsulates a Cloudant client, handling top level user API calls having
    to do with session and database management.

    Maintains a requests.Session for working with the instance specified in
    the constructor.

    Parameters can be passed in to control behavior:

    :param str cloudant_user: Username used to connect to Cloudant.
    :param str auth_token: Authentication token used to connect to Cloudant.
    :param str account: The Cloudant account name.  If the account parameter
        is present, it will be used to construct the Cloudant service URL.
    :param str url: If the account is not present and the url parameter is
        present then it will be used to set the Cloudant service URL.  The
        url must be a fully qualified http/https URL.
    :param str x_cloudant_user: Override the X-Cloudant-User setting used to
        authenticate. This is needed to authenticate on one's behalf,
        eg with an admin account.  This parameter must be accompanied
        by the url parameter.  If the url parameter is omitted then
        the x_cloudant_user parameter setting is ignored.
    :param str encoder: Optional json Encoder object used to encode
        documents for storage.  Defaults to json.JSONEncoder.
    :param requests.HTTPAdapter adapter: Optional adapter to use for
        configuring requests.
""" _DATABASE_CLASS = CloudantDatabase def __init__(self, cloudant_user, auth_token, **kwargs): super(Cloudant, self).__init__(cloudant_user, auth_token, **kwargs) self._client_user_header = {'User-Agent': USER_AGENT} account = kwargs.get('account') if account is not None: self.server_url = 'https://{0}.cloudant.com'.format(account) if kwargs.get('x_cloudant_user') is not None: self._client_user_header['X-Cloudant-User'] = kwargs.get('x_cloudant_user') if self.server_url is None: raise CloudantClientException(102) if kwargs.get('connect', False): self.connect() def db_updates(self, raw_data=False, **kwargs): """ Returns the ``_db_updates`` feed iterator. The ``_db_updates`` feed can be iterated over and once complete can also provide the last sequence identifier of the feed. If necessary, the iteration can be stopped by issuing a call to the ``stop()`` method on the returned iterator object. For example: .. code-block:: python # Iterate over a "normal" _db_updates feed db_updates = client.db_updates() for db_update in db_updates: print(db_update) print(db_updates.last_seq) # Iterate over a "continuous" _db_updates feed with additional options db_updates = client.db_updates(feed='continuous', since='now', descending=True) for db_update in db_updates: if some_condition: db_updates.stop() print(db_update) :param bool raw_data: If set to True then the raw response data will be streamed otherwise if set to False then JSON formatted data will be streamed. Default is False. :param bool descending: Whether results should be returned in descending order, i.e. the latest event first. By default, the oldest event is returned first. :param str feed: Type of feed. Valid values are ``continuous``, ``longpoll``, and ``normal``. Default is ``normal``. :param int heartbeat: Time in milliseconds after which an empty line is sent during ``longpoll`` or ``continuous`` if there have been no changes. Must be a positive number. Default is no heartbeat. 
:param int limit: Maximum number of rows to return. Must be a positive number. Default is no limit. :param since: Start the results from changes after the specified sequence identifier. In other words, using since excludes from the list all changes up to and including the specified sequence identifier. If since is 0 (the default), or omitted, the request returns all changes. If it is ``now``, only changes made after the time of the request will be emitted. :param int timeout: Number of milliseconds to wait for data before terminating the response. ``heartbeat`` supersedes ``timeout`` if both are supplied. :param int chunk_size: The HTTP response stream chunk size. Defaults to 512. :returns: Feed object that can be iterated over as a ``_db_updates`` feed. """ return Feed(self, raw_data, **kwargs) def infinite_db_updates(self, **kwargs): """ Returns an infinite (perpetually refreshed) ``_db_updates`` feed iterator. If necessary, the iteration can be stopped by issuing a call to the ``stop()`` method on the returned iterator object. For example: .. code-block:: python # Iterate over an infinite _db_updates feed db_updates = client.infinite_db_updates() for db_update in db_updates: if some_condition: db_updates.stop() print(db_update) :param bool descending: Whether results should be returned in descending order, i.e. the latest event first. By default, the oldest event is returned first. :param int heartbeat: Time in milliseconds after which an empty line is sent if there have been no changes. Must be a positive number. Default is no heartbeat. :param since: Start the results from changes after the specified sequence identifier. In other words, using since excludes from the list all changes up to and including the specified sequence identifier. If since is 0 (the default), or omitted, the request returns all changes. If it is ``now``, only changes made after the time of the request will be emitted. 
        :param int timeout: Number of milliseconds to wait for data before
            terminating the response. ``heartbeat`` supersedes ``timeout``
            if both are supplied.
        :param int chunk_size: The HTTP response stream chunk size.  Defaults
            to 512.

        :returns: Feed object that can be iterated over as a ``_db_updates``
            feed.
        """
        return InfiniteFeed(self, **kwargs)

    def _usage_endpoint(self, endpoint, year=None, month=None):
        """
        Common helper for getting usage and billing reports with
        optional year and month URL elements.

        :param str endpoint: Cloudant usage endpoint.
        :param int year: Year to query against.  Optional parameter.
            Defaults to None.  If used, it must be accompanied by ``month``.
        :param int month: Month to query against that must be an integer
            between 1 and 12.  Optional parameter.  Defaults to None.
            If used, it must be accompanied by ``year``.
        """
        err = False
        if year is None and month is None:
            # No date filter: query the bare endpoint.
            resp = self.r_session.get(endpoint)
        else:
            try:
                # Both year and month must be valid ints; int(None) raises
                # TypeError when only one of the two is supplied.
                if int(year) > 0 and int(month) in range(1, 13):
                    resp = self.r_session.get(
                        '/'.join((endpoint, str(int(year)), str(int(month)))))
                else:
                    err = True
            except (ValueError, TypeError):
                err = True
        if err:
            raise CloudantArgumentError(101, year, month)
        resp.raise_for_status()
        return response_to_json_dict(resp)

    def bill(self, year=None, month=None):
        """
        Retrieves Cloudant billing data, optionally for a given year and
        month.

        :param int year: Year to query against, for example 2014.
            Optional parameter.  Defaults to None.  If used, it must be
            accompanied by ``month``.
        :param int month: Month to query against that must be an integer
            between 1 and 12.  Optional parameter.  Defaults to None.
            If used, it must be accompanied by ``year``.

        :returns: Billing data in JSON format
        """
        endpoint = '/'.join((self.server_url, '_api', 'v2', 'bill'))
        return self._usage_endpoint(endpoint, year, month)

    def volume_usage(self, year=None, month=None):
        """
        Retrieves Cloudant volume usage data, optionally for a given year
        and month.

        :param int year: Year to query against, for example 2014.
            Optional parameter.  Defaults to None.  If used, it must be
            accompanied by ``month``.
        :param int month: Month to query against that must be an integer
            between 1 and 12.  Optional parameter.  Defaults to None.
            If used, it must be accompanied by ``year``.

        :returns: Volume usage data in JSON format
        """
        endpoint = '/'.join((
            self.server_url, '_api', 'v2', 'usage', 'data_volume'))
        return self._usage_endpoint(endpoint, year, month)

    def requests_usage(self, year=None, month=None):
        """
        Retrieves Cloudant requests usage data, optionally for a given year
        and month.

        :param int year: Year to query against, for example 2014.
            Optional parameter.  Defaults to None.  If used, it must be
            accompanied by ``month``.
        :param int month: Month to query against that must be an integer
            between 1 and 12.  Optional parameter.  Defaults to None.
            If used, it must be accompanied by ``year``.

        :returns: Requests usage data in JSON format
        """
        endpoint = '/'.join((
            self.server_url, '_api', 'v2', 'usage', 'requests'))
        return self._usage_endpoint(endpoint, year, month)

    def shared_databases(self):
        """
        Retrieves a list containing the names of databases shared
        with this account.

        :returns: List of database names
        """
        endpoint = '/'.join((
            self.server_url, '_api', 'v2', 'user', 'shared_databases'))
        resp = self.r_session.get(endpoint)
        resp.raise_for_status()
        data = response_to_json_dict(resp)
        return data.get('shared_databases', [])

    def generate_api_key(self):
        """
        Creates and returns a new API Key/pass pair.

        :returns: API key/pass pair in JSON format
        """
        endpoint = '/'.join((self.server_url, '_api', 'v2', 'api_keys'))
        resp = self.r_session.post(endpoint)
        resp.raise_for_status()
        return response_to_json_dict(resp)

    def cors_configuration(self):
        """
        Retrieves the current CORS configuration.
        :returns: CORS data in JSON format
        """
        endpoint = '/'.join((
            self.server_url, '_api', 'v2', 'user', 'config', 'cors'))
        resp = self.r_session.get(endpoint)
        resp.raise_for_status()

        return response_to_json_dict(resp)

    def disable_cors(self):
        """
        Switches CORS off.

        :returns: CORS status in JSON format
        """
        # Disabling CORS is just an update that clears every setting.
        return self.update_cors_configuration(
            enable_cors=False,
            allow_credentials=False,
            origins=[],
            overwrite_origins=True
        )

    def cors_origins(self):
        """
        Retrieves a list of CORS origins.

        :returns: List of CORS origins
        """
        cors = self.cors_configuration()

        return cors['origins']

    def update_cors_configuration(
            self,
            enable_cors=True,
            allow_credentials=True,
            origins=None,
            overwrite_origins=False):
        """
        Merges existing CORS configuration with updated values.

        :param bool enable_cors: Enables/disables CORS.  Defaults to True.
        :param bool allow_credentials: Allows authentication credentials.
            Defaults to True.
        :param list origins: List of allowed CORS origin(s).  Special cases
            are a list containing a single "*" which will allow any origin
            and an empty list which will not allow any origin.  Defaults to
            None.
        :param bool overwrite_origins: Dictates whether the origins list is
            overwritten or appended to.  Defaults to False.

        :returns: CORS configuration update status in JSON format
        """
        if origins is None:
            origins = []
        cors_config = {
            'enable_cors': enable_cors,
            'allow_credentials': allow_credentials,
            'origins': origins
        }

        if overwrite_origins:
            return self._write_cors_configuration(cors_config)

        old_config = self.cors_configuration()

        # update config values
        updated_config = old_config.copy()

        updated_config['enable_cors'] = cors_config.get('enable_cors')
        updated_config['allow_credentials'] = cors_config.get('allow_credentials')

        # A lone "*" wildcard replaces the origin list entirely; otherwise
        # merge old and new origins as a set union.
        if cors_config.get('origins') == ["*"]:
            updated_config['origins'] = ["*"]
        elif old_config.get('origins') != cors_config.get('origins'):
            new_origins = list(
                set(old_config.get('origins')).union(
                    set(cors_config.get('origins')))
            )
            updated_config['origins'] = new_origins

        return self._write_cors_configuration(updated_config)

    def _write_cors_configuration(self, config):
        """
        Overwrites the entire CORS config with the values updated in
        update_cors_configuration.

        :param dict config: Dictionary containing the updated CORS
            configuration.

        :returns: CORS configuration update status in JSON format
        """
        endpoint = '/'.join((
            self.server_url, '_api', 'v2', 'user', 'config', 'cors'))
        resp = self.r_session.put(
            endpoint,
            data=json.dumps(config, cls=self.encoder),
            headers={'Content-Type': 'application/json'}
        )
        resp.raise_for_status()

        return response_to_json_dict(resp)

    @classmethod
    def bluemix(cls, vcap_services, instance_name=None, service_name=None,
                **kwargs):
        """
        Create a Cloudant session using a VCAP_SERVICES environment variable.

        :param vcap_services: VCAP_SERVICES environment variable
        :type vcap_services: dict or str
        :param str instance_name: Optional Bluemix instance name. Only
            required if multiple Cloudant instances are available.
        :param str service_name: Optional Bluemix service name.

        Example usage:

        ..
code-block:: python import os from cloudant.client import Cloudant client = Cloudant.bluemix(os.getenv('VCAP_SERVICES'), 'Cloudant NoSQL DB') print client.all_dbs() """ service_name = service_name or 'cloudantNoSQLDB' # default service try: service = CloudFoundryService(vcap_services, instance_name=instance_name, service_name=service_name) except CloudantException: raise CloudantClientException(103) if hasattr(service, 'iam_api_key'): return Cloudant.iam(None, service.iam_api_key, url=service.url, **kwargs) return Cloudant(service.username, service.password, url=service.url, **kwargs) @classmethod def iam(cls, account_name, api_key, **kwargs): """ Create a Cloudant client that uses IAM authentication. :param account_name: Cloudant account name; or use None and a url kwarg. :param api_key: IAM authentication API key. """ return cls(None, api_key, account=account_name, auto_renew=kwargs.get('auto_renew', True), use_iam=True, **kwargs) ================================================ FILE: src/cloudant/database.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2019 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License a # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ API module that maps to a Cloudant or CouchDB database instance. 
""" import json import contextlib from requests.exceptions import HTTPError from ._2to3 import url_quote_plus, iteritems_ from ._common_util import ( JSON_INDEX_TYPE, SEARCH_INDEX_ARGS, SPECIAL_INDEX_TYPE, TEXT_INDEX_TYPE, TYPE_CONVERTERS, get_docs, response_to_json_dict) from .document import Document from .design_document import DesignDocument from .security_document import SecurityDocument from .view import View from .index import Index, TextIndex, SpecialIndex from .query import Query from .error import CloudantArgumentError, CloudantDatabaseException from .result import Result, QueryResult from .feed import Feed, InfiniteFeed class CouchDatabase(dict): """ Encapsulates a CouchDB database. A CouchDatabase object is instantiated with a reference to a client/session. It supports accessing the documents, and various database features such as the document indexes, changes feed, design documents, etc. :param CouchDB client: Client instance used by the database. :param str database_name: Database name used to reference the database. :param int fetch_limit: Optional fetch limit used to set the max number of documents to fetch per query during iteration cycles. Defaults to 100. :param bool partitioned: Create as a partitioned database. Defaults to ``False``. """ def __init__(self, client, database_name, fetch_limit=100, partitioned=False): super(CouchDatabase, self).__init__() self.client = client self._database_host = client.server_url self.database_name = database_name self._fetch_limit = fetch_limit self._partitioned = partitioned self.result = Result(self.all_docs) @property def r_session(self): """ Returns the ``r_session`` from the client instance used by the database. :returns: Client ``r_session`` """ return self.client.r_session @property def admin_party(self): """ Returns the CouchDB Admin Party status. ``True`` if using Admin Party ``False`` otherwise. 
        :returns: CouchDB Admin Party mode status
        """
        return self.client.admin_party

    @property
    def database_url(self):
        """
        Constructs and returns the database URL.

        :returns: Database URL
        """
        return '/'.join((
            self._database_host,
            url_quote_plus(self.database_name)))

    @property
    def creds(self):
        """
        Retrieves a dictionary of useful authentication information
        that can be used to authenticate against this database.

        :returns: Dictionary containing authentication information
        """
        session = self.client.session()
        if session is None:
            return None
        return {
            "basic_auth": self.client.basic_auth_str(),
            "user_ctx": session.get('userCtx')
        }

    def database_partition_url(self, partition_key):
        """
        Get the URL of the database partition.

        :param str partition_key: Partition key.

        :return: URL of the database partition.
        :rtype: str
        """
        return '/'.join((self.database_url,
                         '_partition',
                         url_quote_plus(partition_key)))

    def exists(self):
        """
        Performs an existence check on the remote database.

        :returns: Boolean True if the database exists, False otherwise
        """
        resp = self.r_session.head(self.database_url)
        # 200 and 404 are the two expected answers; anything else is an
        # error worth raising.
        if resp.status_code not in [200, 404]:
            resp.raise_for_status()
        return resp.status_code == 200

    def metadata(self):
        """
        Retrieves the remote database metadata dictionary.

        :returns: Dictionary containing database metadata details
        """
        resp = self.r_session.get(self.database_url)
        resp.raise_for_status()
        return response_to_json_dict(resp)

    def partition_metadata(self, partition_key):
        """
        Retrieves the metadata dictionary for the remote database partition.

        :param str partition_key: Partition key.

        :returns: Metadata dictionary for the database partition.
        :rtype: dict
        """
        resp = self.r_session.get(
            self.database_partition_url(partition_key))
        resp.raise_for_status()
        return response_to_json_dict(resp)

    def doc_count(self):
        """
        Retrieves the number of documents in the remote database

        :returns: Database document count
        """
        return self.metadata().get('doc_count')

    def create_document(self, data, throw_on_exists=False):
        """
        Creates a new document in the remote and locally cached database,
        using the data provided.  If an _id is included in the data then
        depending on that _id either a :class:`~cloudant.document.Document`
        or a :class:`~cloudant.design_document.DesignDocument`
        object will be added to the locally cached database and returned by
        this method.

        :param dict data: Dictionary of document JSON data, containing _id.
        :param bool throw_on_exists: Optional flag dictating whether to
            raise an exception if the document already exists in database.

        :returns: A :class:`~cloudant.document.Document` or
            :class:`~cloudant.design_document.DesignDocument` instance
            corresponding to the new document in the database.
        """
        docid = data.get('_id', None)
        doc = None
        # A '_design/' prefixed _id denotes a design document.
        if docid and docid.startswith('_design/'):
            doc = DesignDocument(self, docid)
        else:
            doc = Document(self, docid)
        doc.update(data)
        try:
            doc.create()
        except HTTPError as error:
            if error.response.status_code == 409:
                if throw_on_exists:
                    raise CloudantDatabaseException(409, docid)
            else:
                raise
        super(CouchDatabase, self).__setitem__(doc['_id'], doc)
        return doc

    def new_document(self):
        """
        Creates a new, empty document in the remote and locally cached
        database, auto-generating the _id.

        :returns: Document instance corresponding to the new document in the
            database
        """
        doc = Document(self, None)
        doc.create()
        super(CouchDatabase, self).__setitem__(doc['_id'], doc)
        return doc

    def design_documents(self):
        """
        Retrieve the JSON content for all design documents in this database.
        Performs a remote call to retrieve the content.
        :returns: All design documents found in this database in JSON format
        """
        url = '/'.join((self.database_url, '_all_docs'))
        # The '_design'..'_design0' key range selects only design documents.
        query = "startkey=\"_design\"&endkey=\"_design0\"&include_docs=true"
        resp = self.r_session.get(url, params=query)
        resp.raise_for_status()
        data = response_to_json_dict(resp)
        return data['rows']

    def list_design_documents(self):
        """
        Retrieves a list of design document names in this database.
        Performs a remote call to retrieve the content.

        :returns: List of names for all design documents in this database
        """
        url = '/'.join((self.database_url, '_all_docs'))
        # Same design-document key range as design_documents(), without docs.
        query = "startkey=\"_design\"&endkey=\"_design0\""
        resp = self.r_session.get(url, params=query)
        resp.raise_for_status()
        data = response_to_json_dict(resp)
        return [x.get('key') for x in data.get('rows', [])]

    def get_design_document(self, ddoc_id):
        """
        Retrieves a design document.  If a design document exists remotely
        then that content is wrapped in a DesignDocument object and returned
        to the caller.  Otherwise a "shell" DesignDocument object is returned.

        :param str ddoc_id: Design document id

        :returns: A DesignDocument instance, if exists remotely then it will
            be populated accordingly
        """
        ddoc = DesignDocument(self, ddoc_id)
        try:
            ddoc.fetch()
        except HTTPError as error:
            # 404 simply means the design document does not exist yet; a
            # "shell" object is still returned in that case.
            if error.response.status_code != 404:
                raise
        return ddoc

    def get_security_document(self):
        """
        Retrieves the database security document as a SecurityDocument
        object.  The returned object is useful for viewing as well as
        updating the the database's security document.

        :returns: A SecurityDocument instance representing the database
            security document
        """
        sdoc = SecurityDocument(self)
        sdoc.fetch()
        return sdoc

    def get_partitioned_view_result(self, partition_key, ddoc_id, view_name,
                                    raw_result=False, **kwargs):
        """
        Retrieves the partitioned view result based on the design document
        and view name.  See
        :func:`~cloudant.database.CouchDatabase.get_view_result` method for
        further details.

        :param str partition_key: Partition key.
        :param str ddoc_id: Design document id used to get result.
        :param str view_name: Name of the view used to get result.
        :param bool raw_result: Dictates whether the view result is returned
            as a default Result object or a raw JSON response.  Defaults to
            False.
        :param kwargs: See
            :func:`~cloudant.database.CouchDatabase.get_view_result` method
            for available keyword arguments.

        :returns: The result content either wrapped in a QueryResult or
            as the raw response JSON content.
        :rtype: QueryResult, dict
        """
        ddoc = DesignDocument(self, ddoc_id)
        view = View(ddoc, view_name, partition_key=partition_key)
        return self._get_view_result(view, raw_result, **kwargs)

    def get_view_result(self, ddoc_id, view_name, raw_result=False, **kwargs):
        """
        Retrieves the view result based on the design document and view name.
        By default the result is returned as a
        :class:`~cloudant.result.Result` object which provides a key
        accessible, sliceable, and iterable interface to the result
        collection.  Depending on how you are accessing, slicing or iterating
        through your result collection certain query parameters are not
        permitted.  See :class:`~cloudant.result.Result` for additional
        details.

        However, by setting ``raw_result=True``, the result will be returned
        as the raw JSON response content for the view requested.  With this
        setting there are no restrictions on the query parameters used but it
        also means that the result collection key access, slicing, and
        iteration is the responsibility of the developer.

        For example:

        ..
code-block:: python

            # get Result based on a design document view
            result = db.get_view_result('_design/ddoc_id_001', 'view_001')

            # get a customized Result based on a design document view
            result = db.get_view_result('_design/ddoc_id_001', 'view_001',
                include_docs=True, reduce=False)

            # get raw response content based on a design document view
            result = db.get_view_result('_design/ddoc_id_001', 'view_001',
                raw_result=True)

            # get customized raw response content for a design document view
            db.get_view_result('_design/ddoc_id_001', 'view_001',
                raw_result=True, include_docs=True, skip=100, limit=100)

        For more detail on key access, slicing and iteration, refer to the
        :class:`~cloudant.result.Result` documentation.

        :param str ddoc_id: Design document id used to get result.
        :param str view_name: Name of the view used to get result.
        :param bool raw_result: Dictates whether the view result is returned
            as a default Result object or a raw JSON response.  Defaults to
            False.
        :param bool descending: Return documents in descending key order.
        :param endkey: Stop returning records at this specified key.  Not
            valid when used with :class:`~cloudant.result.Result` key access
            and key slicing.
        :param str endkey_docid: Stop returning records when the specified
            document id is reached.
        :param bool group: Using the reduce function, group the results to a
            group or single row.
        :param group_level: Only applicable if the view uses complex keys:
            keys that are lists.  Groups reduce results for the specified
            number of list fields.
        :param bool include_docs: Include the full content of the documents.
        :param bool inclusive_end: Include rows with the specified endkey.
        :param key: Return only documents that match the specified key.  Not
            valid when used with :class:`~cloudant.result.Result` key access
            and key slicing.
        :param list keys: Return only documents that match the specified
            keys.  Not valid when used with
            :class:`~cloudant.result.Result` key access and key slicing.
        :param int limit: Limit the number of returned documents to the
            specified count.  Not valid when used with
            :class:`~cloudant.result.Result` iteration.
        :param int page_size: Sets the page size for result iteration.  Only
            valid if used with ``raw_result=False``.
        :param bool reduce: True to use the reduce function, false otherwise.
        :param int skip: Skip this number of rows from the start.  Not valid
            when used with :class:`~cloudant.result.Result` iteration.
        :param bool stable: Whether or not the view results should be
            returned from a "stable" set of shards.
        :param str stale: Allow the results from a stale view to be used.
            This makes the request return immediately, even if the view has
            not been completely built yet.  If this parameter is not given,
            a response is returned only after the view has been built.  Note
            that this parameter is deprecated and the appropriate
            combination of `stable` and `update` should be used instead.
        :param startkey: Return records starting with the specified key.
            Not valid when used with :class:`~cloudant.result.Result` key
            access and key slicing.
        :param str startkey_docid: Return records starting with the
            specified document ID.
        :param str update: Determine whether the view in question should be
            updated prior to or after responding to the user.  Valid values
            are: false: return results before updating the view; true:
            Return results after updating the view; lazy: Return the view
            results without waiting for an update, but update them
            immediately after the request.

        :returns: The result content either wrapped in a QueryResult or
            as the raw response JSON content
        """
        ddoc = DesignDocument(self, ddoc_id)
        view = View(ddoc, view_name)
        return self._get_view_result(view, raw_result, **kwargs)

    @staticmethod
    def _get_view_result(view, raw_result, **kwargs):
        """
        Get view results helper.
""" if raw_result: return view(**kwargs) if kwargs: return Result(view, **kwargs) return view.result def create(self, throw_on_exists=False): """ Creates a database defined by the current database object, if it does not already exist and raises a CloudantException if the operation fails. If the database already exists then this method call is a no-op. :param bool throw_on_exists: Boolean flag dictating whether or not to throw a CloudantDatabaseException when attempting to create a database that already exists. :returns: The database object """ if not throw_on_exists and self.exists(): return self resp = self.r_session.put(self.database_url, params={ 'partitioned': TYPE_CONVERTERS.get(bool)(self._partitioned) }) if resp.status_code == 201 or resp.status_code == 202: return self raise CloudantDatabaseException( resp.status_code, self.database_url, resp.text ) def delete(self): """ Deletes the current database from the remote instance. """ resp = self.r_session.delete(self.database_url) resp.raise_for_status() def all_docs(self, **kwargs): """ Wraps the _all_docs primary index on the database, and returns the results by value. This can be used as a direct query to the _all_docs endpoint. More convenient/efficient access using keys, slicing and iteration can be done through the ``result`` attribute. Keyword arguments supported are those of the view/index access API. :param bool descending: Return documents in descending key order. :param endkey: Stop returning records at this specified key. :param str endkey_docid: Stop returning records when the specified document id is reached. :param bool include_docs: Include the full content of the documents. :param bool inclusive_end: Include rows with the specified endkey. :param key: Return only documents that match the specified key. :param list keys: Return only documents that match the specified keys. :param int limit: Limit the number of returned documents to the specified count. 
:param int skip: Skip this number of rows from the start. :param startkey: Return records starting with the specified key. :param str startkey_docid: Return records starting with the specified document ID. :returns: Raw JSON response content from ``_all_docs`` endpoint """ resp = get_docs(self.r_session, '/'.join([self.database_url, '_all_docs']), self.client.encoder, **kwargs) return response_to_json_dict(resp) def partitioned_all_docs(self, partition_key, **kwargs): """ Wraps the _all_docs primary index on the database partition, and returns the results by value. See :func:`~cloudant.database.CouchDatabase.all_docs` method for further details. :param str partition_key: Partition key. :param kwargs: See :func:`~cloudant.database.CouchDatabase.all_docs` method for available keyword arguments. :returns: Raw JSON response content from ``_all_docs`` endpoint. :rtype: dict """ resp = get_docs(self.r_session, '/'.join([ self.database_partition_url(partition_key), '_all_docs' ]), self.client.encoder, **kwargs) return response_to_json_dict(resp) @contextlib.contextmanager def custom_result(self, **options): """ Provides a context manager that can be used to customize the ``_all_docs`` behavior and wrap the output as a :class:`~cloudant.result.Result`. :param bool descending: Return documents in descending key order. :param endkey: Stop returning records at this specified key. Not valid when used with :class:`~cloudant.result.Result` key access and key slicing. :param str endkey_docid: Stop returning records when the specified document id is reached. :param bool include_docs: Include the full content of the documents. :param bool inclusive_end: Include rows with the specified endkey. :param key: Return only documents that match the specified key. Not valid when used with :class:`~cloudant.result.Result` key access and key slicing. :param list keys: Return only documents that match the specified keys. 
            Not valid when used with :class:`~cloudant.result.Result` key
            access and key slicing.
        :param int page_size: Sets the page size for result iteration.
        :param startkey: Return records starting with the specified key.
            Not valid when used with :class:`~cloudant.result.Result` key
            access and key slicing.
        :param str startkey_docid: Return records starting with the
            specified document ID.

        For example:

        .. code-block:: python

            with database.custom_result(include_docs=True) as rslt:
                data = rslt[100: 200]
        """
        rslt = Result(self.all_docs, **options)
        yield rslt
        # Drop the local reference once the managed scope exits.
        del rslt

    def keys(self, remote=False):
        """
        Retrieves the list of document ids in the database.  Default is to
        return only the locally cached document ids, specify remote=True to
        make a remote request to include all document ids from the remote
        database instance.

        :param bool remote: Dictates whether the list of locally cached
            document ids are returned or a remote request is made to include
            an up to date list of document ids from the server.  Defaults to
            False.

        :returns: List of document ids
        """
        if not remote:
            return list(super(CouchDatabase, self).keys())
        # Query the primary index and extract the id of every row.
        docs = self.all_docs()
        return [row['id'] for row in docs.get('rows', [])]

    def changes(self, raw_data=False, **kwargs):
        """
        Returns the ``_changes`` feed iterator.  The ``_changes`` feed can
        be iterated over and once complete can also provide the last
        sequence identifier of the feed.  If necessary, the iteration can be
        stopped by issuing a call to the ``stop()`` method on the returned
        iterator object.

        For example:

        .. code-block:: python

            # Iterate over a "normal" _changes feed
            changes = db.changes()
            for change in changes:
                print(change)
            print(changes.last_seq)

            # Iterate over a "continuous" _changes feed with additional
            # options
            changes = db.changes(feed='continuous', since='now',
                descending=True)
            for change in changes:
                if some_condition:
                    changes.stop()
                print(change)

        :param bool raw_data: If set to True then the raw response data will
            be streamed otherwise if set to False then JSON formatted data
            will be streamed.  Default is False.
        :param bool conflicts: Can only be set if include_docs is True.
            Adds information about conflicts to each document.  Default is
            False.
        :param bool descending: Changes appear in sequential order.  Default
            is False.
        :param list doc_ids: To be used only when ``filter`` is set to
            ``_doc_ids``.  Filters the feed so that only changes to the
            specified documents are sent.
        :param str feed: Type of feed.  Valid values are ``continuous``,
            ``longpoll``, and ``normal``.  Default is ``normal``.
        :param str filter: Name of filter function from a design document to
            get updates.  Default is no filter.
        :param int heartbeat: Time in milliseconds after which an empty line
            is sent during ``longpoll`` or ``continuous`` if there have been
            no changes.  Must be a positive number.  Default is no
            heartbeat.
        :param bool include_docs: Include the document with the result.  The
            document will not be returned as a
            :class:`~cloudant.document.Document` but instead will be
            returned as either formatted JSON or as raw response content.
            Default is False.
        :param int limit: Maximum number of rows to return.  Must be a
            positive number.  Default is no limit.
        :param since: Start the results from changes after the specified
            sequence identifier.  In other words, using since excludes from
            the list all changes up to and including the specified sequence
            identifier.  If since is 0 (the default), or omitted, the
            request returns all changes.  If it is ``now``, only changes
            made after the time of the request will be emitted.
        :param str style: Specifies how many revisions are returned in the
            changes array.  The default, ``main_only``, only returns the
            current "winning" revision; ``all_docs`` returns all leaf
            revisions, including conflicts and deleted former conflicts.
        :param int timeout: Number of milliseconds to wait for data before
            terminating the response.  ``heartbeat`` supersedes ``timeout``
            if both are supplied.
        :param int chunk_size: The HTTP response stream chunk size.
            Defaults to 512.

        :returns: Feed object that can be iterated over as a ``_changes``
            feed.
        """
        return Feed(self, raw_data, **kwargs)

    def infinite_changes(self, **kwargs):
        """
        Returns an infinite (perpetually refreshed) ``_changes`` feed
        iterator.  If necessary, the iteration can be stopped by issuing a
        call to the ``stop()`` method on the returned iterator object.

        For example:

        .. code-block:: python

            # Iterate over an infinite _changes feed
            changes = db.infinite_changes()
            for change in changes:
                if some_condition:
                    changes.stop()
                print(change)

        :param bool conflicts: Can only be set if include_docs is True.
            Adds information about conflicts to each document.  Default is
            False.
        :param bool descending: Changes appear in sequential order.  Default
            is False.
        :param list doc_ids: To be used only when ``filter`` is set to
            ``_doc_ids``.  Filters the feed so that only changes to the
            specified documents are sent.
        :param str filter: Name of filter function from a design document to
            get updates.  Default is no filter.
        :param int heartbeat: Time in milliseconds after which an empty line
            is sent if there have been no changes.  Must be a positive
            number.  Default is no heartbeat.
        :param bool include_docs: Include the document with the result.  The
            document will not be returned as a
            :class:`~cloudant.document.Document` but instead will be
            returned as either formatted JSON or as raw response content.
            Default is False.
        :param since: Start the results from changes after the specified
            sequence identifier.  In other words, using since excludes from
            the list all changes up to and including the specified sequence
            identifier.  If since is 0 (the default), or omitted, the
            request returns all changes.  If it is ``now``, only changes
            made after the time of the request will be emitted.
        :param str style: Specifies how many revisions are returned in the
            changes array.  The default, ``main_only``, only returns the
            current "winning" revision; ``all_docs`` returns all leaf
            revisions, including conflicts and deleted former conflicts.
        :param int timeout: Number of milliseconds to wait for data before
            terminating the response.  ``heartbeat`` supersedes ``timeout``
            if both are supplied.
        :param int chunk_size: The HTTP response stream chunk size.
            Defaults to 512.

        :returns: Feed object that can be iterated over as a ``_changes``
            feed.
        """
        return InfiniteFeed(self, **kwargs)

    def __getitem__(self, key):
        """
        Overrides dictionary __getitem__ behavior to provide a document
        instance for the specified key from the current database.

        If the document instance does not exist locally, then a remote
        request is made and the document is subsequently added to the local
        cache and returned to the caller.

        If the document instance already exists locally then it is returned
        and a remote request is not performed.

        A KeyError will result if the document does not exist locally or in
        the remote database.

        :param str key: Document id used to retrieve the document from the
            database.

        :returns: A Document or DesignDocument object depending on the
            specified document id (key)
        """
        if key in list(self.keys()):
            return super(CouchDatabase, self).__getitem__(key)
        # Choose the wrapper type based on the document id prefix.
        if key.startswith('_design/'):
            doc = DesignDocument(self, key)
        else:
            doc = Document(self, key)
        if doc.exists():
            doc.fetch()
            # Cache the fetched document locally for subsequent access.
            super(CouchDatabase, self).__setitem__(key, doc)
        else:
            raise KeyError(key)
        return doc

    def get(self, key, remote=False):
        """
        Overrides dict's get method.
This gets an item from the database or cache like __getitem__, but instead of throwing an exception if the item is not found, it simply returns None. :param bool remote: Dictates whether a remote request is made to retrieve the doc, if it is not present in the local cache. Defaults to False. """ if remote: try: return self.__getitem__(key) except KeyError: return None else: return super(CouchDatabase, self).get(key) def __contains__(self, key): """ Overrides dictionary __contains__ behavior to check if a document by key exists in the current cached or remote database. For example: .. code-block:: python if key in database: doc = database[key] # Do something with doc :param str key: Document id used to check if it exists in the database. :returns: True if the document exists in the local or remote database, otherwise False. """ if key in list(self.keys()): return True if key.startswith('_design/'): doc = DesignDocument(self, key) else: doc = Document(self, key) return doc.exists() def __iter__(self, remote=True): """ Overrides dictionary __iter__ behavior to provide iterable Document results. By default, Documents are fetched from the remote database, in batches equal to the database object's defined ``fetch_limit``, yielding Document/DesignDocument objects. If ``remote=False`` then the locally cached Document objects are iterated over with no attempt to retrieve documents from the remote database. :param bool remote: Dictates whether the locally cached Document objects are returned or a remote request is made to retrieve Document objects from the remote database. Defaults to True. :returns: Iterable of Document and/or DesignDocument objects """ if not remote: super(CouchDatabase, self).__iter__() else: # Use unicode Null U+0000 as the initial lower bound to ensure any # document id could exist in the results set. 
next_startkey = u'\u0000' while next_startkey is not None: docs = self.all_docs( limit=self._fetch_limit, include_docs=True, startkey=next_startkey ).get('rows', []) if len(docs) >= self._fetch_limit: # Ensure the next document batch contains ids that sort # strictly higher than the previous document id fetched. next_startkey = docs[-1]['id'] + u'\u0000' else: # This is the last batch of docs, so we set # ourselves up to break out of the while loop # after this pass. next_startkey = None for doc in docs: # Wrap the doc dictionary as the appropriate # document object before yielding it. if doc['id'].startswith('_design/'): document = DesignDocument(self, doc['id']) else: document = Document(self, doc['id']) document.update(doc['doc']) super(CouchDatabase, self).__setitem__(doc['id'], document) yield document return def bulk_docs(self, docs): """ Performs multiple document inserts and/or updates through a single request. Each document must either be or extend a dict as is the case with Document and DesignDocument objects. A document must contain the ``_id`` and ``_rev`` fields if the document is meant to be updated. :param list docs: List of Documents to be created/updated. :returns: Bulk document creation/update status in JSON format """ url = '/'.join((self.database_url, '_bulk_docs')) data = {'docs': docs} headers = {'Content-Type': 'application/json'} resp = self.r_session.post( url, data=json.dumps(data, cls=self.client.encoder), headers=headers ) resp.raise_for_status() return response_to_json_dict(resp) def missing_revisions(self, doc_id, *revisions): """ Returns a list of document revision values that do not exist in the current remote database for the specified document id and specified list of revision values. :param str doc_id: Document id to check for missing revisions against. :param list revisions: List of document revisions values to check against. 
        :returns: List of missing document revision values
        """
        url = '/'.join((self.database_url, '_missing_revs'))
        data = {doc_id: list(revisions)}
        resp = self.r_session.post(
            url,
            headers={'Content-Type': 'application/json'},
            data=json.dumps(data, cls=self.client.encoder)
        )
        resp.raise_for_status()
        resp_json = response_to_json_dict(resp)
        # The endpoint may omit the doc id entirely when nothing is missing.
        missing_revs = resp_json['missing_revs'].get(doc_id)
        if missing_revs is None:
            missing_revs = []
        return missing_revs

    def revisions_diff(self, doc_id, *revisions):
        """
        Returns the differences in the current remote database for the
        specified document id and specified list of revision values.

        :param str doc_id: Document id to check for revision differences
            against.
        :param list revisions: List of document revisions values to check
            against.

        :returns: The revision differences in JSON format
        """
        url = '/'.join((self.database_url, '_revs_diff'))
        data = {doc_id: list(revisions)}
        resp = self.r_session.post(
            url,
            headers={'Content-Type': 'application/json'},
            data=json.dumps(data, cls=self.client.encoder)
        )
        resp.raise_for_status()
        return response_to_json_dict(resp)

    def get_revision_limit(self):
        """
        Retrieves the limit of historical revisions to store for any single
        document in the current remote database.

        :returns: Revision limit value for the current remote database
        """
        url = '/'.join((self.database_url, '_revs_limit'))
        resp = self.r_session.get(url)
        resp.raise_for_status()
        try:
            # The endpoint responds with a bare integer in the body.
            ret = int(resp.text)
        except ValueError:
            raise CloudantDatabaseException(400, response_to_json_dict(resp))

        return ret

    def set_revision_limit(self, limit):
        """
        Sets the limit of historical revisions to store for any single
        document in the current remote database.

        :param int limit: Number of revisions to store for any single
            document in the current remote database.

        :returns: Revision limit set operation status in JSON format
        """
        url = '/'.join((self.database_url, '_revs_limit'))
        resp = self.r_session.put(url,
                                  data=json.dumps(limit,
                                                  cls=self.client.encoder))
        resp.raise_for_status()
        return response_to_json_dict(resp)

    def view_cleanup(self):
        """
        Removes view files that are not used by any design document in the
        remote database.

        :returns: View cleanup status in JSON format
        """
        url = '/'.join((self.database_url, '_view_cleanup'))
        resp = self.r_session.post(
            url,
            headers={'Content-Type': 'application/json'}
        )
        resp.raise_for_status()
        return response_to_json_dict(resp)

    def get_list_function_result(self, ddoc_id, list_name, view_name,
                                 **kwargs):
        """
        Retrieves a customized MapReduce view result from the specified
        database based on the list function provided.  List functions are
        used, for example, when you want to access Cloudant directly from a
        browser, and need data to be returned in a different format, such as
        HTML.

        Note: All query parameters for View requests are supported.
        See :class:`~cloudant.database.get_view_result` for all supported
        query parameters.

        For example:

        .. code-block:: python

            # Assuming that 'view001' exists as part of the
            # 'ddoc001' design document in the remote database...
            # Retrieve documents where the list function is 'list1'
            resp = db.get_list_function_result('ddoc001', 'list1', 'view001',
                limit=10)
            for row in resp['rows']:
                # Process data (in text format).

        For more detail on list functions, refer to the
        `Cloudant list documentation `_.

        :param str ddoc_id: Design document id used to get result.
        :param str list_name: Name used in part to identify the list
            function.
        :param str view_name: Name used in part to identify the view.

        :return: Formatted view result data in text format
        """
        ddoc = DesignDocument(self, ddoc_id)
        headers = {'Content-Type': 'application/json'}
        resp = get_docs(self.r_session,
                        '/'.join([ddoc.document_url, '_list', list_name,
                                  view_name]),
                        self.client.encoder,
                        headers,
                        **kwargs)
        # List functions return rendered text (e.g. HTML), not JSON.
        return resp.text

    def get_show_function_result(self, ddoc_id, show_name, doc_id):
        """
        Retrieves a formatted document from the specified database based on
        the show function provided.  Show functions, for example, are used
        when you want to access Cloudant directly from a browser, and need
        data to be returned in a different format, such as HTML.

        For example:

        .. code-block:: python

            # Assuming that 'view001' exists as part of the
            # 'ddoc001' design document in the remote database...
            # Retrieve a formatted 'doc001' document where the show
            # function is 'show001'
            resp = db.get_show_function_result('ddoc001', 'show001',
                'doc001')
            for row in resp['rows']:
                # Process data (in text format).

        For more detail on show functions, refer to the
        `Cloudant show documentation `_.

        :param str ddoc_id: Design document id used to get the result.
        :param str show_name: Name used in part to identify the show
            function.
        :param str doc_id: The ID of the document to show.

        :return: Formatted document result data in text format
        """
        ddoc = DesignDocument(self, ddoc_id)
        headers = {'Content-Type': 'application/json'}
        resp = get_docs(self.r_session,
                        '/'.join([ddoc.document_url, '_show', show_name,
                                  doc_id]),
                        self.client.encoder,
                        headers)
        # Show functions return rendered text (e.g. HTML), not JSON.
        return resp.text

    def update_handler_result(self, ddoc_id, handler_name, doc_id=None,
                              data=None, **params):
        """
        Creates or updates a document from the specified database based on
        the update handler function provided.  Update handlers are used, for
        example, to provide server-side modification timestamps, and
        document updates to individual fields without the latest revision.
        You can provide query parameters needed by the update handler
        function using the ``params`` argument.
        Create a document with a generated ID:

        .. code-block:: python

            # Assuming that 'update001' update handler exists as part of the
            # 'ddoc001' design document in the remote database...
            # Execute 'update001' to create a new document
            resp = db.update_handler_result('ddoc001', 'update001',
                data={'name': 'John', 'message': 'hello'})

        Create or update a document with the specified ID:

        .. code-block:: python

            # Assuming that 'update001' update handler exists as part of the
            # 'ddoc001' design document in the remote database...
            # Execute 'update001' to update document 'doc001' in the
            # database
            resp = db.update_handler_result('ddoc001', 'update001',
                'doc001', data={'month': 'July'})

        For more details, see the `update handlers documentation `_.

        :param str ddoc_id: Design document id used to get result.
        :param str handler_name: Name used in part to identify the update
            handler function.
        :param str doc_id: Optional document id used to specify the
            document to be handled.

        :returns: Result of update handler function in text format
        """
        ddoc = DesignDocument(self, ddoc_id)
        if doc_id:
            # Updating a specific document is a PUT against the handler URL.
            resp = self.r_session.put(
                '/'.join([ddoc.document_url, '_update', handler_name,
                          doc_id]),
                params=params,
                data=data)
        else:
            # Creating a document with a generated id is a POST.
            resp = self.r_session.post(
                '/'.join([ddoc.document_url, '_update', handler_name]),
                params=params,
                data=data)
        resp.raise_for_status()
        return resp.text

    def get_query_indexes(self, raw_result=False):
        """
        Retrieves query indexes from the remote database.

        :param bool raw_result: If set to True then the raw JSON content for
            the request is returned.  Default is to return a list containing
            :class:`~cloudant.index.Index`,
            :class:`~cloudant.index.TextIndex`, and
            :class:`~cloudant.index.SpecialIndex` wrapped objects.

        :returns: The query indexes in the database
        """
        url = '/'.join((self.database_url, '_index'))
        resp = self.r_session.get(url)
        resp.raise_for_status()

        if raw_result:
            return response_to_json_dict(resp)

        indexes = []
        # Wrap each returned index definition in the matching index class.
        for data in response_to_json_dict(resp).get('indexes', []):
            if data.get('type') == JSON_INDEX_TYPE:
                indexes.append(Index(
                    self,
                    data.get('ddoc'),
                    data.get('name'),
                    partitioned=data.get('partitioned', False),
                    **data.get('def', {})
                ))
            elif data.get('type') == TEXT_INDEX_TYPE:
                indexes.append(TextIndex(
                    self,
                    data.get('ddoc'),
                    data.get('name'),
                    partitioned=data.get('partitioned', False),
                    **data.get('def', {})
                ))
            elif data.get('type') == SPECIAL_INDEX_TYPE:
                indexes.append(SpecialIndex(
                    self,
                    data.get('ddoc'),
                    data.get('name'),
                    partitioned=data.get('partitioned', False),
                    **data.get('def', {})
                ))
            else:
                # The server returned an unrecognized index type.
                raise CloudantDatabaseException(101, data.get('type'))
        return indexes

    def create_query_index(
            self,
            design_document_id=None,
            index_name=None,
            index_type='json',
            partitioned=None,
            **kwargs
    ):
        """
        Creates either a JSON or a text query index in the remote database.

        :param str index_type: The type of the index to create.  Can be
            either 'text' or 'json'.  Defaults to 'json'.
        :param str design_document_id: Optional identifier of the design
            document in which the index will be created.  If omitted the
            default is that each index will be created in its own design
            document.  Indexes can be grouped into design documents for
            efficiency.  However, a change to one index in a design
            document will invalidate all other indexes in the same document.
        :param str index_name: Optional name of the index.  If omitted, a
            name will be generated automatically.
        :param list fields: A list of fields that should be indexed.  For
            JSON indexes, the fields parameter is mandatory and should
            follow the 'sort syntax'.  For example
            ``fields=['name', {'age': 'desc'}]`` will create an index on the
            'name' field in ascending order and the 'age' field in
            descending order.

            For text indexes, the fields parameter is optional.  If it is
            included then each field element in the fields list must be a
            single element dictionary where the key is the field name and
            the value is the field type.  For example
            ``fields=[{'name': 'string'}, {'age': 'number'}]``.  Valid
            field types are ``'string'``, ``'number'``, and ``'boolean'``.
        :param dict default_field: Optional parameter that specifies how the
            ``$text`` operator can be used with the index.  Only valid when
            creating a text index.
        :param dict selector: Optional parameter that can be used to limit
            the index to a specific set of documents that match a query.
            It uses the same syntax used for selectors in queries.  Only
            valid when creating a text index.

        :returns: An Index object representing the index created in the
            remote database
        """
        if index_type == JSON_INDEX_TYPE:
            index = Index(self, design_document_id, index_name,
                          partitioned=partitioned, **kwargs)
        elif index_type == TEXT_INDEX_TYPE:
            index = TextIndex(self, design_document_id, index_name,
                              partitioned=partitioned, **kwargs)
        else:
            raise CloudantArgumentError(103, index_type)
        index.create()
        return index

    def delete_query_index(self, design_document_id, index_type, index_name):
        """
        Deletes the query index identified by the design document id, index
        type and index name from the remote database.

        :param str design_document_id: The design document id that the index
            exists in.
        :param str index_type: The type of the index to be deleted.  Must be
            either 'text' or 'json'.
        :param str index_name: The index name of the index to be deleted.
""" if index_type == JSON_INDEX_TYPE: index = Index(self, design_document_id, index_name) elif index_type == TEXT_INDEX_TYPE: index = TextIndex(self, design_document_id, index_name) else: raise CloudantArgumentError(103, index_type) index.delete() def get_partitioned_query_result(self, partition_key, selector, fields=None, raw_result=False, **kwargs): """ Retrieves the partitioned query result from the specified database based on the query parameters provided. See :func:`~cloudant.database.CouchDatabase.get_query_result` method for further details. :param str partition_key: Partition key. :param str selector: Dictionary object describing criteria used to select documents. :param list fields: A list of fields to be returned by the query. :param bool raw_result: Dictates whether the query result is returned wrapped in a QueryResult or if the response JSON is returned. Defaults to False. :param kwargs: See :func:`~cloudant.database.CouchDatabase.get_query_result` method for available keyword arguments. :returns: The result content either wrapped in a QueryResult or as the raw response JSON content. :rtype: QueryResult, dict """ query = Query(self, selector=selector, fields=fields, partition_key=partition_key) return self._get_query_result(query, raw_result, **kwargs) def get_query_result(self, selector, fields=None, raw_result=False, **kwargs): """ Retrieves the query result from the specified database based on the query parameters provided. By default the result is returned as a :class:`~cloudant.result.QueryResult` which uses the ``skip`` and ``limit`` query parameters internally to handle slicing and iteration through the query result collection. Therefore ``skip`` and ``limit`` cannot be used as arguments to get the query result when ``raw_result=False``. However, by setting ``raw_result=True``, the result will be returned as the raw JSON response content for the query requested. Using this setting requires the developer to manage their own slicing and iteration. 
Therefore ``skip`` and ``limit`` are valid arguments in this instance. For example: .. code-block:: python # Retrieve documents where the name field is 'foo' selector = {'name': {'$eq': 'foo'}} docs = db.get_query_result(selector) for doc in docs: print doc # Retrieve documents sorted by the age field in ascending order docs = db.get_query_result(selector, sort=['name']) for doc in docs: print doc # Retrieve JSON response content, limiting response to 100 documents resp = db.get_query_result(selector, raw_result=True, limit=100) for doc in resp['docs']: print doc For more detail on slicing and iteration, refer to the :class:`~cloudant.result.QueryResult` documentation. :param dict selector: Dictionary object describing criteria used to select documents. :param list fields: A list of fields to be returned by the query. :param bool raw_result: Dictates whether the query result is returned wrapped in a QueryResult or if the response JSON is returned. Defaults to False. :param str bookmark: A string that enables you to specify which page of results you require. :param int limit: Maximum number of results returned. Only valid if used with ``raw_result=True``. :param int page_size: Sets the page size for result iteration. Default is 100. Only valid with ``raw_result=False``. :param int r: Read quorum needed for the result. Each document is read from at least 'r' number of replicas before it is returned in the results. :param int skip: Skip the first 'n' results, where 'n' is the value specified. Only valid if used with ``raw_result=True``. :param list sort: A list of fields to sort by. Optionally the list can contain elements that are single member dictionary structures that specify sort direction. For example ``sort=['name', {'age': 'desc'}]`` means to sort the query results by the "name" field in ascending order and the "age" field in descending order. 
:param str use_index: Identifies a specific index for the query to run against, rather than using the Cloudant Query algorithm which finds what it believes to be the best index. :returns: The result content either wrapped in a QueryResult or as the raw response JSON content """ query = Query(self, selector=selector, fields=fields) return self._get_query_result(query, raw_result, **kwargs) @staticmethod def _get_query_result(query, raw_result, **kwargs): """ Get query results helper. """ if raw_result: return query(**kwargs) if kwargs: return QueryResult(query, **kwargs) return query.result class CloudantDatabase(CouchDatabase): """ Encapsulates a Cloudant database. A CloudantDatabase object is instantiated with a reference to a client/session. It supports accessing the documents, and various database features such as the document indexes, changes feed, design documents, etc. :param Cloudant client: Client instance used by the database. :param str database_name: Database name used to reference the database. :param int fetch_limit: Optional fetch limit used to set the max number of documents to fetch per query during iteration cycles. Defaults to 100. :param bool partitioned: Create as a partitioned database. Defaults to ``False``. """ def __init__(self, client, database_name, fetch_limit=100, partitioned=False): super(CloudantDatabase, self).__init__( client, database_name, fetch_limit=fetch_limit, partitioned=partitioned ) def security_document(self): """ Retrieves the security document for the current database containing information about the users that the database is shared with. :returns: Security document as a ``dict`` """ return dict(self.get_security_document()) @property def security_url(self): """ Constructs and returns the security document URL. 
:returns: Security document URL """ url = '/'.join((self._database_host, '_api', 'v2', 'db', self.database_name, '_security')) return url def share_database(self, username, roles=None): """ Shares the current remote database with the username provided. You can grant varying degrees of access rights, default is to share read-only, but additional roles can be added by providing the specific roles as a ``list`` argument. If the user already has this database shared with them then it will modify/overwrite the existing permissions. :param str username: Cloudant user to share the database with. :param list roles: A list of `roles `_ to grant to the named user. :returns: Share database status in JSON format """ if roles is None: roles = ['_reader'] valid_roles = [ '_reader', '_writer', '_admin', '_replicator', '_db_updates', '_design', '_shards', '_security' ] doc = self.security_document() data = doc.get('cloudant', {}) perms = [] if all(role in valid_roles for role in roles): perms = list(set(roles)) if not perms: raise CloudantArgumentError(102, roles, valid_roles) data[username] = perms doc['cloudant'] = data resp = self.r_session.put( self.security_url, data=json.dumps(doc, cls=self.client.encoder), headers={'Content-Type': 'application/json'} ) resp.raise_for_status() return response_to_json_dict(resp) def unshare_database(self, username): """ Removes all sharing with the named user for the current remote database. This will remove the entry for the user from the security document. To modify permissions, use the :func:`~cloudant.database.CloudantDatabase.share_database` method instead. :param str username: Cloudant user to unshare the database from. 
:returns: Unshare database status in JSON format
        """
        doc = self.security_document()
        data = doc.get('cloudant', {})
        if username in data:
            del data[username]
        doc['cloudant'] = data
        # PUT the whole (modified) security document back to the server.
        resp = self.r_session.put(
            self.security_url,
            data=json.dumps(doc, cls=self.client.encoder),
            headers={'Content-Type': 'application/json'}
        )
        resp.raise_for_status()
        return response_to_json_dict(resp)

    def shards(self):
        """
        Retrieves information about the shards in the current remote
        database.

        :returns: Shard information retrieval status in JSON format
        """
        url = '/'.join((self.database_url, '_shards'))
        resp = self.r_session.get(url)
        resp.raise_for_status()
        return response_to_json_dict(resp)

    def get_partitioned_search_result(self, partition_key, ddoc_id,
                                      index_name, **query_params):
        """
        Retrieves the raw JSON content from the remote database based on the
        partitioned search index on the server, using the query_params
        provided as query parameters.  See
        :func:`~cloudant.database.CouchDatabase.get_search_result` method for
        further details.

        :param str partition_key: Partition key.
        :param str ddoc_id: Design document id used to get the search result.
        :param str index_name: Name used in part to identify the index.
        :param query_params: See
            :func:`~cloudant.database.CloudantDatabase.get_search_result`
            method for available keyword arguments.

        :returns: Search query result data in JSON format.
        :rtype: dict
        """
        ddoc = DesignDocument(self, ddoc_id)
        return self._get_search_result(
            '/'.join((
                ddoc.document_partition_url(partition_key),
                '_search',
                index_name
            )),
            **query_params
        )

    def get_search_result(self, ddoc_id, index_name, **query_params):
        """
        Retrieves the raw JSON content from the remote database based on the
        search index on the server, using the query_params provided as query
        parameters.  A ``query`` parameter containing the Lucene query syntax
        is mandatory.

        Example for search queries:

        ..
code-block:: python # Assuming that 'searchindex001' exists as part of the # 'ddoc001' design document in the remote database... # Retrieve documents where the Lucene field name is 'name' and # the value is 'julia*' resp = db.get_search_result('ddoc001', 'searchindex001', query='name:julia*', include_docs=True) for row in resp['rows']: # Process search index data (in JSON format). Example if the search query requires grouping by using the ``group_field`` parameter: .. code-block:: python # Assuming that 'searchindex001' exists as part of the # 'ddoc001' design document in the remote database... # Retrieve JSON response content, limiting response to 10 documents resp = db.get_search_result('ddoc001', 'searchindex001', query='name:julia*', group_field='name', limit=10) for group in resp['groups']: for row in group['rows']: # Process search index data (in JSON format). :param str ddoc_id: Design document id used to get the search result. :param str index_name: Name used in part to identify the index. :param str bookmark: Optional string that enables you to specify which page of results you require. Only valid for queries that do not specify the ``group_field`` query parameter. :param list counts: Optional JSON array of field names for which counts should be produced. The response will contain counts for each unique value of this field name among the documents matching the search query. Requires the index to have faceting enabled. :param list drilldown: Optional list of fields that each define a pair of a field name and a value. This field can be used several times. The search will only match documents that have the given value in the field name. It differs from using ``query=fieldname:value`` only in that the values are not analyzed. :param str group_field: Optional string field by which to group search matches. Fields containing other data (numbers, objects, arrays) can not be used. :param int group_limit: Optional number with the maximum group count. 
This field can only be used if ``group_field`` query parameter is specified. :param group_sort: Optional JSON field that defines the order of the groups in a search using ``group_field``. The default sort order is relevance. This field can have the same values as the sort field, so single fields as well as arrays of fields are supported. :param int limit: Optional number to limit the maximum count of the returned documents. In case of a grouped search, this parameter limits the number of documents per group. :param query/q: A Lucene query in the form of ``name:value``. If name is omitted, the special value ``default`` is used. The ``query`` parameter can be abbreviated as ``q``. :param ranges: Optional JSON facet syntax that reuses the standard Lucene syntax to return counts of results which fit into each specified category. Inclusive range queries are denoted by brackets. Exclusive range queries are denoted by curly brackets. For example ``ranges={"price":{"cheap":"[0 TO 100]"}}`` has an inclusive range of 0 to 100. Requires the index to have faceting enabled. :param sort: Optional JSON string of the form ``fieldname`` for ascending or ``-fieldname`` for descending sort order. Fieldname is the name of a string or number field and type is either number or string or a JSON array of such strings. The type part is optional and defaults to number. :param str stale: Optional string to allow the results from a stale index to be used. This makes the request return immediately, even if the index has not been completely built yet. :param list highlight_fields: Optional list of fields which should be highlighted. :param str highlight_pre_tag: Optional string inserted before the highlighted word in the highlights output. Defaults to ````. :param str highlight_post_tag: Optional string inserted after the highlighted word in the highlights output. Defaults to ````. :param int highlight_number: Optional number of fragments returned in highlights. 
If the search term occurs less often than the number of fragments
            specified, longer fragments are returned.  Default is 1.
        :param int highlight_size: Optional number of characters in each
            fragment for highlights.  Defaults to 100 characters.
        :param list include_fields: Optional list of field names to include
            in search results.  Any fields included must have been indexed
            with the ``store:true`` option.

        :returns: Search query result data in JSON format
        """
        ddoc = DesignDocument(self, ddoc_id)
        return self._get_search_result(
            '/'.join((ddoc.document_url, '_search', index_name)),
            **query_params
        )

    def _get_search_result(self, query_url, **query_params):
        """
        Get search results helper.

        :param str query_url: URL of the search index endpoint to POST
            the query to.
        :param query_params: Search query parameters.  Exactly one of ``q``
            or ``query`` must be supplied.

        :returns: Search query result data in JSON format

        :raises CloudantArgumentError: If neither or both of ``q``/``query``
            are given, if an unrecognized parameter name is used, or if a
            parameter value has the wrong type.
        """
        param_q = query_params.get('q')
        param_query = query_params.get('query')
        # Either q or query parameter is required: supplying both, or
        # neither, is rejected (exclusive-or via truthiness comparison).
        if bool(param_q) == bool(param_query):
            raise CloudantArgumentError(104, query_params)
        # Validate query argument names and value types against the
        # SEARCH_INDEX_ARGS whitelist.
        for key, val in iteritems_(query_params):
            if key not in list(SEARCH_INDEX_ARGS.keys()):
                raise CloudantArgumentError(105, key)
            if not isinstance(val, SEARCH_INDEX_ARGS[key]):
                raise CloudantArgumentError(106, key, SEARCH_INDEX_ARGS[key])
        # Execute query search
        headers = {'Content-Type': 'application/json'}
        resp = self.r_session.post(
            query_url,
            headers=headers,
            data=json.dumps(query_params, cls=self.client.encoder)
        )
        resp.raise_for_status()
        return response_to_json_dict(resp)

================================================ FILE: src/cloudant/design_document.py ================================================
#!/usr/bin/env python
# Copyright (C) 2015, 2019 IBM. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ API module/class for interacting with a design document in a database. """ from ._2to3 import iteritems_, url_quote_plus, STRTYPE from ._common_util import QUERY_LANGUAGE, codify, response_to_json_dict, \ assert_document_type_id, DESIGN_PREFIX from .document import Document from .view import View, QueryIndexView from .error import CloudantArgumentError, CloudantDesignDocumentException class DesignDocument(Document): """ Encapsulates a specialized version of a :class:`~cloudant.document.Document`. A DesignDocument object is instantiated with a reference to a database and provides an API to view management, index management, list and show functions, etc. When instantiating a DesignDocument or when setting the document id (``_id``) field, the value must start with ``_design/``. If it does not, then ``_design/`` will be prepended to the provided document id value. Note: Currently only the view management and search index management API exists. Remaining design document functionality will be added later. :param database: A database instance used by the DesignDocument. Can be either a ``CouchDatabase`` or ``CloudantDatabase`` instance. :param str document_id: Optional document id. If provided and does not start with ``_design/``, it will be prepended with ``_design/``. :param bool partitioned: Optional. Create as a partitioned design document. Defaults to ``False`` for both partitioned and non-partitioned databases. 
""" def __init__(self, database, document_id=None, partitioned=False): if document_id: assert_document_type_id(document_id) if document_id and not document_id.startswith(DESIGN_PREFIX): document_id = '{0}{1}'.format(DESIGN_PREFIX, document_id) super(DesignDocument, self).__init__(database, document_id) if partitioned: self.setdefault('options', {'partitioned': True}) else: self.setdefault('options', {'partitioned': False}) self._nested_object_names = frozenset(['views', 'indexes', 'lists', 'shows']) for prop in self._nested_object_names: self.setdefault(prop, dict()) @property def validate_doc_update(self): """ Provides an accessor property to the update validators dictionary in the locally cached DesignDocument. Update validators evaluate whether a document should be written to disk when insertions and updates are attempted. Update validator example: .. code-block:: python # Add the update validator to ``validate_doc_update`` and save the design document ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['validate_doc_update'] = ( 'function(newDoc, oldDoc, userCtx, secObj) { ' 'if (newDoc.address === undefined) { ' 'throw({forbidden: \'Document must have an address.\'}); }}') ddoc.save() For more details, see the `Update Validators documentation `_. :returns: Dictionary containing update validator functions """ return self.get('validate_doc_update') @property def filters(self): """ Provides an accessor property to the filters dictionary in the locally cached DesignDocument. Filter functions enable you to add tests for filtering each of the objects included in the changes feed. If any of the function tests fail, the object is filtered from the feed. If the function returns a true result when applied to a change, the change remains in the feed. Filter functions require two arguments: ``doc`` and ``req``. The ``doc`` argument represents the document being tested for filtering. The ``req`` argument contains additional information about the HTTP request. 
Filter function example: .. code-block:: python # Add the filter function to ``filters`` and save the design document ddoc = DesignDocument(self.db, '_design/ddoc001') # Filter and remove documents that are not of ``type`` mail ddoc['filters'] = { 'filter001': 'function(doc, req){if (doc.type != \'mail\'){return false;} ' 'return true;} ' } ddoc.save() To execute filter functions on a changes feed, see the database API :func:`~cloudant.database.CouchDatabase.changes` For more details, see the `Filter functions documentation `_. :returns: Dictionary containing filter function names and functions as key/value """ return self.get('filters') @property def updates(self): """ Provides an accessor property to the updates dictionary in the locally cached DesignDocument. Update handlers are custom functions stored on Cloudant's server that will create or update a document. To execute the update handler function, see :func:`~cloudant.database.CouchDatabase.update_handler_result`. Update handlers receive two arguments: ``doc`` and ``req``. If a document ID is provided in the request to the update handler, then ``doc`` will be the document corresponding with that ID. If no ID was provided, ``doc`` will be null. Update handler example: .. code-block:: python # Add the update handler to ``updates`` and save the design document ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc001['updates'] = { 'update001': 'function(doc, req) { if (!doc) ' '{ if ('id' in req && req.id){ return [{_id: req.id}, ' '\"New World\"] } return [null, \"Empty World\"] } ' 'doc.world = \'hello\'; ' 'return [doc, \"Added world.hello!\"]} ' } ddoc.save() Note: Update handler functions must return an array of two elements, the first being the document to save (or null, if you don't want to save anything), and the second being the response body. 
:returns: Dictionary containing update handler names and objects as key/value """ return self.get('updates') @property def st_indexes(self): """ Provides an accessor property to the Cloudant Geospatial (a.k.a. Cloudant Geo) indexes dictionary in the locally cached DesignDocument. Each Cloudant Geo index is a JSON object within the ``st_indexes`` containing an index name and a javascript function. Note: To make it easier to work with Cloudant Geo documents, it is best practice to create a separate design document specifically for Cloudant Geo indexes. Geospatial index example: .. code-block:: python # Add the Cloudant Geo index to ``st_indexes`` and save the design document ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['st_indexes'] = { 'geoidx': { 'index': 'function(doc) { ' 'if (doc.geometry && doc.geometry.coordinates) { ' 'st_index(doc.geometry);}} ' } } ddoc.save() Once the Cloudant Geo index is saved to the remote database, you can query the index with a GET request. To issue a request against the ``_geo`` endpoint, see the steps outlined in the `endpoint access `_ section. For more details, see the `Cloudant Geospatial documentation `_. :return: Dictionary containing Cloudant Geo names and index objects as key/value """ return self.get('st_indexes') @property def lists(self): """ Provides an accessor property to the lists dictionary in the locally cached DesignDocument. :returns: Dictionary containing list names and objects as key/value """ return self.get('lists') @property def shows(self): """ Provides an accessor property to the shows dictionary in the locally cached DesignDocument. :returns: Dictionary containing show names and functions as key/value """ return self.get('shows') @property def rewrites(self): """ Provides an accessor property to a list of dictionaries with rewrite rules in the locally cached DesignDocument. Each rule for URL rewriting is a JSON object with four fields: ``from``, ``to``, ``method``, and ``query``. 
Note: Requests that match the rewrite rules must have a URL path that starts with ``/$DATABASE/_design/doc/_rewrite``. Rewrite rule example: .. code-block:: python # Add the rule to ``rewrites`` and save the design document ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['rewrites'] = [ {"from": "/old/topic", "to": "/new/", "method": "GET", "query": {} } ] ddoc.save() Once the rewrite rule is saved to the remote database, the GET request URL ``/$DATABASE/_design/doc/_rewrite/old/topic?k=v`` would be rewritten as ``/$DATABASE/_design/doc/_rewrite/new?k=v``. For more details on URL rewriting, see the `rewrite rules documentation `_. :returns: List of dictionaries containing rewrite rules as key/value """ return self.get('rewrites') @property def views(self): """ Provides an accessor property to the View dictionary in the locally cached DesignDocument. :returns: Dictionary containing view names and View objects as key/value """ return self.get('views') @property def indexes(self): """ Provides an accessor property to the indexes dictionary in the locally cached DesignDocument. :returns: Dictionary containing index names and index objects as key/value """ return self.get('indexes') def document_partition_url(self, partition_key): """ Retrieve the design document partition URL. :param str partition_key: Partition key. :return: Design document partition URL. :rtype: str """ return '/'.join(( self._database.database_partition_url(partition_key), '_design', url_quote_plus(self['_id'][8:], safe='') )) def add_view(self, view_name, map_func, reduce_func=None, **kwargs): """ Appends a MapReduce view to the locally cached DesignDocument View dictionary. To create a JSON query index use :func:`~cloudant.database.CloudantDatabase.create_query_index` instead. A CloudantException is raised if an attempt to add a QueryIndexView (JSON query index) using this method is made. :param str view_name: Name used to identify the View. :param str map_func: Javascript map function. 
:param str reduce_func: Optional Javascript reduce function. """ if self.get_view(view_name) is not None: raise CloudantArgumentError(107, view_name) if self.get('language', None) == QUERY_LANGUAGE: raise CloudantDesignDocumentException(101) view = View(self, view_name, map_func, reduce_func, **kwargs) self.views.__setitem__(view_name, view) def add_search_index(self, index_name, search_func, analyzer=None): """ Appends a Cloudant search index to the locally cached DesignDocument indexes dictionary. :param str index_name: Name used to identify the search index. :param str search_func: Javascript search index function. :param analyzer: Optional analyzer for this search index. """ if self.get_index(index_name) is not None: raise CloudantArgumentError(108, index_name) if analyzer is not None: search = {'index': codify(search_func), 'analyzer': analyzer} else: search = {'index': codify(search_func)} self.indexes.__setitem__(index_name, search) def add_list_function(self, list_name, list_func): """ Appends a list function to the locally cached DesignDocument indexes dictionary. :param str list_name: Name used to identify the list function. :param str list_func: Javascript list function. """ if self.get_list_function(list_name) is not None: raise CloudantArgumentError(109, list_name) self.lists.__setitem__(list_name, codify(list_func)) def add_show_function(self, show_name, show_func): """ Appends a show function to the locally cached DesignDocument shows dictionary. :param show_name: Name used to identify the show function. :param show_func: Javascript show function. """ if self.get_show_function(show_name) is not None: raise CloudantArgumentError(110, show_name) self.shows.__setitem__(show_name, show_func) def update_view(self, view_name, map_func, reduce_func=None, **kwargs): """ Modifies/overwrites an existing MapReduce view definition in the locally cached DesignDocument View dictionary. 
To update a JSON query index use :func:`~cloudant.database.CloudantDatabase.delete_query_index` followed by :func:`~cloudant.database.CloudantDatabase.create_query_index` instead. A CloudantException is raised if an attempt to update a QueryIndexView (JSON query index) using this method is made. :param str view_name: Name used to identify the View. :param str map_func: Javascript map function. :param str reduce_func: Optional Javascript reduce function. """ view = self.get_view(view_name) if view is None: raise CloudantArgumentError(111, view_name) if isinstance(view, QueryIndexView): raise CloudantDesignDocumentException(102) view = View(self, view_name, map_func, reduce_func, **kwargs) self.views.__setitem__(view_name, view) def update_search_index(self, index_name, search_func, analyzer=None): """ Modifies/overwrites an existing Cloudant search index in the locally cached DesignDocument indexes dictionary. :param str index_name: Name used to identify the search index. :param str search_func: Javascript search index function. :param analyzer: Optional analyzer for this search index. """ search = self.get_index(index_name) if search is None: raise CloudantArgumentError(112, index_name) if analyzer is not None: search = {'index': codify(search_func), 'analyzer': analyzer} else: search = {'index': codify(search_func)} self.indexes.__setitem__(index_name, search) def update_list_function(self, list_name, list_func): """ Modifies/overwrites an existing list function in the locally cached DesignDocument indexes dictionary. :param str list_name: Name used to identify the list function. :param str list_func: Javascript list function. """ if self.get_list_function(list_name) is None: raise CloudantArgumentError(113, list_name) self.lists.__setitem__(list_name, codify(list_func)) def update_show_function(self, show_name, show_func): """ Modifies/overwrites an existing show function in the locally cached DesignDocument shows dictionary. 
:param show_name: Name used to identify the show function. :param show_func: Javascript show function. """ if self.get_show_function(show_name) is None: raise CloudantArgumentError(114, show_name) self.shows.__setitem__(show_name, show_func) def delete_view(self, view_name): """ Removes an existing MapReduce view definition from the locally cached DesignDocument View dictionary. To delete a JSON query index use :func:`~cloudant.database.CloudantDatabase.delete_query_index` instead. A CloudantException is raised if an attempt to delete a QueryIndexView (JSON query index) using this method is made. :param str view_name: Name used to identify the View. """ view = self.get_view(view_name) if view is None: return if isinstance(view, QueryIndexView): raise CloudantDesignDocumentException(103) self.views.__delitem__(view_name) def delete_index(self, index_name): """ Removes an existing index in the locally cached DesignDocument indexes dictionary. :param str index_name: Name used to identify the index. """ index = self.get_index(index_name) if index is None: return self.indexes.__delitem__(index_name) def delete_list_function(self, list_name): """ Removes an existing list function in the locally cached DesignDocument lists dictionary. :param str list_name: Name used to identify the list. """ self.lists.__delitem__(list_name) def delete_show_function(self, show_name): """ Removes an existing show function in the locally cached DesignDocument shows dictionary. :param show_name: Name used to identify the list. """ if self.get_show_function(show_name) is None: return self.shows.__delitem__(show_name) def fetch(self): """ Retrieves the remote design document content and populates the locally cached DesignDocument dictionary. View content is stored either as View or QueryIndexView objects which are extensions of the ``dict`` type. All other design document data are stored directly as ``dict`` types. 
""" super(DesignDocument, self).fetch() if self.views: for view_name, view_def in iteritems_(self.get('views', dict())): if self.get('language', None) != QUERY_LANGUAGE: self['views'][view_name] = View( self, view_name, view_def.pop('map', None), view_def.pop('reduce', None), **view_def ) else: self['views'][view_name] = QueryIndexView( self, view_name, view_def.pop('map', None), view_def.pop('reduce', None), **view_def ) for prop in self._nested_object_names: # Ensure dict for each sub-object exists in locally cached DesignDocument. getattr(self, prop, self.setdefault(prop, dict())) # pylint: disable=too-many-branches def save(self): """ Saves changes made to the locally cached DesignDocument object's data structures to the remote database. If the design document does not exist remotely then it is created in the remote database. If the object does exist remotely then the design document is updated remotely. In either case the locally cached DesignDocument object is also updated accordingly based on the successful response of the operation. """ if self.views: if self.get('language', None) != QUERY_LANGUAGE: for view_name, view in self.iterviews(): if isinstance(view, QueryIndexView): raise CloudantDesignDocumentException(104, view_name) else: for view_name, view in self.iterviews(): if not isinstance(view, QueryIndexView): raise CloudantDesignDocumentException(105, view_name) if self.indexes: if self.get('language', None) != QUERY_LANGUAGE: for index_name, search in self.iterindexes(): # Check the instance of the javascript search function if not isinstance(search['index'], STRTYPE): raise CloudantDesignDocumentException(106, index_name) else: for index_name, index in self.iterindexes(): if not isinstance(index['index'], dict): raise CloudantDesignDocumentException(107, index_name) for prop in self._nested_object_names: if not getattr(self, prop): # Ensure empty dict for each sub-object is not saved remotely. 
self.__delitem__(prop) super(DesignDocument, self).save() for prop in self._nested_object_names: # Ensure views, indexes, and lists dict exist in locally cached DesignDocument. getattr(self, prop, self.setdefault(prop, dict())) def __setitem__(self, key, value): """ Ensures that when setting the document id for a DesignDocument it is always prefaced with '_design'. """ if ( key == '_id' and value is not None and not value.startswith('_design/') ): value = '_design/{0}'.format(value) super(DesignDocument, self).__setitem__(key, value) def iterviews(self): """ Provides a way to iterate over the locally cached DesignDocument View dictionary. For example: .. code-block:: python for view_name, view in ddoc.iterviews(): # Perform view processing :returns: Iterable containing view name and associated View object """ for view_name, view in iteritems_(self.views): yield view_name, view def iterindexes(self): """ Provides a way to iterate over the locally cached DesignDocument indexes dictionary. For example: .. code-block:: python for index_name, search_func in ddoc.iterindexes(): # Perform search index processing :returns: Iterable containing index name and associated index object """ for index_name, search_func in iteritems_(self.indexes): yield index_name, search_func def iterlists(self): """ Provides a way to iterate over the locally cached DesignDocument lists dictionary. :returns: Iterable containing list function name and associated list function """ for list_name, list_func in iteritems_(self.lists): yield list_name, list_func def itershows(self): """ Provides a way to iterate over the locally cached DesignDocument shows dictionary. :returns: Iterable containing show function name and associated show function """ for show_name, show_func in iteritems_(self.shows): yield show_name, show_func def list_views(self): """ Retrieves a list of available View objects in the locally cached DesignDocument. 
:returns: List of view names """ return list(self.views.keys()) def list_indexes(self): """ Retrieves a list of available indexes in the locally cached DesignDocument. :returns: List of index names """ return list(self.indexes.keys()) def list_list_functions(self): """ Retrieves a list of available list functions in the locally cached DesignDocument lists dictionary. :returns: List of list function names """ return list(self.lists.keys()) def list_show_functions(self): """ Retrieves a list of available show functions in the locally cached DesignDocument shows dictionary. :returns: List of show function names """ return list(self.shows.keys()) def get_view(self, view_name): """ Retrieves a specific View from the locally cached DesignDocument by name. :param str view_name: Name used to identify the View. :returns: View object for the specified view_name """ return self.views.get(view_name) def get_index(self, index_name): """ Retrieves a specific index from the locally cached DesignDocument indexes dictionary by name. :param str index_name: Name used to identify the index. :returns: Index dictionary for the specified index name """ return self.indexes.get(index_name) def get_list_function(self, list_name): """ Retrieves a specific list function from the locally cached DesignDocument lists dictionary by name. :param str list_name: Name used to identify the list function. :returns: String form of the specified list function """ return self.lists.get(list_name) def get_show_function(self, show_name): """ Retrieves a specific show function from the locally cached DesignDocument shows dictionary by name. :param str show_name: Name used to identify the show function. 
:returns: String form of the specified show function """ return self.shows.get(show_name) def info(self): """ Retrieves the design document view information data, returns dictionary GET databasename/_design/{ddoc}/_info """ ddoc_info = self.r_session.get( '/'.join([self.document_url, '_info'])) ddoc_info.raise_for_status() return response_to_json_dict(ddoc_info) def search_info(self, search_index): """ Retrieves information about a specified search index within the design document, returns dictionary GET databasename/_design/{ddoc}/_search_info/{search_index} """ ddoc_search_info = self.r_session.get( '/'.join([self.document_url, '_search_info', search_index])) ddoc_search_info.raise_for_status() return response_to_json_dict(ddoc_search_info) def search_disk_size(self, search_index): """ Retrieves disk size information about a specified search index within the design document, returns dictionary GET databasename/_design/{ddoc}/_search_disk_size/{search_index} """ ddoc_search_disk_size = self.r_session.get( '/'.join([self.document_url, '_search_disk_size', search_index])) ddoc_search_disk_size.raise_for_status() return response_to_json_dict(ddoc_search_disk_size) ================================================ FILE: src/cloudant/document.py ================================================ #!/usr/bin/env python # Copyright © 2015, 2021 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ API module/class for interacting with a document in a database. 
""" import json import requests from requests.exceptions import HTTPError from ._2to3 import url_quote, url_quote_plus from ._common_util import response_to_json_dict, assert_document_type_id, assert_attachment_name from .error import CloudantDocumentException class Document(dict): """ Encapsulates a JSON document. A Document object is instantiated with a reference to a database and used to manipulate document content in a CouchDB or Cloudant database instance. In addition to basic CRUD style operations, a Document object also provides a convenient context manager. This context manager removes having to explicitly :func:`~cloudant.document.Document.fetch` the document from the remote database before commencing work on it as well as explicitly having to :func:`~cloudant.document.Document.save` the document once work is complete. For example: .. code-block:: python # Upon entry into the document context, fetches the document from the # remote database, if it exists. Upon exit from the context, saves the # document to the remote database with changes made within the context. with Document(database, 'julia006') as document: # The document is fetched from the remote database # Changes are made locally document['name'] = 'Julia' document['age'] = 6 # The document is saved to the remote database :param database: A database instance used by the Document. Can be either a ``CouchDatabase`` or ``CloudantDatabase`` instance. :param str document_id: Optional document id used to identify the document. :param str encoder: Optional JSON encoder object (extending json.JSONEncoder). :param str decoder: Optional JSON decoder object (extending json.JSONDecoder). 
""" def __init__(self, database, document_id=None, **kwargs): super(Document, self).__init__() self._client = database.client self._database = database self._database_host = self._client.server_url self._database_name = database.database_name if document_id: self['_id'] = document_id self.encoder = kwargs.get('encoder') or self._client.encoder self.decoder = kwargs.get('decoder') or json.JSONDecoder @property def r_session(self): """ Returns the database instance ``r_session`` used by the document. :returns: Client ``r_session`` """ return self._client.r_session @property def document_url(self): """ Constructs and returns the document URL. :returns: Document URL """ if '_id' not in self or self['_id'] is None: return None # handle design document url if self['_id'].startswith('_design/'): return '/'.join(( self._database_host, url_quote_plus(self._database_name), '_design', url_quote(self['_id'][8:], safe='') )) # handle _local document url if self['_id'].startswith('_local/'): return '/'.join(( self._database_host, url_quote_plus(self._database_name), '_local', url_quote(self['_id'][7:], safe='') )) # handle document url return '/'.join(( self._database_host, url_quote_plus(self._database_name), url_quote(self['_id'], safe='') )) def exists(self): """ Retrieves whether the document exists in the remote database or not. :returns: True if the document exists in the remote database, otherwise False """ if '_id' not in self or self['_id'] is None: return False assert_document_type_id(self['_id']) resp = self.r_session.head(self.document_url) if resp.status_code not in [200, 404]: resp.raise_for_status() return resp.status_code == 200 def json(self): """ Retrieves the JSON string representation of the current locally cached document object, encoded by the encoder specified in the associated client object. 
:returns: Encoded JSON string containing the document data """ return json.dumps(dict(self), cls=self.encoder) def create(self): """ Creates the current document in the remote database and if successful, updates the locally cached Document object with the ``_id`` and ``_rev`` returned as part of the successful response. """ # Ensure that an existing document will not be "updated" doc = dict(self) if doc.get('_rev') is not None: doc.__delitem__('_rev') headers = {'Content-Type': 'application/json'} resp = self.r_session.post( self._database.database_url, headers=headers, data=json.dumps(doc, cls=self.encoder) ) resp.raise_for_status() data = response_to_json_dict(resp) super(Document, self).__setitem__('_id', data['id']) super(Document, self).__setitem__('_rev', data['rev']) def fetch(self): """ Retrieves the content of the current document from the remote database and populates the locally cached Document object with that content. A call to fetch will overwrite any dictionary content currently in the locally cached Document object. """ if self.document_url is None: raise CloudantDocumentException(101) if '_id' in self: assert_document_type_id(self['_id']) resp = self.r_session.get(self.document_url) resp.raise_for_status() self.clear() self.update(response_to_json_dict(resp, cls=self.decoder)) def save(self): """ Saves changes made to the locally cached Document object's data structures to the remote database. If the document does not exist remotely then it is created in the remote database. If the object does exist remotely then the document is updated remotely. In either case the locally cached Document object is also updated accordingly based on the successful response of the operation. 
""" headers = {} headers.setdefault('Content-Type', 'application/json') if not self.exists(): self.create() return put_resp = self.r_session.put( self.document_url, data=self.json(), headers=headers ) put_resp.raise_for_status() data = response_to_json_dict(put_resp) super(Document, self).__setitem__('_rev', data['rev']) return # Update Actions # These are handy functions to use with update_field below. @staticmethod def list_field_append(doc, field, value): """ Appends a value to a list field in a locally cached Document object. If a field does not exist it will be created first. :param Document doc: Locally cached Document object that can be a Document, DesignDocument or dict. :param str field: Name of the field list to append to. :param value: Value to append to the field list. """ if doc.get(field) is None: doc[field] = [] if not isinstance(doc[field], list): raise CloudantDocumentException(102, field) if value is not None: doc[field].append(value) @staticmethod def list_field_remove(doc, field, value): """ Removes a value from a list field in a locally cached Document object. :param Document doc: Locally cached Document object that can be a Document, DesignDocument or dict. :param str field: Name of the field list to remove from. :param value: Value to remove from the field list. """ if not isinstance(doc[field], list): raise CloudantDocumentException(102, field) doc[field].remove(value) @staticmethod def field_set(doc, field, value): """ Sets or replaces a value for a field in a locally cached Document object. To remove the field set the ``value`` to None. :param Document doc: Locally cached Document object that can be a Document, DesignDocument or dict. :param str field: Name of the field to set. :param value: Value to set the field to. """ if value is None: doc.__delitem__(field) else: doc[field] = value def _update_field(self, action, field, value, max_tries, tries=0): """ Private update_field method. Wrapped by Document.update_field. 
Tracks a "tries" var to help limit recursion. """ # Refresh our view of the document. self.fetch() # Update the field. action(self, field, value) # Attempt to save, retrying conflicts up to max_tries. try: self.save() except requests.HTTPError as ex: if tries < max_tries and ex.response.status_code == 409: self._update_field( action, field, value, max_tries, tries=tries+1) else: raise def update_field(self, action, field, value, max_tries=10): """ Updates a field in the remote document. If a conflict exists, the document is re-fetched from the remote database and the update is retried. This is performed up to ``max_tries`` number of times. Use this method when you want to update a single field in a document, and don't want to risk clobbering other people's changes to the document in other fields, but also don't want the caller to implement logic to deal with conflicts. For example: .. code-block:: python # Append the string 'foo' to the 'words' list of Document doc. doc.update_field( action=doc.list_field_append, field='words', value='foo' ) :param callable action: A routine that takes a Document object, a field name, and a value. The routine should attempt to update a field in the locally cached Document object with the given value, using whatever logic is appropriate. Valid actions are :func:`~cloudant.document.Document.list_field_append`, :func:`~cloudant.document.Document.list_field_remove`, :func:`~cloudant.document.Document.field_set` :param str field: Name of the field to update :param value: Value to update the field with :param int max_tries: In the case of a conflict, the number of retries to attempt """ self._update_field(action, field, value, max_tries) def delete(self): """ Removes the document from the remote database and clears the content of the locally cached Document object with the exception of the ``_id`` field. In order to successfully remove a document from the remote database, a ``_rev`` value must exist in the locally cached Document object. 
""" if not self.get("_rev"): raise CloudantDocumentException(103) assert_document_type_id(self['_id']) del_resp = self.r_session.delete( self.document_url, params={"rev": self["_rev"]}, ) del_resp.raise_for_status() _id = self['_id'] self.clear() self['_id'] = _id def __enter__(self): """ Supports context like editing of document fields. Handles context entry logic. Executes a Document.fetch() upon entry. """ # We don't want to raise an exception if the document is not found # because upon __exit__ the save() call will create the document # if necessary. try: self.fetch() except HTTPError as error: if error.response.status_code != 404: raise except CloudantDocumentException as error: if error.status_code != 101: raise return self def __exit__(self, exc_type, exc_value, traceback): """ Support context like editing of document fields. Handles context exit logic. Executes a `Document.save()` upon exit if no exception occurred. """ if exc_type is None: self.save() def get_attachment( self, attachment, headers=None, write_to=None, attachment_type=None): """ Retrieves a document's attachment and optionally writes it to a file. If the content_type of the attachment is 'application/json' then the data returned will be in JSON format otherwise the response content will be returned as text or binary. :param str attachment: Attachment file name used to identify the attachment. :param dict headers: Optional, additional headers to be sent with request. :param file write_to: Optional file handler to write the attachment to. The write_to file must be opened for writing prior to including it as an argument for this method. :param str attachment_type: Optional setting to define how to handle the attachment when returning its contents from this method. Valid values are ``'text'``, ``'json'``, and ``'binary'`` If omitted then the returned content will be based on the response Content-Type. 
:returns: The attachment content """ # need latest rev self.fetch() assert_attachment_name(attachment) attachment_url = '/'.join((self.document_url, url_quote(attachment, safe=''))) if headers is None: headers = {'If-Match': self['_rev']} else: headers['If-Match'] = self['_rev'] resp = self.r_session.get(attachment_url, headers=headers) resp.raise_for_status() if attachment_type is None: if resp.headers['Content-Type'].startswith('text/'): attachment_type = 'text' elif resp.headers['Content-Type'] == 'application/json': attachment_type = 'json' else: attachment_type = 'binary' if write_to is not None: if attachment_type in ('text', 'json'): write_to.write(resp.text) else: write_to.write(resp.content) if attachment_type == 'text': return resp.text if attachment_type == 'json': return response_to_json_dict(resp) return resp.content def delete_attachment(self, attachment, headers=None): """ Removes an attachment from a remote document and refreshes the locally cached document object. :param str attachment: Attachment file name used to identify the attachment. :param dict headers: Optional, additional headers to be sent with request. 
:returns: Attachment deletion status in JSON format """ # need latest rev self.fetch() assert_attachment_name(attachment) attachment_url = '/'.join((self.document_url, attachment)) if headers is None: headers = {'If-Match': self['_rev']} else: headers['If-Match'] = self['_rev'] resp = self.r_session.delete( attachment_url, headers=headers ) resp.raise_for_status() super(Document, self).__setitem__('_rev', response_to_json_dict(resp)['rev']) # Execute logic only if attachment metadata exists locally if self.get('_attachments'): # Remove the attachment metadata for the specified attachment if self['_attachments'].get(attachment): self['_attachments'].__delitem__(attachment) # Remove empty attachment metadata from the local dictionary if not self['_attachments']: super(Document, self).__delitem__('_attachments') return response_to_json_dict(resp) def put_attachment(self, attachment, content_type, data, headers=None): """ Adds a new attachment, or updates an existing attachment, to the remote document and refreshes the locally cached Document object accordingly. :param attachment: Attachment file name used to identify the attachment. :param content_type: The http ``Content-Type`` of the attachment used as an additional header. :param data: Attachment data defining the attachment content. :param headers: Optional, additional headers to be sent with request. 
:returns: Attachment addition/update status in JSON format """ # need latest rev self.fetch() assert_attachment_name(attachment) attachment_url = '/'.join((self.document_url, attachment)) if headers is None: headers = { 'If-Match': self['_rev'], 'Content-Type': content_type } else: headers['If-Match'] = self['_rev'] headers['Content-Type'] = content_type resp = self.r_session.put( attachment_url, data=data, headers=headers ) resp.raise_for_status() self.fetch() return response_to_json_dict(resp) ================================================ FILE: src/cloudant/error.py ================================================ #!/usr/bin/env python # Copyright (c) 2015, 2016 IBM. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Module that contains common exception classes for the Cloudant Python client library. """ from cloudant._messages import ( ARGUMENT_ERROR, CLIENT, DATABASE, DESIGN_DOCUMENT, DOCUMENT, FEED, INDEX, REPLICATOR, RESULT, VIEW) class CloudantException(Exception): """ Provides a way to issue Cloudant Python client library specific exceptions. A CloudantException object is instantiated with a message and optional code. Note: The intended use for this class is internal to the Cloudant Python client library. :param str msg: A message that describes the exception. :param int code: A code value used to identify the exception. 
""" def __init__(self, msg, code=None): super(CloudantException, self).__init__(msg) self.status_code = code class CloudantArgumentError(CloudantException): """ Provides a way to issue Cloudant Python client library specific exceptions that pertain to invalid argument errors. Note: The intended use for this class is internal to the Cloudant Python client library. :param int code: An optional code value used to identify the exception. Defaults to 100. :param args: A list of arguments used to format the exception message. """ def __init__(self, code=100, *args): try: msg = ARGUMENT_ERROR[code].format(*args) except (KeyError, IndexError): code = 100 msg = ARGUMENT_ERROR[code] super(CloudantArgumentError, self).__init__(msg, code) class ResultException(CloudantException): """ Provides a way to issue Cloudant Python client library result specific exceptions. :param int code: A code value used to identify the result exception. Defaults to 100. :param args: A list of arguments used to format the exception message. """ def __init__(self, code=100, *args): try: msg = RESULT[code].format(*args) except (KeyError, IndexError): code = 100 msg = RESULT[code] super(ResultException, self).__init__(msg, code) class CloudantClientException(CloudantException): """ Provides a way to issue Cloudant library client specific exceptions. :param int code: A code value used to identify the client exception. :param args: A list of arguments used to format the exception message. """ def __init__(self, code=100, *args): try: msg = CLIENT[code].format(*args) except (KeyError, IndexError): code = 100 msg = CLIENT[code] super(CloudantClientException, self).__init__(msg, code) class CloudantDatabaseException(CloudantException): """ Provides a way to issue Cloudant library database specific exceptions. :param int code: A code value used to identify the database exception. :param args: A list of arguments used to format the exception message. 
""" def __init__(self, code=100, *args): try: if code in DATABASE: msg = DATABASE[code].format(*args) elif isinstance(code, int): msg = ' '.join(args) else: code = 100 msg = DATABASE[code] except (KeyError, IndexError): code = 100 msg = DATABASE[code] super(CloudantDatabaseException, self).__init__(msg, code) class CloudantDesignDocumentException(CloudantException): """ Provides a way to issue Cloudant library design document exceptions. :param int code: A code value used to identify the design doc exception. :param args: A list of arguments used to format the exception message. """ def __init__(self, code=100, *args): try: msg = DESIGN_DOCUMENT[code].format(*args) except (KeyError, IndexError): code = 100 msg = DESIGN_DOCUMENT[code] super(CloudantDesignDocumentException, self).__init__(msg, code) class CloudantDocumentException(CloudantException): """ Provides a way to issue Cloudant library document specific exceptions. :param int code: A code value used to identify the document exception. :param args: A list of arguments used to format the exception message. """ def __init__(self, code=100, *args): try: msg = DOCUMENT[code].format(*args) except (KeyError, IndexError): code = 100 msg = DOCUMENT[code] super(CloudantDocumentException, self).__init__(msg, code) class CloudantFeedException(CloudantException): """ Provides a way to issue Cloudant library feed specific exceptions. :param int code: A code value used to identify the feed exception. :param args: A list of arguments used to format the exception message. """ def __init__(self, code=100, *args): try: msg = FEED[code].format(*args) except (KeyError, IndexError): code = 100 msg = FEED[code] super(CloudantFeedException, self).__init__(msg, code) class CloudantIndexException(CloudantException): """ Provides a way to issue Cloudant library index specific exceptions. :param int code: A code value used to identify the index exception. :param args: A list of arguments used to format the exception message. 
""" def __init__(self, code=100, *args): try: msg = INDEX[code].format(*args) except (KeyError, IndexError): code = 100 msg = INDEX[code] super(CloudantIndexException, self).__init__(msg, code) class CloudantReplicatorException(CloudantException): """ Provides a way to issue Cloudant library replicator specific exceptions. :param int code: A code value used to identify the replicator exception. :param args: A list of arguments used to format the exception message. """ def __init__(self, code=100, *args): try: msg = REPLICATOR[code].format(*args) except (KeyError, IndexError): code = 100 msg = REPLICATOR[code] super(CloudantReplicatorException, self).__init__(msg, code) class CloudantViewException(CloudantException): """ Provides a way to issue Cloudant library view specific exceptions. :param int code: A code value used to identify the view exception. :param args: A list of arguments used to format the exception message. """ def __init__(self, code=100, *args): try: msg = VIEW[code].format(*args) except (KeyError, IndexError): code = 100 msg = VIEW[code] super(CloudantViewException, self).__init__(msg, code) ================================================ FILE: src/cloudant/feed.py ================================================ #!/usr/bin/env python # Copyright (c) 2015, 2018 IBM. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" Module containing the Feed class which provides iterator support for consuming continuous and non-continuous feeds like ``_changes`` and ``_db_updates``. """ import json from ._2to3 import iteritems_, next_, unicode_, STRTYPE, NONETYPE from .error import CloudantArgumentError, CloudantFeedException from ._common_util import ANY_ARG, ANY_TYPE, feed_arg_types, TYPE_CONVERTERS class Feed(object): """ Provides an iterator for consuming client and database feeds such as ``_db_updates`` and ``_changes``. A Feed object is constructed with a :mod:`~cloudant.client` or a :mod:`~cloudant.database` which it uses to issue HTTP requests to the appropriate feed endpoint. Instead of using this class directly, it is recommended to use the client APIs :func:`~cloudant.client.CouchDB.db_updates`, :func:`~cloudant.client.Cloudant.db_updates`, or the database API :func:`~cloudant.database.CouchDatabase.changes`. Reference those methods for a list of valid feed options. :param source: Either a :mod:`~cloudant.client` object or a :mod:`~cloudant.database` object. :param bool raw_data: If set to True then the raw response data will be streamed otherwise if set to False then JSON formatted data will be streamed. Default is False. 
""" def __init__(self, source, raw_data=False, **options): self._r_session = source.r_session self._raw_data = raw_data self._options = options self._source = source.__class__.__name__ if self._source == 'CouchDB': self._url = '/'.join([source.server_url, '_db_updates']) # Set CouchDB _db_updates option defaults as they differ from # the _changes and Cloudant _db_updates option defaults self._options['feed'] = self._options.get('feed', 'longpoll') self._options['heartbeat'] = self._options.get('heartbeat', True) elif self._source == 'Cloudant': self._url = '/'.join([source.server_url, '_db_updates']) else: self._url = '/'.join([source.database_url, '_changes']) self._chunk_size = self._options.pop('chunk_size', 512) self._resp = None self._lines = None self._last_seq = None self._stop = False @property def last_seq(self): """ Returns the last sequence identifier for the feed. Only available after the feed has iterated through to completion. :returns: A string representing the last sequence number of a feed. """ return self._last_seq def stop(self): """ Stops a feed iteration. """ self._stop = True def _start(self): """ Starts streaming the feed using the provided session and feed options. """ params = self._translate(self._options) self._resp = self._r_session.get(self._url, params=params, stream=True) self._resp.raise_for_status() self._lines = self._resp.iter_lines(self._chunk_size) def _translate(self, options): """ Perform translation of feed options passed in as keyword arguments to CouchDB/Cloudant equivalent. 
""" translation = dict() for key, val in iteritems_(options): self._validate(key, val, feed_arg_types(self._source)) try: if isinstance(val, STRTYPE): translation[key] = val elif not isinstance(val, NONETYPE): arg_converter = TYPE_CONVERTERS.get(type(val), json.dumps) translation[key] = arg_converter(val) except Exception as ex: raise CloudantArgumentError(115, key, ex) return translation def _validate(self, key, val, arg_types): """ Ensures that the key and the value are valid arguments to be used with the feed. """ if key in arg_types: arg_type = arg_types[key] else: if ANY_ARG not in arg_types: raise CloudantArgumentError(116, key) arg_type = arg_types[ANY_ARG] if arg_type == ANY_TYPE: return if (not isinstance(val, arg_type) or (isinstance(val, bool) and int in arg_type)): raise CloudantArgumentError(117, key, arg_type) if isinstance(val, int) and val < 0 and not isinstance(val, bool): raise CloudantArgumentError(118, key, val) if key == 'feed': valid_vals = ('continuous', 'normal', 'longpoll') if self._source == 'CouchDB': valid_vals = ('continuous', 'longpoll') if val not in valid_vals: raise CloudantArgumentError(119, val, valid_vals) if key == 'style' and val not in ('main_only', 'all_docs'): raise CloudantArgumentError(120, val) def __iter__(self): """ Makes this object an iterator. """ return self def __next__(self): """ Provides Python3 compatibility. """ return self.next() # pylint: disable=not-callable def next(self): """ Handles the iteration by pulling the next line out of the stream, attempting to convert the response to JSON if necessary. :returns: Data representing what was seen in the feed """ while True: if not self._resp: self._start() if self._stop: raise StopIteration skip, data = self._process_data(next_(self._lines)) if not skip: break return data def _process_data(self, line): """ Validates and processes the line passed in and converts it to a Python object if necessary. 
""" skip = False if self._raw_data: return skip, line line = unicode_(line) if not line: if (self._options.get('heartbeat', False) and self._options.get('feed') in ('continuous', 'longpoll') and not self._last_seq): line = None else: skip = True elif line in ('{"results":[', '],'): skip = True elif line[-1] == ',': line = line[:-1] elif line[:10] == ('"last_seq"'): line = '{' + line try: if line: data = json.loads(line) if data.get('last_seq'): self._last_seq = data['last_seq'] skip = True else: data = None except ValueError: data = {"error": "Bad JSON line", "line": line} return skip, data class InfiniteFeed(Feed): """ Provides an infinite iterator for consuming client and database feeds such as ``_db_updates`` and ``_changes``. An InfiniteFeed object is constructed with a :class:`~cloudant.client.Cloudant` object or a :mod:`~cloudant.database` object which it uses to issue HTTP requests to the appropriate feed endpoint. An infinite feed is NOT supported for use with a :class:`~cloudant.client.CouchDB` object and unlike a :class:`~cloudant.feed.Feed` which can be a ``normal``, ``longpoll``, or ``continuous`` feed, an InfiniteFeed can only be ``continuous`` and the iterator will only stream formatted JSON objects. Instead of using this class directly, it is recommended to use the client API :func:`~cloudant.client.Cloudant.infinite_db_updates` or the database API :func:`~cloudant.database.CouchDatabase._infinite_changes`. Reference those methods for a valid list of feed options. Note: The infinite iterator is not exception resilient so if an unexpected exception occurs, the iterator will terminate. Any unexpected exceptions should be handled in code outside of this library. If you wish to restart the infinite iterator from where it left off that can be done by constructing a new InfiniteFeed object with the ``since`` option set to the sequence number of the last row of data prior to termination. 
class InfiniteFeed(Feed):
    """
    Provides an infinite iterator for consuming client and database feeds
    such as ``_db_updates`` and ``_changes``.  An InfiniteFeed object is
    constructed with a :class:`~cloudant.client.Cloudant` object or a
    :mod:`~cloudant.database` object.  An infinite feed is NOT supported for
    use with a :class:`~cloudant.client.CouchDB` object and, unlike a
    :class:`~cloudant.feed.Feed`, can only be ``continuous``; the iterator
    will only stream formatted JSON objects.

    Note: The infinite iterator is not exception resilient, so if an
    unexpected exception occurs the iterator will terminate.  To restart
    from where it left off, construct a new InfiniteFeed with the ``since``
    option set to the last seen sequence number.

    :param source: Either a :class:`~cloudant.client.Cloudant` object or a
        :mod:`~cloudant.database` object.
    """
    def __init__(self, source, **options):
        super(InfiniteFeed, self).__init__(source, False, **options)
        # Default feed to continuous if not explicitly set
        self._options['feed'] = self._options.get('feed', 'continuous')

    def _validate(self, key, val, arg_types):
        """
        Ensures that the key and the value are valid arguments to be used
        with the feed.  An infinite feed may only be ``continuous``.

        :raises CloudantArgumentError: If a non-continuous feed is
            requested.
        """
        if key == 'feed' and val != 'continuous':
            raise CloudantArgumentError(121, val)
        super(InfiniteFeed, self)._validate(key, val, arg_types)

    def next(self):
        """
        Handles the iteration by pulling the next line out of the stream and
        converting the response to JSON.

        :returns: Data representing what was seen in the feed
        """
        # The feed source cannot change during iteration, so perform the
        # loop-invariant CouchDB check once up front.
        if self._source == 'CouchDB':
            raise CloudantFeedException(101)
        while True:
            if self._last_seq:
                # The previous stream completed; restart the feed from the
                # last sequence number seen.
                self._options.update({'since': self._last_seq})
                self._resp = None
                self._last_seq = None
            if not self._resp:
                self._start()
            if self._stop:
                raise StopIteration
            skip, data = self._process_data(next_(self._lines))
            if not skip:
                break
        return data
""" import json from ._2to3 import STRTYPE, iteritems_ from ._common_util import JSON_INDEX_TYPE from ._common_util import TEXT_INDEX_TYPE from ._common_util import SPECIAL_INDEX_TYPE from ._common_util import TEXT_INDEX_ARGS from ._common_util import response_to_json_dict from .error import CloudantArgumentError, CloudantIndexException class Index(object): """ Provides an interface for managing a JSON query index. Primarily meant to be used by the database convenience methods :func:`~cloudant.database.CloudantDatabase.create_query_index`, :func:`~cloudant.database.CloudantDatabase.delete_query_index`, and :func:`~cloudant.database.CloudantDatabase.get_query_indexes`. It is recommended that you use those methods to manage an index rather than directly interfacing with Index objects. :param CloudantDatabase database: A Cloudant database instance used by the Index. :param str design_document_id: Optional identifier of the design document. :param str name: Optional name of the index. :param bool partitioned: Optional. Create as a partitioned index. Defaults to ``False`` for both partitioned and non-partitioned databases. :param kwargs: Options used to construct the index definition for the purposes of index creation. For more details on valid options See :func:`~cloudant.database.CloudantDatabase.create_query_index`. """ def __init__(self, database, design_document_id=None, name=None, partitioned=None, **kwargs): self._database = database self._r_session = self._database.r_session self._ddoc_id = design_document_id self._name = name self._type = JSON_INDEX_TYPE self._def = kwargs self._partitioned = partitioned @property def index_url(self): """ Constructs and returns the index URL. :returns: Index URL """ return '/'.join((self._database.database_url, '_index')) @property def design_document_id(self): """ Displays the design document id. :returns: Design document that this index belongs to """ return self._ddoc_id @property def name(self): """ Displays the index name. 
:returns: Name for this index """ return self._name @property def type(self): """ Displays the index type. :returns: Type of this index """ return self._type @property def definition(self): """ Displays the index definition. This could be either the definiton to be used to construct the index or the definition as it is returned by a GET request to the *_index* endpoint. :returns: Index definition as a dictionary """ return self._def @property def partitioned(self): """ Check if this index is partitioned. :return: ``True`` if index is partitioned, else ``False``. :rtype: bool """ return self._partitioned def as_a_dict(self): """ Displays the index as a dictionary. This includes the design document id, index name, index type, and index definition. :returns: Dictionary representation of the index as a dictionary """ index_dict = { 'ddoc': self._ddoc_id, 'name': self._name, 'type': self._type, 'def': self._def } if self._partitioned: index_dict['partitioned'] = True return index_dict def create(self): """ Creates the current index in the remote database. 
""" payload = {'type': self._type} if self._ddoc_id and self._ddoc_id != '': if isinstance(self._ddoc_id, STRTYPE): if self._ddoc_id.startswith('_design/'): payload['ddoc'] = self._ddoc_id[8:] else: payload['ddoc'] = self._ddoc_id else: raise CloudantArgumentError(122, self._ddoc_id) if self._name and self._name != '': if isinstance(self._name, STRTYPE): payload['name'] = self._name else: raise CloudantArgumentError(123, self._name) self._def_check() payload['index'] = self._def if self._partitioned is not None: payload['partitioned'] = bool(self._partitioned) headers = {'Content-Type': 'application/json'} resp = self._r_session.post( self.index_url, data=json.dumps(payload, cls=self._database.client.encoder), headers=headers ) resp.raise_for_status() self._ddoc_id = response_to_json_dict(resp)['id'] self._name = response_to_json_dict(resp)['name'] def _def_check(self): """ Checks that the only definition provided is a "fields" definition. """ if list(self._def.keys()) != ['fields']: raise CloudantArgumentError(124, self._def) def delete(self): """ Removes the current index from the remote database. """ if not self._ddoc_id: raise CloudantArgumentError(125) if not self._name: raise CloudantArgumentError(126) ddoc_id = self._ddoc_id if ddoc_id.startswith('_design/'): ddoc_id = ddoc_id[8:] url = '/'.join((self.index_url, ddoc_id, self._type, self._name)) resp = self._r_session.delete(url) resp.raise_for_status() class TextIndex(Index): """ Provides an interface for managing a text query index. Primarily meant to be used by the database convenience methods :func:`~cloudant.database.CloudantDatabase.create_query_index`, :func:`~cloudant.database.CloudantDatabase.delete_query_index`, and :func:`~cloudant.database.CloudantDatabase.get_query_indexes`. It is recommended that you use those methods to manage an index rather than directly interfacing with TextIndex objects. :param CloudantDatabase database: A Cloudant database instance used by the TextIndex. 
class TextIndex(Index):
    """
    Provides an interface for managing a text query index.  Primarily meant
    to be used by the database convenience methods
    :func:`~cloudant.database.CloudantDatabase.create_query_index`,
    :func:`~cloudant.database.CloudantDatabase.delete_query_index`, and
    :func:`~cloudant.database.CloudantDatabase.get_query_indexes`.  It is
    recommended that you use those methods to manage an index rather than
    directly interfacing with TextIndex objects.

    :param CloudantDatabase database: A Cloudant database instance used by
        the TextIndex.
    :param str design_document_id: Optional identifier of the design
        document.
    :param str name: Optional name of the index.
    :param kwargs: Options used to construct the index definition for the
        purposes of index creation.  For more details on valid options see
        :func:`~cloudant.database.CloudantDatabase.create_query_index`.
    """
    def __init__(self, database, design_document_id=None, name=None,
                 **kwargs):
        super(TextIndex, self).__init__(
            database, design_document_id, name, **kwargs)
        self._type = TEXT_INDEX_TYPE

    def _def_check(self):
        """
        Checks that the definition provided contains only valid arguments
        for a text index.

        :raises CloudantArgumentError: if an argument name is not valid for
            a text index definition, or if its value has the wrong type.
        """
        # An empty definition is valid for a text index; iterating an empty
        # dict performs no checks.
        for key, val in iteritems_(self._def):
            if key not in TEXT_INDEX_ARGS:
                raise CloudantArgumentError(127, key)
            if not isinstance(val, TEXT_INDEX_ARGS[key]):
                raise CloudantArgumentError(128, key, TEXT_INDEX_ARGS[key])


class SpecialIndex(Index):
    """
    Provides an interface for viewing the "special" primary index of a
    database.  Primarily meant to be used by the database convenience method
    :func:`~cloudant.database.CloudantDatabase.get_query_indexes`.  It is
    recommended that you use that method to view the "special" index rather
    than directly interfacing with the SpecialIndex object.
    """
    def __init__(self, database, design_document_id=None, name='_all_docs',
                 **kwargs):
        super(SpecialIndex, self).__init__(
            database, design_document_id, name, **kwargs)
        self._type = SPECIAL_INDEX_TYPE

    def create(self):
        """
        A "special" index cannot be created.  This method is disabled for a
        SpecialIndex object.

        :raises CloudantIndexException: always.
        """
        raise CloudantIndexException(101)

    def delete(self):
        """
        A "special" index cannot be deleted.  This method is disabled for a
        SpecialIndex object.

        :raises CloudantIndexException: always.
        """
        raise CloudantIndexException(102)
class Query(dict):
    """
    Encapsulates a query as a dictionary based object, providing a sliceable
    and iterable query result collection that can be used to process query
    output data through the ``result`` attribute.

    For example:

    .. code-block:: python

        # Slicing to skip/limit:
        query.result[100:200]
        query.result[:200]
        query.result[100:]
        query.result[:]

        # Iteration is supported via the result attribute:
        for doc in query.result:
            print doc

    The query ``result`` collection provides basic functionality, which can
    be customized with other arguments using the
    :func:`~cloudant.query.Query.custom_result` context manager.

    :param CloudantDatabase database: A Cloudant database instance used by
        the Query.
    :param str bookmark: A string that enables you to specify which page of
        results you require.
    :param list fields: A list of fields to be returned by the query.
    :param int limit: Maximum number of results returned.
    :param int r: Read quorum needed for the result.  Each document is read
        from at least 'r' number of replicas before it is returned in the
        results.
    :param dict selector: Dictionary object describing criteria used to
        select documents.
    :param int skip: Skip the first 'n' results, where 'n' is the value
        specified.
    :param list sort: A list of fields to sort by.  Optionally the list can
        contain elements that are single member dictionary structures that
        specify sort direction.  For example
        ``sort=['name', {'age': 'desc'}]`` means to sort the query results
        by the "name" field in ascending order and the "age" field in
        descending order.
    :param str use_index: Identifies a specific index for the query to run
        against, rather than using the Cloudant Query algorithm which finds
        what it believes to be the best index.
    :param str partition_key: Optional.  Specify a query partition key.
        Defaults to ``None`` resulting in global queries.
    """
    def __init__(self, database, **kwargs):
        super(Query, self).__init__()
        self._database = database
        self._partition_key = kwargs.pop('partition_key', None)
        self._r_session = self._database.r_session
        self._encoder = self._database.client.encoder
        # An explicit ``fields=None`` means "all fields"; drop it so it is
        # never sent to the server.
        if kwargs.get('fields', True) is None:
            del kwargs['fields']
        if kwargs:
            super(Query, self).update(kwargs)
        self.result = QueryResult(self)

    @property
    def url(self):
        """
        Constructs and returns the Query URL.

        :returns: Query URL
        """
        if self._partition_key:
            base_url = self._database.database_partition_url(
                self._partition_key)
        else:
            base_url = self._database.database_url
        return base_url + '/_find'

    def __call__(self, **kwargs):
        """
        Makes the Query object callable and retrieves the raw JSON content
        from the remote database based on the current Query definition, and
        any additional kwargs provided as query parameters.

        For example:

        .. code-block:: python

            # Construct a Query
            query = Query(database, selector={'_id': {'$gt': 0}})
            # Use query as a callable limiting results to 100,
            # skipping the first 100.
            for doc in query(limit=100, skip=100)['docs']:
                # Process query data (in JSON format).

        Note:  Rather than using the Query callable directly, if you wish to
        retrieve query results in raw JSON format use the provided database
        API of :func:`~cloudant.database.CouchDatabase.get_query_result` and
        set ``raw_result=True`` instead.

        :param kwargs: Query parameters; the same options the constructor
            accepts (``bookmark``, ``fields``, ``limit``, ``r``,
            ``selector``, ``skip``, ``sort``, ``use_index``).

        :returns: Query result data in JSON format
        :raises CloudantArgumentError: if an argument name or value type is
            invalid, or if no non-empty selector is provided.
        """
        data = dict(self)
        data.update(kwargs)
        # Validate query argument names and value types.
        for key, val in iteritems_(data):
            if key not in QUERY_ARG_TYPES:
                raise CloudantArgumentError(129, key)
            if not isinstance(val, QUERY_ARG_TYPES[key]):
                raise CloudantArgumentError(130, key, QUERY_ARG_TYPES[key])
        # A selector is mandatory and may not be empty.  Type validation
        # above guarantees that, when present, it is a dict.
        if not data.get('selector'):
            raise CloudantArgumentError(131)
        # Execute query find
        headers = {'Content-Type': 'application/json'}
        resp = self._r_session.post(
            self.url,
            headers=headers,
            data=json.dumps(data, cls=self._encoder)
        )
        resp.raise_for_status()
        return response_to_json_dict(resp)

    @contextlib.contextmanager
    def custom_result(self, **options):
        """
        Customizes the :class:`~cloudant.result.QueryResult` behavior and
        provides a convenient context manager for the QueryResult.
        QueryResult customizations can be made by providing extra options to
        the query result call using this context manager.  The use of
        ``skip`` and ``limit`` as options are not valid when using a
        QueryResult since the ``skip`` and ``limit`` functionality is
        handled in the QueryResult.

        For example:

        .. code-block:: python

            with query.custom_result(sort=[{'name': 'asc'}]) as rslt:
                data = rslt[100:200]

        :param options: Options accepted by
            :class:`~cloudant.result.QueryResult` such as ``bookmark``,
            ``fields``, ``page_size``, ``r``, ``selector``, ``sort`` and
            ``use_index``.

        :returns: Query result data wrapped in a QueryResult instance
        """
        rslt = QueryResult(self, **options)
        yield rslt
        del rslt
""" def __init__(self, client): repl_db = '_replicator' self.client = client try: self.database = client[repl_db] except Exception: raise CloudantClientException(404, repl_db) def create_replication(self, source_db=None, target_db=None, repl_id=None, **kwargs): """ Creates a new replication task. :param source_db: Database object to replicate from. Can be either a ``CouchDatabase`` or ``CloudantDatabase`` instance. :param target_db: Database object to replicate to. Can be either a ``CouchDatabase`` or ``CloudantDatabase`` instance. :param str repl_id: Optional replication id. Generated internally if not explicitly set. :param dict user_ctx: Optional user to act as. Composed internally if not explicitly set. :param bool create_target: Specifies whether or not to create the target, if it does not already exist. :param bool continuous: If set to True then the replication will be continuous. :returns: Replication document as a Document instance """ if source_db is None: raise CloudantReplicatorException(101) if target_db is None: raise CloudantReplicatorException(102) data = dict( _id=repl_id if repl_id else str(uuid.uuid4()), **kwargs ) # replication source data['source'] = {'url': source_db.database_url} if source_db.admin_party: pass # no credentials required elif source_db.client.is_iam_authenticated: data['source'].update({'auth': { 'iam': {'api_key': source_db.client.r_session.get_api_key} }}) else: data['source'].update({'headers': { 'Authorization': source_db.creds['basic_auth'] }}) # replication target data['target'] = {'url': target_db.database_url} if target_db.admin_party: pass # no credentials required elif target_db.client.is_iam_authenticated: data['target'].update({'auth': { 'iam': {'api_key': target_db.client.r_session.get_api_key} }}) else: data['target'].update({'headers': { 'Authorization': target_db.creds['basic_auth'] }}) # add user context delegation if not data.get('user_ctx') and self.database.creds and \ self.database.creds.get('user_ctx'): 
data['user_ctx'] = self.database.creds['user_ctx'] return self.database.create_document(data, throw_on_exists=True) def list_replications(self): """ Retrieves all replication documents from the replication database. :returns: List containing replication Document objects """ docs = self.database.all_docs(include_docs=True)['rows'] documents = [] for doc in docs: if doc['id'].startswith('_design/'): continue document = Document(self.database, doc['id']) document.update(doc['doc']) documents.append(document) return documents def replication_state(self, repl_id): """ Retrieves the state for the given replication. Possible values are ``triggered``, ``completed``, ``error``, and ``None`` (meaning not yet triggered). :param str repl_id: Replication id used to identify the replication to inspect. :returns: Replication state as a ``str`` """ if "scheduler" in self.client.features(): try: repl_doc = Scheduler(self.client).get_doc(repl_id) except HTTPError as err: raise CloudantReplicatorException(err.response.status_code, repl_id) state = repl_doc['state'] else: try: repl_doc = self.database[repl_id] except KeyError: raise CloudantReplicatorException(404, repl_id) repl_doc.fetch() state = repl_doc.get('_replication_state') return state def follow_replication(self, repl_id): """ Blocks and streams status of a given replication. For example: .. code-block:: python for doc in replicator.follow_replication(repl_doc_id): # Process replication information as it comes in :param str repl_id: Replication id used to identify the replication to inspect. :returns: Iterable stream of copies of the replication Document and replication state as a ``str`` for the specified replication id """ def update_state(): """ Retrieves the replication state. 
""" if "scheduler" in self.client.features(): try: arepl_doc = Scheduler(self.client).get_doc(repl_id) return arepl_doc, arepl_doc['state'] except HTTPError: return None, None else: try: arepl_doc = self.database[repl_id] arepl_doc.fetch() return arepl_doc, arepl_doc.get('_replication_state') except KeyError: return None, None while True: # Make sure we fetch the state up front, just in case it moves # too fast and we miss it in the changes feed. repl_doc, state = update_state() if repl_doc: yield repl_doc # This is a little awkward, since 2.1 the terminal states are # "failed" and "completed", so those should be the exit states, but # for backwards compatibility with older versions "error" is also # needed. The code has always exited for "error" state even long # after 2.1 was available so that behaviour is retained. if state is not None and state in ['error', 'failed', 'completed']: return # Now listen on changes feed for the state for change in self.database.changes(): if change.get('id') == repl_id: repl_doc, state = update_state() if repl_doc is not None: yield repl_doc # See note about these states if state is not None and state in ['error', 'failed', 'completed']: return def stop_replication(self, repl_id): """ Stops a replication based on the provided replication id by deleting the replication document from the replication database. The replication can only be stopped if it has not yet completed. If it has already completed then the replication document is still deleted from replication database. :param str repl_id: Replication id used to identify the replication to stop. """ try: repl_doc = self.database[repl_id] except KeyError: raise CloudantReplicatorException(404, repl_id) repl_doc.fetch() repl_doc.delete() ================================================ FILE: src/cloudant/result.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2019 IBM Corp. All rights reserved. 
class ResultByKey(object):
    """
    Wraps a value so that it is always treated as a document key when used
    to access a result collection.  This comes in handy when the document
    key value is an ``int``, which would otherwise be interpreted as an
    element index.

    For example:

    .. code-block:: python

        result = Result(callable)
        result[ResultByKey(9)]  # gets records where the key matches 9
        # as opposed to:
        result[9]               # gets the 10th record of the result collection

    :param value: A value representing a Result key.
    """
    def __init__(self, value):
        self._value = value

    def __call__(self):
        """ Return the wrapped key value. """
        return self._value
class Result(object):
    """
    Provides a key accessible, sliceable, and iterable interface to result
    collections.

    A Result object is constructed with a raw data callable reference such
    as the database API convenience method
    :func:`~cloudant.database.CouchDatabase.all_docs` or the View
    :func:`~cloudant.view.View.__call__` callable, used to retrieve data.
    A Result object can also use optional extra arguments for result
    customization and supports efficient, paged iteration over the result
    collection to avoid large result data from adversely affecting memory.

    In Python, slicing returns by value, whereas iteration will yield
    elements of the sequence; individual key access and slicing will
    therefore perform better for smaller data collections, whereas iteration
    will be more efficient for larger data collections.

    For example:

    .. code-block:: python

        # Access by index value:
        result = Result(callable)
        result[9]                    # skip first 9 records and get 10th

        # Access by key value:
        result['foo']                # get records matching 'foo'
        result[ResultByKey(9)]       # get records matching 9

        # Access by index slices:
        result[100: 200]             # records 101 through 200
        result[: 200]                # records up to and including the 200th
        result[100: ]                # all records after the 100th
        result[: ]                   # all records

        # Access by key slices:
        result['bar':'foo']          # records between and including 'bar' and 'foo'
        result['foo':]               # records after and including 'foo'
        result[:'foo']               # records up to and including 'foo'
        result[ResultByKey(5): ResultByKey(10)]  # integer key slice

        # Iteration over the entire result collection
        for i in result:
            print i

        # Iteration in batches of 1000, including documents.
        result = Result(callable, include_docs=True, page_size=1000)
        for i in result:
            print i

    Note:  Since Result key access, slicing, and iteration use query
    parameters behind the scenes, some parameters are not permitted
    depending on usage:

    * Access/slicing by index value: no restrictions.
    * Access/slicing by key value: ``key``, ``keys``, ``startkey``,
      ``endkey`` not permitted.
    * Iteration: ``limit``, ``skip`` not permitted.

    :param str method_ref: A reference to the method or callable that
        returns the JSON content result to be wrapped as a Result.
    :param bool descending: Return documents in descending key order.
    :param endkey: Stop returning records at this specified key.  Not valid
        when used with key access and key slicing.
    :param str endkey_docid: Stop returning records when the specified
        document id is reached.
    :param bool group: Using the reduce function, group the results to a
        group or single row.
    :param group_level: Only applicable if the view uses complex keys: keys
        that are JSON arrays.  Groups reduce results for the specified
        number of array fields.
    :param bool include_docs: Include the full content of the documents.
    :param bool inclusive_end: Include rows with the specified endkey.
    :param key: Return only documents that match the specified key.  Not
        valid when used with key access and key slicing.
    :param list keys: Return only documents that match the specified keys.
        Not valid when used with key access and key slicing.
    :param int limit: Limit the number of returned documents to the
        specified count.  Not valid when used with key iteration.
    :param int page_size: Sets the page size for result iteration.
    :param bool reduce: True to use the reduce function, false otherwise.
    :param int skip: Skip this number of rows from the start.  Not valid
        when used with key iteration.
    :param bool stable: Whether or not the view results should be returned
        from a "stable" set of shards.
    :param str stale: Allow the results from a stale view to be used.
        Deprecated; use the appropriate combination of ``stable`` and
        ``update`` instead.
    :param startkey: Return records starting with the specified key.  Not
        valid when used with key access and key slicing.
    :param str startkey_docid: Return records starting with the specified
        document ID.
    :param str update: Determine whether the view in question should be
        updated prior to or after responding to the user.  Valid values
        are: ``false``, ``true``, and ``lazy``.
    """
    def __init__(self, method_ref, **options):
        # ``page_size`` controls iteration only and is never forwarded to
        # the data callable.
        self._ref = method_ref
        self._page_size = options.pop('page_size', 100)
        self.options = options

    def __getitem__(self, arg):
        """
        Provides Result key access and slicing support.

        An ``int`` argument is interpreted as a ``skip`` followed by a get
        of the next record.  A ``str``, ``list``, or
        :class:`~cloudant.result.ResultByKey` argument is interpreted as a
        ``key`` get.  Slices are interpreted either as ``skip:limit`` index
        ranges or as ``startkey:endkey`` key ranges, depending on the slice
        bound types.

        See :class:`~cloudant.result.Result` for more detailed key access
        and slicing examples.

        :param arg: A single value representing a key or a pair of values
            representing a slice.  The argument value(s) can be ``int``,
            ``str``, ``list`` (in the case of complex keys), or
            :class:`~cloudant.result.ResultByKey`.

        :returns: Rows data as a list in JSON format
        :raises ResultException: if the argument is not a supported type or
            combination.
        """
        if isinstance(arg, int):
            data = self._handle_result_by_index(arg)
        elif isinstance(arg, (STRTYPE, list)):
            data = self._handle_result_by_key(arg)
        elif isinstance(arg, ResultByKey):
            data = self._handle_result_by_key(arg())
        elif isinstance(arg, slice):
            data = self._dispatch_slice(arg)
        else:
            data = None
        if data is None:
            raise ResultException(101, arg)
        return self._parse_data(data)

    def _dispatch_slice(self, arg):
        """ Route a slice argument to the appropriate range handler. """
        # Slice covering the entire result set needs no further processing.
        if arg.start is None and arg.stop is None:
            return self._ref(**self.options)
        key_types = (STRTYPE, list, ResultByKey)
        if (type_or_none(key_types, arg.start) and
                type_or_none(key_types, arg.stop)):
            return self._handle_result_by_key_slice(arg)
        if type_or_none(int, arg.start) and type_or_none(int, arg.stop):
            return self._handle_result_by_idx_slice(arg)
        return None

    def _handle_result_by_index(self, idx):
        """ Handle processing when the result argument is an integer. """
        if idx < 0:
            return None
        opts = dict(self.options)
        skip = opts.pop('skip', 0)
        limit = opts.pop('limit', None)
        py_to_couch_validate('skip', skip)
        py_to_couch_validate('limit', limit)
        if limit is not None and idx >= limit:
            return dict()  # result is out of range
        return self._ref(skip=skip + idx, limit=1, **opts)

    def _handle_result_by_key(self, key):
        """ Handle processing when the result argument is a document key. """
        invalid_options = ('key', 'keys', 'startkey', 'endkey')
        if any(opt in invalid_options for opt in self.options):
            raise ResultException(102, invalid_options, self.options)
        return self._ref(key=key, **self.options)

    def _handle_result_by_idx_slice(self, idx_slice):
        """ Handle processing when the result argument is an index slice. """
        opts = dict(self.options)
        skip = opts.pop('skip', 0)
        limit = opts.pop('limit', None)
        py_to_couch_validate('skip', skip)
        py_to_couch_validate('limit', limit)
        start = idx_slice.start
        stop = idx_slice.stop
        rows = None
        if all(i is not None and i >= 0 for i in (start, stop)) \
                and start < stop:
            # Bounded slice [start:stop]
            if limit is not None:
                if start >= limit:
                    return dict()  # result is out of range
                if stop > limit:
                    # Do not extend the slice past the original limit.
                    return self._ref(skip=skip + start, limit=limit - start,
                                     **opts)
            rows = self._ref(skip=skip + start, limit=stop - start, **opts)
        elif start is not None and stop is None and start >= 0:
            # Open-ended slice [start:]
            if limit is not None:
                if start >= limit:
                    return dict()  # result is out of range
                rows = self._ref(skip=skip + start, limit=limit - start,
                                 **opts)
            else:
                rows = self._ref(skip=skip + start, **opts)
        elif start is None and stop is not None and stop >= 0:
            # Prefix slice [:stop]
            if limit is not None and stop > limit:
                # Do not extend the slice past the original limit.
                rows = self._ref(skip=skip, limit=limit, **opts)
            else:
                rows = self._ref(skip=skip, limit=stop, **opts)
        return rows

    def _handle_result_by_key_slice(self, key_slice):
        """ Handle processing when the result argument is a key slice. """
        invalid_options = ('key', 'keys', 'startkey', 'endkey')
        if any(opt in invalid_options for opt in self.options):
            raise ResultException(102, invalid_options, self.options)
        start = (key_slice.start() if isinstance(key_slice.start, ResultByKey)
                 else key_slice.start)
        stop = (key_slice.stop() if isinstance(key_slice.stop, ResultByKey)
                else key_slice.stop)
        if (start is not None and stop is not None
                and isinstance(start, type(stop))):
            return self._ref(startkey=start, endkey=stop, **self.options)
        if start is not None and stop is None:
            return self._ref(startkey=start, **self.options)
        if start is None and stop is not None:
            return self._ref(endkey=stop, **self.options)
        return None

    def __iter__(self):
        """
        Provides iteration support, primarily for large data collections.
        The iterator uses the ``startkey``, ``startkey_docid``, and
        ``limit`` options to consume data in chunks controlled by the
        ``page_size`` option.

        See :class:`~cloudant.result.Result` for Result iteration examples.

        :returns: Iterable data sequence
        :raises ResultException: if ``limit`` is supplied as an option or
            if ``page_size`` is not a positive integer.
        """
        invalid_options = ('limit', )
        if any(opt in invalid_options for opt in self.options):
            raise ResultException(103, invalid_options, self.options)
        try:
            self._page_size = int(self._page_size)
            if self._page_size <= 0:
                raise ResultException(104, self._page_size)
        except ValueError:
            raise ResultException(104, self._page_size)
        # ``skip``/``startkey`` only apply to the very first request; each
        # subsequent page is positioned via the last key of the prior page.
        first_request = {
            'skip': self.options.pop('skip', None),
            'startkey': self.options.pop('startkey', None)
        }
        self._call = partial(  # pylint: disable=attribute-defined-outside-init
            self._ref, limit=self._real_page_size, **self.options)
        response = self._call(
            **{k: v for k, v in first_request.items() if v is not None})
        return self._iterator(response)

    @property
    def _real_page_size(self):
        '''
        In views we paginate with N+1 items per page.
        https://docs.couchdb.org/en/stable/ddocs/views/pagination.html#paging-alternate-method
        '''
        return self._page_size + 1

    def _iterator(self, response):
        ''' Iterate through view data, one page at a time. '''
        while True:
            rows = deque(self._parse_data(response))
            del response
            if not rows:
                break
            row_count = len(rows)
            tail = rows.pop()
            while rows:
                yield rows.popleft()
            # A full page holds _real_page_size (page_size + 1) rows; fewer
            # means this is the last page and the held-back row is real
            # data rather than the start of the next page.
            if row_count < self._real_page_size:
                yield tail
                break
            del rows
            # Keys can repeat across view rows, so when a document id is
            # available restart paging from that exact document.
            tail_doc_id = tail.get('id')
            if tail_doc_id is not None:
                response = self._call(startkey=tail['key'],
                                      startkey_docid=tail_doc_id)
            else:
                # Reduce result keys are unique by definition.
                response = self._call(startkey=tail['key'])

    # pylint: disable=no-self-use
    def _parse_data(self, data):
        """ Extract the ``rows`` content from the JSON result content. """
        return data.get('rows', [])

    def all(self):
        """
        Retrieve all results.  Specifying a ``limit`` parameter in the
        ``Result`` constructor will limit the number of documents returned.
        Be aware that the ``page_size`` parameter is not honoured.

        :return: results data as list in JSON format.
        """
        return self[:]
class QueryResult(Result):
    """
    Provides an index key accessible, sliceable and iterable interface to
    query result collections by extending the
    :class:`~cloudant.result.Result` class.

    A QueryResult object is constructed with a raw data callable reference
    to the Query :func:`~cloudant.query.Query.__call__` callable, which is
    used to retrieve data, along with optional extra arguments for result
    customization.  Iteration is paged using bookmarks to avoid large result
    data from adversely affecting memory.

    Only access by index value, slicing by index values and iteration are
    supported.  Since iteration uses the ``skip`` and ``limit`` query
    parameters internally, ``skip`` and ``limit`` are not permitted to be
    part of the query callable or be included as part of the QueryResult
    customized parameters.

    :param query: A reference to the query callable that returns the JSON
        content result to be wrapped.
    :param str bookmark: A string that enables you to specify which page of
        results you require.
    :param list fields: A list of fields to be returned by the query.
    :param int page_size: Sets the page size for result iteration.  Default
        is 100.
    :param int r: Read quorum needed for the result.  Each document is read
        from at least 'r' number of replicas before it is returned in the
        results.
    :param dict selector: Dictionary object describing criteria used to
        select documents.
    :param list sort: A list of fields to sort by.  Optionally the list can
        contain elements that are single member dictionary structures that
        specify sort direction, e.g. ``sort=['name', {'age': 'desc'}]``.
    :param str use_index: Identifies a specific index for the query to run
        against, rather than using the Cloudant Query algorithm which finds
        what it believes to be the best index.
    """

    def __init__(self, query, **options):
        # Promote skip/limit defined on the query itself into the options
        # dict so that the Result base class can manage them as needed.
        for param in ('skip', 'limit'):
            if param in query and param not in options:
                options[param] = query[param]
        super(QueryResult, self).__init__(query, **options)

    def __getitem__(self, arg):
        """
        Provides QueryResult index access and index slicing support.

        An ``int`` argument is interpreted as a ``skip`` followed by a get of
        the next document; an ``int`` slice is interpreted as a
        ``skip:limit-skip`` style pair.  See
        :class:`~cloudant.result.QueryResult` for examples.

        :param arg: A single ``int`` key or a slice whose bounds are ``int``
            (or ``None``).

        :returns: Document data as a list in JSON format
        """
        valid_int = isinstance(arg, int)
        valid_slice = (isinstance(arg, slice) and
                       type_or_none(int, arg.start) and
                       type_or_none(int, arg.stop))
        if not (valid_int or valid_slice):
            # Arguments may only be integers or integer slices.
            raise ResultException(101, arg)
        return super(QueryResult, self).__getitem__(arg)

    def _parse_data(self, data):
        """
        Overrides Result._parse_data to extract the docs content from the
        query result JSON response content
        """
        return data.get('docs', [])

    @property
    def _real_page_size(self):
        '''
        During queries iteration page size is user-specified
        '''
        return self._page_size

    def _iterator(self, response):
        '''
        Iterate through query data, following server bookmarks between
        pages.
        '''
        while True:
            rows = self._parse_data(response)
            if not rows:
                break
            bookmark = response.get('bookmark')
            for row in rows:
                yield row
            del rows
            if not bookmark:
                break
            response = self._call(bookmark=bookmark)
""" def __init__(self, client): self._client = client self._r_session = client.r_session self._scheduler = '/'.join([self._client.server_url, '_scheduler']) def list_docs(self, limit=None, skip=None): """ Lists replication documents. Includes information about all the documents, even in completed and failed states. For each document it returns the document ID, the database, the replication ID, source and target, and other information. :param limit: How many results to return. :param skip: How many result to skip starting at the beginning, if ordered by document ID. """ params = dict() if limit is not None: params["limit"] = limit if skip is not None: params["skip"] = skip resp = self._r_session.get('/'.join([self._scheduler, 'docs']), params=params) resp.raise_for_status() return response_to_json_dict(resp) def get_doc(self, doc_id): """ Get replication document state for a given replication document ID. """ resp = self._r_session.get('/'.join([self._scheduler, 'docs', '_replicator', doc_id])) resp.raise_for_status() return response_to_json_dict(resp) def list_jobs(self, limit=None, skip=None): """ Lists replication jobs. Includes replications created via /_replicate endpoint as well as those created from replication documents. Does not include replications which have completed or have failed to start because replication documents were malformed. Each job description will include source and target information, replication id, a history of recent event, and a few other things. :param limit: How many results to return. :param skip: How many result to skip starting at the beginning, if ordered by document ID. 
""" params = dict() if limit is not None: params["limit"] = limit if skip is not None: params["skip"] = skip resp = self._r_session.get('/'.join([self._scheduler, 'jobs']), params=params) resp.raise_for_status() return response_to_json_dict(resp) ================================================ FILE: src/cloudant/security_document.py ================================================ #!/usr/bin/env python # Copyright (C) 2016, 2018 IBM. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ API module/class for interacting with a security document in a database. """ import json from ._2to3 import url_quote_plus from ._common_util import response_to_json_dict class SecurityDocument(dict): """ Encapsulates a JSON security document. A SecurityDocument object is instantiated with a reference to a database and used to manipulate security document content in a CouchDB or Cloudant database instance. In addition to basic read/write operations, a SecurityDocument object also provides a convenient context manager. This context manager removes having to explicitly :func:`~cloudant.security_document.SecurityDocument.fetch` the security document from the remote database before commencing work on it as well as explicitly having to :func:`~cloudant.security_document.SecurityDocument.save` the security document once work is complete. For example: .. 
class SecurityDocument(dict):
    """
    Encapsulates a JSON security document.  A SecurityDocument object is
    instantiated with a reference to a database and used to manipulate
    security document content in a CouchDB or Cloudant database instance.

    In addition to basic read/write operations, a SecurityDocument object
    also acts as a context manager: entering the context performs a
    :func:`~cloudant.security_document.SecurityDocument.fetch` and leaving
    it performs a :func:`~cloudant.security_document.SecurityDocument.save`,
    so changes made within the context are persisted automatically.  For
    example:

    .. code-block:: python

        with SecurityDocument(database) as security_document:
            # The security document is fetched from the remote database
            # Changes are made locally
            security_document['Cloudant']['julia'] = ['_reader', '_writer']
            security_document['Cloudant']['ruby'] = ['_admin', '_replicator']
        # The security document is saved to the remote database

    :param database: A database instance used by the SecurityDocument.  Can
        be either a ``CouchDatabase`` or ``CloudantDatabase`` instance.
    """

    def __init__(self, database):
        super(SecurityDocument, self).__init__()
        self._client = database.client
        self._database = database
        self._database_host = self._client.server_url
        self._database_name = database.database_name
        self.encoder = self._client.encoder

    @property
    def document_url(self):
        """
        Constructs and returns the security document URL.

        :returns: Security document URL
        """
        parts = [
            self._database_host,
            url_quote_plus(self._database_name),
            '_security'
        ]
        return '/'.join(parts)

    @property
    def r_session(self):
        """
        Returns the Python requests session used by the security document.

        :returns: The Python requests session
        """
        return self._client.r_session

    def json(self):
        """
        Retrieves the JSON string representation of the current locally
        cached security document object, encoded by the encoder specified
        in the associated client object.

        :returns: Encoded JSON string containing the security document data
        """
        return json.dumps(dict(self), cls=self.encoder)

    def fetch(self):
        """
        Retrieves the content of the current security document from the
        remote database and populates the locally cached SecurityDocument
        object with that content.  Any dictionary content currently in the
        locally cached object is overwritten.
        """
        resp = self.r_session.get(self.document_url)
        resp.raise_for_status()
        self.clear()
        self.update(response_to_json_dict(resp))

    def save(self):
        """
        Saves changes made to the locally cached SecurityDocument object's
        data structures to the remote database.
        """
        headers = {'Content-Type': 'application/json'}
        resp = self.r_session.put(
            self.document_url,
            data=self.json(),
            headers=headers
        )
        resp.raise_for_status()

    def __enter__(self):
        """
        Context entry: fetch the security document so it can be edited.
        """
        self.fetch()
        return self

    def __exit__(self, *args):
        """
        Context exit: save the (possibly modified) security document.
        """
        self.save()
""" import contextlib from ._2to3 import STRTYPE from ._common_util import codify, get_docs, response_to_json_dict from .result import Result from .error import CloudantArgumentError, CloudantViewException class View(dict): """ Encapsulates a view as a dictionary based object, exposing the map and reduce functions as attributes and supporting query/data access through the view. A View object is instantiated with a reference to a DesignDocument and is typically used as part of the :class:`~cloudant.design_document.DesignDocument` view management API. A View object provides a key accessible, sliceable, and iterable default result collection that can be used to query the view data through the ``result`` attribute. For example: .. code-block:: python # Access result collection through individual keys view.result[100] view.result['foo'] # Access result collection through index slicing: view.result[100: 200] view.result[: 200] view.result[100: ] view.result[: ] # Access result collection through key slicing: view.result['bar': 'foo'] view.result['bar': ] view.result[: 'foo'] # Iterate over the result collection: for doc in view.result: print doc The default result collection provides basic functionality, which can be customized with other arguments using the :func:`~cloudant.view.View.custom_result` context manager. For example: .. code-block:: python # Including documents as part of a custom result with view.custom_result(include_docs=True) as rslt: rslt[100: 200] # slice by result rslt[['2013', '10']: ['2013', '11']] # slice by startkey/endkey # Iteration for doc in rslt: print doc # Iteration over a view within startkey/endkey range: with view.custom_result(startkey='2013', endkey='2014') as rslt: for doc in rslt: print doc Note: A view must exist as part of a design document remotely in order to access result content as depicted in the above examples. :param DesignDocument ddoc: DesignDocument instance used in part to identify the view. 
:param str view_name: Name used in part to identify the view. :param str map_func: Optional Javascript map function. :param str reduce_func: Optional Javascript reduce function. :param str partition_key: Optional. Specify a view partition key. Defaults to ``None`` resulting in global queries. """ def __init__( self, ddoc, view_name, map_func=None, reduce_func=None, partition_key=None, **kwargs ): super(View, self).__init__() self.design_doc = ddoc self._r_session = self.design_doc.r_session self.view_name = view_name if map_func is not None: self['map'] = codify(map_func) if reduce_func is not None: self['reduce'] = codify(reduce_func) self._partition_key = partition_key self.update(kwargs) self.result = Result(self) @property def map(self): """ Provides an map property accessor and setter. For example: .. code-block:: python # Set the View map property view.map = 'function (doc) {\\n emit(doc._id, 1);\\n}' print view.map :param str js_func: Javascript function. :returns: Codified map function """ return self.get('map') @map.setter def map(self, js_func): """ Provides a map property setter. """ self['map'] = codify(js_func) @property def reduce(self): """ Provides an reduce property accessor and setter. For example: .. code-block:: python # Set the View reduce property view.reduce = '_count' # Get and print the View reduce property print view.reduce :param str js_func: Javascript function. :returns: Codified reduce function """ return self.get('reduce') @reduce.setter def reduce(self, js_func): """ Provides a reduce property setter. """ self['reduce'] = codify(js_func) @property def url(self): """ Constructs and returns the View URL. 
:returns: View URL """ if self._partition_key: base_url = self.design_doc.document_partition_url( self._partition_key) else: base_url = self.design_doc.document_url return '/'.join(( base_url, '_view', self.view_name )) def __call__(self, **kwargs): """ Makes the View object callable and retrieves the raw JSON content from the remote database based on the View definition on the server, using the kwargs provided as query parameters. For example: .. code-block:: python # Construct a View view = View(ddoc, 'view001') # Assuming that 'view001' exists as part of the # design document ddoc in the remote database... # Use view as a callable for row in view(include_docs=True, limit=100, skip=100)['rows']: # Process view data (in JSON format). Note: Rather than using the View callable directly, if you wish to retrieve view results in raw JSON format use ``raw_result=True`` with the provided database API of :func:`~cloudant.database.CouchDatabase.get_view_result` instead. :param bool descending: Return documents in descending key order. :param endkey: Stop returning records at this specified key. :param str endkey_docid: Stop returning records when the specified document id is reached. :param bool group: Using the reduce function, group the results to a group or single row. :param group_level: Only applicable if the view uses complex keys: keys that are JSON arrays. Groups reduce results for the specified number of array fields. :param bool include_docs: Include the full content of the documents. :param bool inclusive_end: Include rows with the specified endkey. :param str key: Return only documents that match the specified key. :param list keys: Return only documents that match the specified keys. :param int limit: Limit the number of returned documents to the specified count. :param bool reduce: True to use the reduce function, false otherwise. :param int skip: Skip this number of rows from the start. :param str stale: Allow the results from a stale view to be used. 
This makes the request return immediately, even if the view has not been completely built yet. If this parameter is not given, a response is returned only after the view has been built. :param startkey: Return records starting with the specified key. :param str startkey_docid: Return records starting with the specified document ID. :returns: View result data in JSON format """ resp = get_docs(self._r_session, self.url, self.design_doc.encoder, **kwargs) return response_to_json_dict(resp) @contextlib.contextmanager def custom_result(self, **options): """ Customizes the :class:`~cloudant.result.Result` behavior and provides a convenient context manager for the Result. Result customizations can be made by providing extra options to the result call using this context manager. Depending on how you are accessing, slicing or iterating through your result collection certain query parameters are not permitted. See :class:`~cloudant.result.Result` for additional details. For example: .. code-block:: python with view.custom_result(include_docs=True, reduce=False) as rslt: data = rslt[100: 200] :param bool descending: Return documents in descending key order. :param endkey: Stop returning records at this specified key. Not valid when used with :class:`~cloudant.result.Result` key access and key slicing. :param str endkey_docid: Stop returning records when the specified document id is reached. :param bool group: Using the reduce function, group the results to a group or single row. :param group_level: Only applicable if the view uses complex keys: keys that are JSON arrays. Groups reduce results for the specified number of array fields. :param bool include_docs: Include the full content of the documents. :param bool inclusive_end: Include rows with the specified endkey. :param key: Return only documents that match the specified key. Not valid when used with :class:`~cloudant.result.Result` key access and key slicing. 
:param list keys: Return only documents that match the specified keys. Not valid when used with :class:`~cloudant.result.Result` key access and key slicing. :param int limit: Limit the number of returned documents to the specified count. Not valid when used with :class:`~cloudant.result.Result` iteration. :param int page_size: Sets the page size for result iteration. :param bool reduce: True to use the reduce function, false otherwise. :param int skip: Skip this number of rows from the start. Not valid when used with :class:`~cloudant.result.Result` iteration. :param str stale: Allow the results from a stale view to be used. This makes the request return immediately, even if the view has not been completely built yet. If this parameter is not given, a response is returned only after the view has been built. :param startkey: Return records starting with the specified key. Not valid when used with :class:`~cloudant.result.Result` key access and key slicing. :param str startkey_docid: Return records starting with the specified document ID. :returns: View result data wrapped in a Result instance """ rslt = Result(self, **options) yield rslt del rslt class QueryIndexView(View): """ A view that defines a JSON query index in a design document. If you wish to manage a view that represents a JSON query index it is strongly recommended that :func:`~cloudant.database.CloudantDatabase.create_query_index` and :func:`~cloudant.database.CloudantDatabase.delete_query_index` are used. """ def __init__(self, ddoc, view_name, map_fields, reduce_func, **kwargs): if not isinstance(map_fields, dict): raise CloudantArgumentError(132) if not isinstance(reduce_func, STRTYPE): raise CloudantArgumentError(133) super(QueryIndexView, self).__init__( ddoc, view_name, map_fields, reduce_func, **kwargs ) self['map'] = map_fields self['reduce'] = reduce_func self.result = None @property def map(self): """ Provides a map property accessor and setter. 
class QueryIndexView(View):
    """
    A view that defines a JSON query index in a design document.

    If you wish to manage a view that represents a JSON query index it is
    strongly recommended that
    :func:`~cloudant.database.CloudantDatabase.create_query_index` and
    :func:`~cloudant.database.CloudantDatabase.delete_query_index` are used.
    """

    def __init__(self, ddoc, view_name, map_fields, reduce_func, **kwargs):
        if not isinstance(map_fields, dict):
            raise CloudantArgumentError(132)
        if not isinstance(reduce_func, STRTYPE):
            raise CloudantArgumentError(133)
        super(QueryIndexView, self).__init__(
            ddoc,
            view_name,
            map_fields,
            reduce_func,
            **kwargs
        )
        self['map'] = map_fields
        self['reduce'] = reduce_func
        self.result = None

    @property
    def map(self):
        """
        Provides a map property accessor and setter.

        :param dict map_func: A dictionary of fields defining the index.

        :returns: Fields defining the index
        """
        return self.get('map')

    @map.setter
    def map(self, map_func):
        """
        Provides a map property setter.
        """
        # Guard clause: query index map definitions must be dictionaries.
        if not isinstance(map_func, dict):
            raise CloudantArgumentError(132)
        self['map'] = map_func

    @property
    def reduce(self):
        """
        Provides a reduce property accessor and setter.

        :param str reduce_func: A string representation of the reduce
            function used in part to define the index.

        :returns: Reduce function as a string
        """
        return self.get('reduce')

    @reduce.setter
    def reduce(self, reduce_func):
        """
        Provides a reduce property setter.
        """
        # Guard clause: query index reduce definitions must be strings.
        if not isinstance(reduce_func, STRTYPE):
            raise CloudantArgumentError(133)
        self['reduce'] = reduce_func

    def __call__(self, **kwargs):
        """
        QueryIndexView objects are not callable.  If you wish to execute a
        query using a query index, use
        :func:`~cloudant.database.CloudantDatabase.get_query_result`
        instead.
        """
        raise CloudantViewException(101)

    def custom_result(self, **options):
        """
        This method overrides the View base class
        :func:`~cloudant.view.View.custom_result` method with the sole
        purpose of disabling it.  Since QueryIndexView objects are not
        callable, there is no reason to wrap their output in a Result.  If
        you wish to execute a query using a query index, use
        :func:`~cloudant.database.CloudantDatabase.get_query_result`
        instead.
        """
        raise CloudantViewException(102)
"""
_tests_

Test coverage for package
"""
import sys

# True when running under a Python 2 interpreter.
PY2 = sys.version_info[0] < 3

# The native byte-string type for the running interpreter.
BYTETYPE = str if PY2 else bytes


def unicode_(s):
    """Return *s* as a unicode string (no-op on Python 3)."""
    if PY2:
        return unicode(s)
    return s


def iteritems_(d):
    """Return an iterator over *d*'s (key, value) pairs on either Python."""
    if PY2:
        return d.iteritems()
    return d.items()


def bytes_(astr):
    """Return *astr* encoded as UTF-8 bytes, passing bytes through as-is."""
    if hasattr(astr, 'encode'):
        return astr.encode('utf-8')
    return astr


def str_(astr):
    """Return *astr* decoded from UTF-8, passing text through as-is."""
    if hasattr(astr, 'decode'):
        return astr.decode('utf-8')
    return astr


if PY2:
    from StringIO import StringIO
else:
    from io import StringIO
def read_dot_couch(
        filename='~/.couch',
        section='couchdb',
        username='user',
        password='password'):
    """
    Provides a way to read an INI file containing a ``couchdb`` section that
    contains authentication credentials for connecting to a CouchDB
    instance.

    :param str filename: Path and name of INI file.  Defaults to
        ``~/.couch``.
    :param str section: Name of the section in the INI file to find
        credentials.  Defaults to ``couchdb``.
    :param str username: Name of the user entry in the INI file and section.
        Defaults to ``user``.
    :param str password: Name of the password entry in the INI file and
        section.  Defaults to ``password``.

    :returns: A tuple containing user and password
    """
    return _read_dot_file(filename, section, username, password)


def read_dot_cloudant(
        filename='~/.cloudant',
        section='cloudant',
        username='user',
        password='password'):
    """
    Provides a way to read an INI file containing a ``cloudant`` section
    that contains authentication credentials for connecting to a Cloudant
    instance.

    :param str filename: Path and name of INI file.  Defaults to
        ``~/.cloudant``.
    :param str section: Name of the section in the INI file to find
        credentials.  Defaults to ``cloudant``.
    :param str username: Name of the user entry in the INI file and section.
        Defaults to ``user``.
    :param str password: Name of the password entry in the INI file and
        section.  Defaults to ``password``.

    :returns: A tuple containing user and password
    """
    return _read_dot_file(filename, section, username, password)


def _read_dot_file(filename, section, username, password):
    """
    Handles the parsing of the configuration file for the username and
    password.

    :param str filename: Path and name of INI file.
    :param str section: Name of the section in the INI file to find
        credentials.
    :param str username: Name of the user entry in the INI file and section.
    :param str password: Name of the password entry in the INI file and
        section.

    :returns: A tuple containing user and password
    """
    config_file = os.path.expanduser(filename)
    config = RawConfigParser()
    config.read(config_file)
    username_value = config.get(section, username)
    password_value = config.get(section, password)
    return username_value, password_value
""" _changes_test_ changes module integration tests """ import logging import sys import unittest import uuid from cloudant import cloudant from cloudant.credentials import read_dot_cloudant from .. import unicode_ def setup_logging(): log = logging.getLogger() log.setLevel(logging.DEBUG) handler = logging.StreamHandler(sys.stdout) handler.setLevel(logging.DEBUG) log.addHandler(handler) return log LOG = setup_logging() class ChangesTest(unittest.TestCase): """ Verify that our database iterator works, and does the caching that we expect. """ @classmethod def setUp(self): self.user, self.password = read_dot_cloudant(filename="~/.clou") self.last_db = None def tearDown(self): if self.last_db is not None: with cloudant(self.user, self.password, account=self.user) as c: c.delete_database(self.last_db) def test_changes(self): """ _test_changes_ Test to verify that we can connect to a live changes feed. Verify that we are actually staying connected by creating new docs while reading from the _changes feed. """ dbname = "cloudant-changes-test-{0}".format(unicode_(uuid.uuid4())) self.last_db = dbname with cloudant(self.user, self.password, account=self.user) as c: session = c.session() db = c.create_database(dbname) n = 0 def make_doc(n): doc = db.create_document( {"_id": "doc{}".format(n), "testing": "doc{}".format(n)} ) return doc doc = make_doc(n) for change in db.changes(): LOG.debug(unicode_(change)) if change is not None: self.assertEqual(change['id'], doc['_id']) n += 1 doc = make_doc(n) if n > 10: break self.assertTrue(n > 10) def test_changes_include_docs(self): """ _test_changes_include_docs Test to verify that we can pass 'include_docs' successfully through the changes pipeline. 
""" dbname = "cloudant-changes-test-with-docs{0}".format( unicode_(uuid.uuid4())) self.last_db = dbname with cloudant(self.user, self.password, account=self.user) as c: session = c.session() db = c.create_database(dbname) n = 0 def make_doc(n): doc = db.create_document( {"_id": "doc{}".format(n), "testing": "doc{}".format(n)} ) return doc doc = make_doc(n) for change in db.changes(include_docs=True): LOG.debug(unicode_(change)) if change is not None: self.assertEqual(change['id'], doc['_id']) self.assertEqual( # Verify that doc is included, and looks like # the right doc. change.get('doc', {}).get('testing', {}), 'doc{}'.format(n) ) n += 1 doc = make_doc(n) if n > 10: break self.assertTrue(n > 10) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/integration/document_test.py ================================================ #!/usr/bin/env python # Copyright (c) 2015 IBM. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ _document_test_ document module integration tests """ import requests import unittest import uuid from cloudant import cloudant from cloudant.credentials import read_dot_cloudant from .. import unicode_ class DocumentTest(unittest.TestCase): """ Verify that we can do stuff to a document. 
""" def setUp(self): self.user, self.passwd = read_dot_cloudant(filename="~/.clou") self.dbname = unicode_("cloudant-document-tests-{0}".format( unicode_(uuid.uuid4()) )) def tearDown(self): with cloudant(self.user, self.passwd, account=self.user) as c: c.delete_database(self.dbname) def test_delete(self): with cloudant(self.user, self.passwd, account=self.user) as c: db = c.create_database(self.dbname) doc1 = db.create_document({"_id": "doc1", "testing": "document 1"}) doc1.save() doc1.fetch() doc1.delete() self.assertRaises(requests.HTTPError, doc1.fetch) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/integration/end_to_end_example_test.py ================================================ #!/usr/bin/env python # Copyright (c) 2015 IBM. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" _end_to_end_example_test_ End to end integration tests """ import uuid import unittest from cloudant import cloudant, couchdb from ..credentials import read_dot_cloudant, read_dot_couch class E2ECouchTest(unittest.TestCase): """ end to end operational test against a couchdb instance """ def setUp(self): self.user, self.passwd = read_dot_couch(filename="~/.clou") self.dbname = "couch-e2e-test-{0}".format(str(uuid.uuid4())) def test_end_to_end(self): """ End to end database and document crud tests """ with couchdb(self.user, self.passwd, url='http://127.0.0.1:5984') as c: session = c.session() self.assertEqual(session['userCtx']['name'], self.user) db = c.create_database(self.dbname) try: self.assertIn(self.dbname, c) self.assertTrue(db.exists()) # creating docs doc1 = db.new_document() doc2 = db.create_document({'_id': 'womp', "testing": "document2"}) doc3 = db.create_document({"testing": "document3"}) self.assertIn('_id', doc1) self.assertIn('_rev', doc1) self.assertIn('_id', doc2) self.assertIn('_rev', doc2) self.assertIn('_id', doc3) self.assertIn('_rev', doc3) # verifying access via dict api self.assertIn(doc1['_id'], db) self.assertIn(doc2['_id'], db) self.assertIn(doc3['_id'], db) self.assertEqual(db[doc1['_id']], doc1) self.assertEqual(db[doc2['_id']], doc2) self.assertEqual(db[doc3['_id']], doc3) # test working context for updating docs with doc2 as working_doc: working_doc['field1'] = [1, 2, 3] working_doc['field2'] = {'a': 'b'} self.assertEqual( c[self.dbname]['womp']['field2'], {'a': 'b'} ) finally: # remove test database c.delete_database(self.dbname) class E2ECloudantTest(unittest.TestCase): """ starting with a test account, create some databases, documents, updates, deletes etc """ def setUp(self): self.user, self.passwd = read_dot_cloudant(filename="~/.clou") self.dbname = "cloudant-e2e-test-{0}".format(str(uuid.uuid4())) def test_end_to_end(self): """ End to end database and document crud tests """ with cloudant(self.user, self.passwd, 
account=self.user) as c: session = c.session() self.assertEqual(session['userCtx']['name'], self.user) db = c.create_database(self.dbname) try: self.assertIn(self.dbname, c) self.assertTrue(db.exists()) # creating docs doc1 = db.new_document() doc2 = db.create_document({'_id': 'womp', "testing": "document2"}) doc3 = db.create_document({"testing": "document3"}) self.assertIn('_id', doc1) self.assertIn('_rev', doc1) self.assertIn('_id', doc2) self.assertIn('_rev', doc2) self.assertIn('_id', doc3) self.assertIn('_rev', doc3) # verifying access via dict api self.assertIn(doc1['_id'], db) self.assertIn(doc2['_id'], db) self.assertIn(doc3['_id'], db) self.assertEqual(db[doc1['_id']], doc1) self.assertEqual(db[doc2['_id']], doc2) self.assertEqual(db[doc3['_id']], doc3) # test working context for updating docs with doc2 as working_doc: working_doc['field1'] = [1, 2, 3] working_doc['field2'] = {'a': 'b'} self.assertEqual( c[self.dbname]['womp']['field2'], {'a': 'b'} ) finally: # remove test database c.delete_database(self.dbname) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/integration/iter_test.py ================================================ #!/usr/bin/env python # Copyright (c) 2015 IBM. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ _iter_test_ Database iterator integration tests """ import unittest import uuid from cloudant import cloudant from cloudant.credentials import read_dot_cloudant from .. 
import unicode_ class IterTest(unittest.TestCase): """ Verify that our database iterator works, and does the caching that we expect. """ def setUp(self): self.user, self.password = read_dot_cloudant(filename="~/.clou") self.last_db = None def tearDown(self): if self.last_db is not None: with cloudant(self.user, self.password, account=self.user) as c: c.delete_database(self.last_db) def test_database_with_two_docs(self): """ _test_database_with_two_docs_ Test to make sure that our iterator works in the case where there are fewer docs to retrieve than it retrieves in one chunk. """ dbname = "cloudant-itertest-twodocs-{0}".format(unicode_(uuid.uuid4())) self.last_db = dbname with cloudant(self.user, self.password, account=self.user) as c: session = c.session() db = c.create_database(dbname) doc1 = db.create_document( {"_id": "doc1", "testing": "doc1"} ) doc2 = db.create_document( {"_id": "doc2", "testing": "doc2"} ) docs = [] # Make sure that iterator fetches docs for doc in db: docs.append(doc) self.assertEqual(len(docs), 2) def test_database_with_many_docs(self): """ _test_database_with_many_docs_ Test to make sure that we can iterator through stuff """ dbname = "cloudant-itertest-manydocs-{0}".format(unicode_(uuid.uuid4())) self.last_db = dbname with cloudant(self.user, self.password, account=self.user) as c: session = c.session() db = c.create_database(dbname) for i in range(0,300): db.create_document({ "_id": "doc{0}".format(i), "testing": "document {0}".format(i) }) docs = [] for doc in db: docs.append(doc) self.assertEqual(len(docs), 300) unique_ids = set([doc['id'] for doc in docs]) self.assertEqual(len(unique_ids), 300) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/integration/replicator_test.py ================================================ #!/usr/bin/env python # Copyright (c) 2015 IBM. All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ _replicator_test_ replicator integration tests """ import logging import sys import time import uuid import unittest from cloudant import cloudant from cloudant.credentials import read_dot_cloudant from cloudant.replicator import Replicator from .. import unicode_ def setup_logging(): log = logging.getLogger() log.setLevel(logging.DEBUG) handler = logging.StreamHandler(sys.stdout) handler.setLevel(logging.DEBUG) log.addHandler(handler) return log LOG = setup_logging() class ReplicatorTest(unittest.TestCase): """ Excercise our replicator class to ensure that it does its thing. """ def setUp(self): self.user, self.passwd = read_dot_cloudant(filename="~/.clou") self.replication_ids = [] self.dbs = [] def tearDown(self): with cloudant(self.user, self.passwd, account=self.user) as c: replicator = Replicator(c) while self.replication_ids: replicator.stop_replication(self.replication_ids.pop()) while self.dbs: c.delete_database(self.dbs.pop()) def test_init(self): """ _test_init_ Verify that we can init our database object. """ with cloudant(self.user, self.passwd, account=self.user) as c: replicator = Replicator(c) replicator.all_docs() def test_create_replication(self): """ _test_create_replication_ Make a couple of test databases, and confirm that docs from one get transferred to t'other. 
""" dbsource = unicode_("test_create_replication_source_{}".format( unicode_(uuid.uuid4()))) dbtarget = unicode_("test_create_replication_target_{}".format( unicode_(uuid.uuid4()))) self.dbs = [dbsource, dbtarget] with cloudant(self.user, self.passwd, account=self.user) as c: dbs = c.create_database(dbsource) dbt = c.create_database(dbtarget) doc1 = dbs.create_document( {"_id": "doc1", "testing": "document 1"} ) doc2 = dbs.create_document( {"_id": "doc2", "testing": "document 1"} ) doc3 = dbs.create_document( {"_id": "doc3", "testing": "document 1"} ) replicator = Replicator(c) repl_id = unicode_("test_create_replication_{}".format( unicode_(uuid.uuid4()))) self.replication_ids.append(repl_id) ret = replicator.create_replication( source_db=dbs, target_db=dbt, repl_id=repl_id, continuous=False ) try: repl_doc = replicator[repl_id] except KeyError: repl_doc = None if not repl_doc or not (repl_doc.get( '_replication_state', "none") in ('completed', 'error')): for change in replicator.changes(): if change.get('id') == repl_id: try: repl_doc = replicator[repl_id] repl_doc.fetch() except KeyError: pass if repl_doc and (repl_doc.get( '_replication_state', "none") in ('completed', 'error')): break else: LOG.debug( unicode_("Waiting for replication to complete " "(repl_doc: {})".format(repl_doc)) ) self.assertTrue(repl_doc) self.assertEqual(repl_doc.get('_replication_state'), 'completed') for d in ['doc1', 'doc2', 'doc3']: self.assertTrue(dbt[d]) self.assertEqual(dbt[d]['testing'], dbs[d]['testing']) def test_follow_replication(self): """ _test_follow_replication_ Test to make sure that we can follow a replication. 
""" dbsource = unicode_("test_follow_replication_source_{}".format( unicode_(uuid.uuid4()))) dbtarget = unicode_("test_follow_replication_target_{}".format( unicode_(uuid.uuid4()))) self.dbs = [dbsource, dbtarget] with cloudant(self.user, self.passwd, account=self.user) as c: dbs = c.create_database(dbsource) dbt = c.create_database(dbtarget) doc1 = dbs.create_document( {"_id": "doc1", "testing": "document 1"} ) doc2 = dbs.create_document( {"_id": "doc2", "testing": "document 1"} ) doc3 = dbs.create_document( {"_id": "doc3", "testing": "document 1"} ) replicator = Replicator(c) repl_id = unicode_("test_follow_replication_{}".format( unicode_(uuid.uuid4()))) self.replication_ids.append(repl_id) ret = replicator.create_replication( source_db=dbs, target_db=dbt, repl_id=repl_id, continuous=False, ) updates = [ update for update in replicator.follow_replication(repl_id) ] self.assertTrue(len(updates) > 0) self.assertEqual(updates[-1]['_replication_state'], 'completed') def test_replication_state(self): """ _test_replication_state_ Verify that we can get the replication state. 
""" dbsource = unicode_("test_replication_state_source_{}".format( unicode_(uuid.uuid4()))) dbtarget = unicode_("test_replication_state_target_{}".format( unicode_(uuid.uuid4()))) self.dbs = [dbsource, dbtarget] with cloudant(self.user, self.passwd, account=self.user) as c: dbs = c.create_database(dbsource) dbt = c.create_database(dbtarget) doc1 = dbs.create_document( {"_id": "doc1", "testing": "document 1"} ) doc2 = dbs.create_document( {"_id": "doc2", "testing": "document 1"} ) doc3 = dbs.create_document( {"_id": "doc3", "testing": "document 1"} ) replicator = Replicator(c) repl_id = unicode_("test_replication_state_{}".format( unicode_(uuid.uuid4()))) self.replication_ids.append(repl_id) ret = replicator.create_replication( source_db=dbs, target_db=dbt, repl_id=repl_id, continuous=False, ) replication_state = "not_yet_set" while True: # Verify that replication_state returns either None # (if the field doesn't exist yet), or a valid # replication state. replication_state = replicator.replication_state(repl_id) if replication_state is not None: self.assertTrue( replication_state in [ 'completed', 'error', 'triggered' ] ) if replication_state in ('error', 'completed'): break LOG.debug("got replication state: {}".format( replication_state)) time.sleep(1) def test_list_replications(self): """ _test_list_replications_ Verify that we get a list of replications documents back when we got to list replications. 
""" with cloudant(self.user, self.passwd, account=self.user) as c: replicator = Replicator(c) repl_ids = [] num_reps = 3 for i in range(0, num_reps): tag = "{0}_{1}".format(i, unicode_(uuid.uuid4())) dbsource = unicode_("test_list_repl_src_{}".format(tag)) dbtarget = unicode_("test_list_repl_tgt_{}".format(tag)) self.dbs.append(dbsource) self.dbs.append(dbtarget) dbs = c.create_database(dbsource) dbt = c.create_database(dbtarget) doc1 = dbs.create_document( {"_id": "doc1", "testing": "document 1"} ) repl_id = unicode_("test_create_replication_{}".format(tag)) self.replication_ids.append(repl_id) repl_ids.append(repl_id) ret = replicator.create_replication( source_db=dbs, target_db=dbt, repl_id=repl_id, continuous=False ) replications = replicator.list_replications() ids = [doc['_id'] for doc in replications] found_ids = [i for i in ids if i in repl_ids] self.assertEqual(num_reps, len(found_ids)) ================================================ FILE: tests/unit/__init__.py ================================================ #!/usr/bin/env python # Copyright (c) 2015 IBM. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ _unit_ Unittests """ ================================================ FILE: tests/unit/_test_util.py ================================================ #!/usr/bin/env python # Copyright (C) 2017 IBM Corp. All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Module containing miscellaneous functions, and constants used for unit testing. """ from cloudant._2to3 import PY2 # Constants # Test long type in Python 2 LONG_NUMBER = PY2 and long(1) or 1 ================================================ FILE: tests/unit/adapter_tests.py ================================================ #!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright © 2016 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from cloudant.client import CouchDB from cloudant.adapters import Replay429Adapter from requests.packages.urllib3.util import Retry from .unit_t_db_base import UnitTestDbBase class AdapterTests(UnitTestDbBase): """ Unit tests for transport adapters """ def test_new_Replay429Adapter(self): """ Test that a new Replay429Adapter is accepted as a parameter for a client. 
""" self.client = CouchDB( self.user, self.pwd, url=self.url, adapter=Replay429Adapter()) def test_retries_arg_Replay429Adapter(self): """ Test constructing a new Replay429Adapter with a configured number of retries. """ self.client = CouchDB( self.user, self.pwd, url=self.url, adapter=Replay429Adapter(retries=10)) def test_backoff_arg_Replay429Adapter(self): """ Test constructing a new Replay429Adapter with a configured initial backoff. """ self.client = CouchDB( self.user, self.pwd, url=self.url, adapter=Replay429Adapter(initialBackoff=0.1)) def test_args_Replay429Adapter(self): """ Test constructing a new Replay429Adapter with configured retries and initial backoff. """ self.client = CouchDB( self.user, self.pwd, url=self.url, adapter=Replay429Adapter(retries=10, initialBackoff=0.01)) ================================================ FILE: tests/unit/auth_renewal_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2016, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Unit tests for the renewal of cookie auth See configuration options for environment variables in unit_t_db_base module docstring. 
""" import os import time import unittest import requests from cloudant._client_session import CookieSession from nose.plugins.attrib import attr from .unit_t_db_base import skip_if_not_cookie_auth, UnitTestDbBase @attr(db=['cloudant','couch']) @unittest.skipIf(os.environ.get('ADMIN_PARTY') == 'true', 'Skipping - Admin Party mode') class AuthRenewalTests(UnitTestDbBase): """ Auto renewal tests primarily testing the CookieSession functionality """ def setUp(self): """ Override UnitTestDbBase.setUp() with no set up """ pass def tearDown(self): """ Override UnitTestDbBase.tearDown() with no tear down """ pass @skip_if_not_cookie_auth def test_client_db_doc_stack_success(self): """ Ensure that auto renewal of cookie auth happens as expected and applies to all references of r_session throughout the library. """ try: self.set_up_client(auto_connect=True, auto_renew=True) db = self.client._DATABASE_CLASS(self.client, self.dbname()) db.create() db_2 = self.client._DATABASE_CLASS(self.client, self.dbname()) doc = db.create_document({'_id': 'julia001', 'name': 'julia'}) auth_session = self.client.r_session.cookies.get('AuthSession') db_auth_session = db.r_session.cookies.get('AuthSession') db_2_auth_session = db_2.r_session.cookies.get('AuthSession') doc_auth_session = doc.r_session.cookies.get('AuthSession') self.assertIsInstance(self.client.r_session, CookieSession) self.assertIsInstance(db.r_session, CookieSession) self.assertIsInstance(db_2.r_session, CookieSession) self.assertIsInstance(doc.r_session, CookieSession) self.assertIsNotNone(auth_session) self.assertTrue( auth_session == db_auth_session == db_2_auth_session == doc_auth_session ) self.assertTrue(db.exists()) self.assertTrue(doc.exists()) # Will cause a 401 response to be handled internally self.client.r_session.cookies.clear() self.assertIsNone(self.client.r_session.cookies.get('AuthSession')) self.assertIsNone(db.r_session.cookies.get('AuthSession')) 
self.assertIsNone(db_2.r_session.cookies.get('AuthSession')) self.assertIsNone(doc.r_session.cookies.get('AuthSession')) time.sleep(1) # Ensure a different cookie auth value # 401 response handled by renew of cookie auth and retry of request db_2.create() new_auth_session = self.client.r_session.cookies.get('AuthSession') new_db_auth_session = db.r_session.cookies.get('AuthSession') new_db_2_auth_session = db_2.r_session.cookies.get('AuthSession') new_doc_auth_session = doc.r_session.cookies.get('AuthSession') self.assertIsNotNone(new_auth_session) self.assertNotEqual(new_auth_session, auth_session) self.assertTrue( new_auth_session == new_db_auth_session == new_db_2_auth_session == new_doc_auth_session ) self.assertTrue(db.exists()) self.assertTrue(doc.exists()) finally: # Clean up self.client.delete_database(db.database_name) self.client.delete_database(db_2.database_name) self.client.disconnect() del self.client @skip_if_not_cookie_auth def test_client_db_doc_stack_failure(self): """ Ensure that when the regular requests.Session is used that cookie auth renewal is not handled. """ try: self.set_up_client(auto_connect=True) db = self.client._DATABASE_CLASS(self.client, self.dbname()) db.create() self.assertIsInstance(self.client.r_session, requests.Session) self.assertIsInstance(db.r_session, requests.Session) # Will cause a 401 response self.client.r_session.cookies.clear() # 401 response expected to raised with self.assertRaises(requests.HTTPError) as cm: db.delete() self.assertEqual(cm.exception.response.status_code, 401) finally: # Manual reconnect self.client.disconnect() self.client.connect() # Clean up self.client.delete_database(db.database_name) self.client.disconnect() del self.client if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/changes_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2016, 2021 IBM Corp. All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Unit tests for _changes feed """ import json import os import unittest from cloudant._2to3 import unicode_ from cloudant.design_document import DesignDocument from cloudant.document import Document from cloudant.error import CloudantArgumentError from cloudant.feed import Feed from nose.plugins.attrib import attr from requests import Session from .unit_t_db_base import UnitTestDbBase from .. import BYTETYPE @attr(db=['cloudant','couch']) class ChangesTests(UnitTestDbBase): """ _changes feed unit tests """ def setUp(self): """ Set up test attributes """ super(ChangesTests, self).setUp() self.db_set_up() def tearDown(self): """ Reset test attributes """ self.db_tear_down() super(ChangesTests, self).tearDown() def test_constructor_changes(self): """ Test constructing a _changes feed """ feed = Feed(self.db, raw_data=True, chunk_size=1, feed='continuous') self.assertEqual(feed._url, '/'.join([self.db.database_url, '_changes'])) self.assertIsInstance(feed._r_session, Session) self.assertTrue(feed._raw_data) self.assertDictEqual(feed._options, {'feed': 'continuous'}) self.assertEqual(feed._chunk_size, 1) def test_get_last_seq(self): """ Test getting the last sequence identifier """ self.populate_db_with_documents(10) feed = Feed(self.db) changes = [x for x in feed] self.assertTrue(str(feed.last_seq).startswith('10')) def test_stop_iteration(self): """ Test stopping the iteration """ self.populate_db_with_documents(10) feed = 
Feed(self.db, feed='continuous') count = 0 changes = list() for change in feed: changes.append(change) count += 1 if count == 3: feed.stop() self.assertEqual(len(change), 3) self.assertTrue(str(changes[0]['seq']).startswith('1')) self.assertTrue(str(changes[1]['seq']).startswith('2')) self.assertTrue(str(changes[2]['seq']).startswith('3')) self.assertIsNone(feed.last_seq) def test_get_raw_content(self): """ Test getting raw feed content """ self.populate_db_with_documents(3) feed = Feed(self.db, raw_data=True) raw_content = list() for raw_line in feed: self.assertIsInstance(raw_line, BYTETYPE) raw_content.append(raw_line) changes = json.loads(''.join([unicode_(x) for x in raw_content])) if self.is_couchdb_1x_version() is True: self.assertSetEqual( set(changes.keys()), set(['results', 'last_seq'])) else: self.assertSetEqual(set(changes.keys()), set(['results', 'last_seq', 'pending'])) results = list() for result in changes['results']: self.assertSetEqual(set(result.keys()), set(['seq', 'changes', 'id'])) results.append(result) expected = set(['julia000', 'julia001', 'julia002']) self.assertSetEqual(set([x['id'] for x in results]), expected) self.assertTrue(str(changes['last_seq']).startswith('3')) self.assertIsNone(feed.last_seq) def test_get_normal_feed_default(self): """ Test getting content back for a "normal" feed without feed option """ self.populate_db_with_documents(3) feed = Feed(self.db) changes = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) expected = set(['julia000', 'julia001', 'julia002']) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('3')) def test_get_normal_feed_explicit(self): """ Test getting content back for a "normal" feed using feed option """ self.populate_db_with_documents(3) feed = Feed(self.db, feed='normal') changes = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 
'changes', 'id'])) changes.append(change) expected = set(['julia000', 'julia001', 'julia002']) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('3')) def test_get_continuous_feed(self): """ Test getting content back for a "continuous" feed """ self.populate_db_with_documents() feed = Feed(self.db, feed='continuous') changes = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) if len(changes) == 100: feed.stop() expected = set(['julia{0:03d}'.format(i) for i in range(100)]) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertIsNone(feed.last_seq) # Compare continuous with normal normal = Feed(self.db) self.assertSetEqual( set([x['id'] for x in changes]), set([n['id'] for n in normal])) def test_get_longpoll_feed(self): """ Test getting content back for a "longpoll" feed """ feed = Feed(self.db, feed='longpoll', heartbeat=10) changes = list() for change in feed: if not change: self.populate_db_with_documents(1) continue self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) self.assertListEqual([x['id'] for x in changes], ['julia000']) self.assertTrue(str(feed.last_seq).startswith('1')) def test_get_feed_with_heartbeat(self): """ Test getting content back for a feed with a heartbeat """ self.populate_db_with_documents() feed = Feed(self.db, feed='continuous', heartbeat=10) changes = list() heartbeats = 0 for change in feed: if not change: self.assertIsNone(change) heartbeats += 1 else: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) if heartbeats == 3: feed.stop() expected = set(['julia{0:03d}'.format(i) for i in range(100)]) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertIsNone(feed.last_seq) def test_get_raw_feed_with_heartbeat(self): """ Test getting raw content back for a feed with a heartbeat """ 
self.populate_db_with_documents() feed = Feed(self.db, raw_data=True, feed='continuous', heartbeat=10) raw_content = list() heartbeats = 0 for raw_line in feed: if not raw_line: self.assertEqual(len(raw_line), 0) heartbeats += 1 else: self.assertIsInstance(raw_line, BYTETYPE) raw_content.append(raw_line) if heartbeats == 3: feed.stop() changes = [json.loads(unicode_(x)) for x in raw_content] expected = set(['julia{0:03d}'.format(i) for i in range(100)]) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertIsNone(feed.last_seq) def test_get_feed_descending(self): """ Test getting content back for a descending feed. When testing, the sequence identifier is in the form of -. Often times the number prefix sorts as expected when using descending but sometimes the number prefix is repeated. """ self.populate_db_with_documents(50) feed = Feed(self.db, descending=True) seq_list = list() last_seq = None for change in feed: if last_seq: if self.is_couchdb_1x_version() is True: self.assertTrue(change['seq'] < last_seq) else: current = int(change['seq'][0: change['seq'].find('-')]) last = int(last_seq[0:last_seq.find('-')]) try: self.assertTrue(current < last) except AssertionError: self.assertEqual(current, last) seq_list.append(change['seq']) last_seq = change['seq'] self.assertEqual(len(seq_list), 50) self.assertEqual(feed.last_seq, last_seq) def test_get_feed_include_docs(self): """ Test getting content back for a feed that includes documents """ self.populate_db_with_documents(3) feed = Feed(self.db, include_docs=True) ids = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id', 'doc'])) self.assertSetEqual( set(change['doc'].keys()), set(['_id', '_rev', 'name', 'age'])) ids.append(change['id']) self.assertSetEqual(set(ids), set(['julia000', 'julia001', 'julia002'])) def test_get_feed_using_style_main_only(self): """ Test getting content back for a feed using style set to main_only """ 
self.populate_db_with_documents(3) for i in range(3): docid = 'julia{0:03d}'.format(i) doc = self.db[docid] doc.delete() with Document(self.db, docid) as doc: doc['name'] = 'Jules' doc['age'] = i feed = Feed(self.db, style='main_only') changes = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) self.assertEqual(len(change['changes']), 1) changes.append(change) expected = set(['julia000', 'julia001', 'julia002']) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('9')) def test_get_feed_using_style_all_docs(self): """ Test getting content back for a feed using style set to "all_docs" """ self.populate_db_with_documents(3) for i in range(3): docid = 'julia{0:03d}'.format(i) doc = self.db[docid] doc.delete() with Document(self.db, docid) as doc: doc['name'] = 'Jules' doc['age'] = i feed = Feed(self.db, style='all_docs') changes = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) expected = set(['julia000', 'julia001', 'julia002']) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('9')) def test_get_feed_using_since(self): """ Test getting content back for a feed using the since option """ self.populate_db_with_documents(3) feed = Feed(self.db) changes = [change for change in feed] last_seq = feed.last_seq self.populate_db_with_documents(3, off_set=3) feed = Feed(self.db, since=last_seq) changes = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) expected = set(['julia003', 'julia004', 'julia005']) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('6')) def test_get_feed_using_since_now(self): """ Test getting content back for a feed using since set to "now" """ self.populate_db_with_documents(3) feed = 
Feed(self.db, feed='continuous', heartbeat=1000, since='now') changes = list() first_pass = True beats = 0 for change in feed: if first_pass and not change: self.populate_db_with_documents(3, off_set=3) first_pass = False continue elif change: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) beats = 0 else: beats += 1 if beats == 15 or len(changes) == 3: feed.stop() expected = set(['julia003', 'julia004', 'julia005']) self.assertSetEqual(set([x['id'] for x in changes]), expected) def test_get_feed_using_since_zero(self): """ Test getting content back for a feed using since set to zero """ self.populate_db_with_documents(3) feed = Feed(self.db, since=0) changes = list() for change in feed: self.assertSetEqual(set(change.keys()), {'seq', 'changes', 'id'}) changes.append(change) expected = set(['julia{0:03d}'.format(i) for i in range(3)]) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('3')) def test_get_feed_using_timeout(self): """ Test getting content back for a feed using timeout """ self.populate_db_with_documents() feed = Feed(self.db, feed='continuous', timeout=100) changes = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) expected = set(['julia{0:03d}'.format(i) for i in range(100)]) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('100')) # Compare continuous with normal normal = Feed(self.db) self.assertSetEqual( set([x['id'] for x in changes]), set([n['id'] for n in normal])) def test_get_feed_using_limit(self): """ Test getting content back for a feed using limit """ self.populate_db_with_documents() feed = Feed(self.db, limit=3) seq_list = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) seq_list.append(change['seq']) self.assertEqual(len(seq_list), 3) 
self.assertTrue(str(seq_list[0]).startswith('1')) self.assertTrue(str(seq_list[1]).startswith('2')) self.assertTrue(str(seq_list[2]).startswith('3')) self.assertEqual(feed.last_seq, seq_list[2]) def test_get_feed_using_filter(self): """ Test getting content back for a feed using filter """ self.populate_db_with_documents(6) ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['filters'] = { 'even_docs': 'function(doc, req){if (doc.age % 2 != 0){return false;} return true;}' } ddoc.create() feed = Feed(self.db, filter='ddoc001/even_docs') changes = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) expected = set(['julia000', 'julia002', 'julia004']) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('7')) def test_get_feed_using_conflicts_true(self): """ Test getting content back for a feed using conflicts set to True. No conflicts were generated but this test ensures that the translation process for the conflicts option is working. 
""" self.populate_db_with_documents(3) feed = Feed(self.db, include_docs=True, conflicts=True) changes = list() for change in feed: self.assertSetEqual( set(change.keys()), set(['seq', 'changes', 'id', 'doc'])) changes.append(change) expected = set(['julia000', 'julia001', 'julia002']) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('3')) def test_get_feed_using_conflicts_false(self): """ Test getting content back for a feed using conflicts set to False """ self.populate_db_with_documents(3) feed = Feed(self.db, include_docs=True, conflicts=False) changes = list() for change in feed: self.assertSetEqual( set(change.keys()), set(['seq', 'changes', 'id', 'doc'])) changes.append(change) expected = set(['julia000', 'julia001', 'julia002']) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('3')) def test_get_feed_using_doc_ids(self): """ Test getting content back for a feed using doc_ids """ self.populate_db_with_documents() feed = Feed(self.db, filter='_doc_ids', doc_ids=['julia000', 'julia010', 'julia020']) changes = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) expected = set(['julia000', 'julia010', 'julia020']) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertTrue(str(feed.last_seq).startswith('100')) def test_get_feed_with_custom_filter_query_params(self): """ Test using feed with custom filter query parameters. 
""" feed = Feed( self.db, filter='mailbox/new_mail', foo='bar', # query parameters to a custom filter include_docs=False ) params = feed._translate(feed._options) self.assertEqual(params['filter'], 'mailbox/new_mail') self.assertEqual(params['foo'], 'bar') self.assertEqual(params['include_docs'], 'false') def test_invalid_argument_type(self): """ Test that an invalid argument type is caught and an exception is raised """ feed = Feed(self.db, conflicts=0) with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertTrue( str(cm.exception).startswith('Argument conflicts not instance of expected type:') ) def test_invalid_non_positive_integer_argument(self): """ Test that an invalid integer argument type is caught and an exception is raised """ feed = Feed(self.db, limit=-1) with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertEqual( str(cm.exception), 'Argument limit must be > 0. Found: -1') def test_invalid_feed_value(self): """ Test that an invalid feed argument value is caught and an exception is raised """ feed = Feed(self.db, feed='foo') with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertTrue(str(cm.exception).startswith( 'Invalid value (foo) for feed option.')) def test_invalid_style_value(self): """ Test that an invalid feed argument value is caught and an exception is raised """ feed = Feed(self.db, style='foo') with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertEqual( str(cm.exception), 'Invalid value (foo) for style option. Must be main_only, or all_docs.') if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/client_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2021 IBM Corp. All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ client module - Unit tests for CouchDB and Cloudant client classes See configuration options for environment variables in unit_t_db_base module docstring. """ import base64 import datetime import json import os import sys import unittest from time import sleep from urllib.parse import urlparse import mock import requests from cloudant import cloudant, cloudant_bluemix, couchdb, couchdb_admin_party from cloudant._client_session import BasicSession, CookieSession from cloudant.client import Cloudant, CouchDB from cloudant.database import CloudantDatabase from cloudant.error import (CloudantArgumentError, CloudantClientException, CloudantDatabaseException) from cloudant.feed import Feed, InfiniteFeed from nose.plugins.attrib import attr from requests import ConnectTimeout, HTTPError from .unit_t_db_base import skip_if_iam, skip_if_not_cookie_auth, UnitTestDbBase from .. import bytes_, str_ class CloudantClientExceptionTests(unittest.TestCase): """ Ensure CloudantClientException functions as expected. """ def test_raise_without_code(self): """ Ensure that a default exception/code is used if none is provided. """ with self.assertRaises(CloudantClientException) as cm: raise CloudantClientException() self.assertEqual(cm.exception.status_code, 100) def test_raise_using_invalid_code(self): """ Ensure that a default exception/code is used if invalid code is provided. 
""" with self.assertRaises(CloudantClientException) as cm: raise CloudantClientException('foo') self.assertEqual(cm.exception.status_code, 100) def test_raise_without_args(self): """ Ensure that a default exception/code is used if the message requested by the code provided requires an argument list and none is provided. """ with self.assertRaises(CloudantClientException) as cm: raise CloudantClientException(404) self.assertEqual(cm.exception.status_code, 100) def test_raise_with_proper_code_and_args(self): """ Ensure that the requested exception is raised. """ with self.assertRaises(CloudantClientException) as cm: raise CloudantClientException(404, 'foo') self.assertEqual(cm.exception.status_code, 404) class ClientTests(UnitTestDbBase): """ CouchDB/Cloudant client unit tests """ @unittest.skipIf( ((os.environ.get('ADMIN_PARTY') and os.environ.get('ADMIN_PARTY') == 'true')), 'Skipping couchdb context manager test' ) @attr(db='couch') def test_couchdb_context_helper(self): """ Test that the couchdb context helper works as expected. """ try: with couchdb(self.user, self.pwd, url=self.url) as c: self.assertIsInstance(c, CouchDB) self.assertIsInstance(c.r_session, requests.Session) except Exception as err: self.fail('Exception {0} was raised.'.format(str(err))) @unittest.skipUnless( ((os.environ.get('ADMIN_PARTY') and os.environ.get('ADMIN_PARTY') == 'true')), 'Skipping couchdb_admin_party context manager test' ) @attr(db='couch') def test_couchdb_admin_party_context_helper(self): """ Test that the couchdb_admin_party context helper works as expected. 
""" try: with couchdb_admin_party(url=self.url) as c: self.assertIsInstance(c, CouchDB) self.assertIsInstance(c.r_session, requests.Session) except Exception as err: self.fail('Exception {0} was raised.'.format(str(err))) def test_constructor_with_url(self): """ Test instantiating a client object using a URL """ self.assertEqual( self.client.server_url, self.url ) self.assertEqual(self.client.encoder, json.JSONEncoder) self.assertIsNone(self.client.r_session) def test_constructor_with_creds_removed_from_url(self): """ Test instantiating a client object using a URL """ client = CouchDB(None, None, url='http://a9a9a9a9-a9a9-a9a9-a9a9-a9a9a9a9a9a9-bluemix' ':a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9' 'a9a9a9a9a9a9@d8a01891-e4d2-4102-b5f8-751fb735ce31-' 'bluemix.couchdb.local:5984') self.assertEqual(client.server_url, 'http://d8a01891-e4d2-4102-b5f8-751fb735ce31-' 'bluemix.couchdb.local:5984') self.assertEqual(client._user, 'a9a9a9a9-a9a9-a9a9-a9a9-a9a9a9a9a9a9-bluemix') self.assertEqual(client._auth_token, 'a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a' '9a9a9a9a9a9a9a9a9a9a9a9a9') def test_connect(self): """ Test connect and disconnect functionality. """ try: self.client.connect() self.assertIsInstance(self.client.r_session, requests.Session) finally: self.client.disconnect() self.assertIsNone(self.client.r_session) def test_auto_connect(self): """ Test connect during client instantiation option. """ try: self.set_up_client(auto_connect=True) self.assertIsInstance(self.client.r_session, requests.Session) finally: self.client.disconnect() self.assertIsNone(self.client.r_session) def test_multiple_connect(self): """ Test that issuing a connect call to an already connected client does not cause any issue. 
""" try: self.client.connect() self.set_up_client(auto_connect=True) self.client.connect() self.assertIsInstance(self.client.r_session, requests.Session) finally: self.client.disconnect() self.assertIsNone(self.client.r_session) @skip_if_not_cookie_auth def test_auto_renew_enabled(self): """ Test that CookieSession is used when auto_renew is enabled. """ try: self.set_up_client(auto_renew=True) self.client.connect() if os.environ.get('ADMIN_PARTY') == 'true': self.assertIsInstance(self.client.r_session, requests.Session) else: self.assertIsInstance(self.client.r_session, CookieSession) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_auto_renew_enabled_with_auto_connect(self): """ Test that CookieSession is used when auto_renew is enabled along with an auto_connect. """ try: self.set_up_client(auto_connect=True, auto_renew=True) if os.environ.get('ADMIN_PARTY') == 'true': self.assertIsInstance(self.client.r_session, requests.Session) else: self.assertIsInstance(self.client.r_session, CookieSession) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_session(self): """ Test getting session information. Session info is None if CouchDB Admin Party mode was selected. """ try: self.client.connect() session = self.client.session() if self.client.admin_party: self.assertIsNone(session) else: self.assertEqual(session['userCtx']['name'], self.user) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_session_cookie(self): """ Test getting the session cookie. Session cookie is None if CouchDB Admin Party mode was selected. """ try: self.client.connect() if self.client.admin_party: self.assertIsNone(self.client.session_cookie()) else: self.assertIsNotNone(self.client.session_cookie()) finally: self.client.disconnect() @mock.patch('cloudant._client_session.Session.request') def test_session_basic(self, m_req): """ Test using basic access authentication. 
""" m_response_ok = mock.MagicMock() type(m_response_ok).status_code = mock.PropertyMock(return_value=200) type(m_response_ok).text = mock.PropertyMock(return_value='["animaldb"]') m_req.return_value = m_response_ok client = Cloudant('foo', 'bar', url=self.url, use_basic_auth=True) client.connect() self.assertIsInstance(client.r_session, BasicSession) all_dbs = client.all_dbs() m_req.assert_called_once_with( 'GET', self.url + '/_all_dbs', allow_redirects=True, auth=('foo', 'bar'), # uses HTTP Basic Auth timeout=None ) self.assertEqual(all_dbs, ['animaldb']) @mock.patch('cloudant._client_session.Session.request') def test_session_basic_with_no_credentials(self, m_req): """ Test using basic access authentication with no credentials. """ m_response_ok = mock.MagicMock() type(m_response_ok).status_code = mock.PropertyMock(return_value=200) m_req.return_value = m_response_ok client = Cloudant(None, None, url=self.url, use_basic_auth=True) client.connect() self.assertIsInstance(client.r_session, BasicSession) db = client['animaldb'] m_req.assert_called_once_with( 'HEAD', self.url + '/animaldb', allow_redirects=False, auth=None, # ensure no authentication specified timeout=None ) self.assertIsInstance(db, CloudantDatabase) @mock.patch('cloudant._client_session.Session.request') def test_change_credentials_basic(self, m_req): """ Test changing credentials when using basic access authentication. 
""" # mock 200 m_response_ok = mock.MagicMock() type(m_response_ok).text = mock.PropertyMock(return_value='["animaldb"]') # mock 401 m_response_bad = mock.MagicMock() m_response_bad.raise_for_status.side_effect = HTTPError('401 Unauthorized') m_req.side_effect = [m_response_bad, m_response_ok] client = Cloudant('foo', 'bar', url=self.url, use_basic_auth=True) client.connect() self.assertIsInstance(client.r_session, BasicSession) with self.assertRaises(HTTPError): client.all_dbs() # expected 401 m_req.assert_called_with( 'GET', self.url + '/_all_dbs', allow_redirects=True, auth=('foo', 'bar'), # uses HTTP Basic Auth timeout=None ) # use valid credentials client.change_credentials('baz', 'qux') all_dbs = client.all_dbs() m_req.assert_called_with( 'GET', self.url + '/_all_dbs', allow_redirects=True, auth=('baz', 'qux'), # uses HTTP Basic Auth timeout=None ) self.assertEqual(all_dbs, ['animaldb']) @skip_if_not_cookie_auth def test_basic_auth_str(self): """ Test getting the basic authentication string. Basic auth string is None if CouchDB Admin Party mode was selected. 
""" try: self.client.connect() if self.client.admin_party: self.assertIsNone(self.client.basic_auth_str()) else: expected = 'Basic {0}'.format( str_(base64.urlsafe_b64encode(bytes_("{0}:{1}".format( self.user, self.pwd )))) ) self.assertEqual(self.client.basic_auth_str(), expected) finally: self.client.disconnect() def test_all_dbs(self): """ Test getting a list of all of the databases """ dbnames = [self.dbname() for _ in range(3)] try: self.client.connect() for dbname in dbnames: self.client.create_database(dbname) self.assertTrue(set(dbnames).issubset(self.client.all_dbs())) finally: for dbname in dbnames: self.client.delete_database(dbname) self.client.disconnect() def test_create_delete_database(self): """ Test database creation and deletion """ try: self.client.connect() dbname = self.dbname() # Create database db = self.client.create_database(dbname) self.assertTrue(db.exists()) # Delete database self.assertIsNone(self.client.delete_database(dbname)) self.assertFalse(db.exists()) finally: self.client.disconnect() def test_create_existing_database(self): """ Test creation of already existing database """ dbname = self.dbname() self.client.connect() self.client.create_database(dbname) with self.assertRaises(CloudantClientException) as cm: self.client.create_database(dbname, throw_on_exists=True) self.assertEqual(cm.exception.status_code, 412) self.client.delete_database(dbname) self.client.disconnect() def test_create_invalid_database_name(self): """ Test creation of database with an invalid name """ dbname = 'invalidDbName_' self.client.connect() with self.assertRaises((CloudantDatabaseException, HTTPError)) as cm: self.client.create_database(dbname) code = cm.exception.status_code if hasattr(cm.exception, 'status_code') else cm.exception.response.status_code self.assertEqual(code, 400) self.client.disconnect() @skip_if_not_cookie_auth @mock.patch('cloudant._client_session.Session.request') def test_create_with_server_error(self, m_req): """ Test creation of 
database with a server error """ dbname = self.dbname() # mock 200 for authentication m_response_ok = mock.MagicMock() type(m_response_ok).status_code = mock.PropertyMock(return_value=200) # mock 404 for head request when verifying if database exists m_response_bad = mock.MagicMock() type(m_response_bad).status_code = mock.PropertyMock(return_value=404) # mock 500 when trying to create the database m_resp_service_error = mock.MagicMock() type(m_resp_service_error).status_code = mock.PropertyMock( return_value=500) type(m_resp_service_error).text = mock.PropertyMock( return_value='Internal Server Error') m_req.side_effect = [m_response_ok, m_response_bad, m_resp_service_error] self.client.connect() with self.assertRaises(CloudantDatabaseException) as cm: self.client.create_database(dbname) self.assertEqual(cm.exception.status_code, 500) self.assertEqual(m_req.call_count, 3) m_req.assert_called_with( 'PUT', '/'.join([self.url, dbname]), data=None, params={'partitioned': 'false'}, timeout=(30, 300) ) def test_delete_non_existing_database(self): """ Test deletion of non-existing database """ try: self.client.connect() self.client.delete_database('no_such_db') self.fail('Above statement should raise a CloudantException') except CloudantClientException as err: self.assertEqual(str(err), 'Database no_such_db does not exist. 
' 'Verify that the client is valid and try again.') finally: self.client.disconnect() def test_keys(self): """ Test retrieving the list of database names """ dbs = [] try: self.client.connect() self.assertEqual(list(self.client.keys()), []) # create 10 new test dbs for _ in range(10): dbs.append(self.client.create_database(self.dbname()).database_name) self.assertTrue(set(dbs).issubset(set(self.client.keys(remote=True)))) self.assertTrue(set(dbs).issubset(set(self.client.all_dbs()))) finally: for db in dbs: self.client.delete_database(db) # remove test db self.client.disconnect() def test_get_non_existing_db_via_getitem(self): """ Test __getitem__ when retrieving a non-existing database """ try: self.client.connect() db = self.client['no_such_db'] self.fail('Above statement should raise a KeyError') except KeyError: pass finally: self.client.disconnect() def test_get_db_via_getitem(self): """ Test __getitem__ when retrieving a database """ dbname = self.dbname() try: self.client.connect() self.client.create_database(dbname) # Retrieve the database object from the server using __getitem__ db = self.client[dbname] self.assertIsInstance(db, self.client._DATABASE_CLASS) finally: self.client.delete_database(dbname) self.client.disconnect() def test_delete_cached_db_object_via_delitem(self): """ Test __delitem__ when removing a cached database object """ dbname = self.dbname() try: self.client.connect() db = self.client.create_database(dbname) self.assertIsNotNone(self.client.get(dbname)) del self.client[dbname] # Removed from local cache # Note: The get method returns a local db object by default self.assertIsNone(self.client.get(dbname)) # Database still exists remotely # Note: __getitem__ returns the db object from the server self.assertEqual(self.client[dbname], db) finally: self.client.delete_database(dbname) self.client.disconnect() def test_delete_remote_db_via_delitem(self): """ Test __delitem__ when removing a database """ dbname = self.dbname() try: 
self.client.connect() db = self.client.create_database(dbname) self.assertIsNotNone(self.client.get(dbname)) self.client.__delitem__(dbname, remote=True) # Removed from local cache self.assertIsNone(self.client.get(dbname)) # Database removed remotely as well try: db = self.client[dbname] self.fail('Above statement should raise a KeyError') except KeyError: pass finally: self.client.disconnect() def test_get_cached_db_object_via_get(self): """ Test retrieving a database from the client database cache """ dbname = self.dbname() try: self.client.connect() # Default returns None self.assertIsNone(self.client.get('no_such_db')) # Creates the database remotely and adds it to the # client database cache db = self.client.create_database(dbname) # Locally cached database object is returned self.assertEqual(self.client.get(dbname), db) finally: self.client.delete_database(dbname) self.client.disconnect() def test_get_remote_db_via_get(self): """ Test retrieving a database """ dbname = self.dbname() try: self.client.connect() # Default returns None self.assertIsNone(self.client.get('no_such_db', remote=True)) # Creates the database remotely and ensure that # it is not in the client database local cache db = self.client.create_database(dbname) del self.client[dbname] self.assertIsNone(self.client.get(dbname)) # Retrieve the database object from the server self.assertEqual(self.client.get(dbname, remote=True), db) finally: self.client.delete_database(dbname) self.client.disconnect() def test_set_non_db_value_via_setitem(self): """ Test raising exception when value is not a database object """ try: self.client.connect() self.client['not-a-db'] = 'This is not a database object' self.fail('Above statement should raise a CloudantException') except CloudantClientException as err: self.assertEqual( str(err), 'Value must be set to a Database object. 
Found type: str') finally: self.client.disconnect() def test_local_set_db_value_via_setitem(self): """ Test setting a database object to the local database cache """ try: self.client.connect() db = self.client._DATABASE_CLASS(self.client, 'local-not-on-server') # Value is set in the local database cache but not on the server self.client['local-not-on-server'] = db self.assertEqual(self.client.get('local-not-on-server'), db) self.assertFalse(db.exists()) finally: self.client.disconnect() def test_create_db_via_setitem(self): """ Test creating a database remotely using __setitem__ """ dbname = self.dbname() try: self.client.connect() db = self.client._DATABASE_CLASS(self.client, dbname) self.client.__setitem__(dbname, db, remote=True) self.assertTrue(db.exists()) finally: self.client.delete_database(dbname) self.client.disconnect() def test_db_updates_feed_call(self): """ Test that db_updates() method call constructs and returns a Feed object """ try: self.client.connect() db_updates = self.client.db_updates(limit=100) self.assertIs(type(db_updates), Feed) self.assertEqual( db_updates._url, '/'.join([self.client.server_url, '_db_updates'])) self.assertIsInstance(db_updates._r_session, requests.Session) self.assertFalse(db_updates._raw_data) self.assertEqual(db_updates._options.get('limit'), 100) finally: self.client.disconnect() @attr(db='cloudant') class CloudantClientTests(UnitTestDbBase): """ Cloudant specific client unit tests """ def test_constructor_with_creds_removed_from_url(self): """ Test instantiating a client object using a URL """ client = Cloudant(None, None, url='https://a9a9a9a9-a9a9-a9a9-a9a9-a9a9a9a9a9a9-bluemix' ':a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9' 'a9a9a9a9a9a9@d8a01891-e4d2-4102-b5f8-751fb735ce31-' 'bluemix.cloudant.com') self.assertEqual(client.server_url, 'https://d8a01891-e4d2-4102-b5f8-751fb735ce31-' 'bluemix.cloudant.com') self.assertEqual(client._user, 'a9a9a9a9-a9a9-a9a9-a9a9-a9a9a9a9a9a9-bluemix') 
self.assertEqual(client._auth_token, 'a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a9a' '9a9a9a9a9a9a9a9a9a9a9a9a9') @skip_if_not_cookie_auth def test_cloudant_session_login(self): """ Test that the Cloudant client session successfully authenticates. """ self.client.connect() old_cookie = self.client.session_cookie() sleep(5) # ensure we get a different cookie back self.client.session_login() self.assertNotEqual(self.client.session_cookie(), old_cookie) @skip_if_not_cookie_auth def test_cloudant_session_login_with_new_credentials(self): """ Test that the Cloudant client session fails to authenticate when passed incorrect credentials. """ self.client.connect() with self.assertRaises(HTTPError) as cm: self.client.session_login('invalid-user-123', 'pa$$w0rd01') self.assertTrue(str(cm.exception).find('Name or password is incorrect')) @skip_if_not_cookie_auth def test_cloudant_context_helper(self): """ Test that the cloudant context helper works as expected. """ try: with cloudant(self.user, self.pwd, url=self.url) as c: self.assertIsInstance(c, Cloudant) self.assertIsInstance(c.r_session, requests.Session) except Exception as err: self.fail('Exception {0} was raised.'.format(str(err))) @skip_if_not_cookie_auth def test_cloudant_bluemix_context_helper_with_legacy_creds(self): """ Test that the cloudant_bluemix context helper with legacy creds works as expected. 
""" instance_name = 'Cloudant NoSQL DB-lv' vcap_services = {'cloudantNoSQLDB': [{ 'credentials': { 'username': self.user, 'password': self.pwd, 'host': urlparse(self.url).hostname, 'port': 443, 'url': self.url }, 'name': instance_name, }]} try: with cloudant_bluemix(vcap_services, instance_name=instance_name) as c: self.assertIsInstance(c, Cloudant) self.assertIsInstance(c.r_session, requests.Session) self.assertEqual(c.session()['userCtx']['name'], self.user) except Exception as err: self.fail('Exception {0} was raised.'.format(str(err))) @unittest.skipUnless(os.environ.get('IAM_API_KEY'), 'Skipping Cloudant Bluemix context helper with IAM test') def test_cloudant_bluemix_context_helper_with_iam(self): """ Test that the cloudant_bluemix context helper with IAM works as expected. """ instance_name = 'Cloudant NoSQL DB-lv' vcap_services = {'cloudantNoSQLDB': [{ 'credentials': { 'apikey': self.iam_api_key, 'username': self.user, 'host': urlparse(self.url).hostname, 'port': 443, 'url': self.url }, 'name': instance_name, }]} try: with cloudant_bluemix(vcap_services, instance_name=instance_name) as c: self.assertIsInstance(c, Cloudant) self.assertIsInstance(c.r_session, requests.Session) except Exception as err: self.fail('Exception {0} was raised.'.format(str(err))) def test_cloudant_bluemix_context_helper_raise_error_for_missing_iam_and_creds(self): """ Test that the cloudant_bluemix context helper raises a CloudantClientException when the IAM key, username, and password are missing in the VCAP_SERVICES env variable. 
""" instance_name = 'Cloudant NoSQL DB-lv' vcap_services = {'cloudantNoSQLDB': [{ 'credentials': { 'host': urlparse(self.url).hostname, 'port': 443, 'url': self.url }, 'name': instance_name, }]} try: with cloudant_bluemix(vcap_services, instance_name=instance_name) as c: self.assertIsInstance(c, Cloudant) self.assertIsInstance(c.r_session, requests.Session) except CloudantClientException as err: self.assertEqual( 'Invalid service: IAM API key or username/password credentials are required.', str(err) ) @skip_if_iam def test_cloudant_bluemix_dedicated_context_helper(self): """ Test that the cloudant_bluemix context helper works as expected when specifying a service name. """ instance_name = 'Cloudant NoSQL DB-wq' service_name = 'cloudantNoSQLDB Dedicated' vcap_services = {service_name: [{ 'credentials': { 'username': self.user, 'password': self.pwd, 'host': urlparse(self.url).hostname, 'port': 443, 'url': self.url }, 'name': instance_name, }]} try: with cloudant_bluemix(vcap_services, instance_name=instance_name, service_name=service_name) as c: self.assertIsInstance(c, Cloudant) self.assertIsInstance(c.r_session, requests.Session) self.assertEqual(c.session()['userCtx']['name'], self.user) except Exception as err: self.fail('Exception {0} was raised.'.format(str(err))) def test_constructor_with_account(self): """ Test instantiating a client object using an account name """ # Ensure that the client is new del self.client self.client = Cloudant('user', 'pass', account='foo') self.assertEqual( self.client.server_url, 'https://foo.cloudant.com' ) @skip_if_not_cookie_auth def test_bluemix_constructor_with_legacy_creds(self): """ Test instantiating a client object using a VCAP_SERVICES environment variable. 
""" instance_name = 'Cloudant NoSQL DB-lv' vcap_services = {'cloudantNoSQLDB': [{ 'credentials': { 'username': self.user, 'password': self.pwd, 'host': urlparse(self.url).hostname, 'port': 443, 'url': self.url }, 'name': instance_name }]} # create Cloudant Bluemix client c = Cloudant.bluemix(vcap_services) try: c.connect() self.assertIsInstance(c, Cloudant) self.assertIsInstance(c.r_session, requests.Session) self.assertEqual(c.session()['userCtx']['name'], self.user) except Exception as err: self.fail('Exception {0} was raised.'.format(str(err))) finally: c.disconnect() @unittest.skipUnless(os.environ.get('IAM_API_KEY'), 'Skipping Cloudant Bluemix constructor with IAM test') def test_bluemix_constructor_with_iam(self): """ Test instantiating a client object using a VCAP_SERVICES environment variable. """ instance_name = 'Cloudant NoSQL DB-lv' vcap_services = {'cloudantNoSQLDB': [{ 'credentials': { 'apikey': self.iam_api_key, 'username': self.user, 'host': urlparse(self.url).hostname, 'port': 443 }, 'name': instance_name }]} # create Cloudant Bluemix client c = Cloudant.bluemix(vcap_services) try: c.connect() self.assertIsInstance(c, Cloudant) self.assertIsInstance(c.r_session, requests.Session) except Exception as err: self.fail('Exception {0} was raised.'.format(str(err))) finally: c.disconnect() @skip_if_iam def test_bluemix_constructor_specify_instance_name(self): """ Test instantiating a client object using a VCAP_SERVICES environment variable and specifying which instance name to use. 
""" instance_name = 'Cloudant NoSQL DB-lv' vcap_services = {'cloudantNoSQLDB': [{ 'credentials': { 'username': self.user, 'password': self.pwd, 'host': urlparse(self.url).hostname, 'port': 443, 'url': self.url }, 'name': instance_name }]} # create Cloudant Bluemix client c = Cloudant.bluemix(vcap_services, instance_name=instance_name) try: c.connect() self.assertIsInstance(c, Cloudant) self.assertIsInstance(c.r_session, requests.Session) self.assertEqual(c.session()['userCtx']['name'], self.user) except Exception as err: self.fail('Exception {0} was raised.'.format(str(err))) finally: c.disconnect() @skip_if_not_cookie_auth def test_bluemix_constructor_with_multiple_services(self): """ Test instantiating a client object using a VCAP_SERVICES environment variable that contains multiple services. """ instance_name = 'Cloudant NoSQL DB-lv' vcap_services = {'cloudantNoSQLDB': [ { 'credentials': { 'apikey': '1234api', 'host': urlparse(self.url).hostname, 'port': 443, 'url': self.url }, 'name': instance_name }, { 'credentials': { 'username': 'foo', 'password': 'bar', 'host': 'baz.com', 'port': 1234, 'url': 'https://foo:bar@baz.com:1234' }, 'name': 'Cloudant NoSQL DB-yu' } ]} # create Cloudant Bluemix client c = Cloudant.bluemix(vcap_services, instance_name=instance_name) try: c.connect() self.assertIsInstance(c, Cloudant) self.assertIsInstance(c.r_session, requests.Session) self.assertEqual(c.session()['userCtx']['name'], self.user) except Exception as err: self.fail('Exception {0} was raised.'.format(str(err))) finally: c.disconnect() def test_connect_headers(self): """ Test that the appropriate request headers are set """ try: self.client.connect() if (self.account): self.assertEqual( self.client.r_session.headers['X-Cloudant-User'], self.account ) agent = self.client.r_session.headers.get('User-Agent') ua_parts = agent.split('/') self.assertEqual(len(ua_parts), 6) self.assertEqual(ua_parts[0], 'python-cloudant') self.assertEqual(ua_parts[1], 
sys.modules['cloudant'].__version__) self.assertEqual(ua_parts[2], 'Python') self.assertEqual(ua_parts[3], '{0}.{1}.{2}'.format( sys.version_info[0], sys.version_info[1], sys.version_info[2])), self.assertEqual(ua_parts[4], os.uname()[0]), self.assertEqual(ua_parts[5], os.uname()[4]) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_connect_timeout(self): """ Test that a connect timeout occurs when instantiating a client object with a timeout of 10 ms. """ with self.assertRaises(ConnectTimeout) as cm: self.set_up_client(auto_connect=True, timeout=.01) self.assertTrue(str(cm.exception).find('timed out.')) def test_db_updates_infinite_feed_call(self): """ Test that infinite_db_updates() method call constructs and returns an InfiniteFeed object """ try: self.client.connect() db_updates = self.client.infinite_db_updates() self.assertIsInstance(db_updates, InfiniteFeed) self.assertEqual( db_updates._url, '/'.join([self.client.server_url, '_db_updates'])) self.assertIsInstance(db_updates._r_session, requests.Session) self.assertFalse(db_updates._raw_data) self.assertDictEqual(db_updates._options, {'feed': 'continuous'}) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_billing_data(self): """ Test the retrieval of billing data """ try: self.client.connect() now = datetime.datetime.now() expected = [ 'data_volume', 'total', 'start', 'end', 'http_heavy', 'http_light', 'bill_type' ] # Test using year and month year = now.year month = now.month data = self.client.bill(year, month) self.assertTrue(all(x in expected for x in data.keys())) #Test without year and month arguments del data data = self.client.bill() self.assertTrue(all(x in expected for x in data.keys())) finally: self.client.disconnect() def test_set_year_without_month_for_billing_data(self): """ Test raising an exception when retrieving billing data with only year parameter """ try: self.client.connect() year = 2016 with self.assertRaises(CloudantArgumentError) as cm: 
self.client.bill(year) expected = ('Invalid year and/or month supplied. ' 'Found: year - 2016, month - None') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() def test_set_month_without_year_for_billing_data(self): """ Test raising an exception when retrieving billing data with only month parameter """ try: self.client.connect() month = 1 with self.assertRaises(CloudantArgumentError) as cm: self.client.bill(None, month) expected = ('Invalid year and/or month supplied. ' 'Found: year - None, month - 1') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() def test_set_invalid_type_year_for_billing_data(self): """ Test raising an exception when retrieving billing data with a type string for the year parameter """ try: self.client.connect() year = 'foo' month = 1 with self.assertRaises(CloudantArgumentError) as cm: self.client.bill(year, month) expected = ('Invalid year and/or month supplied. ' 'Found: year - foo, month - 1') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() def test_set_year_with_invalid_month_for_billing_data(self): """ Test raising an exception when retrieving billing data with an invalid month parameter """ try: self.client.connect() year = 2016 month = 13 with self.assertRaises(CloudantArgumentError) as cm: self.client.bill(year, month) expected = ('Invalid year and/or month supplied. 
' 'Found: year - 2016, month - 13') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_volume_usage_data(self): """ Test the retrieval of volume usage data """ try: self.client.connect() now = datetime.datetime.now() expected = [ 'data_vol', 'granularity', 'start', 'end' ] # Test using year and month year = now.year month = now.month data = self.client.volume_usage(year, month) self.assertTrue(all(x in expected for x in data.keys())) #Test without year and month arguments del data data = self.client.volume_usage() self.assertTrue(all(x in expected for x in data.keys())) finally: self.client.disconnect() def test_set_year_without_month_for_volume_usage_data(self): """ Test raising an exception when retrieving volume usage data with only year parameter """ try: self.client.connect() year = 2016 with self.assertRaises(CloudantArgumentError) as cm: self.client.volume_usage(year) expected = ('Invalid year and/or month supplied. ' 'Found: year - 2016, month - None') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() def test_set_month_without_year_for_volume_usage_data(self): """ Test raising an exception when retrieving volume usage data with only month parameter """ try: self.client.connect() month = 1 with self.assertRaises(CloudantArgumentError) as cm: self.client.volume_usage(None, month) expected = ('Invalid year and/or month supplied. ' 'Found: year - None, month - 1') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() def test_set_invalid_type_year_for_volume_usage_data(self): """ Test raising an exception when retrieving volume usage data with a type string for the year parameter """ try: self.client.connect() year = 'foo' month = 1 with self.assertRaises(CloudantArgumentError) as cm: self.client.volume_usage(year, month) expected = ('Invalid year and/or month supplied. 
' 'Found: year - foo, month - 1') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() def test_set_year_with_invalid_month_for_volume_usage_data(self): """ Test raising an exception when retrieving volume usage data with an invalid month parameter """ try: self.client.connect() year = 2016 month = 13 with self.assertRaises(CloudantArgumentError) as cm: self.client.volume_usage(year, month) expected = ('Invalid year and/or month supplied. ' 'Found: year - 2016, month - 13') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_requests_usage_data(self): """ Test the retrieval of requests usage data """ try: self.client.connect() now = datetime.datetime.now() expected = [ 'requests', 'granularity', 'start', 'end' ] # Test using year and month year = now.year month = now.month data = self.client.requests_usage(year, month) self.assertTrue(all(x in expected for x in data.keys())) #Test without year and month arguments del data data = self.client.requests_usage() self.assertTrue(all(x in expected for x in data.keys())) finally: self.client.disconnect() def test_set_year_without_month_for_requests_usage_data(self): """ Test raising an exception when retrieving requests usage data with an invalid month parameter """ try: self.client.connect() year = 2016 with self.assertRaises(CloudantArgumentError) as cm: self.client.requests_usage(year) expected = ('Invalid year and/or month supplied. ' 'Found: year - 2016, month - None') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() def test_set_month_without_year_for_requests_usage_data(self): """ Test raising an exception when retrieving requests usage data with only month parameter """ try: self.client.connect() month = 1 with self.assertRaises(CloudantArgumentError) as cm: self.client.requests_usage(None, month) expected = ('Invalid year and/or month supplied. 
' 'Found: year - None, month - 1') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() def test_set_invalid_type_year_for_requests_usage_data(self): """ Test raising an exception when retrieving requests usage data with a type string for the year parameter """ try: self.client.connect() year = 'foo' month = 1 with self.assertRaises(CloudantArgumentError) as cm: self.client.requests_usage(year, month) expected = ('Invalid year and/or month supplied. ' 'Found: year - foo, month - 1') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() def test_set_year_with_invalid_month_for_requests_usage_data(self): """ Test raising an exception when retrieving requests usage data with only year parameter """ try: self.client.connect() year = 2016 month = 13 with self.assertRaises(CloudantArgumentError) as cm: self.client.requests_usage(year, month) expected = ('Invalid year and/or month supplied. ' 'Found: year - 2016, month - 13') self.assertEqual(str(cm.exception), expected) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_shared_databases(self): """ Test the retrieval of shared database list """ try: self.client.connect() self.assertIsInstance(self.client.shared_databases(), list) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_generate_api_key(self): """ Test the generation of an API key for this client account """ try: self.client.connect() expected = ['key', 'password', 'ok'] api_key = self.client.generate_api_key() self.assertTrue(all(x in expected for x in api_key.keys())) self.assertTrue(api_key['ok']) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_cors_configuration(self): """ Test the retrieval of the current CORS configuration for this client account """ try: self.client.connect() expected = ['allow_credentials', 'enable_cors', 'origins'] cors = self.client.cors_configuration() self.assertTrue(all(x in expected for x in cors.keys())) finally: 
self.client.disconnect() @skip_if_not_cookie_auth def test_cors_origins(self): """ Test the retrieval of the CORS origins list """ try: self.client.connect() origins = self.client.cors_origins() self.assertIsInstance(origins, list) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_disable_cors(self): """ Test disabling CORS (assuming CORS is enabled) """ try: self.client.connect() # Save original CORS settings save = self.client.cors_configuration() # Test CORS disable self.assertEqual(self.client.disable_cors(), {'ok': True}) # Restore original CORS settings self.client.update_cors_configuration( save['enable_cors'], save['allow_credentials'], save['origins'], True ) finally: self.client.disconnect() @skip_if_not_cookie_auth def test_update_cors_configuration(self): """ Test updating CORS configuration """ try: self.client.connect() # Save original CORS settings save = self.client.cors_configuration() # Test updating CORS settings, overwriting origins result = self.client.update_cors_configuration( True, True, ['https://ibm.com'], True) self.assertEqual(result, {'ok': True}) updated_cors = self.client.cors_configuration() self.assertTrue(updated_cors['enable_cors']) self.assertTrue(updated_cors['allow_credentials']) expected = ['https://ibm.com'] self.assertTrue(all(x in expected for x in updated_cors['origins'])) # Test updating CORS settings, adding to origins result = self.client.update_cors_configuration( True, True, ['https://ibm.cloudant.com'] ) self.assertEqual(result, {'ok': True}) del updated_cors updated_cors = self.client.cors_configuration() self.assertTrue(updated_cors['enable_cors']) self.assertTrue(updated_cors['allow_credentials']) expected.append('https://ibm.cloudant.com') self.assertTrue(all(x in expected for x in updated_cors['origins'])) # Restore original CORS settings self.client.update_cors_configuration( save['enable_cors'], save['allow_credentials'], save['origins'], True ) finally: self.client.disconnect() if __name__ 
#!/usr/bin/env python
# Copyright (C) 2016, 2018 IBM Corp. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
_cloud_foundry_tests_

Unit tests for the CloudFoundryService class.
"""
import json
import unittest

from cloudant._common_util import CloudFoundryService
from cloudant.error import CloudantException


class CloudFoundryServiceTests(unittest.TestCase):
    """Exercise VCAP_SERVICES parsing in CloudFoundryService."""

    def __init__(self, *args, **kwargs):
        super(CloudFoundryServiceTests, self).__init__(*args, **kwargs)
        # Single service entry carrying both an IAM API key and legacy
        # username/password credentials.
        self._test_vcap_services_single_legacy_credentials_enabled = json.dumps(
            {'cloudantNoSQLDB': [
                {
                    'name': 'Cloudant NoSQL DB 1',  # valid service with legacy creds enabled
                    'credentials': {
                        'apikey': '1234api',
                        'username': 'user-bluemix',
                        'password': 'password',
                        'port': 443,
                        'host': 'user-bluemix.cloudant.com'
                    }
                }
            ]})
        # Single service entry with an IAM API key and no password.
        self._test_vcap_services_single = json.dumps({'cloudantNoSQLDB': [
            {
                'name': 'Cloudant NoSQL DB 1',  # valid service
                'credentials': {
                    'apikey': '1234api',
                    'username': 'user-bluemix',
                    'port': 443,
                    'host': 'user-bluemix.cloudant.com'
                }
            }
        ]})
        # A mix of valid and deliberately broken service entries used to
        # drive the error-path tests below.
        self._test_legacy_vcap_services_multiple = json.dumps({'cloudantNoSQLDB': [
            {
                'name': 'Cloudant NoSQL DB 1',  # valid legacy service
                'credentials': {
                    'host': 'example.cloudant.com',
                    'password': 'pa$$w0rd01',
                    'port': 1234,
                    'username': 'example'
                }
            },
            {
                'name': 'Cloudant NoSQL DB 2',  # valid service, default port
                'credentials': {
                    'host': 'example.cloudant.com',
                    'password': 'pa$$w0rd01',
                    'username': 'example'
                }
            },
            {
                'name': 'Cloudant NoSQL DB 3',  # missing host
                'credentials': {
                    'password': 'pa$$w0rd01',
                    'port': 1234,
                    'username': 'example'
                }
            },
            {
                'name': 'Cloudant NoSQL DB 4',  # missing password
                'credentials': {
                    'host': 'example.cloudant.com',
                    'port': 1234,
                    'username': 'example'
                }
            },
            {
                'name': 'Cloudant NoSQL DB 5',  # missing username
                'credentials': {
                    'host': 'example.cloudant.com',
                    'password': 'pa$$w0rd01',
                    'port': 1234,
                }
            },
            {
                'name': 'Cloudant NoSQL DB 6',  # invalid credentials type
                'credentials': [
                    'example.cloudant.com',
                    'pa$$w0rd01',
                    'example'
                ]
            },
            {
                'name': 'Cloudant NoSQL DB 7',  # missing iam api key and creds
                'credentials': {
                    'host': 'example.cloudant.com',
                    'port': 1234,
                    'username': 'example'
                }
            },
            {
                'name': 'Cloudant NoSQL DB 8',  # valid service with IAM api
                'credentials': {
                    'apikey': '1234api',
                    'username': 'example',
                    'host': 'example.cloudant.com',
                    'port': 1234
                }
            },
        ]})
        # Dedicated plans expose a different top-level service name.
        self._test_vcap_services_dedicated = json.dumps({
            'cloudantNoSQLDB Dedicated': [  # dedicated service name
                {
                    'name': 'Cloudant NoSQL DB 1',  # valid service
                    'credentials': {
                        'host': 'example.cloudant.com',
                        'password': 'pa$$w0rd01',
                        'port': 1234,
                        'username': 'example'
                    }
                }
            ]
        })

    def test_get_vcap_service_legacy_creds_success(self):
        # A service with username/password credentials resolves by name.
        svc = CloudFoundryService(
            self._test_vcap_services_single_legacy_credentials_enabled,
            service_name='cloudantNoSQLDB'
        )
        self.assertEqual('Cloudant NoSQL DB 1', svc.name)

    def test_get_vcap_service_iam_api_no_creds_success(self):
        # IAM-only services expose the API key but no password attribute.
        svc = CloudFoundryService(
            self._test_vcap_services_single,
            service_name='cloudantNoSQLDB'
        )
        self.assertEqual('Cloudant NoSQL DB 1', svc.name)
        self.assertEqual('1234api', svc.iam_api_key)
        with self.assertRaises(AttributeError) as cm:
            svc.password
        self.assertEqual(
            "'CloudFoundryService' object has no attribute '_password'",
            str(cm.exception))

    def test_get_vcap_service_default_success_as_dict(self):
        # VCAP may be supplied as an already-parsed dict rather than JSON text.
        svc = CloudFoundryService(
            json.loads(
                self._test_vcap_services_single_legacy_credentials_enabled),
            service_name='cloudantNoSQLDB'
        )
        self.assertEqual('Cloudant NoSQL DB 1', svc.name)

    def test_get_vcap_service_default_failure_multiple_services(self):
        # Without an instance name, multiple entries are ambiguous.
        with self.assertRaises(CloudantException) as cm:
            CloudFoundryService(
                self._test_legacy_vcap_services_multiple,
                service_name='cloudantNoSQLDB'
            )
        self.assertEqual('Missing service in VCAP_SERVICES', str(cm.exception))

    def test_get_vcap_service_instance_host(self):
        svc = CloudFoundryService(
            self._test_legacy_vcap_services_multiple,
            instance_name='Cloudant NoSQL DB 1',
            service_name='cloudantNoSQLDB'
        )
        self.assertEqual('example.cloudant.com', svc.host)

    def test_get_vcap_service_instance_password(self):
        svc = CloudFoundryService(
            self._test_legacy_vcap_services_multiple,
            instance_name='Cloudant NoSQL DB 1',
            service_name='cloudantNoSQLDB'
        )
        self.assertEqual('pa$$w0rd01', svc.password)

    def test_get_vcap_service_instance_port(self):
        svc = CloudFoundryService(
            self._test_legacy_vcap_services_multiple,
            instance_name='Cloudant NoSQL DB 1',
            service_name='cloudantNoSQLDB'
        )
        self.assertEqual('1234', svc.port)

    def test_get_vcap_service_instance_port_default(self):
        # A service without an explicit port falls back to 443.
        svc = CloudFoundryService(
            self._test_legacy_vcap_services_multiple,
            instance_name='Cloudant NoSQL DB 2',
            service_name='cloudantNoSQLDB'
        )
        self.assertEqual('443', svc.port)

    def test_get_vcap_service_instance_url(self):
        svc = CloudFoundryService(
            self._test_legacy_vcap_services_multiple,
            instance_name='Cloudant NoSQL DB 1',
            service_name='cloudantNoSQLDB'
        )
        self.assertEqual('https://example.cloudant.com:1234', svc.url)

    def test_get_vcap_service_instance_username(self):
        svc = CloudFoundryService(
            self._test_legacy_vcap_services_multiple,
            instance_name='Cloudant NoSQL DB 1',
            service_name='cloudantNoSQLDB'
        )
        self.assertEqual('example', svc.username)

    def test_get_vcap_service_instance_iam_api_key(self):
        svc = CloudFoundryService(
            self._test_legacy_vcap_services_multiple,
            instance_name='Cloudant NoSQL DB 8',
            service_name='cloudantNoSQLDB'
        )
        self.assertEqual('1234api', svc.iam_api_key)

    def test_raise_error_for_missing_host(self):
        with self.assertRaises(CloudantException):
            CloudFoundryService(
                self._test_legacy_vcap_services_multiple,
                instance_name='Cloudant NoSQL DB 3',
                service_name='cloudantNoSQLDB'
            )

    def test_raise_error_for_missing_password(self):
        with self.assertRaises(CloudantException) as cm:
            CloudFoundryService(
                self._test_legacy_vcap_services_multiple,
                instance_name='Cloudant NoSQL DB 4',
                service_name='cloudantNoSQLDB'
            )
        self.assertEqual(
            'Invalid service: IAM API key or username/password credentials are required.',
            str(cm.exception)
        )

    def test_raise_error_for_missing_username(self):
        with self.assertRaises(CloudantException) as cm:
            CloudFoundryService(
                self._test_legacy_vcap_services_multiple,
                instance_name='Cloudant NoSQL DB 5',
                service_name='cloudantNoSQLDB'
            )
        self.assertEqual(
            "Invalid service: 'username' missing",
            str(cm.exception)
        )

    def test_raise_error_for_invalid_credentials_type(self):
        with self.assertRaises(CloudantException) as cm:
            CloudFoundryService(
                self._test_legacy_vcap_services_multiple,
                instance_name='Cloudant NoSQL DB 6',
                service_name='cloudantNoSQLDB'
            )
        self.assertEqual(
            'Failed to decode VCAP_SERVICES service credentials',
            str(cm.exception)
        )

    def test_raise_error_for_missing_iam_api_key_and_credentials(self):
        with self.assertRaises(CloudantException) as cm:
            CloudFoundryService(
                self._test_legacy_vcap_services_multiple,
                instance_name='Cloudant NoSQL DB 7',
                service_name='cloudantNoSQLDB'
            )
        self.assertEqual(
            'Invalid service: IAM API key or username/password credentials are required.',
            str(cm.exception)
        )

    def test_raise_error_for_missing_service(self):
        with self.assertRaises(CloudantException) as cm:
            CloudFoundryService(
                self._test_legacy_vcap_services_multiple,
                instance_name='Cloudant NoSQL DB 9',
                service_name='cloudantNoSQLDB'
            )
        self.assertEqual('Missing service in VCAP_SERVICES', str(cm.exception))

    def test_raise_error_for_invalid_vcap(self):
        with self.assertRaises(CloudantException) as cm:
            CloudFoundryService('{', 'Cloudant NoSQL DB 1')  # invalid JSON
        self.assertEqual('Failed to decode VCAP_SERVICES JSON',
                         str(cm.exception))

    def test_get_vcap_service_with_dedicated_service_name_success(self):
        svc = CloudFoundryService(
            self._test_vcap_services_dedicated,
            service_name='cloudantNoSQLDB Dedicated'
        )
        self.assertEqual('Cloudant NoSQL DB 1', svc.name)
""" _database_partition_tests_ """ from cloudant.design_document import DesignDocument from cloudant.index import Index, SpecialIndex from nose.plugins.attrib import attr from .unit_t_db_base import UnitTestDbBase @attr(db=['cloudant']) class DatabasePartitionTests(UnitTestDbBase): def setUp(self): super(DatabasePartitionTests, self).setUp() self.db_set_up(partitioned=True) def tearDown(self): self.db_tear_down() super(DatabasePartitionTests, self).tearDown() def test_is_partitioned_database(self): self.assertTrue(self.db.metadata()['props']['partitioned']) def test_create_partitioned_design_document(self): ddoc_id = 'empty_ddoc' ddoc = DesignDocument(self.db, ddoc_id, partitioned=True) ddoc.save() r = self.db.r_session.get(ddoc.document_url) r.raise_for_status() self.assertTrue(r.json()['options']['partitioned']) def test_create_non_partitioned_design_document(self): ddoc_id = 'empty_ddoc' ddoc = DesignDocument(self.db, ddoc_id, partitioned=False) ddoc.save() r = self.db.r_session.get(ddoc.document_url) r.raise_for_status() self.assertFalse(r.json()['options']['partitioned']) def test_partitioned_all_docs(self): for partition_key in self.populate_db_with_partitioned_documents(5, 25): docs = self.db.partitioned_all_docs(partition_key) self.assertEqual(len(docs['rows']), 25) for doc in docs['rows']: self.assertTrue(doc['id'].startswith(partition_key + ':')) def test_partition_metadata(self): for partition_key in self.populate_db_with_partitioned_documents(5, 25): meta = self.db.partition_metadata(partition_key) self.assertEqual(meta['partition'], partition_key) self.assertEqual(meta['doc_count'], 25) def test_partitioned_search(self): ddoc = DesignDocument(self.db, 'partitioned_search', partitioned=True) ddoc.add_search_index( 'search1', 'function(doc) { index("id", doc._id, {"store": true}); }' ) ddoc.save() for partition_key in self.populate_db_with_partitioned_documents(2, 10): results = self.db.get_partitioned_search_result( partition_key, ddoc['_id'], 
'search1', query='*:*') i = 0 for result in results['rows']: print(result) self.assertTrue(result['id'].startswith(partition_key + ':')) i += 1 self.assertEqual(i, 10) def test_get_partitioned_index(self): index_name = 'test_partitioned_index' self.db.create_query_index(index_name=index_name, fields=['foo']) results = self.db.get_query_indexes() self.assertEqual(len(results), 2) index_all_docs = results[0] self.assertEqual(index_all_docs.name, '_all_docs') self.assertEqual(type(index_all_docs), SpecialIndex) self.assertFalse(index_all_docs.partitioned) index_partitioned = results[1] self.assertEqual(index_partitioned.name, index_name) self.assertEqual(type(index_partitioned), Index) self.assertTrue(index_partitioned.partitioned) def test_partitioned_query(self): self.db.create_query_index(fields=['foo']) for partition_key in self.populate_db_with_partitioned_documents(2, 10): results = self.db.get_partitioned_query_result( partition_key, selector={'foo': {'$eq': 'bar'}}) i = 0 for result in results: self.assertTrue(result['_id'].startswith(partition_key + ':')) i += 1 self.assertEqual(i, 10) def test_partitioned_view(self): ddoc = DesignDocument(self.db, 'partitioned_view', partitioned=True) ddoc.add_view('view1', 'function(doc) { emit(doc._id, 1); }') ddoc.save() for partition_key in self.populate_db_with_partitioned_documents(2, 10): results = self.db.get_partitioned_view_result( partition_key, ddoc['_id'], 'view1') i = 0 for result in results: self.assertTrue( result['id'].startswith(partition_key + ':')) i += 1 self.assertEqual(i, 10) ================================================ FILE: tests/unit/database_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2019 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
_database_tests_

database module - Unit tests for CouchDatabase and CloudantDatabase classes

See configuration options for environment variables in unit_t_db_base
module docstring.
"""
import os
import unittest
import uuid

import mock
import requests

from cloudant._2to3 import UNICHR
from cloudant._common_util import response_to_json_dict
from cloudant.design_document import DesignDocument
from cloudant.document import Document
from cloudant.error import CloudantArgumentError, CloudantDatabaseException
from cloudant.feed import Feed, InfiniteFeed
from cloudant.index import Index, TextIndex, SpecialIndex
from cloudant.result import Result, QueryResult
from cloudant.security_document import SecurityDocument
from nose.plugins.attrib import attr

from tests.unit._test_util import LONG_NUMBER
from .unit_t_db_base import skip_if_not_cookie_auth, UnitTestDbBase, skip_if_iam
from .. import unicode_


class CloudantDatabaseExceptionTests(unittest.TestCase):
    """
    Ensure CloudantDatabaseException functions as expected.
    """

    def test_raise_without_code(self):
        """
        Ensure that a default exception/code is used if none is provided.
        """
        with self.assertRaises(CloudantDatabaseException) as cm:
            raise CloudantDatabaseException()
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_using_invalid_code(self):
        """
        Ensure that a default exception/code is used if invalid code is
        provided.
        """
        with self.assertRaises(CloudantDatabaseException) as cm:
            raise CloudantDatabaseException('foo')
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_without_args(self):
        """
        Ensure that a default exception/code is used if the message requested
        by the code provided requires an argument list and none is provided.
        """
        with self.assertRaises(CloudantDatabaseException) as cm:
            raise CloudantDatabaseException(400)
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_with_proper_code_and_args(self):
        """
        Ensure that the requested exception is raised.
        """
        with self.assertRaises(CloudantDatabaseException) as cm:
            raise CloudantDatabaseException(400, 'foo')
        self.assertEqual(cm.exception.status_code, 400)


@attr(db=['cloudant', 'couch'])
class DatabaseTests(UnitTestDbBase):
    """
    CouchDatabase/CloudantDatabase unit tests
    """

    def setUp(self):
        """
        Set up test attributes
        """
        super(DatabaseTests, self).setUp()
        self.db_set_up()

    def tearDown(self):
        """
        Reset test attributes
        """
        self.db_tear_down()
        super(DatabaseTests, self).tearDown()

    def test_constructor(self):
        """
        Test instantiating a database
        """
        self.assertEqual(self.db.client, self.client)
        self.assertEqual(self.db.database_name, self.test_dbname)
        self.assertEqual(self.db.r_session, self.client.r_session)
        self.assertIsInstance(self.db.result, Result)

    def test_bulk_docs_uses_custom_encoder(self):
        """
        Test that the bulk_docs method uses the custom encoder
        """
        self.set_up_client(auto_connect=True, encoder="AEncoder")
        docs = [
            {'_id': 'julia{0:03d}'.format(i), 'name': 'julia', 'age': i}
            for i in range(3)
        ]
        database = self.client[self.test_dbname]
        with self.assertRaises(TypeError):
            # since the encoder is a str a type error should be thrown.
            database.bulk_docs(docs)

    def test_missing_revisions_uses_custom_encoder(self):
        """
        Test that missing_revisions uses the custom encoder.
        """
        revs = ['1-1', '2-1', '3-1']
        self.set_up_client(auto_connect=True, encoder="AEncoder")
        database = self.client[self.test_dbname]
        with self.assertRaises(TypeError):
            # since the encoder is a str a type error should be thrown.
            database.missing_revisions('no-such-doc', *revs)

    def test_revs_diff_uses_custom_encoder(self):
        """
        Test that revisions_diff uses the custom encoder.
        """
        revs = ['1-1', '2-1', '3-1']
        self.set_up_client(auto_connect=True, encoder="AEncoder")
        database = self.client[self.test_dbname]
        with self.assertRaises(TypeError):
            database.revisions_diff('no-such-doc', *revs)

    def test_retrieve_db_url(self):
        """
        Test retrieving the database URL
        """
        self.assertEqual(
            self.db.database_url,
            '/'.join((self.client.server_url, self.test_dbname))
        )

    @skip_if_not_cookie_auth
    def test_retrieve_creds(self):
        """
        Test retrieving client credentials. The client credentials are None if
        CouchDB Admin Party mode was selected.
        """
        if self.client.admin_party:
            self.assertIsNone(self.db.creds)
        else:
            expected_keys = ['basic_auth', 'user_ctx']
            self.assertTrue(
                all(x in expected_keys for x in self.db.creds.keys())
            )
            self.assertTrue(self.db.creds['basic_auth'].startswith('Basic'))
            self.assertEqual(self.db.creds['user_ctx']['name'], self.user)

    def test_exists(self):
        """
        Tests that the result of True is expected when the database exists,
        and False is expected when the database is nonexistent remotely.
        """
        self.assertTrue(self.db.exists())
        # Construct a database object that does not exist remotely
        fake_db = self.client._DATABASE_CLASS(self.client, 'no-such-db')
        self.assertFalse(fake_db.exists())

    def test_exists_raises_httperror(self):
        """
        Test database exists raises an HTTPError.
        """
        # Mock HTTPError when running against CouchDB and Cloudant
        resp = requests.Response()
        resp.status_code = 400
        self.client.r_session.head = mock.Mock(return_value=resp)
        with self.assertRaises(requests.HTTPError) as cm:
            self.db.exists()
        err = cm.exception
        self.assertEqual(err.response.status_code, 400)
        self.client.r_session.head.assert_called_with(self.db.database_url)

    def test_create_db_delete_db(self):
        """
        Test creating and deleting a database
        """
        dbname = self.dbname()
        db = self.client._DATABASE_CLASS(self.client, dbname)
        try:
            db.create()
            self.assertTrue(db.exists())
            # No issue should arise if attempting to create existing database
            db_2 = db.create()
            self.assertEqual(db, db_2)
            # If we use throw_on_exists=True, it will raise a
            # CloudantDatabaseException if the database already exists.
            with self.assertRaises(CloudantDatabaseException) as cm:
                db.create(throw_on_exists=True)
            self.assertEqual(cm.exception.status_code, 412)
        except Exception as err:
            self.fail('Exception {0} was raised.'.format(str(err)))
        finally:
            db.delete()
            self.assertFalse(db.exists())

    def test_delete_exception(self):
        """
        Test deleting a database that does not exist
        """
        try:
            fake_db = self.client._DATABASE_CLASS(self.client, 'no-such-db')
            fake_db.delete()
            self.fail('Above statement should raise an Exception')
        except requests.HTTPError as err:
            self.assertEqual(err.response.status_code, 404)

    def test_retrieve_db_metadata(self):
        """
        Test retrieving the database metadata information.  The metadata
        values may differ slightly each time it is retrieved such as is the
        case with the update sequence, however, the metadata keys should
        always remain the same.  Therefore comparing keys is a valid test
        of this functionality.
        """
        resp = self.db.r_session.get(
            '/'.join((self.client.server_url, self.test_dbname)))
        expected = response_to_json_dict(resp)
        actual = self.db.metadata()
        self.assertListEqual(list(actual.keys()), list(expected.keys()))

    def test_retrieve_document_count(self):
        """
        Test retrieving the number of documents currently in the database
        """
        self.populate_db_with_documents(6)
        self.assertEqual(self.db.doc_count(), 6)

    def test_create_document_with_id(self):
        """
        Test creating a document using a supplied document id
        """
        data = {'_id': 'julia06', 'name': 'julia', 'age': 6}
        doc = self.db.create_document(data)
        self.assertEqual(self.db['julia06'], doc)
        self.assertEqual(self.db.get('julia06'), doc)
        self.assertEqual(self.db.get('julia06', remote=True), doc)
        self.assertEqual(doc['_id'], data['_id'])
        self.assertTrue(doc['_rev'].startswith('1-'))
        self.assertEqual(doc['name'], data['name'])
        self.assertEqual(doc['age'], data['age'])
        self.assertIsInstance(doc, Document)
        self.assertIsInstance(self.db['julia06'], Document)
        try:
            self.db.create_document(data, throw_on_exists=True)
            self.fail('Above statement should raise a CloudantException')
        except CloudantDatabaseException as err:
            self.assertEqual(
                str(err),
                'Document with id julia06 already exists.'
            )

    def test_get_non_existing_document_from_remote(self):
        """
        Test dict's get on non existing document from remote.
        """
        doc = self.db.get('non-existing', remote=True)
        self.assertIsNone(doc)

    def test_get_non_existing_document_from_cache(self):
        """
        Test dict's get on non existing document from cache.
        """
        doc = self.db.get('non-existing')
        self.assertIsNone(doc)

    def test_get_document_from_cache(self):
        """
        Test dict's get on a document from cache.
        """
        doc = Document(self.db, document_id='julia06')
        self.db['julia06'] = doc
        self.assertEqual(self.db.get('julia06'), doc)
        # doc is fetched from the local dict preferentially to remote even
        # with remote=True
        self.assertEqual(self.db.get('julia06', remote=True), doc)
        self.assertEqual(self.db['julia06'], doc)

    def test_get_document_from_remote(self):
        """
        Test dict's get on a document from remote.
        """
        data = {'_id': 'julia06', 'name': 'julia06', 'age': 6}
        doc = self.db.create_document(data)
        self.db.clear()
        self.assertIsNone(self.db.get('julia06'))
        self.assertEqual(self.db.get('julia06', remote=True), doc)
        self.assertEqual(self.db['julia06'], doc)

    def test_create_document_that_already_exists(self):
        """
        Test creating a document that already exists
        """
        data = {'_id': 'julia'}
        doc = self.db.create_document(data)
        self.assertEqual(self.db['julia'], doc)
        self.assertEqual(self.db.get('julia'), doc)
        self.assertEqual(self.db.get('julia', remote=True), doc)
        self.assertTrue(doc['_rev'].startswith('1-'))
        # attempt to recreate document
        self.db.create_document(data, throw_on_exists=False)

    def test_create_document_without_id(self):
        """
        Test creating a document without supplying a document id
        """
        data = {'name': 'julia', 'age': 6}
        doc = self.db.create_document(data)
        self.assertEqual(self.db[doc['_id']], doc)
        self.assertEqual(self.db.get(doc['_id']), doc)
        self.assertEqual(self.db.get(doc['_id'], remote=True), doc)
        self.assertTrue(doc['_rev'].startswith('1-'))
        self.assertEqual(doc['name'], data['name'])
        self.assertEqual(doc['age'], data['age'])
        self.assertIsInstance(doc, Document)
        self.assertIsInstance(self.db[doc['_id']], Document)

    def test_create_design_document(self):
        """
        Test creating a document using a supplied document id
        """
        data = {'_id': '_design/julia06', 'name': 'julia', 'age': 6}
        doc = self.db.create_document(data)
        self.assertEqual(self.db['_design/julia06'], doc)
        self.assertEqual(self.db.get('_design/julia06'), doc)
        self.assertEqual(self.db.get('_design/julia06', remote=True), doc)
        self.assertEqual(doc['_id'], data['_id'])
        self.assertTrue(doc['_rev'].startswith('1-'))
        self.assertEqual(doc['name'], data['name'])
        self.assertEqual(doc['age'], data['age'])
        self.assertEqual(doc.views, dict())
        self.assertIsInstance(doc, DesignDocument)
        self.assertIsInstance(self.db['_design/julia06'], DesignDocument)

    def test_create_empty_document(self):
        """
        Test creating an empty document
        """
        empty_doc = self.db.new_document()
        self.assertEqual(self.db[empty_doc['_id']], empty_doc)
        self.assertEqual(self.db.get(empty_doc['_id']), empty_doc)
        self.assertEqual(
            self.db.get(empty_doc['_id'], remote=True), empty_doc)
        self.assertTrue(all(x in ['_id', '_rev'] for x in empty_doc.keys()))
        self.assertTrue(empty_doc['_rev'].startswith('1-'))

    def test_retrieve_design_documents(self):
        """
        Test retrieving all design documents
        """
        map_func = 'function(doc) {\n emit(doc._id, 1); \n}'
        data = {'_id': '_design/ddoc01',
                'views': {'view01': {"map": map_func}}}
        ddoc1 = self.db.create_document(data)
        data = {'_id': '_design/ddoc02',
                'views': {'view02': {"map": map_func}}}
        ddoc2 = self.db.create_document(data)
        raw_ddocs = self.db.design_documents()
        self.assertEqual(len(raw_ddocs), 2)
        self.assertTrue(
            all(x in [raw_ddocs[0]['key'], raw_ddocs[1]['key']]
                for x in self.db.keys())
        )
        self.assertTrue(
            all(x in [raw_ddocs[0]['id'], raw_ddocs[1]['id']]
                for x in self.db.keys())
        )
        self.assertTrue(
            all(x in [raw_ddocs[0]['doc'], raw_ddocs[1]['doc']]
                for x in [ddoc1, ddoc2])
        )

    def test_retrieve_design_document_list(self):
        """
        Test retrieving a list of design document names
        """
        map_func = 'function(doc) {\n emit(doc._id, 1); \n}'
        data = {'_id': '_design/ddoc01',
                'views': {'view01': {"map": map_func}}}
        self.db.create_document(data)
        data = {'_id': '_design/ddoc02',
                'views': {'view02': {"map": map_func}}}
        self.db.create_document(data)
        ddoc_list = self.db.list_design_documents()
        self.assertTrue(all(x in ddoc_list for x in self.db.keys()))

    def test_retrieve_design_document(self):
        """
        Test retrieve a specific design document
        """
        # Get an empty design document object that does not exist remotely
        local_ddoc = self.db.get_design_document('_design/ddoc01')
        self.assertEqual(local_ddoc, {'_id': '_design/ddoc01',
                                      'indexes': {},
                                      'options': {'partitioned': False},
                                      'views': {},
                                      'lists': {},
                                      'shows': {}})
        # Add the design document to the database
        map_func = 'function(doc) {\n emit(doc._id, 1); \n}'
        local_ddoc.add_view('view01', map_func)
        local_ddoc.save()
        # Get the recently created design document that now exists remotely
        ddoc = self.db.get_design_document('_design/ddoc01')
        self.assertEqual(ddoc, local_ddoc)

    @skip_if_not_cookie_auth
    def test_get_security_document(self):
        """
        Test retrieving the database security document
        """
        self.load_security_document_data()
        sdoc = self.db.get_security_document()
        self.assertIsInstance(sdoc, SecurityDocument)
        self.assertDictEqual(sdoc, self.sdoc)

    def test_retrieve_view_results(self):
        """
        Test retrieving Result wrapped output from a design document view
        """
        map_func = 'function(doc) {\n emit(doc._id, 1); \n}'
        data = {'_id': '_design/ddoc01',
                'views': {'view01': {"map": map_func}}}
        self.db.create_document(data)
        self.populate_db_with_documents()
        # Test with default Result
        rslt = self.db.get_view_result('_design/ddoc01', 'view01')
        self.assertIsInstance(rslt, Result)
        self.assertEqual(rslt[:1], rslt['julia000'])
        # Test with custom Result
        rslt = self.db.get_view_result(
            '_design/ddoc01', 'view01', descending=True, reduce=False)
        self.assertIsInstance(rslt, Result)
        self.assertEqual(rslt[:1], rslt['julia099'])

    # NOTE(review): test_retrieve_grouped_view_result_with_page_size is cut
    # off mid-docstring at the end of this chunk; it continues beyond this
    # view and is intentionally not reproduced here.
""" map_func = 'function(doc) {\n emit(Math.floor(doc.age / 2), 1); \n}' data = {'_id': '_design/ddoc01','views': {'view01': {"map": map_func, "reduce": "_count"}}} self.db.create_document(data) self.populate_db_with_documents(5) rslt = self.db.get_view_result( '_design/ddoc01', 'view01', group=True, page_size=1) self.assertIsInstance(rslt, Result) i = 0 for row in rslt: self.assertIsNotNone(row) self.assertEqual(row['key'], i) i += 1 def test_retrieve_raw_view_results(self): """ Test retrieving raw output from a design document view """ map_func = 'function(doc) {\n emit(doc._id, 1); \n}' data = {'_id': '_design/ddoc01','views': {'view01': {"map": map_func}}} self.db.create_document(data) self.populate_db_with_documents() raw_rslt = self.db.get_view_result( '_design/ddoc01', 'view01', raw_result=True) self.assertIsInstance(raw_rslt, dict) self.assertEqual(len(raw_rslt.get('rows')), 100) def test_all_docs_post(self): """ Test the all_docs POST request functionality using keys param """ # Create 200 documents with ids julia000, julia001, julia002, ..., julia199 self.populate_db_with_documents(200) # Generate keys list for every other document created # with ids julia000, julia002, julia004, ..., julia198 keys_list = ['julia{0:03d}'.format(i) for i in range(0, 200, 2)] self.assertEqual(len(keys_list), 100) rows = self.db.all_docs(keys=keys_list).get('rows') self.assertEqual(len(rows), 100) keys_returned = [row['key'] for row in rows] self.assertTrue(all(x in keys_returned for x in keys_list)) def test_all_docs_post_empty_key_list(self): """ Test the all_docs POST request functionality using empty keys param """ self.populate_db_with_documents() # Request all_docs using an empty key list rows = self.db.all_docs(keys=[]).get('rows') self.assertEqual(len(rows), 0) def test_all_docs_post_multiple_params(self): """ Test the all_docs POST request functionality using keys and other params """ # Create 200 documents with ids julia000, julia001, julia002, ..., julia199 
self.populate_db_with_documents(200) # Generate keys list for every other document created # with ids julia000, julia002, julia004, ..., julia198 keys_list = ['julia{0:03d}'.format(i) for i in range(0, 200, 2)] self.assertEqual(len(keys_list), 100) data = self.db.all_docs(limit=3, skip=10, keys=keys_list) self.assertEqual(len(data.get('rows')), 3) self.assertEqual(data['rows'][0]['key'], 'julia020') self.assertEqual(data['rows'][1]['key'], 'julia022') self.assertEqual(data['rows'][2]['key'], 'julia024') def test_all_docs_get(self): """ Test the all_docs GET request functionality """ self.populate_db_with_documents() data = self.db.all_docs(limit=3, skip=10) self.assertEqual(len(data.get('rows')), 3) self.assertEqual(data['rows'][0]['key'], 'julia010') self.assertEqual(data['rows'][1]['key'], 'julia011') self.assertEqual(data['rows'][2]['key'], 'julia012') def test_all_docs_get_with_long_type(self): """ Test the all_docs GET request functionality """ self.populate_db_with_documents() data = self.db.all_docs(limit=LONG_NUMBER, skip=10) self.assertEqual(len(data.get('rows')), 1) self.assertEqual(data['rows'][0]['key'], 'julia010') data = self.db.all_docs(limit=1, skip=LONG_NUMBER) self.assertEqual(len(data.get('rows')), 1) def test_all_docs_get_uses_custom_encoder(self): """ Test that all_docs uses the custom encoder. 
""" self.set_up_client(auto_connect=True, encoder="AEncoder") database = self.client[self.test_dbname] with self.assertRaises(CloudantArgumentError): database.all_docs(endkey=['foo', 10]) def test_custom_result_context_manager(self): """ Test using the database custom result context manager """ self.populate_db_with_documents() with self.db.custom_result(startkey='julia010', endkey='julia012') as rslt: self.assertIsInstance(rslt, Result) keys_returned = [i['key'] for i in rslt] expected_keys = ['julia010', 'julia011', 'julia012'] self.assertTrue(all(x in keys_returned for x in expected_keys)) def test_keys(self): """ Test retrieving the document keys from the database """ self.assertEqual(list(self.db.keys()), []) self.populate_db_with_documents(3) self.assertEqual( self.db.keys(remote=True), ['julia000', 'julia001', 'julia002'] ) def test_doc_id_in_db(self): """ Test checking if a document exists in a DB with in operator """ self.populate_db_with_documents(1) self.assertTrue('julia000' in self.db) def test_doc_id_not_in_db(self): """ Test checking if a document exists in a DB with in operator """ self.populate_db_with_documents(1) self.assertFalse('julia001' in self.db) def test_get_non_existing_doc_via_getitem(self): """ Test __getitem__ when retrieving a non-existing document """ try: doc = self.db['no_such_doc'] self.fail('Above statement should raise a KeyError') except KeyError: pass def test_get_db_via_getitem(self): """ Test __getitem__ when retrieving a document """ # Add a design document map_func = 'function(doc) {\n emit(doc._id, 1); \n}' expected_ddoc = self.db.get_design_document('_design/ddoc01') expected_ddoc.add_view('view01', map_func) expected_ddoc.save() # Add three standard documents self.populate_db_with_documents(3) # Test __get_item__ for standard document doc = self.db['julia001'] self.assertIsInstance(doc, Document) self.assertEqual(doc.get('_id'), 'julia001') self.assertTrue(doc.get('_rev').startswith('1-')) 
self.assertEqual(doc.get('name'), 'julia') self.assertEqual(doc.get('age'), 1) # Test __get_item__ for design document ddoc = self.db['_design/ddoc01'] self.assertIsInstance(ddoc, DesignDocument) self.assertTrue(ddoc, expected_ddoc) def test_document_iteration_under_fetch_limit(self): """ Test __iter__ works as expected when the number of documents in the database is less than the database fetch limit """ docs = [] # Check iterating when no documents exist for doc in self.db: self.fail('There should be no documents in the database yet!!') # Check that iteration yields appropriate contents self.populate_db_with_documents(3) age = 0 for doc in self.db: self.assertIsInstance(doc, Document) self.assertEqual(doc['_id'], 'julia{0:03d}'.format(age)) self.assertTrue(doc['_rev'].startswith('1-')) self.assertEqual(doc['name'], 'julia') self.assertEqual(doc['age'], age) docs.append(doc) age += 1 self.assertEqual(len(docs), 3) # Check that the local database object has been populated # with the appropriate documents expected_keys = ['julia{0:03d}'.format(i) for i in range(3)] self.assertTrue(all(x in self.db.keys()for x in expected_keys)) for id in self.db.keys(): doc = self.db.get(id) self.assertIsInstance(doc, Document) self.assertEqual(doc['_id'], id) self.assertTrue(doc['_rev'].startswith('1-')) self.assertEqual(doc['name'], 'julia') self.assertEqual(doc['age'], int(id[len(id) - 3 : len(id)])) def test_document_iteration_over_fetch_limit(self): """ Test __iter__ works as expected when the number of documents in the database is more than the database fetch limit """ docs = [] # Check iterating when no documents exist for doc in self.db: self.fail('There should be no documents in the database yet!!') # Check that iteration yields appropriate contents self.populate_db_with_documents(103) age = 0 for doc in self.db: self.assertIsInstance(doc, Document) self.assertEqual(doc['_id'], 'julia{0:03d}'.format(age)) self.assertTrue(doc['_rev'].startswith('1-')) 
self.assertEqual(doc['name'], 'julia') self.assertEqual(doc['age'], age) docs.append(doc) age += 1 self.assertEqual(len(docs), 103) # Check that the local database object has been populated # with the appropriate documents expected_keys = ['julia{0:03d}'.format(i) for i in range(103)] self.assertTrue(all(x in self.db.keys()for x in expected_keys)) for id in self.db.keys(): doc = self.db.get(id) self.assertIsInstance(doc, Document) self.assertEqual(doc['_id'], id) self.assertTrue(doc['_rev'].startswith('1-')) self.assertEqual(doc['name'], 'julia') self.assertEqual(doc['age'], int(id[len(id) - 3: len(id)])) def test_document_iteration_completeness(self): """ Test __iter__ works as expected, fetching all documents from the database. """ for _ in self.db: self.fail('There should be no documents in the database yet!!') # sample code point ranges include_ranges = [ (0x0023, 0x0026), (0x00A1, 0x00AC), (0x0370, 0x0377), (0x037A, 0x037E), (0x0384, 0x038A), (0x16A0, 0x16F0), (0x2C60, 0x2C7F) ] all_docs = [{'_id': UNICHR(i) + UNICHR(j)} for a, b in include_ranges for i in range(a, b) for j in range(a, b)] batch_size = 500 for i in range(0, len(all_docs), batch_size): self.db.bulk_docs(all_docs[i:i+batch_size]) doc_count = 0 for i, doc in enumerate(self.db): doc_count += 1 self.assertEqual(doc['_id'], all_docs[i]['_id']) self.assertEqual(doc_count, len(all_docs)) def test_document_iteration_returns_valid_documents(self): """ This test will check that the __iter__ method returns documents that are valid Document or DesignDocument objects and that they can be managed remotely. In this test we will delete the documents as part of the test to ensure that remote management is working as expected and confirming that the documents are valid. 
        """
        self.populate_db_with_documents(3)
        with DesignDocument(self.db, '_design/ddoc001') as ddoc:
            ddoc.add_view('view001', 'function (doc) {\n emit(doc._id, 1);\n}')
        docs = []
        ddocs = []
        for doc in self.db:
            # A valid document must have a document_url
            self.assertEqual(
                doc.document_url,
                '/'.join((self.db.database_url, doc['_id']))
            )
            if isinstance(doc, DesignDocument):
                self.assertEqual(doc['_id'], '_design/ddoc001')
                ddocs.append(doc)
            elif isinstance(doc, Document):
                self.assertTrue(
                    doc['_id'] in ['julia000', 'julia001', 'julia002']
                )
                docs.append(doc)
            doc.delete()
        # Confirm successful deletions
        for doc in self.db:
            self.fail('All documents should have been deleted!!!')
        # Confirm that the correct number of Document (3) and DesignDocument (1)
        # objects were returned
        self.assertEqual(len(docs), 3)
        self.assertEqual(len(ddocs), 1)

    def test_bulk_docs_creation(self):
        """
        Test creation of documents in bulk
        """
        docs = [
            {'_id': 'julia{0:03d}'.format(i), 'name': 'julia', 'age': i}
            for i in range(3)
        ]
        results = self.db.bulk_docs(docs)
        self.assertEqual(len(results), 3)
        i = 0
        for result in results:
            self.assertEqual(result['id'], 'julia{0:03d}'.format(i))
            self.assertTrue(result['rev'].startswith('1-'))
            i += 1

    def test_bulk_docs_update(self):
        """
        Test update of documents in bulk
        """
        self.populate_db_with_documents(3)
        docs = []
        for doc in self.db:
            doc['name'] = 'jules'
            docs.append(doc)
        results = self.db.bulk_docs(docs)
        self.assertEqual(len(results), 3)
        i = 0
        for result in results:
            self.assertEqual(result['id'], 'julia{0:03d}'.format(i))
            # revisions bump to 2- after the bulk update
            self.assertTrue(result['rev'].startswith('2-'))
            i += 1
        age = 0
        for doc in self.db:
            self.assertEqual(doc['_id'], 'julia{0:03d}'.format(age))
            self.assertTrue(doc['_rev'].startswith('2-'))
            self.assertEqual(doc['name'], 'jules')
            self.assertEqual(doc['age'], age)
            age += 1
        self.assertEqual(age, 3)

    def test_missing_revisions(self):
        """
        Test retrieving missing revisions
        """
        doc = self.db.create_document(
            {'_id': 'julia006', 'name': 'julia', 'age': 6}
        )
        # Test when the doc is not found
        revs = ['1-1', '2-1', '3-1']
        self.assertEqual(self.db.missing_revisions('no-such-doc', *revs), revs)
        # Test all revs not found
        self.assertEqual(self.db.missing_revisions('julia006', *revs), revs)
        # Test when some revs not found
        self.assertEqual(
            self.db.missing_revisions('julia006', doc['_rev'], *revs), revs
        )
        # Test no missing revs
        self.assertEqual(self.db.missing_revisions('julia006', doc['_rev']), [])

    def test_revisions_diff(self):
        """
        Test retrieving differences in revisions
        """
        doc = self.db.create_document(
            {'_id': 'julia006', 'name': 'julia', 'age': 6}
        )
        # Test when the doc is not found
        revs = ['1-1', '2-1', '3-1']
        self.assertEqual(
            self.db.revisions_diff('no-such-doc', *revs),
            {'no-such-doc': {'missing': revs}}
        )
        # Test differences
        self.assertEqual(
            self.db.revisions_diff('julia006', *revs),
            {'julia006': {'missing': revs,
                          'possible_ancestors': [doc['_rev']]}}
        )
        # Test no differences
        self.assertEqual(self.db.revisions_diff('julia006', doc['_rev']), {})

    @mock.patch('cloudant._client_session.ClientSession.request')
    def test_get_set_revision_limit(self, m_req):
        """
        Test setting and getting revision limits
        """
        # Setup mock responses.
        mock_200_get_1 = mock.MagicMock()
        type(mock_200_get_1).status_code = mock.PropertyMock(return_value=200)
        type(mock_200_get_1).text = mock.PropertyMock(return_value='4321')
        mock_200_get_2 = mock.MagicMock()
        type(mock_200_get_2).status_code = mock.PropertyMock(return_value=200)
        type(mock_200_get_2).text = mock.PropertyMock(return_value='1234')
        mock_200_set = mock.MagicMock()
        type(mock_200_set).status_code = mock.PropertyMock(return_value=200)
        type(mock_200_set).text = mock.PropertyMock(return_value='{"ok":true}')
        # responses are consumed in get / set / get order
        m_req.side_effect = [mock_200_get_1, mock_200_set, mock_200_get_2]
        # Get current revisions limit.
        self.assertEqual(self.db.get_revision_limit(), 4321)
        # Set new revisions limit.
        self.assertEqual(self.db.set_revision_limit(1234), {'ok': True})
        # Get new revisions limit.
self.assertEqual(self.db.get_revision_limit(), 1234) self.assertEqual(m_req.call_count, 3) @attr(db='couch') def test_view_clean_up(self): """ Test cleaning up old view files """ self.assertEqual(self.db.view_cleanup(), {'ok': True}) def test_changes_feed_call(self): """ Test that changes() method call constructs and returns a Feed object """ changes = self.db.changes(limit=100) self.assertIs(type(changes), Feed) self.assertEqual(changes._url, '/'.join([self.db.database_url, '_changes'])) self.assertIsInstance(changes._r_session, requests.Session) self.assertFalse(changes._raw_data) self.assertDictEqual(changes._options, {'limit': 100}) def test_changes_inifinite_feed_call(self): """ Test that infinite_changes() method call constructs and returns an InfiniteFeed object """ changes = self.db.infinite_changes() self.assertIsInstance(changes, InfiniteFeed) self.assertEqual(changes._url, '/'.join([self.db.database_url, '_changes'])) self.assertIsInstance(changes._r_session, requests.Session) self.assertFalse(changes._raw_data) self.assertDictEqual(changes._options, {'feed': 'continuous'}) def test_get_list_function_result_with_invalid_argument(self): """ Test get_list_result by passing in invalid arguments """ with self.assertRaises(CloudantArgumentError) as cm: self.db.get_list_function_result('ddoc001', 'list001', 'view001', foo={'bar': 'baz'}) err = cm.exception self.assertEqual(str(err), 'Invalid argument foo') def test_get_list_function_result(self): """ Test get_list_result executes a list function against a view's MapReduce function. """ self.populate_db_with_documents() ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_view('view001', 'function (doc) {\n emit(doc._id, 1);\n}') ddoc.add_list_function( 'list001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }') ddoc.save() # Execute list function resp = self.db.get_list_function_result( '_design/ddoc001', 'list001', 'view001', limit=5 ) self.assertEqual( resp, '
    \n' '
  1. julia000:1
  2. \n' '
  3. julia001:1
  4. \n' '
  5. julia002:1
  6. \n' '
  7. julia003:1
  8. \n' '
  9. julia004:1
  10. \n' '
' ) def test_get_show_result(self): """ Test get_show_result executes a show function against a document. """ self.populate_db_with_documents() ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_show_function( 'show001', 'function(doc, req) { ' 'if (doc) { return \'Hello from \' + doc._id + \'!\'; } ' 'else { return \'Hello, world!\'; } }') ddoc.save() doc = Document(self.db, 'doc001') doc.save() # Execute show function resp = self.db.get_show_function_result( '_design/ddoc001', 'show001', 'doc001' ) self.assertEqual( resp, 'Hello from doc001!' ) @skip_if_iam def test_create_doc_with_update_handler(self): """ Test update_handler_result executes an update handler function that creates a new document """ self.populate_db_with_documents() ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['updates'] = { 'update001': 'function(doc, req) { if (!doc) { var new_doc = req.form; ' 'new_doc._id = \'testDoc\'; return [new_doc, ' '\'Created new doc: \' + JSON.stringify(new_doc)]; }} ' } ddoc.save() resp = self.db.update_handler_result('ddoc001', 'update001', data={'message': 'hello'}) self.assertEqual( resp, 'Created new doc: {"message":"hello","_id":"testDoc"}' ) @skip_if_iam def test_update_doc_with_update_handler(self): """ Test update_handler_result executes an update handler function that updates a document with query parameters """ self.populate_db_with_documents() ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['updates'] = { 'update001': 'function(doc, req) { ' 'var field = req.query.field; ' 'var value = req.query.value; ' 'var new_doc = doc; ' 'doc[field] = value; ' 'for(var key in req.form) doc[key]=req.form[key]; ' 'var message = \'set \'+field+\' to \'+value' '+\' and add data \'+ JSON.stringify(req.form); ' 'return [doc, message]; } ' } ddoc.save() resp = self.db.update_handler_result('ddoc001', 'update001', 'julia001', field='new_field', value='new_value', data={'message': 'hello'}) self.assertEqual( resp, 'set new_field to new_value and add 
data {"message":"hello"}' ) ddoc_remote = Document(self.db, 'julia001') ddoc_remote.fetch() self.assertEqual( ddoc_remote, {'age': 1, 'name': 'julia', 'new_field': 'new_value', '_rev': ddoc_remote['_rev'], '_id': 'julia001', 'message': 'hello'} ) def test_update_handler_raises_httperror(self): """ Test update_handler_result raises an HTTPError. """ # Mock HTTPError when running against CouchDB or Cloudant resp = requests.Response() resp.status_code = 400 self.client.r_session.put = mock.Mock(return_value=resp) with self.assertRaises(requests.HTTPError) as cm: self.db.update_handler_result('ddoc001', 'update001', 'julia001', field='new_field', value='new_value', data={'message': 'hello'}) err = cm.exception self.assertEqual(err.response.status_code, 400) ddoc = DesignDocument(self.db, 'ddoc001') self.client.r_session.put.assert_called_with( '/'.join([ddoc.document_url, '_update', 'update001', 'julia001']), data={'message': 'hello'}, params={'field': 'new_field', 'value': 'new_value'}) def test_database_request_fails_after_client_disconnects(self): """ Test that after disconnecting from a client any objects created based on that client are not able to make requests. """ self.client.disconnect() try: with self.assertRaises(AttributeError): self.db.metadata() self.assertIsNone(self.db.r_session) finally: self.client.connect() @attr(couchapi=2) def test_create_json_index(self): """ Ensure that a JSON index is created as expected. 
        """
        index = self.db.create_query_index(fields=['name', 'age'])
        self.assertIsInstance(index, Index)
        ddoc = self.db[index.design_document_id]
        self.assertEqual(ddoc['_id'], index.design_document_id)
        self.assertTrue(ddoc['_rev'].startswith('1-'))
        self.assertEqual(ddoc['indexes'], {})
        self.assertEqual(ddoc['language'], 'query')
        self.assertEqual(ddoc['lists'], {})
        self.assertEqual(ddoc['shows'], {})
        # The JSON index is materialized as a view in the design document
        index = ddoc['views'][index.name]
        self.assertEqual(index['map']['fields']['age'], 'asc')
        self.assertEqual(index['map']['fields']['name'], 'asc')
        self.assertEqual(index['options']['def']['fields'], ['name', 'age'])
        self.assertEqual(index['reduce'], '_count')

    @attr(couchapi=2)
    def test_delete_json_index(self):
        """
        Ensure that a JSON index is deleted as expected.
        """
        index = self.db.create_query_index(
            'ddoc001', 'index001', fields=['name', 'age'])
        self.assertIsInstance(index, Index)
        ddoc = self.db['_design/ddoc001']
        self.assertTrue(ddoc.exists())
        # Deleting the only index also removes the design document
        self.db.delete_query_index('ddoc001', 'json', 'index001')
        self.assertFalse(ddoc.exists())


@attr(db='cloudant')
class CloudantDatabaseTests(UnitTestDbBase):
    """
    Cloudant specific Database unit tests
    """

    def setUp(self):
        """
        Set up test attributes
        """
        super(CloudantDatabaseTests, self).setUp()
        self.db_set_up()

    def tearDown(self):
        """
        Reset test attributes
        """
        self.db_tear_down()
        super(CloudantDatabaseTests, self).tearDown()

    def test_share_database_uses_custom_encoder(self):
        """
        Test that share_database uses custom encoder
        """
        share = 'user-{0}'.format(unicode_(uuid.uuid4()))
        self.set_up_client(auto_connect=True, encoder="AEncoder")
        database = self.client[self.test_dbname]
        with self.assertRaises(TypeError):
            database.share_database(share)

    def test_unshare_database_uses_custom_encoder(self):
        """
        Test that unshare_database uses custom encoder
        """
        share = 'user-{0}'.format(unicode_(uuid.uuid4()))
        self.set_up_client(auto_connect=True, encoder="AEncoder")
        database = self.client[self.test_dbname]
        with self.assertRaises(TypeError):
            database.unshare_database(share)

    @skip_if_not_cookie_auth
    def test_security_document(self):
        """
        Test the retrieval of the security document.
        """
        share = 'user-{0}'.format(unicode_(uuid.uuid4()))
        self.db.share_database(share)
        expected = {'cloudant': {share: ['_reader']}}
        self.assertDictEqual(self.db.security_document(), expected)

    @skip_if_not_cookie_auth
    def test_share_database_default_permissions(self):
        """
        Test the sharing of a database applying default permissions.
        """
        self.assertDictEqual(self.db.security_document(), dict())
        share = 'user-{0}'.format(unicode_(uuid.uuid4()))
        self.db.share_database(share)
        # default permission is _reader
        expected = {'cloudant': {share: ['_reader']}}
        self.assertDictEqual(self.db.security_document(), expected)

    @skip_if_not_cookie_auth
    def test_share_database(self):
        """
        Test the sharing of a database.
        """
        self.assertDictEqual(self.db.security_document(), dict())
        share = 'user-{0}'.format(unicode_(uuid.uuid4()))
        self.db.share_database(share, ['_writer'])
        expected = {'cloudant': {share: ['_writer']}}
        self.assertDictEqual(self.db.security_document(), expected)

    @skip_if_not_cookie_auth
    def test_share_database_with_redundant_role_entries(self):
        """
        Test the sharing of a database works when the list of roles contains
        valid entries but some entries are duplicates.
        """
        self.assertDictEqual(self.db.security_document(), dict())
        share = 'user-{0}'.format(unicode_(uuid.uuid4()))
        self.db.share_database(share, ['_writer', '_writer'])
        # duplicates are collapsed to a single role entry
        expected = {'cloudant': {share: ['_writer']}}
        self.assertDictEqual(self.db.security_document(), expected)

    def test_share_database_invalid_role(self):
        """
        Test the sharing of a database fails when provided an invalid role.
        """
        share = 'user-{0}'.format(unicode_(uuid.uuid4()))
        with self.assertRaises(CloudantArgumentError) as cm:
            self.db.share_database(share, ['_writer', '_invalid_role'])
        err = cm.exception
        self.assertEqual(
            str(err),
            'Invalid role(s) provided: '
            '[\'_writer\', \'_invalid_role\']. Valid roles are: '
            '[\'_reader\', \'_writer\', \'_admin\', \'_replicator\', '
            '\'_db_updates\', \'_design\', \'_shards\', \'_security\']'
        )

    def test_share_database_empty_role_list(self):
        """
        Test the sharing of a database fails when provided an empty role list.
        """
        share = 'user-{0}'.format(unicode_(uuid.uuid4()))
        with self.assertRaises(CloudantArgumentError) as cm:
            self.db.share_database(share, [])
        err = cm.exception
        self.assertEqual(
            str(err),
            'Invalid role(s) provided: []. Valid roles are: '
            '[\'_reader\', \'_writer\', \'_admin\', \'_replicator\', '
            '\'_db_updates\', \'_design\', \'_shards\', \'_security\']'
        )

    @skip_if_not_cookie_auth
    def test_unshare_database(self):
        """
        Test the un-sharing of a database from a specified user.
        """
        share = 'user-{0}'.format(unicode_(uuid.uuid4()))
        self.db.share_database(share)
        expected = {'cloudant': {share: ['_reader']}}
        self.assertDictEqual(self.db.security_document(), expected)
        self.assertDictEqual(self.db.unshare_database(share), {'ok': True})
        self.assertDictEqual(self.db.security_document(), {'cloudant': dict()})

    def test_retrieve_shards(self):
        """
        Test retrieving the shard details for the database.
        """
        shards = self.db.shards()
        self.assertTrue(all(x in shards.keys() for x in ['shards']))
        self.assertIsInstance(shards['shards'], dict)

    def test_get_raw_query_result(self):
        """
        Test that retrieving the raw JSON response for a query works as expected
        """
        self.populate_db_with_documents(100)
        # third positional arg (raw_result=True) requests the raw dict
        result = self.db.get_query_result(
            {'$and': [
                {'_id': {'$gte': 'julia001'}},
                {'_id': {'$lt': 'julia005'}}
            ]},
            ['_id', '_rev'],
            True
        )
        self.assertNotIsInstance(result, QueryResult)
        self.assertIsInstance(result, dict)
        self.assertEqual(
            [doc['_id'] for doc in result['docs']],
            ['julia001', 'julia002', 'julia003', 'julia004']
        )

    def test_get_query_result_with_kwargs(self):
        """
        Test that retrieving the QueryResult for a query works as expected
        when additional options are added via kwargs
        """
        self.populate_db_with_documents(100)
        result = self.db.get_query_result(
            {'$and': [
                {'_id': {'$gte': 'julia001'}},
                {'_id': {'$lt': 'julia005'}}
            ]},
            ['_id', '_rev'],
            sort=[{'_id': 'desc'}]
        )
        self.assertIsInstance(result, QueryResult)
        # descending sort reverses the id order
        self.assertEqual(
            [doc['_id'] for doc in result],
            ['julia004', 'julia003', 'julia002', 'julia001']
        )

    def test_get_query_result_without_kwargs(self):
        """
        Test that retrieving the QueryResult for a query works as expected
        when executing a query
        """
        self.populate_db_with_documents(100)
        result = self.db.get_query_result(
            {'$and': [
                {'_id': {'$gte': 'julia001'}},
                {'_id': {'$lt': 'julia005'}}
            ]},
            ['_id', '_rev']
        )
        self.assertIsInstance(result, QueryResult)
        self.assertEqual(
            [doc['_id'] for doc in result],
            ['julia001', 'julia002', 'julia003', 'julia004']
        )

    def test_get_query_result_without_fields(self):
        """
        Assert that the QueryResult docs include all the expected fields
        when no fields parameter is provided.
        """
        self.populate_db_with_documents(100)
        expected_fields = ['_id', '_rev', 'age', 'name']
        # Sort the list of expected fields so we can assert list equality later
        expected_fields.sort()
        result = self.db.get_query_result(
            {'$and': [
                {'_id': {'$gte': 'julia001'}},
                {'_id': {'$lt': 'julia005'}}
            ]}
        )
        self.assertIsInstance(result, QueryResult)
        for doc in result:
            doc_fields = list(doc.keys())
            doc_fields.sort()
            self.assertEqual(doc_fields, expected_fields)
        self.assertEqual(
            [doc['_id'] for doc in result],
            ['julia001', 'julia002', 'julia003', 'julia004']
        )

    def test_get_query_result_with_empty_fields_list(self):
        """
        Assert that the QueryResult docs include all the expected fields
        when an empty fields list is provided.
        """
        self.populate_db_with_documents(100)
        expected_fields = ['_id', '_rev', 'age', 'name']
        # Sort the list of expected fields so we can assert list equality later
        expected_fields.sort()
        # An empty fields list behaves like no fields parameter at all
        result = self.db.get_query_result(
            {'$and': [
                {'_id': {'$gte': 'julia001'}},
                {'_id': {'$lt': 'julia005'}}
            ]},
            fields=[]
        )
        self.assertIsInstance(result, QueryResult)
        for doc in result:
            doc_fields = list(doc.keys())
            doc_fields.sort()
            self.assertEqual(doc_fields, expected_fields)
        self.assertEqual(
            [doc['_id'] for doc in result],
            ['julia001', 'julia002', 'julia003', 'julia004']
        )

    def test_create_text_index(self):
        """
        Ensure that a text index is created as expected.
        """
        index = self.db.create_query_index(
            index_type='text',
            fields=[{'name': 'name', 'type':'string'},
                    {'name': 'age', 'type':'number'}]
        )
        self.assertIsInstance(index, TextIndex)
        ddoc = self.db[index.design_document_id]
        self.assertEqual(ddoc['_id'], index.design_document_id)
        self.assertTrue(ddoc['_rev'].startswith('1-'))
        self.assertEqual(ddoc['language'], 'query')
        self.assertEqual(ddoc['lists'], {})
        self.assertEqual(ddoc['shows'], {})
        self.assertEqual(ddoc['views'], {})
        # Text indexes are materialized under the 'indexes' key
        text_index = ddoc['indexes'][index.name]
        self.assertEqual(text_index['analyzer']['default'], 'keyword')
        self.assertEqual(text_index['analyzer']['fields']['$default'], 'standard')
        self.assertEqual(text_index['analyzer']['name'], 'perfield')
        self.assertEqual(text_index['index']['default_analyzer'], 'keyword')
        self.assertEqual(text_index['index']['default_field'], {})
        self.assertEqual(text_index['index']['fields'],
                         [{'name': 'name', 'type': 'string'},
                          {'name': 'age', 'type': 'number'}])
        self.assertEqual(text_index['index']['selector'], {})
        self.assertTrue(text_index['index']['index_array_lengths'])

    def test_create_all_fields_text_index(self):
        """
        Ensure that a text index is created for all fields as expected.
        """
        # No fields parameter indexes all fields
        index = self.db.create_query_index(index_type='text')
        self.assertIsInstance(index, TextIndex)
        ddoc = self.db[index.design_document_id]
        self.assertEqual(ddoc['_id'], index.design_document_id)
        self.assertTrue(ddoc['_rev'].startswith('1-'))
        self.assertEqual(ddoc['language'], 'query')
        self.assertEqual(ddoc['lists'], {})
        self.assertEqual(ddoc['shows'], {})
        self.assertEqual(ddoc['views'], {})
        index = ddoc['indexes'][index.name]
        self.assertEqual(index['analyzer']['default'], 'keyword')
        self.assertEqual(index['analyzer']['fields'], {'$default': 'standard'})
        self.assertEqual(index['analyzer']['name'], 'perfield')
        self.assertEqual(index['index']['default_analyzer'], 'keyword')
        self.assertEqual(index['index']['default_field'], {})
        self.assertEqual(index['index']['fields'], 'all_fields')
        self.assertEqual(index['index']['selector'], {})
        self.assertTrue(index['index']['index_array_lengths'])

    def test_create_multiple_indexes_one_ddoc(self):
        """
        Tests that multiple indexes of different types can be stored in
        one design document.
        """
        index = self.db.create_query_index(
            'ddoc001', 'json-index-001', fields=['name', 'age']
        )
        self.assertIsInstance(index, Index)
        search_index = self.db.create_query_index(
            'ddoc001', 'text-index-001', 'text',
            fields=[{'name': 'name', 'type':'string'},
                    {'name': 'age', 'type':'number'}]
        )
        self.assertIsInstance(search_index, TextIndex)
        ddoc = self.db['_design/ddoc001']
        self.assertEqual(ddoc['_id'], index.design_document_id)
        # rev is 2- because the second index updated the same design document
        self.assertTrue(ddoc['_rev'].startswith('2-'))
        self.assertEqual(ddoc['language'], 'query')
        self.assertEqual(ddoc['lists'], {})
        self.assertEqual(ddoc['shows'], {})
        json_index = ddoc['views']['json-index-001']
        self.assertEqual(json_index['map']['fields']['age'], 'asc')
        self.assertEqual(json_index['map']['fields']['name'], 'asc')
        self.assertEqual(json_index['options']['def']['fields'], ['name', 'age'])
        self.assertEqual(json_index['reduce'], '_count')
        text_index = ddoc['indexes']['text-index-001']
        self.assertEqual(text_index['analyzer']['default'], 'keyword')
        self.assertEqual(text_index['analyzer']['fields']['$default'], 'standard')
        self.assertEqual(text_index['analyzer']['name'], 'perfield')
        self.assertEqual(text_index['index']['default_analyzer'], 'keyword')
        self.assertEqual(text_index['index']['default_field'], {})
        self.assertEqual(text_index['index']['fields'],
                         [{'name': 'name', 'type': 'string'},
                          {'name': 'age', 'type': 'number'}])
        self.assertEqual(text_index['index']['selector'], {})
        self.assertTrue(text_index['index']['index_array_lengths'])

    def test_create_query_index_failure(self):
        """
        Tests that a type of something other than 'json' or 'text' will cause
        failure.
        """
        with self.assertRaises(CloudantArgumentError) as cm:
            self.db.create_query_index(
                None, '_all_docs', 'special', fields=[{'_id': 'asc'}]
            )
        err = cm.exception
        self.assertEqual(
            str(err),
            'Invalid index type: special. '
            'Index type must be either \"json\" or \"text\".'
        )

    def test_delete_text_index(self):
        """
        Ensure that a text index is deleted as expected.
""" index = self.db.create_query_index('ddoc001', 'index001', 'text') self.assertIsInstance(index, TextIndex) ddoc = self.db['_design/ddoc001'] self.assertTrue(ddoc.exists()) self.db.delete_query_index('ddoc001', 'text', 'index001') self.assertFalse(ddoc.exists()) def test_delete_query_index_failure(self): """ Tests that a type of something other than 'json' or 'text' will cause failure. """ with self.assertRaises(CloudantArgumentError) as cm: self.db.delete_query_index(None, 'special', '_all_docs') err = cm.exception self.assertEqual( str(err), 'Invalid index type: special. ' 'Index type must be either \"json\" or \"text\".' ) def test_get_query_indexes_raw(self): """ Tests getting all query indexes from the _index endpoint in JSON format. """ self.db.create_query_index('ddoc001', 'json-idx-001', fields=['name', 'age']) self.db.create_query_index('ddoc001', 'text-idx-001', 'text') indexes = self.db.get_query_indexes(raw_result=True) self.assertEqual(indexes['total_rows'], 3) all_docs_index = indexes['indexes'][0] self.assertEqual(all_docs_index['ddoc'], None) self.assertEqual(all_docs_index['def']['fields'], [{'_id': 'asc'}]) self.assertEqual(all_docs_index['name'], '_all_docs') self.assertEqual(all_docs_index['type'], 'special') json_index = indexes['indexes'][1] self.assertEqual(json_index['ddoc'], '_design/ddoc001') self.assertEqual(json_index['def']['fields'], [{'name': 'asc'}, {'age': 'asc'}]) self.assertEqual(json_index['name'], 'json-idx-001') self.assertEqual(json_index['type'], 'json') text_index = indexes['indexes'][2] self.assertEqual(text_index['ddoc'], '_design/ddoc001') self.assertEqual(text_index['def']['default_analyzer'], 'keyword') self.assertEqual(text_index['def']['default_field'], {}) self.assertEqual(text_index['def']['fields'], []) self.assertEqual(text_index['def']['selector'], {}) self.assertEqual(text_index['name'], 'text-idx-001') self.assertEqual(text_index['type'], 'text') self.assertTrue(text_index['def']['index_array_lengths']) def 
test_get_query_indexes(self): """ Tests getting all query indexes from the _index endpoint wrapped as Index, TextIndex, and SpecialIndex. """ self.db.create_query_index('ddoc001', 'json-idx-001', fields=['name', 'age']) self.db.create_query_index('ddoc001', 'text-idx-001', 'text') indexes = self.db.get_query_indexes() self.assertIsInstance(indexes[0], SpecialIndex) self.assertIsNone(indexes[0].design_document_id) self.assertEqual(indexes[0].name, '_all_docs') self.assertIsInstance(indexes[1], Index) self.assertEqual(indexes[1].design_document_id, '_design/ddoc001') self.assertEqual(indexes[1].name, 'json-idx-001') self.assertIsInstance(indexes[2], TextIndex) self.assertEqual(indexes[2].design_document_id, '_design/ddoc001') self.assertEqual(indexes[2].name, 'text-idx-001') def test_get_search_result_with_invalid_argument(self): """ Test get_search_result by passing in invalid arguments """ with self.assertRaises(CloudantArgumentError) as cm: self.db.get_search_result('searchddoc001', 'searchindex001', query='julia*', foo={'bar': 'baz'}) err = cm.exception self.assertEqual(str(err), 'Invalid argument: foo') def test_get_search_result_with_both_q_and_query(self): """ Test get_search_result by passing in both a q and query parameter """ with self.assertRaises(CloudantArgumentError) as cm: self.db.get_search_result('searchddoc001', 'searchindex001', query='julia*', q='julia*') err = cm.exception self.assertTrue(str(err).startswith('A single query/q parameter is required.')) def test_get_search_result_with_invalid_value_types(self): """ Test get_search_result by passing in invalid value types for query parameters """ test_data = [ {'bookmark': 1}, # Should be a STRTYPE {'counts': 'blah'}, # Should be a list {'drilldown': 'blah'}, # Should be a list {'group_field': ['blah']}, # Should be a STRTYPE {'group_limit': 'int'}, # Should be an int {'group_sort': 3}, # Should be a STRTYPE or list {'include_docs': 'blah'}, # Should be a boolean {'limit': 'blah'}, # Should be an 
int {'ranges': 1}, # Should be a dict {'sort': 10}, # Should be a STRTYPE or list {'stale': ['blah']}, # Should be a STRTYPE {'highlight_fields': 'blah'}, # Should be a list {'highlight_pre_tag': ['blah']}, # Should be a STRTYPE {'highlight_post_tag': 1}, # Should be a STRTYPE {'highlight_number': ['int']}, # Should be an int {'highlight_size': 'blah'}, # Should be an int {'include_fields': 'list'}, # Should be a list ] for argument in test_data: with self.assertRaises(CloudantArgumentError) as cm: self.db.get_search_result('searchddoc001', 'searchindex001', query='julia*', **argument) err = cm.exception self.assertTrue(str(err).startswith( 'Argument {0} is not an instance of expected type:'.format( list(argument.keys())[0]) )) def test_get_search_result_without_query(self): """ Test get_search_result without providing a search query """ with self.assertRaises(CloudantArgumentError) as cm: self.db.get_search_result('searchddoc001', 'searchindex001', limit=10, include_docs=True) err = cm.exception # Validate that the error message starts as expected self.assertTrue(str(err).startswith('A single query/q parameter is required.')) # Validate that the error message includes the supplied parameters (in an order independent way) self.assertTrue(str(err).find("'limit': 10") >= 0) self.assertTrue(str(err).find("'include_docs': True") >= 0) def test_get_search_result_with_invalid_query_type(self): """ Test get_search_result by passing an invalid query type """ with self.assertRaises(CloudantArgumentError) as cm: self.db.get_search_result( 'searchddoc001', 'searchindex001', query=['blah'] ) err = cm.exception self.assertTrue(str(err).startswith( 'Argument query is not an instance of expected type:' )) def test_get_search_result_executes_search_query(self): """ Test get_search_result executes a search with query parameter. 
""" self.create_search_index() self.populate_db_with_documents(100) resp = self.db.get_search_result( 'searchddoc001', 'searchindex001', query='julia*', sort='_id', limit=5, include_docs=True ) self.assertEqual(5, len(resp['rows'])) self.assertTrue(resp['bookmark']) for i, row in enumerate(resp['rows']): doc_id = 'julia00{0}'.format(i) self.assertEqual(doc_id, row['id']) self.assertEqual('julia', row['fields']['name']) # Note: The second element in the order array can be ignored. It is # used for troubleshooting purposes only. self.assertEqual(doc_id, row['order'][0]) doc = row['doc'] self.assertEqual(doc_id, doc['_id']) self.assertTrue(doc['_rev'].startswith('1-')) self.assertEqual(i, doc['age']) self.assertEqual('julia', doc['name']) def test_get_search_result_executes_search_q(self): """ Test get_search_result executes a search query with q parameter. """ self.create_search_index() self.populate_db_with_documents(100) resp = self.db.get_search_result( 'searchddoc001', 'searchindex001', q='julia*', sort='_id', limit=1 ) self.assertTrue(resp['bookmark']) self.assertEqual(100, resp['total_rows']) self.assertEqual(1, len(resp['rows'])) row = resp['rows'][0] self.assertEqual('julia000', row['id']) # Note: The second element in the order array can be ignored. It is # used for troubleshooting purposes only. self.assertEqual('julia000', row['order'][0]) self.assertEqual('julia', row['fields']['name']) def test_get_search_result_executes_search_query_with_group_option(self): """ Test get_search_result executes a search query with grouping parameters. 
""" self.create_search_index() self.populate_db_with_documents(100) resp = self.db.get_search_result( 'searchddoc001', 'searchindex001', query='name:julia*', group_field='_id', group_limit=5, group_sort='_id' ) # for group parameter options, 'rows' results are within 'groups' key self.assertEqual(len(resp['groups']), 5) for i, group in enumerate(resp['groups']): by_id = 'julia00{0}'.format(i) self.assertEqual(by_id, group['by']) self.assertEqual(1, group['total_rows']) self.assertEqual(1, len(group['rows'])) row = group['rows'][0] self.assertEqual(by_id, row['id']) self.assertEqual('julia', row['fields']['name']) # Note: The second element in the order array can be ignored. It is # used for troubleshooting purposes only. self.assertEqual(1.0, row['order'][0]) self.assertEqual(100, resp['total_rows']) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/db_updates_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2016, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Unit tests for _db_updates feed """ import json import os import unittest from cloudant._2to3 import unicode_ from cloudant.error import CloudantArgumentError from cloudant.feed import Feed from nose.plugins.attrib import attr from requests import Session from .unit_t_db_base import UnitTestDbBase from .. 
import BYTETYPE class DbUpdatesTestsBase(UnitTestDbBase): """ Common _db_updates tests methods """ def setUp(self): """ Set up test attributes """ super(DbUpdatesTestsBase, self).setUp() self.client.connect() self.db_names = list() self.new_dbs = list() if not self.is_couchdb_1x_version(): self.create_db_updates() self.create_dbs() def tearDown(self): """ Reset test attributes """ test_dbs_deleted = False changes = list() [db.delete() for db in self.new_dbs] # Check the changes in the _db_updates feed to assert that the test databases are deleted if not self.is_couchdb_1x_version(): while not test_dbs_deleted and not self.is_couchdb_1x_version(): feed = Feed(self.client, timeout=1000) for change in feed: if change['db_name'] in self.db_names and change['type'] == 'deleted': changes.append(change) if len(changes) == 2: test_dbs_deleted = True feed.stop() self.delete_db_updates() self.client.disconnect() super(DbUpdatesTestsBase, self).tearDown() def create_dbs(self): if not self.is_couchdb_1x_version(): self.db_names = [self.dbname() for x in range(2)] self.new_dbs += [self.client.create_database(dbname) for dbname in self.db_names] # Verify that all created databases are listed in _db_updates all_dbs_exist = False while not all_dbs_exist: changes = list() feed = Feed(self.client, timeout=1000) for change in feed: changes.append(change) if len(changes) == 3: all_dbs_exist = True feed.stop() else: self.new_dbs += [(self.client.create_database(self.dbname())) for x in range(3)] def assert_changes_in_db_updates_feed(self, changes): """ Assert that databases created in setup for db_updates_tests exist when looping through _db_updates feed Note: During the creation of _global_changes database, a doc called '_dbs' is created and seen in _db_updates """ if not self.is_couchdb_1x_version(): self.dbs = ['_dbs', self.new_dbs[0].database_name, self.new_dbs[1].database_name] types = ['created', 'updated'] for doc in changes: self.assertIsNotNone(doc['seq']) 
self.assertTrue(doc['db_name'] in self.dbs) self.assertTrue(doc['type'] in types) else: self.assertDictEqual( changes[0], {'db_name': self.new_dbs[0].database_name, 'type': 'created'}) self.assertDictEqual( changes[1], {'db_name': self.new_dbs[1].database_name, 'type': 'created'}) self.assertDictEqual( changes[2], {'db_name': self.new_dbs[2].database_name, 'type': 'created'}) @attr(db='couch') class CouchDbUpdatesTests(DbUpdatesTestsBase): """ CouchDB _db_updates feed unit tests """ def test_constructor_db_updates(self): """ Test constructing a _db_updates feed """ feed = Feed(self.client, feed='continuous', heartbeat=False, timeout=2) self.assertEqual(feed._url, '/'.join([self.client.server_url, '_db_updates'])) self.assertIsInstance(feed._r_session, Session) self.assertFalse(feed._raw_data) self.assertDictEqual(feed._options, {'feed': 'continuous', 'heartbeat': False, 'timeout': 2}) def test_stop_iteration_of_continuous_feed_with_heartbeat(self): """ Test stopping the iteration, test a continuous feed, and test heartbeat functionality. 
""" feed = Feed(self.client, feed='continuous', timeout=100) changes = list() for change in feed: if not change and self.is_couchdb_1x_version(): self.create_dbs() else: changes.append(change) if len(changes) == 3: feed.stop() self.assert_changes_in_db_updates_feed(changes) self.assertEqual(len(changes), 3) def test_get_raw_content(self): """ Test getting raw feed content """ feed = Feed(self.client, raw_data=True, feed='continuous', timeout=100) raw_content = list() for raw_line in feed: self.assertIsInstance(raw_line, BYTETYPE) if not raw_line and self.is_couchdb_1x_version(): self.create_dbs() else: raw_content.append(raw_line) if len(raw_content) == 3: feed.stop() changes = [json.loads(unicode_(x)) for x in raw_content] self.assert_changes_in_db_updates_feed(changes) def test_get_longpoll_feed_as_default(self): """ Test getting content back for a "longpoll" feed """ feed = Feed(self.client, timeout=1000) changes = list() if self.is_couchdb_1x_version(): for change in feed: self.assertIsNone(change) changes.append(change) self.assertEqual(len(changes), 1) self.assertIsNone(changes[0]) else: for change in feed: self.assertIsNotNone(change) changes.append(change) if len(changes) == 3: feed.stop() self.assert_changes_in_db_updates_feed(changes) self.assertEqual(len(changes), 3) def test_get_longpoll_feed_explicit(self): """ Test getting content back for a "longpoll" feed while setting feed to longpoll explicitly """ feed = Feed(self.client, timeout=1000, feed='longpoll') changes = list() if self.is_couchdb_1x_version(): for change in feed: self.assertIsNone(change) changes.append(change) self.assertEqual(len(changes), 1) self.assertIsNone(changes[0]) else: for change in feed: self.assertIsNotNone(change) changes.append(change) if len(changes) == 3: feed.stop() self.assert_changes_in_db_updates_feed(changes) self.assertEqual(len(changes), 3) def test_get_continuous_with_timeout(self): """ Test getting content back for a "continuous" feed with timeout set and no 
heartbeat """ feed = Feed(self.client, feed='continuous', heartbeat=False, timeout=1000) changes = list() if self.is_couchdb_1x_version(): self.assertListEqual([x for x in feed], []) else: for change in feed: self.assertIsNotNone(change) changes.append(change) if len(changes) == 3: feed.stop() self.assert_changes_in_db_updates_feed(changes) self.assertEqual(len(changes), 3) def test_invalid_argument(self): """ Test that an invalid argument is caught and an exception is raised """ feed = Feed(self.client, foo='bar') with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertEqual(str(cm.exception), 'Invalid argument foo') feed = Feed(self.client, style='all_docs') with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertEqual(str(cm.exception), 'Invalid argument style') feed = Feed(self.client, descending=True) with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertEqual(str(cm.exception), 'Invalid argument descending') def test_invalid_argument_type(self): """ Test that an invalid argument type is caught and an exception is raised """ feed = Feed(self.client, heartbeat=6) with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertTrue( str(cm.exception).startswith( 'Argument heartbeat not instance of expected type:') ) def test_invalid_non_positive_integer_argument(self): """ Test that an invalid integer argument type is caught and an exception is raised """ feed = Feed(self.client, timeout=-1) with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertEqual( str(cm.exception), 'Argument timeout must be > 0. 
Found: -1') def test_invalid_feed_value(self): """ Test that an invalid feed argument value is caught and an exception is raised """ feed = Feed(self.client, feed='foo') with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertTrue(str(cm.exception).startswith( 'Invalid value (foo) for feed option.')) feed = Feed(self.client, feed='normal') with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertTrue(str(cm.exception).startswith( 'Invalid value (normal) for feed option.')) @attr(db='cloudant') @unittest.skipIf(os.environ.get('SKIP_DB_UPDATES'), 'Skipping Cloudant _db_updates feed tests') class CloudantDbUpdatesTests(DbUpdatesTestsBase): """ Cloudant _db_updates feed unit tests """ def test_constructor_db_updates(self): """ Test constructing a _db_updates feed """ feed = Feed(self.client, feed='continuous', heartbeat=5000) self.assertEqual(feed._url, '/'.join([self.client.server_url, '_db_updates'])) self.assertIsInstance(feed._r_session, Session) self.assertFalse(feed._raw_data) self.assertDictEqual(feed._options, {'feed': 'continuous', 'heartbeat': 5000}) def test_get_last_seq(self): """ Test getting the last sequence identifier """ self.create_dbs(1) feed = Feed(self.client, since='now') self.assertIsNone(feed.last_seq) [x for x in feed] self.assertIsNotNone(feed.last_seq) def test_stop_iteration_of_continuous_feed_using_since_now(self): """ Test stopping the iteration, test continuous feed functionality, test using since='now' option. 
""" feed = Feed(self.client, feed='continuous', since='now') count = 0 changes = list() for change in feed: self.assertTrue(all(x in change for x in ('seq', 'type'))) changes.append(change) count += 1 if count == 2: feed.stop() self.assertEqual(len(changes), 2) self.assertTrue(changes[0]['seq'] < changes[1]['seq']) self.assertIsNone(feed.last_seq) def test_get_raw_content(self): """ Test getting raw feed content """ self.create_dbs(3) feed = Feed(self.client, limit=3, raw_data=True) raw_content = list() for raw_line in feed: self.assertIsInstance(raw_line, BYTETYPE) raw_content.append(raw_line) changes = json.loads(''.join([unicode_(x) for x in raw_content])) self.assertSetEqual(set(changes.keys()), set(['results', 'last_seq'])) self.assertEqual(len(changes['results']), 3) self.assertIsNotNone(changes['last_seq']) self.assertIsNone(feed.last_seq) def test_get_normal_feed_default(self): """ Test getting content back for a "normal" feed without feed option. Also using limit since we don't know how many updates have occurred on client. """ feed = Feed(self.client, limit=3) changes = list() for change in feed: self.assertTrue(all(x in change for x in ('seq', 'type'))) changes.append(change) self.assertEqual(len(changes), 3) self.assertTrue(changes[0]['seq'] < changes[1]['seq'] < changes[2]['seq']) self.assertIsNotNone(feed.last_seq) def test_get_normal_feed_explicit(self): """ Test getting content back for a "normal" feed using feed option. Also using limit since we don't know how many updates have occurred on client. 
""" feed = Feed(self.client, feed='normal', limit=3) changes = list() for change in feed: self.assertTrue(all(x in change for x in ('seq', 'type'))) changes.append(change) self.assertEqual(len(changes), 3) self.assertTrue(changes[0]['seq'] < changes[1]['seq'] < changes[2]['seq']) self.assertIsNotNone(feed.last_seq) def test_get_longpoll_feed(self): """ Test getting content back for a "longpoll" feed """ feed = Feed(self.client, feed='longpoll', limit=3) changes = list() for change in feed: self.assertTrue(all(x in change for x in ('seq', 'type'))) changes.append(change) self.assertEqual(len(changes), 3) self.assertIsNotNone(feed.last_seq) def test_get_feed_with_heartbeat(self): """ Test getting content back for a feed with a heartbeat """ feed = Feed(self.client, feed='continuous', since='now', heartbeat=1000) changes = list() heartbeats = 0 for change in feed: if not change: self.assertIsNone(change) heartbeats += 1 if heartbeats < 4: self.create_dbs(1) else: self.assertTrue(all(x in change for x in ('seq', 'type'))) if len(changes) < 3: changes.append(change) if heartbeats >= 3 and len(changes) == 3: feed.stop() self.assertTrue(changes[0]['seq'] < changes[1]['seq'] < changes[2]['seq']) self.assertIsNone(feed.last_seq) def test_get_raw_feed_with_heartbeat(self): """ Test getting raw content back for a feed with a heartbeat """ feed = Feed(self.client, raw_data=True, feed='continuous', since='now', heartbeat=1000) raw_content = list() heartbeats = 0 for raw_line in feed: if not raw_line: self.assertEqual(len(raw_line), 0) heartbeats += 1 if heartbeats < 4: self.create_dbs(1) else: self.assertIsInstance(raw_line, BYTETYPE) raw_content.append(raw_line) if heartbeats >= 3 and len(raw_content) >= 3: feed.stop() changes = [json.loads(unicode_(x)) for x in raw_content] self.assertTrue(changes[0]['seq'] < changes[1]['seq'] < changes[2]['seq']) self.assertIsNone(feed.last_seq) def test_get_feed_descending(self): """ Test getting content back for a descending feed. 
""" self.create_dbs(3) feed = Feed(self.client, limit=3, descending=True) changes = list() for change in feed: self.assertTrue(all(x in change for x in ('seq', 'type'))) changes.append(change) self.assertEqual(len(changes), 3) self.assertTrue(changes[0]['seq'] > changes[1]['seq'] > changes[2]['seq']) self.assertIsNotNone(feed.last_seq) def test_get_feed_using_since(self): """ Test getting content back for a feed using the since option """ self.create_dbs(1) feed = Feed(self.client, since='now') [x for x in feed] last_seq = feed.last_seq self.create_dbs(3) feed = Feed(self.client, since=last_seq) for change in feed: self.assertTrue(all(x in change for x in ('seq', 'type'))) self.assertTrue(feed.last_seq > last_seq) def test_get_feed_using_timeout(self): """ Test getting content back for a feed using timeout. Since we do not have control over updates happening within the account as we do within a database, this test is stopped after 15 changes are received which should theoretically not happen but we still need a way to break out of the test if necessary. """ feed = Feed(self.client, feed='continuous', since='now', timeout=1000) count = 0 self.create_dbs(1) for change in feed: self.assertTrue(all(x in change for x in ('seq', 'type'))) count += 1 if count == 15: feed.stop() # The test is considered a success if the last_seq value exists on the # feed object. One would not exist if the feed was stopped via .stop(). # If failure occurs it does not necessarily mean that the InfiniteFeed # is not functioning as expected, it might also mean that we reached the # changes limit threshold of 15 before a timeout could happen. 
self.assertIsNotNone(feed.last_seq) def test_invalid_argument(self): """ Test that an invalid argument is caught and an exception is raised """ feed = Feed(self.client, foo='bar') with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertEqual(str(cm.exception), 'Invalid argument foo') feed = Feed(self.client, style='all_docs') with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertEqual(str(cm.exception), 'Invalid argument style') def test_invalid_argument_type(self): """ Test that an invalid argument type is caught and an exception is raised """ feed = Feed(self.client, descending=6) with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertTrue( str(cm.exception).startswith( 'Argument descending not instance of expected type:') ) def test_invalid_non_positive_integer_argument(self): """ Test that an invalid integer argument type is caught and an exception is raised """ feed = Feed(self.client, limit=-1) with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertEqual( str(cm.exception), 'Argument limit must be > 0. Found: -1') def test_invalid_feed_value(self): """ Test that an invalid feed argument value is caught and an exception is raised """ feed = Feed(self.client, feed='foo') with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertTrue(str(cm.exception).startswith( 'Invalid value (foo) for feed option.')) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/design_document_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2020 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
_design_document_tests_

design_document module - Unit tests for the DesignDocument class

See configuration options for environment variables in unit_t_db_base
module docstring.
"""
import json
import os
import unittest
import mock
import requests
from cloudant._common_util import response_to_json_dict
from cloudant.design_document import DesignDocument
from cloudant.document import Document
from cloudant.error import CloudantArgumentError, CloudantDesignDocumentException
from cloudant.view import View, QueryIndexView
from nose.plugins.attrib import attr
from .unit_t_db_base import UnitTestDbBase, skip_if_iam

class CloudantDesignDocumentExceptionTests(unittest.TestCase):
    """
    Ensure CloudantDesignDocumentException functions as expected.
    """

    def test_raise_without_code(self):
        """
        Ensure that a default exception/code is used if none is provided.
        """
        # Status code 100 is the default/generic design document error code
        with self.assertRaises(CloudantDesignDocumentException) as cm:
            raise CloudantDesignDocumentException()
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_using_invalid_code(self):
        """
        Ensure that a default exception/code is used if invalid code is
        provided.
        """
        with self.assertRaises(CloudantDesignDocumentException) as cm:
            raise CloudantDesignDocumentException('foo')
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_without_args(self):
        """
        Ensure that a default exception/code is used if the message requested
        by the code provided requires an argument list and none is provided.
        """
        with self.assertRaises(CloudantDesignDocumentException) as cm:
            raise CloudantDesignDocumentException(104)
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_with_proper_code_and_args(self):
        """
        Ensure that the requested exception is raised.
        """
        with self.assertRaises(CloudantDesignDocumentException) as cm:
            raise CloudantDesignDocumentException(104, 'foo')
        self.assertEqual(cm.exception.status_code, 104)

@attr(db=['cloudant','couch'])
class DesignDocumentTests(UnitTestDbBase):
    """
    DesignDocument unit tests
    """

    def setUp(self):
        """
        Set up test attributes
        """
        super(DesignDocumentTests, self).setUp()
        self.db_set_up()

    def tearDown(self):
        """
        Reset test attributes
        """
        self.db_tear_down()
        super(DesignDocumentTests, self).tearDown()

    def test_constructor_with_docid(self):
        """
        Test instantiating a DesignDocument providing an id not prefaced
        with '_design/'
        """
        # The '_design/' prefix is added automatically when missing
        ddoc = DesignDocument(self.db, 'ddoc001')
        self.assertIsInstance(ddoc, DesignDocument)
        self.assertEqual(ddoc.get('_id'), '_design/ddoc001')
        self.assertEqual(ddoc.get('views'), {})

    def test_constructor_with_design_docid(self):
        """
        Test instantiating a DesignDocument providing an id prefaced with
        '_design/'
        """
        ddoc = DesignDocument(self.db, '_design/ddoc001')
        self.assertIsInstance(ddoc, DesignDocument)
        self.assertEqual(ddoc.get('_id'), '_design/ddoc001')
        self.assertEqual(ddoc.get('views'), {})

    def test_constructor_without_docid(self):
        """
        Test instantiating a DesignDocument without providing an id
        """
        ddoc = DesignDocument(self.db)
        self.assertIsInstance(ddoc, DesignDocument)
        self.assertIsNone(ddoc.get('_id'))
        self.assertEqual(ddoc.get('views'), {})

    def test_create_design_document_with_docid_encoded_url(self):
        """
        Test creating a design document providing an id that has an encoded
        url
        """
        ddoc = DesignDocument(self.db, '_design/http://example.com')
        self.assertFalse(ddoc.exists())
        self.assertIsNone(ddoc.get('_rev'))
        ddoc.create()
        self.assertTrue(ddoc.exists())
        self.assertTrue(ddoc.get('_rev').startswith('1-'))

    def test_fetch_existing_design_document_with_docid_encoded_url(self):
        """
        Test fetching design document content from an existing document where
        the document id requires an encoded url
        """
        ddoc = DesignDocument(self.db, '_design/http://example.com')
        ddoc.create()
        new_ddoc = DesignDocument(self.db, '_design/http://example.com')
        new_ddoc.fetch()
        self.assertEqual(new_ddoc, ddoc)

    def test_update_design_document_with_encoded_url(self):
        """
        Test that updating a design document where the document id requires
        that the document url be encoded is successful.
        """
        # First create the design document
        ddoc = DesignDocument(self.db, '_design/http://example.com')
        ddoc.save()
        # Now test that the design document gets updated
        ddoc.save()
        self.assertTrue(ddoc['_rev'].startswith('2-'))
        remote_ddoc = DesignDocument(self.db, '_design/http://example.com')
        remote_ddoc.fetch()
        self.assertEqual(remote_ddoc, ddoc)

    def test_delete_design_document_success_with_encoded_url(self):
        """
        Test that we can remove a design document from the remote database
        successfully when the document id requires an encoded url.
        """
        ddoc = DesignDocument(self.db, '_design/http://example.com')
        ddoc.create()
        self.assertTrue(ddoc.exists())
        ddoc.delete()
        self.assertFalse(ddoc.exists())
        # After deletion the local object retains only its id
        self.assertEqual(ddoc, {'_id': '_design/http://example.com'})

    def test_add_a_view(self):
        """
        Test that adding a view adds a View object to the DesignDocument
        dictionary.
        """
        ddoc = DesignDocument(self.db, '_design/ddoc001')
        self.assertEqual(ddoc.get('views'), {})
        ddoc.add_view(
            'view001',
            'function (doc) {\n emit(doc._id, 1);\n}'
        )
        self.assertListEqual(list(ddoc.get('views').keys()), ['view001'])
        self.assertIsInstance(ddoc.get('views')['view001'], View)
        self.assertEqual(
            ddoc.get('views')['view001'],
            {'map': 'function (doc) {\n emit(doc._id, 1);\n}'}
        )

    def test_adding_existing_view(self):
        """
        Test that adding an existing view fails as expected.
        """
        ddoc = DesignDocument(self.db, '_design/ddoc001')
        ddoc.add_view(
            'view001',
            'function (doc) {\n emit(doc._id, 1);\n}'
        )
        try:
            ddoc.add_view('view001', 'function (doc) {\n emit(doc._id, 2);\n}')
            self.fail('Above statement should raise an Exception')
        except CloudantArgumentError as err:
            self.assertEqual(
                str(err),
                'View view001 already exists in this design doc.'
            )

    def test_adding_query_index_view(self):
        """
        Test that adding a query index view fails as expected.
        """
        ddoc = DesignDocument(self.db, '_design/ddoc001')
        # 'language': 'query' marks this as a query-index design document, so
        # MapReduce views cannot be added to it.
        ddoc['language'] = 'query'
        with self.assertRaises(CloudantDesignDocumentException) as cm:
            ddoc.add_view('view001', {'foo': 'bar'})
        err = cm.exception
        self.assertEqual(
            str(err),
            'Cannot add a MapReduce view to a '
            'design document for query indexes.'
        )

    def test_update_a_view(self):
        """
        Test that updating a view updates the contents of the correct View
        object in the DesignDocument dictionary.
        """
        ddoc = DesignDocument(self.db, '_design/ddoc001')
        ddoc.add_view('view001', 'not-a-valid-map-function')
        self.assertEqual(
            ddoc.get('views')['view001'],
            {'map': 'not-a-valid-map-function'}
        )
        ddoc.update_view(
            'view001',
            'function (doc) {\n emit(doc._id, 1);\n}'
        )
        self.assertEqual(
            ddoc.get('views')['view001'],
            {'map': 'function (doc) {\n emit(doc._id, 1);\n}'}
        )

    def test_update_non_existing_view(self):
        """
        Test that updating a non-existing view fails as expected.
        """
        ddoc = DesignDocument(self.db, '_design/ddoc001')
        try:
            ddoc.update_view(
                'view001',
                'function (doc) {\n emit(doc._id, 1);\n}'
            )
            self.fail('Above statement should raise an Exception')
        except CloudantArgumentError as err:
            self.assertEqual(
                str(err),
                'View view001 does not exist in this design doc.'
            )

    def test_update_query_index_view(self):
        """
        Test that updating a query index view fails as expected.
        """
        # This is not the preferred way of dealing with query index
        # views but it works best for this test.
        data = {
            '_id': '_design/ddoc001',
            'language': 'query',
            'views': {
                'view001': {'map': {'fields': {'name': 'asc', 'age': 'asc'}},
                            'reduce': '_count',
                            'options': {'def': {'fields': ['name', 'age']},
                                        'w': 2}
                }
            }
        }
        self.db.create_document(data)
        ddoc = DesignDocument(self.db, '_design/ddoc001')
        ddoc.fetch()
        with self.assertRaises(CloudantDesignDocumentException) as cm:
            ddoc.update_view(
                'view001',
                'function (doc) {\n emit(doc._id, 1);\n}'
            )
        err = cm.exception
        self.assertEqual(
            str(err),
            'Cannot update a query index view using this method.'
        )

    def test_delete_a_view(self):
        """
        Test deleting a view from the DesignDocument dictionary.
        """
        ddoc = DesignDocument(self.db, '_design/ddoc001')
        ddoc.add_view('view001', 'function (doc) {\n emit(doc._id, 1);\n}')
        self.assertEqual(
            ddoc.get('views')['view001'],
            {'map': 'function (doc) {\n emit(doc._id, 1);\n}'}
        )
        ddoc.delete_view('view001')
        self.assertEqual(ddoc.get('views'), {})

    def test_delete_a_query_index_view(self):
        """
        Test deleting a query index view fails as expected.
        """
        # This is not the preferred way of dealing with query index
        # views but it works best for this test.
        data = {
            '_id': '_design/ddoc001',
            'language': 'query',
            'views': {
                'view001': {'map': {'fields': {'name': 'asc', 'age': 'asc'}},
                            'reduce': '_count',
                            'options': {'def': {'fields': ['name', 'age']},
                                        'w': 2}
                }
            }
        }
        self.db.create_document(data)
        ddoc = DesignDocument(self.db, '_design/ddoc001')
        ddoc.fetch()
        with self.assertRaises(CloudantDesignDocumentException) as cm:
            ddoc.delete_view('view001')
        err = cm.exception
        self.assertEqual(
            str(err),
            'Cannot delete a query index view using this method.'
        )

    def test_fetch_map_reduce(self):
        """
        Ensure that the document fetch from the database returns the
        DesignDocument format as expected when retrieving a design document
        containing MapReduce views.
""" ddoc = DesignDocument(self.db, '_design/ddoc001') view_map = 'function (doc) {\n emit(doc._id, 1);\n}' view_reduce = '_count' ddoc.add_view('view001', view_map, view_reduce) ddoc.add_view('view003', view_map) ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') self.assertNotEqual(ddoc_remote, ddoc) ddoc_remote.fetch() self.assertEqual(ddoc_remote, ddoc) self.assertTrue(ddoc_remote['_rev'].startswith('1-')) self.assertEqual(ddoc_remote, { '_id': '_design/ddoc001', '_rev': ddoc['_rev'], 'options': {'partitioned': False}, 'lists': {}, 'shows': {}, 'indexes': {}, 'views': { 'view001': {'map': view_map, 'reduce': view_reduce}, 'view003': {'map': view_map} } }) self.assertIsInstance(ddoc_remote['views']['view001'], View) self.assertIsInstance(ddoc_remote['views']['view003'], View) @attr(db='cloudant') def test_fetch_dbcopy(self): """ Ensure that the document fetch from the database returns the DesignDocument format as expected when retrieving a view that has dbcopy. Note: this asserts the expected dbcopy location from Cloudant versions based on CouchDB >= 2.0 """ ddoc = DesignDocument(self.db, '_design/ddoc001') view_map = 'function (doc) {\n emit(doc._id, 1);\n}' view_reduce = '_count' db_copy = '{0}-copy'.format(self.db.database_name) ddoc.add_view('view002', view_map, view_reduce, dbcopy=db_copy) ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') self.assertNotEqual(ddoc_remote, ddoc) ddoc_remote.fetch() # The local ddoc will not contain the server plugin options # so we need to manipulate the equalities by removing # the options from remote. The remote ddoc won't contain # the dbcopy entry in the view dict so that needs to be removed # before comparison also. Compare the removed values with # the expected content in each case. 
self.assertEqual(db_copy, ddoc['views']['view002'].pop('dbcopy')) self.assertEqual({'epi': {'dbcopy': {'view002': db_copy}}, 'partitioned': False}, ddoc_remote.pop('options')) self.assertEqual({'partitioned': False}, ddoc.pop('options')) self.assertEqual(ddoc_remote, ddoc) self.assertTrue(ddoc_remote['_rev'].startswith('1-')) self.assertEqual(ddoc_remote, { '_id': '_design/ddoc001', '_rev': ddoc['_rev'], 'lists': {}, 'shows': {}, 'indexes': {}, 'views': { 'view002': {'map': view_map, 'reduce': view_reduce} } }) self.assertIsInstance(ddoc_remote['views']['view002'], View) def test_fetch_no_views(self): """ Ensure that the document fetched from the database returns the DesignDocument format as expected when retrieving a design document containing no views. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') ddoc_remote.fetch() self.assertEqual(set(ddoc_remote.keys()), {'_id', '_rev', 'indexes', 'views', 'options', 'lists', 'shows'}) self.assertEqual(ddoc_remote['_id'], '_design/ddoc001') self.assertTrue(ddoc_remote['_rev'].startswith('1-')) self.assertEqual(ddoc_remote['_rev'], ddoc['_rev']) self.assertEqual(ddoc_remote.views, {}) def test_fetch_query_views(self): """ Ensure that the document fetch from the database returns the DesignDocument format as expected when retrieving a design document containing query index views. """ # This is not the preferred way of dealing with query index # views but it works best for this test. 
data = { '_id': '_design/ddoc001', 'indexes': {}, 'options': {'partitioned': False}, 'lists': {}, 'shows': {}, 'language': 'query', 'views': { 'view001': {'map': {'fields': {'name': 'asc', 'age': 'asc'}}, 'reduce': '_count', 'options': {'def': {'fields': ['name', 'age']}, 'w': 2} } } } doc = self.db.create_document(data) self.assertIsInstance(doc, Document) data['_rev'] = doc['_rev'] ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.fetch() self.assertIsInstance(ddoc, DesignDocument) self.assertEqual(ddoc, data) self.assertIsInstance(ddoc['views']['view001'], QueryIndexView) def test_fetch_text_indexes(self): """ Ensure that the document fetch from the database returns the DesignDocument format as expected when retrieving a design document containing query index views. """ # This is not the preferred way of dealing with query index # views but it works best for this test. data = { '_id': '_design/ddoc001', 'language': 'query', 'options': {'partitioned': False}, 'lists': {}, 'shows': {}, 'indexes': {'index001': {'index': {'index_array_lengths': True, 'fields': [{'name': 'name', 'type': 'string'}, {'name': 'age', 'type': 'number'}], 'default_field': {'enabled': True, 'analyzer': 'german'}, 'default_analyzer': 'keyword', 'selector': {}}, 'analyzer': {'name': 'perfield', 'default': 'keyword', 'fields': {'$default': 'german'}}}}} doc = self.db.create_document(data) self.assertIsInstance(doc, Document) ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.fetch() self.assertIsInstance(ddoc, DesignDocument) data['_rev'] = doc['_rev'] data['views'] = dict() self.assertEqual(ddoc, data) self.assertIsInstance(ddoc['indexes']['index001'], dict) def test_fetch_text_indexes_and_query_views(self): """ Ensure that the document fetch from the database returns the DesignDocument format as expected when retrieving a design document containing query index views and text index definitions. 
""" # This is not the preferred way of dealing with query index # views but it works best for this test. data = { '_id': '_design/ddoc001', 'language': 'query', 'lists': {}, 'shows': {}, 'options': {'partitioned': False}, 'views': { 'view001': {'map': {'fields': {'name': 'asc', 'age': 'asc'}}, 'reduce': '_count', 'options': {'def': {'fields': ['name', 'age']}, 'w': 2} } }, 'indexes': {'index001': { 'index': {'index_array_lengths': True, 'fields': [{'name': 'name', 'type': 'string'}, {'name': 'age', 'type': 'number'}], 'default_field': {'enabled': True, 'analyzer': 'german'}, 'default_analyzer': 'keyword', 'selector': {}}, 'analyzer': {'name': 'perfield', 'default': 'keyword', 'fields': {'$default': 'german'}}}}} doc = self.db.create_document(data) self.assertIsInstance(doc, Document) data['_rev'] = doc['_rev'] ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.fetch() self.assertIsInstance(ddoc, DesignDocument) self.assertEqual(ddoc, data) self.assertIsInstance(ddoc['indexes']['index001'], dict) self.assertIsInstance(ddoc['views']['view001'], QueryIndexView) def test_text_index_save_fails_when_lang_is_not_query(self): """ Tests that save fails when language is not query and a search index string function is expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['indexes']['index001'] = { 'index': {'index_array_lengths': True, 'fields': [{'name': 'name', 'type': 'string'}, {'name': 'age', 'type': 'number'}], 'default_field': {'enabled': True, 'analyzer': 'german'}, 'default_analyzer': 'keyword', 'selector': {}}, 'analyzer': {'name': 'perfield','default': 'keyword', 'fields': {'$default': 'german'}}} self.assertIsInstance(ddoc['indexes']['index001']['index'], dict) with self.assertRaises(CloudantDesignDocumentException) as cm: ddoc.save() err = cm.exception self.assertEqual( str(err), 'Function for search index index001 must be of type string.' 
) def test_text_index_save_fails_with_existing_search_index(self): """ Tests that save fails when language is not query and both a query text index and a search index exist in the design document. """ ddoc = DesignDocument(self.db, '_design/ddoc001') search_index = ('function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}') ddoc.add_search_index('search001', search_index) self.assertIsInstance( ddoc['indexes']['search001']['index'], str ) ddoc.save() self.assertTrue(ddoc['_rev'].startswith('1-')) ddoc_remote = DesignDocument(self.db, '_design/ddoc001') ddoc_remote.fetch() ddoc_remote['indexes']['index001'] = { 'index': {'index_array_lengths': True, 'fields': [{'name': 'name', 'type': 'string'}, {'name': 'age', 'type': 'number'}], 'default_field': {'enabled': True, 'analyzer': 'german'}, 'default_analyzer': 'keyword', 'selector': {}}, 'analyzer': {'name': 'perfield','default': 'keyword', 'fields': {'$default': 'german'}}} self.assertIsInstance(ddoc_remote['indexes']['index001']['index'], dict) with self.assertRaises(CloudantDesignDocumentException) as cm: ddoc_remote.save() err = cm.exception self.assertEqual( str(err), 'Function for search index index001 must be of type string.' ) def test_mr_view_save_fails_when_lang_is_query(self): """ Tests that save fails when language is query but views are map reduce views. """ ddoc = DesignDocument(self.db, '_design/ddoc001') view_map = 'function (doc) {\n emit(doc._id, 1);\n}' view_reduce = '_count' db_copy = '{0}-copy'.format(self.db.database_name) ddoc.add_view('view001', view_map, view_reduce) ddoc['language'] = 'query' with self.assertRaises(CloudantDesignDocumentException) as cm: ddoc.save() err = cm.exception self.assertEqual( str(err), 'View view001 must be of type QueryIndexView.' ) def test_mr_view_save_succeeds(self): """ Tests that save succeeds when no language is specified and views are map reduce views. 
""" ddoc = DesignDocument(self.db, '_design/ddoc001') view_map = 'function (doc) {\n emit(doc._id, 1);\n}' view_reduce = '_count' db_copy = '{0}-copy'.format(self.db.database_name) ddoc.add_view('view001', view_map, view_reduce) ddoc.save() self.assertTrue(ddoc['_rev'].startswith('1-')) def test_query_view_save_fails_when_lang_is_not_query(self): """ Tests that save fails when language is not query but views are query index views. """ # This is not the preferred way of dealing with query index # views but it works best for this test. data = { '_id': '_design/ddoc001', 'language': 'query', 'views': { 'view001': {'map': {'fields': {'name': 'asc', 'age': 'asc'}}, 'reduce': '_count', 'options': {'def': {'fields': ['name', 'age']}, 'w': 2} } } } self.db.create_document(data) ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.fetch() with self.assertRaises(CloudantDesignDocumentException) as cm: ddoc['language'] = 'not-query' ddoc.save() err = cm.exception self.assertEqual(str(err), 'View view001 must be of type View.') with self.assertRaises(CloudantDesignDocumentException) as cm: del ddoc['language'] ddoc.save() err = cm.exception self.assertEqual(str(err), 'View view001 must be of type View.') def test_query_view_save_succeeds(self): """ Tests that save succeeds when language is query and views are query index views. """ # This is not the preferred way of dealing with query index # views but it works best for this test. data = { '_id': '_design/ddoc001', 'language': 'query', 'views': { 'view001': {'map': {'fields': {'name': 'asc', 'age': 'asc'}}, 'reduce': '_count', 'options': {'def': {'fields': ['name', 'age']}, 'w': 2} } } } self.db.create_document(data) ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.fetch() self.assertTrue(ddoc['_rev'].startswith('1-')) ddoc.save() self.assertTrue(ddoc['_rev'].startswith('2-')) def test_save_with_no_views(self): """ Tests the functionality when saving a design document without a view. 
The locally cached DesignDocument should contain an empty views dict while the design document saved remotely should not include the empty views sub-document. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.save() # Ensure that locally cached DesignDocument contains an # empty views dict. self.assertEqual(set(ddoc.keys()), {'_id', '_rev', 'indexes', 'options', 'views', 'lists', 'shows'}) self.assertEqual(ddoc['_id'], '_design/ddoc001') self.assertTrue(ddoc['_rev'].startswith('1-')) self.assertEqual(ddoc.views, {}) # Ensure that remotely saved design document does not # include a views sub-document. resp = self.client.r_session.get(ddoc.document_url) raw_ddoc = response_to_json_dict(resp) self.assertEqual(set(raw_ddoc.keys()), {'_id', '_rev','options'}) self.assertEqual(raw_ddoc['_id'], ddoc['_id']) self.assertEqual(raw_ddoc['_rev'], ddoc['_rev']) def test_setting_id(self): """ Ensure when setting the design document id that it is prefaced by '_design/' """ ddoc = DesignDocument(self.db) ddoc['_id'] = 'ddoc001' self.assertEqual(ddoc['_id'], '_design/ddoc001') del ddoc['_id'] self.assertIsNone(ddoc.get('_id')) ddoc['_id'] = '_design/ddoc002' self.assertEqual(ddoc['_id'], '_design/ddoc002') def test_iterating_over_views(self): """ Test iterating over views within the DesignDocument """ ddoc = DesignDocument(self.db, '_design/ddoc001') view_map = 'function (doc) {\n emit(doc._id, 1);\n}' ddoc.add_view('view001', view_map) ddoc.add_view('view002', view_map) ddoc.add_view('view003', view_map) view_names = [] for view_name, view in ddoc.iterviews(): self.assertIsInstance(view, View) view_names.append(view_name) self.assertTrue( all(x in view_names for x in ['view001', 'view002', 'view003']) ) def test_list_views(self): """ Test the retrieval of view name list from DesignDocument """ ddoc = DesignDocument(self.db, '_design/ddoc001') view_map = 'function (doc) {\n emit(doc._id, 1);\n}' ddoc.add_view('view001', view_map) ddoc.add_view('view002', view_map) 
ddoc.add_view('view003', view_map) self.assertTrue( all(x in ddoc.list_views() for x in [ 'view001', 'view002', 'view003' ]) ) def test_get_view(self): """ Test retrieval of a view from the DesignDocument """ ddoc = DesignDocument(self.db, '_design/ddoc001') view_map = 'function (doc) {\n emit(doc._id, 1);\n}' view_reduce = '_count' ddoc.add_view('view001', view_map) ddoc.add_view('view002', view_map, view_reduce) ddoc.add_view('view003', view_map) self.assertIsInstance(ddoc.get_view('view002'), View) self.assertEqual( ddoc.get_view('view002'), { 'map': 'function (doc) {\n emit(doc._id, 1);\n}', 'reduce': '_count' } ) def test_get_info(self): """ Test retrieval of info endpoint from the DesignDocument. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') ddoc_remote.fetch() info = ddoc_remote.info() # Remove variable fields to make equality easier to check info['view_index'].pop('signature') if 'disk_size' in info['view_index']: info['view_index'].pop('disk_size') if 'data_size' in info['view_index']: info['view_index'].pop('data_size') # Remove Cloudant/Couch 2 fields if present to allow test to pass on Couch 1.6 if 'sizes' in info['view_index']: info['view_index'].pop('sizes') if 'updates_pending' in info['view_index']: info['view_index'].pop('updates_pending') name = 'ddoc001' self.assertEqual( info, {'view_index': {'update_seq': 0, 'waiting_clients': 0, 'language': 'javascript', 'purge_seq': 0, 'compact_running': False, 'waiting_commit': False, 'updater_running': False }, 'name': name }) def test_get_info_raises_httperror(self): """ Test get_info raises an HTTPError. 
""" # Mock HTTPError when running against CouchDB and Cloudant resp = requests.Response() resp.status_code = 400 self.client.r_session.get = mock.Mock(return_value=resp) ddoc = DesignDocument(self.db, '_design/ddoc001') with self.assertRaises(requests.HTTPError) as cm: ddoc.info() err = cm.exception self.assertEqual(err.response.status_code, 400) self.client.r_session.get.assert_called_with( '/'.join([ddoc.document_url, '_info'])) @attr(db='cloudant') def test_get_search_info(self): """ Test retrieval of search_info endpoint from the DesignDocument. """ self.populate_db_with_documents(100) ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_search_index( 'search001', 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, {"store": true}); }\n}' ) ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') ddoc_remote.fetch() # Make a request to the search index to ensure it is built self.db.get_search_result('_design/ddoc001', 'search001', query='name:julia*') search_info = ddoc_remote.search_info('search001') # Check the search index name self.assertEqual(search_info['name'], '_design/ddoc001/search001', 'The search index name should be correct.') # Validate the metadata search_index_metadata = search_info['search_index'] self.assertIsNotNone(search_index_metadata) self.assertEqual(search_index_metadata['doc_del_count'], 0, 'There should be no deleted docs.') self.assertTrue(search_index_metadata['doc_count'] <= 100, 'There should be 100 or fewer docs.') self.assertEqual(search_index_metadata['committed_seq'], 0, 'The committed_seq should be 0.') self.assertTrue(search_index_metadata['pending_seq'] <= 101, 'The pending_seq should be 101 or fewer.') self.assertTrue(search_index_metadata['disk_size'] >0, 'The disk_size should be greater than 0.') @attr(db='cloudant') def test_get_search_disk_size(self): """ Test retrieval of search_disk_size endpoint from the DesignDocument. 
""" self.populate_db_with_documents(100) ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_search_index( 'search001', 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, {"store": true}); }\n}' ) ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') ddoc_remote.fetch() # Make a request to the search index to ensure it is built self.db.get_search_result('_design/ddoc001', 'search001', query='name:julia*') search_disk_size = ddoc_remote.search_disk_size('search001') self.assertEqual( sorted(search_disk_size.keys()), ['name', 'search_index'], 'The search disk size should contain only keys "name" and "search_index"') self.assertEqual( search_disk_size['name'], '_design/ddoc001/search001', 'The search index "name" should be correct.') self.assertEqual( sorted(search_disk_size['search_index'].keys()), ['disk_size'], 'The search index should contain only key "disk_size"') self.assertTrue( isinstance(search_disk_size['search_index']['disk_size'], int), 'The "disk_size" value should be an integer.') self.assertTrue( search_disk_size['search_index']['disk_size'] > 0, 'The "disk_size" should be greater than 0.') @attr(db='cloudant') def test_get_search_info_raises_httperror(self): """ Test get_search_info raises an HTTPError. """ # Mock HTTPError when running against Cloudant search_index = 'search001' resp = requests.Response() resp.status_code = 400 self.client.r_session.get = mock.Mock(return_value=resp) ddoc = DesignDocument(self.db, '_design/ddoc001') with self.assertRaises(requests.HTTPError) as cm: ddoc.search_info(search_index) err = cm.exception self.assertEqual(err.response.status_code, 400) self.client.r_session.get.assert_called_with( '/'.join([ddoc.document_url, '_search_info', search_index])) def test_add_a_search_index(self): """ Test that adding a search index adds a search index object to the DesignDocument dictionary. 
""" ddoc = DesignDocument(self.db, '_design/ddoc001') self.assertEqual(ddoc.get('indexes'), {}) ddoc.add_search_index( 'search001', 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, {"store": true}); }\n}' ) self.assertListEqual(list(ddoc.get('indexes').keys()), ['search001']) self.assertEqual( ddoc.get('indexes')['search001'], {'index': 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, {"store": true}); }\n}'} ) def test_add_a_search_index_with_analyzer(self): """ Test that adding a search index with an analyzer adds a search index object to the DesignDocument dictionary. """ ddoc = DesignDocument(self.db, '_design/ddoc001') self.assertEqual(ddoc.get('indexes'), {}) ddoc.add_search_index( 'search001', 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, {"store": true}); }\n}', {'name': 'perfield', 'default': 'english', 'fields': { 'spanish': 'spanish'}} ) self.assertListEqual(list(ddoc.get('indexes').keys()), ['search001']) self.assertEqual( ddoc.get('indexes')['search001'], {'index': 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, {"store": true}); }\n}', 'analyzer': {"name": "perfield", "default": "english", "fields": {"spanish": "spanish"}}} ) def test_adding_existing_search_index(self): """ Test that adding an existing search index fails as expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_search_index( 'search001', 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, {"store": true}); }\n}', ) with self.assertRaises(CloudantArgumentError) as cm: ddoc.add_search_index( 'search001', 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}' ) err = cm.exception self.assertEqual( str(err), 'An index with name search001 already exists in this design doc.' 
) def test_update_a_search_index(self): """ Test that updating a search index updates the contents of the correct search index object in the DesignDocument dictionary. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_search_index('search001', 'not-a-valid-search-index') self.assertEqual( ddoc.get('indexes')['search001'], {'index': 'not-a-valid-search-index'} ) ddoc.update_search_index( 'search001', 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, {"store": true}); }\n}', ) self.assertEqual( ddoc.get('indexes')['search001'], {'index': 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}'} ) def test_update_a_search_index_with_analyzer(self): """ Test that updating a search analyzer updates the contents of the correct search index object in the DesignDocument dictionary. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_search_index('search001', 'not-a-valid-search-index', 'email') self.assertEqual( ddoc.get('indexes')['search001'], {'index': 'not-a-valid-search-index', 'analyzer': 'email'} ) ddoc.update_search_index( 'search001', 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, {"store": true}); }\n}', 'simple' ) self.assertEqual( ddoc.get('indexes')['search001'], {'index': 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}', 'analyzer': 'simple' } ) def test_update_non_existing_search_index(self): """ Test that updating a non-existing search index fails as expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') with self.assertRaises(CloudantArgumentError) as cm: ddoc.update_search_index( 'search001', 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}' ) err = cm.exception self.assertEqual( str(err), 'An index with name search001 does not exist in this design doc.' 
) def test_delete_a_search_index(self): """ Test deleting a search index from the DesignDocument dictionary. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_search_index( 'search001', 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}' ) self.assertEqual( ddoc.get('indexes')['search001'], {'index': 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}'} ) ddoc.delete_index('search001') self.assertEqual(ddoc.get('indexes'), {}) def test_fetch_search_index(self): """ Ensure that the document fetch from the database returns the DesignDocument format as expected when retrieving a design document containing search indexes. """ ddoc = DesignDocument(self.db, '_design/ddoc001') search_index = ('function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n} ') ddoc.add_search_index('search001', search_index) ddoc.add_search_index('search002', search_index, 'simple') ddoc.add_search_index('search003', search_index, 'standard') ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') self.assertNotEqual(ddoc_remote, ddoc) ddoc_remote.fetch() self.assertEqual(ddoc_remote, ddoc) self.assertTrue(ddoc_remote['_rev'].startswith('1-')) self.assertEqual(ddoc_remote, { '_id': '_design/ddoc001', '_rev': ddoc['_rev'], 'options': {'partitioned': False}, 'indexes': { 'search001': {'index': search_index}, 'search002': {'index': search_index, 'analyzer': 'simple'}, 'search003': {'index': search_index, 'analyzer': 'standard'} }, 'views': {}, 'lists': {}, 'shows': {} }) def test_fetch_no_search_index(self): """ Ensure that the document fetched from the database returns the DesignDocument format as expected when retrieving a design document containing no search indexes. 
The :func:`~cloudant.design_document.DesignDocument.fetch` function adds the ``indexes`` key in the locally cached DesignDocument if indexes do not exist in the remote design document. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') ddoc_remote.fetch() self.assertEqual(set(ddoc_remote.keys()), {'_id', '_rev', 'indexes', 'options', 'views', 'lists', 'shows'}) self.assertEqual(ddoc_remote['_id'], '_design/ddoc001') self.assertTrue(ddoc_remote['_rev'].startswith('1-')) self.assertEqual(ddoc_remote['_rev'], ddoc['_rev']) self.assertEqual(ddoc_remote.indexes, {}) def test_search_index_save_fails_when_lang_is_query(self): """ Tests that save fails when language is query and a text index dict definition is expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['language'] = 'query' ddoc['indexes']['search001'] = { 'index': 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}', 'analyzer': 'standard'} self.assertIsInstance(ddoc['indexes']['search001']['index'], str) with self.assertRaises(CloudantDesignDocumentException) as cm: ddoc.save() err = cm.exception self.assertEqual( str(err), 'Definition for query text index search001 must be of type dict.' ) def test_search_index_save_fails_with_existing_text_index(self): """ Tests that save fails when language is query and both a search index and a text index exist in the design document. 
""" ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['language'] = 'query' ddoc['indexes']['index001'] = { 'index': {'index_array_lengths': True, 'fields': [{'name': 'name', 'type': 'string'}, {'name': 'age', 'type': 'number'}], 'default_field': {'enabled': True, 'analyzer': 'german'}, 'default_analyzer': 'keyword', 'selector': {}}, 'analyzer': {'name': 'perfield','default': 'keyword', 'fields': {'$default': 'german'}}} ddoc.save() self.assertTrue(ddoc['_rev'].startswith('1-')) search_index = ('function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}') ddoc.add_search_index('search001', search_index) self.assertIsInstance( ddoc['indexes']['search001']['index'], str ) with self.assertRaises(CloudantDesignDocumentException) as cm: ddoc.save() err = cm.exception self.assertEqual( str(err), 'Definition for query text index search001 must be of type dict.' ) def test_search_index_save_succeeds(self): """ Tests that save succeeds when no language is specified for search indexes. """ ddoc = DesignDocument(self.db, '_design/ddoc001') search_index = ('function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}') ddoc.add_search_index('search001', search_index) ddoc.save() self.assertTrue(ddoc['_rev'].startswith('1-')) def test_save_with_no_search_indexes(self): """ Tests the functionality when saving a design document without a search index. Both the locally cached and remote DesignDocument should not include the empty indexes sub-document. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.save() # Ensure that locally cached DesignDocument contains an # empty search indexes and views dict. 
self.assertEqual(set(ddoc.keys()), {'_id', '_rev', 'indexes','options', 'views', 'lists', 'shows'}) self.assertEqual(ddoc['_id'], '_design/ddoc001') self.assertTrue(ddoc['_rev'].startswith('1-')) # Ensure that remotely saved design document does not # include a search indexes sub-document. resp = self.client.r_session.get(ddoc.document_url) raw_ddoc = response_to_json_dict(resp) self.assertEqual(set(raw_ddoc.keys()), {'_id', '_rev','options'}) self.assertEqual(raw_ddoc['_id'], ddoc['_id']) self.assertEqual(raw_ddoc['_rev'], ddoc['_rev']) def test_iterating_over_search_indexes(self): """ Test iterating over search indexes within the DesignDocument. """ ddoc = DesignDocument(self.db, '_design/ddoc001') search_index = ('function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}') ddoc.add_search_index('search001', search_index) ddoc.add_search_index('search002', search_index) ddoc.add_search_index('search003', search_index) search_index_names = [] for search_index_name, search_index in ddoc.iterindexes(): search_index_names.append(search_index_name) self.assertTrue( all(x in search_index_names for x in ['search001', 'search002', 'search003']) ) def test_list_search_indexes(self): """ Test the retrieval of search index name list from DesignDocument. """ ddoc = DesignDocument(self.db, '_design/ddoc001') index = 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}' ddoc.add_search_index('search001', index) ddoc.add_search_index('search002', index) ddoc.add_search_index('search003', index) self.assertTrue( all(x in ddoc.list_indexes() for x in [ 'search001', 'search002', 'search003' ]) ) def test_get_search_index(self): """ Test retrieval of a search index from the DesignDocument. 
""" ddoc = DesignDocument(self.db, '_design/ddoc001') index = ('function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}') ddoc.add_search_index('search001', index) ddoc.add_search_index('search002', index) ddoc.add_search_index('search003', index) self.assertEqual( ddoc.get_index('search002'), {'index': 'function (doc) {\n index("default", doc._id); ' 'if (doc._id) {index("name", doc.name, ' '{"store": true}); }\n}'} ) @skip_if_iam def test_rewrite_rule(self): """ Test that design document URL is rewritten to the expected test document. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['rewrites'] = [ {"from": "", "to": "/../../rewrite_doc", "method": "GET", "query": {} } ] self.assertIsInstance(ddoc.rewrites, list) self.assertIsInstance(ddoc.rewrites[0], dict) ddoc.save() doc = Document(self.db, 'rewrite_doc') doc.save() resp = self.client.r_session.get('/'.join([ddoc.document_url, '_rewrite'])) self.assertEqual( response_to_json_dict(resp), { '_id': 'rewrite_doc', '_rev': doc['_rev'] } ) def test_add_a_list_function(self): """ Test that adding a list function adds a list object to the DesignDocument dictionary. """ ddoc = DesignDocument(self.db, '_design/ddoc001') self.assertEqual(ddoc.get('lists'), {}) ddoc.add_list_function( 'list001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) self.assertListEqual(list(ddoc.get('lists').keys()), ['list001']) self.assertEqual( ddoc.get('lists'), {'list001': 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }'} ) def test_adding_existing_list_function(self): """ Test that adding an existing list function fails as expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_list_function( 'list001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) with self.assertRaises(CloudantArgumentError) as cm: ddoc.add_list_function( 'list001', 'function (doc) { existing list }' ) err = cm.exception self.assertEqual( str(err), 'A list with name list001 already exists in this design doc.' ) def test_update_a_list_function(self): """ Test that updating a list function updates the contents of the correct list object in the DesignDocument dictionary. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_list_function('list001', 'not-a-valid-list-function') self.assertEqual( ddoc.get('lists')['list001'], 'not-a-valid-list-function' ) ddoc.update_list_function( 'list001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) self.assertEqual( ddoc.get('lists')['list001'], 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) def test_update_non_existing_list_function(self): """ Test that updating a non-existing list function fails as expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') with self.assertRaises(CloudantArgumentError) as cm: ddoc.update_list_function( 'list001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) err = cm.exception self.assertEqual( str(err), 'A list with name list001 does not exist in this design doc.' ) def test_delete_a_list_function(self): """ Test deleting a list function from the DesignDocument dictionary. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_list_function( 'list001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) self.assertEqual( ddoc.get('lists')['list001'], 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) ddoc.delete_list_function('list001') self.assertEqual(ddoc.get('lists'), {}) def test_fetch_list_functions(self): """ Ensure that the document fetch from the database returns the DesignDocument format as expected when retrieving a design document containing list functions. """ ddoc = DesignDocument(self.db, '_design/ddoc001') list_func = ('function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }') ddoc.add_list_function('list001', list_func) ddoc.add_list_function('list002', list_func) ddoc.add_list_function('list003', list_func) ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') self.assertNotEqual(ddoc_remote, ddoc) ddoc_remote.fetch() self.assertEqual(ddoc_remote, ddoc) self.assertTrue(ddoc_remote['_rev'].startswith('1-')) self.assertEqual(ddoc_remote, { '_id': '_design/ddoc001', '_rev': ddoc['_rev'], 'options': {'partitioned': False}, 'lists': { 'list001': list_func, 'list002': list_func, 'list003': list_func }, 'shows': {}, 'indexes': {}, 'views': {} }) def test_fetch_no_list_functions(self): """ Ensure that the document fetched from the database returns the DesignDocument format as expected when retrieving a design document containing no list functions. The :func:`~cloudant.design_document.DesignDocument.fetch` function adds the ``lists`` key in the locally cached DesignDocument if list functions do not exist in the remote design document. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') ddoc_remote.fetch() self.assertEqual(set(ddoc_remote.keys()), {'_id', '_rev', 'options', 'indexes', 'views', 'lists', 'shows'}) self.assertEqual(ddoc_remote['_id'], '_design/ddoc001') self.assertTrue(ddoc_remote['_rev'].startswith('1-')) self.assertEqual(ddoc_remote['_rev'], ddoc['_rev']) self.assertEqual(ddoc_remote.lists, {}) def test_save_with_no_list_functions(self): """ Tests the functionality when saving a design document without list functions. Both the locally cached and remote DesignDocument should not include the empty lists sub-document. 
""" ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.save() # Ensure that locally cached DesignDocument contains lists dict self.assertEqual(set(ddoc.keys()), {'_id', '_rev', 'lists', 'options', 'shows', 'indexes', 'views'}) self.assertEqual(ddoc['_id'], '_design/ddoc001') self.assertTrue(ddoc['_rev'].startswith('1-')) # Ensure that remotely saved design document does not # include a lists sub-document. resp = self.client.r_session.get(ddoc.document_url) raw_ddoc = response_to_json_dict(resp) self.assertEqual(set(raw_ddoc.keys()), {'_id', '_rev','options'}) self.assertEqual(raw_ddoc['_id'], ddoc['_id']) self.assertEqual(raw_ddoc['_rev'], ddoc['_rev']) def test_iterating_over_list_functions(self): """ Test iterating over list functions within the DesignDocument. """ ddoc = DesignDocument(self.db, '_design/ddoc001') list_func = ('function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }') ddoc.add_list_function('list001', list_func) ddoc.add_list_function('list002', list_func) ddoc.add_list_function('list003', list_func) list_names = [] for list_name, list_func in ddoc.iterlists(): list_names.append(list_name) self.assertTrue( all(x in list_names for x in ['list001', 'list002', 'list003']) ) def test_listing_list_functions(self): """ Test retrieving a list of list function names from DesignDocument. """ ddoc = DesignDocument(self.db, '_design/ddoc001') list_func = ('function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }') ddoc.add_list_function('list001', list_func) ddoc.add_list_function('list002', list_func) ddoc.add_list_function('list003', list_func) self.assertTrue( all(x in ddoc.list_list_functions() for x in [ 'list001', 'list002', 'list003' ]) ) def test_get_list_function(self): """ Test retrieval of a list function from the DesignDocument. """ ddoc = DesignDocument(self.db, '_design/ddoc001') list_func = ('function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }') ddoc.add_list_function('list001', list_func) ddoc.add_list_function('list002', list_func) ddoc.add_list_function('list003', list_func) self.assertEqual( ddoc.get_list_function('list002'), 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) @attr(db='cloudant') def test_geospatial_index(self): """ Test retrieval and query of Cloudant Geo indexes from the DesignDocument. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['st_indexes'] = { 'geoidx': { 'index': 'function(doc) { ' 'if (doc.geometry && doc.geometry.coordinates) { ' 'st_index(doc.geometry);}} ' } } ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') self.assertNotEqual(ddoc_remote, ddoc) ddoc_remote.fetch() self.assertEqual(ddoc_remote, { '_id': '_design/ddoc001', '_rev': ddoc['_rev'], 'st_indexes': ddoc['st_indexes'], 'indexes': {}, 'views': {}, 'lists': {}, 'shows': {}, 'options': {'partitioned': False} }) # Document with geospatial point geodoc = Document(self.db, 'doc001') geodoc['type'] = 'Feature' geodoc['geometry'] = { "type": "Point", "coordinates": [ -71.1, 42.3 ] } geodoc.save() # Geospatial query for a well known point geo_result = self.client.r_session.get('/'.join([ddoc_remote.document_url, '_geo', 'geoidx?g=point(-71.1%2042.3)'])).json() self.assertIsNotNone(geo_result['bookmark']) geo_result.pop('bookmark') rows = geo_result.pop('rows') self.assertEqual(1, len(rows), "There should be 1 row.") row = rows[0] # Remove the rev before comparison row.pop('rev') self.assertEqual(row, {'id': 'doc001', 'geometry': {'type': 'Point', 'coordinates': [-71.1, 42.3]}}) def test_add_a_show_function(self): """ Test that adding a show function adds a show object to the DesignDocument dictionary. """ ddoc = DesignDocument(self.db, '_design/ddoc001') self.assertEqual(ddoc.get('shows'), {}) ddoc.add_show_function( 'show001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) self.assertListEqual(list(ddoc.get('shows').keys()), ['show001']) self.assertEqual( ddoc.get('shows'), {'show001': 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }'} ) def test_adding_existing_show_functions(self): """ Test that adding an existing show function fails as expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_show_function( 'show001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) with self.assertRaises(CloudantArgumentError) as cm: ddoc.add_show_function( 'show001', 'function (doc) { existing show function }' ) err = cm.exception self.assertEqual( str(err), 'A show function with name show001 already exists in this design doc.' ) def test_update_a_show_function(self): """ Test that updating a show function updates the contents of the correct show object in the DesignDocument dictionary. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_show_function('show001', 'not-a-valid-show-function') self.assertEqual( ddoc.get('shows')['show001'], 'not-a-valid-show-function' ) ddoc.update_show_function( 'show001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) self.assertEqual( ddoc.get('shows')['show001'], 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) def test_update_non_existing_show_function(self): """ Test that updating a non-existing show function fails as expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') with self.assertRaises(CloudantArgumentError) as cm: ddoc.update_show_function( 'show001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) err = cm.exception self.assertEqual( str(err), 'A show function with name show001 does not exist in this design doc.' ) def test_delete_a_show_function(self): """ Test deleting a show function from the DesignDocument dictionary. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.add_show_function( 'show001', 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) self.assertEqual( ddoc.get('shows')['show001'], 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) ddoc.delete_show_function('show001') self.assertEqual(ddoc.get('shows'), {}) def test_fetch_show_functions(self): """ Ensure that the document fetch from the database returns the DesignDocument format as expected when retrieving a design document containing show functions. """ ddoc = DesignDocument(self.db, '_design/ddoc001') show_func = ('function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }') ddoc.add_show_function('show001', show_func) ddoc.add_show_function('show002', show_func) ddoc.add_show_function('show003', show_func) ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') self.assertNotEqual(ddoc_remote, ddoc) ddoc_remote.fetch() self.assertEqual(ddoc_remote, ddoc) self.assertTrue(ddoc_remote['_rev'].startswith('1-')) self.assertEqual(ddoc_remote, { '_id': '_design/ddoc001', '_rev': ddoc['_rev'], 'options': {'partitioned': False}, 'lists': {}, 'shows': { 'show001': show_func, 'show002': show_func, 'show003': show_func }, 'indexes': {}, 'views': {} }) def test_fetch_no_show_functions(self): """ Ensure that the document fetched from the database returns the DesignDocument format as expected when retrieving a design document containing no show functions. The :func:`~cloudant.design_document.DesignDocument.fetch` function adds the ``shows`` key in the locally cached DesignDocument if show functions do not exist in the remote design document. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.save() ddoc_remote = DesignDocument(self.db, '_design/ddoc001') ddoc_remote.fetch() self.assertEqual(set(ddoc_remote.keys()), {'_id', '_rev', 'indexes', 'options', 'views', 'lists', 'shows'}) self.assertEqual(ddoc_remote['_id'], '_design/ddoc001') self.assertTrue(ddoc_remote['_rev'].startswith('1-')) self.assertEqual(ddoc_remote['_rev'], ddoc['_rev']) self.assertEqual(ddoc_remote.shows, {}) def test_save_with_no_show_functions(self): """ Tests the functionality when saving a design document without a show function. Both the locally cached and remote DesignDocument should not include the empty shows sub-document. 
""" ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc.save() # Ensure that locally cached DesignDocument contains shows dict self.assertEqual(set(ddoc.keys()), {'_id', '_rev', 'lists','options', 'shows', 'indexes', 'views'}) self.assertEqual(ddoc['_id'], '_design/ddoc001') self.assertTrue(ddoc['_rev'].startswith('1-')) # Ensure that remotely saved design document does not # include a shows sub-document. resp = self.client.r_session.get(ddoc.document_url) raw_ddoc = response_to_json_dict(resp) self.assertEqual(set(raw_ddoc.keys()), {'_id', '_rev','options'}) self.assertEqual(raw_ddoc['_id'], ddoc['_id']) self.assertEqual(raw_ddoc['_rev'], ddoc['_rev']) def test_iterating_over_show_functions(self): """ Test iterating over show functions within the DesignDocument. """ ddoc = DesignDocument(self.db, '_design/ddoc001') show_func = ('function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }') ddoc.add_show_function('show001', show_func) ddoc.add_show_function('show002', show_func) ddoc.add_show_function('show003', show_func) show_names = [] for show_name, show_func in ddoc.itershows(): show_names.append(show_name) self.assertTrue( all(x in show_names for x in ['show001', 'show002', 'show003']) ) def test_listing_show_functions(self): """ Test the retrieval of show functions list from DesignDocument. """ ddoc = DesignDocument(self.db, '_design/ddoc001') show_func = ('function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }') ddoc.add_show_function('show001', show_func) ddoc.add_show_function('show002', show_func) ddoc.add_show_function('show003', show_func) self.assertTrue( all(x in ddoc.list_show_functions() for x in [ 'show001', 'show002', 'show003' ]) ) def test_get_show_function(self): """ Test retrieval of a show function from the DesignDocument. """ ddoc = DesignDocument(self.db, '_design/ddoc001') show_func = ('function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }') ddoc.add_show_function('show001', show_func) ddoc.add_show_function('show002', show_func) ddoc.add_show_function('show003', show_func) self.assertEqual( ddoc.get_show_function('show002'), 'function(head, req) { provides(\'html\', function() ' '{var html = \'
    \\n\'; while (row = getRow()) ' '{ html += \'
  1. \' + row.key + \':\' + row.value + \'
  2. \\n\';} ' 'html += \'
\'; return html; }); }' ) def test_update_validator(self): """ Test that update validator requires an address key for a new document. """ ddoc = DesignDocument(self.db, '_design/ddoc001') ddoc['validate_doc_update'] = ( 'function(newDoc, oldDoc, userCtx, secObj) { ' 'if (newDoc.address === undefined) { ' 'throw({forbidden: \'Document must have an address.\'}); }}') ddoc.save() headers = {'Content-Type': 'application/json'} resp = self.client.r_session.post( self.db.database_url, headers=headers, data=json.dumps({'_id': 'test001'}) ) self.assertEqual( response_to_json_dict(resp), {'reason': 'Document must have an address.', 'error': 'forbidden'} ) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/document_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import cloudant """ _document_tests_ document module - Unit tests for the Document class See configuration options for environment variables in unit_t_db_base module docstring. """ import inspect import json import os import unittest import uuid from datetime import datetime import mock import requests from cloudant.document import Document from cloudant.error import CloudantDocumentException from nose.plugins.attrib import attr from .unit_t_db_base import UnitTestDbBase from .. 
import StringIO, unicode_


def find_fixture(name):
    """
    Return the absolute path of fixture file *name* located in the
    tests/unit/fixtures package directory.
    """
    import tests.unit.fixtures as fixtures
    # Resolve the fixtures package location at runtime so the path works
    # regardless of the current working directory.
    dirname = os.path.dirname(inspect.getsourcefile(fixtures))
    filename = os.path.join(dirname, name)
    return filename


class CloudantDocumentExceptionTests(unittest.TestCase):
    """
    Ensure CloudantDocumentException functions as expected.
    """

    def test_raise_without_code(self):
        """
        Ensure that a default exception/code is used if none is provided.
        """
        with self.assertRaises(CloudantDocumentException) as cm:
            raise CloudantDocumentException()
        # 100 is the default/unknown-error status code.
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_using_invalid_code(self):
        """
        Ensure that a default exception/code is used if invalid code is
        provided.
        """
        with self.assertRaises(CloudantDocumentException) as cm:
            raise CloudantDocumentException('foo')
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_without_args(self):
        """
        Ensure that a default exception/code is used if the message
        requested by the code provided requires an argument list and none
        is provided.
        """
        # Code 102's message template needs an argument; omitting it must
        # fall back to the default code 100.
        with self.assertRaises(CloudantDocumentException) as cm:
            raise CloudantDocumentException(102)
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_with_proper_code_and_args(self):
        """
        Ensure that the requested exception is raised.
""" with self.assertRaises(CloudantDocumentException) as cm: raise CloudantDocumentException(102, 'foo') self.assertEqual(cm.exception.status_code, 102) @attr(db=['cloudant','couch']) class DocumentTests(UnitTestDbBase): """ Document unit tests """ def setUp(self): """ Set up test attributes """ super(DocumentTests, self).setUp() self.db_set_up() def tearDown(self): """ Reset test attributes """ self.db_tear_down() super(DocumentTests, self).tearDown() def test_constructor_with_docid(self): """ Test instantiating a Document providing an id """ doc = Document(self.db, 'julia006') self.assertIsInstance(doc, Document) self.assertEqual(doc.r_session, self.db.r_session) self.assertEqual(doc.get('_id'), 'julia006') def test_document_url(self): """ Test that the document url is populated correctly """ doc = Document(self.db, 'julia006') self.assertEqual( doc.document_url, '/'.join((self.db.database_url, 'julia006')) ) def test_document_url_encodes_correctly(self): """ Test that the document url is populated and encoded correctly """ doc = Document(self.db, 'http://example.com') self.assertEqual( doc.document_url, '/'.join((self.db.database_url, 'http%3A%2F%2Fexample.com')) ) def test_design_document_url(self): """ Test that the document url is populated correctly when a design document id is provided. 
""" doc = Document(self.db, '_design/ddoc001') self.assertEqual( doc.document_url, '/'.join((self.db.database_url, '_design/ddoc001')) ) def test_design_document_url_encodes_correctly(self): """ Test that the document url is populated and encoded correctly """ doc = Document(self.db, '_design/http://example.com') self.assertEqual( doc.document_url, '/'.join((self.db.database_url, '_design/http%3A%2F%2Fexample.com')) ) def test_constructor_without_docid(self): """ Test instantiating a Document without providing an id """ doc = Document(self.db) self.assertIsInstance(doc, Document) self.assertEqual(doc.r_session, self.db.r_session) self.assertIsNone(doc.get('_id')) self.assertIsNone(doc.document_url) def test_document_exists(self): """ Tests that the result of True is expected when the document exists, and False is expected when the document is nonexistent remotely. """ doc = Document(self.db) self.assertFalse(doc.exists()) doc['_id'] = 'julia006' self.assertFalse(doc.exists()) doc.create() self.assertTrue(doc.exists()) def test_document_exists_raises_httperror(self): """ Test document exists raises an HTTPError. 
""" # Mock HTTPError when running against CouchDB and Cloudant resp = requests.Response() resp.status_code = 400 self.client.r_session.head = mock.Mock(return_value=resp) doc = Document(self.db) doc['_id'] = 'julia006' with self.assertRaises(requests.HTTPError) as cm: doc.exists() err = cm.exception self.assertEqual(err.response.status_code, 400) self.client.r_session.head.assert_called_with(doc.document_url) def test_retrieve_document_json(self): """ Test the document dictionary renders as json appropriately """ doc = Document(self.db) doc['_id'] = 'julia006' doc['name'] = 'julia' doc['age'] = 6 doc_as_json = doc.json() self.assertIsInstance(doc_as_json, str) self.assertEqual(json.loads(doc_as_json), doc) def test_create_document_with_docid(self): """ Test creating a document providing an id """ doc = Document(self.db, 'julia006') doc['name'] = 'julia' doc['age'] = 6 self.assertFalse(doc.exists()) self.assertIsNone(doc.get('_rev')) doc.create() self.assertTrue(doc.exists()) self.assertTrue(doc.get('_rev').startswith('1-')) def test_create_document_with_docid_encoded_url(self): """ Test creating a document providing an id that has an encoded url """ doc = Document(self.db, 'http://example.com') doc['name'] = 'julia' doc['age'] = 6 self.assertFalse(doc.exists()) self.assertIsNone(doc.get('_rev')) doc.create() self.assertTrue(doc.exists()) self.assertTrue(doc.get('_rev').startswith('1-')) def test_create_document_without_docid(self): """ Test creating a document remotely without providing an id """ doc = Document(self.db) doc['name'] = 'julia' doc['age'] = 6 self.assertFalse(doc.exists()) self.assertIsNone(doc.get('_id')) self.assertIsNone(doc.get('_rev')) doc.create() self.assertTrue(doc.exists()) self.assertIsNotNone(doc.get('_id')) self.assertTrue(doc.get('_rev').startswith('1-')) def test_create_existing_document(self): """ Test creating an already existing document """ doc = Document(self.db, 'julia006') doc.create() with self.assertRaises(requests.HTTPError) as 
cm: doc.create() err = cm.exception self.assertEqual( err.response.status_code, 409 ) def test_fetch_document_without_docid(self): """ Test fetching document content with no id provided """ doc = Document(self.db) try: doc.fetch() self.fail('Above statement should raise an Exception') except CloudantDocumentException as err: self.assertEqual( str(err), 'A document id is required to fetch document contents. ' 'Add an _id key and value to the document and re-try.' ) def test_fetch_non_existing_document(self): """ Test fetching document content from a non-existing document """ doc = Document(self.db, 'julia006') try: doc.fetch() self.fail('Above statement should raise an Exception') except requests.HTTPError as err: self.assertEqual(err.response.status_code, 404) def test_fetch_existing_document_with_docid(self): """ Test fetching document content from an existing document """ doc = Document(self.db, 'julia006') doc['name'] = 'julia' doc['age'] = 6 doc.create() new_doc = Document(self.db, 'julia006') new_doc.fetch() self.assertEqual(new_doc, doc) def test_appended_error_message_using_save_with_invalid_key(self): """ Test that saving a document with an invalid remote key will throw an HTTPError with additional error details from util method append_response_error_content. 
""" # First create the document doc = Document(self.db, 'julia006') # Add an invalid key and try to save document doc['_invalid_key'] = 'jules' with self.assertRaises(requests.HTTPError) as cm: doc.save() err = cm.exception # Should be a 400 error code, but CouchDB 1.6 issues a 500 if err.response.status_code == 500: # Check this is CouchDB 1.x self.assertTrue(self.client.r_session.head(self.url).headers['Server'].find('CouchDB/1.') >= 0, '500 returned but was not CouchDB 1.x') self.assertEqual( str(err.response.reason), 'Internal Server Error doc_validation Bad special document member: _invalid_key' ) else: self.assertEqual( str(err.response.reason), 'Bad Request doc_validation Bad special document member: _invalid_key' ) self.assertEqual( err.response.status_code, 400 ) def test_fetch_existing_document_with_docid_encoded_url(self): """ Test fetching document content from an existing document where the document id requires an encoded url """ doc = Document(self.db, 'http://example.com') doc['name'] = 'julia' doc['age'] = 6 doc.create() new_doc = Document(self.db, 'http://example.com') new_doc.fetch() self.assertEqual(new_doc, doc) def test_create_document_using_save(self): """ Test that save functionality works. If a document does not exist remotely then create it. """ doc = Document(self.db, 'julia006') doc['name'] = 'julia' doc['age'] = 6 self.assertIsNone(doc.get('_rev')) doc.save() self.assertTrue(doc.exists()) self.assertTrue(doc['_rev'].startswith('1-')) remote_doc = Document(self.db, 'julia006') remote_doc.fetch() self.assertEqual(remote_doc, doc) def test_update_document_using_save(self): """ Test that save functionality works. If a document exists remotely then update it. 
""" # First create the document doc = Document(self.db, 'julia006') doc['name'] = 'julia' doc['age'] = 6 doc.save() # Now test that the document gets updated doc['name'] = 'jules' doc.save() self.assertTrue(doc['_rev'].startswith('2-')) remote_doc = Document(self.db, 'julia006') remote_doc.fetch() self.assertEqual(remote_doc, doc) self.assertEqual(remote_doc['name'], 'jules') def test_update_document_with_encoded_url(self): """ Test that updating a document where the document id requires that the document url be encoded is successful. """ # First create the document doc = Document(self.db, 'http://example.com') doc['name'] = 'julia' doc['age'] = 6 doc.save() # Now test that the document gets updated doc['name'] = 'jules' doc.save() self.assertTrue(doc['_rev'].startswith('2-')) remote_doc = Document(self.db, 'http://example.com') remote_doc.fetch() self.assertEqual(remote_doc, doc) self.assertEqual(remote_doc['name'], 'jules') def test_list_field_append_successfully(self): """ Test the static helper method to successfully append to a list field. """ doc = Document(self.db) self.assertEqual(doc, {}) doc.list_field_append(doc, 'pets', 'cat') self.assertEqual(doc, {'pets': ['cat']}) doc.list_field_append(doc, 'pets', 'dog') self.assertEqual(doc, {'pets': ['cat', 'dog']}) doc.list_field_append(doc, 'pets', None) self.assertEqual(doc, {'pets': ['cat', 'dog']}) def test_list_field_append_failure(self): """ Test the static helper method to append to a list field errors as expected. """ doc = Document(self.db) doc.field_set(doc, 'name', 'julia') try: doc.list_field_append(doc, 'name', 'isabel') self.fail('Above statement should raise an Exception') except CloudantDocumentException as err: self.assertEqual(str(err), 'The field name is not a list.') self.assertEqual(doc, {'name': 'julia'}) def test_list_field_remove_successfully(self): """ Test the static helper method to successfully remove from a list field. 
        """
        doc = Document(self.db)
        self.assertEqual(doc, {})
        doc.list_field_append(doc, 'pets', 'cat')
        doc.list_field_append(doc, 'pets', 'dog')
        self.assertEqual(doc, {'pets': ['cat', 'dog']})
        doc.list_field_remove(doc, 'pets', 'dog')
        self.assertEqual(doc, {'pets': ['cat']})

    def test_list_field_remove_failure(self):
        """
        Test the static helper method to remove from a list field errors as
        expected when the target field is not a list.
        """
        doc = Document(self.db)
        doc.field_set(doc, 'name', 'julia')
        try:
            doc.list_field_remove(doc, 'name', 'julia')
            self.fail('Above statement should raise an Exception')
        except CloudantDocumentException as err:
            self.assertEqual(str(err), 'The field name is not a list.')
        # Document must be left unchanged after the failure
        self.assertEqual(doc, {'name': 'julia'})

    def test_field_set_and_replace(self):
        """
        Test the static helper method to set or replace a field value.
        """
        doc = Document(self.db)
        self.assertEqual(doc, {})
        doc.field_set(doc, 'name', 'julia')
        self.assertEqual(doc, {'name': 'julia'})
        doc.field_set(doc, 'name', 'jules')
        self.assertEqual(doc, {'name': 'jules'})
        doc.field_set(doc, 'pets', ['cat', 'dog'])
        self.assertEqual(doc, {'name': 'jules', 'pets': ['cat', 'dog']})
        # Setting a field to None removes the field entirely
        doc.field_set(doc, 'pets', None)
        self.assertEqual(doc, {'name': 'jules'})

    def test_update_field(self):
        """
        Test that we can update a single field remotely using the
        update_field method.
        """
        doc = Document(self.db, 'julia006')
        doc['name'] = 'julia'
        doc['age'] = 6
        doc['pets'] = ['cat', 'dog']
        doc.create()
        self.assertTrue(doc['_rev'].startswith('1-'))
        self.assertEqual(doc['pets'], ['cat', 'dog'])
        # update_field performs the mutation and saves in one call
        doc.update_field(doc.list_field_append, 'pets', 'fish')
        self.assertTrue(doc['_rev'].startswith('2-'))
        self.assertEqual(doc['pets'], ['cat', 'dog', 'fish'])

    @mock.patch('cloudant.document.Document.save')
    def test_update_field_maxretries(self, m_save):
        """
        Test that conflict retries work for updating a single field and that
        exhausting the retries re-raises the HTTP 409 error.
        """
        # Create a doc
        doc = Document(self.db, 'julia006')
        doc['name'] = 'julia'
        doc['age'] = 6
        doc.create()
        self.assertTrue(doc['_rev'].startswith('1-'))
        self.assertEqual(doc['age'], 6)
        # Mock conflicts when saving updates
        m_save.side_effect = requests.HTTPError(response=mock.Mock(status_code=409, reason='conflict'))
        # Tests that failing on retry eventually throws
        with self.assertRaises(requests.HTTPError) as cm:
            doc.update_field(doc.field_set, 'age', 7, max_tries=2)
        # There is an off-by-one error for "max_tries"
        # It really means max_retries i.e. 1 attempt
        # followed by a max of 2 retries
        self.assertEqual(m_save.call_count, 3)
        self.assertEqual(cm.exception.response.status_code, 409)
        self.assertEqual(cm.exception.response.reason, 'conflict')
        # Fetch again before asserting, otherwise we assert against
        # the locally updated age field
        doc.fetch()
        self.assertFalse(doc['_rev'].startswith('2-'))
        self.assertNotEqual(doc['age'], 7)

    def test_update_field_success_on_retry(self):
        """
        Test that conflict retries work for updating a single field.
        """
        # Create a doc
        doc = Document(self.db, 'julia006')
        doc['name'] = 'julia'
        doc['age'] = 6
        doc.create()
        self.assertTrue(doc['_rev'].startswith('1-'))
        self.assertEqual(doc['age'], 6)

        # Mock when saving the document
        # 1st call throw a 409
        # 2nd call delegate to the real doc.save()
        class SaveMock(object):
            calls = 0

            def save(self):
                if self.calls == 0:
                    self.calls += 1
                    raise requests.HTTPError(response=mock.Mock(status_code=409, reason='conflict'))
                else:
                    return cloudant.document.Document.save(doc)

        with mock.patch.object(doc, 'save', side_effect=SaveMock().save) as m_save:
            # A list of side effects containing only 1 element
            doc.update_field(doc.field_set, 'age', 7, max_tries=1)
        # Two calls to save, one with a 409 and one that succeeds
        self.assertEqual(m_save.call_count, 2)
        # Check that the _rev and age field were updated
        self.assertTrue(doc['_rev'].startswith('2-'))
        self.assertEqual(doc['age'], 7)

    def test_delete_document_failure(self):
        """
        Test failure condition when attempting to remove a document
        from the remote database without a local _rev.
        """
        doc = Document(self.db, 'julia006')
        doc['name'] = 'julia'
        doc['age'] = 6
        doc['pets'] = ['cat', 'dog']
        try:
            doc.delete()
            self.fail('Above statement should raise an Exception')
        except CloudantDocumentException as err:
            self.assertEqual(
                str(err),
                'Attempting to delete a doc with no _rev. '
                'Try running .fetch and re-try.'
            )

    def test_delete_document_success(self):
        """
        Test that we can remove a document from the remote
        database successfully.
        """
        doc = Document(self.db, 'julia006')
        doc['name'] = 'julia'
        doc['age'] = 6
        doc['pets'] = ['cat', 'dog']
        doc.create()
        self.assertTrue(doc.exists())
        doc.delete()
        self.assertFalse(doc.exists())
        # After delete only the _id remains in the local dict
        self.assertEqual(doc, {'_id': 'julia006'})

    def test_delete_document_success_with_encoded_url(self):
        """
        Test that we can remove a document from the remote database
        successfully when the document id requires an encoded url.
        """
        doc = Document(self.db, 'http://example.com')
        doc['name'] = 'julia'
        doc['age'] = 6
        doc['pets'] = ['cat', 'dog']
        doc.create()
        self.assertTrue(doc.exists())
        doc.delete()
        self.assertFalse(doc.exists())
        self.assertEqual(doc, {'_id': 'http://example.com'})

    def test_document_context_manager(self):
        """
        Test that the __enter__ and __exit__ methods perform as expected
        when initiated through a document context manager.
        """
        new_doc = Document(self.db, 'julia006')
        new_doc.create()
        self.assertTrue(new_doc.exists())
        del new_doc
        with Document(self.db, 'julia006') as doc:
            # __enter__ fetched the existing document
            self.assertTrue(all(x in list(doc.keys()) for x in ['_id', '_rev']))
            self.assertTrue(doc['_rev'].startswith('1-'))
            doc['name'] = 'julia'
            doc['age'] = 6
        # __exit__ saved the changes, bumping the revision
        self.assertTrue(doc['_rev'].startswith('2-'))
        self.assertEqual(self.db['julia006'], doc)

    def test_document_context_manager_no_doc_id(self):
        """
        Test that the __enter__ and __exit__ methods perform as expected
        with no document id when initiated through a document context manager
        """
        with Document(self.db) as doc:
            doc['_id'] = 'julia006'
            doc['name'] = 'julia'
            doc['age'] = 6
        self.assertTrue(doc['_rev'].startswith('1-'))
        self.assertEqual(self.db['julia006'], doc)

    def test_document_context_manager_creation_failure_on_error(self):
        """
        Test that the document context manager skips document creation
        if there is an error.
        """
        with self.assertRaises(ZeroDivisionError), Document(self.db, 'julia006') as doc:
            doc['name'] = 'julia'
            doc['age'] = 6
            raise ZeroDivisionError()
        doc = Document(self.db, 'julia006')
        try:
            doc.fetch()
        except requests.HTTPError as err:
            # Document was never created, so the fetch 404s
            self.assertEqual(err.response.status_code, 404)
        else:
            self.fail('Above statement should raise a HTTPError.')

    def test_document_context_manager_update_failure_on_error(self):
        """
        Test that the document context manager skips document update
        if there is an error.
        """
        # Create the document.
        doc = Document(self.db, 'julia006')
        doc['name'] = 'julia'
        doc['age'] = 6
        doc.save()
        # Make a document update and then raise an error.
        with self.assertRaises(ZeroDivisionError), Document(self.db, 'julia006') as doc:
            doc['age'] = 7
            raise ZeroDivisionError()
        # Assert the change persists locally.
        self.assertEqual(doc['age'], 7)
        # Assert the document has not been saved to remote server.
        self.assertTrue(doc['_rev'].startswith('1-'))
        self.assertEqual(self.db['julia006']['age'], 6)

    def test_document_context_manager_doc_create(self):
        """
        Test that the document context manager will create a doc if it
        does not yet exist.
        """
        with Document(self.db, 'julia006') as doc:
            doc['name'] = 'julia'
            doc['age'] = 6
        self.assertTrue(doc['_rev'].startswith('1-'))
        self.assertEqual(self.db['julia006'], doc)

    def test_setting_id(self):
        """
        Ensure that proper processing occurs when setting the _id
        """
        doc = Document(self.db)
        self.assertIsNone(doc.get('_id'))
        doc['_id'] = 'julia006'
        self.assertEqual(doc['_id'], 'julia006')

    def test_removing_id(self):
        """
        Ensure that proper processing occurs when removing the _id
        """
        doc = Document(self.db)
        doc['_id'] = 'julia006'
        del doc['_id']
        self.assertIsNone(doc.get('_id'))

    def test_get_text_attachment(self):
        """
        Test the retrieval of a text attachment
        """
        doc = self.db.create_document(
            {'_id': 'julia006', 'name': 'julia', 'age': 6}
        )
        attachment = StringIO()
        try:
            filename = 'attachment-{0}{1}'.format(unicode_(uuid.uuid4()), '.txt')
            attachment.write('This is line one of the attachment.\n')
            attachment.write('This is line two of the attachment.\n')
            resp = doc.put_attachment(
                filename,
                'text/plain',
                attachment.getvalue()
            )
            # get_attachment both returns the content and writes it to f
            with open(find_fixture(filename), 'wt') as f:
                text_attachment = doc.get_attachment(filename, write_to=f)
                self.assertEqual(text_attachment, attachment.getvalue())
            with open(find_fixture(filename), 'rt') as f:
                self.assertEqual(f.read(), attachment.getvalue())
        finally:
            attachment.close()
            os.remove(find_fixture(filename))

    def test_get_json_attachment(self):
        """
        Test the retrieval of a json attachment
        """
        doc = self.db.create_document(
            {'_id': 'julia006', 'name': 'julia', 'age': 6}
        )
        try:
            filename = 'attachment-{0}{1}'.format(unicode_(uuid.uuid4()), '.json')
            data = {'foo': 'bar', 'baz': 99}
            resp = doc.put_attachment(
                filename,
                'application/json',
                json.dumps(data)
            )
            # A json attachment is decoded to a dict on retrieval
            with open(find_fixture(filename), 'wt') as f:
                json_attachment = doc.get_attachment(filename, write_to=f)
                self.assertIsInstance(json_attachment, dict)
                self.assertEqual(json_attachment, data)
            with open(find_fixture(filename), 'rt') as f:
                self.assertEqual(f.read(), json.dumps(data))
        finally:
            os.remove(find_fixture(filename))

    def test_get_binary_attachment(self):
        """
        Test the retrieval of a binary attachment
        """
        doc = self.db.create_document(
            {'_id': 'julia006', 'name': 'julia', 'age': 6}
        )
        try:
            filename = 'attachment-{0}{1}'.format(unicode_(uuid.uuid4()), '.jpg')
            data = None
            with open(find_fixture('smile.jpg'), 'rb') as f:
                data = f.read()
            resp = doc.put_attachment(filename, 'image/jpeg', data)
            with open(find_fixture(filename), 'wb') as f:
                binary_attachment = doc.get_attachment(filename, write_to=f)
                self.assertEqual(binary_attachment, data)
            with open(find_fixture(filename), 'rb') as f:
                self.assertEqual(f.read(), data)
        finally:
            os.remove(find_fixture(filename))

    def test_attachment_management(self):
        """
        Test the adding, retrieving, updating, and deleting of attachments
        """
        doc = self.db.create_document(
            {'_id': 'julia006', 'name': 'julia', 'age': 6}
        )
        attachment = StringIO()
        try:
            attachment.write('This is line one of the attachment.\n')
            attachment.write('This is line two of the attachment.\n')
            self.assertTrue(doc['_rev'].startswith('1-'))
            # Test adding an attachment
            resp = doc.put_attachment(
                'attachment.txt',
                'text/plain',
                attachment.getvalue()
            )
            self.assertTrue(resp['ok'])
            # Each attachment operation bumps the document revision
            self.assertTrue(resp['rev'].startswith('2-'))
            self.assertEqual(doc['_rev'], resp['rev'])
            self.assertTrue(
                all(x in list(doc.keys()) for x in [
                    '_id', '_rev', 'name', 'age', '_attachments'
                ])
            )
            self.assertTrue(
                all(x in list(doc['_attachments'].keys()) for x in [
                    'attachment.txt'
                ])
            )
            orig_size = \
                doc['_attachments']['attachment.txt']['length']
            self.assertEqual(orig_size, len(attachment.getvalue()))
            # Confirm that the local document dictionary matches
            # the document on the database.
            expected = Document(self.db, 'julia006')
            expected.fetch()
            # NOTE(review): unlike the delete cases below, there is no
            # assertEqual(doc, expected) here — possibly an omitted
            # assertion; confirm intent before adding one.
            # Test retrieving an attachment
            self.assertEqual(
                doc.get_attachment('attachment.txt', attachment_type='text'),
                attachment.getvalue()
            )
            # Test update an attachment
            attachment.write('This is line three of the attachment.\n')
            resp = doc.put_attachment(
                'attachment.txt',
                'text/plain',
                attachment.getvalue()
            )
            self.assertTrue(resp['ok'])
            self.assertTrue(resp['rev'].startswith('3-'))
            self.assertEqual(doc['_rev'], resp['rev'])
            self.assertTrue(
                all(x in list(doc.keys()) for x in [
                    '_id', '_rev', 'name', 'age', '_attachments'
                ])
            )
            self.assertTrue(
                all(x in list(doc['_attachments'].keys()) for x in [
                    'attachment.txt'
                ])
            )
            updated_size = doc['_attachments']['attachment.txt']['length']
            self.assertTrue(updated_size > orig_size)
            self.assertEqual(updated_size, len(attachment.getvalue()))
            self.assertEqual(
                doc.get_attachment('attachment.txt', attachment_type='text'),
                attachment.getvalue()
            )
            # Confirm that the local document dictionary matches
            # the document on the database.
            expected = Document(self.db, 'julia006')
            expected.fetch()
            # Test delete attachments
            # Add a second attachment so we can fully test
            # delete functionality.
            resp = doc.put_attachment(
                'attachment2.txt',
                'text/plain',
                attachment.getvalue()
            )
            # Test deleting an attachment from a document
            # with multiple atatchments.
            resp = doc.delete_attachment('attachment.txt')
            self.assertTrue(resp['ok'])
            self.assertTrue(resp['rev'].startswith('5-'))
            self.assertEqual(doc['_rev'], resp['rev'])
            self.assertTrue(
                all(x in list(doc.keys()) for x in [
                    '_id', '_rev', 'name', 'age', '_attachments'
                ])
            )
            # Confirm that the local document dictionary matches
            # the document on the database.
            expected = Document(self.db, 'julia006')
            expected.fetch()
            self.assertEqual(doc, expected)
            # Test deleting an attachment from a document
            # with a single attachment.
            resp = doc.delete_attachment('attachment2.txt')
            self.assertTrue(resp['ok'])
            self.assertTrue(resp['rev'].startswith('6-'))
            self.assertEqual(doc['_rev'], resp['rev'])
            # Removing the final attachment drops the _attachments key
            self.assertTrue(
                all(x in list(doc.keys()) for x in [
                    '_id', '_rev', 'name', 'age'
                ])
            )
            # Confirm that the local document dictionary matches
            # the document on the database.
            expected = Document(self.db, 'julia006')
            expected.fetch()
            self.assertEqual(doc, expected)
        finally:
            attachment.close()

    def test_document_request_fails_after_client_disconnects(self):
        """
        Test that after disconnecting from a client any objects created
        based on that client are not able to make requests.
        """
        self.client.connect()
        doc = Document(self.db, 'julia001')
        doc.save()
        self.client.disconnect()
        try:
            # The session is torn down on disconnect, so fetch fails
            with self.assertRaises(AttributeError):
                doc.fetch()
            self.assertIsNone(doc.r_session)
        finally:
            self.client.connect()

    def test_document_custom_json_encoder_and_decoder(self):
        """
        Test that a Document round-trips datetime values through custom
        JSON encoder and decoder classes.
        """
        dt_format = '%Y-%m-%dT%H:%M:%S'

        class DTEncoder(json.JSONEncoder):
            # Serialize datetimes as tagged dicts
            def default(self, obj):
                if isinstance(obj, datetime):
                    return {
                        '_type': 'datetime',
                        'value': obj.strftime(dt_format)
                    }
                return super(DTEncoder, self).default(obj)

        class DTDecoder(json.JSONDecoder):
            def __init__(self, *args, **kwargs):
                json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)

            # Revive tagged dicts back into datetimes
            def object_hook(self, obj):
                if '_type' not in obj:
                    return obj
                if obj['_type'] == 'datetime':
                    return datetime.strptime(obj['value'], dt_format)
                return obj

        doc = Document(self.db, encoder=DTEncoder)
        doc['name'] = 'julia'
        doc['dt'] = datetime(2018, 7, 9, 15, 11, 10, 0)
        doc.save()
        # The raw stored form is the tagged dict, not a datetime
        raw_doc = self.db.all_docs(include_docs=True)['rows'][0]['doc']
        self.assertEqual(raw_doc['name'], 'julia')
        self.assertEqual(raw_doc['dt']['_type'], 'datetime')
        self.assertEqual(raw_doc['dt']['value'], '2018-07-09T15:11:10')
        doc2 = Document(self.db, doc['_id'],
decoder=DTDecoder) doc2.fetch() self.assertEqual(doc2['dt'], doc['dt']) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/document_validation_tests.py ================================================ #!/usr/bin/env python # Copyright © 2021 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest from enum import Enum from unittest.mock import Mock, patch from mock import create_autospec import requests from urllib.parse import urlparse from cloudant import database from cloudant.design_document import DesignDocument from cloudant.document import Document from cloudant.error import CloudantArgumentError class ValidationExceptionMsg(Enum): DOC = 'Invalid document ID:' ATTACHMENT = 'Invalid attachment name:' class Expect(Enum): VALIDATION_EXCEPTION_DOCID = CloudantArgumentError(137, '') VALIDATION_EXCEPTION_ATT = CloudantArgumentError(138, '') RESPONSE_404 = 404 RESPONSE_200 = 200 RESPONSE_201 = 201 class ValidationTests(unittest.TestCase): """ Document validation unit tests """ def setUp(self): self.doc_r_session_patcher = patch('cloudant.document.Document.r_session') self.requests_get_patcher = patch('requests.get') self.addCleanup(patch.stopall) self.doc_r_session_mock = self.doc_r_session_patcher.start() self.requests_get_mock = self.requests_get_patcher.start() self.db = create_autospec(database) self.db.client = Mock() self.db.client.server_url = 'http://mocked.url.com' 
self.db.database_url = 'http://mocked.url.com/my_db' self.db.database_name = 'mydb' def teardown(self): self.addCleanup(patch.stopall) del self.db del self.doc_r_session_patcher del self.requests_get_patcher del self.doc_r_session_mock del self.requests_get_mock # GET and HEAD _all_docs # EXPECTED: validation failure def test_get_invalid_all_docs(self): """ Test GET/HEAD request for invalid '_all_docs' document ID """ self.get_document_variants('_all_docs', Expect.VALIDATION_EXCEPTION_DOCID.value) # GET and HEAD _design/foo # EXPECTED: 200 def test_get_valid_ddoc(self): """ Test GET/HEAD request for valid '_design/foo' document ID """ self.get_document_variants('_design/foo', Expect.RESPONSE_200.value, path_segment_count=3) self.get_document_variants('_design/foo', Expect.RESPONSE_200.value, True, path_segment_count=3) # GET and HEAD _design # EXPECTED: Validation exception def test_get_invalid_design(self): """ Test GET/HEAD request for invalid '_design' document ID """ self.get_document_variants('_design', Expect.VALIDATION_EXCEPTION_DOCID.value) self.get_document_variants('_design', Expect.VALIDATION_EXCEPTION_DOCID.value, True) # GET and HEAD /_design/foo with a slash # EXPECTED: 404 def test_get_missing_ddoc_with_slash(self): """ Test GET/HEAD request for missing '/_design/foo' document ID """ self.get_document_variants('/_design/foo', Expect.RESPONSE_404.value, path_segment_count=2) # GET and HEAD _design/foo/_view/bar # EXPECTED: 404 def test_get_invalid_view(self): """ Test GET/HEAD request for missing '_design/foo' document ID """ self.get_document_variants('_design/foo/_view/bar', Expect.RESPONSE_404.value, path_segment_count=3) self.get_document_variants('_design/foo/_view/bar', Expect.RESPONSE_404.value, True, path_segment_count=3) # GET and HEAD _design/foo/_info # EXPECTED: 404 def test_get_invalid_view_info(self): """ Test GET/HEAD request for missing '_design/foo/_info' document ID """ self.get_document_variants('_design/foo/_info', 
                                   Expect.RESPONSE_404.value, path_segment_count=3)
        self.get_document_variants('_design/foo/_info', Expect.RESPONSE_404.value, True, path_segment_count=3)

    # GET and HEAD _design/foo/_search/bar
    # EXPECTED: 404
    def test_get_invalid_search(self):
        """
        Test GET/HEAD request for missing '_design/foo/_search/bar' document ID
        """
        self.get_document_variants('_design/foo/_search/bar', Expect.RESPONSE_404.value, path_segment_count=3)
        self.get_document_variants('_design/foo/_search/bar', Expect.RESPONSE_404.value, True, path_segment_count=3)

    # GET and HEAD _design/foo/_search_info/bar
    # EXPECTED: 404
    def test_get_invalid_search_info(self):
        """
        Test GET/HEAD request for missing '_design/foo/_search_info/bar' document ID
        """
        self.get_document_variants('_design/foo/_search_info/bar', Expect.RESPONSE_404.value, path_segment_count=3)
        self.get_document_variants('_design/foo/_search_info/bar', Expect.RESPONSE_404.value, True, path_segment_count=3)

    # GET and HEAD _design/foo/_geo/bar
    # EXPECTED: 404
    def test_get_missing_geo(self):
        """
        Test GET/HEAD request for missing '_design/foo/_geo/bar' document ID
        """
        self.get_document_variants('_design/foo/_geo/bar', Expect.RESPONSE_404.value, path_segment_count=3)
        self.get_document_variants('_design/foo/_geo/bar', Expect.RESPONSE_404.value, True, path_segment_count=3)
        # with a parameter
        self.get_document_variants('_design/foo/_geo/bar?bbox=-50.52,-4.46,54.59,1.45', Expect.RESPONSE_404.value, path_segment_count=3)
        self.get_document_variants('_design/foo/_geo/bar?bbox=-50.52,-4.46,54.59,1.45', Expect.RESPONSE_404.value, True, path_segment_count=3)

    # GET and HEAD _design/foo/_geo_info/bar
    # EXPECTED: 404
    def test_get_missing_geo_info(self):
        """
        Test GET/HEAD request for missing '_design/foo/_geo_info/bar' document ID
        """
        self.get_document_variants('_design/foo/_geo_info/bar', Expect.RESPONSE_404.value, path_segment_count=3)
        self.get_document_variants('_design/foo/_geo_info/bar', Expect.RESPONSE_404.value, True, path_segment_count=3)

    # GET and HEAD _local/foo
    # EXPECTED: 200
    def test_get_local_doc(self):
        """
        Test GET/HEAD request for valid '_local/foo' document ID
        """
        self.get_document_variants('_local/foo', Expect.RESPONSE_200.value, path_segment_count=3)

    # GET and HEAD _local
    # EXPECTED: Validation exception
    def test_get_invalid_local(self):
        """
        Test GET/HEAD request for invalid '_local' document ID
        """
        self.get_document_variants('_local', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # GET and HEAD _local_docs
    # EXPECTED: Validation exception
    def test_get_invalid_local_docs(self):
        """
        Test GET/HEAD request for invalid '_local_docs' document ID
        """
        self.get_document_variants('_local_docs', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # GET and HEAD _design_docs
    # EXPECTED: Validation exception
    def test_get_invalid_design_docs(self):
        """
        Test GET/HEAD request for invalid '_design_docs' document ID
        """
        self.get_document_variants('_design_docs', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # GET and HEAD _changes
    # EXPECTED: Validation exception
    def test_get_invalid_changes(self):
        """
        Test GET/HEAD request for invalid '_changes' document ID
        """
        self.get_document_variants('_changes', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # GET and HEAD _ensure_full_commit
    # EXPECTED: Validation exception
    def test_get_invalid_ensure_full_commit(self):
        """
        Test GET/HEAD request for invalid '_ensure_full_commit' document ID
        """
        self.get_document_variants('_ensure_full_commit', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # GET and HEAD _index
    # EXPECTED: Validation exception
    def test_get_invalid_index(self):
        """
        Test GET/HEAD request for invalid '_index' document ID
        """
        self.get_document_variants('_index', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # GET and HEAD _revs_limit
    # EXPECTED: Validation exception
    def test_get_invalid_revs_limit(self):
        """
        Test GET/HEAD request for invalid '_revs_limit' document ID
        """
        self.get_document_variants('_revs_limit', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # GET and HEAD _security
    # EXPECTED: Validation exception
    def test_get_invalid_security(self):
        """
        Test GET/HEAD request for invalid '_security' document ID
        """
        self.get_document_variants('_security', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # GET and HEAD _shards
    # EXPECTED: Validation exception
    def test_get_invalid_shards(self):
        """
        Test GET/HEAD request for invalid '_shards' document ID
        """
        self.get_document_variants('_shards', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # DELETE _index/_design/foo/json/bar
    # EXPECTED: Validation exception
    def test_delete_invalid_index(self):
        """
        Test DELETE request for invalid '_index/_design/foo/json/bar' document ID
        """
        self.delete_document_variants('_index/_design/foo/json/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # DELETE _design/foo
    # EXPECTED: 200
    def test_delete_valid_ddoc(self):
        """
        Test DELETE request for valid '_design/foo' document ID
        """
        self.delete_document_variants('_design/foo', Expect.RESPONSE_200.value, path_segment_count=3)

    # DELETE _design
    # EXPECTED: Validation exception
    def test_delete_invalid_ddoc(self):
        """
        Test DELETE request for invalid '_design' document ID
        """
        # no trailing '/' on _design prefix
        self.delete_document_variants('_design', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # DELETE _local/foo
    # EXPECTED: 200
    def test_delete_valid_local_doc(self):
        """
        Test DELETE request for valid '_local/foo' document ID
        """
        self.delete_document_variants('_local/foo', Expect.RESPONSE_200.value, path_segment_count=3)

    # DELETE _local
    # EXPECTED: Validation exception
    def test_delete_invalid_local(self):
        """
        Test DELETE request for invalid '_local' document ID
        """
        # no trailing '/' on _local prefix
        self.delete_document_variants('_local', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # PUT _design/foo
    # EXPECTED: 201
    def test_put_valid_ddoc(self):
        """
        Test PUT request for valid '_design/foo' document ID
        """
        self.put_document_variants('_design/foo', Expect.RESPONSE_201.value, path_segment_count=3)

    # PUT _design
    # EXPECTED: Validation exception
    def test_put_invalid_ddoc(self):
        """
        Test PUT request for invalid '_design' document ID
        """
        self.put_document_variants('_design', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # PUT _local/foo
    # EXPECTED: 201
    def test_put_valid_local_doc(self):
        """
        Test PUT request for valid '_local/foo' document ID
        """
        self.put_document_variants('_local/foo', Expect.RESPONSE_201.value, path_segment_count=3)

    # PUT _local
    # EXPECTED: Validation exception
    def test_put_invalid_local_doc(self):
        """
        Test PUT request for invalid '_local' document ID
        """
        self.put_document_variants('_local', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # PUT _revs_limit
    # EXPECTED: Validation exception
    def test_put_invalid_revs_limit(self):
        """
        Test PUT request for invalid '_revs_limit' document ID
        """
        self.put_document_variants('_revs_limit', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # PUT _security
    # EXPECTED: Validation exception
    def test_put_invalid_security(self):
        """
        Test PUT request for invalid '_security' document ID
        """
        self.put_document_variants('_security', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # GET _design/foo/bar
    # EXPECTED: 200
    def test_get_valid_ddoc_attachment(self):
        """
        Test GET request for valid '_design/foo/bar' document ID
        """
        self.get_doc_attachment_variants('_design/foo', 'bar', Expect.RESPONSE_200.value, True, path_segment_count=4)

    # PUT _design/foo/bar
    # EXPECTED: 201
    def test_put_valid_ddoc_attachment(self):
        """
        Test PUT request for valid '_design/foo/bar' document ID
        """
        self.put_doc_attachment_variants('_design/foo', 'bar', Expect.RESPONSE_201.value, True, path_segment_count=4)

    # DELETE _design/foo/bar
    # EXPECTED: 200
    def test_delete_valid_ddoc_attachment(self):
        """
        Test DELETE request for valid '_design/foo/bar' document ID
        """
        self.delete_doc_attachment_variants('_design/foo', 'bar', Expect.RESPONSE_200.value, True, path_segment_count=4)

    # GET _design/foo
    # EXPECTED: Validation exception
    def test_get_invalid_ddoc_attachment(self):
        """
        Test GET request for invalid '_design/foo' document
        ID
        """
        # with ddoc option enabled
        self.get_doc_attachment_variants('_design', 'foo', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.get_doc_attachment_variants('_design', 'foo', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # PUT _design/foo
    # EXPECTED: Validation exception
    def test_put_invalid_ddoc_attachment(self):
        """
        Test PUT request for invalid '_design/foo' document ID
        """
        # with ddoc option enabled
        self.put_doc_attachment_variants('_design', 'foo', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.put_doc_attachment_variants('_design', 'foo', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # DELETE _design/foo
    # EXPECTED: Validation exception
    def test_delete_invalid_ddoc_attachment(self):
        """
        Test DELETE request for invalid '_design/foo' document ID
        """
        # with ddoc option enabled
        self.delete_doc_attachment_variants('_design', 'foo', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.delete_doc_attachment_variants('_design', 'foo', Expect.VALIDATION_EXCEPTION_DOCID.value)

    # DELETE _index/_design/foo/json/bar
    # EXPECTED: Validation exception
    def test_delete_index_via_attachment(self):
        """
        Test DELETE requests for invalid '_index/_design/foo/json/bar'
        """
        # every doc-id/attachment-name split of the path must be rejected
        self.delete_doc_attachment_variants('_index', '_design/foo/json/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.delete_doc_attachment_variants('_index', '_design/foo/json/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.delete_doc_attachment_variants('_index/_design', 'foo/json/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.delete_doc_attachment_variants('_index/_design', 'foo/json/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.delete_doc_attachment_variants('_index/_design/foo', 'json/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.delete_doc_attachment_variants('_index/_design/foo', 'json/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.delete_doc_attachment_variants('_index/_design/foo/json', 'bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.delete_doc_attachment_variants('_index/_design/foo/json', 'bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)

    # GET _design/foo/_view/bar
    def test_get_view_via_ddoc_attachment(self):
        """
        Test GET requests for '_design/foo/_view/bar'
        """
        # EXPECTED: 404
        self.get_doc_attachment_variants('_design/foo/_view', 'bar', Expect.RESPONSE_404.value, path_segment_count=4)
        self.get_doc_attachment_variants('_design/foo/_view', 'bar', Expect.RESPONSE_404.value, True, path_segment_count=4)
        self.get_doc_attachment_variants('_design/foo', '/_view/bar', Expect.RESPONSE_404.value, path_segment_count=4)
        self.get_doc_attachment_variants('_design/foo', '/_view/bar', Expect.RESPONSE_404.value, True, path_segment_count=4)
        # EXPECTED: Validation exception
        self.get_doc_attachment_variants('_design/foo', '_view/bar', Expect.VALIDATION_EXCEPTION_ATT.value)
        self.get_doc_attachment_variants('_design/foo', '_view/bar', Expect.VALIDATION_EXCEPTION_ATT.value, True)
        self.get_doc_attachment_variants('_design', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.get_doc_attachment_variants('_design', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.get_doc_attachment_variants('_design/', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.get_doc_attachment_variants('_design/', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)

    # PUT _design/foo/_view/bar
    def test_put_view_via_ddoc_attachment(self):
        """
        Test PUT requests for '_design/foo/_view/bar'
        """
        # EXPECTED: Validation exception
        self.put_doc_attachment_variants('_design/foo', '_view/bar', Expect.VALIDATION_EXCEPTION_ATT.value)
        self.put_doc_attachment_variants('_design/foo', '_view/bar', Expect.VALIDATION_EXCEPTION_ATT.value, True)
        self.put_doc_attachment_variants('_design', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.put_doc_attachment_variants('_design', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.put_doc_attachment_variants('_design/',
                                         'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.put_doc_attachment_variants('_design/', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)

    # DELETE _design/foo/_view/bar
    def test_delete_view_via_ddoc_attachment(self):
        """
        Test DELETE requests for '_design/foo/_view/bar'
        """
        # EXPECTED: Validation exception
        self.delete_doc_attachment_variants('_design/foo', '_view/bar', Expect.VALIDATION_EXCEPTION_ATT.value)
        self.delete_doc_attachment_variants('_design/foo', '_view/bar', Expect.VALIDATION_EXCEPTION_ATT.value, True)
        self.delete_doc_attachment_variants('_design', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.delete_doc_attachment_variants('_design', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.delete_doc_attachment_variants('_design/', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.delete_doc_attachment_variants('_design/', 'foo/_view/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)

    # GET _design/foo/_info
    def test_get_view_info_via_ddoc_attachment(self):
        """
        Test GET requests for '_design/foo/_info'
        """
        # EXPECTED: Validation exception
        self.get_doc_attachment_variants('_design/foo', '_info', Expect.VALIDATION_EXCEPTION_ATT.value)
        self.get_doc_attachment_variants('_design/foo', '_info', Expect.VALIDATION_EXCEPTION_ATT.value, True)
        self.get_doc_attachment_variants('_design', 'foo/_info', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.get_doc_attachment_variants('_design', 'foo/_info', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.get_doc_attachment_variants('_design/', 'foo/_info', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.get_doc_attachment_variants('_design/', 'foo/_info', Expect.VALIDATION_EXCEPTION_DOCID.value, True)

    # GET _design/foo/_search/bar
    def test_get_search_via_ddoc_attachment(self):
        """
        Test GET requests for '_design/foo/_search/bar'
        """
        # EXPECTED: 404
        self.get_doc_attachment_variants('_design/foo/_search', 'bar', Expect.RESPONSE_404.value, path_segment_count=4)
        self.get_doc_attachment_variants('_design/foo/_search', 'bar', Expect.RESPONSE_404.value, True, path_segment_count=4)
        self.get_doc_attachment_variants('_design/foo/_search', 'bar?q=*.*', Expect.RESPONSE_404.value, path_segment_count=4)
        self.get_doc_attachment_variants('_design/foo/_search', 'bar?q=*.*', Expect.RESPONSE_404.value, True, path_segment_count=4)
        # EXPECTED: Validation exception
        self.get_doc_attachment_variants('_design/foo', '_search/bar', Expect.VALIDATION_EXCEPTION_ATT.value)
        self.get_doc_attachment_variants('_design/foo', '_search/bar', Expect.VALIDATION_EXCEPTION_ATT.value, True)
        self.get_doc_attachment_variants('_design', 'foo/_search/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.get_doc_attachment_variants('_design', 'foo/_search/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)
        self.get_doc_attachment_variants('_design/', 'foo/_search/bar', Expect.VALIDATION_EXCEPTION_DOCID.value)
        self.get_doc_attachment_variants('_design/', 'foo/_search/bar', Expect.VALIDATION_EXCEPTION_DOCID.value, True)

    # GET _design/foo/_search_info/bar
    def test_get_search_info_via_ddoc_attachment(self):
        """
        Test GET requests for '_design/foo/_search_info/bar'
        """
        # EXPECTED: 404
        self.get_doc_attachment_variants('_design/foo/_search_info', 'bar', Expect.RESPONSE_404.value, path_segment_count=4)
        self.get_doc_attachment_variants('_design/foo/_search_info', 'bar', Expect.RESPONSE_404.value, True, path_segment_count=4)
        # EXPECTED: Validation exception
        self.get_doc_attachment_variants('_design/foo', '_search_info/bar', Expect.VALIDATION_EXCEPTION_ATT.value)
        self.get_doc_attachment_variants('_design/foo', '_search_info/bar', Expect.VALIDATION_EXCEPTION_ATT.value, True)

    # GET _design/foo/_geo/bar
    def test_get_geo_via_ddoc_attachment(self):
        """
        Test GET requests for '_design/foo/_geo/bar'
        """
        # EXPECTED: 404
        self.get_doc_attachment_variants('_design/foo/_geo', 'bar', Expect.RESPONSE_404.value, path_segment_count=4)
self.get_doc_attachment_variants('_design/foo/_geo', 'bar', Expect.RESPONSE_404.value, True, path_segment_count=4) self.get_doc_attachment_variants('_design/foo/_geo', 'bar?bbox=-50.52,-4.46,54.59,1.45', Expect.RESPONSE_404.value, path_segment_count=4) self.get_doc_attachment_variants('_design/foo/_geo', 'bar?bbox=-50.52,-4.46,54.59,1.45', Expect.RESPONSE_404.value, True, path_segment_count=4) # EXPECTED: Validation exception self.get_doc_attachment_variants('_design/foo', '_geo/bar', Expect.VALIDATION_EXCEPTION_ATT.value) self.get_doc_attachment_variants('_design/foo', '_geo/bar', Expect.VALIDATION_EXCEPTION_ATT.value, True) # GET _design/foo/_geo_info/bar def test_get_geo_info_via_ddoc_attachment(self): """ Test GET requests for '_design/foo/_geo_info/bar' """ # EXPECTED: 404 self.get_doc_attachment_variants('_design/foo/_geo_info', 'bar', Expect.RESPONSE_404.value, path_segment_count=4) self.get_doc_attachment_variants('_design/foo/_geo_info', 'bar', Expect.RESPONSE_404.value, True, path_segment_count=4) # EXPECTED: Validation exception self.get_doc_attachment_variants('_design/foo', '_geo_info/bar', Expect.VALIDATION_EXCEPTION_ATT.value) self.get_doc_attachment_variants('_design/foo', '_geo_info/bar', Expect.VALIDATION_EXCEPTION_ATT.value, True) # GET _partition/foo # EXPECTED: Validation exception def test_get_invalid_partition_info(self): """ Test GET requests for '_partition/foo' """ self.get_document_variants('_partition/foo', Expect.VALIDATION_EXCEPTION_DOCID.value) # GET _partition/foo # EXPECTED: Validation exception def test_get_invalid_partition_info_via_attachment(self): """ Test GET requests for '_partition/foo' """ self.get_doc_attachment_variants('_partition', 'foo', Expect.VALIDATION_EXCEPTION_DOCID.value) # GET _partition/foo/_all_docs # EXPECTED: Validation exception def test_get_partition_info(self): """ Test GET requests for '_partition/foo/_all_docs' """ self.get_document_variants('_partition/foo/_all_docs', 
Expect.VALIDATION_EXCEPTION_DOCID.value) # GET _partition/foo/_all_docs # EXPECTED: Validation exception def test_get_invalid_partition_all_docs_via_attachment(self): """ Test GET requests for '_partition/foo/_all_docs' """ self.get_doc_attachment_variants('_partition', 'foo/_all_docs', Expect.VALIDATION_EXCEPTION_DOCID.value) self.get_doc_attachment_variants('_partition/foo', '_all_docs', Expect.VALIDATION_EXCEPTION_DOCID.value) """UTIL FUNCTIONS""" def mocked_get_requests(self, rev=None, override_status_code=None): """ Create a mock GET request for documents with the expected status code :param rev: the doc's revision (default None) :param override_status_code: override the status code for handling inner `fetch` request call within `get_attachment` :return: mocked Response object """ resp_mock = create_autospec(requests.Response) if override_status_code is not None: resp_mock.status_code = override_status_code else: resp_mock.status_code = self.expected_enum if (resp_mock.status_code == 200 or resp_mock.status_code == 201 and self.doc_id is not None): if rev is not None: resp_mock.text = f"""{{"_id": "{self.doc_id}", "_rev": "{rev}"}}""" else: resp_mock.text = f"""{{"_id": "{self.doc_id}", "_rev": "1-abc"}}""" elif resp_mock.status_code == 404: resp_mock.raise_for_status.side_effect = requests.exceptions.HTTPError resp_mock.encoding = None return resp_mock def mocked_get_att_requests(self): """ Create a mock GET request for attachments with the expected status code """ self.expected_att_content = f"""this is a text attachment""" # first fetch doc call with rev fetch_mock = self.mocked_get_requests(rev=None, override_status_code=200) # second get to attachment resp_mock = create_autospec(requests.Response) resp_mock.status_code = self.expected_enum if self.expected_enum == 200 and self.doc_id is not None and self.att_name is not None: resp_mock.text = self.expected_att_content if self.expected_enum == 404: resp_mock.raise_for_status.side_effect = 
requests.exceptions.HTTPError self.doc_r_session_mock.get.side_effect = [fetch_mock, resp_mock] def mocked_head_requests(self, override_status_code=None): """ Create a mock HEAD request for documents and attachments with the expected status code """ resp_mock = create_autospec(requests.Response) if override_status_code is not None: resp_mock.status_code = override_status_code else: resp_mock.status_code = self.expected_enum self.doc_r_session_mock.head = Mock(return_value=resp_mock) def mocked_delete_requests(self): """ Create a mock DELETE request for documents with the expected status code """ resp_mock = create_autospec(requests.Response) resp_mock.status_code = self.expected_enum if self.expected_enum == 201 and self.doc_id is not None: resp_mock.text = f"""{{"id": "{self.doc_id}", "rev": "2-abc", "ok": true}}""" self.doc_r_session_mock.delete = Mock(return_value=resp_mock) def mocked_delete_att_requests(self): """ Create a mock DELETE request for attachments with the expected status code """ # first `fetch` document call with rev self.doc_r_session_mock.get = Mock(return_value=self.mocked_get_requests(rev=None, override_status_code=200)) # second delete to attachment resp_mock = create_autospec(requests.Response) resp_mock.status_code = self.expected_enum resp_mock.encoding = None if self.expected_enum == 200 and self.doc_id is not None and self.att_name is not None: resp_mock.text = f"""{{"id": "{self.doc_id}", "rev": "2-abc", "ok": true}}""" elif self.expected_enum == 404: resp_mock.raise_for_status.side_effect = requests.exceptions.HTTPError self.doc_r_session_mock.delete = Mock(return_value=resp_mock) def mocked_put_doc_requests(self): """ Create a mock PUT request for documents with the expected status code """ # mock 'doc.exists' request call within 'doc.save' function self.mocked_head_requests(200) resp_mock = create_autospec(requests.Response) resp_mock.status_code = self.expected_enum resp_mock.encoding = None if self.expected_enum == 201 and 
self.doc_id is not None: resp_mock.text = f"""{{"id": "{self.doc_id}", "rev": "1-abc", "ok": true}}""" if self.expected_enum == 404: resp_mock.raise_for_status.side_effect = requests.exceptions.HTTPError self.doc_r_session_mock.put = Mock(return_value=resp_mock) def mocked_put_att_requests(self): """ Create a mock PUT request for attachments with the expected status code """ # first `fetch` document call within `put_attachment` fetch_mock = self.mocked_get_requests(rev=None, override_status_code=200) # create Response object for PUT attachment resp_mock = create_autospec(requests.Response) resp_mock.status_code = self.expected_enum resp_mock.encoding = None if self.expected_enum == 201 and self.doc_id is not None: resp_mock.text = f"""{{"id": "{self.doc_id}", "rev": "2-def", "ok": true}}""" if self.expected_enum == 404: resp_mock.raise_for_status.side_effect = requests.exceptions.HTTPError # final fetch doc call second_fetch_mock = self.mocked_get_requests(rev='2-def', override_status_code=200) self.doc_r_session_mock.get.side_effect = [fetch_mock, second_fetch_mock] self.doc_r_session_mock.put = Mock(return_value=resp_mock) def get_document_variants(self, doc_id, expected_enum, is_ddoc=False, path_segment_count=None): """ Function to setup mock requests and execute GET/HEAD document requests """ self.doc_id = doc_id self.expected_enum = expected_enum self.is_ddoc = is_ddoc self.mocked_head_requests() self.head_document() self.doc_r_session_mock.get.return_value = self.mocked_get_requests() self.fetch_document() self.assert_path_segments(self.doc_r_session_mock.get.call_args_list, path_segment_count) def get_doc_attachment_variants(self, doc_id, att_name, expected_enum, is_ddoc=False, path_segment_count=None): """ Function to setup mock requests and execute GET attachment requests """ self.att_name = att_name self.doc_id = doc_id self.expected_enum = expected_enum self.is_ddoc = is_ddoc self.mocked_get_att_requests() self.get_doc_attachment() 
self.assert_path_segments(self.doc_r_session_mock.get.call_args_list, path_segment_count) def put_document_variants(self, doc_id, expected_enum, is_ddoc=False, path_segment_count=None): """ Function to setup mock requests and execute PUT document requests """ self.doc_id = doc_id self.expected_enum = expected_enum self.is_ddoc = is_ddoc self.mocked_put_doc_requests() self.put_document() self.assert_path_segments(self.doc_r_session_mock.put.call_args_list, path_segment_count) def put_doc_attachment_variants(self, doc_id, att_name, expected_enum, is_ddoc=False, path_segment_count=None): """ Function to setup mock requests and execute PUT attachment requests """ self.att_name = att_name self.doc_id = doc_id self.expected_enum = expected_enum self.is_ddoc = is_ddoc self.mocked_put_att_requests() self.put_doc_attachment() self.assert_path_segments(self.doc_r_session_mock.put.call_args_list, path_segment_count) def delete_document_variants(self, doc_id, expected_enum, is_ddoc=False, path_segment_count=None): """ Function to setup mock requests and execute DELETE document requests """ self.doc_id = doc_id self.expected_enum = expected_enum self.is_ddoc = is_ddoc self.mocked_delete_requests() self.delete_document() self.assert_path_segments(self.doc_r_session_mock.delete.call_args_list, path_segment_count) def delete_doc_attachment_variants(self, doc_id, attname, expected_enum, is_ddoc=False, path_segment_count=None): """ Function to setup mock requests and execute DELETE attachment requests """ self.doc_id = doc_id self.att_name = attname self.expected_enum = expected_enum self.is_ddoc = is_ddoc self.mocked_delete_att_requests() self.delete_doc_attachment() self.assert_path_segments(self.doc_r_session_mock.delete.call_args_list, path_segment_count) """HTTP REQUEST FUNCTIONS""" def head_document(self): try: resp = self.create_doc(self.doc_id, self.is_ddoc).exists() if self.expected_enum == 200 or self.expected_enum == 201: self.assertTrue(resp) elif self.expected_enum == 
    def fetch_document(self):
        """
        Fetch the document under test and assert the outcome.

        On the success path the expected status must be a successful int
        code and the fetched document must carry the expected ID plus a
        revision.  A CloudantArgumentError is routed to the validation
        message assertion helper.
        """
        try:
            doc = self.create_doc(self.doc_id, self.is_ddoc)
            doc.fetch()
            self.assertTrue(isinstance(self.expected_enum, int),
                            f"""Expected value {self.expected_enum} is not an int status code.""")
            self.assertTrue(self.expected_enum < 400,
                            f"""Expected value {self.expected_enum} is not a successful status code.""")
            self.assertEqual(self.doc_id, doc['_id'])
            self.assertIsNotNone(doc['_rev'])
        except CloudantArgumentError as cae:
            self.assert_exception_msg(cae)
        except requests.exceptions.HTTPError as err:
            # NOTE(review): id(...) is always truthy so this assertTrue can
            # never fail -- presumably an equality check of the expected
            # status vs. the error was intended; confirm before changing.
            self.assertTrue(id(self.expected_enum), id(err))
self.assertTrue(isinstance(self.expected_enum, int), f"""Expected value {self.expected_enum} is not an int status code.""") self.assertTrue(self.expected_enum < 400, f"""Expected value {self.expected_enum} is not a successful status code.""") self.assertEqual(self.doc_id, doc['_id']) self.assertEqual(self.doc_id, resp['id']) self.assertEqual(doc['_rev'], resp['rev']) except CloudantArgumentError as cae: self.assert_exception_msg(cae) except requests.exceptions.HTTPError as err: self.assertTrue(id(self.expected_enum), id(err)) def get_doc_attachment(self): try: doc = self.create_doc(self.doc_id, self.is_ddoc) resp_att = doc.get_attachment(self.att_name, attachment_type='text') self.assertTrue(isinstance(self.expected_enum, int), f"""Expected value {self.expected_enum} is not an int status code.""") self.assertTrue(self.expected_enum < 400, f"""Expected value {self.expected_enum} is not a successful status code.""") self.assertEqual(self.doc_id, doc['_id']) self.assertIsNotNone(resp_att) self.assertEqual(resp_att, self.expected_att_content) except CloudantArgumentError as cae: self.assert_exception_msg(cae) except requests.exceptions.HTTPError as err: self.assertTrue(id(self.expected_enum), id(err)) def put_doc_attachment(self): try: doc = self.create_doc(self.doc_id, self.is_ddoc) resp_att = doc.put_attachment(self.att_name, content_type='utf-8', data='test') self.assertIsNotNone(resp_att) self.assertTrue(isinstance(self.expected_enum, int), f"""Expected value {self.expected_enum} is not an int status code.""") self.assertTrue(self.expected_enum < 400, f"""Expected value {self.expected_enum} is not a successful status code.""") self.assertEqual(self.doc_id, resp_att['id']) self.assertEqual(resp_att['id'], doc['_id']) self.assertEqual(doc['_rev'], resp_att['rev']) self.assertEqual(resp_att['ok'], True) except CloudantArgumentError as cae: self.assert_exception_msg(cae) except requests.exceptions.HTTPError as err: self.assertTrue(id(self.expected_enum), id(err)) 
"""HELPER FUNCTIONS""" def create_doc(self, doc_id=None, is_ddoc=False): """ Function to create and return a Document or DesignDocument object. """ if is_ddoc: if doc_id is not None: doc = DesignDocument(self.db, doc_id) else: doc = DesignDocument(self.db) elif doc_id is not None: doc = Document(self.db, doc_id) else: doc = Document(self.db) self.assertIsNone(doc.get('_rev')) return doc def assert_exception_msg(self, cae): """ Function to assert whether the exception message is for an invalid document ID or an attachment name. """ self.assertTrue(id(self.expected_enum), id(cae)) # Check that actual exception message starts with the expected msg if str(cae).startswith(str(self.expected_enum)): # Figure out which exception msg to assert against if str(cae).startswith(ValidationExceptionMsg.ATTACHMENT.value): self.assertEqual(str(cae), f"""{ValidationExceptionMsg.ATTACHMENT.value} {self.att_name}""") elif str(cae).startswith(ValidationExceptionMsg.DOC.value): self.assertEqual(str(cae), f"""{ValidationExceptionMsg.DOC.value} {self.doc_id}""") else: self.fail('Expected CloudantArgumentError message should equal actual error message.') def assert_path_segments(self, actual_call_args_list, exp_segment_count): """ Function to assert the number of path segments from a mock request """ # If there's no segment count, verify that the test case expects an argument error if exp_segment_count is None: self.assertTrue(isinstance(self.expected_enum, CloudantArgumentError), 'Path segment count should exist ' 'when testing against valid ' 'document or attachment names.') else: # get latest call in list url, headers = actual_call_args_list[len(actual_call_args_list) - 1] # there should only be one mocked url self.assertEqual(len(url), 1) # parse path of url and remove first / path segment path = urlparse(url[0]).path[1:] actual_segment_count = len(path.split('/')) self.assertEqual(actual_segment_count, exp_segment_count) ================================================ FILE: 
tests/unit/fixtures/__init__.py ================================================ #!/usr/bin/env python # Copyright (c) 2015 IBM. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ _fixtures_ Fixtures folder containing files used by db tests """ ================================================ FILE: tests/unit/iam_auth_tests.py ================================================ #!/usr/bin/env python # Copyright (c) 2017, 2019 IBM. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Unit tests for IAM authentication. 
""" import time import unittest import json import mock from cloudant._2to3 import Cookie from cloudant.client import Cloudant from cloudant._client_session import IAMSession MOCK_API_KEY = 'CqbrIYzdO3btWV-5t4teJLY_etfT_dkccq-vO-5vCXSo' MOCK_ACCESS_TOKEN = ('eyJraWQiOiIyMDE3MDQwMi0wMDowMDowMCIsImFsZyI6IlJTMjU2In0.e' 'yJpYW1faWQiOiJJQk1pZC0yNzAwMDdHRjBEIiwiaWQiOiJJQk1pZC0yNz' 'AwMDdHRjBEIiwicmVhbG1pZCI6IklCTWlkIiwiaWRlbnRpZmllciI6IjI' '3MDAwN0dGMEQiLCJnaXZlbl9uYW1lIjoiVG9tIiwiZmFtaWx5X25hbWUi' 'OiJCbGVuY2giLCJuYW1lIjoiVG9tIEJsZW5jaCIsImVtYWlsIjoidGJsZ' 'W5jaEB1ay5pYm0uY29tIiwic3ViIjoidGJsZW5jaEB1ay5pYm0uY29tIi' 'wiYWNjb3VudCI6eyJic3MiOiI1ZTM1ZTZhMjlmYjJlZWNhNDAwYWU0YzN' 'lMWZhY2Y2MSJ9LCJpYXQiOjE1MDA0NjcxMDIsImV4cCI6MTUwMDQ3MDcw' 'MiwiaXNzIjoiaHR0cHM6Ly9pYW0ubmcuYmx1ZW1peC5uZXQvb2lkYy90b' '2tlbiIsImdyYW50X3R5cGUiOiJ1cm46aWJtOnBhcmFtczpvYXV0aDpncm' 'FudC10eXBlOmFwaWtleSIsInNjb3BlIjoib3BlbmlkIiwiY2xpZW50X2l' 'kIjoiZGVmYXVsdCJ9.XAPdb5K4n2nYih-JWTWBGoKkxTXM31c1BB1g-Ci' 'auc2LxuoNXVTyz_mNqf1zQL07FUde1Cb_dwrbotjickNcxVPost6byQzt' 'fc0mRF1x2S6VR8tn7SGiRmXBjLofkTh1JQq-jutp2MS315XbTG6K6m16u' 'YzL9qfMnRvQHxsZWErzfPiJx-Trg_j7OX-qNFjdNUGnRpU7FmULy0r7Rx' 'Ld8mhG-M1yxVzRBAZzvM63s0XXfMnk1oLi-BuUUTqVOdrM0KyYMWfD0Q7' '2PTo4Exa17V-R_73Nq8VPCwpOvZcwKRA2sPTVgTMzU34max8b5kpTzVGJ' '6SXSItTVOUdAygZBng') MOCK_IAM_TOKEN_RESPONSE = '{"access_token": "%s",\ "refresh_token": "MO61FKNvVRWkSa4vmBZqYv_Jt1kkGMUc-XzTcNnR-GnIhVKXHUWxJVV3\ RddE8Kqh3X_TZRmyK8UySIWKxoJ2t6obUSUalPm90SBpTdoXtaljpNyo\ rmqCCYPROnk6JBym72ikSJqKHHEZVQkT0B5ggZCwPMnKagFj0ufs-VIh\ CF97xhDxDKcIPMWG02xxPuESaSTJJug7e_dUDoak_ZXm9xxBmOTRKwOx\ n5sTKthNyvVpEYPE7jIHeiRdVDOWhN5LomgCn3TqFCLpMErnqwgNYbyC\ Bd9rNm-alYKDb6Jle4njuIBpXxQPb4euDwLd1osApaSME3nEarFWqRBz\ hjoqCe1Kv564s_rY7qzD1nHGvKOdpSa0ZkMcfJ0LbXSQPs7gBTSVrBFZ\ qwlg-2F-U3Cto62-9qRR_cEu_K9ZyVwL4jWgOlngKmxV6Ku4L5mHp4Kg\ EJSnY_78_V2nm64E--i2ZA1FhiKwIVHDOivVNhggE9oabxg54vd63glp\ 4GfpNnmZsMOUYG9blJJpH4fDX4Ifjbw-iNBD7S2LRpP8b8vG9pb4WioG\ 
zN43lE5CysveKYWrQEZpThznxXlw1snDu_A48JiL3Lrvo1LobLhF3zFV\ -kQ=",\ "token_type": "Bearer",\ "expires_in": 3600,\ "expiration": 1500470702}'%(MOCK_ACCESS_TOKEN) class IAMAuthTests(unittest.TestCase): """ Unit tests for IAM authentication. """ @staticmethod def _mock_cookie(expires_secs=300): return Cookie( version=0, name='IAMSession', value=('SQJCaUQxMqEfMEAyRKU6UopLVXceS0c9RPuQgDArCEYoN3l_TEY4gdf-DJ7' '4sHfjcNEUVjfdOvA'), port=None, port_specified=False, domain='localhost', domain_specified=False, domain_initial_dot=False, path="/", path_specified=True, secure=True, expires=int(time.time() + expires_secs), discard=False, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=True) def test_iam_set_credentials(self): iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984') self.assertEqual(iam._api_key, MOCK_API_KEY) new_api_key = 'some_new_api_key' iam.set_credentials(None, new_api_key) self.assertEqual(iam._api_key, new_api_key) @mock.patch('cloudant._client_session.ClientSession.request') def test_iam_get_access_token(self, m_req): m_response = mock.MagicMock() mock_token_response_text = mock.PropertyMock(return_value=MOCK_IAM_TOKEN_RESPONSE) type(m_response).text = mock_token_response_text m_req.return_value = m_response iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984') access_token = iam._get_access_token() m_req.assert_called_once_with( 'POST', iam._token_url, auth=None, headers={'Accepts': 'application/json'}, data={ 'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'response_type': 'cloud_iam', 'apikey': MOCK_API_KEY } ) self.assertEqual(access_token, MOCK_ACCESS_TOKEN) self.assertTrue(m_response.raise_for_status.called) mock_token_response_text.assert_called_with() @mock.patch('cloudant._client_session.ClientSession.request') def test_iam_get_access_token_with_iam_client_id_and_secret(self, m_req): m_response = mock.MagicMock() mock_token_response_text = mock.PropertyMock(return_value=MOCK_IAM_TOKEN_RESPONSE) type(m_response).text = 
mock_token_response_text m_req.return_value = m_response iam_client_id = 'foo' iam_client_secret = 'bar' iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984', client_id=iam_client_id, client_secret=iam_client_secret) access_token = iam._get_access_token() m_req.assert_called_once_with( 'POST', iam._token_url, auth=(iam_client_id, iam_client_secret), headers={'Accepts': 'application/json'}, data={ 'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'response_type': 'cloud_iam', 'apikey': MOCK_API_KEY } ) self.assertEqual(access_token, MOCK_ACCESS_TOKEN) self.assertTrue(m_response.raise_for_status.called) mock_token_response_text.assert_called_with() @mock.patch('cloudant._client_session.ClientSession.request') @mock.patch('cloudant._client_session.IAMSession._get_access_token') def test_iam_login(self, m_token, m_req): m_token.return_value = MOCK_ACCESS_TOKEN m_response = mock.MagicMock() m_req.return_value = m_response iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984') iam.login() m_req.assert_called_once_with( 'POST', iam._session_url, headers={'Content-Type': 'application/json'}, data=json.dumps({'access_token': MOCK_ACCESS_TOKEN}) ) self.assertEqual(m_token.call_count, 1) self.assertTrue(m_response.raise_for_status.called) def test_iam_logout(self): iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984') # add a valid cookie to jar iam.cookies.set_cookie(self._mock_cookie()) self.assertEqual(len(iam.cookies.keys()), 1) iam.logout() self.assertEqual(len(iam.cookies.keys()), 0) @mock.patch('cloudant._client_session.ClientSession.get') def test_iam_get_session_info(self, m_get): m_info = '{"ok": true, "info": {"authentication_db": "_users"}}' m_response = mock.MagicMock() type(m_response).text = mock.PropertyMock(return_value=m_info) m_get.return_value = m_response iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984') info = iam.info() m_get.assert_called_once_with(iam._session_url) self.assertEqual(info, json.loads(m_info)) 
self.assertTrue(m_response.raise_for_status.called) @mock.patch('cloudant._client_session.IAMSession.login') @mock.patch('cloudant._client_session.ClientSession.request') def test_iam_first_request(self, m_req, m_login): # mock 200 m_response_ok = mock.MagicMock() type(m_response_ok).status_code = mock.PropertyMock(return_value=200) type(m_response_ok).text = mock.PropertyMock(return_value='{"ok": true}') m_req.return_value = m_response_ok iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984', auto_renew=True) iam.login() self.assertEqual(m_login.call_count, 1) self.assertEqual(m_req.call_count, 0) # add a valid cookie to jar iam.cookies.set_cookie(self._mock_cookie()) resp = iam.request('GET', 'http://127.0.0.1:5984/mydb1') self.assertEqual(m_login.call_count, 1) self.assertEqual(m_req.call_count, 1) self.assertEqual(resp.status_code, 200) @mock.patch('cloudant._client_session.IAMSession.login') @mock.patch('cloudant._client_session.ClientSession.request') def test_iam_renew_cookie_on_expiry(self, m_req, m_login): # mock 200 m_response_ok = mock.MagicMock() type(m_response_ok).status_code = mock.PropertyMock(return_value=200) type(m_response_ok).text = mock.PropertyMock(return_value='{"ok": true}') m_req.return_value = m_response_ok iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984', auto_renew=True) iam.login() # add an expired cookie to jar iam.cookies.set_cookie(self._mock_cookie(expires_secs=-300)) resp = iam.request('GET', 'http://127.0.0.1:5984/mydb1') self.assertEqual(m_login.call_count, 2) self.assertEqual(m_req.call_count, 1) self.assertEqual(resp.status_code, 200) @mock.patch('cloudant._client_session.IAMSession.login') @mock.patch('cloudant._client_session.ClientSession.request') def test_iam_renew_cookie_on_401_success(self, m_req, m_login): # mock 200 m_response_ok = mock.MagicMock() type(m_response_ok).status_code = mock.PropertyMock(return_value=200) type(m_response_ok).text = mock.PropertyMock(return_value='{"ok": true}') # mock 401 
m_response_bad = mock.MagicMock() type(m_response_bad).status_code = mock.PropertyMock(return_value=401) m_req.side_effect = [m_response_bad, m_response_ok, m_response_ok] iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984', auto_renew=True) iam.login() self.assertEqual(m_login.call_count, 1) # add a valid cookie to jar iam.cookies.set_cookie(self._mock_cookie()) resp = iam.request('GET', 'http://127.0.0.1:5984/mydb1') self.assertEqual(resp.status_code, 200) self.assertEqual(m_login.call_count, 2) self.assertEqual(m_req.call_count, 2) resp = iam.request('GET', 'http://127.0.0.1:5984/mydb1') self.assertEqual(resp.status_code, 200) self.assertEqual(m_login.call_count, 2) self.assertEqual(m_req.call_count, 3) @mock.patch('cloudant._client_session.IAMSession.login') @mock.patch('cloudant._client_session.ClientSession.request') def test_iam_renew_cookie_on_401_failure(self, m_req, m_login): # mock 401 m_response_bad = mock.MagicMock() type(m_response_bad).status_code = mock.PropertyMock(return_value=401) m_req.return_value = m_response_bad iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984', auto_renew=True) iam.login() self.assertEqual(m_login.call_count, 1) # add a valid cookie to jar iam.cookies.set_cookie(self._mock_cookie()) resp = iam.request('GET', 'http://127.0.0.1:5984/mydb1') self.assertEqual(resp.status_code, 401) self.assertEqual(m_login.call_count, 2) self.assertEqual(m_req.call_count, 2) resp = iam.request('GET', 'http://127.0.0.1:5984/mydb1') self.assertEqual(resp.status_code, 401) self.assertEqual(m_login.call_count, 3) self.assertEqual(m_req.call_count, 4) @mock.patch('cloudant._client_session.IAMSession.login') @mock.patch('cloudant._client_session.ClientSession.request') def test_iam_renew_cookie_disabled(self, m_req, m_login): # mock 401 m_response_bad = mock.MagicMock() type(m_response_bad).status_code = mock.PropertyMock(return_value=401) m_req.return_value = m_response_bad iam = IAMSession(MOCK_API_KEY, 'http://127.0.0.1:5984', 
auto_renew=False) iam.login() self.assertEqual(m_login.call_count, 1) resp = iam.request('GET', 'http://127.0.0.1:5984/mydb1') self.assertEqual(resp.status_code, 401) self.assertEqual(m_login.call_count, 1) # no attempt to renew self.assertEqual(m_req.call_count, 1) resp = iam.request('GET', 'http://127.0.0.1:5984/mydb1') self.assertEqual(resp.status_code, 401) self.assertEqual(m_login.call_count, 1) # no attempt to renew self.assertEqual(m_req.call_count, 2) @mock.patch('cloudant._client_session.IAMSession.login') @mock.patch('cloudant._client_session.ClientSession.request') def test_iam_client_create(self, m_req, m_login): # mock 200 m_response_ok = mock.MagicMock() type(m_response_ok).status_code = mock.PropertyMock(return_value=200) type(m_response_ok).text = mock.PropertyMock(return_value='["animaldb"]') m_req.return_value = m_response_ok # create IAM client client = Cloudant.iam('foo', MOCK_API_KEY) client.connect() # add a valid cookie to jar client.r_session.cookies.set_cookie(self._mock_cookie()) dbs = client.all_dbs() self.assertEqual(m_login.call_count, 1) self.assertEqual(m_req.call_count, 1) self.assertEqual(dbs, ['animaldb']) @mock.patch('cloudant._client_session.IAMSession.login') @mock.patch('cloudant._client_session.IAMSession.set_credentials') def test_iam_client_session_login(self, m_set, m_login): # create IAM client client = Cloudant.iam('foo', MOCK_API_KEY) client.connect() # add a valid cookie to jar client.r_session.cookies.set_cookie(self._mock_cookie()) client.session_login() m_set.assert_called_with(None, None) self.assertEqual(m_login.call_count, 2) self.assertEqual(m_set.call_count, 2) @mock.patch('cloudant._client_session.IAMSession.login') @mock.patch('cloudant._client_session.IAMSession.set_credentials') def test_iam_client_session_login_with_new_credentials(self, m_set, m_login): # create IAM client client = Cloudant.iam('foo', MOCK_API_KEY) client.connect() # add a valid cookie to jar 
client.r_session.cookies.set_cookie(self._mock_cookie()) client.session_login('bar', 'baz') # new creds m_set.assert_called_with('bar', 'baz') self.assertEqual(m_login.call_count, 2) self.assertEqual(m_set.call_count, 2) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/index_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2020 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Unit tests for the Index module. IndexTests and TextIndexTests are tested against Cloudant only. See configuration options for environment variables in unit_t_db_base module docstring. """ from __future__ import absolute_import import os import unittest import mock import requests from cloudant.design_document import DesignDocument from cloudant.document import Document from cloudant.error import CloudantArgumentError, CloudantIndexException from cloudant.index import Index, TextIndex, SpecialIndex from cloudant.query import Query from cloudant.view import QueryIndexView from nose.plugins.attrib import attr from .unit_t_db_base import UnitTestDbBase from .. import PY2 class CloudantIndexExceptionTests(unittest.TestCase): """ Ensure CloudantIndexException functions as expected. """ def test_raise_without_code(self): """ Ensure that a default exception/code is used if none is provided. 
""" with self.assertRaises(CloudantIndexException) as cm: raise CloudantIndexException() self.assertEqual(cm.exception.status_code, 100) def test_raise_using_invalid_code(self): """ Ensure that a default exception/code is used if invalid code is provided. """ with self.assertRaises(CloudantIndexException) as cm: raise CloudantIndexException('foo') self.assertEqual(cm.exception.status_code, 100) def test_raise_with_proper_code_and_args(self): """ Ensure that the requested exception is raised. """ with self.assertRaises(CloudantIndexException) as cm: raise CloudantIndexException(101) self.assertEqual(cm.exception.status_code, 101) @attr(db=['cloudant','couch']) @attr(couchapi=2) class IndexTests(UnitTestDbBase): """ Index unit tests """ def setUp(self): """ Set up test attributes """ super(IndexTests, self).setUp() self.db_set_up() def tearDown(self): """ Reset test attributes """ self.db_tear_down() super(IndexTests, self).tearDown() def test_constructor_with_args(self): """ Test instantiating an Index by passing in arguments. As a side effect this test also tests the design_document_id, name, type, and definition property methods. """ index = Index(self.db, 'ddoc-id', 'index-name', foo={'bar': 'baz'}) self.assertIsInstance(index, Index) self.assertEqual(index.design_document_id, 'ddoc-id') self.assertEqual(index.name, 'index-name') self.assertEqual(index.type, 'json') self.assertEqual(index.definition, {'foo': {'bar': 'baz'}}) def test_constructor_with_only_a_db(self): """ Test instantiating an Index with a database only. As a side effect this test also tests the design_document_id, name, type, and definition property methods. 
""" index = Index(self.db) self.assertIsInstance(index, Index) self.assertIsNone(index.design_document_id) self.assertIsNone(index.name) self.assertEqual(index.type, 'json') self.assertEqual(index.definition, {}) def test_retrieve_index_url(self): """ Test constructing the Index url """ index = Index(self.db) self.assertEqual( index.index_url, '/'.join((self.db.database_url, '_index')) ) def test_index_to_dictionary(self): """ Test the conversion of an Index object into a dictionary representation of that object. """ index = Index(self.db, 'ddoc-id', 'index-name', foo={'bar': 'baz'}) self.assertEqual(index.as_a_dict(), { 'ddoc': 'ddoc-id', 'name': 'index-name', 'type': 'json', 'def': {'foo': {'bar': 'baz'}} }) def test_index_as_a_dict_with_none_attributes(self): """ Test the conversion of an Index object that contains attributes set to None into a dictionary representation of that object. """ index = Index(self.db) self.assertEqual(index.as_a_dict(), { 'ddoc': None, 'name': None, 'type': 'json', 'def': {} }) def test_create_an_index_using_ddoc_index_name(self): """ Test that a JSON index is created in the remote database. 
""" index = Index(self.db, 'ddoc001', 'index001', fields=['name', 'age']) index.create() self.assertEqual(index.design_document_id, '_design/ddoc001') self.assertEqual(index.name, 'index001') with DesignDocument(self.db, index.design_document_id) as ddoc: self.assertIsInstance(ddoc.get_view(index.name), QueryIndexView) self.assertEqual(ddoc['_id'], index.design_document_id) self.assertTrue(ddoc['_rev'].startswith('1-')) self.assertEqual(ddoc['indexes'], {}) self.assertEqual(ddoc['language'], 'query') self.assertEqual(ddoc['lists'], {}) self.assertEqual(ddoc['shows'], {}) self.assertListEqual(list(ddoc['views'].keys()), ['index001']) view = ddoc['views'][index.name] self.assertEqual(view['map']['fields']['age'], 'asc') self.assertEqual(view['map']['fields']['name'], 'asc') self.assertEqual(view['options']['def']['fields'], ['name', 'age']) self.assertEqual(view['reduce'], '_count') def test_create_an_index_without_ddoc_index_name(self): """ Test that a JSON index is created in the remote database. 
""" index = Index(self.db, fields=['name', 'age']) index.create() self.assertIsNotNone(index.design_document_id) self.assertTrue(index.design_document_id.startswith('_design/')) self.assertIsNotNone(index.name) with DesignDocument(self.db, index.design_document_id) as ddoc: self.assertIsInstance(ddoc.get_view(index.name), QueryIndexView) self.assertEqual(ddoc['_id'], index.design_document_id) self.assertTrue(ddoc['_rev'].startswith('1-')) self.assertEqual(ddoc['indexes'], {}) self.assertEqual(ddoc['language'], 'query') self.assertEqual(ddoc['lists'], {}) self.assertEqual(ddoc['shows'], {}) self.assertListEqual(list(ddoc['views'].keys()), [index.name]) view = ddoc['views'][index.name] self.assertEqual(view['map']['fields']['age'], 'asc') self.assertEqual(view['map']['fields']['name'], 'asc') self.assertEqual(view['options']['def']['fields'], ['name', 'age']) self.assertEqual(view['reduce'], '_count') def test_create_an_index_with_empty_ddoc_index_name(self): """ Test that a JSON index is created in the remote database. 
""" index = Index(self.db, '', '', fields=['name', 'age']) index.create() self.assertIsNotNone(index.design_document_id) self.assertTrue(index.design_document_id.startswith('_design/')) self.assertIsNotNone(index.name) with DesignDocument(self.db, index.design_document_id) as ddoc: self.assertIsInstance(ddoc.get_view(index.name), QueryIndexView) self.assertEqual(ddoc['_id'], index.design_document_id) self.assertTrue(ddoc['_rev'].startswith('1-')) self.assertEqual(ddoc['indexes'], {}) self.assertEqual(ddoc['language'], 'query') self.assertEqual(ddoc['lists'], {}) self.assertEqual(ddoc['shows'], {}) self.assertListEqual(list(ddoc['views'].keys()), [index.name]) view = ddoc['views'][index.name] self.assertEqual(view['map']['fields']['age'], 'asc') self.assertEqual(view['map']['fields']['name'], 'asc') self.assertEqual(view['options']['def']['fields'], ['name', 'age']) self.assertEqual(view['reduce'], '_count') def test_create_an_index_using_design_prefix(self): """ Test that a JSON index is created correctly in the remote database when the ddoc id is already prefixed by '_design/' """ index = Index(self.db, '_design/ddoc001', 'index001', fields=['name', 'age']) index.create() self.assertEqual(index.design_document_id, '_design/ddoc001') self.assertEqual(index.name, 'index001') with DesignDocument(self.db, index.design_document_id) as ddoc: self.assertIsInstance(ddoc.get_view(index.name), QueryIndexView) self.assertEqual(ddoc['_id'], index.design_document_id) self.assertTrue(ddoc['_rev'].startswith('1-')) self.assertEqual(ddoc['indexes'], {}) self.assertEqual(ddoc['language'], 'query') self.assertEqual(ddoc['lists'], {}) self.assertEqual(ddoc['shows'], {}) self.assertListEqual(list(ddoc['views'].keys()), [index.name]) view = ddoc['views'][index.name] self.assertEqual(view['map']['fields']['age'], 'asc') self.assertEqual(view['map']['fields']['name'], 'asc') self.assertEqual(view['options']['def']['fields'], ['name', 'age']) self.assertEqual(view['reduce'], '_count') 
def test_create_uses_custom_encoder(self): """ Test that the create method uses the custom encoder """ self.set_up_client(auto_connect=True, encoder="AEncoder") database = self.client[self.test_dbname] index = Index(database, '_design/ddoc001', 'index001', fields=['name', 'age']) with self.assertRaises(TypeError): index.create() def test_create_fails_due_to_ddocid_validation(self): """ Ensure that if the design doc id is not a string the create call fails. """ index = Index(self.db, ['ddoc001'], 'index001', fields=['name', 'age']) with self.assertRaises(CloudantArgumentError) as cm: index.create() err = cm.exception self.assertEqual( str(err), 'The design document id: [\'ddoc001\'] is not a string.' ) def test_create_fails_due_to_index_name_validation(self): """ Ensure that if the index name is not a string the create call fails. """ index = Index(self.db, 'ddoc001', ['index001'], fields=['name', 'age']) with self.assertRaises(CloudantArgumentError) as cm: index.create() err = cm.exception self.assertEqual( str(err), 'The index name: [\'index001\'] is not a string.' ) def test_create_fails_due_to_def_validation(self): """ Ensure that if the index definition contains anything other than "fields" the create call fails. """ index = Index(self.db, fields=['name', 'age'], selector={}) with self.assertRaises(CloudantArgumentError) as cm: index.create() err = cm.exception self.assertTrue(str(err).endswith( 'A JSON index requires that only a \'fields\' argument is provided.')) def test_deleting_index(self): """ Test that deleting an index works as expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') self.assertFalse(ddoc.exists()) index = Index(self.db, 'ddoc001', 'index001', fields=['name', 'age']) index.create() self.assertTrue(ddoc.exists()) index.delete() self.assertFalse(ddoc.exists()) def test_deleting_non_existing_index(self): """ Tests how deleting a non-existing index is handled. 
""" ddoc = DesignDocument(self.db, '_design/ddoc001') index = Index(self.db, 'ddoc001', 'index001', fields=['name', 'age']) self.assertFalse(ddoc.exists()) with self.assertRaises(requests.HTTPError) as cm: index.delete() err = cm.exception self.assertEqual(err.response.status_code, 404) def test_deleting_index_without_ddoc(self): """ Tests that deleting an index without a ddoc id provided fails as expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') index = Index(self.db, None, 'index001', fields=['name', 'age']) self.assertFalse(ddoc.exists()) with self.assertRaises(CloudantArgumentError) as cm: index.delete() err = cm.exception self.assertEqual( str(err), 'Deleting an index requires a design document id be provided.' ) def test_deleting_index_without_index_name(self): """ Tests that deleting an index without an index name provided fails as expected. """ ddoc = DesignDocument(self.db, '_design/ddoc001') index = Index(self.db, 'ddoc001', fields=['name', 'age']) self.assertFalse(ddoc.exists()) with self.assertRaises(CloudantArgumentError) as cm: index.delete() err = cm.exception self.assertEqual( str(err), 'Deleting an index requires an index name be provided.' ) def test_index_via_query(self): """ Test that a created index will produce expected query results. """ index = Index(self.db, 'ddoc001', 'index001', fields=['age']) index.create() self.populate_db_with_documents(100) query = Query(self.db) resp = query( fields=['name', 'age'], selector={'age': {'$eq': 6}} ) self.assertEqual(resp['docs'], [{'name': 'julia', 'age': 6}]) def test_index_usage_via_query(self): """ Test that a query will warn if the indexes that exist do not satisfy the query selector. 
""" index = Index(self.db, 'ddoc001', 'index001', fields=['name']) index.create() self.populate_db_with_documents(100) result = self.db.get_query_result(fields=['name', 'age'], selector={'age': {'$eq': 6}}, raw_result=True) self.assertTrue(str(result['warning']).lower().startswith("no matching index found")) @attr(db='cloudant') class TextIndexTests(UnitTestDbBase): """ Search Index unit tests """ def setUp(self): """ Set up test attributes """ super(TextIndexTests, self).setUp() self.db_set_up() def tearDown(self): """ Reset test attributes """ self.db_tear_down() super(TextIndexTests, self).tearDown() def test_constructor_with_args(self): """ Test instantiating a TextIndex by passing in arguments. As a side effect this test also tests the design_document_id, name, type, and definition property methods. """ index = TextIndex(self.db, 'ddoc-id', 'index-name', foo={'bar': 'baz'}) self.assertIsInstance(index, TextIndex) self.assertEqual(index.design_document_id, 'ddoc-id') self.assertEqual(index.name, 'index-name') self.assertEqual(index.type, 'text') self.assertEqual(index.definition, {'foo': {'bar': 'baz'}}) def test_constructor_with_only_a_db(self): """ Test instantiating an TextIndex with a database only. As a side effect this test also tests the design_document_id, name, type, and definition property methods. """ index = TextIndex(self.db) self.assertIsInstance(index, TextIndex) self.assertIsNone(index.design_document_id) self.assertIsNone(index.name) self.assertEqual(index.type, 'text') self.assertEqual(index.definition, {}) def test_create_a_search_index_no_kwargs(self): """ Test that a TEXT index is created in the remote database. 
""" index = TextIndex(self.db, 'ddoc001', 'index001') index.create() self.assertEqual(index.design_document_id, '_design/ddoc001') self.assertEqual(index.name, 'index001') with DesignDocument(self.db, index.design_document_id) as ddoc: self.assertEqual(ddoc['_id'], index.design_document_id) self.assertTrue(ddoc['_rev'].startswith('1-')) self.assertEqual(ddoc['language'], 'query') self.assertEqual(ddoc['lists'], {}) self.assertEqual(ddoc['shows'], {}) self.assertEqual(ddoc['views'], {}) index = ddoc['indexes']['index001'] self.assertEqual(index['analyzer']['default'], 'keyword') self.assertEqual(index['analyzer']['fields']['$default'], 'standard') self.assertEqual(index['analyzer']['name'], 'perfield') self.assertEqual(index['index']['default_analyzer'], 'keyword') self.assertEqual(index['index']['default_field'], {}) self.assertEqual(index['index']['fields'], 'all_fields') self.assertEqual(index['index']['selector'], {}) self.assertTrue(index['index']['index_array_lengths']) def test_create_a_search_index_with_kwargs(self): """ Test that a TEXT index is created in the remote database. 
""" index = TextIndex( self.db, 'ddoc001', 'index001', fields=[{'name': 'name', 'type':'string'}, {'name': 'age', 'type':'number'}], selector={}, default_field={'enabled': True, 'analyzer': 'german'}) index.create() self.assertEqual(index.design_document_id, '_design/ddoc001') self.assertEqual(index.name, 'index001') with DesignDocument(self.db, index.design_document_id) as ddoc: self.assertEqual(ddoc['_id'], index.design_document_id) self.assertTrue(ddoc['_rev'].startswith('1-')) self.assertEqual(ddoc['language'], 'query') self.assertEqual(ddoc['lists'], {}) self.assertEqual(ddoc['shows'], {}) self.assertEqual(ddoc['views'], {}) index = ddoc['indexes']['index001'] self.assertEqual(index['analyzer']['default'], 'keyword') self.assertEqual(index['analyzer']['fields']['$default'], 'german') self.assertEqual(index['analyzer']['name'], 'perfield') self.assertEqual(index['index']['default_analyzer'], 'keyword') self.assertEqual(index['index']['default_field']['analyzer'], 'german') self.assertEqual(index['index']['fields'], [{'name': 'name', 'type': 'string'}, {'name': 'age', 'type': 'number'}]) self.assertEqual(index['index']['selector'], {}) self.assertTrue(index['index']['default_field']['enabled']) self.assertTrue(index['index']['index_array_lengths']) def test_create_a_search_index_invalid_argument(self): """ Test that a TEXT index is not created when an invalid argument is given. """ index = TextIndex(self.db, 'ddoc001', 'index001', foo='bar') with self.assertRaises(CloudantArgumentError) as cm: index.create() err = cm.exception self.assertEqual(str(err), 'Invalid argument: foo') def test_create_a_search_index_invalid_fields_value(self): """ Test that a TEXT index is not created when an invalid fields value is given. 
""" index = TextIndex(self.db, 'ddoc001', 'index001', fields=5) with self.assertRaises(CloudantArgumentError) as cm: index.create() err = cm.exception self.assertEqual( str(err), 'Argument fields is not an instance of expected type: ' '<{} \'list\'>'.format('type' if PY2 else 'class') ) def test_create_a_search_index_invalid_default_field_value(self): """ Test that a TEXT index is not created when an invalid default_field value is given. """ index = TextIndex(self.db, 'ddoc001', 'index001', default_field=5) with self.assertRaises(CloudantArgumentError) as cm: index.create() err = cm.exception self.assertEqual( str(err), 'Argument default_field is not an instance of expected type: ' '<{} \'dict\'>'.format('type' if PY2 else 'class') ) def test_create_a_search_index_invalid_selector_value(self): """ Test that a TEXT index is not created when an invalid selector value is given. """ index = TextIndex(self.db, 'ddoc001', 'index001', selector=5) with self.assertRaises(CloudantArgumentError) as cm: index.create() err = cm.exception self.assertEqual( str(err), 'Argument selector is not an instance of expected type: ' '<{} \'dict\'>'.format('type' if PY2 else 'class') ) def test_create_unpartitioned_query_index(self): """ Test that create_query_index works on an unpartitioned database """ ddoc = DesignDocument(self.db, document_id="unpartitioned_query_index_ddoc") ddoc["language"] = "query" ddoc.save() index = self.db.create_query_index( design_document_id="_design/unpartitioned_query_index_ddoc", fields=["key"], partitioned=False ) index.create() self.assertGreater(len(self.db.get_query_indexes()), 0) def test_search_index_via_query(self): """ Test that a created TEXT index will produce expected query results. 
""" index = TextIndex(self.db, 'ddoc001', 'index001') index.create() self.populate_db_with_documents(100) with Document(self.db, 'julia006') as doc: doc['name'] = 'julia isabel' query = Query(self.db) resp = query( fields=['name', 'age'], selector={'$text': 'isabel'} ) self.assertEqual(resp['docs'], [{'name': 'julia isabel', 'age': 6}]) class SpecialIndexTests(unittest.TestCase): """ Special Index unit tests """ def setUp(self): """ Set up test attributes """ self.db = mock.Mock() self.db.r_session = 'mocked-session' self.db.database_url = 'http://mocked.url.com/my_db' def test_constructor(self): """ Test that the constructor instantiates a SpecialIndex object. """ index = SpecialIndex(self.db, fields=[{'_id': 'asc'}]) self.assertIsInstance(index, SpecialIndex) self.assertEqual(index.as_a_dict(), { 'ddoc': None, 'name': '_all_docs', 'type': 'special', 'def': {'fields': [{'_id': 'asc'}]}}) def test_create_disabled(self): """ Test that the SpecialIndex create method is disabled. """ index = SpecialIndex(self.db, fields=[{'_id': 'asc'}]) with self.assertRaises(CloudantIndexException) as cm: index.create() err = cm.exception self.assertEqual( str(err), 'Creating the \"special\" index is not allowed.' ) def test_delete_disabled(self): """ Test that the SpecialIndex delete method is disabled. """ index = SpecialIndex(self.db, fields=[{'_id': 'asc'}]) with self.assertRaises(CloudantIndexException) as cm: index.delete() err = cm.exception self.assertEqual( str(err), 'Deleting the \"special\" index is not allowed.' ) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/infinite_feed_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2016, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ feed module - Unit tests for Feed class """ import os import unittest from time import sleep from cloudant.error import CloudantArgumentError, CloudantFeedException from cloudant.feed import InfiniteFeed, Feed from nose.plugins.attrib import attr from requests import Session from .unit_t_db_base import UnitTestDbBase class MethodCallCount(object): """ This callable class is used as a proxy by the infinite feed tests to wrap method calls with the intent of tracking the number of times a specific method has been called. """ def __init__(self, meth_ref): self._ref = meth_ref self.called_count = 0 def __call__(self): self.called_count += 1 self._ref() class CloudantFeedExceptionTests(unittest.TestCase): """ Ensure CloudantFeedException functions as expected. """ def test_raise_without_code(self): """ Ensure that a default exception/code is used if none is provided. """ with self.assertRaises(CloudantFeedException) as cm: raise CloudantFeedException() self.assertEqual(cm.exception.status_code, 100) def test_raise_using_invalid_code(self): """ Ensure that a default exception/code is used if invalid code is provided. """ with self.assertRaises(CloudantFeedException) as cm: raise CloudantFeedException('foo') self.assertEqual(cm.exception.status_code, 100) def test_raise_with_proper_code_and_args(self): """ Ensure that the requested exception is raised. 
""" with self.assertRaises(CloudantFeedException) as cm: raise CloudantFeedException(101) self.assertEqual(cm.exception.status_code, 101) @attr(db=['cloudant','couch']) class InfiniteFeedTests(UnitTestDbBase): """ Infinite Feed unit tests """ def setUp(self): """ Set up test attributes """ super(InfiniteFeedTests, self).setUp() self.db_set_up() def tearDown(self): """ Reset test attributes """ self.db_tear_down() super(InfiniteFeedTests, self).tearDown() def test_constructor_no_feed_option(self): """ Test constructing an infinite feed when no feed option is set """ feed = InfiniteFeed(self.db, chunk_size=1, timeout=100) self.assertEqual(feed._url, '/'.join([self.db.database_url, '_changes'])) self.assertIsInstance(feed._r_session, Session) self.assertFalse(feed._raw_data) self.assertDictEqual(feed._options, {'feed': 'continuous', 'timeout': 100}) self.assertEqual(feed._chunk_size, 1) def test_constructor_with_feed_option(self): """ Test constructing an infinite feed when the continuous feed option is set. """ feed = InfiniteFeed(self.db, chunk_size=1, timeout=100, feed='continuous') self.assertEqual(feed._url, '/'.join([self.db.database_url, '_changes'])) self.assertIsInstance(feed._r_session, Session) self.assertFalse(feed._raw_data) self.assertDictEqual(feed._options, {'feed': 'continuous', 'timeout': 100}) self.assertEqual(feed._chunk_size, 1) def test_constructor_with_invalid_feed_option(self): """ Test constructing an infinite feed when a feed option is set to an invalid value raises an exception. """ feed = InfiniteFeed(self.db, feed='longpoll') with self.assertRaises(CloudantArgumentError) as cm: invalid_feed = [x for x in feed] self.assertEqual( str(cm.exception), 'Invalid infinite feed option: longpoll. Must be set to continuous.' ) @attr(db='couch') def test_invalid_source_couchdb(self): """ Ensure that a CouchDB client cannot be used with an infinite feed. 
""" with self.assertRaises(CloudantFeedException) as cm: invalid_feed = [x for x in InfiniteFeed(self.client)] self.assertEqual(str(cm.exception), 'Infinite _db_updates feed not supported for CouchDB.') @unittest.skipIf(os.environ.get('SKIP_DB_UPDATES'), 'Skipping Cloudant _db_updates feed tests') @attr(db='cloudant') def test_constructor_db_updates(self): """ Test constructing an infinite _db_updates feed. """ feed = InfiniteFeed(self.client, chunk_size=1, timeout=100, feed='continuous') self.assertEqual(feed._url, '/'.join([self.client.server_url, '_db_updates'])) self.assertIsInstance(feed._r_session, Session) self.assertFalse(feed._raw_data) self.assertDictEqual(feed._options, {'feed': 'continuous', 'timeout': 100}) self.assertEqual(feed._chunk_size, 1) def test_infinite_feed(self): """ Test that an infinite feed will continue to issue multiple requests until stopped. This check is performed in combination by creating documents 3 separate times and checking that the "_start" method on the InfiniteFeed object was called 3 times as well. """ self.populate_db_with_documents() feed = InfiniteFeed(self.db, timeout=100) # Create a proxy for the feed._start method so that we can track how # many times it has been called. 
feed._start = MethodCallCount(feed._start) changes = list() for change in feed: self.assertSetEqual(set(change.keys()), set(['seq', 'changes', 'id'])) changes.append(change) if len(changes) in (100, 200): sleep(1) # 1 second > .1 second (timeout) self.populate_db_with_documents(off_set=len(changes)) elif len(changes) == 300: feed.stop() expected = set(['julia{0:03d}'.format(i) for i in range(300)]) self.assertSetEqual(set([x['id'] for x in changes]), expected) self.assertIsNone(feed.last_seq) # Compare infinite/continuous with normal normal = Feed(self.db) self.assertSetEqual( set([x['id'] for x in changes]), set([n['id'] for n in normal])) # Ensuring that the feed._start method was called 3 times, verifies that # the continuous feed was started/restarted 3 separate times. self.assertEqual(feed._start.called_count, 3) @unittest.skipIf(os.environ.get('SKIP_DB_UPDATES'), 'Skipping Cloudant _db_updates feed tests') @attr(db='cloudant') def test_infinite_db_updates_feed(self): """ Test that an _db_updates infinite feed will continue to issue multiple requests until stopped. Since we do not have control over updates happening within the account as we do within a database, this test is stopped after 15 database creations regardless. Within that span of time we expect that the feed would have been restarted at least once. """ feed = InfiniteFeed(self.client, since='now', timeout=100) # Create a proxy for the feed._start method so that we can track how # many times it has been called. 
feed._start = MethodCallCount(feed._start) new_dbs = list() try: new_dbs.append(self.client.create_database(self.dbname())) for change in feed: self.assertTrue(all(x in change for x in ('seq', 'type'))) new_dbs.append(self.client.create_database(self.dbname())) if feed._start.called_count >= 3 and len(new_dbs) >= 3: feed.stop() if len(new_dbs) >= 15: # We stop regardless after 15 databases have been created feed.stop() finally: [db.delete() for db in new_dbs] # The test is considered a success if feed._start was called 2+ times. # If failure occurs it does not necessarily mean that the InfiniteFeed # is not functioning as expected, it might also mean that we reached the # db limit threshold of 15 before a timeout and restart of the # InfiniteFeed could happen. self.assertTrue(feed._start.called_count > 1) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/param_translation_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2016, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Unit tests for Python to CouchDB translation of query parameters. 
""" import unittest from cloudant.error import CloudantArgumentError from cloudant._common_util import python_to_couch from tests.unit._test_util import LONG_NUMBER class PythonToCouchTests(unittest.TestCase): """ Test cases for validating python_to_couch translation functionality """ def test_valid_descending(self): """ Test descending translation is successful. """ self.assertEqual( python_to_couch({'descending': True}), {'descending': 'true'} ) self.assertEqual( python_to_couch({'descending': False}), {'descending': 'false'} ) def test_valid_endkey(self): """ Test endkey translation is successful. """ self.assertEqual(python_to_couch({'endkey': 10}), {'endkey': '10'}) # Test with long type self.assertEqual(python_to_couch({'endkey': LONG_NUMBER}), {'endkey': str(LONG_NUMBER)}) self.assertEqual( python_to_couch({'endkey': 'foo'}), {'endkey': '"foo"'} ) self.assertEqual( python_to_couch({'endkey': ['foo', 10]}), {'endkey': '["foo", 10]'} ) self.assertEqual( python_to_couch({'endkey': True}), {'endkey': 'true'} ) def test_valid_endkey_docid(self): """ Test endkey_docid translation is successful. """ self.assertEqual( python_to_couch({'endkey_docid': 'foo'}), {'endkey_docid': 'foo'} ) def test_valid_group(self): """ Test group translation is successful. """ self.assertEqual(python_to_couch({'group': True}), {'group': 'true'}) self.assertEqual(python_to_couch({'group': False}), {'group': 'false'}) def test_valid_group_level(self): """ Test group_level translation is successful. """ self.assertEqual( python_to_couch({'group_level': 100}), {'group_level': 100} ) # Test with long type self.assertEqual( python_to_couch({'group_level': LONG_NUMBER}), {'group_level': LONG_NUMBER} ) self.assertEqual( python_to_couch({'group_level': None}), {'group_level': None} ) def test_valid_include_docs(self): """ Test include_docs translation is successful. 
""" self.assertEqual( python_to_couch({'include_docs': True}), {'include_docs': 'true'} ) self.assertEqual( python_to_couch({'include_docs': False}), {'include_docs': 'false'} ) def test_valid_inclusive_end(self): """ Test inclusive_end translation is successful. """ self.assertEqual( python_to_couch({'inclusive_end': True}), {'inclusive_end': 'true'} ) self.assertEqual( python_to_couch({'inclusive_end': False}), {'inclusive_end': 'false'} ) def test_valid_key(self): """ Test key translation is successful. """ self.assertEqual(python_to_couch({'key': 10}), {'key': '10'}) # Test with long type self.assertEqual(python_to_couch({'key': LONG_NUMBER}), {'key': str(LONG_NUMBER)}) self.assertEqual(python_to_couch({'key': 'foo'}), {'key': '"foo"'}) self.assertEqual( python_to_couch({'key': ['foo', 10]}), {'key': '["foo", 10]'} ) self.assertEqual( python_to_couch({'key': True}), {'key': 'true'} ) def test_valid_keys(self): """ Test keys translation is successful. """ self.assertEqual( python_to_couch({'keys': [100, 200]}), {'keys': [100, 200]} ) # Test with long type LONG_NUM_KEY = 92233720368547758071 self.assertEqual( python_to_couch({'keys': [LONG_NUMBER, LONG_NUM_KEY]}), {'keys': [LONG_NUMBER, LONG_NUM_KEY]} ) self.assertEqual( python_to_couch({'keys': ['foo', 'bar']}), {'keys': ['foo', 'bar']} ) self.assertEqual( python_to_couch({'keys': [['foo', 100], ['bar', 200]]}), {'keys': [['foo', 100], ['bar', 200]]} ) def test_valid_limit(self): """ Test limit translation is successful. """ self.assertEqual(python_to_couch({'limit': 100}), {'limit': 100}) # Test with long type self.assertEqual(python_to_couch({'limit': LONG_NUMBER}), {'limit': LONG_NUMBER}) self.assertEqual(python_to_couch({'limit': None}), {'limit': None}) def test_valid_reduce(self): """ Test reduce translation is successful. 
""" self.assertEqual(python_to_couch({'reduce': True}), {'reduce': 'true'}) self.assertEqual( python_to_couch({'reduce': False}), {'reduce': 'false'} ) def test_valid_skip(self): """ Test skip translation is successful. """ self.assertEqual(python_to_couch({'skip': 100}), {'skip': 100}) # Test with long type self.assertEqual(python_to_couch({'skip': LONG_NUMBER}), {'skip': LONG_NUMBER}) self.assertEqual(python_to_couch({'skip': None}), {'skip': None}) def test_valid_stale(self): """ Test stale translation is successful. """ self.assertEqual(python_to_couch({'stale': 'ok'}), {'stale': 'ok'}) self.assertEqual( python_to_couch({'stale': 'update_after'}), {'stale': 'update_after'} ) def test_valid_startkey(self): """ Test startkey translation is successful. """ self.assertEqual(python_to_couch({'startkey': 10}), {'startkey': '10'}) # Test with long type self.assertEqual(python_to_couch({'startkey': LONG_NUMBER}), {'startkey': str(LONG_NUMBER)}) self.assertEqual( python_to_couch({'startkey': 'foo'}), {'startkey': '"foo"'} ) self.assertEqual( python_to_couch({'startkey': ['foo', 10]}), {'startkey': '["foo", 10]'} ) self.assertEqual( python_to_couch({'startkey': True}), {'startkey': 'true'} ) def test_valid_startkey_docid(self): """ Test startkey_docid translation is successful. """ self.assertEqual( python_to_couch({'startkey_docid': 'foo'}), {'startkey_docid': 'foo'} ) def test_valid_update(self): """ Test lazy translation is successful. """ self.assertEqual(python_to_couch({'update': 'true'}), {'update': 'true'}) self.assertEqual(python_to_couch({'update': 'false'}), {'update': 'false'}) self.assertEqual(python_to_couch({'update': 'lazy'}), {'update': 'lazy'}) def test_invalid_argument(self): """ Test translation fails when an invalid argument is passed in. 
""" with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'foo': 'bar'}) self.assertEqual(str(cm.exception), 'Invalid argument foo') def test_invalid_descending(self): """ Test descending translation fails when a non-bool value is used. """ msg = 'Argument descending not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'descending': 10}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_endkey(self): """ Test endkey translation fails when a non-string or a non-list value is used. """ msg = 'Argument endkey not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'endkey': {'foo': 'bar'}}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_endkey_docid(self): """ Test endkey_docid translation fails when a non-string value is used. """ msg = 'Argument endkey_docid not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'endkey_docid': 10}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_group(self): """ Test group translation fails when a non-bool value is used. """ msg = 'Argument group not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'group': 10}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_group_level(self): """ Test group_level translation fails when a non-integer value is used. """ msg = 'Argument group_level not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'group_level': True}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_include_docs(self): """ Test include_docs translation fails when a non-bool value is used. 
""" msg = 'Argument include_docs not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'include_docs': 10}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_inclusive_end(self): """ Test inclusive_end translation fails when a non-bool value is used. """ msg = 'Argument inclusive_end not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'inclusive_end': 10}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_key(self): """ Test key translation fails when a non-string or a non-list value is used. """ msg = 'Argument key not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'key': {'foo': 'bar'}}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_keys_not_list(self): """ Test keys translation fails when a non-list value is used. """ msg = 'Argument keys not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'keys': 'foo'}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_keys_invalid_key(self): """ Test keys translation fails when a key value used in the key list is not a valid value. """ msg = 'Key list element not of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'keys': ['foo', True, 'bar']}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_limit(self): """ Test limit translation fails when a non-integer value is used. """ msg = 'Argument limit not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'limit': True}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_reduce(self): """ Test reduce translation fails when a non-bool value is used. 
""" msg = 'Argument reduce not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'reduce': 10}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_skip(self): """ Test skip translation fails when a non-integer value is used. """ msg = 'Argument skip not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'skip': True}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_stale(self): """ Test stale translation fails when the value is not either 'ok' or 'update_after' is used. """ msg = 'Argument stale not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'stale': 10}) self.assertTrue(str(cm.exception).startswith(msg)) msg = 'Invalid value for stale option foo' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'stale': 'foo'}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_startkey(self): """ Test startkey translation fails when a non-string or a non-list value is used. """ msg = 'Argument startkey not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'startkey': {'foo': 'bar'}}) self.assertTrue(str(cm.exception).startswith(msg)) def test_invalid_startkey_docid(self): """ Test startkey_docid translation fails when a non-string value is used. """ msg = 'Argument startkey_docid not instance of expected type:' with self.assertRaises(CloudantArgumentError) as cm: python_to_couch({'startkey_docid': 10}) self.assertTrue(str(cm.exception).startswith(msg)) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/query_result_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2018 IBM Corp. All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Unit tests for the QueryResult class tested against Cloudant only. See configuration options for environment variables in unit_t_db_base module docstring. """ import unittest import os from cloudant.query import Query from cloudant.result import QueryResult from cloudant.error import ResultException from nose.plugins.attrib import attr from .unit_t_db_base import UnitTestDbBase @attr(db=['cloudant','couch']) @attr(couchapi=2) class QueryResultTests(UnitTestDbBase): """ QueryResult unit tests """ def setUp(self): """ Set up test attributes """ super(QueryResultTests, self).setUp() self.db_set_up() self.populate_db_with_documents() def tearDown(self): """ Reset test attributes """ self.db_tear_down() super(QueryResultTests, self).tearDown() def create_result(self, selector={'_id': {'$gt': 0}}, fields=['_id', 'name', 'age'], **kwargs): if kwargs.get('q_parms', None): query = Query(self.db, **kwargs['q_parms']) else: query = Query(self.db) if kwargs.get('qr_parms', None): return QueryResult(query, selector=selector, fields=fields, **kwargs['qr_parms']) else: return QueryResult(query, selector=selector, fields=fields) def test_constructor_with_options(self): """ Test instantiating a QueryResult by passing in query parameters """ query = Query(self.db) result = QueryResult(query, foo='bar', page_size=10) self.assertIsInstance(result, QueryResult) self.assertEqual(result.options, {'foo': 'bar'}) self.assertEqual(result._ref, query) 
self.assertEqual(result._page_size, 10) def test_constructor_without_options(self): """ Test instantiating a Query without parameters """ query = Query(self.db) result = QueryResult(query) self.assertIsInstance(result, QueryResult) self.assertEqual(result.options, {}) self.assertEqual(result._ref, query) self.assertEqual(result._page_size, 100) def test_constructor_with_query_skip_limit(self): """ Test instantiating a QueryResult when query callable already has skip and/or limit """ query = Query(self.db, skip=10, limit=10) result = QueryResult(query) self.assertIsInstance(result, QueryResult) self.assertDictEqual(result.options, {'skip': 10, 'limit': 10}) self.assertEqual(result._ref, query) def test_constructor_with_query_skip_limit_options_skip_limit(self): """ Ensure that options skip and/or limit override the values in the query callable if present when constructing a QueryResult """ query = Query(self.db, skip=10, limit=10) result = QueryResult(query, skip=100, limit=100) self.assertIsInstance(result, QueryResult) self.assertDictEqual(result.options, {'skip': 100, 'limit': 100}) self.assertEqual(result._ref, query) def test_key_value_access_is_not_supported(self): """ Test __getitem__() fails when a key value is provided """ result = self.create_result() with self.assertRaises(ResultException) as cm: invalid_result = result['foo'] self.assertEqual(cm.exception.status_code, 101) def test_key_value_slicing_is_not_supported(self): """ Test __getitem__() fails when non-integer values for start and stop are provided """ result = self.create_result() with self.assertRaises(ResultException) as cm: invalid_result = result['bar': 'foo'] self.assertEqual(cm.exception.status_code, 101) def test_get_item_by_index(self): """ Test retrieving a result using a value that refers to an index of the result. 
""" result = self.create_result() expected = {0: [{'_id': 'julia000', 'name': 'julia', 'age': 0}], 10: [{'_id': 'julia010', 'name': 'julia', 'age': 10}], 99: [{'_id': 'julia099', 'name': 'julia', 'age': 99}], 100: [], 110: []} for key in expected: self.assertEqual(result[key], expected[key]) def test_get_item_by_index_using_skip_limit(self): """ Test retrieving a result using a value that refers to an index of the result when the result uses skip and limit. QueryResult skip/limit parameters take precedence over Query skip/limit parameters. """ results = [self.create_result(q_parms={'skip': 10, 'limit': 10}), self.create_result(qr_parms={'skip': 10, 'limit': 10}), self.create_result(q_parms={'skip': 100, 'limit': 100}, qr_parms={'skip': 10, 'limit': 10})] expected = {0: [{'_id': 'julia010', 'name': 'julia', 'age': 10}], 5: [{'_id': 'julia015', 'name': 'julia', 'age': 15}], 9: [{'_id': 'julia019', 'name': 'julia', 'age': 19}], 10: [], 20: []} for key in expected: for result in results: self.assertEqual(result[key], expected[key]) def test_get_item_by_index_using_limit(self): """ Test retrieving a result using a value that refers to an index of the result when the result uses limit. QueryResult limit parameter takes precedence over Query limit parameter. """ results = [self.create_result(q_parms={'limit': 10}), self.create_result(qr_parms={'limit': 10}), self.create_result(q_parms={'limit': 100}, qr_parms={'limit': 10})] expected = {0: [{'_id': 'julia000', 'name': 'julia', 'age': 0}], 5: [{'_id': 'julia005', 'name': 'julia', 'age': 5}], 9: [{'_id': 'julia009', 'name': 'julia', 'age': 9}], 10: [], 20: []} for key in expected: for result in results: self.assertEqual(result[key], expected[key]) def test_get_item_by_index_using_skip(self): """ Test retrieving a result using a value that refers to an index of the result when the result uses skip. QueryResult skip parameter takes precedence over Query skip parameter. 
""" results = [self.create_result(q_parms={'skip': 10}), self.create_result(qr_parms={'skip': 10}), self.create_result(q_parms={'skip': 100}, qr_parms={'skip': 10})] expected = {0: [{'_id': 'julia010', 'name': 'julia', 'age': 10}], 5: [{'_id': 'julia015', 'name': 'julia', 'age': 15}], 89: [{'_id': 'julia099', 'name': 'julia', 'age': 99}], 90: [], 100: []} for key in expected: for result in results: self.assertEqual(result[key], expected[key]) def test_get_item_by_negative_index(self): """ Test retrieving a result raises an exception when using a negative index. """ result = self.create_result() with self.assertRaises(ResultException) as cm: invalid_result = result[-1] self.assertEqual(cm.exception.status_code, 101) def test_get_item_slice_no_start_no_stop(self): """ Test that by not providing a start and a stop slice value, the entire result is returned. """ result = self.create_result({'_id': {'$lte': 'julia002'}}) expected = [{'_id': 'julia000', 'name': 'julia', 'age': 0}, {'_id': 'julia001', 'name': 'julia', 'age': 1}, {'_id': 'julia002', 'name': 'julia', 'age': 2}] self.assertEqual(result[:], expected) def test_get_item_invalid_index_slice(self): """ Test that when invalid start and stop values are provided in a slice an exception is raised. 
""" result = self.create_result() with self.assertRaises(ResultException) as cm: invalid_result = result[-1: 10] self.assertEqual(cm.exception.status_code, 101) with self.assertRaises(ResultException) as cm: invalid_result = result[1: -10] self.assertEqual(cm.exception.status_code, 101) with self.assertRaises(ResultException) as cm: invalid_result = result[-1: -10] self.assertEqual(cm.exception.status_code, 101) with self.assertRaises(ResultException) as cm: invalid_result = result[5: 2] self.assertEqual(cm.exception.status_code, 101) with self.assertRaises(ResultException) as cm: invalid_result = result[5: 5] self.assertEqual(cm.exception.status_code, 101) def test_get_item_index_slice_using_start_stop(self): """ Test getting an index slice by using start and stop slice values. """ result = self.create_result() expected = [{'_id': 'julia098', 'name': 'julia', 'age': 98}, {'_id': 'julia099', 'name': 'julia', 'age': 99}] self.assertEqual(result[98:100], expected) self.assertEqual(result[98:102], expected) self.assertEqual(result[100:102], []) def test_get_item_index_slice_using_start_stop_limit(self): """ Test getting an index slice by using start and stop slice values when the limit parameter is also used. QueryResult limit parameter takes precedence over Query limit parameter. """ results = [self.create_result(q_parms={'limit': 20}), self.create_result(qr_parms={'limit': 20}), self.create_result(q_parms={'limit': 100}, qr_parms={'limit': 20})] expected = [{'_id': 'julia018', 'name': 'julia', 'age': 18}, {'_id': 'julia019', 'name': 'julia', 'age': 19}] for result in results: self.assertEqual(result[18:20], expected) self.assertEqual(result[18:22], expected) self.assertEqual(result[20:22], []) def test_get_item_index_slice_using_start_stop_skip(self): """ Test getting an index slice by using start and stop slice values when the skip parameter is also used. QueryResult skip parameter takes precedence over Query skip parameter. 
""" results = [self.create_result(q_parms={'skip': 98}), self.create_result(qr_parms={'skip': 98}), self.create_result(q_parms={'skip': 100}, qr_parms={'skip': 98})] expected = [{'_id': 'julia098', 'name': 'julia', 'age': 98}, {'_id': 'julia099', 'name': 'julia', 'age': 99}] for result in results: self.assertEqual(result[0:2], expected) self.assertEqual(result[0:4], expected) self.assertEqual(result[2:4], []) def test_get_item_index_slice_using_start_stop_limit_skip(self): """ Test getting an index slice by using start and stop slice values when the skip and limit parameters are also used. QueryResult skip/limit parameters take precedence over Query skip/limit parameters. """ results = [self.create_result(q_parms={'limit': 20, 'skip': 20}), self.create_result(qr_parms={'limit': 20, 'skip': 20}), self.create_result(q_parms={'limit': 100, 'skip': 100}, qr_parms={'limit': 20, 'skip': 20})] expected = [{'_id': 'julia038', 'name': 'julia', 'age': 38}, {'_id': 'julia039', 'name': 'julia', 'age': 39}] for result in results: self.assertEqual(result[18:20], expected) self.assertEqual(result[18:22], expected) self.assertEqual(result[20:22], []) def test_get_item_index_slice_using_start_only(self): """ Test getting an index slice by using start slice value only. """ result = self.create_result() expected = [{'_id': 'julia098', 'name': 'julia', 'age': 98}, {'_id': 'julia099', 'name': 'julia', 'age': 99}] self.assertEqual(result[98:], expected) self.assertEqual(result[100:], []) def test_get_item_index_slice_using_start_only_limit(self): """ Test getting an index slice by using a start slice value when the limit parameter is also used. QueryResult limit parameter takes precedence over Query limit parameter. 
""" results = [self.create_result(q_parms={'limit': 20}), self.create_result(qr_parms={'limit': 20}), self.create_result(q_parms={'limit': 100}, qr_parms={'limit': 20})] expected = [{'_id': 'julia018', 'name': 'julia', 'age': 18}, {'_id': 'julia019', 'name': 'julia', 'age': 19}] for result in results: self.assertEqual(result[18:], expected) self.assertEqual(result[20:], []) def test_get_item_index_slice_using_start_only_skip(self): """ Test getting an index slice by using a start slice value when the skip parameter is also used. QueryResult skip parameter takes precedence over Query skip parameter. """ results = [self.create_result(q_parms={'skip': 98}), self.create_result(qr_parms={'skip': 98}), self.create_result(q_parms={'skip': 100}, qr_parms={'skip': 98})] expected = [{'_id': 'julia098', 'name': 'julia', 'age': 98}, {'_id': 'julia099', 'name': 'julia', 'age': 99}] for result in results: self.assertEqual(result[0:], expected) self.assertEqual(result[2:], []) def test_get_item_index_slice_using_start_only_limit_skip(self): """ Test getting an index slice by using a start slice value when the skip and limit parameters are also used. QueryResult skip/limit parameters take precedence over Query skip/limit parameters. """ results = [self.create_result(q_parms={'limit': 20, 'skip': 20}), self.create_result(qr_parms={'limit': 20, 'skip': 20}), self.create_result(q_parms={'limit': 100, 'skip': 100}, qr_parms={'limit': 20, 'skip': 20})] expected = [{'_id': 'julia038', 'name': 'julia', 'age': 38}, {'_id': 'julia039', 'name': 'julia', 'age': 39}] for result in results: self.assertEqual(result[18:], expected) self.assertEqual(result[20:], []) def test_get_item_index_slice_using_stop_only(self): """ Test getting an index slice by using stop slice value only. 
""" result = self.create_result() expected = {2: [{'_id': 'julia000', 'name': 'julia', 'age': 0}, {'_id': 'julia001', 'name': 'julia', 'age': 1}], 102: [{'_id': 'julia{0:03d}'.format(x), 'name': 'julia', 'age': x} for x in range(100)]} for key in expected: self.assertEqual(result[:key], expected[key]) def test_get_item_index_slice_using_stop_only_limit(self): """ Test getting an index slice by using a stop slice value only when the limit parameter is also used. QueryResult limit parameter takes precedence over Query limit parameter. """ results = [self.create_result(q_parms={'limit': 20}), self.create_result(qr_parms={'limit': 20}), self.create_result(q_parms={'limit': 100}, qr_parms={'limit': 20})] expected = {2: [{'_id': 'julia000', 'name': 'julia', 'age': 0}, {'_id': 'julia001', 'name': 'julia', 'age': 1}], 22: [{'_id': 'julia{0:03d}'.format(x), 'name': 'julia', 'age': x} for x in range(20)]} for result in results: for key in expected: self.assertEqual(result[:key], expected[key]) def test_get_item_index_slice_using_stop_only_skip(self): """ Test getting an index slice by using a stop slice value only when the skip parameter is also used. QueryResult skip parameter takes precedence over Query skip parameter. """ results = [self.create_result(q_parms={'skip': 98}), self.create_result(qr_parms={'skip': 98}), self.create_result(q_parms={'skip': 100}, qr_parms={'skip': 98})] expected = [{'_id': 'julia098', 'name': 'julia', 'age': 98}, {'_id': 'julia099', 'name': 'julia', 'age': 99}] for result in results: self.assertEqual(result[:2], expected) self.assertEqual(result[:4], expected) def test_get_item_index_slice_using_stop_only_limit_skip(self): """ Test getting an index slice by using a start slice value when the skip and limit parameters are also used. QueryResult skip/limit parameters take precedence over Query skip/limit parameters. 
""" results = [self.create_result(q_parms={'limit':2, 'skip': 20}), self.create_result(qr_parms={'limit':2, 'skip': 20}), self.create_result(q_parms={'limit':100, 'skip': 100}, qr_parms={'limit':2, 'skip': 20})] expected = [{'_id': 'julia020', 'name': 'julia', 'age': 20}, {'_id': 'julia021', 'name': 'julia', 'age': 21}] for result in results: self.assertEqual(result[:2], expected) self.assertEqual(result[:4], expected) def test_iteration_with_invalid_options(self): """ Test that iteration raises an exception when "limit" is used as option for the result. """ result = self.create_result(q_parms={'limit': 10}) with self.assertRaises(ResultException) as cm: invalid_result = [row for row in result] self.assertEqual(cm.exception.status_code, 103) def test_iteration_invalid_page_size(self): """ Test that iteration raises an exception when and invalid "page_size" is is used as an option for the result. """ result = self.create_result(qr_parms={'page_size': -1}) with self.assertRaises(ResultException) as cm: invalid_result = [row for row in result] self.assertEqual(cm.exception.status_code, 104) result = self.create_result(qr_parms={'page_size': 'foo'}) with self.assertRaises(ResultException) as cm: invalid_result = [row for row in result] self.assertEqual(cm.exception.status_code, 104) def test_iteration_using_valid_page_size(self): """ Test that iteration works as expected when "page_size" is provided as an option for the result. 
""" result = self.create_result({'_id': {'$lte': 'julia004'}}, qr_parms={'page_size': 3}) expected = [{'_id': 'julia000', 'name': 'julia', 'age': 0}, {'_id': 'julia001', 'name': 'julia', 'age': 1}, {'_id': 'julia002', 'name': 'julia', 'age': 2}, {'_id': 'julia003', 'name': 'julia', 'age': 3}, {'_id': 'julia004', 'name': 'julia', 'age': 4}] self.assertEqual([x for x in result], expected) result = self.create_result({'_id': {'$lte': 'julia002'}}, qr_parms={'page_size': 3}) expected = [{'_id': 'julia000', 'name': 'julia', 'age': 0}, {'_id': 'julia001', 'name': 'julia', 'age': 1}, {'_id': 'julia002', 'name': 'julia', 'age': 2}] self.assertEqual([x for x in result], expected) result = self.create_result({'_id': {'$lte': 'julia001'}}, qr_parms={'page_size': 3}) expected = [{'_id': 'julia000', 'name': 'julia', 'age': 0}, {'_id': 'julia001', 'name': 'julia', 'age': 1}] self.assertEqual([x for x in result], expected) def test_iteration_using_default_page_size(self): """ Test that iteration works as expected when "page_size" is not provided as an option for the result. """ result = self.create_result({'_id': {'$lte': 'julia004'}}) expected = [{'_id': 'julia000', 'name': 'julia', 'age': 0}, {'_id': 'julia001', 'name': 'julia', 'age': 1}, {'_id': 'julia002', 'name': 'julia', 'age': 2}, {'_id': 'julia003', 'name': 'julia', 'age': 3}, {'_id': 'julia004', 'name': 'julia', 'age': 4}] self.assertEqual([x for x in result], expected) def test_iteration_no_data(self): """ Test that iteration works as expected when no data matches the result. """ result = self.create_result({'_id': {'$gt': 'ruby'}}) self.assertEqual([x for x in result], []) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/query_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2018 IBM Corp. All rights reserved. 
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for the Query class tested against Cloudant only.

See configuration options for environment variables in unit_t_db_base
module docstring.
"""
import os
import unittest

from cloudant.error import CloudantArgumentError
from cloudant.query import Query
from cloudant.result import QueryResult

from nose.plugins.attrib import attr

from .unit_t_db_base import UnitTestDbBase


@attr(db=['cloudant','couch'])
@attr(couchapi=2)
class QueryTests(UnitTestDbBase):
    """
    Query unit tests
    """

    def setUp(self):
        """
        Set up test attributes
        """
        super(QueryTests, self).setUp()
        self.db_set_up()

    def tearDown(self):
        """
        Reset test attributes
        """
        self.db_tear_down()
        super(QueryTests, self).tearDown()

    def test_constructor_with_kwargs(self):
        """
        Test instantiating a Query by passing in query parameters
        """
        query = Query(self.db, foo={'bar': 'baz'})
        self.assertIsInstance(query, Query)
        self.assertIsInstance(query.result, QueryResult)
        # A Query compares equal to the dict of parameters it wraps.
        self.assertEqual(query, {'foo': {'bar': 'baz'}})

    def test_constructor_without_kwargs(self):
        """
        Test instantiating a Query without parameters
        """
        query = Query(self.db)
        self.assertIsInstance(query, Query)
        self.assertIsInstance(query.result, QueryResult)
        self.assertEqual(query, {})

    def test_retrieve_query_url(self):
        """
        Test constructing the query test url
        """
        query = Query(self.db)
        self.assertEqual(
            query.url,
            '/'.join((self.db.database_url, '_find'))
        )

    def test_callable_with_invalid_argument(self):
        """
        Test Query __call__
        by passing in invalid arguments
        """
        query = Query(self.db)
        try:
            query(foo={'bar': 'baz'})
            self.fail('Above statement should raise an Exception')
        except CloudantArgumentError as err:
            self.assertEqual(str(err), 'Invalid argument: foo')

    def test_callable_with_invalid_value_types(self):
        """
        Test Query __call__
        by passing in arguments with invalid value types
        """
        test_data = [
            {'selector': 'blah'},   # Should be a dict
            {'limit': 'blah'},      # Should be an int
            {'skip': 'blah'},       # Should be an int
            {'sort': 'blah'},       # Should be a list
            {'fields': 'blah'},     # Should be a list
            {'r': 'blah'},          # Should be an int
            {'bookmark': 1},        # Should be a basestring
            {'use_index': 1}        # Should be a basestring
        ]
        for argument in test_data:
            query = Query(self.db)
            try:
                query(**argument)
                self.fail('Above statement should raise an Exception')
            except CloudantArgumentError as err:
                self.assertTrue(str(err).startswith(
                    'Argument {0} is not an instance of expected type:'.format(
                        list(argument.keys())[0]
                    )
                ))

    def test_callable_without_selector(self):
        """
        Test Query __call__ without providing a selector
        """
        query = Query(self.db)
        try:
            query(fields=['_id', '_rev'])
            self.fail('Above statement should raise an Exception')
        except CloudantArgumentError as err:
            self.assertEqual(
                str(err),
                'No selector in the query or the selector was empty. '
                'Add a selector to define the query and retry.'
            )

    def test_callable_with_empty_selector(self):
        """
        Test Query __call__ providing an empty selector
        """
        query = Query(self.db)
        try:
            query(selector={}, fields=['_id', '_rev'])
            self.fail('Above statement should raise an Exception')
        except CloudantArgumentError as err:
            self.assertEqual(
                str(err),
                'No selector in the query or the selector was empty. '
                'Add a selector to define the query and retry.'
            )

    def test_callable_executes_query(self):
        """
        Test Query __call__ executes a query
        """
        self.populate_db_with_documents(100)
        query = Query(self.db)
        resp = query(
            selector={'_id': {'$lt': 'julia050'}},
            fields=['_id'],
            sort=[{'_id': 'desc'}],
            skip=10,
            limit=3,
            r=1
        )
        # Descending sort + skip 10 + limit 3 from julia049 downwards.
        self.assertEqual(
            resp['docs'],
            [{'_id': 'julia039'}, {'_id': 'julia038'}, {'_id': 'julia037'}]
        )

    def test_custom_result_context_manager(self):
        """
        Test that custom_result yields a context manager and returns expected
        content
        """
        self.populate_db_with_documents(100)
        query = Query(
            self.db,
            selector={'_id': {'$lt': 'julia050'}},
            fields=['_id'],
            r=1
        )
        with query.custom_result(sort=[{'_id': 'desc'}]) as rslt:
            self.assertEqual(
                rslt[10:13],
                [{'_id': 'julia039'}, {'_id': 'julia038'}, {'_id': 'julia037'}]
            )

if __name__ == '__main__':
    unittest.main()
================================================ FILE: tests/unit/replicator_mock_tests.py ================================================
#!/usr/bin/env python
# Copyright (C) 2018 IBM Corp. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" _replicator_mock_tests_ replicator module - Mock unit tests for the Replicator class """ import mock import unittest from cloudant.database import CouchDatabase from cloudant.replicator import Replicator from tests.unit.iam_auth_tests import MOCK_API_KEY class ReplicatorDocumentValidationMockTests(unittest.TestCase): """ Replicator document validation tests """ def setUp(self): self.repl_id = 'rep_test' self.server_url = 'http://localhost:5984' self.user_ctx = { 'name': 'foo', 'roles': ['erlanger', 'researcher'] } self.source_db = 'source_db' self.target_db = 'target_db' def setUpClientMocks(self, admin_party=False, iam_api_key=None): m_client = mock.MagicMock() type(m_client).server_url = mock.PropertyMock( return_value=self.server_url) type(m_client).admin_party = mock.PropertyMock( return_value=admin_party) iam_authenticated = False if iam_api_key is not None: iam_authenticated = True m_session = mock.MagicMock() type(m_session).get_api_key = mock.PropertyMock( return_value=iam_api_key) type(m_client).r_session = mock.PropertyMock( return_value=m_session) type(m_client).is_iam_authenticated = mock.PropertyMock( return_value=iam_authenticated) return m_client def test_using_admin_party_source_and_target(self): m_admin_party_client = self.setUpClientMocks(admin_party=True) m_replicator = mock.MagicMock() type(m_replicator).creds = mock.PropertyMock(return_value=None) m_admin_party_client.__getitem__.return_value = m_replicator # create source/target databases src = CouchDatabase(m_admin_party_client, self.source_db) tgt = CouchDatabase(m_admin_party_client, self.target_db) # trigger replication rep = Replicator(m_admin_party_client) rep.create_replication(src, tgt, repl_id=self.repl_id) kcall = m_replicator.create_document.call_args_list self.assertEqual(len(kcall), 1) args, kwargs = kcall[0] self.assertEqual(len(args), 1) expected_doc = { '_id': self.repl_id, 'source': {'url': '/'.join((self.server_url, self.source_db))}, 'target': {'url': 
'/'.join((self.server_url, self.target_db))} } self.assertDictEqual(args[0], expected_doc) self.assertTrue(kwargs['throw_on_exists']) def test_using_basic_auth_source_and_target(self): test_basic_auth_header = 'abc' m_basic_auth_client = self.setUpClientMocks() m_replicator = mock.MagicMock() m_basic_auth_client.__getitem__.return_value = m_replicator m_basic_auth_client.basic_auth_str.return_value = test_basic_auth_header # create source/target databases src = CouchDatabase(m_basic_auth_client, self.source_db) tgt = CouchDatabase(m_basic_auth_client, self.target_db) # trigger replication rep = Replicator(m_basic_auth_client) rep.create_replication( src, tgt, repl_id=self.repl_id, user_ctx=self.user_ctx) kcall = m_replicator.create_document.call_args_list self.assertEqual(len(kcall), 1) args, kwargs = kcall[0] self.assertEqual(len(args), 1) expected_doc = { '_id': self.repl_id, 'user_ctx': self.user_ctx, 'source': { 'headers': {'Authorization': test_basic_auth_header}, 'url': '/'.join((self.server_url, self.source_db)) }, 'target': { 'headers': {'Authorization': test_basic_auth_header}, 'url': '/'.join((self.server_url, self.target_db)) } } self.assertDictEqual(args[0], expected_doc) self.assertTrue(kwargs['throw_on_exists']) def test_using_iam_auth_source_and_target(self): m_iam_auth_client = self.setUpClientMocks(iam_api_key=MOCK_API_KEY) m_replicator = mock.MagicMock() m_iam_auth_client.__getitem__.return_value = m_replicator # create source/target databases src = CouchDatabase(m_iam_auth_client, self.source_db) tgt = CouchDatabase(m_iam_auth_client, self.target_db) # trigger replication rep = Replicator(m_iam_auth_client) rep.create_replication( src, tgt, repl_id=self.repl_id, user_ctx=self.user_ctx) kcall = m_replicator.create_document.call_args_list self.assertEqual(len(kcall), 1) args, kwargs = kcall[0] self.assertEqual(len(args), 1) expected_doc = { '_id': self.repl_id, 'user_ctx': self.user_ctx, 'source': { 'auth': {'iam': {'api_key': MOCK_API_KEY}}, 
'url': '/'.join((self.server_url, self.source_db)) }, 'target': { 'auth': {'iam': {'api_key': MOCK_API_KEY}}, 'url': '/'.join((self.server_url, self.target_db)) } } self.assertDictEqual(args[0], expected_doc) self.assertTrue(kwargs['throw_on_exists']) ================================================ FILE: tests/unit/replicator_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2020 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ _replicator_tests_ replicator module - Unit tests for the Replicator class See configuration options for environment variables in unit_t_db_base module docstring. """ import time import unittest import uuid import requests from cloudant.document import Document from cloudant.error import CloudantReplicatorException, CloudantClientException from cloudant.replicator import Replicator from flaky import flaky from nose.plugins.attrib import attr from requests import ConnectionError from .unit_t_db_base import UnitTestDbBase from .. import unicode_ class CloudantReplicatorExceptionTests(unittest.TestCase): """ Ensure CloudantReplicatorException functions as expected. """ def test_raise_without_code(self): """ Ensure that a default exception/code is used if none is provided. 
""" with self.assertRaises(CloudantReplicatorException) as cm: raise CloudantReplicatorException() self.assertEqual(cm.exception.status_code, 100) def test_raise_using_invalid_code(self): """ Ensure that a default exception/code is used if invalid code is provided. """ with self.assertRaises(CloudantReplicatorException) as cm: raise CloudantReplicatorException('foo') self.assertEqual(cm.exception.status_code, 100) def test_raise_without_args(self): """ Ensure that a default exception/code is used if the message requested by the code provided requires an argument list and none is provided. """ with self.assertRaises(CloudantReplicatorException) as cm: raise CloudantReplicatorException(404) self.assertEqual(cm.exception.status_code, 100) def test_raise_with_proper_code_and_args(self): """ Ensure that the requested exception is raised. """ with self.assertRaises(CloudantReplicatorException) as cm: raise CloudantReplicatorException(404, 'foo') self.assertEqual(cm.exception.status_code, 404) @attr(db=['cloudant','couch']) class ReplicatorTests(UnitTestDbBase): """ Replicator unit tests """ def setUp(self): """ Set up test attributes """ super(ReplicatorTests, self).setUp() self.db_set_up() self.test_target_dbname = self.dbname() self.target_db = self.client._DATABASE_CLASS( self.client, self.test_target_dbname ) self.target_db.create() self.replicator = Replicator(self.client) self.replication_ids = [] def tearDown(self): """ Reset test attributes """ self.target_db.delete() del self.test_target_dbname del self.target_db for rep_id in self.replication_ids: max_retry = 5 while True: try: self.replicator.stop_replication(rep_id) break except requests.HTTPError as ex: # Retry failed attempt to delete replication document. It's # likely in an error state and receiving constant updates # via the replicator. 
max_retry -= 1 if ex.response.status_code != 409 or max_retry == 0: raise del self.replicator self.db_tear_down() super(ReplicatorTests, self).tearDown() def test_constructor(self): """ Test constructing a Replicator """ self.assertIsInstance(self.replicator, Replicator) self.assertIsInstance( self.replicator.database, self.client._DATABASE_CLASS ) self.assertEqual(self.replicator.database, self.client['_replicator']) def test_constructor_failure(self): """ Test that constructing a Replicator will not work without a valid client. """ repl = None try: self.client.disconnect() repl = Replicator(self.client) self.fail('Above statement should raise a CloudantException') except CloudantClientException as err: self.assertEqual( str(err), 'Database _replicator does not exist. ' 'Verify that the client is valid and try again.' ) finally: self.assertIsNone(repl) self.client.connect() def test_replication_with_generated_id(self): clone = Replicator(self.client) repl_id = clone.create_replication( self.db, self.target_db ) self.replication_ids.append(repl_id['_id']) @flaky(max_runs=3) def test_create_replication(self): """ Test that the replication document gets created and that the replication is successful. """ self.populate_db_with_documents(3) repl_id = 'test-repl-{}'.format(unicode_(uuid.uuid4())) repl_doc = self.replicator.create_replication( self.db, self.target_db, repl_id ) self.replication_ids.append(repl_id) # Test that the replication document was created expected_keys = ['_id', '_rev', 'source', 'target', 'user_ctx'] # If Admin Party mode then user_ctx will not be in the key list if self.client.admin_party or self.client.is_iam_authenticated: expected_keys.pop() self.assertTrue(all(x in list(repl_doc.keys()) for x in expected_keys)) self.assertEqual(repl_doc['_id'], repl_id) self.assertTrue(repl_doc['_rev'].startswith('1-')) # Now that we know that the replication document was created, # check that the replication occurred. 
repl_doc = Document(self.replicator.database, repl_id) repl_doc.fetch() if repl_doc.get('_replication_state') not in ('completed', 'error'): changes = self.replicator.database.changes( feed='continuous', heartbeat=1000) beats = 0 for change in changes: if beats == 300: changes.stop() if not change: beats += 1 continue elif change.get('id') == repl_id: beats = 0 repl_doc = Document(self.replicator.database, repl_id) repl_doc.fetch() if repl_doc.get('_replication_state') in ('completed', 'error'): changes.stop() self.assertEqual(repl_doc.get('_replication_state'), 'completed') self.assertEqual(self.db.all_docs(), self.target_db.all_docs()) self.assertTrue( all(x in self.target_db.keys(True) for x in [ 'julia000', 'julia001', 'julia002' ]) ) def test_timeout_in_create_replication(self): """ Test that a read timeout exception is thrown when creating a replicator with a read timeout value of 5 s. """ # Setup client with a read timeout (but the standard connect timeout) # Note that this timeout applies to all connections from this client # setting it too short can cause intermittent failures when responses # are not quick enough. Setting it too long makes the test take longer. 
self.set_up_client(auto_connect=True, timeout=(30,5)) self.db = self.client[self.test_target_dbname] self.target_db = self.client[self.test_dbname] # Construct a replicator with the updated client self.replicator = Replicator(self.client) repl_id = 'test-repl-{}'.format(unicode_(uuid.uuid4())) repl_doc = self.replicator.create_replication( self.db, self.target_db, repl_id ) self.replication_ids.append(repl_id) # Test that the replication document was created expected_keys = ['_id', '_rev', 'source', 'target', 'user_ctx'] # If Admin Party mode then user_ctx will not be in the key list if self.client.admin_party or self.client.is_iam_authenticated: expected_keys.pop() self.assertTrue(all(x in list(repl_doc.keys()) for x in expected_keys)) self.assertEqual(repl_doc['_id'], repl_id) self.assertTrue(repl_doc['_rev'].startswith('1-')) # Now that we know that the replication document was created, # check that the replication timed out. repl_doc = Document(self.replicator.database, repl_id) repl_doc.fetch() if repl_doc.get('_replication_state') not in ('completed', 'error'): # assert that a connection error is thrown because the read timed out with self.assertRaises(ConnectionError) as cm: changes = self.replicator.database.changes( feed='continuous') for change in changes: continue self.assertTrue(str(cm.exception).endswith('Read timed out.')) def test_create_replication_without_a_source(self): """ Test that the replication document is not created and fails as expected when no source database is provided. """ try: repl_doc = self.replicator.create_replication() self.fail('Above statement should raise a CloudantException') except CloudantReplicatorException as err: self.assertEqual( str(err), 'You must specify either a source_db Database ' 'object or a manually composed \'source\' string/dict.' ) def test_create_replication_without_a_target(self): """ Test that the replication document is not created and fails as expected when no target database is provided. 
""" try: repl_doc = self.replicator.create_replication(self.db) self.fail('Above statement should raise a CloudantException') except CloudantReplicatorException as err: self.assertEqual( str(err), 'You must specify either a target_db Database ' 'object or a manually composed \'target\' string/dict.' ) def test_list_replications(self): """ Test that a list of Document wrapped objects are returned. """ self.populate_db_with_documents(3) repl_ids = ['test-repl-{}'.format( unicode_(uuid.uuid4()) ) for _ in range(3)] repl_docs = [self.replicator.create_replication( self.db, self.target_db, repl_id ) for repl_id in repl_ids] self.replication_ids.extend(repl_ids) replications = self.replicator.list_replications() all_repl_ids = [doc['_id'] for doc in replications] match = [repl_id for repl_id in all_repl_ids if repl_id in repl_ids] self.assertEqual(set(repl_ids), set(match)) def test_retrieve_replication_state(self): """ Test that the replication state can be retrieved for a replication """ self.populate_db_with_documents(3) repl_id = "test-repl-{}".format(unicode_(uuid.uuid4())) repl_doc = self.replicator.create_replication( self.db, self.target_db, repl_id ) self.replication_ids.append(repl_id) repl_state = None # note triggered is for versions prior to 2.1 valid_states = ['completed', 'error', 'initializing', 'triggered', 'pending', 'running', 'failed', 'crashing', None] finished = False # Wait for 5 minutes or a terminal replication state for _ in range(300): repl_state = self.replicator.replication_state(repl_id) self.assertTrue(repl_state in valid_states) if repl_state in ('error', 'failed', 'completed'): finished = True break time.sleep(1) self.assertTrue(finished) def test_retrieve_replication_state_using_invalid_id(self): """ Test that replication_state(...) raises an exception as expected when an invalid replication id is provided. 
""" repl_id = 'fake-repl-id-{}'.format(unicode_(uuid.uuid4())) repl_state = None try: self.replicator.replication_state(repl_id) self.fail('Above statement should raise a CloudantException') except CloudantReplicatorException as err: self.assertEqual( str(err), 'Replication with id {} not found.'.format(repl_id) ) self.assertIsNone(repl_state) def test_stop_replication(self): """ Test that a replication can be stopped. """ self.populate_db_with_documents(3) repl_id = "test-repl-{}".format(unicode_(uuid.uuid4())) repl_doc = self.replicator.create_replication( self.db, self.target_db, repl_id ) max_retry = 3 while True: try: max_retry -= 1 self.replicator.stop_replication(repl_id) break except requests.HTTPError as err: self.assertEqual(err.response.status_code, 409) if max_retry == 0: self.fail('Failed to stop replication: {0}'.format(err)) try: # The .fetch() will fail since the replication has been stopped # and the replication document has been removed from the db. repl_doc.fetch() self.fail('Above statement should raise a CloudantException') except requests.HTTPError as err: self.assertEqual(err.response.status_code, 404) def test_stop_replication_using_invalid_id(self): """ Test that stop_replication(...) raises an exception as expected when an invalid replication id is provided. """ repl_id = 'fake-repl-id-{}'.format(unicode_(uuid.uuid4())) try: self.replicator.stop_replication(repl_id) self.fail('Above statement should raise a CloudantException') except CloudantReplicatorException as err: self.assertEqual( str(err), 'Replication with id {} not found.'.format(repl_id) ) def test_follow_replication(self): """ Test that follow_replication(...) properly iterates updated replication documents while the replication is executing. 
""" self.populate_db_with_documents(3) repl_id = "test-repl-{}".format(unicode_(uuid.uuid4())) repl_doc = self.replicator.create_replication( self.db, self.target_db, repl_id ) self.replication_ids.append(repl_id) # note triggered is for versions prior to 2.1 valid_states = ['completed', 'error', 'initializing', 'triggered', 'pending', 'running', 'failed', 'crashing', None] repl_states = [] if 'scheduler' in self.client.features(): state_key = 'state' else: state_key = '_replication_state' for doc in self.replicator.follow_replication(repl_id): self.assertIn(doc.get(state_key), valid_states) repl_states.append(doc.get(state_key)) self.assertTrue(len(repl_states) > 0) self.assertEqual(repl_states[-1], 'completed') self.assertNotIn('error', repl_states) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/result_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2016, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import mock """ result module - Unit tests for Result class """ import unittest from cloudant.error import ResultException from cloudant.result import Result, ResultByKey from cloudant.view import View from nose.plugins.attrib import attr from requests.exceptions import HTTPError from .unit_t_db_base import UnitTestDbBase class ResultExceptionTests(unittest.TestCase): """ Ensure ResultException functions as expected. 
""" def test_raise_without_code(self): """ Ensure that a default exception/code is used if none is provided. """ with self.assertRaises(ResultException) as cm: raise ResultException() self.assertEqual(cm.exception.status_code, 100) def test_raise_using_invalid_code(self): """ Ensure that a default exception/code is used if invalid code is provided. """ with self.assertRaises(ResultException) as cm: raise ResultException('foo') self.assertEqual(cm.exception.status_code, 100) def test_raise_without_args(self): """ Ensure that a default exception/code is used if the message requested by the code provided requires an argument list and none is provided. """ with self.assertRaises(ResultException) as cm: raise ResultException(101) self.assertEqual(cm.exception.status_code, 100) def test_raise_without_insufficient_args(self): """ Ensure that a default exception/code is used if the message requested by the code provided requires an argument list but the one provided does not contain the correct amount of arguments. """ with self.assertRaises(ResultException) as cm: raise ResultException(102, 'foo') self.assertEqual(cm.exception.status_code, 100) def test_raise_with_proper_code_and_args(self): """ Ensure that the requested exception is raised. 
""" with self.assertRaises(ResultException) as cm: raise ResultException(102, 'foo', 'bar') self.assertEqual(cm.exception.status_code, 102) @attr(db=['cloudant','couch']) class ResultTests(UnitTestDbBase): """ Result unit tests """ def setUp(self): """ Set up test attributes """ super(ResultTests, self).setUp() self.db_set_up() self.populate_db_with_documents() self.create_views() def tearDown(self): """ Reset test attributes """ self.db_tear_down() super(ResultTests, self).tearDown() def test_constructor(self): """ Test instantiating a Result """ result = Result( self.ddoc.get_view('view001'), startkey='1', endkey='9', page_size=1000 ) self.assertIsInstance(result, Result) self.assertDictEqual(result.options, {'startkey': '1', 'endkey': '9'}) def test_get_item_by_index(self): """ Test retrieving a result using a value that refers to an index of the result. """ result = Result(self.view001) expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}] self.assertEqual(result[0], expected) expected = [{'key': 'julia010', 'id': 'julia010', 'value': 1}] self.assertEqual(result[10], expected) expected = [{'key': 'julia099', 'id': 'julia099', 'value': 1}] self.assertEqual(result[99], expected) self.assertEqual(result[100], []) self.assertEqual(result[110], []) def test_get_item_by_index_using_skip_limit(self): """ Test retrieving a result using a value that refers to an index of the result when the result uses skip and limit. 
""" result = Result(self.view001, skip=10, limit=10) expected = [{'key': 'julia010', 'id': 'julia010', 'value': 1}] self.assertEqual(result[0], expected) expected = [{'key': 'julia015', 'id': 'julia015', 'value': 1}] self.assertEqual(result[5], expected) expected = [{'key': 'julia019', 'id': 'julia019', 'value': 1}] self.assertEqual(result[9], expected) self.assertEqual(result[10], []) self.assertEqual(result[20], []) def test_get_item_by_index_using_limit(self): """ Test retrieving a result using a value that refers to an index of the result when the result uses limit. """ result = Result(self.view001, limit=10) expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}] self.assertEqual(result[0], expected) expected = [{'key': 'julia005', 'id': 'julia005', 'value': 1}] self.assertEqual(result[5], expected) expected = [{'key': 'julia009', 'id': 'julia009', 'value': 1}] self.assertEqual(result[9], expected) self.assertEqual(result[10], []) self.assertEqual(result[20], []) def test_get_item_by_index_using_skip(self): """ Test retrieving a result using a value that refers to an index of the result when the result uses limit. """ result = Result(self.view001, skip=10) expected = [{'key': 'julia010', 'id': 'julia010', 'value': 1}] self.assertEqual(result[0], expected) expected = [{'key': 'julia015', 'id': 'julia015', 'value': 1}] self.assertEqual(result[5], expected) expected = [{'key': 'julia099', 'id': 'julia099', 'value': 1}] self.assertEqual(result[89], expected) self.assertEqual(result[90], []) self.assertEqual(result[100], []) def test_get_item_by_negative_index(self): """ Test retrieving a result raises an exception when using a negative index. 
""" result = Result(self.view001) with self.assertRaises(ResultException) as cm: invalid_result = result[-1] self.assertEqual(cm.exception.status_code, 101) def test_get_item_by_key_using_invalid_options(self): """ Since the __getitem__ method uses the 'key' parameter to retrieve the specified data using a Result, any Result that uses any of 'key', 'keys', 'startkey' or 'endkey' as arguments would yield unexpected results. For this reason a check was added to ensure that these options are not used in this case. This test verifies that check. """ options = ('key', 'keys', 'startkey', 'endkey') for option in options: result = Result(self.view001, **{option: 'julia010'}) with self.assertRaises(ResultException) as cm: invalid_result = result['julia000'] self.assertEqual(cm.exception.status_code, 102) def test_get_item_by_key(self): """ Test retrieving a result using value that refers to a key of the result. """ result = Result(self.view001) expected = [{'key': 'julia010', 'id': 'julia010', 'value': 1}] self.assertEqual(result['julia010'], expected) self.assertEqual(result[ResultByKey('julia010')], expected) def test_get_item_by_missing_key(self): """ Test retrieving a result using value that refers to a key that does not exist in the result. """ result = Result(self.view001) self.assertEqual(result['ruby010'], []) self.assertEqual(result[ResultByKey('ruby010')], []) def test_get_item_by_complex_key(self): """ Test retrieving a result using value that refers to a complex key of the result. """ result = Result(self.view005) expected = [{'key': ['julia', 10], 'id': 'julia010', 'value': 1}] self.assertEqual(result[['julia', 10]], expected) self.assertEqual(result[ResultByKey(['julia', 10])], expected) def test_get_item_by_integer_key(self): """ Test retrieving a result using an integer value that refers to a key of the result. 
""" result = Result(self.view003) expected = [{'key': 10, 'id': 'julia020', 'value': 1}, {'key': 10, 'id': 'julia021', 'value': 1}] self.assertEqual(result[ResultByKey(10)], expected) def test_get_item_by_missing_integer_key(self): """ Test retrieving a result using an integer value that refers to a key that does not exist in the result. """ result = Result(self.view003) self.assertEqual(result[ResultByKey(99)], []) def test_get_item_slice_no_start_no_stop(self): """ Test that by not providing a start and a stop slice value, the entire result is returned. """ result = Result(self.view001, limit=3) expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}] self.assertEqual(result[:], expected) def test_get_all_items(self): """ Test that all results can be retrieved. """ result = Result(self.view001, limit=3) expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}] self.assertEqual(result.all(), expected) def test_get_item_invalid_index_slice(self): """ Test that when invalid start and stop values are provided in a slice an exception is raised. 
""" result = Result(self.view001) with self.assertRaises(ResultException) as cm: invalid_result = result[-1: 10] self.assertEqual(cm.exception.status_code, 101) with self.assertRaises(ResultException) as cm: invalid_result = result[1: -10] self.assertEqual(cm.exception.status_code, 101) with self.assertRaises(ResultException) as cm: invalid_result = result[-1: -10] self.assertEqual(cm.exception.status_code, 101) with self.assertRaises(ResultException) as cm: invalid_result = result[2: 2] self.assertEqual(cm.exception.status_code, 101) with self.assertRaises(ResultException) as cm: invalid_result = result[5: 2] self.assertEqual(cm.exception.status_code, 101) def test_get_item_index_slice_using_start_stop(self): """ Test getting an index slice by using start and stop slice values. """ result = Result(self.view001) expected = [{'key': 'julia098', 'id': 'julia098', 'value': 1}, {'key': 'julia099', 'id': 'julia099', 'value': 1}] self.assertEqual(result[98:100], expected) self.assertEqual(result[98:102], expected) self.assertEqual(result[100:102], []) result = Result(self.view001, limit=20) expected = [{'key': 'julia018', 'id': 'julia018', 'value': 1}, {'key': 'julia019', 'id': 'julia019', 'value': 1}] self.assertEqual(result[18:20], expected) self.assertEqual(result[18:22], expected) self.assertEqual(result[20:22], []) result = Result(self.view001, skip=98) expected = [{'key': 'julia098', 'id': 'julia098', 'value': 1}, {'key': 'julia099', 'id': 'julia099', 'value': 1}] self.assertEqual(result[0:2], expected) self.assertEqual(result[0:4], expected) self.assertEqual(result[2:4], []) result = Result(self.view001, limit=20, skip=20) expected = [{'key': 'julia038', 'id': 'julia038', 'value': 1}, {'key': 'julia039', 'id': 'julia039', 'value': 1}] self.assertEqual(result[18:20], expected) self.assertEqual(result[18:22], expected) self.assertEqual(result[20:22], []) def test_get_item_index_slice_using_start_only(self): """ Test getting an index slice by using start slice value 
only. """ result = Result(self.view001) expected = [{'key': 'julia098', 'id': 'julia098', 'value': 1}, {'key': 'julia099', 'id': 'julia099', 'value': 1}] self.assertEqual(result[98:], expected) self.assertEqual(result[100:], []) result = Result(self.view001, limit=20) expected = [{'key': 'julia018', 'id': 'julia018', 'value': 1}, {'key': 'julia019', 'id': 'julia019', 'value': 1}] self.assertEqual(result[18:], expected) self.assertEqual(result[20:], []) result = Result(self.view001, skip=98) expected = [{'key': 'julia098', 'id': 'julia098', 'value': 1}, {'key': 'julia099', 'id': 'julia099', 'value': 1}] self.assertEqual(result[0:], expected) self.assertEqual(result[2:], []) result = Result(self.view001, limit=20, skip=20) expected = [{'key': 'julia038', 'id': 'julia038', 'value': 1}, {'key': 'julia039', 'id': 'julia039', 'value': 1}] self.assertEqual(result[18:], expected) self.assertEqual(result[20:], []) def test_get_item_index_slice_using_stop_only(self): """ Test getting an index slice by using stop slice value only. 
""" result = Result(self.view001) expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}] self.assertEqual(result[:2], expected) expected = [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(100)] self.assertEqual(result[:102], expected) result = Result(self.view001, limit=20) expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}] self.assertEqual(result[:2], expected) expected = [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(20)] self.assertEqual(result[:22], expected) result = Result(self.view001, skip=98) expected = [{'key': 'julia098', 'id': 'julia098', 'value': 1}, {'key': 'julia099', 'id': 'julia099', 'value': 1}] self.assertEqual(result[:2], expected) self.assertEqual(result[:4], expected) result = Result(self.view001, limit=2, skip=20) expected = [{'key': 'julia020', 'id': 'julia020', 'value': 1}, {'key': 'julia021', 'id': 'julia021', 'value': 1}] self.assertEqual(result[:2], expected) self.assertEqual(result[:4], expected) def test_get_item_key_slice_using_invalid_options(self): """ Test that when "key" and/or "keys" are used in the result an exception is raised. 
""" result = Result(self.view001, key='foo') with self.assertRaises(ResultException) as cm: invalid_result = result['foo':] self.assertEqual(cm.exception.status_code, 102) result = Result(self.view001, keys=['foo', 'bar']) with self.assertRaises(ResultException) as cm: invalid_result = result['foo':] self.assertEqual(cm.exception.status_code, 102) result = Result(self.view001, startkey='foo') with self.assertRaises(ResultException) as cm: invalid_result = result['foo':] self.assertEqual(cm.exception.status_code, 102) result = Result(self.view001, endkey='foo') with self.assertRaises(ResultException) as cm: invalid_result = result['foo':] self.assertEqual(cm.exception.status_code, 102) def test_get_item_invalid_key_slice(self): """ Test that when invalid start and stop values are provided in a slice an exception is raised. Specifically this happens when the slice start and stop are different types. """ result = Result(self.view001) with self.assertRaises(ResultException) as cm: invalid_result = result['foo': ['bar', 'baz']] self.assertEqual(cm.exception.status_code, 101) ten = ResultByKey(10) with self.assertRaises(ResultException) as cm: invalid_result = result['foo': ten] self.assertEqual(cm.exception.status_code, 101) def test_get_item_key_slice_using_start_stop(self): """ Test getting a key slice by using start and stop slice values. 
""" result = Result(self.view001) expected = [{'key': 'julia097', 'id': 'julia097', 'value': 1}, {'key': 'julia098', 'id': 'julia098', 'value': 1}, {'key': 'julia099', 'id': 'julia099', 'value': 1}] self.assertEqual(result['julia097': 'julia099'], expected) self.assertEqual( result[ResultByKey('julia097'): ResultByKey('julia099')], expected ) self.assertEqual(result['julia097': 'ruby'], expected) self.assertEqual( result['julia098': 'julia098'], [{'key': 'julia098', 'id': 'julia098', 'value': 1}] ) self.assertEqual(result['bar': 'foo'], []) result = Result(self.view003) expected = [{'key': 47, 'id': 'julia094', 'value': 1}, {'key': 47, 'id': 'julia095', 'value': 1}, {'key': 48, 'id': 'julia096', 'value': 1}, {'key': 48, 'id': 'julia097', 'value': 1}, {'key': 49, 'id': 'julia098', 'value': 1}, {'key': 49, 'id': 'julia099', 'value': 1}] self.assertEqual(result[ResultByKey(47): ResultByKey(49)], expected) self.assertEqual(result[ResultByKey(47): ResultByKey(52)], expected) self.assertEqual( result[ResultByKey(48): ResultByKey(48)], [{'key': 48, 'id': 'julia096', 'value': 1}, {'key': 48, 'id': 'julia097', 'value': 1}] ) self.assertEqual(result[ResultByKey(52): ResultByKey(54)], []) result = Result(self.view005) expected = [{'key': ['julia', 97], 'id': 'julia097', 'value': 1}, {'key': ['julia', 98], 'id': 'julia098', 'value': 1}, {'key': ['julia', 99], 'id': 'julia099', 'value': 1}] self.assertEqual(result[['julia', 97]: ['julia', 99]], expected) self.assertEqual( result[ResultByKey(['julia', 97]): ResultByKey(['julia', 99])], expected ) self.assertEqual(result[['julia', 97]: ['ruby', 97]], expected) self.assertEqual( result[['julia', 98]: ['julia', 98]], [{'key': ['julia', 98], 'id': 'julia098', 'value': 1}] ) self.assertEqual(result[['ruby', 'bar']: ['ruby', 'foo']], []) def test_get_item_key_slice_start_greater_than_stop(self): """ Test getting a key slice by using start value greater than stop value. 
The behavior when using CouchDB and newer versions of Cloudant is to return an HTTP 400 Bad Request. """ result = Result(self.view001) with self.assertRaises(HTTPError) as cm: invalid_result = result['foo': 'bar'] self.assertTrue( str(cm.exception).startswith('400 Client Error: Bad Request')) def test_get_item_key_slice_using_start_only(self): """ Test getting a key slice by using the start slice value only. """ result = Result(self.view001) expected = [{'key': 'julia097', 'id': 'julia097', 'value': 1}, {'key': 'julia098', 'id': 'julia098', 'value': 1}, {'key': 'julia099', 'id': 'julia099', 'value': 1}] self.assertEqual(result['julia097':], expected) self.assertEqual(result[ResultByKey('julia097'):], expected) self.assertEqual(result['ruby':], []) result = Result(self.view003) expected = [{'key': 47, 'id': 'julia094', 'value': 1}, {'key': 47, 'id': 'julia095', 'value': 1}, {'key': 48, 'id': 'julia096', 'value': 1}, {'key': 48, 'id': 'julia097', 'value': 1}, {'key': 49, 'id': 'julia098', 'value': 1}, {'key': 49, 'id': 'julia099', 'value': 1}] self.assertEqual(result[ResultByKey(47):], expected) self.assertEqual(result[ResultByKey(52):], []) result = Result(self.view005) expected = [{'key': ['julia', 97], 'id': 'julia097', 'value': 1}, {'key': ['julia', 98], 'id': 'julia098', 'value': 1}, {'key': ['julia', 99], 'id': 'julia099', 'value': 1}] self.assertEqual(result[['julia', 97]:], expected) self.assertEqual(result[ResultByKey(['julia', 97]):], expected) self.assertEqual(result[ResultByKey(['ruby', 'foo']):], []) def test_get_item_key_slice_using_stop_only(self): """ Test getting a key slice by using the stop slice value only. 
""" result = Result(self.view001) expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}] self.assertEqual(result[:'julia002'], expected) self.assertEqual(result[:ResultByKey('julia002')], expected) self.assertEqual( result[:'ruby'], [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(100)] ) self.assertEqual(result[:'foo'], []) result = Result(self.view003) expected = [{'key': 0, 'id': 'julia000', 'value': 1}, {'key': 0, 'id': 'julia001', 'value': 1}, {'key': 1, 'id': 'julia002', 'value': 1}, {'key': 1, 'id': 'julia003', 'value': 1}, {'key': 2, 'id': 'julia004', 'value': 1}, {'key': 2, 'id': 'julia005', 'value': 1}] self.assertEqual(result[:ResultByKey(2)], expected) self.assertEqual( result[:ResultByKey(51)], [{'key': x // 2, 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(100)] ) self.assertEqual(result[:ResultByKey(-10)], []) result = Result(self.view005) expected = [{'key': ['julia', 0], 'id': 'julia000', 'value': 1}, {'key': ['julia', 1], 'id': 'julia001', 'value': 1}, {'key': ['julia', 2], 'id': 'julia002', 'value': 1}] self.assertEqual(result[:['julia', 2]], expected) self.assertEqual(result[:ResultByKey(['julia', 2])], expected) self.assertEqual( result[:['julia', 102]], [{'key': ['julia', x], 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(100)] ) self.assertEqual(result[:ResultByKey(['foo', 'bar'])], []) def test_iteration_with_invalid_options(self): """ Test that iteration raises an exception when "limit" is used as option for the result. """ result = Result(self.view001, limit=10) with self.assertRaises(ResultException) as cm: invalid_result = [row for row in result] self.assertEqual(cm.exception.status_code, 103) def test_iteration_invalid_page_size(self): """ Test that iteration raises an exception when and invalid "page_size" is is used as an option for the result. 
""" result = Result(self.view001, page_size=-1) with self.assertRaises(ResultException) as cm: invalid_result = [row for row in result] self.assertEqual(cm.exception.status_code, 104) result = Result(self.view001, page_size='foo') with self.assertRaises(ResultException) as cm: invalid_result = [row for row in result] self.assertEqual(cm.exception.status_code, 104) def test_iteration_using_valid_page_size(self): """ Test that iteration works as expected when "page_size" is provided as an option for the result. """ result = Result(self.view001, endkey='julia004', page_size=3) expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, {'key': 'julia003', 'id': 'julia003', 'value': 1}, {'key': 'julia004', 'id': 'julia004', 'value': 1}] self.assertEqual([x for x in result], expected) result = Result(self.view001, endkey='julia004', page_size='3') self.assertEqual([x for x in result], expected) result = Result(self.view001, endkey='julia002', page_size=3) expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}] self.assertEqual([x for x in result], expected) result = Result(self.view001, endkey='julia001', page_size=3) expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}] self.assertEqual([x for x in result], expected) def test_iteration_using_default_page_size(self): """ Test that iteration works as expected when "page_size" is not provided as an option for the result. 
""" result = Result(self.view001, endkey='julia004') expected = [{'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, {'key': 'julia003', 'id': 'julia003', 'value': 1}, {'key': 'julia004', 'id': 'julia004', 'value': 1}] self.assertEqual([x for x in result], expected) def test_iteration_no_data(self): """ Test that iteration works as expected when no data matches the result. """ result = Result(self.view001, startkey='ruby') self.assertEqual([x for x in result], []) def test_iteration_integer_keys(self): """ Test that iteration works as expected when keys are integer. """ result = Result(self.view007, page_size=10) self.assertEqual(len([x for x in result]), 100) def test_iteration_pagination(self): """ Test that iteration pagination works as expected. """ class CallMock: expected_calls = [ {'limit': 28}, {'limit': 28, 'startkey': 1, 'startkey_docid': 'julia027'}, {'limit': 28, 'startkey': 1, 'startkey_docid': 'julia054'}, {'limit': 28, 'startkey': 1, 'startkey_docid': 'julia081'}, ] def __init__(self, outer): self.outer = outer self.expected_calls.reverse() def call(self, *args, **kwargs): self.outer.assertEqual(dict(kwargs), self.expected_calls.pop(), 'pagination error') return View.__call__(self.outer.view007, *args, **kwargs) with mock.patch.object(self, 'view007', CallMock(self).call) as _: result = Result(self.view007, page_size=27) expected = [ {'id': 'julia{0:03d}'.format(i), 'key': 1, 'value': 'julia'} for i in range(100) ] self.assertEqual([x for x in result], expected) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/scheduler_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Unit tests for the Scheduler class """ import unittest import requests import json import mock from cloudant.scheduler import Scheduler from .unit_t_db_base import UnitTestDbBase class SchedulerTests(UnitTestDbBase): def setUp(self): """ Set up test attributes """ super(SchedulerTests, self).setUp() self.db_set_up() def tearDown(self): """ Reset test attributes """ self.db_tear_down() super(SchedulerTests, self).tearDown() def test_scheduler_docs(self): """ Test scheduler docs """ # set up mock response using a real captured response m_response_ok = mock.MagicMock() type(m_response_ok).status_code = mock.PropertyMock(return_value=200) type(m_response_ok).text = mock.PropertyMock(return_value='{"total_rows":6,"offset":0,"docs":[\ {"database":"tomblench/_replicator",\ "doc_id":"296e48244e003eba8764b2156b3bf302",\ "id":null,\ "source":"https://tomblench.cloudant.com/animaldb/",\ "target":"https://tomblench.cloudant.com/animaldb_copy/",\ "state":"completed",\ "error_count":0,\ "info":{"revisions_checked":15,\ "missing_revisions_found":2,\ "docs_read":2,\ "docs_written":2,\ "changes_pending":null,\ "doc_write_failures":0,\ "checkpointed_source_seq":"19-g1AAAAGjeJyVz10KwjAMB_BoJ4KX8AZF2tWPJ3eVpqnO0XUg27PeTG9Wa_VhwmT6kkDIPz_iACArGcGS0DRnWxDmHE9HdJ3lxjUdad9yb1sXF6cacB9CqEqmZ3UczKUh2uGhHxeD8U9i_Z3AIla8vJVJUlBIZYTqX5A_KMM7SfFZrHCNLUK3p7RIkl5tSRD-K6kx6f6S0k8sScpYJTb5uFQ9AI9Ch9c"},\ "start_time":null,\ "last_updated":"2017-04-13T14:53:50+00:00"},\ {"database":"tomblench/_replicator",\ "doc_id":"3b749f320867d703550b0f758a4000ae",\ "id":null,\ 
"source":"https://examples.cloudant.com/animaldb/",\ "target":"https://tomblench.cloudant.com/animaldb/",\ "state":"completed",\ "error_count":0,\ "info":{"revisions_checked":15,\ "missing_revisions_found":15,\ "docs_read":15,\ "docs_written":15,\ "changes_pending":null,\ "doc_write_failures":0,\ "checkpointed_source_seq":"56-g1AAAAGveJzLYWBgYMlgTmFQSElKzi9KdUhJstDLTS3KLElMT9VLzskvTUnMK9HLSy3JAapkSmRIsv___39WBnMiby5QgN04JS3FLDUJWb8Jdv0gSxThigyN8diS5AAkk-qhFvFALEo2MTEwMSXGDDSbTPHYlMcCJBkagBTQsv0g28TBtpkbGCQapaF4C4cxJFt2AGIZ2GscYMuMDEzMUizMkC0zw25MFgBKoovi"},\ "start_time":null,\ "last_updated":"2017-04-27T12:28:44+00:00"},\ {"database":"tomblench/_replicator",\ "doc_id":"ad8f7896480b8081c8f0a2267ffd1859",\ "id":null,\ "source":"https://tortytherlediffecareette:*****@mikerhodestesty008.cloudant.com/moviesdb/",\ "target":"https://tomblench.cloudant.com/moviesdb_rep/",\ "state":"completed",\ "error_count":0,\ "info":{"revisions_checked":5997,\ "missing_revisions_found":5997,\ "docs_read":5997,\ "docs_written":5997,\ "changes_pending":null,\ "doc_write_failures":0,\ "checkpointed_source_seq":"5997-g1AAAANreJy10UEKwjAQAMBgBcVP2BeUpEm1PdmfaDYJSKkVtB486U_0J_oBTz5AHyAI3jxIjUml1x7ayy67LDssmyKE-nNHIleCWK5ULIF6uVrnW4xDT6TLjeRZ7mUqT_VkhyMYFkWRzB3Q1XOhez3iczKKghor6jvg6giTiroYiuNQYYqbpeIfNa2oh72KhQGosFlq9qN2FfUyFPgUCKOnullXR7TXSWuHkvsYjjEWjQVvgTta7lRyV_szKgmRbVx3ttzNcs7AcEoKCHAb3N1y_9-9DYeBYzEiNTYlX3EcE0s"},\ "start_time":null,\ "last_updated":"2016-08-23T13:11:26+00:00"},\ {"database":"tomblench/_replicator",\ "doc_id":"b63c053ecd95a4047b55ed8847b046f1",\ "id":null,\ "source":"https://tomblench.cloudant.com/atestdb2/",\ "target":"https://tomblench.cloudant.com/atestdb1/",\ "state":"completed",\ "error_count":0,\ "info":{"revisions_checked":1,\ "missing_revisions_found":1,\ "docs_read":1,\ "docs_written":1,\ "changes_pending":null,\ "doc_write_failures":0,\ 
"checkpointed_source_seq":"2-g1AAAAFHeJyNjkEOgjAQRSdAYjyFN2jSFCtdyVU6nSKQWhJC13ozvVktsoEF0c2fTPL_-98BQNHmBCdCM4y2JuQMuxu6YJlxQyDtJ-bt5JIx04DXGGOvYRsR-xGsk-JjTrW5hnv6Dg0XplRngmPwZJvOW9ry5D7PF0nhmU5CvmZm9mVKVVacLr8pfy9fmt5L02q9qEhJbtbr-w-AQmfD"},\ "start_time":null,\ "last_updated":"2017-05-16T16:25:22+00:00"},\ {"database":"tomblench/_replicator",\ "doc_id":"c71c9e69e30a182dc91d8938277bc85e",\ "id":null,\ "source":"https://tomblench.cloudant.com/animaldb/",\ "target":"https://tomblench.cloudant.com/animaldb_copy/",\ "state":"completed",\ "error_count":0,\ "info":{"revisions_checked":15,\ "missing_revisions_found":15,\ "docs_read":15,\ "docs_written":15,\ "changes_pending":null,\ "doc_write_failures":0,\ "checkpointed_source_seq":"14-g1AAAAEueJzLYWBgYMlgTmGQSUlKzi9KdUhJMtTLTU1M0UvOyS9NScwr0ctLLckBqmJKZEiy____f1YGUyJrLlCAPdHEPCktJZk43UkOQDKpHmoAI9gAw2STxCTzJOIMyGMBkgwNQApoxv6sDGaoK0yN04wsk80IGEGKHQcgdoAdygxxaIplklFaWhYAu2FdOA"},\ "start_time":null,\ "last_updated":"2015-05-12T11:47:33+00:00"},\ {"database":"tomblench/_replicator",\ "doc_id":"e6242d1e9ce059b0388fc75af3116a39",\ "id":null,\ "source":"https://tomblench.cloudant.com/atestdb1/",\ "target":"https://tomblench.cloudant.com/atestdb2/",\ "state":"completed",\ "error_count":0,\ "info":{"revisions_checked":1,\ "missing_revisions_found":1,\ "docs_read":1,\ "docs_written":1,\ "changes_pending":null,\ "doc_write_failures":0,\ "checkpointed_source_seq":"1-g1AAAAFheJyFzkEOgjAQBdBRSIyn8AZNgEJgJVeZ6bQCqSUhdK0305th1Q1dEDYzyWTy_rcAkHYJw4VJjZNumQpB_Y2s10LZ0TO6WTg92_B4RKDrsixDlyDcw-FUVUiFahjO3rE2vdMcY9k2Rm2Y9Ig8bWqspdz25Lbn0jDhGVYgX1_z8DMblnlp8n0lTir3kt7_pFV7NE2WYbluP3wATr5vQA"},\ "start_time":null,\ "last_updated":"2017-05-16T16:24:02+00:00"}]}') self.client.r_session.get = mock.Mock(return_value=m_response_ok) scheduler = Scheduler(self.client) response = scheduler.list_docs(skip=0, limit=10) # assert on request and response self.client.r_session.get.assert_called_with( self.url + '/_scheduler/docs', params={"skip":0, 
"limit":10}, ) self.assertEqual(response["total_rows"], 6) def test_scheduler_doc(self): """ Test scheduler doc """ # set up mock response using a real captured response m_response_ok = mock.MagicMock() type(m_response_ok).status_code = mock.PropertyMock(return_value=200) type(m_response_ok).text = mock.PropertyMock(return_value='{"database":"tomblench/_replicator",\ "doc_id":"296e48244e003eba8764b2156b3bf302",\ "id":null,\ "source":"https://tomblench.cloudant.com/animaldb/",\ "target":"https://tomblench.cloudant.com/animaldb_copy/",\ "state":"completed",\ "error_count":0,\ "info":{"revisions_checked":15,\ "missing_revisions_found":2,\ "docs_read":2,\ "docs_written":2,\ "changes_pending":null,\ "doc_write_failures":0,\ "checkpointed_source_seq":"19-g1AAAAGjeJyVz10KwjAMB_BoJ4KX8AZF2tWPJ3eVpqnO0XUg27PeTG9Wa_VhwmT6kkDIPz_iACArGcGS0DRnWxDmHE9HdJ3lxjUdad9yb1sXF6cacB9CqEqmZ3UczKUh2uGhHxeD8U9i_Z3AIla8vJVJUlBIZYTqX5A_KMM7SfFZrHCNLUK3p7RIkl5tSRD-K6kx6f6S0k8sScpYJTb5uFQ9AI9Ch9c"},\ "start_time":null,\ "last_updated":"2017-04-13T14:53:50+00:00"}') self.client.r_session.get = mock.Mock(return_value=m_response_ok) scheduler = Scheduler(self.client) response = scheduler.get_doc("296e48244e003eba8764b2156b3bf302") # assert on request and response self.client.r_session.get.assert_called_with( self.url + '/_scheduler/docs/_replicator/296e48244e003eba8764b2156b3bf302', ) self.assertEqual(response["doc_id"], "296e48244e003eba8764b2156b3bf302") def test_scheduler_jobs(self): """ Test scheduler jobs """ # set up mock response using a real captured response m_response_ok = mock.MagicMock() type(m_response_ok).status_code = mock.PropertyMock(return_value=200) type(m_response_ok).text = mock.PropertyMock(return_value='{"total_rows":1,"offset":0,\ "jobs":[{"database":null,\ "id":"f11105eaaded4981d21ff8ebf846f48b+create_target",\ "pid":"<0.5866.6800>",\ "source":"https://clientlibs-test:*****@clientlibs-test.cloudant.com/largedb1g/",\ 
"target":"https://tomblench:*****@tomblench.cloudant.com/largedb1g/",\ "user":"tomblench",\ "doc_id":null,\ "history":[{"timestamp":"2018-04-12T13:06:20Z",\ "type":"started"},\ {"timestamp":"2018-04-12T13:06:20Z",\ "type":"added"}],\ "node":"dbcore@db2.bigblue.cloudant.net",\ "start_time":"2018-04-12T13:06:20Z"}]}') self.client.r_session.get = mock.Mock(return_value=m_response_ok) scheduler = Scheduler(self.client) response = scheduler.list_jobs(skip=0, limit=10) # assert on request and response self.client.r_session.get.assert_called_with( self.url + '/_scheduler/jobs', params={"skip":0, "limit":10}, ) self.assertEqual(response["total_rows"], 1) ================================================ FILE: tests/unit/security_document_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2016, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ security_document module - Unit tests for the SecurityDocument class See configuration options for environment variables in unit_t_db_base module docstring. 
""" import json import unittest from cloudant.security_document import SecurityDocument from nose.plugins.attrib import attr from .unit_t_db_base import UnitTestDbBase @attr(db=['cloudant','couch']) class SecurityDocumentTests(UnitTestDbBase): """ SecurityDocument unit tests """ def setUp(self): """ Set up test attributes """ super(SecurityDocumentTests, self).setUp() self.db_set_up() self.load_security_document_data() def tearDown(self): """ Reset test attributes """ self.db_tear_down() super(SecurityDocumentTests, self).tearDown() def test_constructor(self): """ Test constructing a SecurityDocument """ sdoc = SecurityDocument(self.db) self.assertIsInstance(sdoc, SecurityDocument) self.assertEqual(sdoc.r_session, self.db.r_session) def test_document_url(self): """ Test that the document url is populated correctly """ sdoc = SecurityDocument(self.db) self.assertEqual( sdoc.document_url, '/'.join([self.db.database_url, '_security']) ) def test_json(self): """ Test the security document dictionary renders as a JSON string """ sdoc = SecurityDocument(self.db) sdoc.fetch() sdoc_as_json_string = sdoc.json() self.assertIsInstance(sdoc_as_json_string, str) sdoc_as_a_dict = json.loads(sdoc_as_json_string) self.assertDictEqual(sdoc_as_a_dict, sdoc) def test_fetch(self): """ Test that the security document is retrieved as expected """ sdoc = SecurityDocument(self.db) sdoc.fetch() self.assertDictEqual(sdoc, self.sdoc) def test_save(self): """ Test that the security document is updated correctly """ sdoc = SecurityDocument(self.db) sdoc.fetch() sdoc.update(self.mod_sdoc) sdoc.save() mod_sdoc = SecurityDocument(self.db) mod_sdoc.fetch() self.assertDictEqual(mod_sdoc, self.mod_sdoc) def test_context_manager(self): """ Test that the context SecurityDocument context manager enter and exit routines work as expected. 
""" with SecurityDocument(self.db) as sdoc: self.assertDictEqual(sdoc, self.sdoc) sdoc.update(self.mod_sdoc) mod_sdoc = SecurityDocument(self.db) mod_sdoc.fetch() self.assertDictEqual(mod_sdoc, self.mod_sdoc) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/unit_t_db_base.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2020 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ _unit_t_db_base_ unit_t_db_base module - The base class for all unit tests that target a db The unit tests are set to execute by default against a CouchDB instance. To run the tests using Admin Party security mode in Couchdb, set the ADMIN_PARTY environment variable to true. example: export ADMIN_PARTY=true In order to run the unit tests against a Cloudant instance, set the RUN_CLOUDANT_TESTS environment variable to something. example: export RUN_CLOUDANT_TESTS=1 Other valid environment variables: CLOUDANT_ACCOUNT: Set this to the Cloudant account that you wish to connect to. - This is used for Cloudant tests only. example: export CLOUDANT_ACCOUNT=account DB_USER: Set this to the username to connect with. - Optional for CouchDB tests. If omitted and ADMIN_PARTY is not "true" then a user will be created before tests are executed in CouchDB. - Mandatory for Cloudant tests. example: export DB_USER=user DB_PASSWORD: Set this to the password for the user specified. 
example: export DB_PASSWORD=password DB_URL: Optionally set this to override the construction of the database URL. example: export DB_URL=https://account.cloudant.com SKIP_DB_UPDATES: Set this to something to bypass all Cloudant _db_updates tests. example: export SKIP_DB_UPDATES=1 """ import unittest import requests import os import uuid import json from cloudant.client import CouchDB, Cloudant from cloudant.design_document import DesignDocument from cloudant.error import CloudantClientException from .. import unicode_ def skip_if_not_cookie_auth(f): def wrapper(*args): if not args[0].use_cookie_auth: raise unittest.SkipTest('Test only supports cookie authentication') return f(*args) return wrapper def skip_if_iam(f): def wrapper(*args): if os.environ.get('IAM_API_KEY'): raise unittest.SkipTest('Test only supports non-IAM authentication') return f(*args) return wrapper class UnitTestDbBase(unittest.TestCase): """ The base class for all unit tests targeting a database """ @classmethod def setUpClass(cls): """ If targeting CouchDB, Set up a CouchDB instance otherwise do nothing. 
""" if os.environ.get('RUN_CLOUDANT_TESTS') is None: if os.environ.get('DB_URL') is None: os.environ['DB_URL'] = 'http://127.0.0.1:5984' if (os.environ.get('ADMIN_PARTY') and os.environ.get('ADMIN_PARTY') == 'true'): if os.environ.get('DB_USER'): del os.environ['DB_USER'] if os.environ.get('DB_PASSWORD'): del os.environ['DB_PASSWORD'] return if os.environ.get('DB_USER') is None: # Get couchdb docker node name if os.environ.get('COUCHDB_VERSION') == '2.3.1': os.environ['NODENAME'] = requests.get( '{0}/_membership'.format(os.environ['DB_URL'])).json()['all_nodes'][0] os.environ['DB_USER_CREATED'] = '1' os.environ['DB_USER'] = 'user-{0}'.format( unicode_(uuid.uuid4()) ) os.environ['DB_PASSWORD'] = 'password' if os.environ.get('COUCHDB_VERSION') == '2.3.1': resp = requests.put( '{0}/_node/{1}/_config/admins/{2}'.format( os.environ['DB_URL'], os.environ['NODENAME'], os.environ['DB_USER'] ), data='"{0}"'.format(os.environ['DB_PASSWORD']) ) else: resp = requests.put( '{0}/_config/admins/{1}'.format( os.environ['DB_URL'], os.environ['DB_USER'] ), data='"{0}"'.format(os.environ['DB_PASSWORD']) ) resp.raise_for_status() @classmethod def tearDownClass(cls): """ If necessary, clean up CouchDB instance once all tests are complete. 
""" if (os.environ.get('RUN_CLOUDANT_TESTS') is None and os.environ.get('DB_USER_CREATED') is not None): if os.environ.get('COUCHDB_VERSION') == '2.3.1': resp = requests.delete( '{0}://{1}:{2}@{3}/_node/{4}/_config/admins/{5}'.format( os.environ['DB_URL'].split('://', 1)[0], os.environ['DB_USER'], os.environ['DB_PASSWORD'], os.environ['DB_URL'].split('://', 1)[1], os.environ['NODENAME'], os.environ['DB_USER'] ) ) else: resp = requests.delete( '{0}://{1}:{2}@{3}/_config/admins/{4}'.format( os.environ['DB_URL'].split('://', 1)[0], os.environ['DB_USER'], os.environ['DB_PASSWORD'], os.environ['DB_URL'].split('://', 1)[1], os.environ['DB_USER'] ) ) del os.environ['DB_USER_CREATED'] del os.environ['DB_USER'] resp.raise_for_status() def setUp(self): """ Set up test attributes for unit tests targeting a database """ self.set_up_client() def set_up_client(self, auto_connect=False, auto_renew=False, encoder=None, timeout=(30,300)): self.user = os.environ.get('DB_USER', None) self.pwd = os.environ.get('DB_PASSWORD', None) self.use_cookie_auth = True self.iam_api_key = os.environ.get('IAM_API_KEY', None) if os.environ.get('RUN_CLOUDANT_TESTS') is None: self.url = os.environ['DB_URL'] admin_party = False if os.environ.get('ADMIN_PARTY') == 'true': admin_party = True self.use_cookie_auth = False # construct Cloudant client (using admin party mode) self.client = CouchDB( self.user, self.pwd, admin_party, url=self.url, connect=auto_connect, auto_renew=auto_renew, encoder=encoder, timeout=timeout ) else: self.account = os.environ.get('CLOUDANT_ACCOUNT') self.url = os.environ.get( 'DB_URL', 'https://{0}.cloudant.com'.format(self.account)) if os.environ.get('RUN_BASIC_AUTH_TESTS'): self.use_cookie_auth = False # construct Cloudant client (using basic access authentication) self.client = Cloudant( self.user, self.pwd, url=self.url, x_cloudant_user=self.account, connect=auto_connect, auto_renew=auto_renew, encoder=encoder, timeout=timeout, use_basic_auth=True, ) elif self.iam_api_key: 
self.use_cookie_auth = False # construct Cloudant client (using IAM authentication) self.client = Cloudant( None, # username is not required self.iam_api_key, url=self.url, x_cloudant_user=self.account, connect=auto_connect, auto_renew=auto_renew, encoder=encoder, timeout=timeout, use_iam=True, ) else: # construct Cloudant client (using cookie authentication) self.client = Cloudant( self.user, self.pwd, url=self.url, x_cloudant_user=self.account, connect=auto_connect, auto_renew=auto_renew, encoder=encoder, timeout=timeout ) def tearDown(self): """ Ensure the client is new for each test """ del self.client def db_set_up(self, partitioned=False): """ Set up test attributes for Database tests """ self.client.connect() self.test_dbname = self.dbname() self.db = self.client._DATABASE_CLASS( self.client, self.test_dbname, partitioned=partitioned) self.db.create() def db_tear_down(self): """ Reset test attributes for each test """ self.db.delete() self.client.disconnect() del self.test_dbname del self.db def dbname(self, database_name='db'): return '{0}-{1}-{2}'.format(database_name, self._testMethodName, unicode_(uuid.uuid4())) def populate_db_with_documents(self, doc_count=100, **kwargs): off_set = kwargs.get('off_set', 0) docs = [ {'_id': 'julia{0:03d}'.format(i), 'name': 'julia', 'age': i} for i in range(off_set, off_set + doc_count) ] return self.db.bulk_docs(docs) def populate_db_with_partitioned_documents(self, key_count, docs_per_partition): partition_keys = [uuid.uuid4().hex.upper()[:8] for _ in range(key_count)] for partition_key in partition_keys: docs = [] for i in range(docs_per_partition): docs.append({ '_id': '{0}:doc{1}'.format(partition_key, i), 'foo': 'bar' }) self.db.bulk_docs(docs) return partition_keys def create_views(self): """ Create a design document with views for use with tests. 
""" self.ddoc = DesignDocument(self.db, 'ddoc001') self.ddoc.add_view( 'view001', 'function (doc) {\n emit(doc._id, 1);\n}' ) self.ddoc.add_view( 'view002', 'function (doc) {\n emit(doc._id, 1);\n}', '_count' ) self.ddoc.add_view( 'view003', 'function (doc) {\n emit(Math.floor(doc.age / 2), 1);\n}' ) self.ddoc.add_view( 'view004', 'function (doc) {\n emit(Math.floor(doc.age / 2), 1);\n}', '_count' ) self.ddoc.add_view( 'view005', 'function (doc) {\n emit([doc.name, doc.age], 1);\n}' ) self.ddoc.add_view( 'view006', 'function (doc) {\n emit([doc.name, doc.age], 1);\n}', '_count' ) self.ddoc.add_view( 'view007', 'function (doc) {\n emit(1, doc.name);\n}' ) self.ddoc.save() self.view001 = self.ddoc.get_view('view001') self.view002 = self.ddoc.get_view('view002') self.view003 = self.ddoc.get_view('view003') self.view004 = self.ddoc.get_view('view004') self.view005 = self.ddoc.get_view('view005') self.view006 = self.ddoc.get_view('view006') self.view007 = self.ddoc.get_view('view007') def create_search_index(self): """ Create a design document with search indexes for use with search query tests. 
""" self.search_ddoc = DesignDocument(self.db, 'searchddoc001') self.search_ddoc['indexes'] = {'searchindex001': { 'index': 'function (doc) {\n index("default", doc._id); \n ' 'if (doc.name) {\n index("name", doc.name, {"store": true}); \n} ' 'if (doc.age) {\n index("age", doc.age, {"facet": true}); \n} \n} ' } } self.search_ddoc.save() def load_security_document_data(self): """ Create a security document in the specified database and assign attributes to be used during unit tests """ self.sdoc = { 'admins': {'names': ['foo'], 'roles': ['admins']}, 'members': {'names': ['foo1', 'foo2'], 'roles': ['developers']} } self.mod_sdoc = { 'admins': {'names': ['bar'], 'roles': ['admins']}, 'members': {'names': ['bar1', 'bar2'], 'roles': ['developers']} } if os.environ.get('RUN_CLOUDANT_TESTS') is not None: self.sdoc = { 'cloudant': { 'foo1': ['_reader', '_writer'], 'foo2': ['_reader'] } } self.mod_sdoc = { 'cloudant': { 'bar1': ['_reader', '_writer'], 'bar2': ['_reader'] } } resp = self.client.r_session.put( '/'.join([self.db.database_url, '_security']), data=json.dumps(self.sdoc), headers={'Content-Type': 'application/json'} ) self.assertEqual(resp.status_code, 200) def create_db_updates(self): """ Create '_global_changes' system database required for testing against _db_updates """ self.DB_UPDATES = '_global_changes' try: self.client.create_database(self.DB_UPDATES, throw_on_exists=True) except CloudantClientException: self.delete_db_updates() self.create_db_updates() def delete_db_updates(self): """ Delete '_global_changes' system database used for _db_updates testing """ try: self.client.delete_database(self.DB_UPDATES) except CloudantClientException: pass def is_couchdb_1x_version(self): if os.environ.get('COUCHDB_VERSION') and os.environ.get('COUCHDB_VERSION').startswith('1'): return True else: # Get version from server info couchdb_info = json.loads(self.client.r_session.get(self.client.server_url).text) if couchdb_info and couchdb_info['version'].startswith('1'): 
return True else: return False ================================================ FILE: tests/unit/view_execution_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2016, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Unit tests for the execution of view queries using translated parameters. """ import unittest from nose.plugins.attrib import attr from .unit_t_db_base import UnitTestDbBase @attr(db=['cloudant','couch']) class QueryParmExecutionTests(UnitTestDbBase): """ Test cases for the execution of views queries using translated parameters. """ def setUp(self): """ Set up test attributes """ super(QueryParmExecutionTests, self).setUp() self.db_set_up() self.populate_db_with_documents() self.create_views() def tearDown(self): """ Reset test attributes """ self.db_tear_down() super(QueryParmExecutionTests, self).tearDown() def test_descending_true(self): """ Test view query using descending parameter set to True. The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... 
""" actual = self.view001(descending=True)['rows'] expected = [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(100)] self.assertEqual(actual, list(reversed(expected))) def test_descending_false(self): """ Test view query using descending parameter set to False. The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... """ actual = self.view001(descending=False)['rows'] expected = [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(100)] self.assertEqual(actual, expected) def test_endkey_int(self): """ Test view query using endkey parameter as an integer. The view used here will generate rows of data where each key will be an integer. Such as: {'key': 0, 'id': 'julia000', 'value': 1}, {'key': 0, 'id': 'julia001', 'value': 1}, {'key': 1, 'id': 'julia002', 'value': 1}, {'key': 1, 'id': 'julia003', 'value': 1}, ... {'key': 5, 'id': 'julia010', 'value': 1}, {'key': 5, 'id': 'julia011', 'value': 1}, ... """ actual = self.view003(endkey=4)['rows'] expected = [{'key': x // 2, 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(10)] self.assertEqual(len(actual), 10) self.assertEqual(len(expected), 10) self.assertEqual(actual, expected) def test_endkey_str(self): """ Test view query using endkey parameter as a string. The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... 
""" actual = self.view001(endkey='julia009')['rows'] expected = [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(10)] self.assertEqual(len(actual), 10) self.assertEqual(len(expected), 10) self.assertEqual(actual, expected) def test_endkey_complex(self): """ Test view query using endkey parameter as a complex key. The view used here will generate rows of data where each key is a complex key. Such as: {'key': ['julia', 0], 'id': 'julia000', 'value': 1}, {'key': ['julia', 1], 'id': 'julia001', 'value': 1}, {'key': ['julia', 2], 'id': 'julia002', 'value': 1}, ... """ actual = self.view005(endkey=['julia', 9])['rows'] expected = [{'key': ['julia', x], 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(10)] self.assertEqual(len(actual), 10) self.assertEqual(len(expected), 10) self.assertEqual(actual, expected) def test_endkey_docid(self): """ Test view query using endkey_docid parameter. The view used here will generate rows of data where each key will have two ids associated with it. Such as: {'key': 0, 'id': 'julia000', 'value': 1}, {'key': 0, 'id': 'julia001', 'value': 1}, {'key': 1, 'id': 'julia002', 'value': 1}, {'key': 1, 'id': 'julia003', 'value': 1}, ... {'key': 5, 'id': 'julia010', 'value': 1}, {'key': 5, 'id': 'julia011', 'value': 1}, ... """ # Ensure that only rows of data up to and including the first document # where the key is 5 are returned. actual = self.view003(endkey_docid='julia010', endkey=5)['rows'] expected = [{'key': x // 2, 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(11)] self.assertEqual(len(actual), 11) self.assertEqual(len(expected), 11) self.assertEqual(actual, expected) def test_group_true(self): """ Test view query using group parameter set to True. The view used here along with group=True will generate rows of data where each key will be grouped into groups of 2. Such as: {'key': 0, 'value': 2}, {'key': 1, 'value': 2}, {'key': 2, 'value': 2}, ... 
{'key': 49, 'value': 2} """ actual = self.view004(group=True)['rows'] expected = [{'key': x, 'value': 2} for x in range(50)] self.assertEqual(len(actual), 50) self.assertEqual(len(expected), 50) self.assertEqual(actual, expected) def test_group_false(self): """ Test view query using group parameter set to False. The view used here will generate a row of data containing the number of documents matching the view query. Such as: {'key': None, 'value': 100} """ actual = self.view004(group=False)['rows'] self.assertEqual(actual, [{'key': None, 'value': 100}]) def test_group_level(self): """ Test view query using group_level parameter. The view used here along with group_level=1 will generate rows of data that calculate the count for a grouping of the first element in the complex key defined by this view. In this case the output will yield a single row of data for the key ['julia']. Such as: {'key': ['julia'], 'value': 100} """ actual = self.view006(group_level=1)['rows'] expected = [{'key': ['julia'], 'value': 100}] self.assertEqual(actual, expected) def test_include_docs_true(self): """ Test view query using include_docs set to True and the key parameter. The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... 
""" data = self.view001(key='julia010', include_docs=True)['rows'] self.assertEqual(len(data), 1) self.assertTrue( all(x in ['key', 'id', 'value', 'doc'] for x in data[0].keys()) ) self.assertEqual(data[0]['key'], 'julia010') self.assertEqual(data[0]['id'], 'julia010') self.assertEqual(data[0]['value'], 1) self.assertTrue( all(x in ['_id', '_rev', 'name', 'age'] for x in data[0]['doc'].keys()) ) self.assertEqual(data[0]['doc']['_id'], 'julia010') self.assertTrue(data[0]['doc']['_rev'].startswith('1-')) self.assertEqual(data[0]['doc']['name'], 'julia') self.assertEqual(data[0]['doc']['age'], 10) def test_include_docs_false(self): """ Test view query using include_docs set to False and the key parameter. The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... """ actual = self.view001(key='julia010', include_docs=False)['rows'] expected = [{'key': 'julia010', 'id': 'julia010', 'value': 1}] self.assertEqual(actual, expected) def test_inclusive_end_true(self): """ Test view query using inclusive_end set to True and the endkey parameter. The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... """ actual = self.view001(endkey='julia010', inclusive_end=True)['rows'] expected = [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(11)] self.assertEqual(actual, expected) def test_inclusive_end_false(self): """ Test view query using inclusive_end set to False and the endkey parameter. The view used here will generate rows of data where each key will equal the document id. 
Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... """ actual = self.view001(endkey='julia010', inclusive_end=False)['rows'] expected = [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(10)] self.assertEqual(actual, expected) def test_key_int(self): """ Test view query using key parameter as an integer. The view used here will generate rows of data where each key will be an integer. Such as: {'key': 0, 'id': 'julia000', 'value': 1}, {'key': 0, 'id': 'julia001', 'value': 1}, {'key': 1, 'id': 'julia002', 'value': 1}, {'key': 1, 'id': 'julia003', 'value': 1}, ... {'key': 5, 'id': 'julia010', 'value': 1}, {'key': 5, 'id': 'julia011', 'value': 1}, ... """ actual = self.view003(key=5)['rows'] expected = [{'key': 5, 'id': 'julia010', 'value': 1}, {'key': 5, 'id': 'julia011', 'value': 1}] self.assertEqual(actual, expected) def test_key_str(self): """ Test view query using key parameter as a string. The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... """ actual = self.view001(key='julia010')['rows'] expected = [{'key': 'julia010', 'id': 'julia010', 'value': 1}] self.assertEqual(actual, expected) def test_key_complex(self): """ Test view query using key parameter as a complex key. The view used here will generate rows of data where each key is a complex key. Such as: {'key': ['julia', 0], 'id': 'julia000', 'value': 1}, {'key': ['julia', 1], 'id': 'julia001', 'value': 1}, {'key': ['julia', 2], 'id': 'julia002', 'value': 1}, ... 
""" actual = self.view005(key=['julia', 10])['rows'] expected = [{'key': ['julia', 10], 'id': 'julia010', 'value': 1}] self.assertEqual(actual, expected) def test_keys_int(self): """ Test view query using keys parameter as a list of integers. The view used here will generate rows of data where each key will be an integer. Such as: {'key': 0, 'id': 'julia000', 'value': 1}, {'key': 0, 'id': 'julia001', 'value': 1}, {'key': 1, 'id': 'julia002', 'value': 1}, {'key': 1, 'id': 'julia003', 'value': 1}, ... {'key': 5, 'id': 'julia010', 'value': 1}, {'key': 5, 'id': 'julia011', 'value': 1}, ... """ actual = self.view003(keys=[10, 20, 30])['rows'] expected = [{'key': 10, 'id': 'julia020', 'value': 1}, {'key': 10, 'id': 'julia021', 'value': 1}, {'key': 20, 'id': 'julia040', 'value': 1}, {'key': 20, 'id': 'julia041', 'value': 1}, {'key': 30, 'id': 'julia060', 'value': 1}, {'key': 30, 'id': 'julia061', 'value': 1}] self.assertEqual(actual, expected) def test_keys_str(self): """ Test view query using keys parameter as a list of strings. The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... """ actual = self.view001(keys=['julia010', 'julia020', 'julia030'])['rows'] expected = [{'key': 'julia010', 'id': 'julia010', 'value': 1}, {'key': 'julia020', 'id': 'julia020', 'value': 1}, {'key': 'julia030', 'id': 'julia030', 'value': 1}] self.assertEqual(actual, expected) def test_keys_complex(self): """ Test view query using keys parameter as a list of complex keys. The view used here will generate rows of data where each key is a complex key. Such as: {'key': ['julia', 0], 'id': 'julia000', 'value': 1}, {'key': ['julia', 1], 'id': 'julia001', 'value': 1}, {'key': ['julia', 2], 'id': 'julia002', 'value': 1}, ... 
""" actual = self.view005(keys=[['julia', 10], ['julia', 20], ['julia', 30]])['rows'] expected = [{'key': ['julia', 10], 'id': 'julia010', 'value': 1}, {'key': ['julia', 20], 'id': 'julia020', 'value': 1}, {'key': ['julia', 30], 'id': 'julia030', 'value': 1}] self.assertEqual(actual, expected) def test_limit(self): """ Test view query using the limit parameter. The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... """ actual = self.view001(limit=10)['rows'] expected = [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(10)] self.assertEqual(actual, expected) def test_reduce_true(self): """ Test view query using the reduce parameter set to True. The view used here along with reduce=True will generate a row of data containing the count of documents that match the query. Such as: {'key': None, 'value': 100} """ actual = self.view004(reduce=True)['rows'] self.assertEqual(actual, [{'key': None, 'value': 100}]) def test_reduce_false(self): """ Test view query using the reduce parameter set to False. The view used here along with reduce=False will generate rows of data where each key will be an integer. Such as: {'key': 0, 'id': 'julia000', 'value': 1}, {'key': 0, 'id': 'julia001', 'value': 1}, {'key': 1, 'id': 'julia002', 'value': 1}, {'key': 1, 'id': 'julia003', 'value': 1}, ... {'key': 5, 'id': 'julia010', 'value': 1}, {'key': 5, 'id': 'julia011', 'value': 1}, ... """ actual = self.view004(reduce=False)['rows'] expected = [{'key': x // 2, 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(100)] self.assertEqual(len(actual), 100) self.assertEqual(len(expected), 100) self.assertEqual(actual, expected) def test_skip(self): """ Test view query using the skip parameter. 
The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... """ actual = self.view001(skip=10)['rows'] expected = [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(10, 100)] self.assertEqual(actual, expected) def test_stale_ok(self): """ Test view query using the stale parameter set to ok. Since there is no way to know whether the view will return a stale response or not the test here focuses on ensuring that the call itself is successful. """ try: self.view001(stale='ok') except Exception as err: self.fail('An unexpected error was encountered: '+str(err)) def test_stale_update_after(self): """ Test view query using the stale parameter set to update_after. Since there is no way to know whether the view will return a stale response or not the test here focuses on ensuring that the call itself is successful. """ try: self.view001(stale='update_after') except Exception as err: self.fail('An unexpected error was encountered:' +str(err)) def test_stable_true(self): """ Test view query using the stable parameter set to true Since there is no way to know whether the view will return a response from a stable set of shards or not the test here focuses on ensuring that the call itself is successful. """ try: self.view001(stable=True) except Exception as err: self.fail('An unexpected error was encountered: '+str(err)) def test_stable_update_lazy(self): """ Test view query using the update parameter set to lazy Since there is no way to know whether the view will update lazily or not the test here focuses on ensuring that the call itself is successful. 
""" try: self.view001(update='lazy') except Exception as err: self.fail('An unexpected error was encountered: '+str(err)) def test_stable_update_true(self): """ Test view query using the update parameter set to true Since there is no way to know whether the view will update or not the test here focuses on ensuring that the call itself is successful. """ try: self.view001(update='true') except Exception as err: self.fail('An unexpected error was encountered: '+str(err)) def test_startkey_int(self): """ Test view query using startkey parameter as an integer. The view used here will generate rows of data where each key will be an integer. Such as: {'key': 0, 'id': 'julia000', 'value': 1}, {'key': 0, 'id': 'julia001', 'value': 1}, {'key': 1, 'id': 'julia002', 'value': 1}, {'key': 1, 'id': 'julia003', 'value': 1}, ... {'key': 5, 'id': 'julia010', 'value': 1}, {'key': 5, 'id': 'julia011', 'value': 1}, ... """ actual = self.view003(startkey=5)['rows'] expected = [{'key': x // 2, 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(10, 100)] self.assertEqual(len(actual), 90) self.assertEqual(len(expected), 90) self.assertEqual(actual, expected) def test_startkey_str(self): """ Test view query using startkey parameter as a string. The view used here will generate rows of data where each key will equal the document id. Such as: {'key': 'julia000', 'id': 'julia000', 'value': 1}, {'key': 'julia001', 'id': 'julia001', 'value': 1}, {'key': 'julia002', 'id': 'julia002', 'value': 1}, ... """ actual = self.view001(startkey='julia010')['rows'] expected = [{'key': 'julia{0:03d}'.format(x), 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(10, 100)] self.assertEqual(len(actual), 90) self.assertEqual(len(expected), 90) self.assertEqual(actual, expected) def test_startkey_complex(self): """ Test view query using startkey parameter as a complex key. The view used here will generate rows of data where each key is a complex key. 
Such as: {'key': ['julia', 0], 'id': 'julia000', 'value': 1}, {'key': ['julia', 1], 'id': 'julia001', 'value': 1}, {'key': ['julia', 2], 'id': 'julia002', 'value': 1}, ... """ actual = self.view005(startkey=['julia', 10])['rows'] expected = [{'key': ['julia', x], 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(10, 100)] self.assertEqual(len(actual), 90) self.assertEqual(len(expected), 90) self.assertEqual(actual, expected) def test_startkey_docid(self): """ Test view query using startkey_docid parameter. The view used here will generate rows of data where each key will have two ids associated with it. Such as: {'key': 0, 'id': 'julia000', 'value': 1}, {'key': 0, 'id': 'julia001', 'value': 1}, {'key': 1, 'id': 'julia002', 'value': 1}, {'key': 1, 'id': 'julia003', 'value': 1}, ... {'key': 5, 'id': 'julia010', 'value': 1}, {'key': 5, 'id': 'julia011', 'value': 1}, ... """ # Ensure that only rows of data starting at the second document # where the key is 5 are returned. actual = self.view003(startkey_docid='julia011', startkey=5)['rows'] expected = [{'key': x // 2, 'id': 'julia{0:03d}'.format(x), 'value': 1} for x in range(11, 100)] self.assertEqual(len(actual), 89) self.assertEqual(len(expected), 89) self.assertEqual(actual, expected) if __name__ == '__main__': unittest.main() ================================================ FILE: tests/unit/view_tests.py ================================================ #!/usr/bin/env python # Copyright (C) 2015, 2018 IBM Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
"""
_view_tests_

view module - Unit tests for the View/QueryIndexView classes

See configuration options for environment variables in unit_t_db_base
module docstring.

"""

import unittest
import mock
import requests

from cloudant._common_util import _Code
from cloudant.design_document import DesignDocument
from cloudant.error import CloudantArgumentError, CloudantViewException
from cloudant.result import Result
from cloudant.view import View, QueryIndexView
from nose.plugins.attrib import attr

from .unit_t_db_base import UnitTestDbBase

class CodeTests(unittest.TestCase):
    """
    _Code class unit test
    """

    def test_constructor(self):
        """
        Ensure that the _Code class constructor returns a _Code object that
        wraps a Python str
        """
        code = _Code('this is code.')
        self.assertIsInstance(code, _Code)
        self.assertEqual(code, 'this is code.')

class CloudantViewExceptionTests(unittest.TestCase):
    """
    Ensure CloudantViewException functions as expected.
    """
    # NOTE: the docstring previously referenced CloudantReplicatorException,
    # a copy/paste error from the replicator test module.

    def test_raise_without_code(self):
        """
        Ensure that a default exception/code is used if none is provided.
        """
        with self.assertRaises(CloudantViewException) as cm:
            raise CloudantViewException()
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_using_invalid_code(self):
        """
        Ensure that a default exception/code is used if invalid code is
        provided.
        """
        with self.assertRaises(CloudantViewException) as cm:
            raise CloudantViewException('foo')
        self.assertEqual(cm.exception.status_code, 100)

    def test_raise_with_proper_code(self):
        """
        Ensure that the requested exception is raised.
        """
        with self.assertRaises(CloudantViewException) as cm:
            raise CloudantViewException(101)
        self.assertEqual(cm.exception.status_code, 101)

@attr(db=['cloudant','couch'])
class ViewTests(UnitTestDbBase):
    """
    View class unit tests
    """

    def setUp(self):
        """
        Set up test attributes
        """
        super(ViewTests, self).setUp()
        self.db_set_up()

    def tearDown(self):
        """
        Reset test attributes
        """
        self.db_tear_down()
        super(ViewTests, self).tearDown()

    def test_constructor(self):
        """
        Test instantiating a View
        """
        ddoc = DesignDocument(self.db, 'ddoc001')
        view = View(
            ddoc,
            'view001',
            'function (doc) {\n emit(doc._id, 1);\n}',
            '_count',
            dbcopy='{0}-copy'.format(self.db.database_name)
        )
        self.assertEqual(view.design_doc, ddoc)
        self.assertEqual(view.view_name, 'view001')
        self.assertIsInstance(view['map'], _Code)
        self.assertEqual(
            view['map'], 'function (doc) {\n emit(doc._id, 1);\n}'
        )
        self.assertIsInstance(view['reduce'], _Code)
        self.assertEqual(view['reduce'], '_count')
        self.assertEqual(
            view['dbcopy'], '{0}-copy'.format(self.db.database_name)
        )
        self.assertEqual(view, {
            'map': 'function (doc) {\n emit(doc._id, 1);\n}',
            'reduce': '_count',
            'dbcopy': '{0}-copy'.format(self.db.database_name)
        })

    def test_map_setter(self):
        """
        Test that the map setter works
        """
        ddoc = DesignDocument(self.db, 'ddoc001')
        view = View(ddoc, 'view001')
        self.assertIsNone(view.get('map'))
        view.map = 'function (doc) {\n emit(doc._id, 1);\n}'
        self.assertEqual(
            view.get('map'), 'function (doc) {\n emit(doc._id, 1);\n}'
        )

    def test_map_getter(self):
        """
        Test that the map getter works
        """
        ddoc = DesignDocument(self.db, 'ddoc001')
        view = View(ddoc, 'view001')
        self.assertIsNone(view.map)
        view.map = 'function (doc) {\n emit(doc._id, 1);\n}'
        self.assertIsInstance(view.map, _Code)
        self.assertEqual(view.map, 'function (doc) {\n emit(doc._id, 1);\n}')

    def test_reduce_setter(self):
        """
        Test that the reduce setter works
        """
        ddoc = DesignDocument(self.db, 'ddoc001')
        view = View(ddoc, 'view001')
        self.assertIsNone(view.get('reduce'))
        view.reduce = '_count'
        self.assertEqual(view.get('reduce'), '_count')

    def test_reduce_getter(self):
        """
        Test that the reduce getter works
        """
        ddoc = DesignDocument(self.db, 'ddoc001')
        view = View(ddoc, 'view001')
        self.assertIsNone(view.reduce)
        view.reduce = '_count'
        self.assertIsInstance(view.reduce, _Code)
        self.assertEqual(view.reduce, '_count')

    def test_retrieve_view_url(self):
        """
        Test the retrieval of the View url
        """
        ddoc = DesignDocument(self.db, 'ddoc001')
        view = View(ddoc, 'view001')
        self.assertEqual(
            view.url, '/'.join((ddoc.document_url, '_view/view001'))
        )

    def test_get_view_callable_raw_json(self):
        """
        Test that the GET request of the View __call__ method that is invoked
        when calling the view object returns the appropriate raw JSON
        response.
        """
        self.populate_db_with_documents()
        ddoc = DesignDocument(self.db, 'ddoc001')
        ddoc.add_view(
            'view001',
            'function (doc) {\n emit(doc._id, 1);\n}'
        )
        ddoc.save()
        view = ddoc.get_view('view001')
        ids = []
        # view(limit=3) calls the view object and passes it the limit
        # parameter where a HTTP GET request is made.
        for row in view(limit=3)['rows']:
            ids.append(row['id'])
        expected = ['julia000', 'julia001', 'julia002']
        self.assertTrue(all(x in ids for x in expected))

    def test_post_view_callable_raw_json(self):
        """
        Using the "keys" parameter test that the POST request of the View
        __call__ method that is invoked when calling the view object returns
        the appropriate raw JSON response.
        """
        # Create 200 documents with ids julia000, julia001, julia002, ...,
        # julia199
        self.populate_db_with_documents(200)
        # Generate keys list for every other document created
        # with ids julia000, julia002, julia004, ..., julia198
        keys_list = ['julia{0:03d}'.format(i) for i in range(0, 200, 2)]
        self.assertEqual(len(keys_list), 100)
        ddoc = DesignDocument(self.db, 'ddoc001')
        ddoc.add_view(
            'view001',
            'function (doc) {\n emit(doc._id, 1);\n}'
        )
        ddoc.save()
        view = ddoc.get_view('view001')
        # view(keys=keys_list) calls the view object and passes keys parameter
        ids = [row['id'] for row in view(keys=keys_list)['rows']]
        self.assertEqual(len(ids), 100)
        self.assertTrue(all(x in ids for x in keys_list))

    def test_post_view_callable_raw_json_multiple_params(self):
        """
        Using "keys" and other parameters test that the POST request of the
        View __call__ method that is invoked when calling the view object
        returns the appropriate raw JSON response.
        """
        # Create 200 documents with ids julia000, julia001, julia002, ...,
        # julia199
        self.populate_db_with_documents(200)
        # Generate keys list for every other document created
        # with ids julia000, julia002, julia004, ..., julia198
        keys_list = ['julia{0:03d}'.format(i) for i in range(0, 200, 2)]
        self.assertEqual(len(keys_list), 100)
        ddoc = DesignDocument(self.db, 'ddoc001')
        ddoc.add_view(
            'view001',
            'function (doc) {\n emit(doc._id, 1);\n}'
        )
        ddoc.save()
        view = ddoc.get_view('view001')
        # view(keys=keys_list, limit=3) calls the view object and passes keys
        # and limit parameters
        ids = [row['id'] for row in view(keys=keys_list, limit=3)['rows']]
        self.assertTrue(
            all(x in ids for x in ['julia000', 'julia002', 'julia004']))

    def test_view_callable_view_result(self):
        """
        Test that by referencing the .result attribute the view callable
        method is invoked and the data returned is wrapped as a Result.
        """
        self.populate_db_with_documents()
        ddoc = DesignDocument(self.db, 'ddoc001')
        ddoc.add_view(
            'view001',
            'function (doc) {\n emit(doc._id, 1);\n}'
        )
        ddoc.save()
        view = ddoc.get_view('view001')
        rslt = view.result
        self.assertIsInstance(rslt, Result)
        ids = []
        # rslt[:3] limits the Result to the first 3 elements
        for row in rslt[:3]:
            ids.append(row['id'])
        expected = ['julia000', 'julia001', 'julia002']
        self.assertTrue(all(x in ids for x in expected))

    def test_view_callable_with_non_existing_view(self):
        """
        Test error condition when view used does not exist remotely.
        """
        self.populate_db_with_documents()
        # The view "missing-view" does not exist in the remote database
        view = View(
            DesignDocument(self.db, 'ddoc001'),
            'missing-view',
            'function (doc) {\n emit(doc._id, 1);\n}'
        )
        self.assertIsInstance(view, View)
        try:
            for row in view.result:
                self.fail('Above statement should raise an Exception')
        except requests.HTTPError as err:
            self.assertEqual(err.response.status_code, 404)

    def test_custom_result_context_manager(self):
        """
        Test that the context manager for custom results returns the
        expected Results
        """
        self.populate_db_with_documents()
        ddoc = DesignDocument(self.db, 'ddoc001')
        ddoc.add_view(
            'view001',
            'function (doc) {\n emit(doc._id, 1);\n}'
        )
        ddoc.save()
        view = ddoc.get_view('view001')
        # Return a custom result by including documents
        with view.custom_result(include_docs=True, reduce=False) as rslt:
            i = 0
            for row in rslt:
                self.assertEqual(row['doc']['_id'], 'julia{0:03d}'.format(i))
                self.assertTrue(row['doc']['_rev'].startswith('1-'))
                self.assertEqual(row['doc']['name'], 'julia')
                self.assertEqual(row['doc']['age'], i)
                i += 1
            self.assertEqual(i, 100)

class QueryIndexViewTests(unittest.TestCase):
    """
    QueryIndexView class unit tests.  These tests use a mocked DesignDocument
    since a QueryIndexView object is not callable so an actual connection is
    not necessary.
    """

    def setUp(self):
        """
        Set up test attributes
        """
        self.ddoc = mock.Mock()
        self.ddoc.r_session = 'mocked-session'
        self.ddoc.document_url = 'http://mock.example.com/my_db/_design/ddoc001'
        self.view = QueryIndexView(
            self.ddoc,
            'view001',
            {'fields': {'name': 'asc', 'age': 'asc'}},
            '_count',
            options={'def': {'fields': ['name', 'age']}, 'w': 2}
        )

    def test_constructor(self):
        """
        Test constructing a QueryIndexView
        """
        self.assertIsInstance(self.view, QueryIndexView)
        self.assertEqual(self.view.design_doc, self.ddoc)
        self.assertEqual(self.view.view_name, 'view001')
        self.assertIsNone(self.view.result)
        self.assertEqual(self.view, {
            'map': {'fields': {'name': 'asc', 'age': 'asc'}},
            'reduce': '_count',
            'options': {'def': {'fields': ['name', 'age']}, 'w': 2}
        })

    def test_map_getter(self):
        """
        Test that the map getter works
        """
        self.assertEqual(
            self.view.map, {'fields': {'name': 'asc', 'age': 'asc'}}
        )
        self.assertEqual(self.view.map, self.view['map'])

    def test_map_setter(self):
        """
        Test that the map setter works
        """
        self.view.map = {'fields': {'name': 'desc', 'age': 'desc'}}
        self.assertEqual(
            self.view.map, {'fields': {'name': 'desc', 'age': 'desc'}}
        )
        self.assertEqual(self.view.map, self.view['map'])

    def test_map_setter_failure(self):
        """
        Test that the map setter fails if a dict is not supplied
        """
        # Uses assertRaises for consistency with test_reduce_setter_failure
        # (previously a try/fail/except block with the same semantics).
        with self.assertRaises(CloudantArgumentError) as cm:
            self.view.map = 'function (doc) {\n emit(doc._id, 1);\n}'
        self.assertEqual(
            str(cm.exception), 'The map property must be a dictionary.'
        )

    def test_reduce_getter(self):
        """
        Test that the reduce getter works
        """
        self.assertEqual(self.view.reduce, '_count')
        self.assertEqual(self.view.reduce, self.view['reduce'])

    def test_reduce_setter(self):
        """
        Test that the reduce setter works
        """
        self.view.reduce = '_sum'
        self.assertEqual(self.view.reduce, '_sum')
        self.assertEqual(self.view.reduce, self.view['reduce'])

    def test_reduce_setter_failure(self):
        """
        Test that the reduce setter fails if a string is not supplied
        """
        with self.assertRaises(CloudantArgumentError) as cm:
            self.view.reduce = {'_count'}
        err = cm.exception
        self.assertEqual(str(err), 'The reduce property must be a string.')

    def test_callable_disabled(self):
        """
        Test that the callable for QueryIndexView does not execute.
        """
        with self.assertRaises(CloudantViewException) as cm:
            self.view()
        err = cm.exception
        self.assertEqual(
            str(err),
            'A QueryIndexView is not callable.  '
            'If you wish to execute a query '
            'use the database \'get_query_result\' convenience method.'
        )

    def test_custom_result_disabled(self):
        """
        Test that the custom_result context manager for QueryIndexView does
        not execute.
        """
        with self.assertRaises(CloudantViewException) as cm:
            with self.view.custom_result():
                pass
        err = cm.exception
        self.assertEqual(
            str(err),
            'Cannot create a custom result context manager using a '
            'QueryIndexView.  If you wish to execute a query use the '
            'database \'get_query_result\' convenience method instead.'
        )

if __name__ == '__main__':
    unittest.main()