[
  {
    "path": ".gitignore",
    "content": ".DS_Store\n*.pyc\ndata/*.h5\ndata/*.db\ntest/unit/*.h5\ntest/unit/*.db\n\n# jetbrains IDE stuff\n.idea\n*.iml\n\n"
  },
  {
    "path": ".gitmodules",
    "content": "[submodule \"hdf5-json\"]\n\tpath = hdf5-json\n\turl = https://github.com/HDFGroup/hdf5-json.git\n[submodules \"hdf5-json\"]\n\tbranch = stable\n"
  },
  {
    "path": ".travis.yml",
    "content": "language: python\n\nnotifications:\n  email: false\n\npython:\n  - \"2.7\"\n  - \"3.4\"\n  - \"3.5\"\n  - \"3.6\"\n  - \"3.7\"\n  - \"3.8\"\n\n\ninstall:\n    - sudo apt-get update -qq\n    - sudo apt-get install -qq libhdf5-serial-dev\n    - pip uninstall numpy -y \n    - pip install numpy>=1.10.4\n    - pip install h5py\n    - pip install requests\n    - pip install pytz\n    - pip install tornado\n    - pip install watchdog\n    - git clone https://github.com/HDFGroup/hdf5-json.git\n    - cd hdf5-json\n    - python setup.py install\n    - cd ..\n    - pip install .\n\nscript:\n    - PYTHONPATH=\"test/unit\" python test/testall.py --unit --failslow\n    - python h5serv >h5serv.out &\n    - sleep 5\n    - PYTHONPATH=\"test/integ\" python test/testall.py --integ --failslow\n"
  },
  {
    "path": "COPYING",
    "content": "\nCopyright Notice and License Terms for \nh5serv Software Service, Libraries and Utilities\n-----------------------------------------------------------------------------\n\nh5serv (HDF5 REST Server) Service, Libraries and Utilities\nCopyright 2014-2017 by The HDF Group.\n\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without \nmodification, are permitted for any purpose (including commercial purposes) \nprovided that the following conditions are met:\n\n1. Redistributions of source code must retain the above copyright notice, \n   this list of conditions, and the following disclaimer.\n\n2. Redistributions in binary form must reproduce the above copyright notice, \n   this list of conditions, and the following disclaimer in the documentation \n   and/or materials provided with the distribution.\n\n3. In addition, redistributions of modified forms of the source or binary \n   code must carry prominent notices stating that the original code was \n   changed and the date of the change.\n\n4. All publications or advertising materials mentioning features or use of \n   this software are asked, but not required, to acknowledge that it was \n   developed by The HDF Group and credit the contributors.\n\n5. Neither the name of The HDF Group, nor the name of any Contributor may \n   be used to endorse or promote products derived from this software \n   without specific prior written permission from The HDF Group or the \n   Contributor, respectively.\n\nDISCLAIMER: \nTHIS SOFTWARE IS PROVIDED BY THE HDF GROUP AND THE CONTRIBUTORS \n\"AS IS\" WITH NO WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED.  In no \nevent shall The HDF Group or the Contributors be liable for any damages \nsuffered by the users arising out of the use of this software, even if \nadvised of the possibility of such damage. \n"
  },
  {
    "path": "Dockerfile",
    "content": "FROM python:3.6\nMAINTAINER John Readey <jreadey@hdfgroup.org>\nRUN cd /usr/local/src                                    ; \\\n    pip install --upgrade pip                            ; \\\n    pip install h5py                                     ; \\\n    pip install tornado                                  ; \\\n    pip install requests                                 ; \\\n    pip install pytz                                     ; \\\n    pip install watchdog                                 ; \\\n    pip install pymongo       \nWORKDIR /usr/local/src         \nRUN git clone https://github.com/HDFGroup/hdf5-json.git  ; \\\n    cd hdf5-json                                         ; \\\n    python setup.py install                              ; \\\n    cd ..                                                ; \\\n    mkdir h5serv       \nWORKDIR /usr/local/src/h5serv                                                          \nCOPY h5serv h5serv                                       \nCOPY util util                                         \nCOPY test test                                        \nCOPY data /data \nRUN  cp /usr/local/src/hdf5-json/data/hdf5/tall.h5 /data ; \\                                      \n     ln -s /data \n                              \nEXPOSE 5000 \n\nCOPY entrypoint.sh /\n\nENTRYPOINT [\"/entrypoint.sh\"]\n"
  },
  {
    "path": "README.rst",
    "content": "h5serv - REST-based service for HDF5 data\n===========================================\n\n.. image:: https://travis-ci.org/HDFGroup/h5serv.svg?branch=develop\n    :target: https://travis-ci.org/HDFGroup/h5serv\n\nIntroduction\n------------\nh5serv is a web service that implements a REST-based web service for HDF5 data stores\nas described in the paper: http://hdfgroup.org/pubs/papers/RESTful_HDF5.pdf. \n\nNotice\n------\nh5serv has been deprecated.  Users looking for a RESTful way of accessing HDF data should \nuse HSDS (https://github.com/HDFGroup/hsds) instead.\n\nWebsites\n--------\n\n* Main website: http://www.hdfgroup.org\n* Source code: https://github.com/HDFGroup/h5serv\n* Mailing list: hdf-forum@lists.hdfgroup.org <hdf-forum@lists.hdfgroup.org>\n* Documentation: http://h5serv.readthedocs.org\n\n\nQuick Install\n-------------\n\nInstall Python (2.7 or later) and the following packages:\n\n* NumPy 1.10.4 or later\n* h5py 2.5 or later\n* tornado 4.0.2 or later\n* watchdog 0.8.3 or later\n* requests 2.3 or later (for client tests)\n\nClone the hdf5-json project: ``git clone https://github.com/HDFGroup/hdf5-json.git`` .\nNext, cd to the hdf5-json folder and run: ``python setup.py install``.\n\nClone this project: ``git clone https://github.com/HDFGroup/h5serv.git``.\n\nRunning the Server\n------------------\n\nStart the server:  ``cd h5serv; python h5serv``.\n\nBy default the server will listen on port 5000.  The port and and several other defaults can be modified\nwith command line options.  For example to use port 8888 run:  ``python h5serv --port=8888``.\n\nSee test cases for examples of interacting with the server.  Run: ``python testall.py`` from the test directory \nto run through the entire test suite.\n\nAlso, the interface (at least as far as read requests) can be explored in a browser. Go to: http://127.0.0.1:5000/.  
\nA JSON browser plugin will be helpful for formatting responses from the server to be more human readable.\n\nSee h5serv/docs/Installation.rst for step by step install instructions.\n\nRunning with Docker\n-------------------\n\nTo run h5serv as a docker container you just need to install Docker (no Python, h5py, etc. needed).\n\n* Install docker: https://docs.docker.com/installation/#installation.\n* Run the h5serv image: ``docker run -p 5000:5000 -d -v <mydata>:/data hdfgroup/h5serv`` where <mydata> is the folder path that contains any HDF5 files you want to made available through the h5serv REST API.  Since requests to the server can modify (or delete!) content, you probably want to create a new folder and copy files to it.\n* Go to http://192.168.99.100:5000/ in your browser to verify the server is up and running (replace 192.168.99.100 with the IP address of the system or VM that is running the container).\n\nWriting Client Applications\n----------------------------\nAs a REST service, clients be developed using almost any programming language.  The \ntest programs under: h5serv/test/integ illustrate some of the methods for peforming\ndifferent operations using Python. \n\nThe related project: https://github.com/HDFGroup/h5pyd provides a (mostly) h5py-compatible \ninterface to the server for Python clients.\n\n\nUninstalling\n------------\n\nh5serv does not modify anything in the system outside the directory where it was \ninstalled, so just remove the install directory and all contents to uninstall.\n\n    \nReporting bugs (and general feedback)\n-------------------------------------\n\nCreate new issues at http://github.com/HDFGroup/h5serv/issues for any problems you find. \n\nFor general questions/feedback, please use the list (hdf-forum@lists.hdfgroup.org).\n"
  },
  {
    "path": "data/readme.txt",
    "content": "This is the default location for HDF5 data files to be visible in h5serv.\n"
  },
  {
    "path": "docs/AclOps/GET_ACL.rst",
    "content": "**********************************************\nGET ACL\n**********************************************\n\nDescription\n===========\nReturns access information for the given user for the object with the UUID provided in the URI.\n\nRequests\n========\n\nSyntax\n------\n\nTo get a user's default access for a domain:\n\n.. code-block:: http\n\n    GET /acls/<userid> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n\nTo get a user's access information for a group:\n\n.. code-block:: http\n\n    GET /groups/<id>/acls/<userid> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>   \n\nTo get a user's access information for a dataset:\n\n.. code-block:: http\n\n    GET /datasets/<id>/acls/<userid> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n\nTo get a user's access information for a committed datatype:\n\n.. code-block:: http\n\n    GET /datatypes/<id>/acls/<userid> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n\nwhere:\n    \n* <id> is the UUID of the requested dataset/group/committed datatype\n* <userid> is the userid for the requested user.  Use the special userid \"default\" to get the default access permisions for the object\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\n\nacl\n^^^\nA JSON object that describe a users acces permisions.  
Subkeys of acl are:\n\nuserName: the userid of the requested user\n\ncreate: A boolean flag that indicated if the user is authorized to create new resources\n\ndelete: A boolean flag that indicated if the user is authorized to delete resources\n\nread: A boolean flag that indicated if the user is authorized to read (GET) resources\n\nupdate: A boolean flag that indicated if the user is authorized to update resources\n\nreadACL: A boolean flag that indicated if the user is authorized to read the object's ACL\n\nupdateACL: A boolean flag that indicated if the user is authorized to update the object's ACL\n\n \nhrefs\n^^^^^\nAn array of hypertext links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    GET /groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/acls/test_user1 HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 16 Jan 2015 20:06:08 GMT\n    Content-Length: 660\n    Etag: \"2c410d1c469786f25ed0075571a8e7a3f313cec1\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n\n    {\n    \"acl\": {\n        \"create\": false,\n        \"delete\": false,\n        \"read\": true,\n        \"readACL\": false,\n        \"update\": false,\n        \"updateACL\": false,\n        \"userName\": \"test_user1\"\n    },\n    \"hrefs\": [\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e/acls/test_user1\",\n            \"rel\": \"self\"\n        },\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e\",\n            \"rel\": \"root\"\n        },\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/\",\n            \"rel\": \"home\"\n        },\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e\",\n            \"rel\": \"owner\"\n        }\n    ]\n    \nRelated Resources\n=================\n\n* :doc:`PUT_ACL`\n* :doc:`GET_ACLs`\n\n \n\n "
  },
  {
    "path": "docs/AclOps/GET_ACLs.rst",
    "content": "**********************************************\nGET ACLs\n**********************************************\n\nDescription\n===========\nReturns access information for all users defined in the ACL (Access Control List) \nfor the object with the UUID provided in the URI.\n\nRequests\n========\n\nSyntax\n------\n\nTo get the ACL for a domain:\n\n.. code-block:: http\n\n    GET /acls HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n\nTo get the ACL for a group:\n\n.. code-block:: http\n\n    GET /groups/<id>/acls HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n\nTo get the ACL for a dataset:\n\n.. code-block:: http\n\n    GET /datasets/<id>/acls HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n\nTo get the ACL for a committed datatype:\n\n.. code-block:: http\n\n    GET /datatypes/<id>/acls HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n\nwhere:\n    \n* <id> is the UUID of the requested dataset/group/committed datatype\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\n\nacls\n^^^^\nA JSON list that contains one element for each user specified in the ACL.\nThe elements will be JSON object that describe the users acces permisions.  
\nSubkeys of the element are are:\n\nuserName: the userid of the user ('default' for the default access)\n\ncreate: A boolean flag that indicated if the user is authorized to create new resources\n\ndelete: A boolean flag that indicated if the user is authorized to delete resources\n\nread: A boolean flag that indicated if the user is authorized to read (GET) resources\n\nupdate: A boolean flag that indicated if the user is authorized to update resources\n\nreadACL: A boolean flag that indicated if the user is authorized to read the object's ACL\n\nupdateACL: A boolean flag that indicated if the user is authorized to update the object's ACL\n\n \nhrefs\n^^^^^\nAn array of hypertext links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    GET /groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/acls  HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 16 Jan 2015 20:06:08 GMT\n    Content-Length: 660\n    Etag: \"2c410d1c469786f25ed0075571a8e7a3f313cec1\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n\n    {\n    \"acls\": [\n        {\n            \"create\": true,\n            \"delete\": true,\n            \"read\": true,\n            \"readACL\": true,\n            \"update\": true,\n            \"updateACL\": true,\n            \"userName\": \"test_user2\"\n        },\n        {\n            \"create\": false,\n            \"delete\": false,\n            \"read\": true,\n            \"readACL\": false,\n            \"update\": false,\n            \"updateACL\": false,\n            \"userName\": \"test_user1\"\n        },\n        {\n            \"create\": false,\n            \"delete\": false,\n            \"read\": false,\n            \"readACL\": false,\n            \"update\": false,\n            \"updateACL\": false,\n            \"userName\": \"default\"\n        }\n    ],\n    \"hrefs\": [\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e/acls\",\n            \"rel\": \"self\"\n        },\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e\",\n            \"rel\": \"root\"\n        },\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/\",\n            \"rel\": \"home\"\n        },\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e\",\n            \"rel\": \"owner\"\n        }\n    ]\n    \nRelated Resources\n=================\n\n* :doc:`PUT_ACL`\n* :doc:`GET_ACL`\n\n \n\n "
  },
  {
    "path": "docs/AclOps/PUT_ACL.rst",
    "content": "**********************************************\nPUT ACL\n**********************************************\n\nDescription\n===========\nUpdate the access information for the given user for the object with the UUID provided in the URI.\n\nRequests\n========\n\nSyntax\n------\n\nTo update a user's access information for a domain:\n\n.. code-block:: http\n\n    PUT /acls/<userid> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n\nTo update a user's access information for a group:\n\n.. code-block:: http\n\n    PUT /groups/<id>/acls/<userid> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n\nTo get a user's access information for a dataset:\n\n.. code-block:: http\n\n    PUT /datasets/<id>/acls/<userid> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n\nTo get a user's access information for a committed datatype:\n\n.. code-block:: http\n\n    PUT /datatypes/<id>/acls/<userid> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n\nwhere:\n    \n* <id> is the UUID of the requested dataset/group/committed datatype\n* <userid> is the userid for the requested user.  Use the special userid \"default\" to get the default access permisions for the object\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  
See :doc:`../CommonRequestHeaders`\n\nRequest Elements\n----------------\n\nThe request body most include a JSON object that has the following keys and boolean values:\n\n { \n 'read': <True or False>, \n \n 'create': <True or False>, \n \n 'update': <True or False>, \n \n 'delete': <True or False>, \n \n 'readACL': <True or False>, \n \n 'updateACL': <True or False> \n \n }\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\n \nhrefs\n^^^^^\nAn array of hypertext links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    PUT /groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/acls/test_user1 HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \n    { 'read': True, 'create': False, 'update': False, \n             'delete': False, 'readACL': False, 'updateACL': False }\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Fri, 16 Jan 2015 20:06:08 GMT\n    Content-Length: 660\n    Etag: \"2c410d1c469786f25ed0075571a8e7a3f313cec1\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n\n    \n    \"hrefs\": [\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e/acls/test_user1\",\n            \"rel\": \"self\"\n        },\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e\",\n            \"rel\": \"root\"\n        },\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/\",\n            \"rel\": \"home\"\n        },\n        {\n            \"href\": \"http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e\",\n            \"rel\": \"owner\"\n        }\n    ]\n    \nRelated Resources\n=================\n\n* :doc:`GET_ACL`\n* :doc:`GET_ACLs`\n\n \n\n "
  },
  {
    "path": "docs/AclOps/index.rst",
    "content": "####################\nAccess Control List\n####################\n\nAccess Control List (ACL) are key-value stores that can be used to manage what operations can \nbe performed by which user on group, dataset, or committed type objects.  Operations on other \nobjects (e.g. links, dataspace, or attributes) use the ACL of the object they belong to.\n\nEach ACL consists of 1 or more items in the form:\n\n(username, read, create, update, delete, readACL, updateACL)\n\nwhere username is a string, and read, create, update, delete, readACL, updateACL are booleans.\nThere flags have the following semantics when the given username is provided in the http\nAuthorization header:\n\n* read: The given user is authorized for read access to the resource (generally all GET requests)\n* create: The given user is authorized to create new resources (generally POST or PUT requests)\n* update: The given user is authorized to modified a resource (e.g. :doc:`../DatasetOps\\PUT_Value`)\n* delete: The given user is authorized to delete a resource (e.g. Delete a Group)\n* readACL: The given user is authorized to read the ACLs of a resource\n* updateACL: The given user is authorized to modify the ACLs of a resource\n\nA special username 'default' is used to denote the access permission for all other users who\nor not list in the ACL (including un-authenticated requests that don't provide a username).\n\nExample\n-------\n\nSuppose a given dataset has the following ACL:\n\n========   ====  ======   ======  ======  =======  ========\nusername   read  create   update  delete  readACL  writeACL\n========   ====  ======   ======  ======  =======  ========\ndefault    true  false    false   false   false    false\njoe        true  false    true    false   false    false\nann        true  true     true    true    true     true\n========   ====  ======   ======  ======  =======  ========\n\nThis ACL would enable anyone to read (perform GET requests).  
User 'joe' would be able \nto read and update (modify values in the dataset).  While user 'ann' would have full \ncontrol to do any operation on the dataset (including modifying permissions for herself or\nother users).\n\nThe following unauthenticated (no HTTP Authorization header) \nrequests on the dataset would be granted or denied as follows:\n\n* GET /datasets/<id> - granted (returns HTTP Status 200 - OK)\n* POST /datasets/<id>/value - granted (returns HTTP Status 200 - OK)\n* PUT /datasets/<id>/shape) - denied (returns HTTP Status 401 - Unauthorized)\n* PUT /datasets/<id>/attributes/<name> - denied (returns HTTP Status 401 - Unauthorized)\n* DELETE /datasets/<id>  - denied (returns HTTP Status 401 - Unauthorized)\n\nNext the same set of requests are sent with 'joe' as the user in the HTTP Authorization header:\n\n* GET /datasets/<id> - granted (returns HTTP Status 200 - OK)\n* POST /datasets/<id>/value - granted (returns HTTP Status 200 - OK)\n* PUT /datasets/<id>/shape) - grant (returns HTTP Status 200 - OK)\n* PUT /datasets/<id>/attributes/<name> - denied (returns HTTP Status 403 - Forbidden)\n* DELETE /datasets/<id>  - denied (returns HTTP Status 403 - Forbidden)\n\nFinally the same set of requests are sent with 'ann' as the user:\n\n* GET /datasets/<id> - granted (returns HTTP Status 200 - OK)\n* POST /datasets/<id>/value - granted (returns HTTP Status 200 - OK)\n* PUT /datasets/<id>/shape) - grant (returns HTTP Status 200 - OK)\n* PUT /datasets/<id>/attributes/<name> - denied (returns HTTP Status 201 - Created)\n* DELETE /datasets/<id>  - denied (returns HTTP Status 200 - OK)\n \nNote: HTTP Status 401 basically says: \"you can't have access until you tell me who your are\", \nwhile HTTP Status 403 says: \"I know who you are, but you don't have permissions to access this\nresource.\"\n\nRoot ACL Inheritance\n--------------------\n\nIn many cases it will be desired to have a default ACL that applies to each resource in the domain.\nThis can be accomplished 
by defining an ACL for the root group.  This will control the access \nrights for any resource unless of ACL is present in that resource for the requesting user.\n\nThe default ACL can be read or updated by forming a request with a uri that includes the root group id, \ni.e.: \"/groups/<root_id>/acls\", or by using the uri path for the domain, i.e. \"/acls\".\n\n\nFor a given user then, the permissions for a resource are found in the following way:\n\n#. If the user is present in the resources ACL, those permissions are used\n#. If no user is present in the resources ACL, but is present in the root group, those permissions are used\n#. Otherwise, if a 'default' user is present in the resource ACL, those permissions are used\n#. If a 'default' user is not present in the resource ACL, but is present in the root ACL, those permissions are used\n#. If no 'default' user is present in the root ACL, the permissions defined in the 'default_acl' config is used\n  \nList of Operations\n------------------\n\n.. toctree::\n   :maxdepth: 1\n\n   GET_ACL\n   GET_ACLs\n   PUT_ACL\n    \n    \n"
  },
  {
    "path": "docs/AdminTools.rst",
    "content": "###################\nAdmin Tools\n###################\n\nThe scripts described here are intended to be run on the server by \"privileged\" users.  These are all\nlocated in the ``util\\admin`` directory.\n\nmakepwd_file.py\n---------------\n\nThis script creates an initial password file \"passwd.h5\".  The password file will be used to manage \nhttp basic authentication.  After creation, move the file into the location referenced by \nthe 'password_file' configuration value.\n\nUsage:\n\n``python makepwd_file.py``\n\nUse the update_pwd.py utility to create user accounts.\n\nupdate_pwd.py\n-------------\n\nThis script can be used to add users and passwords to the password file, list information about\none or more users, or to update a user's information (e.g. change the password).\n\nUsage: \n\n``python update_pwd.py [-h] [-r] [-a] [-f FILE] [-u USER] [-p PASSWD]``\n  \nOptions:\n * ``-h``: print usage information\n * ``-r``: update a user's entry\n * ``-a``: add a user (requires -u and -p options)\n * ``-f``: password file to be used\n * ``-u``: print/update information for specified user (otherwise show all users)\n * ``-p``: password to be set for the given users\n \n\n  Example - list all users\n       ``python update_pwd.py -f passwd.h5``\n  Example - list user 'bob':\n       ``python update_pwd.py -f passwd.h5 -u bob``\n  Example - add a user 'ann':\n       ``python update_pwd.py -f passwd.h5 -a -u ann -p mysecret``\n  Example - changes password for user 'paul':\n       ``python update_pwd.py -f passwd.h5 -r -u paul -p mysecret2``\n       \n Note, there is no way to display the passwords for any user.  If a password is \n lost, that users password must be reset.\n  \n        \ngetacl.py\n-----------\n\nThis script displays ACL's of a given file or object within a file.\n\nusage: ``python getacl.py [-h] [-file <file>]  [-path <h5path>] [userid_1, userid_2, ... 
userid_n]``\n\nOptions:\n * ``-h``: print usage information\n * ``-file``: (required) data file to be used \n * ``-path``: h5path to object.  If not present, ACLs of the root group will be displayed\n * ``<userids>``: list of user ids to fetch ACLs for.  If not present, ACLs for all users will be printed\n\n \n  Example - get all ACLs of tall.h5 root group\n       ``python getacl.py -file ../../data/tall.h5``\n  Example - get ACLs for userid 123 of root group in tall.h5\n       ``python getacl.py -file ../../data/tall.h5 123``\n  Example - get ACLs for userid 123 of the dataset identified by path '/g1/g1.1/dset1.1.1'\n       ``python getacl.py -file ../../data/tall.h5 -path /g1/g1.1/dset1.1.1``\n       \nsetacl.py\n-----------\n\nThis script creates or modifies ACL's of a given file or object within a file.\n\nusage: ``python setacl.py [-h] [-file <file>]  [-path <h5path>] [+-][crudep] [userid_1, userid_2, ... userid_n]``\n\nOptions:\n * ``-h``: print usage information\n * ``-file``: (required) data file to be used \n * ``-path``: h5path to object.  If not present, ACLs of the root group will be modified\n * ``[+-][crudep]``: add (+) or remove (-) permisions for Create (c), Read (r), Update (u), Delete (d), rEadAcl (e), and Putacl (p)\n * ``<userids>``: list of user ids to sets ACLs for.  If not present, ACLs for the default user will be set.\n\n \n  Example - set default permission of tall.h5 to read only\n       ``python setacl.py -file ../../data/tall.h5 +r-cudep``\n  Example - give userid 123 full control of tall.h5:\n       ``python setacl.py -file ../../data/tall.h5 +crudep 123``\n  Example - give userid read/update access to dataset at path '/g1/g1.1/dset1.1.1' \n       ``python setacl.py -file ../../data/tall.h5 -path /g1/g1.1/dset1.1.1 +ru-cdep 123``\n         \n \n \n \n\n\n\n\n    \n"
  },
  {
    "path": "docs/AttrOps/DELETE_Attribute.rst",
    "content": "**********************************************\nDELETE Attribute\n**********************************************\n\nDescription\n===========\nThe implementation of the DELETE operation deletes the attribute named in the URI.  All \nattributes and links of the dataset will also be deleted.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    DELETE /groups/<id>/<name> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n* *<id>* is the UUID of the dataset/group/committed datatype\n* *<name>* is the url-encoded name of the requested attribute\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    DELETE /groups/36ae688a-ac0e-11e4-a44b-3c15c2da029e/attributes/attr1 HTTP/1.1\n    Content-Length: 0\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: tall_updated.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \nSample Response\n---------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Wed, 04 Feb 2015 01:36:17 GMT\n    Content-Length: 420\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n  \n    {\n    \"hrefs\": [\n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/36ae688a-ac0e-11e4-a44b-3c15c2da029e/attributes\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/36ae688a-ac0e-11e4-a44b-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/36ae688a-ac0e-11e4-a44b-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`GET_Attributes`\n* :doc:`GET_Attribute`\n* :doc:`../DatasetOps/GET_Dataset`\n* :doc:`../DatatypeOps/GET_Datatype`\n* :doc:`../GroupOps/GET_Group`\n* :doc:`PUT_Attribute`\n \n\n "
  },
  {
    "path": "docs/AttrOps/GET_Attribute.rst",
    "content": "**********************************************\nGET Attribute\n**********************************************\n\nDescription\n===========\nGets the specified attribute of a dataset, group, or committed datatype.\n\nRequests\n========\n\nSyntax\n------\n\nTo get an attribute of a group:\n\n.. code-block:: http\n\n    GET /groups/<id>/attributes/<name> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nTo get an attribute of a dataset:\n\n.. code-block:: http\n\n    GET /datasets/<id>/attributes/<name> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nTo get an attribute of a datatype:\n\n.. code-block:: http\n\n    GET /datatypes/<id>/attributes/<name> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n \nwhere:    \n    \n* *<id>* is the UUID of the dataset/group/committed datatype\n* *<name>* is the url-encoded name of the requested attribute\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\ntype\n^^^^\nA JSON object representing the type of the attribute.  See :doc:`../Types/index` for \ndetails of the type representation.\n\nshape\n^^^^^\n\nA JSON object that represents the dataspace of the attribute.  
Subkeys of shape are:\n\nclass: A string with one of the following values:\n\n * H5S_NULL: A null dataspace, which has no elements\n * H5S_SCALAR: A dataspace with a single element (although possibly of a complex datatype)\n * H5S_SIMPLE: A dataspace that consists of a regular array of elements\n \ndims: An integer array whose length is equal to the number of dimensions (rank) of the \ndataspace.  The value of each element gives the current size of each dimension.  Dims\nis not returned for H5S_NULL or H5S_SCALAR dataspaces.\n\nvalue\n^^^^^\nA JSON array (or string or number for scalar attributes) giving the values of the requested \nattribute.\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\nGet an attribute named \"attr1\" from a group with UUID: \"45a882e1-...\".\n\n.. code-block:: http\n\n    GET /groups/1a956e54-abf6-11e4-b878-3c15c2da029e/attributes/attr1 HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Tue, 03 Feb 2015 22:44:04 GMT\n    Content-Length: 648\n    Etag: \"55b2e2ce2d3a2449a49cfd76c4dae635ec43a150\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n\n    {\n    \"name\": \"attr1\", \n    \"type\": {\n        \"class\": \"H5T_INTEGER\",\n        \"base\": \"H5T_STD_I8LE\" \n    },\n    \"shape\": {\n        \"class\": \"H5S_SIMPLE\",\n        \"dims\": [10]\n    }, \n    \"value\": [97, 98, 99, 100, 101, 102, 103, 104, 105, 0], \n    \"created\": \"2015-02-03T22:40:09Z\", \n    \"lastModified\": \"2015-02-03T22:40:09Z\",   \n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e/attributes/attr1\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Attribute`\n* :doc:`GET_Attributes`\n* :doc:`../DatasetOps/GET_Dataset`\n* :doc:`../DatatypeOps/GET_Datatype`\n* :doc:`../GroupOps/GET_Group`\n* :doc:`PUT_Attribute`\n \n\n "
  },
  {
    "path": "docs/AttrOps/GET_Attributes.rst",
    "content": "**********************************************\nGET Attributes\n**********************************************\n\nDescription\n===========\nGets all the attributes of a dataset, group, or committed datatype.\nFor each attribute the request returns the attributes name, type, and shape.  To get \nthe attribute data use :doc:`GET_Attribute`.\n\nRequests\n========\n\nSyntax\n------\n\nTo get the attributes of a group:\n\n.. code-block:: http\n\n    GET /groups/<id>/attributes HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nTo get the attributes of a dataset:\n\n.. code-block:: http\n\n    GET /datasets/<id>/attributes HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nTo get the attributes of a datatype:\n\n.. code-block:: http\n\n    GET /datatypes/<id>/attributes HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n \nwhere:    \n    \n* *<id>* is the UUID of the dataset/group/committed datatype\n    \nRequest Parameters\n------------------\nThis implementation of the operation uses the following request parameters (both \noptional):\n\nLimit\n^^^^^\nIf provided, a positive integer value specifying the maximum number of attributes to return.\n\nMarker\n^^^^^^\nIf provided, a string value indicating that only attributes that occur after the\nmarker value will be returned.\n*Note:* the marker expression should be url-encoded.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  
See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\n\nattributes\n^^^^^^^^^^\n\nAn array of JSON objects with an element for each returned attribute.\nEach element will have keys: name, type, shape, created, and lastModified.  See \n:doc:`GET_Attribute` for a description of these keys.\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\nGet attributes of a group with UUID: \"45a882e1-...\".\n\n.. code-block:: http\n\n    GET /groups/1a956e54-abf6-11e4-b878-3c15c2da029e/attributes HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Wed, 04 Feb 2015 00:49:28 GMT\n    Content-Length: 807\n    Etag: \"7cbeefcf8d9997a8865bdea3bf2d541a14e9bf71\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n\n    {\n    \"attributes\": [\n        {\n        \"name\": \"attr1\", \n        \"type\": {\n            \"base\": \"H5T_STD_I8LE\", \n            \"class\": \"H5T_INTEGER\"\n            },\n        \"shape\": {\n            \"dims\": [10], \n            \"class\": \"H5S_SIMPLE\"\n            },\n        \"created\": \"2015-02-03T22:40:09Z\",\n        \"lastModified\": \"2015-02-03T22:40:09Z\"\n        },\n        {\n        \"name\": \"attr2\", \n        \"type\": {\n            \"base\": \"H5T_STD_I32BE\", \n            \"class\": \"H5T_INTEGER\"\n            }, \n        \"shape\": {\n            \"dims\": [2, 2], \n            \"class\": \"H5S_SIMPLE\"\n            }, \n        \"created\": \"2015-02-03T22:40:09Z\",\n        \"lastModified\": \"2015-02-03T22:40:09Z\"\n        }\n      ], \n      \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e/attributes\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]\n    }\n    \n\nSample Request - get Batch\n---------------------------\n\nGet the five attributes that occur after attribute \"a0004\" from a group with UUID: \n\"45a882e1-...\".\n\n.. code-block:: http\n\n    GET /groups/4cecd4dc-ac0a-11e4-af59-3c15c2da029e/attributes?Marker=a0004&Limit=5 HTTP/1.1\n    host: attr1k.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response - get Batch\n---------------------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Wed, 04 Feb 2015 01:08:16 GMT\n    Content-Length: 1767\n    Etag: \"9483f4356e08d12b719aa64ece09e659b05adaf2\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n \n    {\n    \"attributes\": [\n        {\n        \"name\": \"a0005\", \n        \"type\": {\"cset\": \"H5T_CSET_ASCII\", \"order\": \"H5T_ORDER_NONE\", \"class\": \"H5T_STRING\", \"strpad\": \"H5T_STR_NULLTERM\", \"strsize\": \"H5T_VARIABLE\"}, \n        \"shape\": {\"class\": \"H5S_SCALAR\"}, \n        \"created\": \"2015-02-03T22:40:09Z\",\n        \"lastModified\": \"2015-02-03T22:40:09Z\"\n        }, {\n        \"name\": \"a0006\", \n        \"type\": {\"cset\": \"H5T_CSET_ASCII\", \"order\": \"H5T_ORDER_NONE\", \"class\": \"H5T_STRING\", \"strpad\": \"H5T_STR_NULLTERM\", \"strsize\": \"H5T_VARIABLE\"}, \n        \"shape\": {\"class\": \"H5S_SCALAR\"}, \n        \"created\": \"2015-02-03T22:40:09Z\",\n        \"lastModified\": \"2015-02-03T22:40:09Z\"\n        }, {\n        \"name\": \"a0007\",\n        \"type\": {\"cset\": \"H5T_CSET_ASCII\", \"order\": \"H5T_ORDER_NONE\", \"class\": \"H5T_STRING\", \"strpad\": \"H5T_STR_NULLTERM\", \"strsize\": \"H5T_VARIABLE\"}, \n        \"shape\": {\"class\": \"H5S_SCALAR\"}, \n        \"created\": \"2015-02-03T22:40:09Z\",\n        \"lastModified\": \"2015-02-03T22:40:09Z\"\n        }, {\n        \"name\": \"a0008\", \n        \"type\": {\"cset\": \"H5T_CSET_ASCII\", \"order\": \"H5T_ORDER_NONE\", \"class\": \"H5T_STRING\", \"strpad\": \"H5T_STR_NULLTERM\", \"strsize\": \"H5T_VARIABLE\"}, \n        \"shape\": {\"class\": \"H5S_SCALAR\"}, \n        \"created\": \"2015-02-03T22:40:09Z\",\n        \"lastModified\": \"2015-02-03T22:40:09Z\"\n        }, {\n        \"name\": \"a0009\", \n        \"type\": {\"cset\": \"H5T_CSET_ASCII\", \"order\": \"H5T_ORDER_NONE\", \"class\": \"H5T_STRING\", \"strpad\": \"H5T_STR_NULLTERM\", \"strsize\": \"H5T_VARIABLE\"}, \n        
\"shape\": {\"class\": \"H5S_SCALAR\"}, \n        \"created\": \"2015-02-03T22:40:09Z\",\n        \"lastModified\": \"2015-02-03T22:40:09Z\"\n        }\n      ], \n    \"hrefs\": [\n        {\"href\": \"http://attr1k.test.hdfgroup.org/groups/4cecd4dc-ac0a-11e4-af59-3c15c2da029e/attributes\", \"rel\": \"self\"}, \n        {\"href\": \"http://attr1k.test.hdfgroup.org/groups/4cecd4dc-ac0a-11e4-af59-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://attr1k.test.hdfgroup.org/groups/4cecd4dc-ac0a-11e4-af59-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://attr1k.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Attribute`\n* :doc:`GET_Attributes`\n* :doc:`../DatasetOps/GET_Dataset`\n* :doc:`../DatatypeOps/GET_Datatype`\n* :doc:`../GroupOps/GET_Group`\n* :doc:`PUT_Attribute`\n \n\n "
  },
  {
    "path": "docs/AttrOps/PUT_Attribute.rst",
    "content": "**********************************************\nPUT Attribute\n**********************************************\n\nDescription\n===========\nCreates a new attribute in a group, dataset, or committed datatype.\n\n*Note*: The new attribute will replace any existing attribute with the same name.\n\nRequests\n========\n\nSyntax\n------\n\nTo create a group attribute:\n\n.. code-block:: http\n\n    PUT /groups/<id>/attributes/<name> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nTo create a dataset attribute:\n\n.. code-block:: http\n\n    PUT /datasets/<id>/attributes/<name> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nTo create a committed datatype attribute:\n\n.. code-block:: http\n\n    PUT /datatypes/<id>/attributes/<name> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n* *<id>* is the UUID of the dataset/group/committed datatype\n* *<name>* is the url-encoded name of the requested attribute\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nRequest Elements\n----------------\n\nThe request body must include a JSON object with \"type\" key.  Optionally a \"shape\"\nkey can be provide to make a non-scalar attribute.\n\n\ntype\n^^^^\n\nSpecify's the desired type of the attribute.  Either a string that is one of the \npredefined type values, a uuid of a committed type, or a JSON object describing the type.\nSee :doc:`../Types/index` for details of the type specification.\n\nshape\n^^^^^^\n\nEither a string with the value ``H5S_NULL`` or an\ninteger array describing the dimensions of the attribute. 
\nIf shape is not provided, a scalar attribute will be created.\nIf a shape value of ``H5S_NULL`` is specified a null space attribute will be created.\n(Null space attributes can not contain any data values.)\n\nvalue\n^^^^^\n\nA JSON array (or number or string for scalar attributes with primitive types) that \nspecifies the initial values for the attribute.  The elements of the array must be \ncompatible with the type of the attribute.\nNot valid to provide if the shape is ``H5S_NULL``.\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request - scalar attribute\n----------------------------------\n\nCreate an integer scalar attribute in the group with UUID of \"be319519-\" named \"attr4\".  \nThe value of the attribute will be 42.\n\n.. code-block:: http\n\n    PUT /groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr4 HTTP/1.1\n    Content-Length: 38\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: tall_updated.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n    \n.. code-block:: json\n\n    {\n    \"type\": \"H5T_STD_I32LE\", \n    \"value\": 42\n    }\n    \nSample Response - scalar attribute\n-----------------------------------\n\n.. code-block:: http\n\n   HTTP/1.1 201 Created\n   Date: Thu, 05 Feb 2015 06:25:30 GMT\n   Content-Length: 359\n   Content-Type: application/json\n   Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n  \n    {\"hrefs\": [\n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr4\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e\", \"rel\": \"root\"}\n      ]\n    }\n    \nSample Request - string attribute\n----------------------------------\n\nCreate a two-element, fixed width string  attribute in the group with UUID of \n\"be319519-\" named \"attr6\".  \nThe attributes values will be \"Hello, ...\" and \"Goodbye!\".\n\n.. code-block:: http\n\n    PUT /groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr6 HTTP/1.1\n    Content-Length: 162\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: tall_updated.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n    \n.. code-block:: json\n  \n    {\n    \"shape\": [2], \n    \"type\": {\n        \"class\": \"H5T_STRING\",\n        \"cset\": \"H5T_CSET_ASCII\",  \n        \"strpad\": \"H5T_STR_NULLPAD\", \n        \"strsize\": 40\n    }, \n    \"value\": [\"Hello, I'm a fixed-width string!\", \"Goodbye!\"]\n    }\n    \nSample Response - string attribute\n-----------------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Thu, 05 Feb 2015 06:42:14 GMT\n    Content-Length: 359\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n     \n    {\n    \"hrefs\": [\n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr6\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e\", \"rel\": \"root\"}\n      ]\n    }\n    \nSample Request - compound type\n----------------------------------\n\nCreate a two-element, attribute of group with UUID of \n\"be319519-\" named \"attr_compound\".   The attribute has a compound type with an integer\nand a floating point element. \n\n.. code-block:: http\n\n    PUT /groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr_compound HTTP/1.1\n    Content-Length: 187\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: tall_updated.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n  \n    \n    {\n    \"shape\": 2, \n    \"type\": {\n        \"class\": \"H5T_COMPOUND\",\n        \"fields\": [\n            {\"type\": \"H5T_STD_I32LE\", \"name\": \"temp\"}, \n            {\"type\": \"H5T_IEEE_F32LE\", \"name\": \"pressure\"}\n        ] \n    }, \n    \"value\": [[55, 32.34], [59, 29.34]]\n    }\n    \nSample Response - compound type \n-----------------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Thu, 05 Feb 2015 06:49:19 GMT\n    Content-Length: 367\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n       \n    {\n    \"hrefs\": [\n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr_compound\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e\", \"rel\": \"root\"}\n      ]\n    }\n    \n    \n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Attribute`\n* :doc:`GET_Attribute`\n* :doc:`GET_Attributes`\n* :doc:`../DatasetOps/GET_Dataset`\n* :doc:`../DatatypeOps/GET_Datatype`\n* :doc:`../GroupOps/GET_Group`\n \n\n \n"
  },
  {
    "path": "docs/AttrOps/index.rst",
    "content": "########################\nAttributes\n########################\n\nLike datasets (see :doc:`../DatasetOps/index`), attributes are objects that contain a \nhomogeneous collection of elements\nand have associatted type information.  Attributes are typically small metadata objects\nthat describe some aspect of the object (dataset, group, or committed datatype) that \ncontains the attribute.\n\nCreating Attributes\n--------------------\n\nUse :doc:`PUT_Attribute` to create an attribute.  If there is an existing attribute\nwith the same name, it will be overwritten by this request.  You can use\n:doc:`GET_Attribute` to inquire if the attribute already exists or not.\nWhen creating an attribute, the attribute name, type, and shape (for non-scalar\nattributes) is included in the request.\n\n\nReading and Writing Data\n-------------------------\nUnlike datasets, attribute's data can not be\nread or written partially.  Data can only be written as part of the PUT requests.  \nReading the data of an attribute is done by :doc:`GET_Attribute`.\n\nListing attributes\n------------------\nUse :doc:`GET_Attributes` to get information about all the attributes of a group, \ndataset, or committed datatype.\n\nDeleting Attributes\n-------------------\n\nUse :doc:`DELETE_Attribute` to delete an attribute.\n\nList of Operations\n------------------\n\n.. toctree::\n   :maxdepth: 1\n\n   DELETE_Attribute\n   GET_Attribute\n   GET_Attributes\n   PUT_Attribute\n \n    \n    \n"
  },
  {
    "path": "docs/Authorization.rst",
    "content": "*********************************\nAuthorization and Authentication\n*********************************\n\nRequest Authentication\n-----------------------\nh5serv supports HTTP Basic authentication to authenticate users by comparing an encrypted \nusername and password against a value stored within a password file.  \n(See :doc:`AdminTools` to create a password file and add user accounts.) \n\nIf neither the requested object (Group, Dataset, or Committed Datatype) nor the object's root group\nhas an Access Control List (ACL), authorization is not required and no authentication string\nneeds to be supplied. See :doc:`../AclOps`) for information on how to use ACL's.\n\nIf the requested object (or object's root group), does have an ACL, authorization may be required \n(if the object is not publically readable),\nand if so the requestor will need to provide an Authorization header in the request.  If \nauthoriazation is required, but not provided, the server will return an HTTP Status of 401 - \nUnauthorized.\n\nIf authorization is required (i.e. a 401 response is received), the client should provide an authorization header in the\nhttp request which conveys the userid and password.\n\nThe authorization string is constructed as follows:\n\n 1. Username and password are combined into a string \"username:password\". Note that username cannot contain the \":\" character\n 2. The resulting string is then encoded using the RFC2045-MIME variant of Base64, except not limited to 76 char/line\n 3. The authorization method and a space i.e. \"Basic \" is then put before the encoded string\n\nFor example, if the user agent uses 'Aladdin' as the username and 'open sesame' as the password then the field is \nformed as follows:\n``Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==``.  When passwords are being sent over an open\nnetwork, SSL connections should be used to avoid \"man in the middle attacks\".  
The Base64 encoding is\neasily reversible and if using plain http there is no assurance that the password will not be compromised.\n\nIf the authorization string is validated, the server will verify the request is authorized as\nper the object's ACL list.  If not authorized a http status 403 - Forbidden will be returned.\n\n\nUser ids and passwords\n----------------------\n\nUser ids and passwords are maintained in an HDF5 file referenced in the server config: \n'password_file'.  The admin tool (See :doc:`AdminTools`) script: update_pwd.py can be used \nto create new users and update passwords.\n\n\n \n"
  },
  {
    "path": "docs/CommonErrorResponses.rst",
    "content": "***************************\nCommon Error Responses\n***************************\n\nFor each request, h5serv returns a standard HTTP status code as described below.\nIn general 2xx codes indicate success, 3xx codes some form of redirection, 4xx codes \nclient error, and 5xx codes for server errors.  In addition to the numeric code, h5serv\nwill return an informational message as part of the response providing further \ninformation on the nature of the error.\n\n * ``200 OK`` - The request was completed successfully\n * ``201 Created`` - The request was fulfilled and a new resource (e.g. group, dataset, attribute was created) \n * ``400 Bad Request`` - The request was not structured correctly (e.g. a required key was missing).\n * ``401 Unauthorization`` - Use authentitcation is required, supply an Authentication header with valid user and password\n * ``403 Forbidden`` - The requesting user does not have access to the requested resource\n * ``404 Not Found`` - The requested resource was not found (e.g. ``GET /groups/<id>`` where <id> was not a valid identifier for a group in the domain).\n * ``409 Conflict`` - This error is used with PUT requests where the resources cannot be created because there is an existing resource with the same name (e.g. PUT / where the requested domain is already present).\n * ``410 Gone`` - The resource requested has been recently deleted.\n * ``500 Internal Error`` - An unexpected error that indicates some problem occurred on the server.\n * ``501 Not Implemented`` - The request depends on a feature that is not yet implemented.\n"
  },
  {
    "path": "docs/CommonRequestHeaders.rst",
    "content": "***********************\nCommon Request Headers\n***********************\n\nThe following describe common HTTP request headers as used in h5serv:\n\n * Request line: The first line of the request, the format is of the form HTTP verb (GET, PUT, DELETE, or POST) followed by the path to the resource (e.g. /group/<uuid>.  Some operations take one or more query parameters (see relevant documentation) \n * Accept: Specified the media type that is acceptable for the response.  Valid values are \"application/json\", and \"*/*.  In addiiton, GET Value (see :doc:`DatasetOps/GET_Value`) supports the value \"application/octet-stream\"\n * Authorization: A string that provides the requester's credentials for the request. See  :doc:`Authorization`\n * Host: the domain (i.e. related collection of groups, datasets, and attributes) that the request should apply to\n \n Note: the host header can also be provided as a query paramter.  Example: https://data.hdfgroup.org:7258/?host=tall.test.data.hdfgroup.org \n"
  },
  {
    "path": "docs/CommonResponseHeaders.rst",
    "content": "***************************\nCommon Response Headers\n***************************\n\nThe following describes some of the common response lines returned by h5serv.\n\n * Status Line: the first line of the ressponse will always by: \"``HTTP/1.1``\" followed by \n    a status code (e.g. 200) followed by a reason message (e.g. \"``OK``\").  For errors, \n    an additional error message may be included on this line.\n    \n * Content-Length: the response size in bytes.\n \n * Etag: a hash code that indicates the state of the requested resource.  If the client\n    sees the same Etag value for the same request, it can assume the resource has not           \n    changes since the last request.\n    \n * Content-Type: the mime type of the response.  Currently always \"``application/json``\".\n    \n"
  },
  {
    "path": "docs/DatasetOps/DELETE_Dataset.rst",
    "content": "**********************************************\nDELETE Dataset\n**********************************************\n\nDescription\n===========\nThe implementation of the DELETE operation deletes the dataset named in the URI.  All \nattributes and links of the dataset will also be deleted.  In addition any \nlinks from other groups to the deleted group will be removed.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    DELETE /datasets/<id> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n*<id>* is the UUID of the requested dataset to be deleted.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    DELETE /datasets/289bb654-a2c6-11e4-97d8-3c15c2da029e HTTP/1.1\n    Content-Length: 0\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: tall_dset112_deleted.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \nSample Response\n---------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 23 Jan 2015 06:07:49 GMT\n    Content-Length: 287\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n\n    {\n    \"hrefs\": [\n        {\"href\": \"http://tall_dset112_deleted.test.hdfgroup.org/datasets\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall_dset112_deleted.test.hdfgroup.org/groups/289b4873-a2c6-11e4-adfb-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall_dset112_deleted.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`GET_Datasets`\n* :doc:`GET_Dataset`\n* :doc:`POST_Dataset`\n \n\n "
  },
  {
    "path": "docs/DatasetOps/GET_Dataset.rst",
    "content": "**********************************************\nGET Dataset\n**********************************************\n\nDescription\n===========\nReturns information about the dataset with the UUID given in the URI.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /datasets/<id> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n**<id>** is the UUID of the requested dataset.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nid\n^^\nThe UUID of the dataset object.\n\ntype\n^^^^\nA JSON object representing the type of the dataset.  See :doc:`../Types/index` for \ndetails of the type representation.\n\nshape\n^^^^^\nA JSON object representing the shape of the dataset.  See :doc:`GET_DatasetShape` for\ndetails of the shape representation.\n\ncreationProperties\n^^^^^^^^^^^^^^^^^^\nA JSON object that describes chunk layout, filters, fill value, and other aspects of the dataset.\nSee: http://hdf5-json.readthedocs.org/en/latest/bnf/dataset.html#grammar-token-dcpl for a complete \ndescription of fields that can be used.\n\nattributeCount\n^^^^^^^^^^^^^^\nThe number of attributes belonging to the dataset.\n\ncreated\n^^^^^^^\nA timestamp giving the time the dataset was created in UTC (ISO-8601 format).\n\nlastModified\n^^^^^^^^^^^^\nA timestamp giving the most recent time the group has been modified (i.e. 
attributes or \nlinks updated) in UTC (ISO-8601 format).\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    GET /datasets/c8d83759-a2c6-11e4-8713-3c15c2da029e HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 23 Jan 2015 06:15:33 GMT\n    Content-Length: 755\n    Etag: \"ecbd7e52654b0a8f4ccbebac06175ce5df5f8c79\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n       \n    {\n    \"id\": \"c8d83759-a2c6-11e4-8713-3c15c2da029e\",\n    \"shape\": {\n        \"dims\": [10], \n        \"class\": \"H5S_SIMPLE\"\n    },\n    \"type\": {\n        \"base\": \"H5T_IEEE_F32BE\", \n        \"class\": \"H5T_FLOAT\"\n    },\n    \"creationProperties\": {\n        \"allocTime\": \"H5D_ALLOC_TIME_LATE\",\n        \"fillTime\": \"H5D_FILL_TIME_IFSET\",\n        \"layout\": {\n            \"class\": \"H5D_CONTIGUOUS\"\n        }\n    },\n    \"attributeCount\": 0,  \n    \"created\": \"2015-01-23T06:12:18Z\", \n    \"lastModified\": \"2015-01-23T06:12:18Z\",     \n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/c8d83759-a2c6-11e4-8713-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/c8d7842b-a2c6-11e4-b4f1-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/c8d83759-a2c6-11e4-8713-3c15c2da029e/attributes\", \"rel\": \"attributes\"}, \n        {\"href\": 
\"http://tall.test.hdfgroup.org/datasets/c8d83759-a2c6-11e4-8713-3c15c2da029e/value\", \"rel\": \"data\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ] \n    }\n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Dataset`\n* :doc:`../AttrOps/GET_Attributes`\n* :doc:`GET_DatasetShape`\n* :doc:`GET_DatasetType`\n* :doc:`GET_Datasets`\n* :doc:`GET_Value`\n* :doc:`POST_Value`\n* :doc:`PUT_Value`\n \n\n "
  },
  {
    "path": "docs/DatasetOps/GET_DatasetShape.rst",
    "content": "**********************************************\nGET Shape\n**********************************************\n\nDescription\n===========\nGets shape of a dataset.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /datasets/<id>/shape HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n*<id>* is the UUID of the dataset that shape is requested for.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nshape\n^^^^^\n\nA JSON object with the following keys:\n\nclass: A string with one of the following values:\n\n * H5S_NULL: A null dataspace, which has no elements\n * H5S_SCALAR: A dataspace with a single element (although possibly of a complext datatype)\n * H5S_SIMPLE: A dataspace that consists of a regular array of elements\n \ndims: An integer array whose length is equal to the number of dimensions (rank) of the \ndataspace.  The value of each element gives the the current size of each dimension.  Dims\nis not returned for H5S_NULL or H5S_SCALAR dataspaces.\n\nmaxdims: An integer array whose length is equal to the number of dimensions of the \ndataspace.  The value of each element gives the maximum size of each dimension. A value\nof 0 indicates that the dimension has *unlimited* extent.  
maxdims is not returned for\nH5S_SIMPLE dataspaces which are not extensible or for H5S_NULL or H5S_SCALAR dataspaces.\n\nfillvalue: A value of compatible with the dataset's type, which gives the *fill* value\nfor the dataset (the value for which elements will be initialized to when a dataspace\nis extended).  fillvalue is only returned for extensible dataspaces.\n\ncreated\n^^^^^^^\nA timestamp giving the time the datashape (same as the dataset) was created in \nUTC (ISO-8601 format).\n\nlastModified\n^^^^^^^^^^^^\nA timestamp giving the most recent time the dataspace has been modified (i.e. a  \ndimension has been extended) in UTC (ISO-8601 format).\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    GET /datasets/3b57b6d4-a6a8-11e4-96b5-3c15c2da029e/shape HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Wed, 28 Jan 2015 04:43:41 GMT\n    Content-Length: 445\n    Etag: \"76ed777f151c70d0560d1414bffe1515a3df86b0\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n    \n   {    \n   \"shape\": {\n        \"class\": \"H5S_SIMPLE\",\n        \"dims\": [10]\n    },\n    \"created\": \"2015-01-28T04:40:23Z\",\n    \"lastModified\": \"2015-01-28T04:40:23Z\", \n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/3b57b6d4-a6a8-11e4-96b5-3c15c2da029e\", \"rel\": \"self\"},\n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/3b57b6d4-a6a8-11e4-96b5-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/3b56ee54-a6a8-11e4-b2ae-3c15c2da029e\", \"rel\": \"root\"}\n      ]\n    }\n    \nSample Request - Resizable\n--------------------------\n\n.. code-block:: http\n\n    GET /datasets/a64010e8-a6aa-11e4-98c8-3c15c2da029e/shape HTTP/1.1\n    host: resizable.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response - Resizable\n----------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Wed, 28 Jan 2015 05:00:59 GMT\n    Content-Length: 500\n    Etag: \"1082800980d6809a8008b22e225f1adde8afc73f\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n       \n    {\n    \"shape\": {\n        \"class\": \"H5S_SIMPLE\",\n        \"dims\": [10, 10], \n        \"maxdims\": [10, 0]\n    }, \n    \"created\": \"2015-01-28T04:40:23Z\",\n    \"lastModified\": \"2015-01-28T04:40:23Z\", \n    \"hrefs\": [\n        {\"href\": \"http://resizable.test.hdfgroup.org/datasets/a64010e8-a6aa-11e4-98c8-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://resizable.test.hdfgroup.org/datasets/a64010e8-a6aa-11e4-98c8-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://resizable.test.hdfgroup.org/groups/a63f5dcf-a6aa-11e4-ab68-3c15c2da029e\", \"rel\": \"root\"}\n      ] \n    }\n    \nRelated Resources\n=================\n\n* :doc:`GET_Dataset`\n* :doc:`GET_DatasetType`\n* :doc:`PUT_DatasetShape`\n \n\n \n"
  },
  {
    "path": "docs/DatasetOps/GET_DatasetType.rst",
    "content": "**********************************************\nGET Type\n**********************************************\n\nDescription\n===========\nGets Type Information for a dataset.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /datasets/<id>/type HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n*<id>* is the UUID of the dataset the type information is requested for.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\ntype\n^^^^\nA JSON object representing the type definition for the dataset. See :doc:`../Types/index`\nfor information on how different types are represented.\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request - Predefined Type\n--------------------------------\n\n.. code-block:: http\n\n    GET /datasets/ba06ce68-a6b5-11e4-8ed3-3c15c2da029e/type HTTP/1.1\n    host: scalar.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response - Predefined Type\n---------------------------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Wed, 28 Jan 2015 06:20:16 GMT\n    Content-Length: 519\n    Etag: \"802b160bf786596a9cb9f6d5cd6faa4fe1127e8c\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n\n    {\n    \"type\": {\n        \"class\": \"H5T_INTEGER\", \n        \"order\": \"H5T_ORDER_LE\", \n        \"base_size\": 4, \n        \"base\": \"H5T_STD_I32LE\", \n        \"size\": 4\n    }, \n    \"hrefs\": [\n        {\"href\": \"http://scalar.test.hdfgroup.org/datasets/ba06ce68-a6b5-11e4-8ed3-3c15c2da029e/type\", \"rel\": \"self\"}, \n        {\"href\": \"http://scalar.test.hdfgroup.org/datasets/ba06ce68-a6b5-11e4-8ed3-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://scalar.test.hdfgroup.org/groups/ba06992e-a6b5-11e4-9ba5-3c15c2da029e\", \"rel\": \"root\"}\n      ] \n    }\n    \nSample Request - Compound Type\n--------------------------------\n\n.. code-block:: http\n\n    GET /datasets/b9edddd7-a6b5-11e4-9afd-3c15c2da029e/type HTTP/1.1\n    host: compound.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response - Compound Type\n--------------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Wed, 28 Jan 2015 06:20:16 GMT\n    Content-Length: 1199\n    Etag: \"1f97eac24aa18d3c462a2f2797c4782a1f2a0aa2\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n\n    {\n    \"type\": {\n        \"class\": \"H5T_COMPOUND\",\n        \"fields\": [\n            {\n            \"type\": {\n                \"order\": \"H5T_ORDER_LE\", \n                \"base_size\": 8, \n                \"class\": \"H5T_INTEGER\", \n                \"base\": \"H5T_STD_I64LE\", \n                \"size\": 8}, \n            \"name\": \"date\"\n            }, {\n            \"type\": {\n                \"strpad\": \"H5T_STR_NULLPAD\", \n                \"base_size\": 6, \"order\": \"H5T_ORDER_NONE\", \n                \"cset\": \"H5T_CSET_ASCII\", \n                \"strsize\": 6, \n                \"class\": \"H5T_STRING\", \n                \"size\": 6}, \n            \"name\": \"time\"\n            }, {\n            \"type\": {\n                \"order\": \"H5T_ORDER_LE\", \n                \"base_size\": 8, \n                \"class\": \"H5T_INTEGER\", \n                \"base\": \"H5T_STD_I64LE\", \n                \"size\": 8}, \n            \"name\": \"temp\"\n            }, {\n            \"type\": {\n                \"order\": \"H5T_ORDER_LE\", \n                \"base_size\": 8, \n                \"class\": \"H5T_FLOAT\", \n                \"base\": \"H5T_IEEE_F64LE\", \n                \"size\": 8}, \n            \"name\": \"pressure\"\n            }, {\n                \"type\": {\n                    \"strpad\": \"H5T_STR_NULLPAD\", \n                    \"base_size\": 6, \n                    \"order\": \"H5T_ORDER_NONE\", \n                    \"cset\": \"H5T_CSET_ASCII\", \n                    \"strsize\": 6, \n                    \"class\": \"H5T_STRING\", \n                    \"size\": 6}, \n                \"name\": \"wind\"}\n            ] \n        }, \n        \"hrefs\": [\n            {\"href\": \"http://compound.test.hdfgroup.org/datasets/b9edddd7-a6b5-11e4-9afd-3c15c2da029e/type\", \"rel\": \"self\"}, \n            {\"href\": 
\"http://compound.test.hdfgroup.org/datasets/b9edddd7-a6b5-11e4-9afd-3c15c2da029e\", \"rel\": \"owner\"}, \n            {\"href\": \"http://compound.test.hdfgroup.org/groups/b9eda805-a6b5-11e4-aa52-3c15c2da029e\", \"rel\": \"root\"}\n          ] \n        }\n    \nRelated Resources\n=================\n\n* :doc:`GET_Dataset`\n* :doc:`GET_DatasetShape`\n* :doc:`POST_Dataset`\n \n\n "
  },
  {
    "path": "docs/DatasetOps/GET_Datasets.rst",
    "content": "**********************************************\nGET Datasets\n**********************************************\n\nDescription\n===========\nReturns UUIDs for all the datasets in a domain.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /datasets HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nRequest Parameters\n------------------\nThis implementation of the operation uses the following request parameters (both \noptional):\n\nLimit\n^^^^^\nIf provided, a positive integer value specifying the maximum number of UUID's to return.\n\nMarker\n^^^^^^\nIf provided, a string value indicating that only UUID's that occur after the\nmarker value will be returned.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\ndatasets\n^^^^^^^^\nAn array of UUID's, one for each dataset in the domain.\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    GET /datasets HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 23 Jan 2015 06:33:36 GMT\n    Content-Length: 413\n    Etag: \"977e96c7bc63a6e05d10d56565df2ab8d30e404d\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n  \n    \n    {\n    \"datasets\": [\n        \"c8d7dd14-a2c6-11e4-a68c-3c15c2da029e\", \n        \"c8d7f159-a2c6-11e4-99af-3c15c2da029e\", \n        \"c8d83759-a2c6-11e4-8713-3c15c2da029e\", \n        \"c8d84a8a-a2c6-11e4-b457-3c15c2da029e\"\n      ],\n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/datasets\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/c8d7842b-a2c6-11e4-b4f1-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]\n    }\n    \nSample Request with Marker and Limit\n------------------------------------\n\nThis example uses the \"Marker\" request parameter to return only UUIDs after the given\nMarker value.\nThe \"Limit\" request parameter is used to limit the number of UUIDs in the response to 5.\n\n.. code-block:: http\n\n    GET /datasets?Marker=817db263-a2cc-11e4-87f2-3c15c2da029e&Limit=5 HTTP/1.1\n    host: dset1k.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n \nSample Response with Marker and Limit\n-------------------------------------\n\n.. code-block:: http\n \n    HTTP/1.1 200 OK\n    Date: Fri, 23 Jan 2015 06:53:52 GMT\n    Content-Length: 459\n    Etag: \"cb708d4839cc1e165fe6bb30718e49589ef140f4\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n   \n.. 
code-block:: json\n     \n    {\n    \"datasets\": [\n        \"817dcfb8-a2cc-11e4-9197-3c15c2da029e\", \n        \"817de9ee-a2cc-11e4-8378-3c15c2da029e\", \n        \"817e028a-a2cc-11e4-8ce3-3c15c2da029e\", \n        \"817e1b61-a2cc-11e4-ba39-3c15c2da029e\", \n        \"817e341c-a2cc-11e4-a16f-3c15c2da029e\"\n      ],\n    \"hrefs\": [\n        {\"href\": \"http://dset1k.test.hdfgroup.org/datasets\", \"rel\": \"self\"}, \n        {\"href\": \"http://dset1k.test.hdfgroup.org/groups/81760a80-a2cc-11e4-bb55-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://dset1k.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]\n    } \n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Dataset`\n* :doc:`GET_Dataset`\n* :doc:`POST_Dataset`\n \n\n "
  },
  {
    "path": "docs/DatasetOps/GET_Value.rst",
    "content": "**********************************************\nGET Value\n**********************************************\n\nDescription\n===========\nGets data values of a dataset.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /datasets/<id>/value HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n*<id>* is the UUID of the requested dataset.\n    \nRequest Parameters\n------------------\n\nselect\n^^^^^^\nOptionally the request can provide a select value to indicate a hyperslab selection for\nthe values to be returned - i.e. a rectangular (in 1, 2, or more dimensions) region of \nthe dataset.   Format is the following as a url-encoded value:\n\n[dim1_start:dim1_end:dim1_step, dim2_start:dim2_end:dim2_step, ... , dimn_start:dimn_stop:dimn_step]\n\nThe number of tuples \"start:stop:step\" should equal the number of dimensions of the dataset. \n\nFor each tuple:\n\n* start must be greater than equal to zero and less than the dimension extent\n* stop must be greater than or equal to start and less than or equal to the dimension extent\n* step is optional and if provided must be greater than 0.  If not provided, the step value for that dimension is assumed to be 1.\n\nquery\n^^^^^\nOptionally the request can provide a query value to select items from a dataset based on a \ncondition expression.  E.g. 
The condition: \"(temp > 32.0) & (dir == 'N')\" would return elements \nof the dataset where the 'temp' field was greater than 32.0 and the 'dir' field was equal to 'N'.\n\nNote: the query value needs to be url-encoded.\n\nNote: the query parameter can be used in conjunction with the select parameter to restrict the return set to\nthe provided selection.\n\nNote: the query parameter can be used in conjunction with the Limit parameter to limit the \nnumber of matches returned.\n\nNote: Currently the query parameter can only be used with compound type datasets that are\none-dimensional.\n\nLimit\n^^^^^\nIf provided, a positive integer value specifying the maximum number of elements to return.\nOnly has an effect if used in conjunction with the query parameter.\n\n\nRequest Headers\n---------------\nThis implementation of the operation supports the common headers in addition to the \"Accept\" header value\nof \"application/octet-stream\".  Use this accept value if a binary response is desired.  Binary data will be\nmore efficient for large data requests.  If a binary response can be returned, the \"Content-Type\" response\nheader will be \"application/octet-stream\".  Otherwise the response header will be \"json\".\n\nNote: Binary responses are only supported for datasets that have a fixed-length type\n(i.e. either a fixed length primitive type or compound type that in turn consists of fixed-length types).  Namely\nvariable length strings and variable length data types will always be returned as JSON.\n\nNote: if a binary response is returned, it will consist of the equivalent binary data of the \"data\" item in the JSON\nresponse.  No data representing \"hrefs\" is returned.\n\nFor other request headers, see :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  
See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nvalue\n^^^^^\nA json array (integer or string for scalar datasets) giving the values of the requested \ndataset region.\n\nindex\n^^^^^\nA list of indexes for each element that met the query condition (only provided when \nthe query request parameter is used).\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    GET /datasets/548f2f21-a83c-11e4-8baf-3c15c2da029e/value HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 30 Jan 2015 04:56:20 GMT\n    Content-Length: 776\n    Etag: \"788efb3caaba7fd2ae5d1edb40b474ba94c877a8\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n    \n.. 
code-block:: json\n\n    {\n    \"value\": [\n        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], \n        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], \n        [0, 2, 4, 6, 8, 10, 12, 14, 16, 18], \n        [0, 3, 6, 9, 12, 15, 18, 21, 24, 27], \n        [0, 4, 8, 12, 16, 20, 24, 28, 32, 36], \n        [0, 5, 10, 15, 20, 25, 30, 35, 40, 45], \n        [0, 6, 12, 18, 24, 30, 36, 42, 48, 54], \n        [0, 7, 14, 21, 28, 35, 42, 49, 56, 63], \n        [0, 8, 16, 24, 32, 40, 48, 56, 64, 72], \n        [0, 9, 18, 27, 36, 45, 54, 63, 72, 81]\n      ],\n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/548f2f21-a83c-11e4-8baf-3c15c2da029e/value\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/548ed535-a83c-11e4-b58b-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/548f2f21-a83c-11e4-8baf-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ] \n    }\n    \nSample Request - Selection\n--------------------------\n\n.. code-block:: http\n\n    GET /datasets/a299db70-ab57-11e4-9c00-3c15c2da029e/value?select=[1:9,1:9:2] HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response - Selection\n---------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Tue, 03 Feb 2015 04:01:41 GMT\n    Content-Length: 529\n    Etag: \"b370a3d34bdd7ebf57a496bc7f0da7bc5a1aafb9\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2    \n    \n.. 
code-block:: json\n   \n    {\n    \"value\": [\n       [1, 3, 5, 7], \n       [2, 6, 10, 14], \n       [3, 9, 15, 21], \n       [4, 12, 20, 28], \n       [5, 15, 25, 35], \n       [6, 18, 30, 42], \n       [7, 21, 35, 49], \n       [8, 24, 40, 56]\n    ],  \n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/a299db70-ab57-11e4-9c00-3c15c2da029e/value\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/a29982cf-ab57-11e4-b976-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/a299db70-ab57-11e4-9c00-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]\n    }\n    \n    \nSample Request - Query\n--------------------------\n\nGet elements from dataset where the 'date' field is equal to 21 and the 'temp' field is greater than or equal to 72.\n\n.. code-block:: http\n\n    GET /datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e/value?query=(date%20==%2021)%20%26%20(temp%20%3E=%2072) HTTP/1.1\n    host: compound.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response - Query\n-------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Thu, 11 Jun 2015 21:05:06 GMT\n    Content-Length: 805\n    Etag: \"927b5ed89616896d3dce7df8bdddac058321076a\"\n    Content-Type: application/json\n    Server: TornadoServer/4.1    \n    \n.. 
code-block:: json\n   \n    {\n    \"index\": [68, 69, 70, 71], \n    \"value\": [\n       [21, \"17:53\", 74, 29.87, \"S 9\"], \n       [21, \"16:53\", 75, 29.87, \"SW 10\"], \n       [21, \"15:53\", 79, 29.87, \"S 12\"], \n       [21, \"14:53\", 78, 29.87, \"SW 9\"]\n      ],\n    \"hrefs\": [\n        {\"href\": \"http://compound.test.hdfgroup.org/datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e/value\", \"rel\": \"self\"}, \n        {\"href\": \"http://compound.test.hdfgroup.org/groups/b2c7f935-0e2e-11e5-96ae-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://compound.test.hdfgroup.org/datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://compound.test.hdfgroup.org/\", \"rel\": \"home\"}\n    ]\n    }\n    \nSample Request - Query Batch\n-----------------------------\n\nGet elements where the 'date' field is equal to 23 and the index is between 24 and 72.  Limit the number of results to 5.  \n\n.. code-block:: http\n\n    GET /datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e/value?query=date%20==%2023&Limit=5&select=[24:72] HTTP/1.1\n    host: compound.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response - Query Batch\n-----------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Thu, 11 Jun 2015 21:15:28 GMT\n    Content-Length: 610\n    Etag: \"927b5ed89616896d3dce7df8bdddac058321076a\"\n    Content-Type: application/json\n    Server: TornadoServer/4.1    \n    \n.. 
code-block:: json\n   \n    {\n    \"index\": [24, 25, 26, 27, 28], \n    \"value\": [\n        [23, \"13:53\", 65, 29.83, \"W 5\"], \n        [23, \"12:53\", 66, 29.84, \"W 5\"], \n        [23, \"11:53\", 64, 29.84, \"E 6\"], \n        [23, \"10:53\", 61, 29.86, \"SE 5\"], \n        [23, \"9:53\", 62, 29.86, \"S 6\"]\n       ],\n    \"hrefs\": [\n        {\"href\": \"http://compound.test.hdfgroup.org/datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e/value\", \"rel\": \"self\"}, \n        {\"href\": \"http://compound.test.hdfgroup.org/groups/b2c7f935-0e2e-11e5-96ae-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://compound.test.hdfgroup.org/datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://compound.test.hdfgroup.org/\", \"rel\": \"home\"}\n    ]\n    }\n        \nRelated Resources\n=================\n\n* :doc:`GET_Dataset`\n* :doc:`POST_Value`\n* :doc:`PUT_Value`\n \n\n "
  },
  {
    "path": "docs/DatasetOps/POST_Dataset.rst",
    "content": "**********************************************\nPOST Dataset\n**********************************************\n\nDescription\n===========\nCreates a new Dataset.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    POST /datasets HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nRequest Elements\n----------------\nThe request body must include a JSON object with a \"type\" key.  Optionally \"shape\", \n\"maxdims\", and \"link\" keys can be provided.\n\ntype\n^^^^\nEither a string that is one of the predefined type values, a uuid of a committed type,\nor a JSON object describing the type.  See :doc:`../Types/index` for details of the\ntype specification.\n\nshape\n^^^^^^\nEither a string with the value ``H5S_NULL`` or an\ninteger array describing the initial dimensions of the dataset.  If shape is not\nprovided, a scalar dataset will be created.\nIf the shape value of ``H5S_NULL`` is specified a dataset with a null dataspace will be \ncreated.  A null\ndataset has attributes and a type, but will not be able to store any values.\n\nmaxdims\n^^^^^^^\nAn integer array describing the maximum extent of each dimension (or 0 for unlimited\ndimensions).  
If maxdims is not provided the resulting dataset will be non-extensible.\nNot valid to include if ``H5S_NULL`` is specified for the shape.\n\ncreationProperties\n^^^^^^^^^^^^^^^^^^\nA JSON object that can specify chunk layout, filters, fill value, and other aspects of the dataset.\nSee: http://hdf5-json.readthedocs.org/en/latest/bnf/dataset.html#grammar-token-dcpl for a complete \ndescription of fields that can be used.\n\nIf creationProperties is not provided, default values will be used.\n\nlink[\"id\"]\n^^^^^^^^^^\nThe UUID of the group the new dataset should be linked to.  If the UUID is not valid,\nthe request will fail and a new dataset will not be created.\n\nlink[\"name\"]\n^^^^^^^^^^^^\nThe name of the new link.\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nid\n^^\nThe UUID of the newly created dataset.\n\nattributeCount\n^^^^^^^^^^^^^^\nThe number of attributes belonging to the dataset.\n\ncreated\n^^^^^^^\nA timestamp giving the time the dataset was created in UTC (ISO-8601 format).\n\nlastModified\n^^^^^^^^^^^^\nA timestamp giving the most recent time the dataset has been modified (i.e. attributes or \nlinks updated) in UTC (ISO-8601 format).\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\nCreate a one-dimensional dataset with 10 floating point elements.\n\n.. 
code-block:: http\n\n    POST /datasets HTTP/1.1\n    Content-Length: 39\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: newdset.datasettest.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n\n    {\n    \"shape\": 10, \n    \"type\": \"H5T_IEEE_F32LE\"\n    }\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Thu, 29 Jan 2015 06:14:02 GMT\n    Content-Length: 651\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n   \n    {\n    \"id\": \"0568d8c5-a77e-11e4-9f7a-3c15c2da029e\", \n    \"attributeCount\": 0, \n    \"created\": \"2015-01-29T06:14:02Z\",\n    \"lastModified\": \"2015-01-29T06:14:02Z\",\n    \"hrefs\": [\n        {\"href\": \"http://newdset.datasettest.test.hdfgroup.org/datasets/0568d8c5-a77e-11e4-9f7a-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://newdset.datasettest.test.hdfgroup.org/groups/055fe7de-a77e-11e4-bbe9-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://newdset.datasettest.test.hdfgroup.org/datasets/0568d8c5-a77e-11e4-9f7a-3c15c2da029e/attributes\", \"rel\": \"attributes\"}, \n        {\"href\": \"http://newdset.datasettest.test.hdfgroup.org/datasets/0568d8c5-a77e-11e4-9f7a-3c15c2da029e/value\", \"rel\": \"value\"}\n      ]\n    }\n    \nSample Request with Link\n------------------------\n\nCreate a dataset with 10 variable length string elements.  Create link in group: \n\"5e441dcf-...\" with name: \"linked_dset\".\n\n.. code-block:: http\n\n    POST /datasets HTTP/1.1\n    Content-Length: 235\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: newdsetwithlink.datasettest.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. 
code-block:: json\n\n    {\n    \"type\": {\n        \"class\": \"H5T_STRING\",\n        \"strsize\": \"H5T_VARIABLE\", \n        \"cset\": \"H5T_CSET_ASCII\", \n        \"order\": \"H5T_ORDER_NONE\", \n        \"strpad\": \"H5T_STR_NULLTERM\"\n    },\n    \"shape\": 10, \n    \"link\": {\n        \"id\": \"5e441dcf-a782-11e4-bd6b-3c15c2da029e\", \n        \"name\": \"linked_dset\"\n      }\n    \n    }\n    \nSample Response with Link\n-------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Thu, 29 Jan 2015 06:45:09 GMT\n    Content-Length: 683\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n   \n    \n    {\n    \"id\": \"5e579297-a782-11e4-93f9-3c15c2da029e\",\n    \"attributeCount\": 0,\n    \"created\": \"2015-01-29T06:45:09Z\",\n    \"lastModified\": \"2015-01-29T06:45:09Z\",\n    \"hrefs\": [\n        {\"href\": \"http://newdsetwithlink.datasettest.test.hdfgroup.org/datasets/5e579297-a782-11e4-93f9-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://newdsetwithlink.datasettest.test.hdfgroup.org/groups/5e441dcf-a782-11e4-bd6b-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://newdsetwithlink.datasettest.test.hdfgroup.org/datasets/5e579297-a782-11e4-93f9-3c15c2da029e/attributes\", \"rel\": \"attributes\"}, \n        {\"href\": \"http://newdsetwithlink.datasettest.test.hdfgroup.org/datasets/5e579297-a782-11e4-93f9-3c15c2da029e/value\", \"rel\": \"value\"}\n      ]\n    }\n    \nSample Request - Resizable Dataset\n----------------------------------\n\n  Create a one-dimensional dataset with 10 elements, but extendable to an unlimited\n  dimension.\n  \n.. code-block:: http\n\n    POST /datasets HTTP/1.1\n    Content-Length: 54\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: resizabledset.datasettest.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. 
code-block:: json\n\n    {\n    \"type\": \"H5T_IEEE_F32LE\",\n    \"shape\": 10,\n    \"maxdims\": 0\n    }\n    \nSample Response - Resizable Dataset\n-----------------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Thu, 29 Jan 2015 08:28:19 GMT\n    Content-Length: 675\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n     \n   {\n   \"id\": \"c79933ab-a790-11e4-b36d-3c15c2da029e\", \n   \"attributeCount\": 0, \n   \"created\": \"2015-01-29T08:28:19Z\",\n   \"lastModified\": \"2015-01-29T08:28:19Z\", \n   \"hrefs\": [\n        {\"href\": \"http://resizabledset.datasettest.test.hdfgroup.org/datasets/c79933ab-a790-11e4-b36d-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://resizabledset.datasettest.test.hdfgroup.org/groups/c7759c11-a790-11e4-ae03-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://resizabledset.datasettest.test.hdfgroup.org/datasets/c79933ab-a790-11e4-b36d-3c15c2da029e/attributes\", \"rel\": \"attributes\"}, \n        {\"href\": \"http://resizabledset.datasettest.test.hdfgroup.org/datasets/c79933ab-a790-11e4-b36d-3c15c2da029e/value\", \"rel\": \"value\"}\n      ]\n    }\n    \nSample Request - Committed Type\n----------------------------------\n\n  Create a two-dimensional dataset which uses a committed type with uuid: \n  \n.. code-block:: http\n\n    POST /datasets HTTP/1.1\n    Content-Length: 67\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: committedtype.datasettest.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n\n    {\n    \"type\": \"accd0b1e-a792-11e4-bada-3c15c2da029e\",\n    \"shape\": [10, 10]\n    }\n    \nSample Response - Committed Type\n-----------------------------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Thu, 29 Jan 2015 08:41:53 GMT\n    Content-Length: 675\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n     \n    {\n    \"id\": \"ace8cdca-a792-11e4-ad88-3c15c2da029e\", \n    \"attributeCount\": 0, \n    \"created\": \"2015-01-29T08:41:53Z\",\n    \"lastModified\": \"2015-01-29T08:41:53Z\",\n    \"hrefs\": [\n        {\"href\": \"http://committedtype.datasettest.test.hdfgroup.org/datasets/ace8cdca-a792-11e4-ad88-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://committedtype.datasettest.test.hdfgroup.org/groups/acc4d37d-a792-11e4-b326-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://committedtype.datasettest.test.hdfgroup.org/datasets/ace8cdca-a792-11e4-ad88-3c15c2da029e/attributes\", \"rel\": \"attributes\"}, \n        {\"href\": \"http://committedtype.datasettest.test.hdfgroup.org/datasets/ace8cdca-a792-11e4-ad88-3c15c2da029e/value\", \"rel\": \"value\"}\n      ]\n    }\n    \nSample Request - SZIP Compression with chunking\n-----------------------------------------------\n\n.. code-block:: http\n\n    POST /datasets HTTP/1.1\n    Content-Length: 67\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: szip.datasettest.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. 
code-block:: json\n\n    {\n    \"creationProperties\": {\n        \"filters\": [\n            {\n                \"bitsPerPixel\": 8,\n                \"coding\": \"H5_SZIP_EC_OPTION_MASK\",\n                \"id\": 4,\n                \"pixelsPerBlock\": 32,\n                \"pixelsPerScanline\": 100\n            }\n        ],\n        \"layout\": {\n            \"class\": \"H5D_CHUNKED\",\n            \"dims\": [\n                100,\n                100\n            ]\n        }\n    },\n    \"shape\": [\n        1000,\n        1000\n    ],\n    \"type\": \"H5T_IEEE_F32LE\"\n   }\n   \nSample Response - SZIP Compression with chunking\n------------------------------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Thu, 18 Jun 2015 08:41:53 GMT\n    Content-Length: 975\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n\n    {\n    \"id\": \"ad283c05-158c-11e5-bd67-3c15c2da029e\",\n    \"attributeCount\": 0,\n    \"created\": \"2015-06-18T07:36:04Z\",\n    \"lastModified\": \"2015-06-18T07:36:04Z\",\n    \"hrefs\": [\n        {\n            \"href\": \"http://newdset_szip.datasettest.test.hdfgroup.org/datasets/ad283c05-158c-11e5-bd67-3c15c2da029e\",\n            \"rel\": \"self\"\n        },\n        {\n            \"href\": \"http://newdset_szip.datasettest.test.hdfgroup.org/groups/ad2746d4-158c-11e5-a083-3c15c2da029e\",\n            \"rel\": \"root\"\n        },\n        {\n            \"href\": \"http://newdset_szip.datasettest.test.hdfgroup.org/datasets/ad283c05-158c-11e5-bd67-3c15c2da029e/attributes\",\n            \"rel\": \"attributes\"\n        },\n        {\n            \"href\": \"http://newdset_szip.datasettest.test.hdfgroup.org/datasets/ad283c05-158c-11e5-bd67-3c15c2da029e/value\",\n            \"rel\": \"value\"\n        }\n    ]\n    }\n\n\n    \nRelated Resources\n=================\n\n* :doc:`GET_Dataset`\n* :doc:`GET_Datasets`\n* :doc:`GET_Value`\n* 
:doc:`POST_Value`\n* :doc:`PUT_Value`\n \n\n "
  },
  {
    "path": "docs/DatasetOps/POST_Value.rst",
    "content": "**********************************************\nPOST Value\n**********************************************\n\nDescription\n===========\nGets values of a dataset for a given point selection (provided in the body of the \nrequest).\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    POST /datasets/<id>/value HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n*<id>* is the UUID of the requested dataset.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nRequest Body\n------------\n\nThe request body should be a JSON object with the following key:\n\npoints\n^^^^^^\n\nAn array of points defining the selection.  Each point can either be an integer\n(if the dataset has just one dimension), or an array where the length of the \narray is equal to the number of dimensions of the dataset.\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nvalue\n^^^^^\nAn array of values where the length of the array is equal to the number of points \nin the request.  Each value will be a string, integer, or JSON object consistent\nwith the dataset type (e.g. a compound type).\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. 
code-block:: http\n\n    POST /datasets/4e83ad1c-ab6e-11e4-babb-3c15c2da029e/value HTTP/1.1\n    Content-Length: 92\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: tall.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n\n    {\n    \"points\": [19, 17, 13, 11, 7, 5, 3, 2]\n    }\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Tue, 03 Feb 2015 06:31:38 GMT\n    Content-Length: 47\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n \n    {\n    \"value\": [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`GET_Dataset`\n* :doc:`GET_Value`\n* :doc:`PUT_Value`\n \n\n "
  },
  {
    "path": "docs/DatasetOps/PUT_DatasetShape.rst",
    "content": "**********************************************\nPUT Shape\n**********************************************\n\nDescription\n===========\nModifies the dimensions of a dataset.  Dimensions can only be changed if the dataset\nwas initially created with that dimension as *extensible* - i.e. the maxdims value\nfor that dimension is larger than the initial dimension size (or maxdims set to 0).\n\n*Note:* Dimensions can only be made larger, they can not be reduced.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    PUT /datasets/<id>/shape HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n*<id>* is the UUID of the dataset whose shape will be modified.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nRequest Elements\n----------------\nThe request body must include a JSON object with a \"shape\" key as described below:\n\nshape\n^^^^^\nAn integer array giving the new dimensions of the dataset.\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. 
code-block:: http\n\n    PUT /datasets/b9b6acc0-a839-11e4-aa86-3c15c2da029e/shape HTTP/1.1\n    Content-Length: 19\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: resized.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n\n    {\n    \"shape\": [10, 25]\n    }\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Fri, 30 Jan 2015 04:47:47 GMT\n    Content-Length: 331\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2   \n    \n.. code-block:: json\n\n    {\n    \"hrefs\": [\n        {\"href\": \"http://resized.test.hdfgroup.org/datasets/22e1b235-a83b-11e4-97f4-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://resized.test.hdfgroup.org/datasets/22e1b235-a83b-11e4-97f4-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://resized.test.hdfgroup.org/groups/22dfff8f-a83b-11e4-883d-3c15c2da029e\", \"rel\": \"root\"}\n      ]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`GET_Dataset`\n* :doc:`GET_DatasetShape`\n* :doc:`GET_Value`\n* :doc:`POST_Value`\n* :doc:`PUT_Value`\n \n\n "
  },
  {
    "path": "docs/DatasetOps/PUT_Value.rst",
    "content": "**********************************************\nPUT Value\n**********************************************\n\nDescription\n===========\nUpdate the values in a dataset.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    PUT /datasets/<id>/value HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n*<id>* is the UUID of the requested dataset.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nRequest Body\n------------\nThe request body should be a JSON object with the following keys:\n\nstart:\n^^^^^^\nAn optional key that gives the starting coordinate of the selection to be updated.  The\nstart value can either be an integer (for 1 dimensional arrays) or an array of integers\nwhere the length of the array is equal to the number of dimensions of the dataset.  Each\nvalue must be greater than equal to zero and less than the extent of the corresponding\ndimension.\n\nIf start is not provided, the selection starts at 0 for each dimension.\n\nstop:\n^^^^^\nAn optional key that gives the ending coordinate of the selection to be updated.\nThe stop value can either be an integer (for 1 dimensional arrays) or an array of integers\nwhere the length of the array is equal to the number of dimensions of the dataset.  Each\nvalue must be greater than equal to start (or zero if start is not provided) and less than\nthe extent of the corresponding dimension.\n\nstep:\n^^^^^\nAn optional key that gives the step value (i.e. the increment of the coordinate for\neach supplied value). The step value can either be an integer (for 1 dimensional arrays) or\nan array of integers where the length of the array is equal to the number of dimensions of\nthe dataset.  
Each value must be greater than or equal to start (or zero if start is not \nprovided) and less than or equal to the extent of the corresponding dimension.\n\npoints:\n^^^^^^^\n\nAn optional key that contains a list of array elements to be updated.  Each element of the list should be \nan integer if the dataset is of rank 1 or an n-element list (where n is the dataset rank) if the dataset\nrank is greater than 1.  If points is provided (indicating a point selection update), then start, stop, \nand step (used for hyperslab selection) should not be provided.\n\nvalue:\n^^^^^^\nA JSON array containing the data values to be written.\n\nvalue_base64:\n^^^^^^^^^^^^^\n\nUse this key instead of \"value\" to use base64-encoded binary data rather than JSON ascii.  This will be more\nefficient for large data transfers than using a JSON array.\n\nNote: \"value_base64\" is only supported for fixed length datatypes.\n\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nNo response elements are returned.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\n\nSample Request\n--------------\n\nThis example writes a 10x10 integer dataset with the values 0-99 inclusive.\n\n.. code-block:: http\n\n    PUT /datasets/817e2280-ab5d-11e4-afe6-3c15c2da029e/value HTTP/1.1\n    Content-Length: 465\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: valueput.datasettest.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. 
code-block:: json\n\n    {\n    \"value\": [\n        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], \n        [10, 11, 12, 13, 14, 15, 16, 17, 18, 19], \n        [20, 21, 22, 23, 24, 25, 26, 27, 28, 29], \n        [30, 31, 32, 33, 34, 35, 36, 37, 38, 39], \n        [40, 41, 42, 43, 44, 45, 46, 47, 48, 49], \n        [50, 51, 52, 53, 54, 55, 56, 57, 58, 59], \n        [60, 61, 62, 63, 64, 65, 66, 67, 68, 69], \n        [70, 71, 72, 73, 74, 75, 76, 77, 78, 79], \n        [80, 81, 82, 83, 84, 85, 86, 87, 88, 89], \n        [90, 91, 92, 93, 94, 95, 96, 97, 98, 99]\n      ]\n    }\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Tue, 03 Feb 2015 04:31:22 GMT\n    Content-Length: 0\n    Content-Type: text/html; charset=UTF-8\n    Server: TornadoServer/3.2.2\n    \n    \nSample Request - Selection\n--------------------------\n\nThis example writes a portion of the dataset by using the start and stop keys in the\nrequest.\n\n.. code-block:: http\n\n    PUT /datasets/b2d0af00-ab65-11e4-a874-3c15c2da029e/value HTTP/1.1\n    Content-Length: 92\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: valueputsel.datasettest.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n\n    {     \n    \"start\": 5, \n    \"stop\": 10,\n    \"value\": [13, 17, 19, 23, 29]\n    }\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Tue, 03 Feb 2015 05:30:01 GMT\n    Content-Length: 0\n    Content-Type: text/html; charset=UTF-8\n    Server: TornadoServer/3.2.2\n    \n    \nRelated Resources\n=================\n\n* :doc:`GET_Dataset`\n* :doc:`GET_Value`\n* :doc:`POST_Value`\n \n\n "
  },
  {
    "path": "docs/DatasetOps/index.rst",
    "content": "######################\nDatasets\n######################\n\nDatasets are objects that are composed of a homogeneous collection of data elements.   Each\ndataset has a *type* that specifies the structure of the individual elements (float, string,\ncompound, etc.), and a *shape* that specifies the layout of the data elements (scalar, \none-dimensional, multi-dimensional).  In addition meta-data can be attached to a dataset\nin the form of attributes.  See: :doc:`../AttrOps/index`.\n\nCreating Datasets\n-----------------\n\nUse the :doc:`POST_Dataset` operation to create new datasets.  As part of the POST\nrequest, JSON descriptions for the type and shape of the dataset are included with the\nrequest.  Optionally, creation properties can be used to specify the chunk layout (how\nthe data elements are stored in the server) and compression filter (e.g. GZIP, LZF, SZIP).\n\nGetting information about a dataset\n-----------------------------------\nUse the :doc:`GET_Dataset` operation to retrieve information about a dataset's type,\nshape, creation properties, and number of attributes.  To list all the datasets within a domain use \n:doc:`GET_Datasets`.  To list the datasets linked to a particular group use \n:doc:`../GroupOps/GET_Links` and look at links with a \"collection\" key of \"datasets\".\n\nWriting data to a dataset\n-------------------------\nTo write data into the dataset, use the :doc:`PUT_Value` operation.  The request can\neither provide values for the entire dataset, or values for a hyperslab (rectangular\nsub-region) selection.  In addition, if it is desired to update a specific list of \ndata elements, a point selection (series of element coordinates) can be passed to the \n:doc:`PUT_Value` operation.\n \nReading data from a dataset\n---------------------------\nTo read either the entire dataset, or a specified selection, use the :doc:`GET_Value`\noperation.  Without any request parameters, the GET operation returns all data values.  
\nTo read a specific hyperslab, use the select parameter to specify the start and end indexes of the hyperslab\n(the selection can also include a step value to include a regular subset of the hyperslab).\nFinally, for one-dimensional datasets with compound types, a *where* parameter can be used to \nselect elements meeting a specified condition. \n\nTo read a specific list of elements (by index values), use the :doc:`POST_Value` operation  (POST is \nused in this case rather than GET since the point selection values may be too \nlarge to include in the URI.) \n\nResizable datasets\n------------------\nIf one or more of the dimensions of a dataset may need to be extended after creation,\nprovide a *maxdims* key to the shape during creation (see :doc:`POST_Dataset`).  If the value of the maxdims\ndimension is 0, that dimension is *unlimited* and may be extended as much as desired.\nIf an upper limit is known, use that value in maxdims which will allow that dimension\nto be extended up to the given value.\nTo resize the dataset, use the :doc:`PUT_DatasetShape` operation with the desired shape value(s) for\nthe new dimensions.\n\nNote: dimensions can only be increased, not decreased.\n\nDeleting datasets\n-----------------\nThe :doc:`DELETE_Dataset` operation will remove the dataset, its attributes, and any\nlinks to the object.\n\nList of Operations\n-------------------\n\n.. toctree::\n   :maxdepth: 1\n\n   DELETE_Dataset\n   GET_Dataset\n   GET_Datasets\n   GET_DatasetShape\n   GET_DatasetType\n   GET_Value\n   POST_Dataset\n   POST_Value\n   PUT_DatasetShape\n   PUT_Value\n    \n    \n"
  },
  {
    "path": "docs/DatatypeOps/DELETE_Datatype.rst",
    "content": "**********************************************\nDELETE Datatype\n**********************************************\n\nDescription\n===========\nThe implementation of the DELETE operation deletes the committed datatype\n named in the URI.  All attributes the datatype will also be deleted.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    DELETE /datatypes/<id> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n*<id>* is the UUID of the datatype to be deleted.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    DELETE /datatypes/93b6a335-ac44-11e4-8d71-3c15c2da029e HTTP/1.1\n    Content-Length: 0\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: namedtype_deleted.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Wed, 04 Feb 2015 08:05:26 GMT\n    Content-Length: 363\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n  \n    {\n    \"hrefs\": [\n        {\"href\": \"http://namedtype_deleted.test.hdfgroup.org/datatypes\", \"rel\": \"self\"}, \n        {\"href\": \"http://namedtype_deleted.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://namedtype_deleted.test.hdfgroup.org/groups/93b51245-ac44-11e4-8a21-3c15c2da029e\", \"rel\": \"root\"}\n      ]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`../AttrOps/GET_Attributes`\n* :doc:`GET_Datatype`\n* :doc:`GET_Datatypes`\n* :doc:`POST_Datatype`\n* :doc:`../DatasetOps/POST_Dataset`\n* :doc:`../AttrOps/PUT_Attribute`\n \n\n "
  },
  {
    "path": "docs/DatatypeOps/GET_Datatype.rst",
    "content": "**********************************************\nGET Datatype\n**********************************************\n\nDescription\n===========\nReturns information about the committed datatype with the UUID given in the URI.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /datatypes/<id> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n*<id>* is the UUID of the requested datatype.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nid\n^^\n\nThe UUID of the datatype object.\n\ntype\n^^^^\nA JSON object representing the type of the datatype object.\n\nattributeCount\n^^^^^^^^^^^^^^\nThe number of attributes belonging to the datatype.\n\ncreated\n^^^^^^^\nA timestamp giving the time the dataset was created in UTC (ISO-8601 format).\n\nlastModified\n^^^^^^^^^^^^\nA timestamp giving the most recent time the dataset has been modified (i.e. attributes updated) in UTC (ISO-8601 format).\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nGet the committed datatype with UUID: \"f545543d-...\".\n\nSample Request\n--------------\n\n.. 
code-block:: http\n\n    GET /datatypes/f545543d-a1b4-11e4-8fa4-3c15c2da029e HTTP/1.1\n    host: namedtype.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Wed, 21 Jan 2015 21:36:49 GMT\n    Content-Length: 619\n    Etag: \"c53bc5b2d3c3b5059b71ef92ca7d144a2df54456\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n\n    {\n    \"id\": \"f545543d-a1b4-11e4-8fa4-3c15c2da029e\",\n    \"type\": {\n        \"base\": \"H5T_IEEE_F32LE\", \n        \"class\": \"H5T_FLOAT\"\n      }, \n    \"created\": \"2015-01-21T21:32:01Z\", \n    \"lastModified\": \"2015-01-21T21:32:01Z\", \n    \"attributeCount\": 1, \n    \"hrefs\": [\n        {\"href\": \"http://namedtype.test.hdfgroup.org/datatypes/f545543d-a1b4-11e4-8fa4-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://namedtype.test.hdfgroup.org/groups/f545103d-a1b4-11e4-b4a1-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://namedtype.test.hdfgroup.org/datatypes/f545543d-a1b4-11e4-8fa4-3c15c2da029e/attributes\", \"rel\": \"attributes\"}, \n        {\"href\": \"http://namedtype.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]     \n    }\n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Datatype`\n* :doc:`GET_Datatypes`\n* :doc:`POST_Datatype`\n* :doc:`../DatasetOps/POST_Dataset`\n* :doc:`../AttrOps/PUT_Attribute`\n \n\n "
  },
  {
    "path": "docs/DatatypeOps/GET_Datatypes.rst",
    "content": "**********************************************\nGET Datatypes\n**********************************************\n\nDescription\n===========\nGets all the committed datatypes in a domain.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /datatypes HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nRequest Parameters\n------------------\nThis implementation of the operation uses the following request parameters (both \noptional):\n\nLimit\n^^^^^\nIf provided, a positive integer value specifying the maximum number of UUID's to return.\n\nMarker\n^^^^^^\nIf provided, a string value indicating that only UUID's that occur after the\nmarker value will be returned.\n\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    GET /datatypes HTTP/1.1\n    host: namedtype.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Wed, 21 Jan 2015 22:42:30 GMT\n    Content-Length: 350\n    Etag: \"e01f56869a9a919b1496c463f3569a2a7c319f11\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n\n    {\n    \"datatypes\": [\n        \"f54542e6-a1b4-11e4-90bf-3c15c2da029e\", \n        \"f545543d-a1b4-11e4-8fa4-3c15c2da029e\"\n    ], \n    \"hrefs\": [\n        {\"href\": \"http://namedtype.test.hdfgroup.org/datatypes\", \"rel\": \"self\"}, \n        {\"href\": \"http://namedtype.test.hdfgroup.org/groups/f545103d-a1b4-11e4-b4a1-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://namedtype.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]\n    }\n    \nSample Request with Marker and Limit\n------------------------------------\n\nThis example uses the \"Marker\" request parameter to return only UUIDs after the given\nMarker value.\nAlso the \"Limit\" request parameter is used to limit the number of UUIDs in the response to 5.\n\n.. code-block:: http\n\n    GET /datatypes?Marker=d779cd5e-a1e6-11e4-8fc5-3c15c2da029e&Limit=5 HTTP/1.1\n    host: type1k.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n \nSample Response with Marker and Limit\n-------------------------------------\n\n .. code-block:: http\n \n    HTTP/1.1 200 OK\n    Date: Thu, 22 Jan 2015 03:32:13 GMT\n    Content-Length: 461\n    Etag: \"a2e2d5a3ae63cd504d02b51d99f27b30d17b75b5\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n   \n .. 
code-block:: json\n      \n    {\n    \"datatypes\": [\n        \"d779ddd9-a1e6-11e4-89e5-3c15c2da029e\", \n        \"d779ef11-a1e6-11e4-8837-3c15c2da029e\", \n        \"d77a008a-a1e6-11e4-8840-3c15c2da029e\", \n        \"d77a121e-a1e6-11e4-b2b0-3c15c2da029e\", \n        \"d77a2523-a1e6-11e4-aa6d-3c15c2da029e\"\n      ], \n    \"hrefs\": [\n        {\"href\": \"http://type1k.test.hdfgroup.org/datatypes\", \"rel\": \"self\"}, \n        {\"href\": \"http://type1k.test.hdfgroup.org/groups/d7742c14-a1e6-11e4-b2a8-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://type1k.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ]\n    }\n        \n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Datatype`\n* :doc:`GET_Datatype`\n* :doc:`POST_Datatype`\n* :doc:`../DatasetOps/POST_Dataset`\n* :doc:`../AttrOps/PUT_Attribute`\n \n\n "
  },
  {
    "path": "docs/DatatypeOps/POST_Datatype.rst",
    "content": "**********************************************\nPOST Datatype\n**********************************************\n\nDescription\n===========\nCreates a new committed datatype.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    POST /datatypes  HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nRequest Elements\n----------------\nThe request body must be a JSON object with a 'type' link key as described below.\nOptionally, the request body can include a 'link' key that describes how the new\ncommitted datatype will be linked.\n\ntype\n^^^^\nThe value of the type key can either be one of the predefined type strings \n(see predefined types), or a JSON representation of a type. (see :doc:`../Types/index`).\n\nlink\n^^^^\nIf present, the link value must include the following subkeys:\n\nlink['id']\n^^^^^^^^^^\nThe UUID of the group the new datatype should be linked from.  If the UUID is not valid,\nthe request will fail and a new datatype will not be created.\n\nlink['name']\n^^^^^^^^^^^^\nThe name of the new link.\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  
See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nid\n^^\n\nThe UUID of the newly created datatype object.\n\nattributeCount\n^^^^^^^^^^^^^^\nThe number of attributes belonging to the datatype.\n\ncreated\n^^^^^^^\nA timestamp giving the time the datatype was created in UTC (ISO-8601 format).\n\nlastModified\n^^^^^^^^^^^^\nA timestamp giving the most recent time the datatype has been modified (i.e. attributes or \nlinks updated) in UTC (ISO-8601 format).\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\nCreate a new committed datatype using the \"H5T_IEEE_F32LE\" (32-bit float) predefined type.\n\n.. code-block:: http\n\n    POST /datatypes HTTP/1.1\n    Content-Length: 26\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: newdtype.datatypetest.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n\n    {\n    \"type\": \"H5T_IEEE_F32LE\"\n    }\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Thu, 22 Jan 2015 19:06:17 GMT\n    Content-Length: 533\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n  \n    {\n    \"id\": \"be08d40c-a269-11e4-84db-3c15c2da029e\", \n    \"attributeCount\": 0, \n    \"created\": \"2015-01-22T19:06:17Z\",\n    \"lastModified\": \"2015-01-22T19:06:17Z\",\n    \"hrefs\": [\n        {\"href\": \"http://newdtype.datatypetest.test.hdfgroup.org/datatypes/be08d40c-a269-11e4-84db-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://newdtype.datatypetest.test.hdfgroup.org/groups/be00807d-a269-11e4-8d9c-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://newdtype.datatypetest.test.hdfgroup.org/datatypes/be08d40c-a269-11e4-84db-3c15c2da029e/attributes\", \"rel\": \"attributes\"}\n        ]\n    }\n    \n    \nSample Request with Link\n------------------------\n\nCreate a new committed datatype and link to root as \"linked_dtype\".\n\n.. code-block:: http\n\n    POST /datatypes HTTP/1.1\n    Content-Length: 106\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: newlinkedtype.datatypetest.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n\n    {\n    \"type\": \"H5T_IEEE_F64LE\",\n    \"link\": {\n        \"id\": \"76b0bbf8-a26c-11e4-8d4c-3c15c2da029e\", \n        \"name\": \"linked_dtype\"\n      }\n    }\n    \nSample Response with Link\n-------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Thu, 22 Jan 2015 19:25:46 GMT\n    Content-Length: 548\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n\n    {\n    \"id\": \"76c3c33a-a26c-11e4-998c-3c15c2da029e\", \n    \"attributeCount\": 0, \n    \"created\": \"2015-01-22T19:25:46Z\",\n    \"lastModified\": \"2015-01-22T19:25:46Z\", \n    \"hrefs\": [\n        {\"href\": \"http://newlinkedtype.datatypetest.test.hdfgroup.org/datatypes/76c3c33a-a26c-11e4-998c-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://newlinkedtype.datatypetest.test.hdfgroup.org/groups/76b0bbf8-a26c-11e4-8d4c-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://newlinkedtype.datatypetest.test.hdfgroup.org/datatypes/76c3c33a-a26c-11e4-998c-3c15c2da029e/attributes\", \"rel\": \"attributes\"}\n      ]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Datatype`\n* :doc:`GET_Datatype`\n* :doc:`GET_Datatypes`\n* :doc:`../DatasetOps/POST_Dataset`\n* :doc:`../AttrOps/PUT_Attribute`\n \n\n "
  },
  {
    "path": "docs/DatatypeOps/index.rst",
    "content": "#######################\nCommitted Datatypes\n#######################\n\nCommitted datatypes (also know as \"named types\"), are object that describe types.  These\ntypes can be used in the creation of datasets and attributes.\n\nCommitted datatypes can be linked to from a Group and can contain attributes, just like\na dataset or group object.\n\nCreating committed datatypes\n----------------------------\n\nUse :doc:`POST_Datatype` to create a new datatype.  A complete description of the \ntype must be sent with the POST request.\n\nGetting information about a committed datatype\n-----------------------------------------------\n\nUse the :doc:`GET_Datatype` operation to retrieve information about a committed datatype.\nTo list all the committed datatypes within a domain use \n:doc:`GET_Datatypes`.  To list the committed types linked to a particular group use \n:doc:`../GroupOps/GET_Links` and examine link object with a \"collection\" key of \n\"datatypes\".\n\nDeleting committed datatypes\n----------------------------\n\nUse :doc:`DELETE_Datatype` to delete a datatype.  Links from any group to the datatype\nwill be deleted.  \n\nList of Operations\n------------------\n\n.. toctree::\n   :maxdepth: 1\n\n   DELETE_Datatype\n   GET_Datatype\n   GET_Datatypes\n   POST_Datatype\n    \n    \n"
  },
  {
    "path": "docs/Diagram.rst",
    "content": "***************************\nDiagram of REST operations\n***************************\n\n \n.. image:: RESTful_HDF5.png\n   :width: 100 %\n   :alt: alternate text\n   :align: right"
  },
  {
    "path": "docs/DomainOps/DELETE_Domain.rst",
    "content": "**********************************************\nDELETE Domain\n**********************************************\n\nDescription\n===========\nThe DELETE operation deletes the given domain and\nall its resources (groups, datasets, attributes, etc.).\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    DELETE /  HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nThis implementation of the operation does not return any response elements.\n\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n   DELETE / HTTP/1.1\n   Content-Length: 0\n   User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n   host: deleteme.test.hdfgroup.org\n   Accept: */*\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 16 Jan 2015 03:47:33 GMT\n    Content-Length: 0\n    Content-Type: text/html; charset=UTF-8\n    Server: TornadoServer/3.2.2\n    \n \n    \nRelated Resources\n=================\n\n* :doc:`GET_Domain`\n* :doc:`PUT_Domain`\n \n\n "
  },
  {
    "path": "docs/DomainOps/GET_Domain.rst",
    "content": "**********************************************\nGET Domain\n**********************************************\n\nDescription\n===========\nThis operation retrieves information about the requested domain.\n\n*Note:* If the HDF Dynamic DNS Server (see https://github.com/HDFGroup/dynamic-dns) is running, \nthe operations can specify the domain as part of the URI.  Example:  \nhttp://tall.data.hdfgroup.org:7253/ \nreturns data about the domain \"tall\" hosted on data.hdfgroup.org.  \nThe DNS server will determine the proper IP that maps to this domain.\n\nIf the DNS Server is not setup, specify the desired domain in the Host line of the http\nheader.\n\nAlternatively, the domain can be specified as a 'Host' query parameter.  Example:\nhttp://127.0.0.1:7253?host=tall.data.hdfgroup.org.\n\nIf no Host value is supplied, the default Table of Contents (TOC) domain is returned.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET / HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  
See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nroot\n^^^^\nThe UUID of the root group of this domain.\n\ncreated\n^^^^^^^\nA timestamp giving the time the domain was created in UTC (ISO-8601 format).\n\nlastModified\n^^^^^^^^^^^^\nA timestamp giving the most recent time that any content in the domain has been\nmodified in UTC (ISO-8601 format).\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return any special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    GET / HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 16 Jan 2015 03:51:58 GMT\n    Content-Length: 508\n    Etag: \"e45bef255ffc0530c33857b88b15f551f371de38\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n    \n    {\n    \"root\": \"052dcbbd-9d33-11e4-86ce-3c15c2da029e\", \n    \"created\": \"2015-01-16T03:51:58Z\",\n    \"lastModified\": \"2015-01-16T03:51:58Z\", \n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"self\"},\n        {\"href\": \"http://tall.test.hdfgroup.org/datasets\", \"rel\": \"database\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups\", \"rel\": \"groupbase\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/datatypes\", \"rel\": \"typebase\"},\n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e\", \"rel\": \"root\"}\n    ]      \n    }\n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Domain`\n* :doc:`../GroupOps/GET_Group`\n* :doc:`PUT_Domain`\n \n\n "
  },
  {
    "path": "docs/DomainOps/PUT_Domain.rst",
    "content": "**********************************************\nPUT Domain\n**********************************************\n\nDescription\n===========\nThis operation creates a new domain.\n\n*Note*: Initially the only object contained in the domain is the root group.  Use other\nPUT and POST operations to create new objects in the domain.\n\n*Note*: The operation will fail if the domain already exists (a 409 code will be returned).\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    PUT / HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nroot\n^^^^\nThe UUID of the root group of this domain.\n\ncreated\n^^^^^^^\nA timestamp giving the time the domain was created in UTC (ISO-8601 format).\n\nlastModified\n^^^^^^^^^^^^\nA timestamp giving the most recent time that any content in the domain has been\nmodified in UTC (ISO-8601 format).\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return any special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nAn http status code of 409 (Conflict) will be returned if the domain already exists.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. 
code-block:: http\n\n    PUT / HTTP/1.1\n    Content-Length: 0\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: newfile.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Fri, 16 Jan 2015 04:11:52 GMT\n    Content-Length: 523\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n\n    \n  {\n    \"root\": \"cd31cfdc-9d35-11e4-aa58-3c15c2da029e\",\n    \"created\": \"2015-01-16T04:11:52Z\",\n    \"lastModified\": \"2015-01-16T04:11:52Z\", \n    \"hrefs\": [\n       {\"href\": \"http://newfile.test.hdfgroup.org/\", \"rel\": \"self\"}, \n       {\"href\": \"http://newfile.test.hdfgroup.org/datasets\", \"rel\": \"database\"}, \n       {\"href\": \"http://newfile.test.hdfgroup.org/groups\", \"rel\": \"groupbase\"}, \n       {\"href\": \"http://newfile.test.hdfgroup.org/datatypes\", \"rel\": \"typebase\"}, \n       {\"href\": \"http://newfile.test.hdfgroup.org/groups/cd31cfdc-9d35-11e4-aa58-3c15c2da029e\", \"rel\": \"root\"}\n       ]    \n  }\n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Domain`\n* :doc:`../GroupOps/GET_Group`\n* :doc:`GET_Domain`\n \n\n "
  },
  {
    "path": "docs/DomainOps/index.rst",
    "content": "#######################\nDomains\n#######################\n\nIn h5serv, domains are containers for related collection of resources, similar to a\nfile in the traditional HDF5 library.  In the h5serv implementation of the HDF5 REST API,\ndomains *are* files, but in general the HDF REST API supports alternative implementations \n(e.g. data that is stored in a database).\nMost operations of the service act on a domain resource that is provided in \nthe *Host* http header or (alternatively) the Host query parameter.\n\nMapping of file paths to domain names\n-------------------------------------\n\nTo convert a file path to a domain name:\n\n#. Remove the extension\n#. Determine the path relative to the data directory\n#. Replace '/' with '.'\n#. Reverse the path\n#. Add the domain suffix (using the domain config value)\n\nAs an example consider a server installation where that data directory is '/data'\nand an HDF5 is located at ``/data/myfolder/an_hdf_file.h5`` and ``hdfgroup.org``\nis the base domain.  The above sequence of steps would look like the following:\n\n#. /data/myfolder/an_hdf_file\n#. myfolder/an_hdf_file\n#. myfolder.an_hdf_file\n#. an_hdf_file.myfolder\n#. an_hdf_file.myfolder.hdfgroup.org\n\nThe final expression is what should be used in the Host field for any request that access\nthat file.  \n\nFor path names that include non-alphanumeric charters, replace any such characters with \nthe string '%XX' where XX is the hexidecimal value of the character.  For example:\n\n``this.file.has.dots.h5``\n\nbecomes:\n\n``this%2Efile%2Ehase%2Edots``\n\n\nCreating Domains\n----------------\nUse :doc:`PUT_Domain` to create a domain.  The domain name must follow DNS conventions\n(e.g. two consecutive \"dots\" are not allowed).  After creation, the domain will contain\njust one resource, the root group.  
\n\nUse :doc:`GET_Domain` to get information about a domain, including the UUID of the \ndomain's root group.\n\nGetting Information about Domains\n---------------------------------\n\nUse :doc:`GET_Domain` to retrieve information about a specific domain (specified in the Host\nheader).  If the Host value is not supplied, the service returns information on the \nauto-generated Table of Contents (TOC) that provides information on domains that are available.\n\nDeleting Domains\n----------------\nUse :doc:`DELETE_Domain` to delete a domain.  All resources within the domain will be\ndeleted!\n\nThe TOC domain cannot be deleted.\n\nList of Operations\n------------------\n\n.. toctree::\n   :maxdepth: 1\n\n   DELETE_Domain\n   GET_Domain\n   PUT_Domain\n    \n    \n"
  },
  {
    "path": "docs/FAQ/index.rst",
    "content": "###################\nFAQ\n###################\n\n\nWhat datatypes are supported?\n-----------------------------\n\n=========================           ============================================    \nType                                Precisions                                       \n=========================           ============================================    \nInteger                             1, 2, 4 or 8 byte, BE/LE, signed/unsigned\nFloat                               4, 8  byte, BE/LE\nCompound                            Arbitrary names and offsets\nStrings (fixed-length)              Any length\nStrings (variable-length)           Any length, ASCII \nOpaque                              Any length\nArray                               Any supported type\nEnumeration                         Any integer type                           \nReferences                          Region and object\n=========================           ============================================     \n\nUnsupported types:\n\n=========================           ============================================\nType                                Status                                 \n=========================           ============================================\nHDF5 VLEN (non-string)              Coming soon!\nHDF5 \"time\" type\nOpaque                              \nBitfields                            \n=========================           ============================================\n\n\nWhy does h5serv use those long ids?\n------------------------------------\n\nh5serv uses the UUID standard (http://en.wikipedia.org/wiki/Universally_unique_identifier)\nto identify objects (datasets, groups, and committed datatypes) uniquely.  
The benefit of\nusing UUIDs is that objects can be uniquely identified without requiring any central \ncoordination.\n\nHow can I get a dataset (or group) via a pathname?\n--------------------------------------------------\n\nYou will need to iterate through the path to get the UUID of each subgroup.\nE.g. suppose the path of interest is \"/g1/g1.1\" in the domain: \"tall.data.hdfgroup.org\".\nPerform these actions to get the UUID of the group at /g1/g1.1.\n\n#. ``GET /``  // returns the UUID of the root group\n#. ``GET /groups/<root_uuid>/links/g1``  // returns the UUID of the group at \"/g1\"\n#. ``GET /groups/<g1_uuid>/links/g1.1``  // returns the UUID of the group at \"/g1/g1.1\"\n\nHow do I guard against an attribute (dataset/group/file) being deleted by a request?\n-----------------------------------------------------------------------------------------\nFuture releases of h5serv will support authorization and permissions to protect content\nthat shouldn't be altered.\n\nFor now the choices are:\n\n#. Don't expose the h5serv endpoint on a non-trusted network\n#. Make the files readonly\n#. Make periodic backups of all data files\n#. Don't share the domain name with non-trusted sources.  Since h5serv doesn't provide an operation to list all domains on the server, creating a non-trivial domain name (e.g. \"mydata_18494\") will be relatively secure.\n\nHow can I display my data in a nice Web UI?\n-------------------------------------------\nThere are many Javascript libraries (e.g. http://d3js.org) that can take the data \nreturned by h5serv to create compelling graphics.  \n\nI have a C or Fortran application that uses HDF5, how can I take advantage of h5serv?\n-------------------------------------------------------------------------------------\nWe are planning on creating an hdf5 library plugin that will transparently invoke the \nREST api.  
For now, you can use C-libraries such as libcurl to invoke h5serv requests.\n\nIs there documentation on the JSON format generated by h5tojson.py?\n-------------------------------------------------------------------\nYes.  There is a grammar published here: http://hdf5-json.readthedocs.org/en/latest/index.html. \n\nHow do I submit a bug report?\n------------------------------\nIf you have a Github account, create an issue here: \nhttps://github.com/HDFGroup/h5serv/issues.\n\nAlternatively, you can send an email to the HDF Group help desk: help@hdfgroup.org.\n"
  },
  {
    "path": "docs/GroupOps/DELETE_Group.rst",
    "content": "**********************************************\nDELETE Group\n**********************************************\n\nDescription\n===========\nThe implementation of the DELETE operation deletes the group with the UUID given in the\nURI.  All attributes and links of the group will also be deleted.  In addition any \nlinks from other groups **TO** the deleted group will be removed.\n\n*Note:* Groups, datatypes, and datasets that are referenced by the group's links will \n**not** be deleted.  Use the DELETE operation for those objects to remove.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    DELETE /groups/<id> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n \n*<id>* is the UUID of the group to be deleted.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. code-block:: http\n\n    DELETE /groups/45a882e1-9d01-11e4-8acf-3c15c2da029e HTTP/1.1\n    Host: testGroupDelete.test.hdfgroup.org\n    Authorization: authorization_string\n    \nSample Response\n---------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Thu, 15 Jan 2015 21:55:51 GMT\n    Content-Length: 270\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n\n    \n    {\n    \"hrefs\": [\n        {\"href\": \"http://testGroupDelete.test.hdfgroup.org/groups\", \"rel\": \"self\"}, \n        {\"href\": \"http://testGroupDelete.test.hdfgroup.org/groups/45a06719-9d01-11e4-9b1c-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://testGroupDelete.test.hdfgroup.org/\", \"rel\": \"home\"}\n    ]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`POST_Group`\n* :doc:`GET_Group`\n \n\n "
  },
  {
    "path": "docs/GroupOps/DELETE_Link.rst",
    "content": "**********************************************\nDELETE Link\n**********************************************\n\nDescription\n===========\nThe implementation of the DELETE operation deletes the link named in the URI.   \n\nGroups, datatypes, and datasets that are referenced by the link will **not** be\ndeleted.   To delete groups, datatypes or datasets, use the appropriate DELETE operation\nfor those objects.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    DELETE /groups/<id>/links/<name> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n* *<id>* is the UUID of the group the link is a member of.\n* *<name>* is the URL-encoded name of the link.\n    \n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nAn attempt to delete the root group will return 403 - Forbidden.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. 
code-block:: http\n\n    DELETE /groups/25dd052b-a06d-11e4-a29e-3c15c2da029e/links/deleteme HTTP/1.1\n    Content-Length: 0\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: tall_updated.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Tue, 20 Jan 2015 06:25:37 GMT\n    Content-Length: 299\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n  \n    {\n    \"hrefs\": [\n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/25dd052b-a06d-11e4-a29e-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/25dd052b-a06d-11e4-a29e-3c15c2da029e\", \"rel\": \"owner\"}\n        ]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`../DatasetOps/DELETE_Dataset`\n* :doc:`../DatatypeOps/DELETE_Datatype`\n* :doc:`DELETE_Group`\n* :doc:`GET_Link`\n* :doc:`GET_Groups`\n* :doc:`POST_Group`\n \n\n "
  },
  {
    "path": "docs/GroupOps/GET_Group.rst",
    "content": "**********************************************\nGET Group\n**********************************************\n\nDescription\n===========\nReturns information about the group with the UUID given in the URI.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /groups/<id> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n*<id>* is the UUID of the requested group.\n    \nRequest Parameters\n------------------\n\ninclude_links\n^^^^^^^^^^^^^\n\nIf this request parameter is provided, the links of the group are included in the response.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nid\n^^\nThe UUID of the requested group\n\nattributeCount\n^^^^^^^^^^^^^^\nThe number of attributes belonging to the group.\n\nlinkCount\n^^^^^^^^^\nThe number of links belonging to the group.\n\ncreated\n^^^^^^^\nA timestamp giving the time the group was created in UTC (ISO-8601 format).\n\nlastModified\n^^^^^^^^^^^^\nA timestamp giving the most recent time the group has been modified (i.e. attributes or \nlinks updated) in UTC (ISO-8601 format).\n\nhrefs\n^^^^^\nAn array of hypertext links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. 
code-block:: http\n\n    GET /groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 16 Jan 2015 20:06:08 GMT\n    Content-Length: 660\n    Etag: \"2c410d1c469786f25ed0075571a8e7a3f313cec1\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n\n    {\n    \"id\": \"052dcbbd-9d33-11e4-86ce-3c15c2da029e\",\n    \"attributeCount\": 2,\n    \"linkCount\": 2,\n    \"created\": \"2015-01-16T03:47:22Z\", \n    \"lastModified\": \"2015-01-16T03:47:22Z\",    \n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/links\", \"rel\": \"links\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/attributes\", \"rel\": \"attributes\"}\n        ]\n     }\n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Group`\n* :doc:`GET_Links`\n* :doc:`GET_Groups`\n* :doc:`POST_Group`\n* :doc:`../AttrOps/GET_Attribute`\n \n\n "
  },
  {
    "path": "docs/GroupOps/GET_Groups.rst",
    "content": "**********************************************\nGET Groups\n**********************************************\n\nDescription\n===========\nReturns UUIDs for all the groups in a domain (other than the root group).\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /groups HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nRequest Parameters\n------------------\nThis implementation of the operation uses the following request parameters (both \noptional):\n\nLimit\n^^^^^\nIf provided, a positive integer value specifying the maximum number of UUID's to return.\n\nMarker\n^^^^^^\nIf provided, a string value indicating that only UUID's that occur after the\nmarker value will be returned.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\ngroups\n^^^^^^\nAn array of UUIDs - one for each group (including the root group) in the domain.\nIf the \"Marker\" and/or \"Limit\" request parameters are used, a subset of the UUIDs\nmay be returned.\n\nhrefs\n^^^^^\nAn array of hypertext links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. 
code-block:: http\n\n    GET /groups HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 16 Jan 2015 21:53:48 GMT\n    Content-Length: 449\n    Etag: \"83575a7865761b6d4eaf5d285ab1de062c49250b\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n    \n    {\n    \"groups\": [\n        \"052e001e-9d33-11e4-9a3d-3c15c2da029e\", \n        \"052e13bd-9d33-11e4-91a6-3c15c2da029e\", \n        \"052e5ae8-9d33-11e4-888d-3c15c2da029e\", \n        \"052e700a-9d33-11e4-9fe4-3c15c2da029e\", \n        \"052e89c7-9d33-11e4-b9bc-3c15c2da029e\"\n        ],\n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/groups\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}\n        ] \n    }\n    \nSample Request with Marker and Limit\n------------------------------------\n\nThis example uses the \"Marker\" request parameter to return only UUIDs after the given\nMarker value.\nThe \"Limit\" request parameter is used to limit the number of UUIDs in the response to 5.\n\n.. code-block:: http\n\n    GET /groups?Marker=cba6e3fd-9dbd-11e4-bf4a-3c15c2da029e&Limit=5 HTTP/1.1\n    host: group1k.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n \nSample Response with Marker and Limit\n-------------------------------------\n\n .. code-block:: http\n \n    HTTP/1.1 200 OK\n    Date: Fri, 16 Jan 2015 22:02:46 GMT\n    Content-Length: 458\n    Etag: \"49221af3436fdaca7e26c74b491ccf8698555f08\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n   \n .. 
code-block:: json\n    \n    {\n    \"groups\": [\n        \"cba6fc19-9dbd-11e4-846e-3c15c2da029e\", \n        \"cba71842-9dbd-11e4-abd0-3c15c2da029e\", \n        \"cba73442-9dbd-11e4-a6e9-3c15c2da029e\", \n        \"cba74fc5-9dbd-11e4-bc15-3c15c2da029e\", \n        \"cba77c2e-9dbd-11e4-9c71-3c15c2da029e\"\n        ],  \n    \"hrefs\": [\n        {\"href\": \"http://group1k.test.hdfgroup.org/groups\", \"rel\": \"self\"}, \n        {\"href\": \"http://group1k.test.hdfgroup.org/groups/cb9ebf11-9dbd-11e4-9e83-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://group1k.test.hdfgroup.org/\", \"rel\": \"home\"}\n        ]\n    } \n        \nRelated Resources\n=================\n\n* :doc:`DELETE_Group`\n* :doc:`GET_Links`\n* :doc:`GET_Group`\n* :doc:`POST_Group`\n \n\n "
  },
  {
    "path": "docs/GroupOps/GET_Link.rst",
    "content": "**********************************************\nGET Link\n**********************************************\n\nDescription\n===========\nReturns information about a Link.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /groups/<id>/links/<name> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n* *<id>* is the UUID of the group the link is a member of.\n* *<name>* is the URL-encoded name of the link.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nlink[\"title\"]\n^^^^^^^^^^^^^\nThe name of the link.\n\nlink[\"collection\"]\n^^^^^^^^^^^^^^^^^^\nFor hard links, the domain collection for which the object the link points to is a \nmember of.  The value will be one of: \"groups\", \"datasets\", \"datatypes\".\nFor symbol links, this element is not present.\n\nlink[\"class\"]\n^^^^^^^^^^^^^\nIndicates the type of link.  
One of the following values will be returned:\n\n* H5L_TYPE_HARD: A direct link to a group, dataset, or committed datatype object in the domain\n* H5L_TYPE_SOFT: A symbolic link that gives a path to an object within the domain (object may or may not be present).\n* H5L_TYPE_EXTERNAL: A symbolic link to an object that is external to the domain\n* H5L_TYPE_UDLINK: A user-defined link (this implementation only provides title and class for user-defined links)\n\nlink[\"h5path\"]\n^^^^^^^^^^^^^^\nFor symbolic links (\"H5L_TYPE_SOFT\" or \"H5L_TYPE_EXTERNAL\"), the path to the resource the\nlink references.  \n\nlink[\"h5domain\"]\n^^^^^^^^^^^^^^^^\nFor external links, the path of the external domain containing the object that is linked.\n*Note:* The domain may or may not exist.  Use GET / with the domain to verify.\n\nlink[\"id\"]\n^^^^^^^^^^^^\nFor hard links, the uuid of the object the link points to.  For symbolic links this\nelement is not present\n\ncreated\n^^^^^^^\nA timestamp giving the time the link was created in UTC (ISO-8601 format).\n\nlastModified\n^^^^^^^^^^^^\nA timestamp giving the most recent time the group has been\nmodified in UTC (ISO-8601 format).\n\nhrefs\n^^^^^\nAn array of hypertext links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request - Hard Link\n--------------------------\n\n.. code-block:: http\n\n    GET /groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/links/g1 HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response - Hard Link\n---------------------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Fri, 16 Jan 2015 22:42:05 GMT\n    Content-Length: 688\n    Etag: \"70c5c4f2f7cac9f7f155fe026f4c492f65e3fb8e\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n        \n    {\n    \"link\": {\n        \"title\": \"g1\", \n        \"collection\": \"groups\", \n        \"class\": \"H5L_TYPE_HARD\", \n        \"id\": \"052e001e-9d33-11e4-9a3d-3c15c2da029e\"\n    }, \n    \"created\": \"2015-01-16T03:47:22Z\",\n    \"lastModified\": \"2015-01-16T03:47:22Z\", \n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/links/g1\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052e001e-9d33-11e4-9a3d-3c15c2da029e\", \"rel\": \"target\"}\n     ]\n    } \n       \nSample Request - Soft Link\n--------------------------\n\n.. code-block:: http\n\n    GET /groups/052e700a-9d33-11e4-9fe4-3c15c2da029e/links/slink HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0    \n    \nSample Response - Soft Link\n---------------------------\n\n.. code-block:: http\n    \n    HTTP/1.1 200 OK\n    Date: Fri, 16 Jan 2015 23:29:27 GMT\n    Content-Length: 620\n    Etag: \"7bd777729ac5af261c85c7e3b87ef0045739bf77\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n\n    {\n    \"link\": {\n            \"title\": \"slink\",\n            \"class\": \"H5L_TYPE_SOFT\",\n            \"h5path\": \"somevalue\"\n             }, \n    \"created\": \"2015-01-16T03:47:22Z\",\n    \"lastModified\": \"2015-01-16T03:47:22Z\", \n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052e700a-9d33-11e4-9fe4-3c15c2da029e/links/slink\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e\", \"rel\": \"root\"},\n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052e700a-9d33-11e4-9fe4-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/#h5path(somevalue)\", \"rel\": \"target\"}\n      ] \n    }\n         \n        \nSample Request - External Link\n------------------------------\n\n.. code-block:: http\n\n    GET /groups/052e5ae8-9d33-11e4-888d-3c15c2da029e/links/extlink HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    \nSample Response - External Link\n-------------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Tue, 20 Jan 2015 05:47:55 GMT\n    Content-Length: 644\n    Etag: \"1b7a228acdb19f7259ed8a1b3ba4bc442b405ef9\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n\n    {\n    \"link\": {\n        \"title\": \"extlink\", \n        \"class\": \"H5L_TYPE_EXTERNAL\",\n        \"h5path\": \"somepath\",\n        \"h5domain\": \"somefile\"\n    }, \n    \"created\": \"2015-01-16T03:47:22Z\",\n    \"lastModified\": \"2015-01-16T03:47:22Z\", \n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052e5ae8-9d33-11e4-888d-3c15c2da029e/links/extlink\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e\", \"rel\": \"root\"},\n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/052e5ae8-9d33-11e4-888d-3c15c2da029e\", \"rel\": \"owner\"}, \n        {\"href\": \"http://somefile.hdfgroup.org#h5path(somepath)\", \"rel\": \"target\"}\n      ] \n    }\n    \n    \n        \nSample Request - User Defined Link\n----------------------------------\n\n.. code-block:: http\n\n    GET /groups/0262c3a6-a069-11e4-8905-3c15c2da029e/links/udlink HTTP/1.1\n    host: tall_with_udlink.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n\n\nSample Response - User Defined Link\n-----------------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Tue, 20 Jan 2015 05:56:00 GMT\n    Content-Length: 576\n    Etag: \"2ab310eba3bb4282f84d643fcc30e591da485576\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. 
code-block:: json\n\n    {\n    \"link\": {\n        \"class\": \"H5L_TYPE_USER_DEFINED\", \n        \"title\": \"udlink\"\n        }, \n    \"created\": \"2015-01-16T03:47:22Z\",\n    \"lastModified\": \"2015-01-16T03:47:22Z\", \n    \"hrefs\": [\n        {\"href\": \"http://tall_with_udlink.test.hdfgroup.org/groups/0262c3a6-a069-11e4-8905-3c15c2da029e/links/udlink\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall_with_udlink.test.hdfgroup.org/groups/0260b214-a069-11e4-a840-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall_with_udlink.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://tall_with_udlink.test.hdfgroup.org/groups/0262c3a6-a069-11e4-8905-3c15c2da029e\", \"rel\": \"owner\"}\n    ]       \n    }\n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Link`\n* :doc:`GET_Links`\n* :doc:`PUT_Link`\n \n\n "
  },
  {
    "path": "docs/GroupOps/GET_Links.rst",
    "content": "**********************************************\nGET Links\n**********************************************\n\nDescription\n===========\nReturns all the links for a given group.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    GET /groups/<id>/links HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n* *<id>* is the UUID of the group the links to be returned are a member of.\n    \nRequest Parameters\n------------------\nThis implementation of the operation uses the following request parameters (both \noptional):\n\nLimit\n^^^^^\nIf provided, a positive integer value specifying the maximum number of links to return.\n\nMarker\n^^^^^^\nIf provided, a string value indicating that only links that occur after the\nmarker value will be returned.\n\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nlinks\n^^^^^\nAn array of JSON objects giving information about each link returned.\nSee :doc:`GET_Link` for a description of the link response elements.\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\n.. 
code-block:: http\n\n    GET /groups/0ad37be1-a06f-11e4-8651-3c15c2da029e/links HTTP/1.1\n    host: tall.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0  \n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Tue, 20 Jan 2015 06:55:19 GMT\n    Content-Length: 607\n    Etag: \"49edcce6a8f724108d41d52c98002d6255286ff8\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n   \n    {\n    \"links\": [\n        {\n            \"title\": \"g1.2.1\",\n            \"class\": \"H5L_TYPE_HARD\",\n            \"collection\": \"groups\",\n            \"id\": \"0ad38d45-a06f-11e4-a909-3c15c2da029e\"\n        }, \n        {\n            \"title\": \"extlink\",\n            \"class\": \"H5L_TYPE_EXTERNAL\",\n            \"h5path\": \"somepath\",\n            \"file\": \"somefile\"  \n        }\n    ],\n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/groups/0ad37be1-a06f-11e4-8651-3c15c2da029e/links\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/0ad2e151-a06f-11e4-bc68-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/0ad37be1-a06f-11e4-8651-3c15c2da029e\", \"rel\": \"owner\"}\n        ]\n    } \n    \nSample Request Batch\n--------------------\n\n.. code-block:: http\n\n    GET /groups/76bddb1e-a06e-11e4-86d6-3c15c2da029e/links?Marker=g0089&Limit=5 HTTP/1.1\n    host: group1k.test.hdfgroup.org\n    Accept-Encoding: gzip, deflate\n    Accept: */*\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0  \n    \nSample Response Batch\n---------------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 200 OK\n    Date: Tue, 20 Jan 2015 07:30:03 GMT\n    Content-Length: 996\n    Etag: \"221affdeae54076d3493ce8ce0ed80ddb89c6e27\"\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n    \n.. code-block:: json\n   \n     \n    {\n    \"links\": [\n        {\"title\": \"g0090\", \"id\": \"76c53485-a06e-11e4-96f3-3c15c2da029e\", \"class\": \"H5L_TYPE_HARD\", \"collection\": \"groups\"}, \n        {\"title\": \"g0091\", \"id\": \"76c54d40-a06e-11e4-a342-3c15c2da029e\", \"class\": \"H5L_TYPE_HARD\", \"collection\": \"groups\"}, \n        {\"title\": \"g0092\", \"id\": \"76c564f5-a06e-11e4-bccd-3c15c2da029e\", \"class\": \"H5L_TYPE_HARD\", \"collection\": \"groups\"}, \n        {\"title\": \"g0093\", \"id\": \"76c57d19-a06e-11e4-a9a8-3c15c2da029e\", \"class\": \"H5L_TYPE_HARD\", \"collection\": \"groups\"}, \n        {\"title\": \"g0094\", \"id\": \"76c5941c-a06e-11e4-b641-3c15c2da029e\", \"class\": \"H5L_TYPE_HARD\", \"collection\": \"groups\"}\n      ],\n    \"hrefs\": [\n        {\"href\": \"http://group1k.test.hdfgroup.org/groups/76bddb1e-a06e-11e4-86d6-3c15c2da029e/links\", \"rel\": \"self\"}, \n        {\"href\": \"http://group1k.test.hdfgroup.org/groups/76bddb1e-a06e-11e4-86d6-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://group1k.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://group1k.test.hdfgroup.org/groups/76bddb1e-a06e-11e4-86d6-3c15c2da029e\", \"rel\": \"owner\"}\n      ]\n    } \n       \nRelated Resources\n=================\n\n* :doc:`DELETE_Link`\n* :doc:`GET_Link`\n* :doc:`GET_Group`\n* :doc:`PUT_Link`\n \n\n "
  },
  {
    "path": "docs/GroupOps/POST_Group.rst",
    "content": "**********************************************\nPOST Group\n**********************************************\n\nDescription\n===========\nCreates a new Group.\n\n*Note:* By default he new Group will not be linked from any other group in the domain.\nA link element can be included in the request body to have an existing group link to \nthe new group.\nAlternatively, use the *PUT link* operation to link the new \ngroup.\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    POST /groups HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nRequest Elements\n----------------\nOptionally the request body can be a JSON object that has a link key with sub-keys:\n\nid\n^^\nThe UUID of the group the new group should be linked to.  If the UUID is not valid,\nthe request will fail and a new group will not be created.\n\nname\n^^^^\nThe name of the new link.\n\n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nid\n^^\nThe UUID of the newly created group\n\nattributeCount\n^^^^^^^^^^^^^^\nThe number of attributes belonging to the group.\n\nlinkCount\n^^^^^^^^^\nThe number of links belonging to the group.\n\ncreated\n^^^^^^^\nA timestamp giving the time the group was created in UTC (ISO-8601 format).\n\nlastModified\n^^^^^^^^^^^^\nA timestamp giving the most recent time the group has been modified (i.e. 
attributes or \nlinks updated) in UTC (ISO-8601 format).\n\nhrefs\n^^^^^\nAn array of links to related resources.  See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request\n--------------\n\nCreate a new, un-linked Group.\n\n.. code-block:: http\n\n    POST /groups HTTP/1.1\n    Content-Length: 0\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: testGroupPost.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \nSample Response\n---------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Content-Length: 705\n    Content-Location: http://testGroupPost.test.hdfgroup.org/groups/777978c5-a078-11e4-8755-3c15c2da029e\n    Server: TornadoServer/3.2.2\n    Location: http://testGroupPost.test.hdfgroup.org/groups/777978c5-a078-11e4-8755-3c15c2da029e\n    Date: Tue, 20 Jan 2015 07:46:38 GMT\n    Content-Type: application/json\n    \n.. 
code-block:: json\n  \n    {\n    \"id\": \"777978c5-a078-11e4-8755-3c15c2da029e\",\n    \"created\": \"2015-01-20T07:46:38Z\", \n    \"lastModified\": \"2015-01-20T07:46:38Z\", \n    \"attributeCount\": 0, \n    \"linkCount\": 0,\n    \"hrefs\": [\n        {\"href\": \"http://testGroupPost.test.hdfgroup.org/groups/777978c5-a078-11e4-8755-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://testGroupPost.test.hdfgroup.org/groups/777978c5-a078-11e4-8755-3c15c2da029e/links\", \"rel\": \"links\"}, \n        {\"href\": \"http://testGroupPost.test.hdfgroup.org/groups/777109b3-a078-11e4-8512-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://testGroupPost.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://testGroupPost.test.hdfgroup.org/groups/777978c5-a078-11e4-8755-3c15c2da029e/attributes\", \"rel\": \"attributes\"}\n      ]\n    }\n    \nSample Request with Link\n------------------------\n\nCreate a new Group, link to root (which has uuid of \"36b921f3-...\") as \"linked_group\".\n\n.. code-block:: http\n\n    POST /groups HTTP/1.1\n    Content-Length: 79\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: testGroupPostWithLink.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n\n    {\n    \"link\": {\n        \"id\": \"36b921f3-a07a-11e4-88da-3c15c2da029e\", \n        \"name\": \"linked_group\"\n      }\n    }\n    \nSample Response with Link\n-------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Content-Length: 745\n    Content-Location: http://testGroupPostWithLink.test.hdfgroup.org/groups/36cbe08a-a07a-11e4-8301-3c15c2da029e\n    Server: TornadoServer/3.2.2\n    Location: http://testGroupPostWithLink.test.hdfgroup.org/groups/36cbe08a-a07a-11e4-8301-3c15c2da029e\n    Date: Tue, 20 Jan 2015 07:59:09 GMT\n    Content-Type: application/json\n    \n.. 
code-block:: json\n     \n    {\n    \"id\": \"36cbe08a-a07a-11e4-8301-3c15c2da029e\",   \n    \"attributeCount\": 0, \n    \"linkCount\": 0, \n    \"created\": \"2015-01-20T07:59:09Z\", \n    \"lastModified\": \"2015-01-20T07:59:09Z\", \n    \"hrefs\": [\n        {\"href\": \"http://testGroupPostWithLink.test.hdfgroup.org/groups/36cbe08a-a07a-11e4-8301-3c15c2da029e\", \"rel\": \"self\"}, \n        {\"href\": \"http://testGroupPostWithLink.test.hdfgroup.org/groups/36cbe08a-a07a-11e4-8301-3c15c2da029e/links\", \"rel\": \"links\"}, \n        {\"href\": \"http://testGroupPostWithLink.test.hdfgroup.org/groups/36b921f3-a07a-11e4-88da-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://testGroupPostWithLink.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://testGroupPostWithLink.test.hdfgroup.org/groups/36cbe08a-a07a-11e4-8301-3c15c2da029e/attributes\", \"rel\": \"attributes\"}\n        ]\n    }\n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Group`\n* :doc:`GET_Links`\n* :doc:`PUT_Link`\n* :doc:`GET_Group`\n* :doc:`GET_Groups`\n \n\n "
  },
  {
    "path": "docs/GroupOps/PUT_Link.rst",
    "content": "**********************************************\nPUT Link\n**********************************************\n\nDescription\n===========\nCreates a new link in a given group.\n\nEither hard, soft, or external links can be created based on the request elements.\nSee examples below.\n\n*Note:* any existing link with the same name will be replaced with the new link.\n\n\nRequests\n========\n\nSyntax\n------\n.. code-block:: http\n\n    PUT /groups/<id>/links/<name> HTTP/1.1\n    Host: DOMAIN\n    Authorization: <authorization_string>\n    \n* *<id>* is the UUID of the group that the link will be created in.\n* *<name>* is the URL-encoded name of the link.\n    \nRequest Parameters\n------------------\nThis implementation of the operation does not use request parameters.\n\nRequest Headers\n---------------\nThis implementation of the operation uses only the request headers that are common\nto most requests.  See :doc:`../CommonRequestHeaders`\n\nRequest Elements\n----------------\nThe request body must include a JSON object that has the following key:\n\nid\n^^\nThe UUID of the group the new group should be linked to.  If the UUID is not valid,\nthe request will fail and a new group will not be created.\nIf this key is present, the h5path and h5domain keys will be ignored\n\nh5path\n^^^^^^\nA string describing a path to an external resource.  If this key is present an\nsoft or external link will be created.\n\nh5domain\n^^^^^^^^\nA string giving the external domain where the resource is present.\nIf this key is present, the h5path key must be provided as well.\n \n\nResponses\n=========\n\nResponse Headers\n----------------\n\nThis implementation of the operation uses only response headers that are common to \nmost responses.  See :doc:`../CommonResponseHeaders`.\n\nResponse Elements\n-----------------\n\nOn success, a JSON response will be returned with the following elements:\n\nhrefs\n^^^^^\nAn array of links to related resources.  
See :doc:`../Hypermedia`.\n\nSpecial Errors\n--------------\n\nThe implementation of the operation does not return special errors.  For general \ninformation on standard error codes, see :doc:`../CommonErrorResponses`.\n\nExamples\n========\n\nSample Request - Create Hard Link\n---------------------------------\n\nIn group \"e0309a0a-...\", create a hard link named \"g3\" that points to the object \nwith uuid \"e032ad9c-...\".\n\n.. code-block:: http\n\n    PUT /groups/e0309a0a-a198-11e4-b127-3c15c2da029e/links/g3 HTTP/1.1\n    Content-Length: 46\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: tall_updated.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n\n    {\"id\": \"e032ad9c-a198-11e4-8d53-3c15c2da029e\"}\n    \nSample Response - Create Hard Link\n----------------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Wed, 21 Jan 2015 18:11:09 GMT\n    Content-Length: 418\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n\n    \n.. code-block:: json\n  \n    {\n    \"hrefs\": [\n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e/links/g3\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e\", \"rel\": \"owner\"}\n      ]\n    }\n    \nSample Request - Create Soft Link\n---------------------------------\n\nIn group \"e0309a0a-...\", create a soft link named \"softlink\" that contains the path \n\"/somewhere\".\n\n.. 
code-block:: http\n\n    PUT /groups/e0309a0a-a198-11e4-b127-3c15c2da029e/links/softlink HTTP/1.1\n    Content-Length: 24\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: tall_updated.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n   \n    {\"h5path\": \"/somewhere\"}\n    \nSample Response - Create Soft Link\n----------------------------------\n\n.. code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Wed, 21 Jan 2015 18:35:26 GMT\n    Content-Length: 424\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n  \n.. code-block:: json\n      \n    {\n    \"hrefs\": [\n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e/links/softlink\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e\", \"rel\": \"owner\"}\n      ]\n    }\n    \nSample Request - Create External Link\n-------------------------------------\n\nIn group \"d2f8bd6b-...\", create an external link named \"extlink\" that references the  \nobject at path: \"/somewhere\" in domain: \"external_target.test.hdfgroup.org\".\n\n.. code-block:: http\n\n    PUT /groups/d2f8bd6b-a1b1-11e4-ae1c-3c15c2da029e/links/extlink HTTP/1.1\n    Content-Length: 69\n    User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0\n    host: tall_updated.test.hdfgroup.org\n    Accept: */*\n    Accept-Encoding: gzip, deflate\n    \n.. code-block:: json\n   \n    {\"h5domain\": \"external_target.test.hdfgroup.org\", \"h5path\": \"/dset1\"}\n    \nSample Response - Create External Link\n--------------------------------------\n\n.. 
code-block:: http\n\n    HTTP/1.1 201 Created\n    Date: Wed, 21 Jan 2015 21:09:45 GMT\n    Content-Length: 423\n    Content-Type: application/json\n    Server: TornadoServer/3.2.2\n  \n.. code-block:: json\n         \n    {\n    \"hrefs\": [\n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/d2f8bd6b-a1b1-11e4-ae1c-3c15c2da029e/links/extlink\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/d2f8bd6b-a1b1-11e4-ae1c-3c15c2da029e\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/\", \"rel\": \"home\"}, \n        {\"href\": \"http://tall_updated.test.hdfgroup.org/groups/d2f8bd6b-a1b1-11e4-ae1c-3c15c2da029e\", \"rel\": \"owner\"}\n        ]\n    }\n    \n    \nRelated Resources\n=================\n\n* :doc:`DELETE_Link`\n* :doc:`GET_Link`\n* :doc:`GET_Links`\n* :doc:`GET_Group`\n \n\n "
  },
  {
    "path": "docs/GroupOps/index.rst",
    "content": "####################\nGroups\n####################\n\nGroups are objects that can be used to organize objects within a domain.  Groups contain\n*links* which can reference other objects (datasets, groups or committed datatypes).\nThere are four different types of links that can be used:\n\n* hard: A direct link to a group, dataset, or committed datatype object in the domain.\n* soft: A symbolic link that gives a path to an object within the domain (object may or may not be present).\n* external: A symbolic link to an object that is external to the domain.\n* user-defined: A user-defined link (this implementation only provides title and class for user-defined links).\n\nGroups all have attributes which can be used to store meta-data about the group.\n\nCreating Groups\n---------------\n\nUse the :doc:`POST_Group` to create new Groups.  Initially the new group will have no\nlinks and no attributes.\n\n\nGetting information about Groups\n--------------------------------\n\nUse :doc:`GET_Group` to get information about a group: attribute count, link count,\ncreation and modification times.\n\nTo retrieve the UUIDs of all the groups in a domain, use :doc:`GET_Groups`.\n\nTo retrieve the links of a group use :doc:`GET_Links`. Use :doc:`GET_Link` to get\ninformation about a specific link.\n\nTo get a group's attributes, use :doc:`../AttrOps/GET_Attributes`. \n\nUpdating Links\n---------------\n\nTo create a hard, soft, or external link, use :doc:`PUT_Link`.   \n\nTo delete a link use :doc:`DELETE_Link`.\n\n*Note*: deleting a link doesn't delete the object that it refers to.\n\n\nDeleting Groups\n---------------\nUse :doc:`DELETE_Group` to remove a group.  All attributes and links of the group\nwill be deleted.\n\n*Note:* deleting a group will not delete any objects (datasets or other groups) that the\nthe group's links points to.  These objects may become *anonymous*, i.e. 
they are not\nreferenced by any link, but can still be accessed via ``GET`` request with the object\nuuid.\n\nList of Operations\n------------------\n\n.. toctree::\n   :maxdepth: 1\n\n   DELETE_Group\n   DELETE_Link\n   GET_Group\n   GET_Groups\n   GET_Link\n   GET_Links\n   POST_Group\n   PUT_Link\n    \n    \n"
  },
  {
    "path": "docs/Hypermedia.rst",
    "content": "*************************\nHypermedia\n*************************\n\nh5serv supports the REST convention of **HATEOAS** or *Hypermedia as the Engine of \nApplication State*.  The idea is (see http://en.wikipedia.org/wiki/HATEOS for a full \nexplanation) is that each response include links to related resources related to \nthe requested resource.\n\nFor example, consider the request for a dataset: ``GET /datasets/<id>``.  The response\nwill be a JSON representation of the dataset describing it's type, shape, and other\naspects.  Related resources to the dataset would include:\n\n * the dataset's attributes\n * the dataset's value\n * the dataset collection of the domain\n * the root group of the domain the dataset is in\n * the domain resource\n \nSo the ``GET /datasets/<id>`` response includes a key ``hrefs`` that contains an\na JSON array.  Each array element has a key: ``href`` - the related resource, and a key:\n``rel`` that denotes the type of relation.   Example:\n\n.. code-block:: json\n       \n    {\n    \"hrefs\": [\n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/<id>\", \"rel\": \"self\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/groups/<id>\", \"rel\": \"root\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/<id>/attributes\", \"rel\": \"attributes\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/datasets/<id>/value\", \"rel\": \"data\"}, \n        {\"href\": \"http://tall.test.hdfgroup.org/\", \"rel\": \"home\"}\n      ] \n    }\n    \nThis enables clients to \"explore\" the api without detailed knowledge of the API.\n\nThis is the list of relations used in h5serv:\n\n * attributes - the attributes of the resource \n * data - the resources data (used for datasets)\n * database - the collection of all datasets in the domain\n * groupbase - the collection of all groups in the domain\n * home - the domain the resource is a member of\n * owner - the containing object of this resource (e.g. 
the group an attribute is a member of)\n * root - the root group of the domain the resource is a member of\n * self - this resource\n * typebase - the collection of all committed types in the domain\n"
  },
  {
    "path": "docs/Installation/ServerSetup.rst",
    "content": "###################\nInstalling h5serv\n###################\n\nYou should find h5serv quite easy to setup.  The server (based on Python Tornado) is \nself-contained, so you will not need to setup Apache or other web server software to utilize\nh5serv.\n\n\nPrerequisites\n-------------\n\nA computer running a 64-bit version of Windows, Mac OS X, or Linux.\n\nYou will also need the following Python packages:\n\n* Python 2.7 or later\n* NumPy 1.10.4 or later\n* h5py 2.5 or later\n* tornado 4.0.2 or later\n* watchdog 0.8.3 or later\n* requests 2.3 or later (for client tests)\n\nIf you are not familiar with installing Python packages, the easiest route is to \nuse a package manager such as Anaconda (as described below).\n\nIf you have a git client installed on your system, you can directly download the h5serv \nsource from GitHub: ``git clone --recursive https://github.com/HDFGroup/h5serv.git``.  \nOtherwise, you can download a zip file of the source from GitHub (as described below).\n\n\nInstalling on Windows\n---------------------\n\nAnaconda from Continuum Analytics can be used to easily manage the package dependencies \nneeded for HDF Server.  
\n\nIn a browser go to: http://continuum.io/downloads and click the \"Windows 64-bit \nPython 2.7 Graphical Installer\" button.\n\nInstall Anaconda using the default options.\n\nOnce Anaconda is installed select \"Anaconda Command Prompt\" from the start menu.\n\nIn the command window that appears, create a new anaconda environment using the following command:\n``conda create -n h5serv python=2.7 h5py tornado requests pytz``\n\nAnswer 'y' to the prompt, and the packages will be fetched.\n\nIn the same command window, run: ``activate h5serv``\n\nInstall the watchdog package (this is currently not available through Anaconda):\n``pip install watchdog``\n\nDownload the hdf5-json project: ``git clone https://github.com/HDFGroup/hdf5-json.git`` .\nAlternatively, in a browser go to: https://github.com/HDFGroup/hdf5-json and click the \n\"Download ZIP\" button (right side of page).   Download the zip file and extract to\nthe destination directory of your choice.  \n\nNext, cd to the hdf5-json folder and run: ``python setup.py install``.\n\nDownload the h5serv project: ``git clone https://github.com/HDFGroup/h5serv.git`` .\nAlternatively, download the source zip as described above. \n\nNext, in the command window, cd to the folder you extracted the source files to.\n\nRun: ``python h5serv``\nYou should see the output: \"Starting event loop on port: 5000\".\n\nYou may then see a security alert: \"Windows Firewall has blocked some features of this \nprogram\".  Click \"Allow access\" with the default option (Private network access).\n\nAt this point the server is running, waiting on any requests being sent to port 5000.\nGo to the \"verification\" section below to try out the service.\n\nInstalling on Linux/Mac OS X\n-----------------------------\n\nAnaconda from Continuum Analytics can be used to easily manage the package dependencies \nneeded for HDF Server.  
\n\nIn a browser go to: http://continuum.io/downloads and click the \"Mac OS X 64-bit \nPython 2.7 Graphical Installer\" button for Mac OS X or: \"Linux 64-bit Python 2.7\".\n\nInstall Anaconda using the default options.\n\nOnce Anaconda is installed, open a new shell and run the following on the command line:\n\n``conda create -n h5serv python=2.7 h5py tornado requests pytz``\n\nAnswer 'y' to the prompt, and the packages will be fetched.\n\nInstall the watchdog package (this is currently not available through Anaconda):\n``pip install watchdog``\n\nIn the same shell, run: ``source activate h5serv``\n\nDownload the hdf5-json project: ``git clone https://github.com/HDFGroup/hdf5-json.git`` .\nAlternatively, in a browser go to: https://github.com/HDFGroup/hdf5-json and click the \n\"Download ZIP\" button (right side of page).   Download the zip file and extract to\nthe destination directory of your choice.  \n\nNext, cd to the hdf5-json folder and run: ``python setup.py install``.\n\nDownload the h5serv project: ``git clone https://github.com/HDFGroup/h5serv.git`` .\nAlternatively, download the source zip as described above. \n\nNext, in the command window, cd to the folder you extracted the source files to.\n\nRun: ``python h5serv``\nYou should see the output: \"Starting event loop on port: 5000\".\n\nAt this point the server is running, waiting on any requests being sent to port 5000.\nGo to the \"verification\" section below to try out the service.\n\n\nVerification\n-------------\n\nTo verify that h5serv was installed correctly, you can run the test suite included\nwith the installation.  \n\nOpen a new shell (on Windows, run \"Annaconda Command Prompt\" from the start menu).\n\nIn this shell, run the following commands:\n\n* source activate h5serv  (just: activate h5serv on Windows)\n* cd <h5serv installation directory>\n* cd test\n* python testall.py\n\nAll tests should report OK. 
\n\nServer Configuration\n--------------------\n\nThe file ``h5serv/server/config.py`` provides several configuration options that can be\nused to customize h5serv.  Each of the options can be changed by:\n\n * Changing the value in the config.py file and re-starting the service.\n * Passing a command line option to ``h5serv`` on startup. E.g. ``python h5serv --port=7253``\n * Setting an environment variable with the option name in upper case.  E.g. ``export PORT=5000; python h5serv``\n\nThe config options are:\n\nport \n^^^^\nThe port that h5serv will listen on.  Change this if 5000 conflicts with another service.\n\nDefault: 5000\n \ndebug \n^^^^^\nIf ``True`` the server will report debug info (e.g. a stack trace) to the requester on \nerror.  If  ``False``, just the status code and message will be reported. \n\nDefault: ``True``\n\ndatapath\n^^^^^^^^\nA path indicating the directory where HDF5 files will be be stored.\n\n*Note*: Any HDF5 file content that you put in this directory will be exposed via the\nserver REST api (unless the domain's ACL is configured to prevent public access, see: \n:doc:`../AclOps`).\n\nDefault: ``../data/``\n\npublic_dir\n^^^^^^^^^^\nA list of directories under datapath which will be visible to any autenticated user's \nrequest.\n\nDefault: ``['public', 'test']``\n\ndomain\n^^^^^^\nThe base DNS path for domain access  (see comment to hdf5_ext config option).\n\nDefault. ``hdfgroup.org``\n\nhdf5_ext\n^^^^^^^^\n\nThe extension to assume for HDF5 files.  The REST requests don't assume an extension, so\na request such as:\n\n.. 
code-block:: http\n\n  GET /\n  HOST: tall.data.hdfgroup.org\n  \nTranslates to: \"Get the file tall.h5 in the directory given by datapath\".\n\nDefault: ``.h5``\n \ntoc_name\n^^^^^^^^\n\nName of the auto-generated HDF5 that provides a \"Table Of Contents\" list of all HDF5\nfiles in the datapath directory and sub-directories.\n\nDefault: ``.toc.h5``\n\nhome_dir\n^^^^^^^^\n\nA directory under data_path that will be the parent directory of user home directores.\nFor example if ``datapath`` is ``../data``, ``home_dir`` is ``home``, the authenticated request\nof ``GET /`` for userid ``knuth`` would return a list of files in the directory: \n``../data/home/knuth``.\n\nDefault: ``home``\n\nssl_port\n^^^^^^^^\n\nThe SSL port the server will listen on for HTTPS requests.\n\nDefault: 6050\n\nssl_cert\n^^^^^^^^\n\nLocation of the SSL cert.\n\ndefault: \n\nssl_key\n^^^^^^^\n\nThe SSL key.\n\ndefault:\n\nssl_cert_pwd\n^^^^^^^^^^^^\n\nThe SSL cert password\n\ndefault:\n\npassword_uri\n^^^^^^^^^^^^\n\nResource path to be used for user authentication.\nCurrently two methods are supported:\n\nHDF5 Password file: An HDF5 that contains userids and (encrypted) passwords.\nSee: :doc:`../AdminTools`.  In this case the password_uri config is a path\nto the password file.\n\nMongoDB: A MongoDB database that contains a \"users\" collection of userids and \npasswords.  In this case the password_uri would be of the form: \n``mongodb://<mongo_ip>:<port>`` where ``<mongo_ip>`` is the IP \naddress of the host running the mongo database and ``<port>`` is the port of \nthe mongo database (typically 27017).\n\ndefault: ``../util/admin/passwd.h5``\n\nmongo_dbname\n^^^^^^^^^^^^\n\nMongo database named used for MongoDB-based authentication as described above.\n\ndefault: ``hdfdevtest``\n\nstatic_url\n^^^^^^^^^^\n\nURI path that will be used to map any static HTML content to be displayed by the server.\n\ndefault: ``/views/(.*)``\n\nstatic_path\n^^^^^^^^^^^\n\nFile path for files (i.e. 
regular HTML files) to be hosted statically.\n\ndefault: ``../static``\n\ncors_domain\n^^^^^^^^^^^\n\nDomains to allow for CORS (cross-origin resource sharing).  Use ``*`` to allow\nany domain, None to disallow.\n\ndefault: ``*``\n\nlog_file\n^^^^^^^^\n\nFile path for server log files.  Set to None to have log output go to standard out.\n\nlog_level\n^^^^^^^^^\n\nVerbosity level for logging.  One of: ``ERROR, WARNING, INFO, DEBUG, NOTSET``.\n\ndefault: ``INFO``\n\nbackground_timeout\n^^^^^^^^^^^^^^^^^^\n\nTime interval in milliseconds to check for updates in the datapath folder (e.g. a file\nthat is added through some external process).  Set to 0 to disable background processing.\n\ndefault: 1000\n\n\nData files\n----------\n\nCopy any HDF5 files you would like exposed by the service to the datapath directory\n(h5serv/data).  If you do not wish to have the files updatable by the service make the \nfiles read-only.\n\nOn the first request to the service, a Table of Contents (TOC) file will be generated which\nwill contain links to all HDF5 files in the data folder (and sub-folders).\n\n*Note:* Do not modify files once they have been placed in the datapath directory.  h5serv\ninventories new files on first access, but won't see some changes (e.g. new group is created)\nmade to the file outside the REST api.\n\n*Note:* HDF5 files that are newly created (copied into) the datapath directory will be \"noticed\"\nby the service and added into the TOC.\n"
  },
  {
    "path": "docs/Installation/index.rst",
    "content": "###################\nInstallation \n###################\n\n.. toctree::\n   :maxdepth: 2\n\n   ServerSetup\n"
  },
  {
    "path": "docs/Introduction/index.rst",
    "content": "###################\nIntroduction\n###################\n\nh5serv is a web service that can be used to send and receive HDF5 data.   \nh5serv uses a REST interface to support CRUD (create, read, update, delete) operations on \nthe full spectrum of HDF5 objects including: groups, links, datasets, attributes, and \ncommitted data types.   As a REST-based service a variety of clients can be developed in \nJavaScript, Python, C, and other common languages.\n\n \n\n"
  },
  {
    "path": "docs/License/index.rst",
    "content": "#######################\nLicense and Legal Info\n#######################\n \nCopyright Notice and License Terms for h5serv Software Service, Libraries and Utilities\n---------------------------------------------------------------------------------------\n\nh5serv (HDF5 REST Server) Service, Libraries and Utilities\n\nCopyright (c) |copyright|\n\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without \nmodification, are permitted for any purpose (including commercial purposes) \nprovided that the following conditions are met:\n\n1. Redistributions of source code must retain the above copyright notice, \n   this list of conditions, and the following disclaimer.\n\n2. Redistributions in binary form must reproduce the above copyright notice, \n   this list of conditions, and the following disclaimer in the documentation \n   and/or materials provided with the distribution.\n\n3. In addition, redistributions of modified forms of the source or binary \n   code must carry prominent notices stating that the original code was \n   changed and the date of the change.\n\n4. All publications or advertising materials mentioning features or use of \n   this software are asked, but not required, to acknowledge that it was \n   developed by The HDF Group and credit the contributors.\n\n5. Neither the name of The HDF Group, nor the name of any Contributor may \n   be used to endorse or promote products derived from this software \n   without specific prior written permission from The HDF Group or the \n   Contributor, respectively.\n\nDISCLAIMER: \nTHIS SOFTWARE IS PROVIDED BY THE HDF GROUP AND THE CONTRIBUTORS \n\"AS IS\" WITH NO WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED.  In no \nevent shall The HDF Group or the Contributors be liable for any damages \nsuffered by the users arising out of the use of this software, even if \nadvised of the possibility of such damage. \n\n"
  },
  {
    "path": "docs/Makefile",
    "content": "# Makefile for Sphinx documentation\n#\n\n# You can set these variables from the command line.\nSPHINXOPTS    =\nSPHINXBUILD   = sphinx-build\nPAPER         =\nBUILDDIR      = _build\n\n# User-friendly check for sphinx-build\nifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)\n$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)\nendif\n\n# Internal variables.\nPAPEROPT_a4     = -D latex_paper_size=a4\nPAPEROPT_letter = -D latex_paper_size=letter\nALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .\n# the i18n builder cannot share the environment and doctrees with the others\nI18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .\n\n.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext\n\nhelp:\n\t@echo \"Please use \\`make <target>' where <target> is one of\"\n\t@echo \"  html       to make standalone HTML files\"\n\t@echo \"  dirhtml    to make HTML files named index.html in directories\"\n\t@echo \"  singlehtml to make a single large HTML file\"\n\t@echo \"  pickle     to make pickle files\"\n\t@echo \"  json       to make JSON files\"\n\t@echo \"  htmlhelp   to make HTML files and a HTML help project\"\n\t@echo \"  qthelp     to make HTML files and a qthelp project\"\n\t@echo \"  devhelp    to make HTML files and a Devhelp project\"\n\t@echo \"  epub       to make an epub\"\n\t@echo \"  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter\"\n\t@echo \"  latexpdf   to make LaTeX files and run them through pdflatex\"\n\t@echo \"  latexpdfja to make LaTeX files and run them through platex/dvipdfmx\"\n\t@echo \"  text    
   to make text files\"\n\t@echo \"  man        to make manual pages\"\n\t@echo \"  texinfo    to make Texinfo files\"\n\t@echo \"  info       to make Texinfo files and run them through makeinfo\"\n\t@echo \"  gettext    to make PO message catalogs\"\n\t@echo \"  changes    to make an overview of all changed/added/deprecated items\"\n\t@echo \"  xml        to make Docutils-native XML files\"\n\t@echo \"  pseudoxml  to make pseudoxml-XML files for display purposes\"\n\t@echo \"  linkcheck  to check all external links for integrity\"\n\t@echo \"  doctest    to run all doctests embedded in the documentation (if enabled)\"\n\nclean:\n\trm -rf $(BUILDDIR)/*\n\nhtml:\n\t$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html\n\t@echo\n\t@echo \"Build finished. The HTML pages are in $(BUILDDIR)/html.\"\n\ndirhtml:\n\t$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml\n\t@echo\n\t@echo \"Build finished. The HTML pages are in $(BUILDDIR)/dirhtml.\"\n\nsinglehtml:\n\t$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml\n\t@echo\n\t@echo \"Build finished. 
The HTML page is in $(BUILDDIR)/singlehtml.\"\n\npickle:\n\t$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle\n\t@echo\n\t@echo \"Build finished; now you can process the pickle files.\"\n\njson:\n\t$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json\n\t@echo\n\t@echo \"Build finished; now you can process the JSON files.\"\n\nhtmlhelp:\n\t$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp\n\t@echo\n\t@echo \"Build finished; now you can run HTML Help Workshop with the\" \\\n\t      \".hhp project file in $(BUILDDIR)/htmlhelp.\"\n\nqthelp:\n\t$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp\n\t@echo\n\t@echo \"Build finished; now you can run \"qcollectiongenerator\" with the\" \\\n\t      \".qhcp project file in $(BUILDDIR)/qthelp, like this:\"\n\t@echo \"# qcollectiongenerator $(BUILDDIR)/qthelp/h5serv.qhcp\"\n\t@echo \"To view the help file:\"\n\t@echo \"# assistant -collectionFile $(BUILDDIR)/qthelp/h5serv.qhc\"\n\ndevhelp:\n\t$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp\n\t@echo\n\t@echo \"Build finished.\"\n\t@echo \"To view the help file:\"\n\t@echo \"# mkdir -p $$HOME/.local/share/devhelp/h5serv\"\n\t@echo \"# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/h5serv\"\n\t@echo \"# devhelp\"\n\nepub:\n\t$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub\n\t@echo\n\t@echo \"Build finished. 
The epub file is in $(BUILDDIR)/epub.\"\n\nlatex:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo\n\t@echo \"Build finished; the LaTeX files are in $(BUILDDIR)/latex.\"\n\t@echo \"Run \\`make' in that directory to run these through (pdf)latex\" \\\n\t      \"(use \\`make latexpdf' here to do that automatically).\"\n\nlatexpdf:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo \"Running LaTeX files through pdflatex...\"\n\t$(MAKE) -C $(BUILDDIR)/latex all-pdf\n\t@echo \"pdflatex finished; the PDF files are in $(BUILDDIR)/latex.\"\n\nlatexpdfja:\n\t$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex\n\t@echo \"Running LaTeX files through platex and dvipdfmx...\"\n\t$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja\n\t@echo \"pdflatex finished; the PDF files are in $(BUILDDIR)/latex.\"\n\ntext:\n\t$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text\n\t@echo\n\t@echo \"Build finished. The text files are in $(BUILDDIR)/text.\"\n\nman:\n\t$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man\n\t@echo\n\t@echo \"Build finished. The manual pages are in $(BUILDDIR)/man.\"\n\ntexinfo:\n\t$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo\n\t@echo\n\t@echo \"Build finished. The Texinfo files are in $(BUILDDIR)/texinfo.\"\n\t@echo \"Run \\`make' in that directory to run these through makeinfo\" \\\n\t      \"(use \\`make info' here to do that automatically).\"\n\ninfo:\n\t$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo\n\t@echo \"Running Texinfo files through makeinfo...\"\n\tmake -C $(BUILDDIR)/texinfo info\n\t@echo \"makeinfo finished; the Info files are in $(BUILDDIR)/texinfo.\"\n\ngettext:\n\t$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale\n\t@echo\n\t@echo \"Build finished. 
The message catalogs are in $(BUILDDIR)/locale.\"\n\nchanges:\n\t$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes\n\t@echo\n\t@echo \"The overview file is in $(BUILDDIR)/changes.\"\n\nlinkcheck:\n\t$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck\n\t@echo\n\t@echo \"Link check complete; look for any errors in the above output \" \\\n\t      \"or in $(BUILDDIR)/linkcheck/output.txt.\"\n\ndoctest:\n\t$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest\n\t@echo \"Testing of doctests in the sources finished, look at the \" \\\n\t      \"results in $(BUILDDIR)/doctest/output.txt.\"\n\nxml:\n\t$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml\n\t@echo\n\t@echo \"Build finished. The XML files are in $(BUILDDIR)/xml.\"\n\npseudoxml:\n\t$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml\n\t@echo\n\t@echo \"Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml.\"\n"
  },
  {
    "path": "docs/Reference.rst",
    "content": "###################\nReference\n###################\n\n.. toctree::\n   :maxdepth: 2\n\n   Authorization\n   CommonRequestHeaders\n   CommonResponseHeaders\n   CommonErrorResponses\n   Diagram\n   Hypermedia\n   Resources\n   UsingIteration\n   \n    \n"
  },
  {
    "path": "docs/Resources.rst",
    "content": "**************\nResource List\n**************\n\nList of Resources\n=================\n\n+----------------+------+------+------+--------+-----------------------------------------+\n| Resource       | GET  | PUT  | POST | DELETE |  Description                            |\n+================+======+======+======+========+=========================================+\n| Domain         |  Y   |  Y   |  N   |    Y   | A related collection of HDF objects     |\n+----------------+------+------+------+--------+-----------------------------------------+ \n| Group          |  Y   |  N   |  N   |    Y   | Represents an HDF Group                 |\n+----------------+------+------+------+--------+-----------------------------------------+   \n| Links          |  Y   |  N   |  N   |    N   | Collection of links within a group      |\n+----------------+------+------+------+--------+-----------------------------------------+   \n| Link           |  Y   |  Y   |  N   |    Y   | Represents an HDF link                  |\n+----------------+------+------+------+--------+-----------------------------------------+   \n| Dataset        |  Y   |  N   |  N   |    Y   | Represents an HDF Dataset               |\n+----------------+------+------+------+--------+-----------------------------------------+   \n| Attributes     |  Y   |  N   |  N   |    N   | Collection of Attributes                |\n+----------------+------+------+------+--------+-----------------------------------------+   \n| Attribute      |  Y   |  Y   |  N   |    Y   | Represents an HDF Attribute              |\n+----------------+------+------+------+--------+-----------------------------------------+   \n| Dataspace      |  Y   |  Y   |  N   |    N   | Shape of a dataset                      |\n+----------------+------+------+------+--------+-----------------------------------------+ \n| Type           |  Y   |  N   |  N   |    N   | Type of a dataset                       
|\n+----------------+------+------+------+--------+-----------------------------------------+   \n| Value          |  Y   |  Y   |  Y   |    N   | Data values of a dataset                | \n+----------------+------+------+------+--------+-----------------------------------------+   \n| Datatype       |  Y   |  N   |  N   |    Y   | Committed datatype                      |\n+----------------+------+------+------+--------+-----------------------------------------+   \n| Groups         |  Y   |  N   |  Y   |    N   | Collection of groups within a domain    |\n+----------------+------+------+------+--------+-----------------------------------------+   \n| Datasets       |  Y   |  N   |  Y   |    N   | Collection of datasets within a domain  |\n+----------------+------+------+------+--------+-----------------------------------------+   \n| Datatypes      |  Y   |  N   |  Y   |    N   | Collection of datatypes within a domain |\n+----------------+------+------+------+--------+-----------------------------------------+   \n\n \n"
  },
  {
    "path": "docs/Tutorials/IPython_samples.rst",
    "content": "###################\nSamples \n###################\nTBD: Some walkthroughs here.\n"
  },
  {
    "path": "docs/Tutorials/index.rst",
    "content": "###################\nTutorials \n###################\n\n.. toctree::\n   :maxdepth: 2\n\n   IPython_samples\n"
  },
  {
    "path": "docs/Types/index.rst",
    "content": "####################\nTypes\n####################\n\nThe h5serv REST API supports the rich type capabilities provided by HDF.  Types are \nare described in JSON and these JSON descriptions are used in operations involving \ndatasets, attributes, and committed types.  \n\nThere is not a separate request for creating types, rather the description of the type in\nincluded with the request to create the dataset, attribute, or committed type.   Once\na type is created it is immutable and will exist until the containing object is deleted.\n\nType information is returned as a JSON object in dataset, attribute, or committed type\nGET requests (under the type key).  \n\n\nPredefined Types\n================\n\nPredefined types are base integer and floating point types that are identified via\none of the following strings:\n\n * ``H5T_STD_U8{LE|BE}``: a one byte unsigned integer\n * ``H5T_STD_I8{LE|BE}``: a one byte signed integer\n * ``H5T_STD_U6{LE|BE}``: a two byte unsigned integer\n * ``H5T_STD_I16{LE|BE}``: a two byte signed integer\n * ``H5T_STD_U32{LE|BE}``: a four byte unsigned integer\n * ``H5T_STD_I32{LE|BE}``: a four byte signed integer\n * ``H5T_STD_U64{LE|BE}``: a eight byte unsigned integer\n * ``H5T_STD_I64{LE|BE}``: a eight byte signed integer\n * ``H5T_IEEE_F32{LE|BE}``: a four byte floating-point value\n * ``H5T_IEEE_F64{LE|BE}``: a eight byte floating-point integer\n        \nPredefined types ending in \"LE\" or little-endian formatted and types ending in \"BE\"\nare big-endian.  E.g. ``H5T_STD_I64LE`` would be an eight byte, signed, little-endian\ninteger.    \n\n*Note:* little vs. big endian are used to specify the byte ordering in the server storage\nsystem and are not reflected in the JSON representation of the values.\n\nExample \n-------\n\nJSON Representation of an attribute with a ``H5T_STD_I8LE`` (signed, one byte) type:\n\n.. 
code-block:: json\n\n    {\n    \"name\": \"attr1\", \n    \"shape\": {\n        \"class\": \"H5S_SIMPLE\", \n        \"dims\": [27]\n    }, \n    \"type\": {\n        \"class\": \"H5T_INTEGER\",\n        \"base\": \"H5T_STD_I8LE\"\n        },\n    \"value\": [49, 115, 116, 32, 97, 116, 116, 114, 105, 98, 117, 116, 101, 32, \n              111, 102, 32, 100, 115, 101, 116, 49, 46, 49, 46, 49, 0]\n    }\n\n\nString Types - Fixed Length\n============================\n\n                     \nFixed length strings have a specified length (supplied when the object is created) that \nis used for each data element.  Any values that are assigned that exceed that length \nwill be truncated. \n\nTo specify a fixed length string, create a JSON object with class, charSet, strPad,\nand length keys (see definitions of these keys below).\n\n*Note:* Currently only the ASCII character set is supported.\n\n*Note:* Fixed width unicode strings are not currently supported.\n\n*Note:* String Padding other than \"H5T_STR_NULLPAD\" will get stored as \"H5T_STR_NULLPAD\"\n\nExample \n-------\n\nJSON representation of a dataset using a fixed width string of 40 characters:\n\n.. code-block:: json\n\n    {\n    \"id\": \"1e8a359c-ac46-11e4-9f3e-3c15c2da029e\",\n    \"shape\": {\n        \"class\": \"H5S_SCALAR\"\n    }, \n    \"type\": {\n        \"class\": \"H5T_STRING\", \n        \"charSet\": \"H5T_CSET_ASCII\", \n        \"strPad\": \"H5T_STR_NULLPAD\", \n        \"length\": 40\n        },\n    \"value\": \"Hello, World!\"\n    }\n    \nString Types - Variable Length\n==============================\n\nVariable length strings allow each element of an array to only use as much storage\nas needed.  This is convenient when the maximum string length is not known beforehand,\nor there is a great deal of variability in the lengths of strings.  
\n\n*Note:* Typically there is a slight performance penalty in accessing variable length\nstring elements of an array in the server.\n\nTo specify a variable length string, create a JSON object with class, charSet, strPad,\nand length keys (see definitions of these keys below) where the value of \"length\" is:\n``H5T_VARIABLE``.\n\n*Note:* Current only the ASCII character set is supported.\n\n*Note:* Variable width unicode strings are not currently supported.\n\n*Note:* String Padding other than \"H5T_STR_NULLTERM\" will get stored as \"H5T_STR_NULLTERM\"\n\nExample \n-------\n\nJSON representation of a attribute using a variable length string:\n\n.. code-block:: json\n\n    {\n    \"name\": \"A1\", \n    \"shape\": {\n        \"class\": \"H5S_SIMPLE\", \n        \"dims\": [4]\n    }, \n    \"type\": {\n        \"class\": \"H5T_STRING\", \n        \"charSet\": \"H5T_CSET_ASCII\", \n        \"strPad\": \"H5T_STR_NULLTERM\", \n        \"length\": \"H5T_VARIABLE\"\n    }, \n    \"value\": [\n        \"Hypermedia\", \n        \"as the\", \n        \"engine\", \n        \"of state.\"\n      ]\n    }\n\n    \n\nCompound Types\n==============\n\nFor some types of data it makes sense to store sets of related items together rather\nthan in separate datasets or attributes.  For these use cases a compound datatype\ncan be defined.  A compound datatype has class: ``H5T_COMPOUND`` and a field key which\ncontains an array of sub-types.  \nEach of these sub-types can be a primitive type, a string, or another \ncompound type.  Each sub-type has a name that can be used to refer to the element.\n\n*Note:* The field names are not shown in the representation of an dataset or attribute's\nvalues.\n\nExample \n-------\n\nJSON representation of a scalar attribute with a compound type that consists of two \nfloating point elements:\n\n.. 
code-block:: json\n\n    {\n    \"name\": \"mycomplex\", \n    \"shape\": {\n        \"class\": \"H5S_SCALAR\" \n    }, \n    \"type\": {\n        \"class\": \"H5T_COMPOUND\", \n        \"fields\": [\n                {\n                \"name\": \"real_part\", \n                \"type\": {\n                        \"base\": \"H5T_IEEE_F64LE\", \n                        \"class\": \"H5T_FLOAT\"\n                    }\n                }, \n                {\n                \"name\": \"imaginary_part\", \n                \"type\": {\n                        \"base\": \"H5T_IEEE_F64LE\", \n                        \"class\": \"H5T_FLOAT\"\n                    }\n                }\n            ]\n    }, \n    \"value\": [ 1.2345, -2.468 ]\n    }\n    \nEnumerated Types\n=================\n\nEnumerated types enable the integer values of a dataset or attribute to be mapped to\na set of strings.  This allows the semantic meaning of a given set of values to be\ndescribed along with the data.\n\nTo specify an enumerated type, use the class ``H5T_ENUM``, provide a base type (must be\nsome form of integer), and a \"mapping\" key that list strings with their associated \nnumeric values.\n\n\nExample \n-------\n\n.. 
code-block:: json\n    \n    {\n    \"id\": \"1e8a359c-ac46-11e4-9f3e-3c15c2da029e\",\n    \"shape\": {\n        \"class\": \"H5S_SIMPLE\", \n        \"dims\": [ 7 ]\n    }, \n    \"type\": {\n        \"class\": \"H5T_ENUM\",\n        \"base\": {\n            \"class\": \"H5T_INTEGER\",\n            \"base\": \"H5T_STD_I16BE\" \n        },  \n        \"mapping\": {\n            \"GAS\": 2, \n            \"LIQUID\": 1, \n            \"PLASMA\": 3, \n            \"SOLID\": 0\n        }\n    }, \n    \"value\": [ 0, 2, 3, 2, 0, 1, 1 ]\n    }\n                \nArray Types\n===========\n\nArray types are used when it is desired for each element of an attribute or dataset\nto itself be a (typically small) array.\n\nTo specify an array type, use the class ``H5T_ARRAY`` and provide the dimensions \nof the array with the type.  Use the \"base\" key to specify the type of the elements\nof the array type.\n\nExample \n-------\n\nA dataset with 3 elements, each of which is a 2x2 array of integers.\n\n.. 
code-block:: json\n\n    {\n    \"id\": \"9348ad51-7bf7-11e4-a66f-3c15c2da029e\",\n    \"shape\": {\n        \"class\": \"H5S_SIMPLE\", \n        \"dims\": [ 3 ]\n    }, \n    \"type\": {\n        \"class\": \"H5T_ARRAY\", \n        \"base\": {\n            \"class\": \"H5T_INTEGER\",\n            \"base\": \"H5T_STD_I16BE\"\n        }, \n        \"dims\": [ 2, 2 ]\n    }, \n    \"value\": [\n        [ [1, 2], [3, 4] ],\n        [ [2, 1], [4, 3] ],\n        [ [1, 1], [4, 4] ]\n      ]\n    }\n    \nOpaque Types\n=============\n\nTBD\n\nExample\n-------\nTBD\n\nObject Reference Types\n======================\n\nAn object reference type enables you to define an array where each element of the\narray is a reference to another dataset, group or committed datatype.\n\nTo specify an object reference type, use ``H5T_REFERENCE`` as the type class, and\n``H5T_STD_REF_OBJ`` as the base type.\n\nThe elements of the array consist of strings that have the prefix: \"groups/\", \n\"datasets/\", or \"datatypes/\" followed by the UUID of the referenced object.\n\n\nExample \n-------\n\nA JSON representation of an attribute that consists of a 3 element array of object \nreferences.  The first element points to a group, the second element is null, and the \nthird element points to a dataset.\n\n.. 
code-block:: json\n\n    {\n    \"name\": \"objref_attr\", \n    \"shape\": {\n        \"class\": \"H5S_SIMPLE\", \n        \"dims\": [ 3 ]\n    }, \n    \"type\": {\n        \"class\": \"H5T_REFERENCE\",\n        \"base\": \"H5T_STD_REF_OBJ\"\n    }, \n    \"value\": [\n        \"groups/a09a9b99-7bf7-11e4-aa4b-3c15c2da029e\", \n        \"\",\n        \"datasets/a09a8efa-7bf7-11e4-9fb6-3c15c2da029e\"\n      ]\n    }\n    \nRegion Reference Types\n======================\n\nA region reference type allows the creation of attributes or datasets where each array\nelement references a section (point selection or hyperslab) of another dataset.\n\nTo specify a region reference type, use ``H5T_REFERENCE`` as the type class, and\n``H5T_STD_REF_DSETREG`` as the base type.\n\n*Note:* When writing values to the dataset, each element of the dataset must be \na JSON object with keys: \"id\", \"select_type\", and \"selection\" (as in the example below).\n\nExample \n-------\n\nA JSON representation of a region reference dataset with two elements.\n\nThe first element is a point selection element that references 4 elements\nin the dataset with UUID of \"68ee967a-...\".\n\nThe second element is a hyperslab selection that references 4 hyper-slabs in \nthe same dataset as the first element.  Each element is a pair of points that\ngives the boundary of the selection.\n\n.. 
code-block:: json\n\n    {\n    \"id\": \"68ee8647-7bed-11e4-9397-3c15c2da029e\",\n    \"shape\": {\n        \"class\": \"H5S_SIMPLE\", \n        \"dims\": [2]\n    }, \n    \"type\": {\n        \"class\": \"H5T_REFERENCE\",\n        \"base\": \"H5T_STD_REF_DSETREG\"\n    }, \n    \"value\": [\n        {\n        \"id\": \"68ee967a-7bed-11e4-819c-3c15c2da029e\", \n        \"select_type\": \"H5S_SEL_POINTS\", \n        \"selection\": [ \n            [0, 1], [2, 11], [1, 0], [2, 4]\n          ]\n        }, \n        {\n          \"id\": \"68ee967a-7bed-11e4-819c-3c15c2da029e\", \n          \"select_type\": \"H5S_SEL_HYPERSLABS\", \n          \"selection\": [\n            [ [0, 0],  [0, 2] ], \n            [ [0, 11],  [0, 13] ], \n            [ [2, 0],  [2, 2] ], \n            [ [2, 11],  [2, 13] ]\n          ]\n        }\n      ]\n    }  \n    \nType Keys\n=========\n\nInformation on the JSON keys used in type specifications.\n\nclass\n-----\nThe type class.  One of:\n\n* ``H5T_INTEGER``: an integer type\n* ``H5T_FLOAT``: a floating point type\n* ``H5T_STRING``: a string type\n* ``H5T_OPAQUE``: an opaque type\n* ``H5T_COMPOUND``: a compound type\n* ``H5T_ARRAY``: an array type\n* ``H5T_ENUM``: an enum type\n* ``H5T_REFERENCE``: a reference type\n\nbase\n----\n\nA string that gives the base predefined type used (or reference type for the \nreference class).\n\norder\n-----\n\nThe byte ordering.  One of:\n\n* ``H5T_NONE``: Ordering is not relevant (e.g. for string types)\n* ``H5T_ORDER_LE``: Little endian ordering (e.g. native ordering for x86 computers)\n* ``H5T_ORDER_BE``: Big endian ordering\n\ncharSet\n-------\n\nCharacter set for strings.  Currently only ``H5T_CSET_ASCII`` is supported.\n\nstrPad\n-------\n\nDefines how fixed length strings are padded.  
One of:\n\n* ``H5T_STR_NULLPAD``: String is padded with nulls\n* ``H5T_STR_NULLTERM``: String is null terminated\n* ``H5T_STR_SPACEPAD``: String is padded with spaces\n\nlength\n--------\n\nDefines the string length.  Either a positive integer or the string: ``H5T_VARIABLE``.\n\nname\n----\n\nThe field name for compound types.\n\nmapping\n-------\n\nThe mapping of enum names to integer values for enum types.\n\nselect_type\n-----------\n\nThe selection type for reference types.  One of:\n\n* ``H5S_SEL_POINTS``: selection is a series of points\n* ``H5S_SEL_HYPERSLABS``: selection is a series of hyper-slabs.\n\nRelated Resources\n=================\n\n* :doc:`../DatasetOps/GET_Dataset`\n* :doc:`../DatasetOps/GET_DatasetType`\n* :doc:`../DatasetOps/POST_Dataset`\n* :doc:`../AttrOps/GET_Attribute`\n* :doc:`../AttrOps/PUT_Attribute`\n* :doc:`../DatatypeOps/GET_Datatype`\n\n\n* :doc:`../DatatypeOps/POST_Datatype`\n\n \n    \n"
  },
  {
    "path": "docs/UsingIteration.rst",
    "content": "***************\nUsing Iteration\n***************\n\nThere are some operations that may return an arbitrarily large list of results. For \nexample: ``GET /groups/<id>/attributes`` returns all the attributes of the \ngroup object with the given id.  It's possible (if not common in practice) that the \ngroup may contain hundreds or more attributes.\n\nIf you desire to retrieve the list of attributes in batches (say you are developing a \nuser interface that has a \"get next page\" style button), you can use iteration.\n\nThis is accomplished by adding query parameters to the request that limit the number of\nitems returned and a marker parameter that identifies where the iteration should start \noff.\n\nLet's flesh out our example by supposing the group with UUID <id> has 1000 attributes \nnamed \"a0000\", \"a0001\", and so on.\n\nIf we'd like to retrieve just the first 100 attributes, we can add a limit value to the \nrequest like so:\n\n``GET /groups/<id>/attributes?Limit=100``\n\nNow the response will return attributes \"a0000\", \"a0001\", through \"a0099\". \n\nTo get the next hundred, use the URL-encoded name of the last attribute received as the \nmarker value for the next request:\n\n``GET /groups/<id>/attributes?Limit=100&Marker=\"a0099\"``\n\nThis request will return attributes \"a0100\", \"a0101\", through \"a0199\".\n\nRepeat this pattern until fewer than the limit items are returned.  This indicates that you've\ncompleted the iteration through all elements of the group.\n\nIteration is also supported for links in a group, and the groups, datasets, and datatypes\ncollections.\n\nRelated Resources\n=================\n\n* :doc:`AttrOps/GET_Attributes`\n* :doc:`GroupOps/GET_Groups`\n* :doc:`GroupOps/GET_Links`\n* :doc:`DatasetOps/GET_Datasets`\n* :doc:`DatatypeOps/GET_Datatypes`\n\n"
  },
  {
    "path": "docs/Utilities.rst",
    "content": "###################\nUtilities\n###################\n\nThe h5serv distribution includes the following utility scripts.  These are all\nlocated in the ``util`` directory.\n\ndumpobjdb.py\n------------\n\nThis script prints all the UUID's in an h5serv data file.\n\nUsage:\n\n``python dumpobjdb.py <hdf5_file>``\n\nhdf5_file is a file from the h5serv data directory.  Output is a list of All UUID's and\na path to the associated object.\n\nexportjson.py\n-------------\n\nThis script makes a series of rest requests to the desired h5serv endpoint and\nconstructs a JSON file representing the domain's contents.\n\nUsage: \n\n``python exportjson.py [-v] [-D|d] [-endpoint=<server_ip>]  [-port=<port] <domain>``\n  \nOptions:\n * ``-v``: verbose, print request and response codes from server\n * ``-D``: suppress all data output\n * ``-d``: suppress data output for datasets (but not attributes)\n * ``-endpoint``: specify IP endpoint of server\n * ``-port``: port address of server [default 7253]\n\n  Example - get 'tall' collection from HDF Group server:\n       ``python exportjson.py tall.data.hdfgroup.org``\n  Example - get 'tall' collection from a local server instance \n        (assuming the server is using port 5000):\n        ``python exportjson.py -endpoint=127.0.0.1 -port=5000 tall.test.hdfgroup.org``\n        \nexporth5.py\n-----------\n\nThis script makes a series of rest requests to the desired h5serv endpoint and\nconstructs a HDF5 file representing the domain's contents.\n\nusage: ``python exporth5.py [-v] [-endpoint=<server_ip>]  [-port=<port] <domain> <filename>``\n\nOptions:\n * ``-v``: verbose, print request and response codes from server\n * ``-endpoint``: specify IP endpoint of server\n * ``-port``: port address of server [default 7253]\n \n  Example - get 'tall' collection from HDF Group server, save to tall.h5:\n       ``python exporth5.py tall.data.hdfgroup.org tall.h5``\n  Example - get 'tall' collection from a local server instance \n        
(assuming the server is using port 5000):\n        ``python exporth5.py -endpoint=127.0.0.1 -port=5000 tall.test.hdfgroup.org tall.h5``\n\nThe following two utilities are located in hdf5-json submodule: hdf5-json/util.\n\njsontoh5.py\n-----------\n\nConverts a JSON representation of an HDF5 file to an HDF5 file.\n\nUsage:\n\n``jsontoh5.py [-h] <json_file> <h5_file>``\n\n<json_file> is the input .json file.\n<h5_file> is the output file (will be created by the script)\n\nOptions:\n * ``-h``: prints help message\n \nh5tojson.py\n-----------\n\nThis script converts the given HDF5 file to a JSON representation of the file.\n\nUsage:\n\n``python h5tojson.py [-h] -[D|-d] <hdf5_file>``\n\nOutput is a file the hdf5 file base name and the extension ``.json``.\n\nOptions:\n * ``-h``: prints help message\n * ``-D``: suppress all data output\n * ``-d``: suppress data output for datasets (but not attributes)\n \n \n \n\n\n\n\n    \n"
  },
  {
    "path": "docs/WhatsNew/index.rst",
    "content": "###################\nWhat's New \n###################\n\n\nh5serv 1.1\n-----------\nSignificant features:\n  * Support was added for http over SSL (https)\n  * Support was added for authentication and simple user management\n  * Access Control List (ACL) - Enables access to HDF objects to be controlled for specific users\n\nh5serv 1.0\n----------\nThis is the first release of h5serv.  \n\nSignificant features:\n\n * An implementation of the REST API as outlined in the RESTful HDF5 paper: \n   http://www.hdfgroup.org/pubs/papers/RESTful_HDF5.pdf \n * A simple DNS Server that maps DNS domains to HDF5 collections (see: https://github.com/HDFGroup/dynamic-dns)\n * Utilities to convert native HDF5 files to HDF5-JSON and HDF5-JSON to HDF5\n * UUID and timestamp extensions for HDF5 datasets, groups, and committed data types\n\n \n"
  },
  {
    "path": "docs/_static/README",
    "content": "static files\n"
  },
  {
    "path": "docs/build.sh",
    "content": "sphinx-build -b html . _build\n"
  },
  {
    "path": "docs/conf.py",
    "content": "# -*- coding: utf-8 -*-\n#\n# h5serv documentation build configuration file, created by\n# sphinx-quickstart on Mon Nov  3 22:14:28 2014.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport six\nimport sys\nimport os\nfrom datetime import datetime\n\n#sys.path.append(os.path.abspath(\"sphinx_ext\"))\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#sys.path.insert(0, os.path.abspath('.'))\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\n#extensions = ['JSONLexer']\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix of source filenames.\nsource_suffix = '.rst'\n\n# The encoding of source files.\n#source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = u'h5serv'\nif six.PY3:\n  copyright = str(datetime.now().year) + ', The HDF Group'\nelse:\n  copyright = unicode(datetime.now().year) + u', The HDF Group'\n\nrst_epilog = '.. 
|copyright| replace:: %s' % copyright\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = '0.1'\n# The full version, including alpha/beta/rc tags.\nrelease = '0.1'\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#language = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n#today = ''\n# Else, today_fmt is used as the format for a strftime call.\n#today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\nexclude_patterns = ['_build']\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\n#default_role = None\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\n#add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n#add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n#show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n#modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n#keep_warnings = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages.  See the documentation for\n# a list of builtin themes.\nhtml_theme = 'default'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further.  
For a list of options available for each theme, see the\n# documentation.\n#html_theme_options = {}\n\n# Add any paths that contain custom themes here, relative to this directory.\n#html_theme_path = []\n\n# The name for this set of Sphinx documents.  If None, it defaults to\n# \"<project> v<release> documentation\".\n#html_title = None\n\n# A shorter title for the navigation bar.  Default is the same as html_title.\n#html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n#html_logo = None\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n#html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. 
These files are copied\n# directly to the root of the documentation.\n#html_extra_path = []\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\n#html_last_updated_fmt = '%b %d, %Y'\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n#html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n#html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n#html_additional_pages = {}\n\n# If false, no module index is generated.\n#html_domain_indices = True\n\n# If false, no index is generated.\n#html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n#html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n#html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n#html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\n#html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it.  The value of this option must be the\n# base URL from which the finished HTML is served.\n#html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. \".xhtml\").\n#html_file_suffix = None\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'h5servdoc'\n\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n# The paper size ('letterpaper' or 'a4paper').\n#'papersize': 'letterpaper',\n\n# The font size ('10pt', '11pt' or '12pt').\n#'pointsize': '10pt',\n\n# Additional stuff for the LaTeX preamble.\n#'preamble': '',\n}\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title,\n#  author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n  ('index', 'h5serv.tex', u'h5serv Documentation',\n   u'HDFGroup', 'manual'),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n#latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n#latex_use_parts = False\n\n# If true, show page references after internal links.\n#latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n#latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n#latex_appendices = []\n\n# If false, no module index is generated.\n#latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n    ('index', 'h5serv', u'h5serv Documentation',\n     [u'HDFGroup'], 1)\n]\n\n# If true, show URL addresses after external links.\n#man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n#  dir menu entry, description, category)\ntexinfo_documents = [\n  ('index', 'h5serv', u'h5serv Documentation',\n   u'HDFGroup', 'h5serv', 'One line description of project.',\n   'Miscellaneous'),\n]\n\n# Documents to append as an appendix to all manuals.\n#texinfo_appendices = []\n\n# If false, no module index is generated.\n#texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n#texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n#texinfo_no_detailmenu = False\n"
  },
  {
    "path": "docs/index.rst",
    "content": ".. h5serv documentation master file, created by\n   sphinx-quickstart on Fri Oct 24 14:51:58 2014.\n   You can adapt this file completely to your liking, but it should at least\n   contain the root `toctree` directive.\n\nh5serv Developer Documentation\n==================================\n\nThis is the developer documentation for h5serv, a WebService for HDF5 content.\n\nContents:\n\n.. toctree::\n   :maxdepth: 2\n   \n   Introduction/index\n   Installation/index\n   DomainOps/index\n   GroupOps/index\n   DatasetOps/index\n   DatatypeOps/index\n   AttrOps/index\n   Types/index\n   AclOps/index\n   Reference\n   Utilities\n   AdminTools\n   WhatsNew/index\n   Tutorials/index\n   FAQ/index\n   License/index\n\n\n\nIndices and tables\n==================\n\n* :ref:`genindex`\n* :ref:`modindex`\n* :ref:`search`\n\n"
  },
  {
    "path": "docs/make.bat",
    "content": "@ECHO OFF\r\n\r\nREM Command file for Sphinx documentation\r\n\r\nif \"%SPHINXBUILD%\" == \"\" (\r\n\tset SPHINXBUILD=sphinx-build\r\n)\r\nset BUILDDIR=_build\r\nset ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .\r\nset I18NSPHINXOPTS=%SPHINXOPTS% .\r\nif NOT \"%PAPER%\" == \"\" (\r\n\tset ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%\r\n\tset I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%\r\n)\r\n\r\nif \"%1\" == \"\" goto help\r\n\r\nif \"%1\" == \"help\" (\r\n\t:help\r\n\techo.Please use `make ^<target^>` where ^<target^> is one of\r\n\techo.  html       to make standalone HTML files\r\n\techo.  dirhtml    to make HTML files named index.html in directories\r\n\techo.  singlehtml to make a single large HTML file\r\n\techo.  pickle     to make pickle files\r\n\techo.  json       to make JSON files\r\n\techo.  htmlhelp   to make HTML files and a HTML help project\r\n\techo.  qthelp     to make HTML files and a qthelp project\r\n\techo.  devhelp    to make HTML files and a Devhelp project\r\n\techo.  epub       to make an epub\r\n\techo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter\r\n\techo.  text       to make text files\r\n\techo.  man        to make manual pages\r\n\techo.  texinfo    to make Texinfo files\r\n\techo.  gettext    to make PO message catalogs\r\n\techo.  changes    to make an overview over all changed/added/deprecated items\r\n\techo.  xml        to make Docutils-native XML files\r\n\techo.  pseudoxml  to make pseudoxml-XML files for display purposes\r\n\techo.  linkcheck  to check all external links for integrity\r\n\techo.  doctest    to run all doctests embedded in the documentation if enabled\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"clean\" (\r\n\tfor /d %%i in (%BUILDDIR%\\*) do rmdir /q /s %%i\r\n\tdel /q /s %BUILDDIR%\\*\r\n\tgoto end\r\n)\r\n\r\n\r\n%SPHINXBUILD% 2> nul\r\nif errorlevel 9009 (\r\n\techo.\r\n\techo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx\r\n\techo.installed, then set the SPHINXBUILD environment variable to point\r\n\techo.to the full path of the 'sphinx-build' executable. Alternatively you\r\n\techo.may add the Sphinx directory to PATH.\r\n\techo.\r\n\techo.If you don't have Sphinx installed, grab it from\r\n\techo.http://sphinx-doc.org/\r\n\texit /b 1\r\n)\r\n\r\nif \"%1\" == \"html\" (\r\n\t%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The HTML pages are in %BUILDDIR%/html.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"dirhtml\" (\r\n\t%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"singlehtml\" (\r\n\t%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"pickle\" (\r\n\t%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can process the pickle files.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"json\" (\r\n\t%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can process the JSON files.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"htmlhelp\" (\r\n\t%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can run HTML Help Workshop with the ^\r\n.hhp project file in %BUILDDIR%/htmlhelp.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"qthelp\" (\r\n\t%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can run \"qcollectiongenerator\" with the ^\r\n.qhcp project file in 
%BUILDDIR%/qthelp, like this:\r\n\techo.^> qcollectiongenerator %BUILDDIR%\\qthelp\\h5serv.qhcp\r\n\techo.To view the help file:\r\n\techo.^> assistant -collectionFile %BUILDDIR%\\qthelp\\h5serv.ghc\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"devhelp\" (\r\n\t%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"epub\" (\r\n\t%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The epub file is in %BUILDDIR%/epub.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"latex\" (\r\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; the LaTeX files are in %BUILDDIR%/latex.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"latexpdf\" (\r\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r\n\tcd %BUILDDIR%/latex\r\n\tmake all-pdf\r\n\tcd %BUILDDIR%/..\r\n\techo.\r\n\techo.Build finished; the PDF files are in %BUILDDIR%/latex.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"latexpdfja\" (\r\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r\n\tcd %BUILDDIR%/latex\r\n\tmake all-pdf-ja\r\n\tcd %BUILDDIR%/..\r\n\techo.\r\n\techo.Build finished; the PDF files are in %BUILDDIR%/latex.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"text\" (\r\n\t%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The text files are in %BUILDDIR%/text.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"man\" (\r\n\t%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The manual pages are in %BUILDDIR%/man.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"texinfo\" (\r\n\t%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. 
The Texinfo files are in %BUILDDIR%/texinfo.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"gettext\" (\r\n\t%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The message catalogs are in %BUILDDIR%/locale.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"changes\" (\r\n\t%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.The overview file is in %BUILDDIR%/changes.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"linkcheck\" (\r\n\t%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Link check complete; look for any errors in the above output ^\r\nor in %BUILDDIR%/linkcheck/output.txt.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"doctest\" (\r\n\t%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Testing of doctests in the sources finished, look at the ^\r\nresults in %BUILDDIR%/doctest/output.txt.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"xml\" (\r\n\t%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The XML files are in %BUILDDIR%/xml.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"pseudoxml\" (\r\n\t%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.\r\n\tgoto end\r\n)\r\n\r\n:end\r\n"
  },
  {
    "path": "entrypoint.sh",
    "content": "#!/bin/bash\n# entrypoint for Docker container\ncd /usr/local/src/h5serv\npython h5serv --datapath=/data --log_file="
  },
  {
    "path": "examples/h5pyd_ex1.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"%matplotlib inline\\n\",\n    \"import h5pyd\\n\",\n    \"import numpy as np\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# get handle to domain object\\n\",\n    \"f = h5pyd.File(\\\"craterlake.test.hdfgroup.org\\\", \\\"r\\\", endpoint=\\\"http://127.0.0.1:5000\\\")\\n\",\n    \"# this is the root group uuid\\n\",\n    \"f.id.uuid\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"dset = f['/Data']\\n\",\n    \"dset.id.uuid\\n\",\n    \"print \\\"name:\\\", dset.name\\n\",\n    \"print \\\"shape:\\\", dset.shape\\n\",\n    \"print \\\"type:\\\", dset.dtype\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"#plot the data\\n\",\n    \"ndarr = dset[...]\\n\",\n    \"import matplotlib.pyplot as plt\\n\",\n    \"plt.imshow(ndarr)\\n\",\n    \"plt.set_cmap('spectral')\\n\",\n    \"plt.colorbar()\\n\",\n    \"plt.show()\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# zoom in on wizard island\\n\",\n    \"ndarr = dset[140:180,90:130]\\n\",\n    \"plt.imshow(ndarr)\\n\",\n    \"plt.set_cmap('spectral')\\n\",\n    \"plt.colorbar()\\n\",\n    \"plt.show()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"g = 
h5pyd.File(\\\"nanex.test.hdfgroup.org\\\", \\\"r\\\", endpoint=\\\"http://127.0.0.1:5000\\\")\\n\",\n    \"dset = g['/Nanex/OKey']\\n\",\n    \"print \\\"shape:\\\", dset.shape\\n\",\n    \"print \\\"type:\\\", dset.dtype\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"aapl = dset.read_where('RootSymbol == \\\"AAPL\\\"') #get all rows where the symbol is AAPL\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"aapl.shape\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"aapl[0:5]  # first 5 rows\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": true\n   },\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"Python 2\",\n   \"language\": \"python\",\n   \"name\": \"python2\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 2\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython2\",\n   \"version\": \"2.7.9\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 0\n}\n"
  },
  {
    "path": "examples/h5pyd_ex2.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 18,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"%matplotlib inline\\n\",\n    \"import h5pyd\\n\",\n    \"import numpy as np\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 19,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"u'39400ce6-263e-11e5-bd87-3c15c2da029e'\"\n      ]\n     },\n     \"execution_count\": 19,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# get handle to domain object\\n\",\n    \"f = h5pyd.File(\\\"Land_and_Ocean_LatLong1.test.hdfgroup.org\\\", \\\"r\\\", endpoint=\\\"http://127.0.0.1:5000\\\")\\n\",\n    \"# this is the root group uuid\\n\",\n    \"f.id.uuid\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 20,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"datasets: [u'longitude', u'latitude', u'land_mask', u'temperature', u'month_number', u'climatology', u'time']\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# print name of all the top-level items\\n\",\n    \"print \\\"datasets:\\\", f.keys()\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 21,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"temperatures shape: [1980, 180, 360]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# get the 'tempaerature' dataset (doesn't retrieve data)\\n\",\n    \"temp = f['/temperature']\\n\",\n    \"print \\\"temperatures shape:\\\", temp.shape\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 22,\n   \"metadata\": {\n    
\"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"(1, 180, 360)\\n\",\n      \"(180, 360)\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# retreive one slice from the dataset\\n\",\n    \"one_slice = temp[16,:,:]\\n\",\n    \"print one_slice.shape\\n\",\n    \"one_slice = np.squeeze(one_slice) # convert to 2d\\n\",\n    \"print one_slice.shape\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 23,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"image/png\": \"iVBORw0KGgoAAAANSUhEUgAAAVwAAADtCAYAAADz981IAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXm4JVdV9//ZNZzhzj3cHtPpEDInZGIKYQqDgiGRSRBE\\nRBEcf87gCwgSFBAUeFVUkJ8iEEVFQFCmIIZAAkhCZtJJOkPP6eF2953PWFX7/WPtVbXP6dvJpfvS\\njZ1az1PPmepU7dq193d/13etvctYaymttNJKK+2Hb8HxLkBppZVW2qPFSsAtrbTSSjtGVgJuaaWV\\nVtoxshJwSyuttNKOkZWAW1pppZV2jKwE3NJKK620Y2TR8S5AaaWVVtoP04wxP1Duq7XW/LDKUgJu\\naaWVdsLb9772tUXt94TnPveHWo4ScEsrrbQT36amjncJgBJwSyuttEeDlYBbWmmllXaMbOvW410C\\noATc0kor7dFgJcMtrbTSSjtGVgJuaaWVVtoxsqMEXGNMCHwP2GmtvfJIj1MCbmmllXbi29Ez3N8C\\nNgHDR3OQEnBLK620E9+OAnCNMScBlwPvAn73aIpRAm5ppZV24tvRMdz/C7wRGDnaYpSAW1pppZ34\\ntnPnEf3NGHMFsM9ae6sx5rKjLUYJuKWVVtoJb4dbpet7bnsYuxT4SWPM5UANGDHGfMJa+3NHUg5T\\nPtOstNJKO5HNGGNvXeS+F3H4xWuMMc8E3lBmKZRWWmmlPYwt4fJfR8VQS4ZbWmmlndBmjLF3LHLf\\n8ymXZyyttNJKOyr7oSHoD2gl4JZWWmknvJWAW1pppZV2jCw83gVwVgJuaaWVdsJbyXBLK6200o6R\\n/ag8LbcE3NJKK+2Et5LhllZaaaUdIysBt7TSSivtGFkpKZRWWmmlHSMrsxRKK6200o6Rxce7AM5K\\nwC2ttNJOeCsZbmmllVbaMbIScEsrrbTSjpGVQbPSSiuttGNkJcMtrbTSSjtGVgJuaaWVVtoxsjJL\\nobTSSivtGFnJcEsrrbTSjpGVQbPSSiuttGNkpr7IHZt9/zOmBnwDqAIV4PPW2jcfaTlKwC2ttNJO\\nfBta5H59gGutbRljnmWtbRhjIuAGY8zTrLU3HEkxSsAtrbTSTnxbLOBOHPqVtbbh3lYQOfjgkRaj\\nBNzSSivtxLfFAu4CZowJgFuAxwIfstZuOtJjlYBbWmmlnfi2ZpH73XnoV9baDLjQGDMKXGOMucxa\\ne92RFKME3NJKK+3Et8Mw3Osm4Lr9izuEtXbaGPNF4AnAdUdSDGOtPZL/lVZaaaX9rzBjjLWvXuS+\\nV4O1Nn9AhDFmJZ
BYa6eMMXXgGuAd1tr/PpKylAy3tNJKO/HtyDXctcDHnY4bAFcfKdhCCbillVba\\no8GOEHCttXcCFy9VMUrALa200k58O4oshaW0Yw64xphSNC6ttNIWbb6mesT2aAVcZ++w1l51nM69\\noBljrjrWZTK/aT7PKk7Nv1jPeUAx8fvfgRcf7s+AdduRWOZe/f8vZsL554AXLXCcRzrPIx07oPdZ\\n1gvtbx5mn08DL1lkWRZr6Q+4v9oEW+gwz1dYxfPZxwx77Z/a5x7h0ZbUjkc7fyR7uDItGUEbW5Kj\\nHLWVksIxMrPenJN/eCd3AXARDw9EFtfpTfHZt9R9YRY4zsNxAv3Nep8Pt79/TuO96qbLMCk49QPn\\nw4GoDhha/qzv+8OVbaHjQO+SUH6dhAvs65d5ofP6/8t4+IHN/78BVvEYAMaBjawCzjNXe8DxTT7K\\nl3k/ALPst9N238McvbSlsB8RpPsRKcaJaWbILOO5/CYWeCdXAb2d2vLwrCvT38PDdPikF+B8YPJB\\nQtmw/h70vfab7Xvv/1eP7Z/PPMymFvT9xy9D/zX0X6sPnj6z97cYWV7EP5+WIfXKrHXafw6/rvrZ\\n/0IDiJbJr8N+Nh8ik0H7j/FjvJbn8loA7uV683JTRL2v4W/stF1ggmlpR2U/Ikh3vIpx3XE678PZ\\ndUtxEDNm1vIGvgrAW6mwmjPyH31wPZyrrZ8tcDbSabNk4ZP5/+0Bb72t7n/KRH0QfKQFQn132ge8\\n8xBg6wdO/dw/gGT0ApAPsP5/A29/Lav+J/T2U9af0guaFwN1CqbslyOgd9DoH4j861zI/EFH98/o\\nBWf/XGqPd2Wy3m/957iYp3MxT8/P/zx+1nzYtAB4Bz9pd9sthynVkdp1S3y8pbDrfuhn+BEB3GM+\\n8cEYY5dEBP8RMDNkljPKGoYZ5//0NZqFmFJmwPbRJhN4ndShkdHWkYHNgA6HIoM91PUO+nbpd7Hh\\n8Kz2cLYQSPczUt36QU2BL/X28QHUIh0hdceseL/H3jX5QJ16x9Stv7xaL7pvBhjjyRdWzut7Gk2g\\nHUESQDeFNC3KEbpy+gNX0PfeHzx034WYvm86aOggo+fR4yZuy4A38BPsZXtPmfXt1iOf2/+jbkuB\\nF8YYaz+zyH1fukRBusMdvwTcxZupmjov4Pfzxn4qT+JxXL6g5ujrfymQRhQ9sVvsYAblvXGIYyoQ\\njFD4wqn8L1wuQGwN2A4kW4F5tzXA2IX108VquwvJEtDLLn1Q6ZcA+pluArTdpSb0gpP+P5JLZggY\\nBQaAGgXYZn3HShBgbFKAaeLO0QWSCthBsENy4LQDyQFIW+7+BDKARVlxbr0dXQPdlRBtADMMYQyR\\nlRNn89DZD3YXVDMpp97OOoeycB0g/IGiv958puwzcq2f1PuPMnSf3afedx9zcpV/DgP2w/Yd/C+3\\nJQPczy9y3xeWgHtczfypKZazCAhZx9k92qGys4iCgXUoOkzOSAIwAw5MHbUzsQNckF7iLcZpU7Bt\\nCMdh4CVgViA9OIHkPmh+BbJdQAuMJx0sRk+Fw0sZCzHloG+/he6e6qJaJwm9oGhjGSz0mHQEXAeA\\nEQR0Awrw7AANoBUC64BVBRtMJiDrgG05jyGGYCVEZ0J0OpiaG5y6kE1BNgl2FmwTkt2QboNwTs6X\\nAGYNRKeAGZUTaN1nE5DNuvO0IWxC3crgMIKAbcW75q5XRxUKtuoDsl9fum+/PNK/D/R6BXjvtX47\\nbvP338r3e+7XAlKL/Sn7uAXO/CNjSwa4X1zkvi8oAfeHamaDlz0A8Dr+hjN5Zk8Dz7xXtbTv+/yK\\njJMEQichDEC4SliTfh+MQDAk721TDmI77rdRAYxwLQQnAcPQWQVpDaI5iPcCE9DdDOkOSPdCNg3Z\\nDNh5CFzPN1kBtAtF6/tZav8+Dwe6/aaA4bOvLtC
NwVTBjEG4DIJxiDbKSZLtYK+H0aYAbyuAuTp0\\nVgrgHXzofvvG9PT+U5lrzXYyNrDLOHfbDULBeojPknoL10IwLPVoXeGMu7CsWdw06yh4NgfpHrBz\\nxYWkB8DugGoboq7IEAbRr6sI2KosESCsvELxXABfRu9SDMq+POHfh4XkEu2aKYfeSx3cVHZI+35L\\nvePpd/1SkM/K1VPx27h/DtXi1VPp0OKVPL7//hxi0xy0E3bPI+53GFsywF3kZFzznBJwl/b8rzJ/\\nCBjXmA0v4+15I1eNL6SXtSig+tJB2vfZhJDFThJYAeEa1+HbFIKcnFJc1jUQrpB9CR0Y+0JhFWxF\\n3GMD0AQzL692zoHshLxSlXNhINsL6UNgZ4TZMd0bMPM7nZ8p4Ou0pm//QyrRu26f0WpdWSCLIFsF\\n4UZXH6uFrdtEBorWt4C9YLpgQ8gqUFkNcQWCPfBfs3/JrD1IBnyFv7BzdgrAvNz8Lqs4l6cFr6Ud\\nQMdCeDJULhDQDcchGJBCZHPCZrNZ8S7MsCtsW5hxuLy42cFKGRiI5ULsDLSug+6tYA+ASSEKYDCD\\nQQRUVZLQbAQFVJURfD026tvUfEDszyzRY/mDo7Y7/b5fK9d9lPHq9/1tPNez6WXPeux+j0cHhP5y\\nK8PWQUGlGj3mrdzIN/hSz/5zTNt/tH/OImzJAPcbi9z3mSXgHv05P2JuI3RN4CQ3uaCfTegobunN\\nLXX9ExuRp2FlIH5lBMRgDATLIFguzCpc7T4Pk/eubF7+b5zwZ4bda0zRut2+tg6mJSeyVkApFzJd\\noawW1vVM4wqW7ob2TZDdBcyJW+13UL8D9QOr//1iH3PqA25b68p4AFeR+og2ynfpTjcgdCAcg2CN\\nGzjuhLFWjne0KKSZXWwmq4jD/IHuj9G2M6zhVN7CtYTD41Qvh+pFCL1USmZdHSVOylBkaskglR4E\\nOynXEKyE+AwHuHrhDWh9FYL/hmEr5eka0XVrFICq4BsXzSEHMR8wF0qLU0BWMKrAwCBEBgYMNIxU\\nRRMKsOu4LzTQ5ssWqas3bS7avjtFleQDgPXquenuW9VdzyBFsFDL2Q/uCb1Ar8fXQKg+l9wHeoWa\\nDl22cG9P0PCtvNxusXfTZ0sGuN9a5L5PPQEBl1HG7ZRd5CqUizzuSrOOIcYwwC/ybi7lhYcEIHzz\\nQaIBtM+EgZcKQ537EFT3FsGKHkAOBXwDx6RsVwAkGJOOG66B6CT5PZsSycA2Cj02WCWFMlUBGypI\\n4MsHUPVd9QL8xFe/F2vh8N7PQ+trIjkEA5BOCNOzTQjtodH10Dts2Pfd4czvRH4H9BluTBEAawDz\\nFWG7ZsBdcwOiGgRVqZvgNhhL5X7M48DWCMsnQJ4CaOTVuKhadBJkpzbY/f2tvOvWp9u5ZFGPPjGv\\nuuI9POGMK6lXh1k+sAE7B8FqV+AUkrsh2eH2DqQwnU1gd8vgShdWWlju3SZfklGwse7aOxRShNZf\\nDRiFcBAiB9aVSCZE9af2zrr3iTscmRsWjKhWaRtMG0win4m9e6T3SaWH/sFXATmjiOXGFIDr3+vQ\\n+05BXGWVflBNvHP4EJPR26e88TH/PQN2sYU3cwXbuWtJAPe7i9z3ySci4L6Xa9nEN30XzH7okSOq\\nZtxsZIRxzudZ1BjIG3oFeDo/yalcvKDL71+i73ZpY5qrQPoUqD9PtLz0u8BdkDak02cIyJrEMd1Y\\nNNhgBMyggKcZgnClMFtTEe3QuKx3U3fBMkWzqhwjR/yFRgaNwikIay/QFq6bol2CsLcpaF8P2UFh\\nlskWYI/TipNezO4P6ugp+/VF+urRdyNBOl8XSamqZIIag+RYxRQwE4KtCrqEXag0JGAGBVB3KbIR\\ntM4JHUgHUs+ETjJwdqC7jeun/4FrJt9nW+k8P4CZk1adxVMveAUAP/Vjb8d2pRKM
akqDYAPo3gCd\\n20VmoAnhQViWwjJg2G2WgpFHSBCw5l2Psr4QqrGcohXAQAzLIljtdXFt0oppUxTkVa3mDunuep5d\\n13T/w0K7v/372STQe29n3R91fPfHci1U3V2DSm0KlD7j9ZmuL5Po79Cr+ffLIf3SyPOOHgCNMdbe\\nvMh9H38iAu61yM3TLQY2831aFKlEXWA/0HmyaJbx3bA6G2SUKuOsJspFAjHFJd+V8iPnepkhMOO2\\ntvtcDcCMQHM5hOeIe247DjQHEIZzB2QHXEBmuQCu7RZgGow44B2V/wTqd2rrUr9TqaWKXXBolMRH\\nPT9iomip/1WUysiperoX0q3CyNI9Ak7ZfgHc2DtM1HcoX2s0fZvP9LXnKxr47mlmYMQK4DpspAlM\\nStGIEKTQTq1Mye+8fic1jhKaATmYPutaPQubyr0yFZhYdw/RExMYBetOYiMIDkL3RvjXr7/N3tL4\\nHIcx8xOn/w6vWPcBzACEI+J9RKdIgW0L7AQ0vwrJPRDOy4CiqWwemFJ13/ngFLrrrsKQgZqBqoHJ\\nFBoNCOpwSV9ift3d0cxVXRuY6y8zvY6I4t289xn3ObByTP8PerwqEGeQWGgZWOPug4L6nPvfXFCM\\niT0Aqf2ty6HB5K47gMYqtd34GRP9cgXe559YGsBNb13cvuFFJyLgfh1psAP0aj2aDjQNHBiG9PHA\\ndfI8Iu3AvqbUHwxS8zttB2khyj76AwT+iN729s/dpioE68jDz8EouV8VjLpo+LALgLnMg5yN+gCq\\nYV4/suKLpR35j61Daz00R2BkG0T7vIvzRw+fAWsrnxc9NLkPupucrNAuzhd3i6IoSPhbf7FC79R+\\nBoIvI+gA2UQyBgYzAV2LAPM8ktqVhTKI+baQTqz3DqQgpuIB7gCYSOo9XAWVS8CcBtkodJZBd0wk\\nHxBvJGyB6UJlSoDX3i+BsE9vfxO3N/6TbBJMR8733ufdRe1FFHOE28CsgDodNwDPOKmhBelmiO6D\\nYRdEGy5uASPIstVVirbq6tfEsCKAupHmvw+Yy+SyxwNJgNOEA5VlZylEJzUF4sWaHx8N3X+bGQwF\\noozoWDtwmP9LrEusQYGjWQZZf+DO13U1BjJHIV10vN9UitJBvz+2cvnSAG5yx+L2jc4/EQH3Pynu\\nvB9oUNcDelNn8Pbt1xoXSmPqd2n8EVePny6wJX2bsq8ukMbCXuMzRTqINgrTNX5+kIJtP230GKvt\\ni05Z/9VZVpUof+AoZOCFmwMn1h3iKzrUsxPQuEZ0SJtAtBrCx4Kdh84tEHYKgFXSralM/YOZYnpA\\noXVrL9e67EQySDQD6FYg7kDNQrcLnUR6pH9P1FRuoSs/5L/5fmwf4AZDopVXnwXhuZCthGREMhxs\\nLKCu9WqSQgIKOhA2pe6CNgSOdne+C2Y7tPdB9YnOOwGCtZIWZjJ5TbaKl5XNQLYXzKzQP9Wo/XrU\\nWz+GoJe2JfXmAqhWYTgummwHaFi58lXGMVQrcnFm3W9+3S3UZfu/M72vVXeyqpGvplOohrCKgowH\\nrshK1NUUYP3PipMNxFlMLSS+VKCAqqxXQVZf9QD6HnolCt2evzSA2z4kHLewVc/+4QLu8Zlh7Luh\\nHQrRSv0Vvest5I5WEAbhu7v9kfb+9CVfb/Rvop7b/3/qHUOZbgPIVsO7Jp7CW9Z/R8DPqWY2E8aT\\nNSHUpqkUXPOD3OhgY+n0WSwbgYApODbmXN/UucvGDfFh20kWKSRuOlPgku+DjjA3o2kBOuqEYFZA\\n/XLInizHCZaJvtz9vgOOPb3sNuotbk/EXVOfXDCf/cDsqNOr14JZJ3WRPCSsszok2vH8bslGMKNO\\nemn3Dp7g3TB3c6xTKO0CvM223MBWhdqPQfA4YbWpBtIyKUeQSZ0ad9MD16CCLkQHIXgIuvdJ/Ucn\\nQXy2w/s9Mp
GEGKpPBjMpTDrrQrZPmG16UF6zRiHNqPneltarRv4DBHxdwgahaKtdj65mOuADE07f\\ntSkkPkFYSB7zKW+/vqBV7Np4OyxeA4CulKEZO6mBwunUpqAW02sKwF0KJ7Vr4GAEqTJU36nzc4Sh\\n1wM9XD/t8XSO3mz4yPscCzs+gDsbiJgVpIV7q8RmObACGeZDiuwrBcr+ZG3olRZ884MCvsujm9/n\\n2+59gGBnpw5/N/XLvPXM75DuRwJlw8KwgjGn2Q5TBMC8rHdbceBqhK0mdcfAKtLZbeTkjNRJGhVp\\n+Q1XlsEUBltQPVBcR2a8Y0ZgLEQzAro2E5Zp6sLqgiEkv7ch2nOyHZIHIJuEIIA069W0datTuMYH\\ngYkAZuvQjIQ9BsslCyNcKxKKGRDWHN8Fw+2ix4IAxYH9cCCE+FzRkPUm5GzMOqDNnCbrZJUchL2Z\\nd8FyqD1LRLZ0ELKaq2c33bl+PyTXS9Awvhzap7r6TWTCSNCSxtT5N6hNSFFURkmfBqwXPdjOyAYy\\noNoEmW02J1uU9o6pfqBRHRxtCropfVSUwsMSn81ZSJoUA3+/i+6DkO8I9INJPxHxg6IRZO73LJWu\\nOBzCSlfUFRTjsX8rtWnqHdLsCZXkp9z+mlHR43wtFLxejC0hSP6vAFxjzEeBFwD7rJUpgMaYq4DX\\nAbqE3FustV92v70ZeC3SHH7TWvvVBQ88v1LShKr7oZZBy83k0TSbhMLXqVGwrAMUo3bY97pQdF0b\\nrg+4qhP5UVP/f8oqak14PX9L+26wVQjWO812GUTrHKApHXRU0NYFCLKadPR0ANIapFVIQ0hC0YSt\\ngTCDuCWvYRPiEMLARYrbQAuSAQHqJIY0EZCtZBA13EWMCsPOIgFtEDAPmxBNSJni86QCbEPKn+wQ\\n4PDrScF2mbv2vcBOoLUS2AjxCvIsAVMlzxoAiM8X6SDdBe2d0OmKZ2AfA40K1E9zddQS8MpBNBNA\\nA2S9ArdIgh2CbI6cHoYrIXqs02tPhXRI6jVxdR20IHoQsruB9ZBdAY2TpA7oOjkGsANgZqB2BcT3\\nSCpfUoN0XjIQgjUu0KnmGor10S4pQHahqbs+8PoZfRqf6E+l80EVCkDyg7/6Px9gFwIun3D4KpaW\\nzc8T1vcBtDPYGUBoJFSiXS5GuI5ykgYiw7bcpetyFg23fx1xgPLy+bJCv/bfN8hAX31oiKKfWh+F\\nZf0e8CLNGLMB+ASCSBb4iLX2L4+0HI/EcP8B+KA7oZoFPmCt/UBfwc4Bfho4B1gPfM0Yc4a19lDH\\nQAMQrQRaTtwKG8JylsnHXP/a6pXUV/59UDULfLeQxKCNW7XKxPuf/7sutpIzYAOmJqw2WEa+4AwJ\\nuXhn6+LipoMCsmFTgDCtQWewCPEqNdhjYH0VBq2w+W1Gaq0KHBiA5oAAbK4NNqSOwgS6I8LabCLS\\nQtQS9ttZIedX7dI0oPUFkRNs6vavSDlUCVE5Z9h93g1sRwZE5oE7ZVEYkDoIV4MZcWx/RN5XLoL2\\nk6HdEX+YmtRR7CpQ2audF+DPnCYdDorcoOs92kjOGVTBrILqUyE4GVgr9dsdcvU7IGA5tBWSv4Lk\\nfuANsPsp8t/hDkRTUNkp5cnqyFoWgQB654niBaQPQTrhBicn4eTI5hqFTWWQyGaKNuhnePiyjGaT\\n9TM6Py7gtzM/3on32Q8m9Uf0H878eIbfD7ScPj9QIhMUuzcp1hJKEdZq3Oeud4qO208Vky4S5+4q\\nWfE1Wq1Sfwpy5r33r89PNWwt8poXaUfBcLvA71hrbzPGDAE3G2P+y9pDJ2ksxh4WcK211xtjTlng\\np4VE5RcC/2yt7QJbjTH3A08C/ueQPQen5LUDWOeEaIRXG8coMmQmFBlQGkrVEhjkrrcoAhZ2AKIO\\nDCSFK6fH9+UL1cbgULCNkGpWhpA49A1Wwv9
s/1c++5k/4gNvuEsK6fJ90loBtukAtFdANyzK16Lo\\ngPuBXQZ2h9KahxGdr42w+F3AKcA6I+WfBuJB2WodibqHbgUsawR0iUS+mB6D1jJYOQgj+4WhmXkB\\naoBUtVIKzVHv5pQ7f4IEv/LVUNy50hnoTjlpYVwqzVhh2aYm4GuU+ukoZimWm3TCXmCkXNmMnMy2\\nZT+VHYIxqP8U8Fhh+ekAJIPCattVyV1KgcFpSL8tLLUKzFTdPatAugbCKYhmgZtksOECsPskXa52\\nJYVGlYkUYacdu8688rgZgspq/eUa+9muArKf1K/A6ucYU5yih+31R+51gMY7Dt5n3/qTXvSzDgTK\\nbH2pY0Be2zW4L4bVFagYcTQ1TjrAoeqdarjTCOvdD+zX7AM/j1ozI9Uh61LkrOn1dQ1Y6/pdCENp\\nISluY8nsSAHXWrsH2OPezxlj7kaSSZYecB/GfsMY83PA94Dfs9ZOuUL44LoT4WyH2jIKsFS3aQbo\\nBDCdFY2vSsFIVT7A/VfDo/ufCNlNMH4SzJ4N06dAPAmr5mDYdfD0fhi6X1J11F3zJQjNjtCO1PF+\\nNwhjpC0BoUtO+2me9vSfFhpupPDWAIPi1ttQ9NY0ERkhszBnBMy0433bwoOJ+DkVB7ox8jqIgK8G\\nFDUDXstJBbLlEggKm04PNg74GlDtyvTTuCOAFk9JL/IZv6EAiRi5FF2dS8W7fg0uZ14dSCddh69A\\nGIsOa1tOavBFTZ/ihcJurVssxjYL6cC2nF6qPbMp37VXQ2tU6tTGMBPIz/uAB4Hlj4dnvwP4ICT/\\nBit2QPP50BmHgQwaj5G6qVpgk4Tl4ysh1iUth0VmMJGAfjYJdkoknTDr1T99PbR/gojWkR80UzDV\\nVMOEIpCW9P2vn9H67nimBai5+xx5zM8NFlgkUJhKO8hP0JHvQ89T0vtdpdAC3Kpte6uyVQOouT5X\\nRWT/2MhuIxSAO5lBI3NTjzWHXlMFYwQ0Nd89cPvMnAWdOWh3ITtLvJaoK6CbVSQxuZVCpw3JAeB+\\nlsLSJZAnHPm8CFjkvLVD7UgA90PAH7n3fwy8H/jFw+y7sEz+kQ2i+YWZPNXgTFwDdrkx08hdVDGp\\nSqHWjyCAFCIq8twwVH4N9owJw4qRg+0zsM/1DNsUl/zeT8OyTXLVG9zx4VDm6wcZukjwRXNIbdu5\\n5rrzgLBqGwswpDUBw1ZdGlzbSKPbjujQbWDaQC2EjpFzzAJtlyQ8AJxM0XkrFGCn+nMaQWutpHhF\\n0xDV5ZzxJIx0JJsheEim+Nq7ZYDr17j1VRk4FMCuY4nPrhQADPKndNJp1brk5DInJ/gpI3oQ4/Rb\\nt4ZBdtCxW8ciswnpcMpwTSz7JLEMWCPfhvb3ofp6sDXp8SFw0Y3Q+RjY/RA9BlaNw20xfBJ4ah3O\\nqEsdLr8UBh4H6T4wU8j03A5kDyHrKuyFdAuE0zIDwJcIfF/OlwC6fd9rfECXc9B71aCYqqxJJX7y\\nP+5YiQZFq06yWQFmSADWOEQMBl3ZXajKOHaoWR7WuVGq4ll34u4cdGZg7gDYSRmAFGgVdB3bpSpO\\nRNvXp937sAZnBOKAzbUh89dh0DUc9Jqb7tUgHlyA9IODD0GrKjJU5WKoraJngL7jXrjrfgrXYWkA\\n93AM94Yb4IZFrLPg5IRPA79lrQZBfnD7gQHX2uKBd8aYvwP+033chcCY2knuu0PtZc+X12we0h2S\\noG+bwJywsxQBKmOlUc4Z6XiatrRrCLZVYXkHgpvBniEdGZ0kUCOfcx8MiZubDkL6m7DrVhj4J5id\\nE7D3c02hcMu0owUgLmdLGFnoK/1OajBZwTaTAahMw7J5mFsvjXiEYubcVkR+N4GcY8Y7XNCRY7Sc\\nsDZOESrWKIWSmixw7yvictOQQSCcF/DXHjVAkeuzEOj65k9C0cv0ZRadWRYA
7TYku4Gqy4oYFn3U\\nDLo/e5TatpGUqmnRTbP9AirhKqmYzl4BTbQuQlnPwnwCqqdC4/UwfB7MboM/PBXmWvBSA+NbIHgx\\ndL4DU7865XFWAAAgAElEQVQIB9ZAbUbq+doxKfvZQDcQDTfQVDoHTumMzBxL7hAZapBC21RVxNcl\\nm95nrSNNt65536v+n0/8AJpGcrK6sZOo6mDGnBY+DNGIBAjNkNSLUX+8PyjhR4P1s69NqOn+7gZm\\nczKIpQ9BuhumtoM5KJNh6t1ibWJlvloHmp8dQNqFuyOEOMzTKwu0QmhE0heMhTCVvrLSFU812awr\\naYuVx3Po5B0L554K526UMtsufPp6lsLSwwTNnvIM2dTe+6eH7mOMiYHPAP9orT3sbMXF2A8MuMaY\\ntdba3e7jiwFdoPs/gE8aYz6ASAmnAzcueJBstngfrodglXTCdA+kbjZ4ZqGRgmlC3JRk+rAOB9dB\\n8wyo1mDeIi1hH73RB9VchyAbdMGdIWFj8bnQfA00vgz3PigNQqME/e5zUWApk50RlmabHrCkYFoS\\nMLOBaKvN1TBXLeSJaQqpYCUCsm5cYAiZU0ko8oJBGqeCpJbDZxsAgetgJnWbFekjaLkTdqByHmQ7\\noPsADLaLzqPH68/q6Ge06p367q6WwSBuX3cXJN50qtC4ukmLg9l5CU6l++Ve2VnxU4NzHH7c1osh\\nWereZ2BfDp95DvzUHqjOw3OA2gNwwfeh8zWRIapPho+eibntHizDUtgxCuY22ITaXggPiIbb3QSd\\nm2QWX2USRtIehpfjgM/8/cGuA3QcIrXaAi46SabRt18DaA9CthrCjRCfDJUxyRgxy5Dpy06rsO6Y\\nWSTs1VhpU1gnFejNQtqc1UbhfjMJMA92WI5l3W+mK1vUgegcKVS6j3xW4uw2aEzAbLNX440QT8ps\\nhNa9wo7Vy+kAzVAGezMo6YLBGGQNsClk98OqafKp4F2gMQzmIghPksHXqIvZcRp7KiBrHZL3z0w8\\nCksWIhiLMGOMAf4e2GTt4paUfDh7pLSwfwaeCaw0xuwA3g5cZoy5ELnLW4BfBrDWbjLGfArYhHTN\\nX7OHm8aW7nUn8NJwTE1mb2Xu7mQzkO4EWwV7sty0dFiYq055MkHhPgE9T0wAcb1NDYIBl/YzKrpd\\n9BjIXgUHb4PJBpz03+L2KEPxowTgAmeJNIDM6ZA96x+EELjfTU3+q1N6oEhyPAm4ELiPwn03FCer\\nuUvbh/gKGRKV8IFW3VWTCpuIGhIYCuYoIjougyLdW6y6NYqAfr8muVA2hwKugohqcOpqd70yNeag\\nu0XkhHzqrXX3yYoXk80I4CYPyj2ILxG5o3MTDL4GgtOgezXYzYVmn6QCzPEW+P40vHBU6uenNkP2\\nGej+BHzgj+BlBm4OYfM+LKPAGIwMSbj2dGDljGQzhA9B55vQ/S7U9sF4o6gTlW6MtzW9a9Z5tf5A\\nZDPnzkfFAJF5TTBBZt6xAaKzoHomBOukjuyYtOVkELIB8VL8hmdDAUgdBY1uHgswibS5LCz+U7tL\\n+k/nPAFCGxX7mg6EbZfpMi9sOmxAdDpke6B7L7Tuh8ZOyXqJkf7DE6B6CbQnoTZJnozbGYPwLKhs\\nFAJlGwKiOMBM0t5g2VwAbIRoTCbgGC8oY12A1jpppKcxLo0dKeACTwV+FrjDGKMrMrzZWvuVIznY\\nI2UpvHKBrz/6MPu/G3j3I5413YG4VC4nSSPbQcUFUPZLx4xOQzqtUs9O76jn3w/r0EEfj6IN2FQF\\nZINpd8zVwEoBzvh6mDsZHloFy/cVeqnqc36OJBpJ7yB5rCu8HdKCRYRNYSXGwsq7gSlIl8GWiwrW\\nM4i3dqy3gcgPKzIY6kr9TDk/XwHSILKLMtrMIYV1qVVGBcYYKpdK3WZbZCJDhMgZvsu8kLaVULC7\\nlKKV+N
F5H7gbDejc50A2gEhzjdzqLdlB0ewxMrsLC/EesCPAAGQXSqWYD0OwGbJRaL+RkYOXUf1c\\nyM/s/A7hrx4kJKRChRmeyTAN7mMrd95R4UFqdFgB8Vo4vQqXAGcmMH4ABu+B7DZoXQMDm2GDhRVQ\\nH4OhQYhrch2phW4CnVQcjnReEjoyDTSpB5CPs4nkH+P9pk0zNXJt8blQOde142FgFJLl0F4O3WGY\\nrwqYO0etJ3/XzzqIcIOYbk7vCToCtEEGtXuh/QB0XgHNsYKV+x5SjDyTLW6K7FU94CSMMQg3QHIG\\nJFukfzZ2AdNgviSTZmrbyIPV5rEw/Esy4KTbINkj+9iGFNxE0gfc/BFaBuwqYL/zblWXhuKJJ9pn\\nfziWHGEerrX2Bg6lJUdsx2emWbJbgMDUJRBgnQ6YdQUgw/WIe+FWRrFehMH60oFn1kUirAuTWpdq\\nYKoUKn4mQ10UiobY+WmodKHTgj23wLJ7ihS0/vxAW3XujtNys2kIFLXcS2iFdZhM9MJU1+hrwKoZ\\nmBmR49fpJch+PmIE1BNYs8Pl766RNKoIqFjX+VLJPoimhSElAxCGck6buMbs5kUb5142m4UGqUGx\\nhSLtftX6KUtmgf/g/7clnY5IBoqs4QbSzHkFLelc6Z1gGo4JXygdPoug81yoB9B9Hzzwe4z8zcX8\\n3NQmBjnAPdzJ7dxEjRrr2cgFPJkqo4wTEmOxmkh8QRWeC2yckoV/uAHS78LgTbC2A2vhkhE4uw7n\\nWDgvlVUWSWX1yP0RbKvBpIHWsLzelsDmQQkk5RZ4mz/oq6tthyE6A+KzxJtiEBgSVtsdka1Zkfuh\\n2ShqXpCqZ0JF4IJkBvGWui6gUZ+F6ADMrYKZ10MnLLwSP6hl3H0fDWBkEIZc+mJlEqJJITtRTaSB\\n7HSZIJPucdLDQ9DsFhp+eKoUwrgUvnAZZCPIwrxeKpEO2jYAfcqOxQGytinXAXTw9uvTT908SjsK\\nhrukdnwAN2i4TqgtrYNoTTpREPlsNVLkLNNkVv3NN+tGWJ8uKlp0PW3IpUsFuMYxIMy3uxGm7imC\\nIAqEUmAnX3RFj8wmJM/ERALcuZabCcuNU9FyM12hZBAGdwNu5A9a0B6RYI7mKfrg1qzAjo2yb+hS\\nuipAvQ1phXxhlqwirmI8WzBsMrAHZAqr2SD7VWaKp8tq9oMPFr5W60fTNaK+EPFQMNAsktRCdgC6\\nt4t8EK5yg51jLyYSl7F9t+QE2zH3LLN/gywD04Lg+fAXn4CtcMbMDlbT5SGmeIjtTLIfg2GYEdo0\\ngVH2E7KZQbr1DfDMYbjUwoY9MHA3tK+GoZthwzzVjXDlGLwsg4tnYdVeqE2JR60M3rpxa3o1kn2V\\nCQHdXIMvjsBfDbrrnKDIovE9IQU5AghWO5d9DXmun3XSgXWDtAK2DmRp3xYBa2ehM1xE+9VMULSZ\\nRg3S9RKMU814Esl8mXOvWs4hJGPlDCANwY5KmSoxxDGEczJIhsMQrBRPrvuAAG66T1ICzYBIEBWN\\notakL2qGidUg3XwxQIceoBrk98PFA32PbwkBt3XkEx+W1I4P4Fat07gajj1VXWC7S76yf8+sH1uA\\nrdVkv6RP7zlc2D1GFgkfIn+cjc1kM0YaEEamjqbrof01aByAyoAwhyRzIFmXTfVlUxc/JQpcmZWm\\nxk5aiGSzEXnK2Mhd0F0GyQgEs7ISdTgP8YgEYVLjxf5iehLXowySquQsxjMCpNE8RLsp1m8y0L5O\\ntFHbgOGfBDsN7edA51pY5pDTnxWlYOHSX/O1S/VpPgq2PqvT6tbgkIKutZJj2bkPsp3AkMsSqco9\\nCIcgHJeMEjsF3f+WP8evguQZ8PGz4Y45RtjGOhIMMMoYa1jPDNM0mGWQEYwrRBto11bD84fhKcCa\\nnVC9EZp/DRu2MXoGPHktvK0LY7Nw6h4
YuAHs/WDvBbvXeehrwFwM0Xqo6YI9qyFZBstXwYYxWF6H\\nj6yGfVXIhuhdblAXO0pxbSoU0DVDXmV7kUmTSNuoGPJ16FVa0ASPAJgcLp4K7HsgLVO8T+NioJxF\\npmUfdOXTYGeEzP/cjgy8c+5+RUbarhkrbnIQOiIRym9xBThV4inZtJMEAnfCSBqLPtk4n52XSYaC\\nn7PsSyb66jP5gF7SoZ7lEtmjm+HmScgWuk3nXlSE6QSQg6d1EQrb73t3BXhtl9w3MRr58Mx4eaLB\\nqMtWcOlixjhZo+IAoQ7BucAFkH4O7E7ozELWFrDw5xXbRECXQO6kqTtgGaBYrtG1LBPKuQLXqiod\\nkQOyQEA4bEE9hOYGWbsgDQt3yxaXRyURZqOrhVUPyECQrXLsdkJALFgOTMPAagivk8YfdeQ4o7Is\\nQt0IoelamPGziTQgonmjeT1yaE6qfq8dSCdRGERvTJwXk02CWQnRCmTx9kFItgljii+C+Bmw50lw\\n6zDclAIzDNBymUQRdQY5g/NYwSoazDHAEINu/tMEIVQrsBoYa0LlPuh8Fk7ezrrz4e1D8NJtMHo7\\nBF9yisscTH9WVuzSRb5rwOAodD8LZhKiNRA8DqLLYPgMWG/hxQEsq8C/LIPv1imekKCDVEqv56Dt\\ny596FnSkEKHTXgdculrXgbHKSqrpuiUoejJI9H7pYNh2ZZhAgq262LveS3ffmZJuQ8uVV5f5CgzF\\nEzTcABB6NzZwUVLbgM988528cONbqT3btZcm8vTlpmv3MTJztCFtoF/v9zNtdIvpJQDaxXWwWCJ7\\ndAOuRoRBAEGZbjBM7yoTfb6sdUt65fKAnyqj4Fkv9g9cLmoOti4/N386w6AAQM9y/THUfhxm3ybA\\nat2CLQTCsoPYufMzMhBkkw7EK2BGIXTnyec19viCyDoBNWE4uLQ1k4rbb4x0wLRGsaSj84WyirBb\\nTQNL69KBjVuqkUEpQ1SD2hzUPwPLtxbpTgAnw8oROAuZvpkZ2B/CfXXYo0DRnwKVFUU/RO+13j7W\\n+11B2ABpPmVKBgxiNwgOQ/vb0L0fonNhxzDMCqU5QMQ9xOwgJKSOZZQRTmIDXVaSUMHwPWrcwTiM\\nj8gMwtpesNuhsgVWWS4YgysmYPkDcN3FkjZ86S9Cehlc9SX4ypVc/L1f4ZbJv3Lp3f8uiR4RUNsp\\n3vdL30XywFO4KKvAr2/izh9vwbI6jA7AbRb23eIqcggBvgjypSKtSik1qRzTcK66e5ZbZ1wG2/a4\\naz/0riWg9asxBZ8B4n0GN6HAverkGj2G/t5E2oJbV50hvOexBdJ+QPSUqCJ6S+CW7LPz8PGv/zZf\\n2/4hXnTeW/MDpzslp9e25EC2LeAbZIcCrQ+uOkXaX99Bu722P9Wdl8iONGi21Hb8GK6OZhnSOFOV\\nFzq9oJlTBtVuddiG/I6YqnODnGygi1abCAFc74kB+eNwdN6/P+Rq+H4UyWPUkR7vtUve2u2UlD23\\nqiS1mzr542HsvJNDXEDPxJKKE58hDTNwKBdlIhEY57K1N0DiVsayRrawISBsAwe87okGeW93qUPJ\\nTpk80c6kk7k5mXFNyODTgVMQd7oLPGjgthp8O4ZmFbqDFLKCPytKmZXeO3/z2Zn19k0DZM1gKy6p\\n0Wi0q8/kARi4SabcmggYpovlPp1qODgMdbdE5PMzeMosvP8gzAzA8rXwEwGMz8tDP9OdEE1AXZ56\\nO2fgi+Pcc8VGezYb4RZj5u/+Atk/DrP9czu4efeFBZFvzxUtYdsrmfmlfbz09nX89+iLeeP6N/Pe\\n1+yE1gBcFMB4CC/4Y/7evse+ztxiJBtt2jUlY+We2xl5zeMSuiqB86XjOQdus3LPworc29Tdaz9D\\nxnexPcIM9KYXqpuuvytgKXg1EVydR1juIJ7i4THdykFpe3EKdgI+/2/v
s1++/S9MHFYggr/79K/z\\n+lf8NXbSeZ8h8hinSdHnfQbrdy9/PQd/CUsFXJWndMBYQnt0M1x/1Ms1G9VnK/QExEyFniwF/3tA\\nGJMnHZhBB75uQXBlsibygHbY/d9fs85vEVMUWQ51T9PSqPscub9jE2E0uf+TOnkDYTs0JXOBrtcB\\ntgpYV5/qWJ8lD3jJH6GyH8IZYbbZIMyvk9cwcRLCg3IuqkgE3DGUMJHk9tb9MHOguKQBiFKJl1ya\\nwcpEZrEmBs4N4PEhXBrCt4bhtgHYq49FcasmomvZKBCoy6db/1oAWSCDVlB1HdnpEsFauVc2keuj\\nDXNny3HXhvJooYuqcs44gfNvh5P2w/2Xy+cV34N3PQR3PV8m+K9LYXgCzEFI7oWRDAxsNfDeGnz8\\nGTw5celJj/8W7yZli32q/aT5VfO2t7yUc5/9LX565e29JH1vzPaHdrELQ/Ccp3HJPzWgOgGMw2rn\\nZZ92CReZ3zZv5w8oJgOo5t6ZlAh/4AZ1o4sMdwSQQ0u+ZnKEtA/1aqLQeTZObkDr0vSus6Ayqj+T\\n0TUdmhSLJfmsGIr1S/S5dOrcGSB0WT1dp+lm2+BTH36H/ex9VxXt8nx4/TP/WtYWmZYAWjohsQJl\\ntr5MsBDT9buaauYqKWjs4JDJR0dnj27A9dOL/EAMUKzur58fbraJx2ZznXbYga0XEAvctC1TI9dt\\ne5ZQ8pZ7ymJh2Xl+sAMKH1jtrCdr6HVkDlBt0fFc5k7e2JSlNA7KbKfosRDpRHPHgnVWgXGuZ7IM\\nuo4hpqo9T0LyPckECE+RKE7zZMlgGEHSduwaaB2EphMrWzJ3YxiZWLW6VQBuN4DRCNbEcHoI34rg\\nS6GsD9MztdWPHms6k77qPinkM6YC98QH42nn8XnQeSw01sGsERY+MyAu7hrgpHm48uPQekDqdGAN\\nhOfByuvh7stgy3Pg7H+BC/8K7CkQPUEAK3sI0ntz7+n7Kdz9J1yVzhfPXrR/ad+Vv/+Q/WMzYkau\\nDnnnDTdw+8zTpBVa4MKtnPek5Zz9hdTePTJmXnvx1Sy7dQ2XhR0YmYR4GP7meVx8zeVc/OEY5vfS\\nGwgyTfI1GkwFolPdDpq36wAtTAuJyHSR4KoDWhvKRkD+tJAwEgasmq72Xl24tkLBYGcpMimUDfeF\\nOGghbUNnEJomfO7db+SO735FyMIMdsf+7+f7v/cNt8gU7ga8+a+fyFtHbiKdchruw4CtgqzGAvwA\\nrN9XVNLy4xdLZI9uScH2bf4InM8+Cxb4A4XOqdkMZrCQCcI10sCDEQRgtQW4Y5hqAb79D6PSxo4B\\nliELjKu5IB2pGxBA8g/3Fbv40VgNePRrVNqgIgszu6D1Tai7ufP5eqzO3TY1eR856qQrlkVbILwX\\n2luEJZtBuaZ66NLEXPJ5uEpSsNo2lwa6CdydwUQkz9Qa8rJz4kxy4gdD97AgnxHoZBBVLxRo/dlo\\nHQRozbBQQOPWCDAV8vVzdTp0NA+VeRhYAYQCEmcncPE+OOU70L1WkvMJZDCJYkj/Fs69DRovhr0v\\nEX36Pb97GSu+cDK/8ZpP0N4sLCsF7uPesZWcOfhrXJX8F1865xXm4quu4sMv/zMp89t/FT71DC48\\ns0s3g6xmhcxHQH0Qat+Af/91PnP6WeacmSl7t1llfu6Ja1n5+S9zy4YJ6AzJs8cGvaZV5JW6L7IZ\\nyKYk99ZMSMAw96GtG9Qj8UiCrgNVHeCdm25DaZNZLHWbOKnKRAV46aw/ze/WDIohBHQ1uKcUfq2F\\n1u7tZOtOzjNUlOXe95//br/0T+/jcLZu5Rn8yjtPsgemdvHyP8A8begX+Pn6P2BbBanQcvnZBz77\\n13blCD9QyB/aPzQRaQkZ7sKLuhx7Oz6A60dZcx1X7wiOoTrAzOdYq3iIc9ljB6Ajfdso8jDASPaz\\nHc
dG9dlfPhVxYJtVyR9fE7jhPlwO6aT7X1tGddMpAkLsK4DUZ+q+HOyWys3XMNDAyBzCbqY3QasJ\\nwUY3gLhJC2bQAfq4gFXUgHA3dP5VFlrpTEtnTJdB1oR4Qlz1qO6CNdNSN2kd2vOFHNCEW+vwDzW4\\nsgZnJLL8aJwJy22H4orfAWzX2Wb9mwKsz3C7IdiagLwOdoGLWgfDjuEOuMGuA8FBt+BVKvVciWHg\\n2xB8GDgVgmUyzcu2geWQrZd70P0mxN+EFU8D8xT4gxdcx2evfweb77iJk/Y+kX3JVj7Px/kS7z1w\\nM431K+Avv8WNF+2B2g7g2cAX4aqXwJu63JbgFrV6enGrxteAfRCCj8Pld3M7UOFcnva6L/DJk28X\\n/LdGiGbHwLzv+vptIZ0R0A1mJG0qWOnarZ/vpO25i6zw5ueA6S6RAHFakxMFCQQ1GdSikHxRIWWW\\nNYpFeHQ9hEn3+dSDMPPJ97BheAOPec6rCFrQOk3WWP7WR97JwevvgjdxWAvHY37mxX8C/JyULRMC\\n4rNVnyctBL5QgK3KH/1p8368YIls5pF3OSZ2fABX3c88sALFsnRDopeqXKAzx9R07QQTyH/yINio\\nNGh9cKEyWxMji8s4GSBfK84BblYRYMqqEvUnEfCrPBXa34AskfQm01k4j1Br0A9aKNjqEni6ApMC\\nrupWlRTm7ofmFgm2pQbsgLChcC1kB8hn3WW3Q+Ub4mbn6TMt6O6Hzi3CyIMVktNkBpwruw4a90m8\\nZl7O2TTwRafTPieG50ZCeiywOYCvAjd2JYjEfN/WpGC1uYQQgB2EaLUrw0qKZRoBM1QMhBg3GAQS\\nGa+6AS6uQ3wfJLFMLdWlBm0K3eshuVEAzERyb9M75YGVf/TJZ9lNe68zr96wgrWVJzLKOHvZZDu2\\nGXzHsO48cQyaA1Adgc7j3EMgJqDWkgSTCi6R42+Av4PwEgg+Bbwe/vLZXPQXm8F+3f7zsovN5t98\\nP99Llsscg/2hexiJYqd/72NkScSgUQz4PekHC8yWNCEFCnkIbiLydLKgK2mKQUeCbFEsg1USiTbU\\nH4rQiY4hcNlB+J8/fjuf/893sn7jOZiPvgcy+KXr76QdwdNPf7F94wfexmHM/MLL/5zLr8jLZipB\\njT9Z/U7sRG+qsfYF/ez3EwVlPwjoe0lQgPMS2/TSH/KI7PgCbhuhVmaZMLpg2LEAFwDy05AMFPOu\\nLXnebeBmimlALBikQDQvh8aq3+LTEf0tkg03g4uOaI3pTgEzPY4/YruYXO4m6Sitp1bvUTPEXKJE\\nz4Lqqrs1U1kPt42swNWZI1+k21rI9kJ0nVSLgnaCBF9MB9IOMCmzguxpMqXUDEJ4MiRzMLtXsiBc\\nsKQ7A1tH4O/H4OYqnG9gmYGbnfY5PYewcAVdt1ZQrgl2ca5uJAAbjoqckz9Y0+sxxjhvY8BV/0Ek\\nuT6TssczIpN0bnIDLALgmsNqLcQXALfATGcvyWUHWFl/DH/9yV+2m/ZeB2Cv3vEb5k2rL+QxlYtI\\nnVP/a1y69V/50r2nMJZaeGwNVh+ATWfKUnajAxBOiHRSnUMeJrUaZl8Lw5uAb8I1X+G2szeYC7JB\\nOCUkS5dDdwCmY9gTytOI8mnPvkpVARErZmWwjnx9pg9o84bT7dtHG5tDq6ALQYt8Ik13WMiCrQj4\\nxjUB4NAUkx20HQ4BDy6Hp//uO3jW+99B/Z45/v+3vdreeOvnzMVP/Ul+5m//g+R555o3/t5n7Z+9\\n/yV+6cyGk87hogsu59Wv/C2SVsd+8BPCbtfVzmG0u16CgCwsI/gExQdSlREVA3QSCRSDxRKD7qMb\\ncPN0o4pzndfKqxkRV9TUDlM0S/6UV9zU2pzlDpEnXx8ybxXX4f277kQtk5BPJgjaSNqSQ8X4LOje\\nIfv2g6Q7dQ64iuN+p9NRvb/xKAP
WfuUfO0QYUXevN9OmQ77MnQat+gNZIOzYDDpJYhUE4yIttDfD\\nvl0w3RTgV61vFG4bFbYbV1yX1+mhKh/4WxvoRoDTZCMXpAxWCOjm98QDGOPEReMuMFwJ6RSyjOA0\\neT5zfAkEp4MdkYwLk4rXodOY66fAv37gffbLf/4+88zx19nrJ67uaRnv2ft089Taz9Nuz5mKqduO\\n/Y55knnNa5/L45nCPqsGl/w6b/mTz/HuT5zNi1/9DM7X9O18rdY5GH4z+dOLf/yNRHe+hbtaT5En\\nICRVaAzCwQpMGcGJQ/VbfbWFJq8pgvkN7WuD4L3q9wFFCqIXgdKJNJUu8sDSCoSxSA7BgMvxjQ/N\\nf20BezbAYx+Aq9/9K/bGW2VdVxuQr3/gMl3My17yh9iKwWTw/v97FW4aij4fx1yy6hX84cn/TLqN\\n/MkY/YFwv1373ynD7ZfhlPT3eL1LZ49uSaGNi7ivhmi9AEPkHk6os74WnNqkQ6Ofhxs6tts/xPbt\\nm6eR+cdJ3CG0I/g+TgLhOqhcKEvThdO9p/BzCiPvkPqbD7p6Sp1BFHj7+B6mbjGiXyZ7kGeItaWc\\nc8isJDMKwRoBVjryg207prnBbeNyonA1xKfLjKD2TmjthWy7AP4w+TPVujpBIvTK5OfYamZCfs9W\\nyv0KR5AF3z3xTtP6VIsH5wrXBVyzaRkkww3CzswI+XOGshR5QGZA/sTdoA079t3JDVMfA7DfmPg7\\nFjD7rdbHzDKzjlT4kr3R/geyTrMU53TzKXuf3XT1W82pXMr5PU9v6CCzsZ5AEWgalzkAg9OQrJT8\\n5FYsKc47gG2+HNtTEHePE5/Nalv0xUm7wG95aekVRP2IVMXp3y3JXMiqENbE00kGnV5e622vkavi\\nZXvgRc9+k/nAW9/Em977BM57AqzaDsEsvP/Dv232PvRnvOhX3yA54V1HQGbhY5/6be6852vmT//o\\nSn55/UfIHpLfFspICLxL8K2fXOil+3Kbn2Pc7wwchT26GW46DGaFdNxog+iUOu22J3/kcMlzfoa9\\nn1ei//NFInfX7LwD3X4tDYrFBJQ+qkjZguhMGRBa10Bl+8LpLv0ukC81+MzHH7WVuPijf/9vWYd8\\nWm5iIBuQ8kSnQbTRBfYOQLoLkt3CNuMzXURcJ3sMOSA+Ra4nPeBSlnbCxN0wuQ/qLRjOJL1oCBkI\\ntPH7Ex3MiAxC4Qa5b2ZMQFTQRQJ4tumuxfRdlIV0P3TvkvNHp8lAGbhgkKbFBb7Yp/e6CxtWPI4r\\nz/99s2H1x2g3mnbf7BY9shkL1zCULafDvJ202ziM2fvspnOuMFf8y4d4td0nhzYumGgNMOrGMD3t\\nINjVkIxDcxSmh2F/Fe6K4NvArGZtqMe2gDQrx5oTOSBnugvsZDsUi8AsRO+qnuSmqQVdRFZzqWUK\\n2vc3iBgAACAASURBVMbpvWEd4lDylVUKOvgk2NA9DxL4p7+ShW//6YP/h5tv/QIf/PA2bB3Cg7Br\\nxyZoYn/nbeeaDavOwXbgT555l8wu2yXtKGbhLqrEQy9D32uTUFar+/ZPmFHPbQlZ7qMbcKNTBWTD\\nVQ5sR+lNWPVBV62fDfh3w0/ug8Jv6Ust06nBReKhdyyf7nSRNVz3SlJ3dlDARvVXl6abNxZtSFrc\\n/hQxPY2+Zn2n9IEtx/wqhI5BdIGsLiAVnymbcQ92CytOhqkLswnHKYRjvUYdeAaRtU8txOfIY07S\\nh2D+ZgmuNRuyzK8uHeHPMMtAOr3qtSspHgPjLKhJkNEepqe0roXkLhlsVYeJEEkiTyz1w9lqibD6\\nF7/qjbyo8Ub23r7DnDb2Mnv/lDzM7wXhH3CF/f+Yt1vML5qPkwC7eMB+zf4jgPl583a+zeftZnvb\\nSS/kleurMLsG4jZETWGx1iKKiLuPtiIZCd0xaIxIqvDeKmyJBWzv
SaGtmRvtotkc0uzoIo8XmpEB\\n37gBpqeOEvIHhGUz5Ivw95tmfAQjIsH5fUEnVASJeARBR6SyaACiKnQi8Y7aMXTWQWV3cWNf9cL3\\n8qoXvVce8mnBTMO3r/0UpJjvPPcq3vdLbyfZIl5SNi2DgwnlGvzL8GeP+CDqdzHfCU369vEBN0Fi\\nBEu0gs2OJTnK0Zs53EMZfmgnNMbyjz/rtNvVFOsOqMrvUUidaaNPJAV6kSk/qrflIXwK8TGRhgJy\\n7EBXcfKZsTepIdsD3ftkimz6EDJbLBPWUHOH1Kc46NTE/vFCcyP90yhw6aIne5AFR3RR8gzpvPMx\\n2DoEMw5wDbDKAe45ED1GmGsOpm6NXlNx36tQq9Tb1wmVXWmrbkK2H9r/A51rBHB1/FPiqU9I7o5D\\n9QkC+MFqisVZoOgpqTDdQ/T2Csz+mRzQVgVk49MlsBetQ2b/+Qv/qOn9dKl9+iSQA9t3Mzt5ANuF\\n0V3rWNlYzggy8UOebDHHLFupwqpzOO/s/ew6b5bJJ5/FeRd2JQd5IIGBpoBu2BGMMhlksazhvWUZ\\nrOjCbAUmY9gSwdcNfAHYNy9NhVmKLA5df1YDjckwRKeIJxeOO009cqDmcm51nWASCbLZNtg5B7p9\\n5s+WDJe74+l91vzDyI0WLqCWunVv06rL47UwuAXi/RT9JB9RnXlTB7N56Q/pXucdTTrdfQZZDjWR\\nSvNjEH2y8yHTfH0t15+t6HsLxsV0Xrcda+3hXN1FmTHGblgkzO0wHPX5Hs6OD8ONz6KYWqto5fQ+\\nW3Ujmy8pZK43aKaBr+NC4adk3uc+1d7UZWTOEc8HbaWmXckIaN0I6WbRbfWxNH7wq+IOEXqnWkgS\\n8CO1s4H04F1PFCAfmYDGOFR/BeIutKYg3QKdr4M9IH/KMSyUYEigazToNQQUA1DIIQErX+vOH1va\\n97txx625pyBOXyfuqSoFPTKIrnZC7zF6NPVA3F51jW1DKiy7Hyotdz1tZFLAhAwQSebkhVXCwHtA\\n11GjbNaxv2mRh8bitYxU1ko2R1t26yCgFwMjDLGB86jCvhT2DbM+Wcv6Owz8Qg1OTWF5F0ZCGKxB\\n7CQBG8gSs7+wCb567aU87Rk38jsnJ9wdwE3ATRb2avyoQcFu/Zl2KW5GmOdzZy2weymmOTvh0jYK\\nRpvNykZa6OA62UapY1aDYLn8L8KBrk8jY5EUwrYb2Lw0srApo0mmnklalK+HqLgBLmtSBDYTV/YM\\nWRvaPZNKZ1vaeUg6kHSlvGEmW7/G2w9let+6uNRIbQfLhZCxnaWwR3fQzAxRoJI3HSurFZFX/7nG\\ngXsWTZA5tqtBsf675yOEHzLWSGwgjSFz03OCkd59s0lo3wr2+zDcLJ535ae6JAir0SCymo//ymb1\\n0AeATS+AzhOgOlws2F0FERBb0mAxYE5yDbsloGQhz8bAyEltpwBXmwjVsk2RHQKlEz74ujQsnS6q\\ni2Bb4wC3Kp22cj40N0Fzt6cju2uzhnzdCqmsvot2n21CntJGImw03ArB5l6ynXUhOyAdjFQGEus8\\njMDlVAP5Ez5sm3w1KtsUMMjm3H8CSNNi+UEdk3Xtxa68fusBGLyWf669hlc+YwjOD2T5hsEUwmox\\nzXkqhOXnAlu38t3b1nFVuoPmKZZdCTQ1hUkzOXTdAnWmVA2wkWjn+SLsc1KIzJEA4/xp25Z7b5sU\\nz/byIqm2I3WgizYZl7lgKtJeiV19+X67C/vrgvimA9ZNDc5q4qnZyNN9fZarE42Soq7zmY9AsAxx\\nI7Le25+vLTLnBsYZyUVO2nL+wBYcyG8yGY6RVwVo41EZUMJxka24hqWwI9VwjTEfBV4A7LPWPu5o\\ny3F8ADf3vzXgpW5QTVygLC4A1yQOXFR3VU3ncGq9LwZRvLcpMjNGoxtIpw9WyOdsHto3grkVVjQL\
\n19QfmZUAxMjsHc1V1+L4Wq5fsxNA8gzX2Lzghna2bF7c+mxavo/WScNNtjngichTd2zbsQ49qXuc\\nUDoFQVOuMxynYCzKiPtnMWko3Su4GQSGJQCncURlIKkGuCCf/ZffE3e8fEafY6zpFJhrZZqu7Ttl\\n1oVkEnlQYSaSAtPIgj0HkcyHkCLQZIVZGbcIfOAmsWQW7Ch0Dgjo+Ljjp85lwADM/z6//I0KQ5Ov\\n48o9AZwZw6pYNL7dwIQRzHxwDHj+Q3Q/P8qd33ks3Lpd1jJunwZn3C9F8h9h47/vRgIewQryZUFz\\nloq79zpAzTsS4JBavTDrSIb/lBPfsjaYOTl2hgM0vdd+BUTSf4xboyQZdmCvI+pCIqwnL5i6EB3q\\nMsj53c5/CKyanS8A13YLhpyoPqUP8nP/1XVQohHxbsyoA9vlSE79EtmRB+D+Afgg8ImlKMZxAlxf\\nzKmI66NzybOY/Gmjh1ja99pvfkCtb6K/nfOYpDPbkMi5bUB3MwR3wIpEHrS4jEJa9sX8Nv+PvfcO\\nt+Oq7v4/e+aU26+aJUuy3GVsy2CKaaYm1ARMSbBpLyXUF3hTiIE4BIINxjEJmITwS0LA9GoDpryU\\nYOBNMGDTYsCSjXG3uqx2+z33nJn9+2OttWfPnHMlWbq2cGA9z9E9OmfOlD17vnut72pF+NRuvYTQ\\nu4lC8Tbg9UiF/nxcPfpOQDGfkB+aqezHIDNu7C5xkJBEiryXCZ9MgR+iZCQZl5bPARNyrelyBbGM\\nYnLruAdqRsfMKVGbbZbzsMWljeSvtuoawrWKYF14zaIyEM5jDU21ouRb0qLc8NnGxJyPbg7yHQIu\\naS4PXtYGNy78XanJKNHAWgXwBPFyJXJTWrPlzWJeEYTavpBPt17pz3Lfdz+/+UxONwJjr4e9OdKx\\n3qZPAqwdgy2LYerB0NkF2U3wiydA/kup5N5EwvVaQLJSfphmEili1cJwFF0VoolSAmGK93G1vLDI\\ntVTzjSmAlmj56aDsy6kVEr63FUefs85S1Ww7otB06SyR59elYtbbvWVOtOXQxgpCcaiSLJJ5l4yo\\nkjOm83JKteaoGJXr08VAt4+zRhPTdhZIDjLEzHt/lXPu2IU6jcMEuPHh1czNG8Ld+jpi6kIg5M32\\ncIZ8sVvTpOr+Vybe2jfnWgrQtwmatZ+CbCtkd0BtizjCVsprYAT6a7KXcU9hrto60UZ4PCP6MwQH\\nYiXBI5i/ow5uF7CUUN+BWdEA850F4Objou36CUnbTeqyU68auBuGTAuvOqvSnxGcSV77eWd3QWez\\nPPS1NTKBMf6t6vXVC8u3S8hWsqdIEZ0E2qNQP4FQ18Ep15tPCCBmYxSV1KYJXTqS7TC0t6DWY++1\\nnnZI8vDjkKfyPnRKUEBN+ikSJ+oyfuZcMo1w/cw3uK3zEzxeDSZHqu/rOGrR53dyI68H/n7p5/e+\\ndldz71V8rstLvpbTeAJ/TAeJVV60A1q7YWhc8GtsBuaeDMlpMnxNJy+8nFu2Scd6oAcoGY3QElDL\\np2ReJznkBo4W8hWJa/Y26kAA2tVFG3ZquodwCZ2YUyeK03P4BqUUmvPsDMqWDzoPczmOH9Bzh+Al\\nNvoHvZ+JJsf4FvgRJMXZamNkxWPraoQkp6Q/AlsrMLWA6WYLnEhxsHIYAVdvqpWii+OnHAQOynLI\\nE0t1im1GoKdJpNqtVw98tp3gXAlBgk440uwmaRm9GAHcZZAuhTPrwiiMAb8Gttf0SEaRpYh2M0vh\\nzVezNabCBHCa4KZkgvlGdK5K/vlpAUw3ALVF4tlOV0DrB5DdINeU75GJmM/K9smIjp9FYYwTODc/\\nAzSgth38XqifogtX1akGQfvv3CqJFo25QgvNG5Cs0aiEKnB0ROPKW6qpKffglWNMrlbunRLrANFp\\nmAbamZNFwu8WsHUJ2mFSriM
ZITjjXB3ecsv9C3TMYW9ni5/Md3N35Kylb+Ub+df9P+0+v/qVG3Gj\\nHMn5JNT4MNcyOC332u5p7YfQ+hVk1ijSVpMESVEfIdSCMKsGCFRCNd7WmxZsgOspWjt3KDpHxH6J\\nWJTOcTX9qwOeHw1jR8OYZiAO5pBkkI1CUjXLiPZtrzhrR5/HcH62eUOOG/6f6qKqIXB+QJWowbLm\\nDrqImEbdLwqF9R3s8kQfovSIsjscchgpBTNf4nhY9KYCZEhMoaXcGrFvoBtrukT/j+iETMNZgrk+\\nJt9748gmoT5WyrhiGNbV4cHIbd+BKLK7dM8Bz7Po0HbYSf2RUc6WmVmfgPYU+GGC08+3obNRTGo0\\nVCjfJDvKfqkm3QOh/0+g9V2kU/BWGa9kr3CqIdMsg0wpilAtHGhbOM8eSZSorSA8CHFkRnYX0thx\\nRrzrba/X2Kfpu3FPNxNfoRGstawTiqLeLlO8doss9rjqsU5yyHMFDBBqRLfZOn4zd7R/5v/dPw+A\\nl3Lokk9AY2DYDbhRP+1LPhU/7sdQP4t7hHsGf8LFrObUItLQgV+i03BGnI5AwSGlCiAxaOkAuEzu\\nra+rNqhasVdHl2/pwqJ1Z/1sGdDMgeXi5BAnnKc7WiyMzjCM3x9214p52odYjtMnQH1vBLixRJRC\\nWBHt2dQF25Sh+WKtSSh6pOkcYYRQ77frkFZmtY/QrSV4qRcQnuY73e/r616SwwS48erlBYRcrhqR\\nAqsVNzENN84aowXf+/EneOwZLyqTM4qG+aSa6Dsh2yFgZR1HndINaQY1L4qEVfXSiIRZpH7msP7d\\n5qWWbHCOlBwkqMKtk3MuKxxrxl70Aa0bVRuqycRq3whz14pjJV0uO/WzssCkCKfpfiJ0x8DpSCWt\\nm8CdITuf24CWT1GZE+0l1h6Zg+x28Fuk2MnsUqEHaidCuljOxU8LheE0DTjzMD0nD2e6utCqw0OE\\n3IdcnXWYhotoLDTloZlZLgtEPMPiBarKbJi2W40yu57v8P722X7K72EhxU/AKSuewEuXfMgl7hyf\\n9w5I99f4rwJfdS9w7+RpvFlo0cXQ9wgFvTnhKEOHkahofSn7pUJ/WVQGZlZPIZXV+pEyjX3gjhSl\\nwdvikxD66yXalMxZlMIK8H2yaM6shOla8ZiFlNuel3ggg1U5//irufm/M3FNmU+uqrHqZLUIImsc\\n0BXOsAAyH+A+Ul8m71rYw1blMAFuvDrr6pm0I+3WgqlDygldbueTj35MEU+rEnqHabX9zAB3l5ir\\n1ro5Tkrooyipp/d/F5JWvw24BdjSgdwqZlk/KAsLaqOJCX0SxjLbBHdTsQ4Yh5leL50bkqMlYyfb\\nKg+nVTrL9xTbx8HhLQ/JlWp616F5tPCyPofOL8Wpku+R6uGmUMVxw8E5tQsm98DsuOzYr1HHnHKN\\nyTKopeAtwgE1+WJnj92yiKCOY0RD+mkK+VESXhZruPYc2eQ3C9Wmgd1Ke9a2cAP/yvMXHGwB3jHz\\nYC5srudhRz+HD+ypRhx3yxe4kCfwZvxiaD4CSMVySLQ7SeAireMIEEjr2PNqn9uF6/xOhhG+vqP7\\nVGdhsky4dWbBwsEs2SHR0pcor+40Rs2bo40yfiW5aLd18/bmlBWWqvTylfTa7ADsdRcXFjHpBapx\\neuYCgu5BOs2cc58BHgcsdc5tBP7We/+Rgz2NwwS4kds/LHrxe6+aaJxU3Sm/jhg4Vhxh8W61fmCo\\ntr9DOEy/szuRLc65sPh0PdQeDxuc/B1rUYDsJBIcYBlGLTQCYQiSIzWcZRTcOpi9E9Jb9SGYkzjR\\n/GfQ/pk4iNwgIU0Te4gisXnXd1cRVjvYBv/5IvAgA7KpYtxsfsZWmc3zHPlnbpto11bw3NrgJEPg\\nj1RqoCqVcDs/JaZvvK0bUA0sVXO4H+aMn6PHs6NO0F40nbEu
5/lT91UP+1DEb+5scA9Z/Gz+9IQr\\nuPQhHbe0ucbvam3qta1bWlvDsoFhvjT6Zs5ee5HMsR0CrNmY0gezom3mntDpAihohix65eocbclY\\nucWyTahDAQFQfRvyRdHCZs7DQdVyU0Kam18Ms8dDZ0hPnLKy7TKobwG3F3FY+Mq5xYtCHDRtPFDG\\n/pGr+n01ZOQwyUE6zbz3z1/I0zhMgBvHyKLgGkvM0dpNjgG3LZPTV3koA1t7qeffNFrTYmPANY3Q\\n5pcm2NyeyntmKAOtAe800OkTXjZdKZ62UJd3SNJv/SOEp3W7kDbZA+CmgB36kFpaZkd+Txvy26AT\\n2duWdGWLvuWK2DDGSoglY1Sze+z3dQSoJ7ZIiFA9FWC0tvE48CMUovcn1+rj+ZTQNUwTerq5yDRw\\nQ0gh+KaASbqS3qKOIB+yBIo5YAvfd3i/9hW452Tr7K/YnK9nzeLTeP4x7wXOrm7i1jTuz7lrLmfV\\nivtJGJYmG4DyjhZqNyD/T3QhSpcJKIYYuyh8xc8iXPxyhJ+1+T5ImU/Rm55oiJmflOgTV40wSMGP\\nI5EBaYFxSgvLHPCSfcaYnn9OkSIXg23VJ0L03f5E72tJlJe18LYDknghWCC5L0Qp9MqycM4tAT4H\\nHIMUvT/He79Xv/tr4GXI5f2Z9/5bvfcce5zMlR9/F0us5eorn+1xYylAIJ8SbsnPETo1GNAaxVaJ\\n+Q+HNp8cyHw0gK3mzeeLJS61tkw900uVg9O6vC4peFBbMPJZ8HugfZvwynbdrh+SFQLScxn4W8tA\\napZfrA3G/o2qpRqnFhtfGlMc6QxktxFCrqy2hGtUHmY9gTSBbEaAxI+pIyQ6cNKn16D95Sz2M11N\\nqWxj2K0maJhTyE8JD2peKQdcyfu6f7iw4rfM/Mot7ftD3vPw28sctYi78Ljr+ItVi1jEUWTbkVXW\\nRMNUnCsWTzes569Bx2ldF9WYElOHrVsuIF0CvOrcdwLItWPlvW+A2ynvk/g+aXQCG8GdJJZDnYJS\\nMDrBZQioW8ECK2Jgz1gs8bnEk3Bfsr8wrorvpuuYcTjLQXIA88l9JErhI3RnWZwHXOm9/3vn3F/p\\n/89zzp0KPBc4FSmq/23n3Ene+x4jdyCmiYnd6DhRvVcoS8Up4afld4kva7FxcER8CIsqmI0OZdXo\\nxyi027lhSNZA4yhCD69kWLW7ZnSAJAJIPUCSS6550g8sIoR04Qk8Xz5VnpOmocRGQRzMXzXXq95/\\nGyoD3RDOtle0pWRUKJHUwo7SaIcxgmsKbj4hloNzCBWiIVvJEEUBGqB+ohLfsacbQuxsPg2oyewt\\n+aMt4Dve3kG7E0XI33Pid81uBFJ31dpL3ermqeAd71i9HleDlS05N2+OsfiXShnEXvZkBFiKdOAc\\nkDkYvO4gVIIm4DgozBeb1/OZ3GYJDsucS49AiipVzfSOLKZJBrUa5K6YN/W2RMu4OmK27aHQvg/E\\nmabPrO8VoaAhexYSaNcKMk8Sm1vxuRqH7OkN1LGGvQByX9Bw58myeAZCIgN8DPhPBHSfCXzGe98G\\nbnfO3Qw8DLime8+WxnMgEnNHGYVX1GKv6mLWddELepzY+VxdYO2eW56Ezf1Ed21OsnFgIoXOEq3W\\ndT/x8rsByhWuYrK/qrWbjedEwwvpjxPyoKbTGsUwWZxfbFnZ32SdOFGaS6C2u9DY43lr11hhbkoe\\n67Qtcbcdi+dtFdSCecJN2/VG4ZiWOynaezIUDZgrfkcN0hPB7ZCH0+mKEfp7zUGaRhZJos7FGdn2\\nu5Pv87vad/S4ofec/HL7N3jH0RtCLYAQ/5oQahLb/MnRa0E1c0eRQVXXMRwUEPZDhAL5+bSMITlF\\n5frYdIl5UzuQeV2R98kiYBjSUyDfCUwX898NIJlkczrV6rpAd6CxB+p7CJk6XsNtfCe633EML
NG5\\nWBZjDyXJqWVk0QaWkm+V4twA8/O38QNpEiZ6dPwFkAVWmA9WDobDXeG9367vtwMr9P0qyuC6CdF0\\ne4gFePfymFSlYs64uj7EVhdxHgdz4EedhDqZEhHPJaPPTGx+2z03+mAigexoaKzTrKuVFN6oWG2O\\niVYoHppOcU5OPdChFOQYcBckSwjxm0mcfgyS9vxEqK2TY6YPA38UzN0GM+uhth5a2p4om4BVVWdi\\nPC4UHHY2hWRFZZBMa6hYPdLY1avotRiNlQw0Dce39X01xisViiQZhjgfwU8VgJvXZAyYQLoKDOjY\\nN+CJQ3/qljU/6Xe27j3Q3TD1Ldbv/BanJE/i/D0PCJ8/KHk6z+fvSiFrVarHe6Fccgjjl2t6cmIT\\nryl//axaA7HNP99jGE9IBWY3SqgJmqxGNNU2sAjmTpLY6UTvSzpDaCFV3wnJXYDW6AilMx1FLzof\\naeSRWZ8rzxY6eeg2odZFBKZGIXWlY/eSfT3/C4yQ9wUNd3/ivffOuX0tQ72/u+zrhBux7gRYd2L0\\nZfUGhVlNMUGUFyutulWeycmkzhuSJhtFIZTq08ahSrEG00Y03MkEsqOgcbpmXFkh3DgUIH5VsuYC\\nGOmBrWq/Rx1Qem7ZjyDfBMmJ8pDkDjoD0BgHvwTqDxBQzreIBpMsAZZA+hA5Zr8eL98L7XdBc2rf\\nSkKCxCHPqRaajIFfJefn2+AmKYc4RaZniCHttdN4sRmGJJFr9C0ku2QGfC58bz6uu6wRHG0O+M7O\\nf75XwRbw09k48BQAXqlXUHN1zmleRyPibquMllnliYKu1/oGyaguVNOi6ZLr/RsHtwz8HvkbwGpf\\nj6LNbQNvkz7gKPCbID8O4ZTb8gKkyO8s1MaBXZDdCbPfk2MnS+Qc99Q3ckTfGpn7cX1jexiqGkkU\\nEhMK0N+dFNwqVVChGjbcBBtuvhv7O0C5DwPudufckd77bc65lUgyFkiOwJpou6P0s24550mUQdVG\\nwwa+epPj2R3Zz4nWuLXydblGLpiZk4zIvrOdYjrFc9ZMa9tlO9p1jvgUpoBsBTQfBPV1CnIGto1i\\nB153lpeyDgQ4XYLEF8dZEJHHNo4m8Fsgm4N8pZqkNwBPVI+3LjLJKkiOojz5jZxNIFkOE6+EmStg\\ncYRZsWZmQ2nPSrZbwIJEACKZlbHLvZrHys+Z1uJUuymJLnBdBHJdBtM15R70Mi2Tpip7A3Slfx5O\\neRpvYWC6eEpsyG3hdpSz6XL1HVj5yEQpkzCHm+pIHIG5H0HtBL2XVX7TNF87aOTh9V4W3OAVAzha\\n5lg6S3DYJW2hqZzGNPqdUg2P/5Zts2n4vzsv4pbZa3jj8V8pNNvY26qrSdJUDlstyhALHCsa9pte\\nzrD4uqoTsCLr1ooSZtt9/tvz7Otuyn2YUvgK8BIkJ+MlwJeizz/tnLsEoRLWAj/uvYsDoRJM4psX\\nr772tk7gb5M+pNatmknx8fwEzE1DloeqdV2pp1DmdDuDmpl1nGgDIZYsiinzNTX5tXC6V17O+WjS\\nG7/nCWlqXlPV4nnnvJh9+V0CUGlL2rQPnE2oFpX0UUxcezhsJ/o3eaBQIJveCqunyj7HqsHg9bjG\\np/rZ4vMkpm4sycER0uhK5fl0H6FEYPzw1UTbmzfUR2kY1ye0xlOW/qlb1vdJv3P23uVxq/Ii/jZe\\nG0uUVEyx2iV5IFMnqJ/V8TSfg27sRnVjC4kzk8tXdm5SmfNuEVKmzgOalOI8IXvStNtkDokBnxau\\nN7sDss1QU4sr2wlfnr2Qdz/3BvFHxB2vTW23FcappRLHFse+imq4TMWBHf5WP6+ioPHVC8jdmtwX\\nNNwoy2KZZVkAFwOXOedejoaFAXjvr3fOXQZcjzzer/Xz9u+JzJL9ygF6MF1T+USt++pSSGqiKSbD\\nhJjcbFy0uSzvjlawXXe
AbECANtX2L6XqRTrJ8oaAct6Ul49UocQmfuUafaaanj6Y1bUnbK5gnG+D\\nHdtu44glx0nR6Uw1DNMm48kemfPuSGh+Eu6qCa9Xuw06b4P0LgEFo14z9bAbeIZCNAjHa5ljvlPQ\\nCC7R4a9mBMXex8qAhoLWWvshn6IoN5gIxZAcK8daygou6bvdrR5ch0tg2/RNvpPdq4E97s/dF/gD\\nup99A9qq1gsKyFbCMAaTmO7SsW08NtqZaQB2sOpzYWFcFkzeBL9L7gMj8lurpJeaRq0V6fyYzCFq\\nkCyFue1yL9KV8OF109SPFo077NskpaDDbFWJPYex38KusURsdw9qkF5RSr1MrwWU+0JY2D6yLJ44\\nz/YXARft/7AxcJVcv9GrKtVVv9cKiWiAAfgG1dEzKpMqsSI2eyHbI+DVyYpde5DqZX1i+tWspml/\\n93FwArJZv4BzHvFfTptjFTulmIizhFKKaQ/QzykrNa4N/3zlC3j7s66WYVHTzcUe7nnMM0DKHiYw\\nezrwb5C9BfLroOPANxTorMcbBEeYy+QYucU7xw+jhom5BiQa8I8XMA2ebgiFyr2CUOjYEPXxCtRE\\nXUDYWZTDErj4iRtwDr75638EXt/7Ahde3LFuHW/mlPB02PQ0CiG2ouNorgxCAR4ih2I+K4t/aY7H\\nZrhpdrZjT/l+pjq200iFj35CBmMpjCXmeueERsi2FZ8lIxJl4/olIqa2VkCYWLu1c4kd2zGXEp1T\\nF30Uxcr7qiOV8rZGHZUKroPEsg/S5Yw7VLkvaLj3nBhAxCSq3TC70fv7/XwjWKMUxO60IE4y0Ttj\\nfAAAIABJREFUAvkIuDGkrUtDaIbcSkCBmLXDUBuRiegWFRpekAjofQK+LqAbTL5MP3fgOkg9CL22\\nwDOrqel895oTz7E2cAlP5s7dv+R7N3yCx576ohDmlqeEHlNdT75FdGSq+aCm5hLg4ZraOwSpNiRM\\nBuV8QpF2LW1prX2MxgieZwPqAYq4NOQ3WYsQ0G/ZZC4tNNvYzC4Vo46G1+iKZFS2ecoxf8G9BLgu\\ncY6P8A2OYk0pmqlqLRv3bvQ8KBh76ExCPoFknelYeg0XK4GsaYkR5VD6PAoJcyOIF3cCcT4OUdQE\\nJdpHJtvlW4tFDWQhrK0pxtUNETp8WNQMfTJ3jQcOr6xyjDh0zESPnU9TdK6YRyw0zrcUmGeR+hyz\\nugBEnTIWSn67AdfAtqoqxJkmRO+rK2zVXIvSK+PsLRIBXN8h9IICQuB5lggo+jkkkL9fHHGWopoY\\nldBLVKNwHXEy2XZOq5ulM1CbRropWFGdaaSjgwbTxxSsqxyqwwwf5XV+g79SP3mxe//ZT2Dp0lWy\\nQDT1OuJW5RUtx7U1CN8LHwuQPB06/ynbJGqOhhzQaLz9DDCsgJ1QVKdKigfBDSoop/J7PyHfhSLv\\nel+tL5n8iHDf54t2qDrf8l71HRZW3IAb4c+4jK/xlJDybZZt5KcNin6MRaYr2DBak8x8K2Spam9z\\nQB+kw9GOddxK/7e53svLYydgCSVaD6PkQLPaz7PlfXzomlfyqrM+KItkQmhiSU3PLSeUT/QustDs\\nYo1WqH5edQhoen21sFT83pQO2ybE0CcU3Lenu7rYIch92Gm2ABKHU7nos6oZHnF8pZuq25iTB+TB\\n967QwqxYta/L5MknBRydFu+OF2qP3tymaG6kAjDBG2sSp31FtXprHslh9wpys5rxM4lk9cwI0Gbb\\nkULbk8VlVyPJ7HJvYr3/bqUq0Teuv4QXPvLd8gO77rilSkwt2MKDUBfmLXdj0Pd08FuE1zZnWVVc\\nv+w7GSTUiKChIGuHaxTb51bNqqPnVK+Mn2q8SVNvc0vvTQwWdYq+ZVbfsgNXbnwfL+k+xbsj7gz3\\nTI7hgf4L/oKeG7ySD3ImTyk53uPWe+j/uxyOFGDciL5r
T8i9Brnf+YRaErrIJSMUHQ+stkGs5fZK\\nt405VONp7aT0+1CjoiKv/qMPShSIciLOLsjmT013ryGBREksgc+1i42f00gDNp6ellJRkZYbt7YC\\nXYQraqfTcwhdOffVleJuyu803F5miQFrHBJjYmZOHn1nN1i1JZdSFMkwwFHNwaV6M+sFGBtgJAYk\\nVkvAFYALlDVHm3g10WzrXpxSeAH2RGvahuK5mpmVbZGXtdRxrUKLqhW7DNbkP/F83lweHf+1De9x\\nn3rxu+W6rMttrlpmQigEE0xEX5yXaxWf1U8B1uqk3wOtq0Urq4rrlzTSZJHs02q+WphQl9qQ6/mo\\nU894OOPznL5PdHHzMU8YZQ06TZf2M/KgfmvnP3af3N2UM3gmj+FPgC7AdRe4X/D7PKCrE4Xdkxh0\\nDWiN/TJGoPqiDe2d0s0imYRkXCgti8PORwqu0g3rQh8DKrajeKcUk8S4jCrHvhhZ5CMONV0NbpHM\\nQz+GWCrWM8zLX9/QxXIM3nD+A3jA6U/ixS94T+CDQ/vqXpEINpCWZZarpjoRfZkrIFe43VJRm4ZY\\nmCQUSTULJL/dgLsvqXJaUAZZKFZUe+h1giZmXueUwMA6n8YaWel4tUjbcDoZ4wUhfpJswjh9NjIB\\nM2ffdSganc1Avgc6myDbqBruDmBPoVzEFczi2OA/5TNIanRZ3v7Nx/C2p18lWvKQmK4kEqsbTMu4\\nFZGZdhZOFjmpXCZ/m4+V7KPOjQJ22WZCeUCnfaZCCqdK0qQoYmMScfPOiuKg2o4G/zNL0dQyWnDt\\nvRskpEu7IfBD8I8vvNUtHzne7xi/rWs8DlD8v/mXIYWVymf8WvdpnsQDekUcBiu/Vvk8Dhs3Rc0i\\nF+L/+zbSeFLThZNh1XDrouH6ESQ9fFLeBwetqxwonC3Fimygq33sQmjdoGrSWiCIjowjdbjsv/6W\\ns9b9FZPju1i+5uigQfoUob4AhuHdf/NLNt1xPZs2X092R+7f8K9SuOq9f7kBhmHp4jX09w2XFgLf\\nUS02XiSiZ7A91WGyvZPRxpHl69GBdP0yFjZ3SuOwAPLbTSlAtzt+PpkvagEFgCg4Mtekh7CKRqay\\n1XDF4k3tO3Vm+QZSdb+p2+gq6+Ygd5Ezy8w9M6+M/zXANQ/1pALsRgXcHeB3q1ZMGXCr4NsBruJD\\nPa9519Sd3Lzlx5yw/GEkUV3U0JrGbmnM7aUU4Vp59FJQdgNQXwv144R66dwMndvoRhqTPIpeiCTp\\nE3BORillLSVm9uaEppjoubqYyE4po5wukvVBuOClV7n7rT7b37j56p7jchDiVrmTeQOnzVsIKF5f\\nk8p31aAZO+UAttF2WQuytgLvWKThjsi9SxaX99VVftF2VHU2x7G8UYysOwbxWaimiTqRz3nK23EO\\nfvbTL7F87oWhIJGzi7VVYwh+eNNlMIe//GvBIvCvv2QdgHv0/V/A0cev41mPf3OgtFwK37vuk+ze\\ntoWnn/YmXA2uuMV+65iZneDXY9/nAUsk2O7ERY/k/iueLIqLFn5y6oy14us9FaSDlHulFNL+5TAB\\nbhy8CN0z3SQOp+phylhdheDxzirYHHlLvYKgxePm2t/MxGJOaULSEc0uNw//AOSZamxmZ8buaQiq\\nTT4lWm2+EzpbBHDzHeCmoZZ150405JDhZVbZf/h/7zVyfufUnW7xwLO4+KzrGBlYKnyZ13M3J0zE\\nM5ecjVWN3Tw+qpnnmpCRLIGaI8QL+xlgQKiJfK74fYhkqIspaOmi4UJihyjF8cwRV/LIG2LVot9S\\nXMeyU1dzvw2PAu4W4Lo3N67xF809oueXqzmZY7g/0A2oMdjammWnW9WWXPSdWenVFGADP9/S2Odx\\nyOsyZmmnvNNQnzh8QKHVmmnfRIB0jCLrkWg/sfMYOSGnjSkf86gXhs9KfEk05uc843zwu
Ev+7jlc\\ne+03/Ccue1MYmu9f92mXJAmtiWme9/QLmdk1yVs+/Eju2n07F77gx/x4y2V84ep3+I171tMt1wC4\\nkcYy/nrou6w54v6i4fbp89WgSCn/nYa7QJJTgILxtrHEjoFegdSRdzfpVy+2F0CtmmF+jhAPmo+L\\nBmeVuqyQuXnf/aTwaWikge8jFFhJRoWTcgMELtPOKZ9G4h41sSLfCfkuqcbldwrQWsEY02Ib0d8+\\nilBIqwtyqbuOv+Txfszvqo6e3zO9FVjm/uns21m+5Bhcok6ZuNNA7HTstaDFHudMNSHT0C0Q37RY\\n3Vdunu+svJC5mixQIfU5jul00bHiWNSYINWB8Q3I4zhq45/bYhn8r2f9g1u26HN+596N1TGZT+YF\\nW7usOJouVrQtwiqLPosDa3z0vU3NeEob8MbBGTngPaS6WPlZWcjDAfrEWvKDajXFnlQ7AZtEWm3M\\nDyMNoW7XudtPURYyXvhAANpSDolO3o5lz5rdswTWHHUaa5ad5h5+/6v9j667IgxdnufAO4F3usvf\\n4/nDh/45ru78uR85FYD9sO5+fG6nu+DMGZns1r23TlHU3QZ0geR3HG4ssU03X0hMr9+YsyVDvN6m\\n0UZg4pVT9bl+1y4mux9DJtyk8lgDSBjZtE7kpm5TE63VNSlifNWLHgqeWyHtMQFbPyG8WD0rt/Yx\\noDVcaiDPgWm49lA/idN4Fi8D/mHeIXjnNx/Pnz/xco5fcQauKYuJtV1x5vmuRC0EiRYyb2NnhcHn\\nim18SzUj1ea74itrsuiFhIcYGOIsuBidckoarW8K0HYGIR+gqEnh1dqwyI8mPPXkvwDOnXdM7o7k\\nlNcUKHyPsX8oXvsNv+JIhVgviJVFw8lerJgDqWI3A/lumXtJQ67djcviE4L/DcFtwjTke1/XxWmF\\nLHh+Wi9oHCmfuYQidMKAvRWdnIZxhdogtl28uuhFPvIR5wBXALhHP/h5rFxzP+644+f+xz//Mp//\\n1tu5ddPP/E82fOVAh96tXfxI/uwRUk3QGW/bRxFmuIBJD3BIgOuceyqyhKTAh7z3B91q8jAmPkDZ\\nMRXH+u2Dty2ZyFDY4BpP62eK34dCKLlqZy0BRKvrCgI0sbriZ5HYXNV8c9XEjF/KLF4oQQK2NabQ\\nOgX7aclpr+XdVnJMI/RRphJiwDXl71X8pVvmPul3+rg9bxC/Y+J297JH/oTjl58hscQIveBS0c4T\\nc1RVx1rHJIxLm6KDhtrRoW5rzvwFZZzythbaZPdHL9xHSRHOHHgxitUENPIB6cPVHtasvZqa3UCS\\ni8mdTEO9Dj/f8Y15Tubui69rl/m2jL2BZuzAhGJaGg7E4eKxH7fiJypJDL72f4f8oK1WUaYgZ8kJ\\nmFkda7aNwhLIG4ivwWLQF4tlliyniH+Goj+g9UVTR8F5f3cGF7/pp8WFGmURn6DG7D7qcc9zl7zr\\nNJiBV73sGPqbw0yN73WnnHCWv+GWtx3giBeydtmZLB6Yp3wr0UAtkBwkpeCcS4H3I9m1m4GfOOe+\\n4r2/4WD29xsQFra/lSzm8uLAR/suDthXJ5Y13POqoeEJqaW50Q6ecpseL6DtEGogvt/Gg2WOUlyu\\npXCi2TVxWFccThQrfbXofZOiRbtpv3Z5KXA/jmQtp1Luh14S/+GrX+vO/4PTud+aM+W6W7rTOSSL\\nDtlp0t/9Y2trE8ZrWhcutRDyaUL0gG/R7cwxzasW7V/vkW8IkGZNoQWSOUjjBdUp2Kpm2x4RwJ1L\\nCz8QQJJAowaNulghb337lRwCuef+fMVX/T9tPwvAXzv3Jfeq0c/wsObzyaahnoeytYG1isPEKvRo\\nSeIglmp+gIlN4TiJrIYsktld+p9+SCZlnqZx8W6roaDWQNbUMY4eYZfJmFo8NB2KrDHbj03KDC5+\\n60+7n0GbgFUfi4c
1q05jemJMih3lOd/63r/4G275/r7Gu5e4dUt/n/Me+e6uwQk+gaqPZwHk4DXc\\nhwE3e+9vB3DOfRZptnBfAlxX+Wvve3kjYN+gq5/Hefq5hWXRg1KAUhZOPN+8bV85tZKLc4ygqris\\nrPVU/9o+0srnVY3X/m/nEPOCH+Xb7rHuFf57/lLmEX/+Nx7lPvViL9yXxrq6JoXXpxmBb/xDc9a0\\n6Zn7HqJA0AchFg2jCyE8sSQCDO1hAVPnJRHEa+yyywU8fV1q/nYGoT0IrTSsXcEfaQZFfwp9QzBw\\niLxeV3fkUaivgmwrtPdCZ7yggDKE7rF7GdMEYX/R+zh6sDqVexlttv4kgPe60KvlZfcygG29vEDl\\nDXG6eTP9deKkLTl4msv2iVN6wYpAWMRIs3JCCsI/uuYLbNy8ARpeLzThOc94KwB5nvGvl76Meq2P\\n5UuP81/4+jv3MdI9xT3+2Jdx3kMvlYUemUPeLMuGzl27pt+IONzVQOwz2AQ8/GB3dpgANwZN42Hj\\nUwn2VmU7KGZyNLO9lxvmp0UrMz41SE4XNxk43mj38UNlINz1fEfu65heq0Y2xZcRa7mxuWoAHPOA\\ntnv7bAB4Aa8A5gVcAD70g1fyiod/sChCY+eojj+vJxGcatXxiC9RP08WUbYmYkkoNzIkuvgUsn4B\\nhkw71+aqoSYaBYITczhvCui2akVLI80XIUe0/yE9jcGdcNGFT+J5h9K5unKtbkAKuiRLJHQv3wWz\\nO+WeXTD5IN7JtcHKri6m89HjMfASbbMvpgxfABCOwuwxGqGp1IuOadYn45lrGrTLVKuNnptQqtGL\\nJpxExyoR0XqC163/Dv/66Zf7qemxMDyXvHl90ADyVuav+dkX57uC/Yl79Or/xZ+c9s/FJzbRWxQL\\nQVtovKQaNnKIcvDVwhaU2ziMgBtfR6/TiG04E0OyiEbIp5HaBBPiqfdjlFIbSxkrajabt9xBUa+U\\nAhxjOqAX3RyfSvUBrAJ3vE0vsLU5Z5pt1b/kgCPpd8e4U9nMr31HkNStdif5zf7X4Xw27d3A5OQe\\nBusS1GkFpfMpAtdaamMdX1wkXouFW9tvEqSClwXJT1O6dy56wE0b800BhGwA5rS4eFqXzyztOZ0T\\nwM36od0P006Mh61Iude9COYMIM2blgB7btvOxo29Qo0OWPw/3nlW6YPLNr2Rx615LumRCflu0XQn\\n7xrn/ev/mC1soLUYWhNIY0Zf3Ov4HsaLdC8qIY7Ws6GLF9cM4W9riyHVRpHOPKl94PsgV9qlMygL\\nVFYXYApBBglSNwPCh4nTbWrC7cY+VEuV+4+v/Yv/0Bde5979tl+w6bbrY7AFwNUcdGDX7o1MTu7m\\nIMQd0X8sS5vH8Nf3/wRMRWOUyDmSii8hMcsxNvcWSObTcH8F3LjPX26m3FhhDaLlHpT8BsThxhxT\\nVaqBkT56b6ikxTLycQHbfLK3eWy0g4lr6Lz0AspWuSuOvY8BdD5ATaJXjF+xhhyboga6seZsnKUW\\nNit15UmBtZzO59jAp3mPe6zb4L/nP8JDeSZRBIO/adfV7uWnX87vHfMq+aBNCM9yNaQwe6XEokt7\\nvAeSYYpYyBqh/Q0eGCZwvXY/wnjrgHgF13aj6CyeOkgbAryNJtQnFQRqMOOkNdct+tqDaLkNJFPV\\nDKD/+n/v9nvGt7FA4laNnMx5z7mMdGkiD3oDvvjLC7jzzvV+/fi3XS2p0zhd+NV8J8xNgZsRp2hG\\nOfTVwDRmxmIQtr9quQMR35tCshiSZfpaUgbcbAg6o9AegrlBaCvPbftzQF058VQtBxLRgF1N+HMQ\\nB6TPdZHswBVfvMh/+qt/A+DfcMHpPQfp3AtP4+w/fBs/+Nnn/Katd5u3dMePPJTzTrmcZc1jxPka\\nNdsMjtUZuQhvzui6UgsLyOPO5zQ7SV8mX+3a4qfA
Wm2muwXpTH7QJtZhAlx7QKtqZCzVz+NZbA/6\\nHCGRwc9ANoE4zeZ68IqOeTN48gg8YkCNrZoYNKsgWwXcPPp/NVmrqs06ipR4K3dgWi4UmtQS4M84\\nl11Muye4lv+O7w4XK9EmUCT7N6L/m0Sfu2gFcKlQCckwoQknTnk1QwiHjPN0Zb92X5ICeOyrkIfh\\noNOEvppEH8zWYRfw38BPgCnNyjKuZm8NZhMYZUFnq7vo6f/NW/9iKctWHB0Wok9cca7/6g8vCRu9\\n86xraawVqiHbBPkeAd/2XYS26XaPjYu16Rk/4DFlFVsvGUKpuEVQWw21oyBdg5Ri1LCVvL+I4Jgb\\nglkXis8BPSKoLNJBTyAAvTqFfQ1+df1VfOgDr2XT1uv59L7Hyee5B84/0HGNxQ3VF/PO07/MqF8p\\niUax0qQXkKui42ryHCcjsk0+n+fxYOVASz2WC/947zvOuf8D/Acy2pcebIQC/MZQCr1s9aqY/W2/\\n19xtK+ZtnvYumQ/QATcooOEyyKa7lezY6VUF0HitiL83rTT+PvbxWXRUfGqm3WrQQHhgDbTstzVg\\nFQNcwqfcEvcdvzt0T5ZR+fD6V7t/f+Jz6a9pHxcL62r3WICiCH2r3ORS2S6xBz6+CCu5aPchGtNQ\\nt1Tva9KGpAWNDvQpaFtkkl3/dAqdFHYC1+Twixlo7SVBwvpyEmAAxofBD8KvHbBsuavV677TPmhG\\nzvX3DfG6J3yah5/1oMJjGTTz8gQ6/tHrIFqAOpuKbfOtkHXKyQ7xXxvimEKwV4ZEvDAI6TJIjhDA\\nTY+S/1vMoO8rONvOALQUbKOmHMGnBtF8c5DUBMzwwuH6tmjAPoNNt13v79x8SNTMvsQN1Rcz2ljJ\\n//ewDeJP0RPOozBNnM6biYhGHiLUnCDvMWcP5aQGD3DD7kpr3vtvAAsSjniYANdM+yoBti+JAx1z\\noQ6y7eLkyCdk9U7UkRAXLjZvr3XKBQrAtqBvjXTPskJbjZ1pJr0021jDMEAm2sZ+b2BjvJ36B2IH\\nc9hHHDpkiRJ2rDqi7d1df4Kf2cekSwrzzWnd23DQGP0VbF2COOF6SVvM7voeWcjqw8I3tvugXYNJ\\npIb2ncD1GdwxRbJ7L8vYyzKm6cMzhWMnNXYzgyeDiRps7oPfe+UbOeHWlENJfDh17eN5+LPOKpcf\\ndDJvTjrmTOC9AO7Mdefw+tfr9ThIV9pAihWR3UVR3JvivpkGG0t8342vdUOQroJ0hWi1tdXqpBxC\\nwvq0k0g2oPRMIguVlZG1R8hCdbv4Yy9arsugNgE7b7+Fq676JOkM/nOXn3/Q47cfcY20j9fd7+M8\\ncPTpRTRQThERY1ZYogMyAIyLlemnZVz8jICv6wa/g5a4KcE+pSuxc0Hl8AButkdX8jj1CnqbEFXu\\nVgEgH4dsF9KRd1JN3n7h4YJZHXvl60gRFZSD9BROM03tZVJuvJmHMe9WDVWsOsZ6OdjsN6bZxHH/\\nZu3HuGb7MbC14YkTJuKIiLsrfo6eBUESe2qdAEApLipOpwo/oOhrhvC7oY2MBtinOSTT0NilgLEI\\n5kZhbkBA9/sZbNzEMflOTmKWk5ljER08nnHqrKfOBnJ2kOBpwp46bE7h2W/+Sw4p0yxB0l+bArKo\\nU8nNwNXXXhY2O/NRzxXg05vogPQIGZN8pzj7/GxhkNkwVcP67Lug2SbgFotjrLZawXYN0lxSOVvU\\n4Zj3CeWQ10UjNtBuIY7FGoLPpmWHS1SgTVvQuB3Of+uj2LN7i99x1+0HP24HKOef/N+s6j+llIQU\\nOjso2Frh/HyCEFnhphVkpwlZcwvZ8eGANdx7VhYw7uJuSLZRwDKoAzFfk/X4PH5vM1tvnJ+gUEXV\\nvnKD+rJyjbXofVX1TIrv4oell6YC3dhj4nq8jw9j+zPaoEP3pRvYxsVtLA04LnLTq
zb1fiWpgK3u\\nPInGqHYsRfZF/KqKJ7T68VpDwu9BSkWaCjsGbje4HVDbAX1boW8X9LVFqZnwNPIZTmCG02hxJHPU\\nmKNOmyNoczJtTqLNkMWIdTqy2zuGce/8qx/e3asvpCOe+1wBzfrR5XOYh88N9I3Qv3ykK47PDYlD\\nK10JySDkSTFf4jkTvw/lKZzytUsFuAPYrkZqICjQookNeUMjEWrCiVf1jvhR6GLovADZrV/7rn/+\\nOc7feOMP7w2wdW9Y8y1WZqeEriZ01Pqck//nk6IcMY2kzE+D3wl+N9JncJvw5PlWyDbLa6EkPeLA\\nXvewHB4Nt32L/E36KUobVt38UCBerBZmumKqFhvasezrUqohAZG4hK5Cx+YXsr9VcKs6Rqq0c8Uv\\nUAr9qr5ix0uVmog1XFM66wimHZA/wUrnQeE4s2NoaJxPJBQpWRwdBIqnuQcR7TPRQPIpeXD8HPht\\nss9kqdxXrLOwanSuJacz2A+jS+GkhOzHI2zOx1lOzgAZTb3/jpxlZBxJxjLmmGKKvNOCCQ0x88MH\\ncvHzSA38gGioJVmCdMAFHnTqH3LauicKPWWmhkbEhGSPUVEa5mYKqyM2BkqGQQ18v0Z/LBIqITlS\\nrAlnPcXieFtL242zyOimlSxTrcu5myEtlSYPYZwOQq4e/ySnNp4kES1GHVhYpjZWdbmerz5zObJt\\nPiHXm00RyjMupPyGaLiHB3A7NyIjXYf6CZTbkBvoxqhUyQrIx2UTV1dTTEHYys91FViJY3FjjRm6\\nPPuxsyrmxeaL4+ulAFadJnGBppiOMOdarEhB2UFXLeV4oIlWLm6f0kN8G9IlSDcAq4UQh1TEq0w8\\nEF5MxXyPvlSbyScIsVKhGIkWIU+WQe04ecj6arBsAB49QLbzSG662bGNLTyEhHWkjOBpklMDFpGz\\nlJyNzJH7VlFXfTYOizxwcRe//Se8+E1HkI2K9uiU5wQBxBe94j3ukjf/LS98zSh+WDnEeMGxSI5+\\nWaDyUWh3IG+X50EA2xSJZ9ZOtMlSSEcLsE1GKcBWJ4Cvq0Yb3WiXQy0tQtH6i1PpDmP0FEkQB8pb\\nLoz4H4x93J3YvJnz+n8QqIPUzomyz8MWiBIN01H/hhaEOhjabD75rQbc2jh0fo14xVN5GN0ghYsf\\n5rWb/DSF18mpaTdNkXpqDrJqwZVIJbTECCvrGI7jypvHQREx/lSxqCq2vZ1mHr2PFfk4iD7mc4k+\\nt0JOCsr1FNjBtnanFONVEZvVGUULnsrsTZdCerSCrTnJ7MJ78Sa6v6CNaE3hbJtQCfmO4nscoS+Z\\nmeHZZqEsahkMpXDcyfC0lPxbKxm7uZ+r2tu4jXFOYo6VdFikg9yPx9mAuhxqCWSLB9zw4BI/MXXA\\ngfhuyeAqLvnsaaSjfVKASC/T2eoKjB63htFj10i9XrOEKuaNVbRKFglodjLojIGbKw9xlsjcdCMa\\n5TAivG2yRGmJxZSLa2hMambpuhaO14GkI9fdSYv2Z/GxbN7UkIiEpKOAuzBhVW4gGWVRIoVmHlg/\\niz8auLirR5nFa//1csi36TnQO4zS5nqsT8UUnveFdblQcsBOs3tWDg/gpkiTvXxMvL1uQJxo1lql\\nRHxq/KDdlXxKwLTUYrtGV7JDNaTEW1aB/d/urt5p641GIiBVdW5FCl7JkRavB3baVdo55vag0KBt\\n31m0r/m03wY0UhjIYO+nuMiPd9fJBeD/3vYuzj7xIqkCBmLKLaHrVltvrfiBD+MRxzMRvdcQMz8j\\n98ESTvKdAjjG7tjC2AH8lNQnyHfJexpQG4CRYTh+JTytDj8epf3zJreN72Ibe1nBNKtoUwNuo0ZG\\nUxyeSxJR2o5ZdTqve/anXH/9bD/TPjC7+blPvpDaYB95tKAHYPJq4up1Oq8AWgUsA0Ht1pCPSCxx\\n7nTRyYrNkgH
ZLl0iYV/JiGq4R0ScbWS+WHaebxDqJFitBIsIiYHKblMczZIi5+M6cu6dkQMamn2J\\ne8bo23jp4tN4cP05B5wfG4NqUnnFABxHdsTfxXk0CyW/1RpuMLk1JTfbgWiro4TuAQB4Qj+yoN3O\\nUYCJbTaPvR+a2mViYvs4/qoHQFu1oryirRjFHLalAEu7njgiwX4TO0/2RUmYFhzHU0YislIPAAAg\\nAElEQVQRS6TQSGAp0GjD3vmqJQL+K7f8nfvYmReJ9TAoOzUuLV6EnFVlicMeYm+fSazxarxa0Jjn\\n5OU75dA1e5Ac4uzKp8VENhrCj0NjKyxKIT0S+lJY0Q/XrGJm4yLuYC+bmSAhp02NnFHoG4ETgGGg\\nPgEPPuOp9H15mAMgKt0pax/NG976LKk7oKCaZIR4YXIB39KPbEUseaPk5Rqi6SYjhMXaZfICJOql\\n3gNsl2nolxVDtgpgcTSC8rd5TcHWnGY6wWxeQLHAWQ5LXQvnO60StvbEh7szH3m2/+HVl+9vjLrG\\n7LT+J3POovfwzNHTik9rheVoz0v4gdXvVUojttqqmZuxlRf7u+MkmbvtFN7fBf02A+6cI0y6fFJu\\nVN6P3KwGXZWtvHUhsFoAeUQLWP1blVKzwyikx2vQq5+k2x5TNdQN6n6tRi6F1lmdAPMBLHQHWsQc\\nLtHv7JmuU/RYtH1XNOIEyXJN5pgXYtwz3fl8i3fz8EWv5qVrP0AaN+LzhG4XtVX0dlJCOSbN1Chb\\nXcyJlFB0Q27Mr4nEGn9i/2nJBbgx6NP433Q5DNRgaQLrh/A3DNGenJHEAu8E3I5NYBkw4KG+VzzZ\\n71m6ZZ6LKE5h1ejJXHLRVbSPJQBuMicabDoLbpZQ5KXr5Lvc//o3oWh6qJ028GIZASRNpLeb9S1b\\nJJqtKRThhqtm65sKuAa2kZYLMgbeFcBkt8n+pkCtXVyXgX+zb5DB4SX7Gp+eY7a6eSpvX/Ef3d8k\\n+oxokX/XR7nEqWoPdV+ArV2uLQyx8Wo/Ma0dwpq+TyXlYOReiEA4EDlMiQ+DOlmhaOo4LiaZa6p5\\n15WzSOhMEFTH+UoKgvCHsadTl1Cvn8cFbsJ2FiibyXHyvKyt2aRIi81CDr1tE1uhMT0MZXC2EDFb\\n2XOKsM+k8nkGHS83q7WPgAz/ZX8+gLv4uDdy58zPOabvgfhZQrpke70AnF/W69eU1Se70FjtUJB2\\nTdXwhoUWoibgGHPTMf3u6qoRIQtkPgbpgJxLEwG89Aho9MNSB8cDW/olBn0WeWCPQQIJGlOQjkPr\\nFqhtxj3KvdT/wH+013i4B686i3e99nL8cQJgJHIsp9qtU4DqSv2Dslcni95DkWDTlPd5Jpv7TBei\\npoxNMiIgm4wQOucGUr6umq2FgKmGa2CbNYv5Ehse8TqZAKmHdE7ANmmp1q7FOUIt3LspDx05h5/N\\nfZ4zhp8jzuhUr82iXvRvPoGkDKsSBOD3lB15dbr9EzGFFg91HK8+X/jlQcuBepvvWTk8gOsaxU0D\\nBVwDwKTQeoPzhTJAAuEuVksE3rjne3zs+teFu/jc0y7mgUc+TSaNHtvP6DlU1dY+St1ofTvSWvSz\\nGGh78bNE7+0BMY01DifrBcT2vMcRWWqmd2pwYwJ9mlewT3n3pifxjEVv5ejmA0P9iPaN0L8DZpui\\n5fpWdP0R6eb1CfFOwcl2ahdaR1IwBxRMxsEPC+jk7bKm4m3fRkF4GdNsp4JVKqDbMEtkmXToHUSq\\nkE4hobEdYBFCJ6QaquBS4U4fxcuAj1aHwL191XW85g9W03dCU9YMc7ypB99M73ATTeIHM15le9i5\\nIRrDgvd9YWG5oQhsRwjV22Ji3lrkWAhYrhEKeb04XJXKik8v0CIGuEYpzEGqC8tBoJb/0l3nu4Ha\\nKAONUU5b8iSSUci2EiajPUPJkFqASiL7KajP
FtRSlVYI520H6nF9VQZrweTwpBxU5aAB1zl3OzCO\\nBih67x/mnFsCfA7RR24HzvHe7+36sZ+jlEXiZyjqdSbIE+cElP0URbtv5c98Tuhb73NodSbZOX0n\\nb7vmob7VmebC0tGeDuAufvQGlvQdRTMbEe3X+Ki0AHe8aAlhjjoB+ljxQU/F6jnbM1mL/h+LTTSb\\nTHF0Wq84A09RGzqedDm02tD6L77vv+z/uccvi13sbN/unuO2MrQzZ6aZ4BqwYkKL5LTEmvBaoSkO\\nh/CmYdXVynAUcdL25GQCJumRSDr1COSj8nBneyDxZaW4S8myNvJRnqqbgIZulGRQGxIQHqJgd6z3\\nm0P+UzsZWsfAutsf4z7hPIsRC2EaSZBYvRoGR+Qk0hmoWVyr8rUuoo2CTRufaGz79rJt7XtdgHxE\\nS7g+BdkhQkhYCXkizjzOdgv/9zKO3kC8B2g6VFNXsE1bMnet6abTBWXxopVuzVGn+o2bru9xEfOK\\nn+6MuRedvoFLfvlUPva0DGc8li2Ow7rQWBx7DslYN8j2UrATusE2QxSJuV783ULIb4aGeyiw74HH\\ne+8f5L1/mH52HnCl9/4k4Dv6/x6ijRqB0Oa8tOdZ1S5tEscB53Ux44yk/9rGd/HRG1/rz/v+Ot/q\\nTDOP+PO+v45LN7yCL995QYg7df3yMCRDqolodlpiXQxsqY7EgFLj4EPRGav0FWeQEe0idhzEzgPb\\nZxupUjcVvawg96S+9gD/zkfmu8bS9X7ev4XdIzOc2oJ1E2KOm2M8gK25t5uScdUZlpqr2aAWTRlQ\\nHjGOZFCz2KnJbFylG5AD5K6s9bt5JnqINNFcVTcNjd3Q3AX94zDYkXV3SF99iPmc18X7nqyBgbOA\\nB8j5zCI1dCeB2VTji+MW4RmhtkBwbpnKWPXuzIcUsURziCYh7tgiGKyxprOSg3bTe6zecV0KS1pI\\nMok4SDqiraYtBVXTYtuFpp62hI9OtNZw0tLvczj7j97G4x79kv1cTE/xn/jF68F7vnTLBWzv+5U8\\nc4MVbno51FZAUhfNGop5HV9qHM0Trx+xleiQUp3taNsFk/QAX/esHCqlUJ2VzwAep+8/BvwnPUFX\\neR83i9iJ1d0oqhn/VzpcgxBz+8VfXMAVG97hs7yXCtJ91B9tvRy43K0Z/DzPP+G9rBt5Yngo3IBo\\nbDnIHR9XzTeTRSHmceOjxZOoqulWOTconr0obLgUt+uj38aRDjmwBfzX/YcP5FoB4UsHo/No6XHb\\n6LUpeGZaKKUzqGZuKg9zappfLhpXvEo4Lw9bPg5+hNAx2UfOI1AgruuCWc2N9xQVfGZkn3X93OWQ\\njkjsaW7aL6IJdoYgWQzpWmg2pDXP2HUyn/J+SFZA7ShZDGJfgDmeQmKBRV1Up0/M+8ScEMW+nMZ7\\nOwhVuXACtCFd3KizXtEf9lG0EFBXbTZaFCzEKyRoQEiKCHSCgm2iDrMY3T7/xbfz+Svezic/233s\\nAxCfew+c75b1f5j3PuYOuZ/VBWMK8u0EiipmqmwoqxZiPLfj4a7n8F7+mNc0viAbV5OYDlbu45QC\\nMkzfds5lwAe89x8EVngfSgZuB1bMe1inuePOyUPpqg6LKB7XRd1fAbaO3ciNm3/gP/+z8w/qxDdO\\nrQee5NIk5Z1n/pKlg2voT4blIUoIXVC9svuuTqlmbvz89RJ7xtNou9ihZP+PF9QqZ2c8cYRHxA7h\\nHuIWuZUsYzH349H8Hz7AkxEz3DrHm6MiWQTJUqAf8iHRbDNNd82Vw3VaO9UjQFVzSNuTyORz/ZCu\\nlAfOZ3KCudr1HoqOERY5YmF6M8CIcPWJjnm4iFSB3kDW0lxdAUzW2yvRilvNx4JbCn5CqIPacarh\\nDiFUiYZX5XU5Xqa8aQA61DxGQSMjZGyFFTMmF6sacKy52rXED3h1VTZQbStwd
5AoDq8LXhxjbudj\\nyKQhYs4XfG1tBlwrAls9Tqs1xcaNG3yWHbK/3++cudPVkgYXnvlzlo6soTE+jGtAZzNkdwKtYlhi\\nf3Y8320oTGw47NI6QJbCjvxmXjPXxzn97wFed6jnXj6RwyuHAriP8t5vdc4dAVzpnPtV/KX33jvX\\n5ZUSCVqPxYygGpA5yvSzpEe1oGtv+zr/9NWz/Wx7XvrgQEU143XukWuex1Ejp/HME/9GwKYf3JQC\\nRkceAKfoZ89Lku0/bMWoT3tv4gjNGEo8cPyKtWkLntifs+zZnM+beBUafluqQzOnf/ME6qcDQ5CP\\nQHtUaq1aHGhel21cDZKaaFNeze10CmnjYifWUNA7gmAnOu2OHFNxzmgIK4mIaMZhAfD6QCZykamC\\nK140xrxZgL99btYOA5CukThdryp8MooQugPg+4tr8w0pERniadsF6Lq2mO94SumxiXL4JedZUsxR\\nazNfStiJHqvQFDFGF9WuA9CiETEaHRPff4jAPxKX6++Vuy2naskOLv346/wPf3QZCyS+k7eBde6R\\ny1/Aq4//FPluYFYsCT8OWVST2i41jrqBAlxjysDezwE04PdG/zdZK+Pxrdftr0D6gcs+0tzvRTlo\\nwPXeb9W/dznnrkDaCW93zh3pvd/mnFsJ7Oj5469oSxs3ASfX4dQlkTbUx7yr0a3bf8q/fONFCwG2\\npWu5euNnXeIc12z8LC8+5f2c1Hyc0AyzFCUfW4hGDqHcXL6fzJv5+H9T4A0Uw4nQXUUsdqzNgHv5\\n0Mf9pZMv7jrUpe46/ojVzFGk58eccUv/ulVQXwvZcmgdKZqtZTPlNcjTSDuZZ5Km9jR1ZPFMlquG\\nW5cHz3eQcDRbQGMN0OlYJgK6prYnRNslAkQ1BFjyjqS8hloGCno+VeAboIg6SQjl1bxy01bqMOuD\\nTr8sKB4B+tQiFtTL7yJtNGmLtp0qFREKrxtFYSUIlYR3g4QUV3R765nnBig8rRE4Oj0Pl1NKcOi6\\nwfZGNdiwOFiB3Mgj+8Wvv5Mf/OSz/s5D6/82n/ird3zafeKpn6L2UPmg9WPo3KEgq89FFWxjQyGm\\nEWw4O8A/Np/GU4e+xt6p10ALvrqQNMB9WMN1TtKUvPcTzrlB4MnABcBXgJcA79K/X+q5g2cfRUjj\\nTUaQwHGnE7VqZyewZ+823nT5aX5iZhdv/uTBnPJ+Rbmq9cDjXTPt5+L73cyi/lWSnVYrNJrQYCsD\\npuSBmk/TNdqgyuNafKI5sWxeWX6BKSyR9cm0bTfVRQS6lf3340JOI0McR7FmHZe2zQB3NLAEZo+C\\nuUUCPkkcIEzxYOQICLtYJTHNLCt2mgyBH0X6y6k25q19j4X1GTDpquInVBOuQ6ZqfGKJFWqbBudL\\nRwDVOtQmcSaJAXHMt2rGVtYnNIlvQKbxrXNp0UOu7oSCSBtQq8m9NCADpZZyOcc0vu8GvCaJzN2k\\nnxI9gqeIhIm9Q3F8s+7KYmZ7AW6pkE1ML9g4aFLQ1u2/5qbbfuQ/88W3dO9kgeUV3xnkYydNySLT\\n0md3WM7HT0OnTag5nSBzLDAziSzs1BENuQF7jr8Vf0fO2gxOnBOt2Q3AV+9YoBO+DwMuws1e4aRL\\nQA34lPf+W865nwKXOedejoaF9fy1Uy8uNSRkxsC2cjp5C6647gJ+seUbfmLmni3FHolvZTPumMGn\\n8+DRZ/KMpW8TKsEeRKNBVEvzE4QWNSYxXxtbwLETLY5FNIXRIh+shmrMhekz5i/1L+o64bes+wHJ\\nJhibhplJmPDSAVeZG3LEe7+tD1hLkdYWqd+JuoZdLjxnHj34eU1AL2+qtjlI4cjRk0yGwU8JOPtJ\\nyrUsIieOn6NIxVaqJskVTBvK30ZexwRC+JO3+GAInGvJfE/lXL2lyTYjsO1TLdmutzqIZtqrEy12\\nVJV+ZHR
EH6RLKIh2BDhoEmK+e3qLeoWZRbxwCbDNh+F1DsUrcGRl0IGfr/8ml3zwbD8ze+/UZMyy\\nNpdfeYHQPrub/MHIecV8qlEqmOTqZWvQNSBNZJycOkA33HQlf7H0G+R7BWxDE9OFkvuw08x7fxvw\\nwB6f7waeuN8dpCMUYVfz0Ahf2/APXHXrx/3GPfdY76V9ib9j6lrgWnfi0Dd5y9qrBWCNp6whOfAd\\nebj9lDiAyApe0kDVnq04/DL2ytrLQHYuesX0Qgfwa0rn6E5Y8lBe/YQPc9RjF5FqQkO2CXbdBLtu\\ngb450XA9El7mHwR966QDbN4QTSMx09nUatWy8phkRrS9mOJwGaQa80lH7meyVL7LlVIIGUgKtnnM\\n8VkeflP4S/Po+0F5EOOYIpcr3WCxqtF3PiVEHZjDLxR+sepbVvLQFffF2IegfWmigBXvDtlo+lkx\\n6gQ+KFkk7y3E0VlIiFEQERlr7ee7eCI7Edu26nTTVbtKKdh+3vHuJ7J3fDt7x7bea2BL4HPPB3CJ\\nS2jNzfCM5gVB00+GkVhdi06pWK7W9SEZlQX68aOvJp9Sf4DTsVzImrgLr+E6585GxuBk4KHe+//e\\n328OU6bZKF3VvNrZLDsmbwXwb/ryOl54OE6sh2yeWc/efCuL+rWpVQwEEDzu2S55JXk5C83EQ0nZ\\nsfdZtK117rWurJ0ISGpr4NxbV/OyeB91qK1ISO8nDkYH1E4Hvwc6t0HrJpi6CfJpSJbAwJnASphd\\nBZMjyiU7yQ5KpwkLimWamXYYgvJj0DVAsgSGppiATgOKg1c6qnWRWCafHSeT732LUPDFDWrkgiUL\\nxI4m5UCtjoOPXgawPlEtV0G2V0hW7MRJNTEipMTmEdB25PpcTGHYPtUkToYJNQ9ALbceCwxEN7qq\\n9caBq/Z/o0js2nXbsb07mBjbyZ13/NK/99+ez9kv53CLz30OvB14u3vH4g0MLV3MktGV0hljkXD5\\nboBQtyQfg3xGFio/K/PT9et26LOy0OUU7xFK4Trg2cAHDvQHhwdwkxFKJRX/Y/0/s2nPev+dX//7\\nYTmffYifySbdccNn8bq1l3FE8/guntlbmuOg0AvZbNkTG0v8/JhGCwWIzQGtBDoNQDWEdFQaMVJD\\nWJro0Ddv/4l7yP3fxOvPuIz0iAF8KsCZDEB9MdRPhXwnUhwcSE+C9lKYG5EwM6M/LfA+xFHaSeaE\\nsCTrPuD1vuVNAWPXKADJ9Sl2aAlHC8UynsTnkFgdC09wnrlEHzQFYAYR2iPWBO2czFSNQMoiKXyq\\nDjE712poVvQzdHdxMoRptnH6b6KflaL1I2qBNALZOOREuV9v4SHxgeN92aJCsb9Y6x2b2M6V3/1A\\n2OiX67/tb/jV9/kNFf/WPevc8cNn8ED/dPpnBznrCW8gHaGU3u36wY2Dt+I3KRJRYhak/bOAkQX5\\nwkOd9/5XAM65/W0a5DBpuHX4h2+exc6J2wHYuvfXvp0tVITzgou/beJn7rxH3cqRRxxf0k4s5CcH\\nnHbF9Zk4DCJFGChohug5DdJB+cVhpDvCEaKRWkaPG4RsO1RurDvx2IfyN2/6OPXRASmqrXGc+RDS\\n0LEpDq0EIIFssWRpOQ/9rlgUOoMCVqbZhXOuq6YbRwcYQOX6uYGgchdW/cvPUjQJ1NXE5/qAaQyu\\nrTpuQM/VqCZLuIhNhAPRUHyBd3a+vZxQQek1XjnmYfPir4VelVZQM+d1TIMmGpkwfhYu+Ojv8bY/\\n+X9FaGMcuxsT91VKATj3b+8f3rfbs37rtpsP4OJ/Y8TfOvFT4KcuTWtcdfvHIIX3nHudfOtkkU5G\\nIFcu3PUj8yPSUFz1ATpEyRewIeUhiPO+qobdwwd0zns/b4/t32hxn3ppNFge8cqPC7D4MTWVxoST\\nCgXRE9UC0+L/YYfa+ytdKkCbrhSAdEOR
00C99n4CLrj08X7DHf8Vfn75pR6GFRTV2eSTCCQs0SAV\\nj30+AO1hAdh2UkQppUioV1LJaoLILDcAqqaStiCxrAyLSbXjKzj5DkWatr2fJqRvW8q2q8u1uz5K\\nxYBLLQ2il1fqwDhbkmiR0P9nRjWoQy1LCkC2aw4ZW5oym1r2lqXIxoR69W/Mqe7rWTLA1W127L6N\\nufYMJPDuj/2R37z9xn38+H+UuMUjR/LWl36nVH+iIv7cf10HC4MXzjnPJ6/s/eX1v4AbflH8/4pP\\nEB/POXclcGSPX77Ze/9V3eb/Aef+5nK491VJh8vPlJ8U7SxpI11ftQoZw4gHXjVMK9dn1aVCpbN+\\ninoEiwonTInTixyLb3v1f7rHn/wy/5+/+oh7+hnn8pLXUmRFWb3fBFxLQCcfFaD1NaUArN5qBPoh\\n/MxBUhcQcjmhyR8QohEMnNK4hqxqwC6l8NDZeZu22KRIWIAAThajGnjZKCmhK2IeevM0+rnL9Nrz\\nyKqvFXx63lDqQp1upUfYFpko8iJQDTHA9gLVfYBtKNeo4/Tdn1zK7rFN4B1XXvMBv3tsCwCv//se\\n1/Q/V/ye8W3Aunv3oPNA3SkPkZfJFZ8o/8z7Jy3kafwOcO+OuFFC4oNlWrkcAde2hkY1gRkxj6wm\\nqjlXQoGTQTWrTIutU/QVizOWoDClFUxe8dz3u0ve+JfcvP1/k98CyUmUTFurx2qhUSinWW1MCAUw\\nxTjm4/AKXwBlnDbqk4Le8Br3CgRnVilzA8oexMgsD7HNccCy/b/qYTRAjrI5YqdZfG1xGx0r3m3R\\nDYknNDU0vjZVzTaJXk7vadHnnDK4Vq7Jz1GqIWGfv//LL+L2bT8HB9t33eJbc/vo1/E7ucdkPsBd\\nODkgLfx3lMLdEDc8sIQPvWGXAG4baRU+Syh36FvRQ+eUHrDutQqybkSAKXCVBlix6RwnoFsYUlpo\\nr4xDdgu88TPreM7v/y1nPuq55Xp4NTWj64ROAlldKIWsT+NS0246MoRLeWjuFk22rSFkBk6hW4JV\\nrFKe05xrVmd2cmwXe/ds96//q/1qMu7I5SfwV6/5SvFJAkeuOJFa2gjarrf4Z6MPUrmGEI2QFItF\\nbvSKbaPhYXkaae6OUI8gnVZn41wRseCs/JsVouhFIWjscq5ZKZvuDCUQ/bkfvXc1uP+hsmCUwkev\\nPrCNX/pIDvR4zrlnA+9DepGMAdd67/9gX7/5nYZ7dyVokDH/5AreMfg/jC5oqkardIKLywAaL1kF\\n2Wi/tq1XcMn6VNNeBpc8cEN3xoTl6ZtJHNPOcxJy5VPlMl2h2cZKqXOQL4VaVpjquELLNY4z8L2q\\nIicaIjYzOcH7/uWF/tqf92jT0i1+245bqJiY7g+f9GcMDyyFxIu23nD4xAdHnXdloD31VW8gHRyk\\nz8OoFVJJoKEXFiIhDJjRsanJmFh/M+NxQ+3NuEh57Djzsrh+/soLJMa27vxl//W2A7ne38lhkHtA\\nw/XeXwFccXd+8zvAvTvyjtd9vzCZI3F94pm2qmaJBnsHJ5AGg3elmvXKfonjTk39rBUAY0Vckn7l\\nTmcoc4z20lvrnB7Si6aaqSOp5QoHu2G/YXYD6ZFVnyi42qRDqdYqNZgblXNIZ6E2Cd+75lN85XMX\\nk3XafvPWQ3IC+a9f+b67s71bdOJncWmtFDnmgNdefvX/3969x8hVlnEc/z473S272ypyv1vEjQEx\\nlKBIgsiiAYvKxUsEEgigohEUiBcQuUtIucSIhmiioFyUcilCUBELpCgmFqhSKC2XtlJbkLbcet12\\nuzvz+Mf7ntnpMrt7psycM539fZqTnT17zszTp9Nn33nP+76HyZ2TmGDQtY7QIk3e9knhrfjlWf7l\\nUvlv5JSTdNOMs3l+0ePxYHzZilwm5kiNGt+lkIq6FGpg91zvYUxqfxzWF
L+WbxEUx35ZV2zZTqA8\\n2yYpxJUf+7foBki+DhvjWdlFkKwlAJRXuCrfkyveeqbqPOJ2KL0nDAkb7A7LMW6KQ686+0OxpC2u\\nHNYRJly0l6BjNUPjUeMC2MnFuRUb/su6zg3MefRO/9ONVzU49XVjV9+zgIlvhZZ5Ka5xUNk1AkAR\\nbrntPH/muUdyDVbq16Xw6+fTHXzW/qm7FLYqFhXc9Oy0z13L8UdcEIY59YdRCskUVo/jTctTOAuU\\n70phyXTViRVFNym4SStq+JX5yoJZeaGIuC9OQS13JySFtxhjiH3DpdVh1hkboM3CtblCG/RNhMEd\\nYMJ2UNgD6ILB98X+zu2h+J7QbdC+NnYfxCv3M+++Ei8Yf599q6987T9Z5F3Gr7oV3F8tSnfwN3oa\\nWnCbo529rbj/sWs5/sgLyqv4+0D86BnHmob76g4NByovJO2EZR6BkkNbF9WneZYqjq/cXxz6+RZv\\nBae8eEkyuaDUF2Lonw/8G/bcBPus56AdBzjfw0qDr0yAh4sw+82JsKwdlm8HxQK0twEdcHvhXF5g\\n9tAohaGr775smT5Cy7anVK37LnsquDWLfXrJsK5kempbW3w8maErUUl1jAXLk6myFReayhdhEkkR\\nNoZuLxInWTAQR0EkxTqOf/XY0i2tguJL0DaLwvuhY0fYeze4sB1Ong1tT+6D9b9B+wf7uHAXmLtf\\n/yuH7dy/1u9b/5D/3L9XDuHUuiZMJH/DF2jLiQpuzSoGr1obQ+sOxBllxAVQKpcGS2ZWQbzo1Uf5\\nbheWrESePG+8gOOb43NaaLWW1lBe7KW4Aoqr4sW47tCt0eGw42roWgJ3cuWnTueEL+7C1KMXw843\\nwdLHexi8+FL+ccklf3h12bL5Ez4AM417Sot9ATVdnhLZBjVJwVUfbg2sUCjwpY9dzokfuTQssjIQ\\np/UOhiJKfyiupeTmY071++IUKC9bZ91suRZwCdomxpash4JcWgPF12NL9tWw7GJ7LMwFC8V2CXO4\\nkbMowOGdvHfmdP543Pf5zKa32Ti4Aga6J9PW3c3KFSuWr3Ff09BEidRJ3fpwb1iS7uDz91MfbrPw\\nYrFohxYWsPbJPrbzrvLwow62zGTlda/k2hcMjfgavh5K+eJ4MqIgfl8qhhZ0B+EWXf3x+SYSWr7m\\nfWz2pZzB4b7WV3NeOO3xWbbgnEv5+lPP+VMNSIPItkct3G2XTbMz7Bf8tqNiRmtnG6x3GEiWe00K\\nbRyM0GkwCegohcbrGqDPYaKFfZsdNm0kdNsmxTopzEkx30hoML8FvLE9LPbHfPrqozL7i4tkrG4t\\n3GtStnB/qBZu0/GH/BY70OYOxH+WObOZ316E/g5YPRk2FcLs2KJBu0NnMXzq7x6ICTdY2wnrCrDD\\nZmjvD4MXXtsTHumAFy00ZhcCqwbBVwM/4zru5fby2rmDG6CfDUzPKQki2xK1cFvbXdfZXQf0cGAh\\nLiLGZsINHhcC/wOmQXGfsCRBctPVA850zb8XqVC3Fu6PU7ZwL1MLd5t00gV+UkHhabcAAAcISURB\\nVN4xiEg0fCG3nKjgikjra5IuBRVcEWl9KrgiIhnpyzuAQAVXRFpfk9xnQwVXRFqfWrgiIhlRC1dE\\nJCMNKLhmdj3wecJI+iXAmT7GOiXNsUikiEgj9aXcajML+LC7HwS8BFw01gl1L7hmNs3MXjCzRWZ2\\nYb2fX0SkZhtTbjVw94fdy7fpfgLYa6xz6lpwzawA3AhMAw4ATjGz/asc11vP160HxZROM8YEzRmX\\nYkonk5jeTLltva8CD451UL1buIcCi919qbsPAHcCJ1Q5rrfOr1sPvXkHUEVv3gFU0Zt3ACPozTuA\\nKnrzDqCK3rwDqKK34a+wlS1cM3vYzOZX2Y6rOOZiYLO73zFWGPW+aLYnsLzi+1eAj9f5NUREajNS\\nd8H
AHBh4YsTT3P3o0Z7WzM4APgt8Ok0Y9S642S49JiKSxogXxA6LWyL9/abMbBrwA+BId692a5d3\\nnlPP5RnN7DDgCnefFr+/CCi5+7UVx6goi0hqdVmekZTLM5J+eUYzW0S4H8tbcdc/3f3s0c6pdwt3\\nLtBjZlMIq76eBJxSecB4WAtXRJrN4NiH1Mjde2o9p64F190HzezbwF8Jt7K92d2fr+driIjUrv4F\\nd2tkfscHEZEshS6Ff6U8+hDd8UFE5N1pjhZuplN7m2UWmpktNbNnzexpM3sy7tshjrl7ycxmmdn2\\nGcTxGzNbaWbzK/aNGIeZXRRz94KZHZNhTFeY2SsxX0+b2bEZx7S3mc02swVm9pyZnRv355arUWLK\\nLVdmtp2ZPWFm88xsoZlNj/vzzNNIMWWcp8GUW4O5eyYboU93MTAFaAfmAftn9frDYnkZ2GHYvuuA\\nC+LjC4FrMojjCOBgYP5YcRBm7s2LuZsSc9mWUUyXA9+tcmxWMe0GTI2PJwEvAvvnmatRYso7V13x\\n6wRgDvCJJnhPVYspszwBDn9LueH1/vtXblm2cNPOQsvK8H6a44Fb4+NbgRMbHYC7Pw68nTKOE4AZ\\n7j7g7ksJb8RDM4oJ3pmvLGNa4e7z4uP1wPOESTa55WqUmCDfXCUjTjsIjZy3yf89VS0myDRPzdHC\\nzbLgVpuFtucIxzaaA4+Y2VwzOyvu29XdV8bHK4Fd8wltxDj2IOQskXX+vmNmz5jZzRUfSTOPKQ45\\nPJiwWEhT5KoipjlxV265MrM2M5tHyMdsd19AznkaISbINE/jr+A203CIw939YOBY4BwzO6Lyhx4+\\nh+Qeb4o4sorxl8C+wFTgNeAnoxzbsJjMbBJwL3Ceu6/b4kVzylWMaWaMaT0558rdS+4+lbBy1SfN\\n7KhhP888T1Vi6iXzPI2/gvsqsHfF93uz5W+yzLj7a/Hr68B9hI8sK81sNwAz2x1YlUdso8QxPH97\\nxX0N5+6rPAJuYugjXmYxmVk7odje7u73x9255qoipt8lMTVDrmIca4A/A4fQJO+pipg+mn2eNqXc\\nGivLgluehWZmHYRZaA9k+PoAmFmXmU2Oj7uBY4D5MZbT42GnA/dXf4aGGymOB4CTzazDzPYFeoAn\\nswgo/idNfIGQr8xiMjMDbgYWuvsNFT/KLVcjxZRnrsxsp+SjuZl1AkcDT5NvnqrGlPwCiBqepwLF\\nVFujZTYO15tnFtquwH3h/wsTgN+7+ywzmwvcbWZfA5YCX2l0IGY2AzgS2MnMlgOXAddUi8PdF5rZ\\n3cBCwmefs2ProNExXQ70mtlUwke7l4FvZhkTcDhwKvCsmT0d911EvrmqFtOPCGtA55Wr3YFbzayN\\n0Ji63d0fjfHllaeRYrotyzx1URr7IGDd2Ie8K5ppJiItzcx8N2akOnYFp+CaaSYisvW6878GDqjg\\nisg4kLZLodFUcEWk5amFKyKSkZ0zGIGQhgquiLQ8tXBFRDKigisikpFGXDQzs6sICwM58CZwhrsv\\nH+2cTNfDFRHJQzeeaqvRde5+UFwn4n7CJKFRqYUrIi2vEV0KwxZQmgS8MdY5Krgi0vImNGgcrpld\\nDZwG9AGHjXW8uhREpOWVUv4ZLt6aaH6V7TgAd7/Y3fcBbgF+OlYcauGKSMurVkwBXuYlXmbRiOe5\\n+9EpX+IO4MGxDlLBFZGW5yP04U6hhyn0lL+fzV9SP6eZ9bh7Uq1PICyFOSoVXBFpeSMV3Hdpupl9\\nCCgCS4BvjXWCCq6ItLyRuhTeDXf/cq3nqOCKSMtrUAu3Ziq4ItLyilq8RkQkG2rhiohkRAVXRCQj\\njbhotjVUcEWk5fWzKe8QABVcERkHNrIh7xAAFVwRGQf66Ms7BEAFV0TGgbdYlXcIgAquiIwDauGK\\niGREfbgiIhlRwRURyYi6FEREMqIW
rohIRpql4Jp7c8wxFhFpBDOrqci5uzUqFt1EUkQkI2rhiohk\\nRC1cEZGMqOCKiGREBVdEJCMquCIiGVHBFRHJyP8BY+OV+cGFOWwAAAAASUVORK5CYII=\\n\",\n      \"text/plain\": [\n       \"<matplotlib.figure.Figure at 0x110fe2c90>\"\n      ]\n     },\n     \"metadata\": {},\n     \"output_type\": \"display_data\"\n    }\n   ],\n   \"source\": [\n    \"#plot the data\\n\",\n    \"import matplotlib.pyplot as plt\\n\",\n    \"plt.imshow(one_slice, origin='lower')\\n\",\n    \"plt.set_cmap('spectral')\\n\",\n    \"plt.colorbar()\\n\",\n    \"plt.show()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 24,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"shape: [525000]\\n\",\n      \"type: [('Key', '<u4'), ('RootSymbol', 'S16'), ('Strike', '<f4'), ('Month', 'u1'), ('Day', 'u1'), ('Year', '<i2'), ('PutCall', 'u1'), ('UnderSym', 'S16'), ('UnderType', 'S1'), ('UnderKey', '<u4'), ('NumTrades', '<u4')]\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# open a financial data file\\n\",\n    \"g = h5pyd.File(\\\"nanex.test.hdfgroup.org\\\", \\\"r\\\", endpoint=\\\"http://127.0.0.1:5000\\\")\\n\",\n    \"dset = g['/Nanex/OKey']\\n\",\n    \"print \\\"shape:\\\", dset.shape\\n\",\n    \"print \\\"type:\\\", dset.dtype\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 25,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# retreive all rows where the symbol is \\\"AAPL\\\"\\n\",\n    \"aapl = dset.read_where('RootSymbol == \\\"AAPL\\\"') #get all rows where the symbol is AAPL\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 26,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"(2982,)\"\n      ]\n     },\n     \"execution_count\": 26,\n     \"metadata\": {},\n     \"output_type\": 
\"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"aapl.shape\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 27,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"array([(100001L, 'AAPL', 505.0, 1, 4, 2013, 0, 'AAPL', 'e', 10L, 0L),\\n\",\n       \"       (100002L, 'AAPL', 545.0, 1, 4, 2013, 1, 'AAPL', 'e', 10L, 0L),\\n\",\n       \"       (100003L, 'AAPL', 500.0, 1, 4, 2013, 0, 'AAPL', 'e', 10L, 0L),\\n\",\n       \"       (100004L, 'AAPL', 490.0, 1, 4, 2013, 0, 'AAPL', 'e', 10L, 0L),\\n\",\n       \"       (100005L, 'AAPL', 480.0, 1, 19, 2013, 0, 'AAPL', 'e', 10L, 0L)], \\n\",\n       \"      dtype=[('Key', '<u4'), ('RootSymbol', 'S16'), ('Strike', '<f4'), ('Month', 'u1'), ('Day', 'u1'), ('Year', '<i2'), ('PutCall', 'u1'), ('UnderSym', 'S16'), ('UnderType', 'S1'), ('UnderKey', '<u4'), ('NumTrades', '<u4')])\"\n      ]\n     },\n     \"execution_count\": 27,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"aapl[0:5]  # first 5 rows\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": true\n   },\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"Python 2\",\n   \"language\": \"python\",\n   \"name\": \"python2\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 2\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython2\",\n   \"version\": \"2.7.9\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 0\n}\n"
  },
  {
    "path": "examples/nodejs/gettoc.js",
    "content": "var http = require('http');\nvar options = {\n  host: 'data.hdfgroup.org',\n  port: 7253,\n  path: '/'\n};\nvar callback = function(response) {\n  var str = '';\n\n  //another chunk of data has been received, so append it to `str`\n  response.on('data', function (chunk) {\n    str += chunk;\n  });\n\n  //the whole response has been received, so we just print it out here\n  response.on('end', function () {\n    var rsp = JSON.parse(str);\n    console.log(JSON.stringify(rsp, null, 4));\n  });\n}\n\nhttp.request(options, callback).end();\n"
  },
  {
    "path": "examples/pi_compute.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 13,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"import h5pyd\\n\",\n    \"import numpy as np\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 14,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# executed by workers - add random values to cells\\n\",\n    \"# n is the id for this worker\\n\",\n    \"def add_points(n):\\n\",\n    \"    # use 'a' since we'll be reading and writing to the domain\\n\",\n    \"    f = h5pyd.File(\\\"pi_calc.test.hdfgroup.org\\\", \\\"a\\\", endpoint=\\\"http://127.0.0.1:5000\\\")\\n\",\n    \"    g = f['pi']\\n\",\n    \"    # get the number of workers\\n\",\n    \"    num_workers = g.attrs['workers']\\n\",\n    \"    pts = g['pts']\\n\",\n    \"    # eacher worker will write count points\\n\",\n    \"    count = pts.shape[0] / num_workers\\n\",\n    \"    #arr = np.random.rand(n,2)\\n\",\n    \"    for i in range(count):\\n\",\n    \"        # update one row\\n\",\n    \"        # In a real program, we'd want to update a batch of elements at a time\\n\",\n    \"        pts[i*num_workers + n] = (rd.random(), rd.random())\\n\",\n    \"    return count\\n\",\n    \"        \\n\",\n    \" \\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 15,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# create a new domain (file)\\n\",\n    \"f = h5pyd.File(\\\"pi_calc.test.hdfgroup.org\\\", \\\"w\\\", endpoint=\\\"http://127.0.0.1:5000\\\")\\n\",\n    \"g = f.create_group('pi')\\n\",\n    \"g.attrs['workers'] = 4\\n\",\n    \"NUM = 1200\\n\",\n    \"# create the dataset\\n\",\n    \"dset = g.create_dataset('pts', (NUM,2), dtype='f8')\\n\",\n    \"\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 16,\n   \"metadata\": {\n    
\"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"[0, 1, 2, 3]\"\n      ]\n     },\n     \"execution_count\": 16,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# setup the workers\\n\",\n    \"from IPython.parallel import Client\\n\",\n    \"\\n\",\n    \"c = Client()\\n\",\n    \"dview = c.direct_view()\\n\",\n    \"dview.execute('import random as rd')\\n\",\n    \"dview.execute('import h5pyd')\\n\",\n    \"dview.execute('import numpy as np')\\n\",\n    \"c.block = True\\n\",\n    \"c.ids\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 17,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"[300, 300, 300, 300]\"\n      ]\n     },\n     \"execution_count\": 17,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# have workers run add_points\\n\",\n    \"c[:].map(add_points, range(4))\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 18,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"data\": {\n      \"text/plain\": [\n       \"3.15\"\n      ]\n     },\n     \"execution_count\": 18,\n     \"metadata\": {},\n     \"output_type\": \"execute_result\"\n    }\n   ],\n   \"source\": [\n    \"# tabulate results\\n\",\n    \"count = 0\\n\",\n    \"pts = dset[...]\\n\",\n    \"for i in range(NUM):\\n\",\n    \"    x = pts[i,0]\\n\",\n    \"    y = pts[i,1]\\n\",\n    \"    if x*x + y*y <= 1:\\n\",\n    \"        count += 1\\n\",\n    \"        \\n\",\n    \"(count * 4.0)/NUM\\n\",\n    \"    \"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": true\n   },\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"Python 
2\",\n   \"language\": \"python\",\n   \"name\": \"python2\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 2\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython2\",\n   \"version\": \"2.7.9\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 0\n}\n"
  },
  {
    "path": "examples/rest_ex1.ipynb",
    "content": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 3,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"%matplotlib inline\\n\",\n    \"import requests\\n\",\n    \"import json\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 4,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [\n    {\n     \"name\": \"stdout\",\n     \"output_type\": \"stream\",\n     \"text\": [\n      \"REQ: http://127.0.0.1:5000/\\n\",\n      \"RSP:\\n\",\n      \"{\\n\",\n      \"  \\\"lastModified\\\": \\\"2015-07-08T23:04:18Z\\\", \\n\",\n      \"  \\\"hrefs\\\": [\\n\",\n      \"    {\\n\",\n      \"      \\\"href\\\": \\\"http://craterlake.test.hdfgroup.org/\\\", \\n\",\n      \"      \\\"rel\\\": \\\"self\\\"\\n\",\n      \"    }, \\n\",\n      \"    {\\n\",\n      \"      \\\"href\\\": \\\"http://craterlake.test.hdfgroup.org/datasets\\\", \\n\",\n      \"      \\\"rel\\\": \\\"database\\\"\\n\",\n      \"    }, \\n\",\n      \"    {\\n\",\n      \"      \\\"href\\\": \\\"http://craterlake.test.hdfgroup.org/groups\\\", \\n\",\n      \"      \\\"rel\\\": \\\"groupbase\\\"\\n\",\n      \"    }, \\n\",\n      \"    {\\n\",\n      \"      \\\"href\\\": \\\"http://craterlake.test.hdfgroup.org/datatypes\\\", \\n\",\n      \"      \\\"rel\\\": \\\"typebase\\\"\\n\",\n      \"    }, \\n\",\n      \"    {\\n\",\n      \"      \\\"href\\\": \\\"http://craterlake.test.hdfgroup.org/groups/a96ccff0-25c5-11e5-896c-3c15c2da029e\\\", \\n\",\n      \"      \\\"rel\\\": \\\"root\\\"\\n\",\n      \"    }\\n\",\n      \"  ], \\n\",\n      \"  \\\"root\\\": \\\"a96ccff0-25c5-11e5-896c-3c15c2da029e\\\", \\n\",\n      \"  \\\"created\\\": \\\"2015-07-08T23:04:18Z\\\"\\n\",\n      \"}\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"# get domain object\\n\",\n    \"domain = \\\"craterlake.test.hdfgroup.org\\\"\\n\",\n    \"headers = {'host': domain}\\n\",\n    \"endpoint = 
\\\"http://127.0.0.1:5000\\\"\\n\",\n    \"req = endpoint + '/'\\n\",\n    \"print \\\"REQ:\\\", req\\n\",\n    \"print \\\"RSP:\\\"\\n\",\n    \"rsp = requests.get(req, headers=headers)\\n\",\n    \"domain_json = rsp.json()\\n\",\n    \"print json.dumps(rsp.json(), indent=2)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# save the root group uuid\\n\",\n    \"root_uuid = domain_json['root']\\n\",\n    \"# get 'Data' link of root group\\n\",\n    \"req = endpoint + '/groups/' + root_uuid + '/links/Data'\\n\",\n    \"print \\\"REQ:\\\", req\\n\",\n    \"print \\\"RSP:\\\"\\n\",\n    \"rsp = requests.get(req, headers=headers)\\n\",\n    \"link_json = rsp.json()\\n\",\n    \"print json.dumps(rsp.json(), indent=2)\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"dset_uuid = link_json['link']['id']\\n\",\n    \"print dset_uuid\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# get the datset obj\\n\",\n    \"req = endpoint + '/datasets/' + dset_uuid\\n\",\n    \"print \\\"REQ:\\\", req\\n\",\n    \"print \\\"RSP:\\\"\\n\",\n    \"rsp = requests.get(req, headers=headers)\\n\",\n    \"dset_json = rsp.json()\\n\",\n    \"print json.dumps(rsp.json(), indent=2)\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"#get the data (finanlly!)\\n\",\n    \"req = endpoint + '/datasets/' + dset_uuid + '/value'\\n\",\n    \"print \\\"REQ:\\\", req\\n\",\n    \"rsp = requests.get(req, headers=headers)\\n\",\n    \"print rsp.status_code\\n\",\n    \"data_json = 
rsp.json()\\n\",\n    \"values = data_json['value']\\n\",\n    \"import numpy as np\\n\",\n    \"ndarr = np.zeros(dset_json['shape']['dims'],dtype='i4')\\n\",\n    \"ndarr[...] = values\\n\",\n    \"import matplotlib.pyplot as plt\\n\",\n    \"plt.imshow(ndarr)\\n\",\n    \"plt.set_cmap('spectral')\\n\",\n    \"plt.colorbar()\\n\",\n    \"plt.show()\\n\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": false\n   },\n   \"outputs\": [],\n   \"source\": [\n    \"# zoom in on wizard island\\n\",\n    \"req = endpoint + '/datasets/' + dset_uuid + '/value?select=[140:180,90:130]'\\n\",\n    \"print \\\"REQ:\\\", req\\n\",\n    \"rsp = requests.get(req, headers=headers)\\n\",\n    \"print rsp.status_code\\n\",\n    \"data_json = rsp.json()\\n\",\n    \"values = data_json['value']\\n\",\n    \"import numpy as np\\n\",\n    \"ndarr = np.zeros((40,40) ,dtype='i4')\\n\",\n    \"ndarr[...] = values\\n\",\n    \"import matplotlib.pyplot as plt\\n\",\n    \"plt.imshow(ndarr)\\n\",\n    \"plt.set_cmap('spectral')\\n\",\n    \"plt.colorbar()\\n\",\n    \"plt.show()\"\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"collapsed\": true\n   },\n   \"outputs\": [],\n   \"source\": []\n  }\n ],\n \"metadata\": {\n  \"kernelspec\": {\n   \"display_name\": \"Python 2\",\n   \"language\": \"python\",\n   \"name\": \"python2\"\n  },\n  \"language_info\": {\n   \"codemirror_mode\": {\n    \"name\": \"ipython\",\n    \"version\": 2\n   },\n   \"file_extension\": \".py\",\n   \"mimetype\": \"text/x-python\",\n   \"name\": \"python\",\n   \"nbconvert_exporter\": \"python\",\n   \"pygments_lexer\": \"ipython2\",\n   \"version\": \"2.7.9\"\n  }\n },\n \"nbformat\": 4,\n \"nbformat_minor\": 0\n}\n"
  },
  {
    "path": "h5serv/__init__.py",
    "content": ""
  },
  {
    "path": "h5serv/__main__.py",
    "content": "if __name__ == '__main__':\n    import os\n    import sys\n    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))\n\n    from h5serv import app\n    app.main()\n"
  },
  {
    "path": "h5serv/app.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\n\nimport six\n\nif six.PY3:\n    unicode = str\n    \nimport time\nimport signal\nimport logging\nimport logging.handlers\nimport os\nimport os.path as op\nimport tornado.httpserver\nimport sys\nimport ssl\nimport base64\nimport binascii\nif six.PY3:\n    from queue import Queue\nelse:\n    from Queue import Queue\nfrom tornado.ioloop import IOLoop\nfrom tornado.web import RequestHandler, Application, url, HTTPError\nfrom tornado.escape import json_encode, json_decode, url_escape, url_unescape\n\nfrom h5json import Hdf5db\nimport h5json\n\nimport h5serv.config as config\nfrom h5serv.timeUtil import unixTimeToUTC\nimport h5serv.fileUtil as fileUtil\nimport h5serv.tocUtil as tocUtil\nfrom h5serv.httpErrorUtil import errNoToHttpStatus\nfrom h5serv.h5watchdog import h5observe\nfrom h5serv.passwordUtil import getAuthClient\n\n\ndef to_bytes(a_string):\n    if type(a_string) is unicode:\n        return a_string.encode('utf-8')\n    else:\n        return a_string\n        \ndef to_str(a_string):\n    if type(a_string) is bytes:\n        return a_string.decode('utf-8')\n    else:\n        
return a_string\n\nclass DefaultHandler(RequestHandler):\n    def put(self):\n        log = logging.getLogger(\"h5serv\")\n        log.warning(\"got default PUT request\")\n        log.info('remote_ip: ' + self.request.remote_ip)\n        log.warning(self.request)\n        raise HTTPError(400, reason=\"No route matches\")\n\n    def get(self):\n        log = logging.getLogger(\"h5serv\")\n        log.warning(\"got default GET request\")\n        log.info('remote_ip: ' + self.request.remote_ip)\n        log.warning(self.request)\n        raise HTTPError(400, reason=\"No route matches\")\n\n    def post(self):\n        log = logging.getLogger(\"h5serv\")\n        log.warning(\"got default POST request\")\n        log.info('remote_ip: ' + self.request.remote_ip)\n        log.warning(self.request)\n        raise HTTPError(400, reason=\"No route matches\")\n\n    def delete(self):\n        log = logging.getLogger(\"h5serv\")\n        log.warning(\"got default DELETE request\")\n        log.info('remote_ip: ' + self.request.remote_ip)\n        log.warning(self.request)\n        raise HTTPError(400, reason=\"No route matches\")\n\n\nclass BaseHandler(tornado.web.RequestHandler):\n\n    \"\"\"\n    Enable CORS\n    \"\"\"\n    def set_default_headers(self):\n        cors_domain = config.get('cors_domain')\n        if cors_domain:\n            self.set_header('Access-Control-Allow-Origin', cors_domain)\n     \n    \"\"\"\n    Set allows heards per CORS policy\n    \"\"\"        \n    def options(self):\n        cors_domain = config.get('cors_domain')\n        if cors_domain:\n            self.set_header('Access-Control-Allow-Headers', 'Content-type,')\n\n    \"\"\"\n    Override of Tornado get_current_user\n    \"\"\"\n    def get_current_user(self):\n        user = None\n        pswd = None\n        scheme, _, token = auth_header = self.request.headers.get(\n            'Authorization', '').partition(' ')\n        if scheme and token and scheme.lower() == 'basic':\n        
    try:\n                if six.PY3:\n                    token_decoded = base64.decodebytes(to_bytes(token))\n                else:\n                    token_decoded = base64.decodestring(token)\n            except binascii.Error:\n                raise HTTPError(400, \"Malformed authorization header\")\n            if token_decoded.index(b':') < 0:\n                raise HTTPError(400, \"Malformed authorization header\")\n            user, _, pswd = token_decoded.partition(b':')\n        if user and pswd:\n            # throws exception if passwd is not valid\n            self.username = user\n            self.userid = auth.validateUserPassword(user, pswd)  \n            return self.userid\n        else:\n            if config.get(\"allow_noauth\"):\n                self.username = None\n                self.userid = -1\n                return None\n            else:\n                self.log.info(\"Unauthenticated request\")\n                raise HTTPError(401, \"Unauthorized\")\n\n    def verifyAcl(self, acl, action):\n        \"\"\"Verify ACL for given action. 
Raise exception if not\n        authorized.\n\n        \"\"\"\n        if acl[action]:\n            return\n        if self.userid <= 0:\n            self.set_status(401)\n            self.set_header('WWW-Authenticate', 'basic realm=\"h5serv\"')\n            raise HTTPError(401, \"Unauthorized\")\n            # raise HTTPError(401, message=\"provide  password\")\n        # validated user, but doesn't have access\n        \n        self.log.info(\"unauthorized access for userid: \" + str(self.userid))\n        raise HTTPError(403, \"Access is not permitted\")\n\n    \"\"\"\n    baseHandler - log request and set state to be used by method implementation\n    \"\"\"\n    def baseHandler(self, checkExists=True):\n         \n        # Output request URI to log\n        self.log = logging.getLogger(\"h5serv\")\n        \n        protocol = self.request.protocol\n        if \"X-Forwarded-Proto\" in self.request.headers:\n            protocol = self.request.headers[\"X-Forwarded-Proto\"]\n        \n        host = self.request.host\n        if \"X-Forwarded-Host\" in self.request.headers:\n            host = self.request.headers[\"X-Forwarded-Host\"]\n        \n        #domain_encoded = self.get_argument(\"host\")\n        #print(\"domain_encoded: \", domain_encoded)\n        self.domain = self.get_query_argument(\"host\", default=None)\n\n        if not self.domain:\n            self.domain = self.get_query_argument(\"domain\", default=None) \n\n        if not self.domain:\n            self.domain = host\n\n        remote_ip = self.request.remote_ip\n        if \"X-Real-Ip\" in self.request.headers:\n            remote_ip = self.request.headers[\"X-Real-Ip\"]\n\n        # sets self.userid, self.username\n        self.get_current_user()  \n        self.reqUuid = self.getRequestId()\n        self.filePath = self.getFilePath(self.domain, checkExists)\n         \n        self.href = protocol + '://' + host \n        self.log.info(\"baseHandler, href: \" + self.href)\n        
msg = \"REQ \" + self.request.method + \" \" + self.href + self.request.uri\n        msg += \" {remote_ip: \" + remote_ip\n        if self.username is not None:\n            msg += \", username: \" + to_str(self.username)\n        msg += \"}\"\n        self.log.info(msg)\n\n    \"\"\"\n    getExternal uri - return url for given domain\n       Use protocol and host of current request\n    \"\"\"\n    def getExternalHref(self, domain, h5path=None):\n        target = self.request.protocol\n        if \"X-Forwarded-Proto\" in self.request.headers:\n            target = self.request.headers[\"X-Forwarded-Proto\"]\n        target += '://'\n\n        host = self.request.host\n        if \"X-Forwarded-Host\" in self.request.headers:\n            host = self.request.headers[\"X-Forwarded-Host\"]\n        hostQuery = self.get_query_argument(\"host\", default=None)\n        \n        targetHostQuery = ''\n\n        # url encode the domain\n        domain = self.nameEncode(domain)\n        if hostQuery or self.isTocFilePath(self.filePath):\n            target += host\n            targetHostQuery = '?host=' + domain\n        else:\n            target += domain\n\n        if h5path is None or h5path == '/':   \n            target += '/'\n        else:\n            target += '/#h5path(' + h5path + ')'\n        target += targetHostQuery\n        \n        return target\n\n    \"\"\"\n    Convience method to compute href links\n    \"\"\"\n    def getHref(self, uri, query=None):\n        href = self.href + '/' + uri  \n        delimiter = '?'\n        if self.get_query_argument(\"host\", default=None):\n            href  += \"?host=\" + self.nameEncode(self.get_query_argument(\"host\"))\n            delimiter = '&'\n            \n        if query is not None:\n            if type(query) is str:\n                href += delimiter + query\n            else:\n                # list or tuple\n                for item in query:\n                    href += delimiter + item\n             
       delimiter = '&'\n        return href\n            \n        \n    def setDefaultAcl(self):\n        \"\"\" Set default ACL for user TOC file.\n        \"\"\"\n        log = logging.getLogger(\"h5serv\")\n        log.info(\"setDeaultAcl -- userid: \" + str(self.userid))\n        if self.userid <= 0:\n            raise HTTPError(500, \"Expected userid\")\n        username = getUserName(self.userid)\n        filePath = tocUtil.getTocFilePath(username)\n        try:\n            fileUtil.verifyFile(filePath)\n        except HTTPError:\n            log.info(\"toc file doesn't exist, returning\")\n            return\n        try:\n            with Hdf5db(filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                current_user_acl = db.getAcl(rootUUID, self.userid)\n                acl = db.getDefaultAcl()\n                acl['userid'] = userid\n                fields = ('create', 'read', 'update', 'delete', 'readACL', 'updateACL')  \n                for field in fields:\n                    acl[field] = True \n                db.setAcl(obj_uuid, acl)\n                     \n        except IOError as e:\n            log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n         \n        \n    def getFilePath(self, domain, checkExists=True):\n        \"\"\" Helper method - return file path for given domain.\n        \"\"\"\n        self.log.info(\"getFilePath: \" + domain + \" checkExists: \" + str(checkExists))\n        tocFilePath = fileUtil.getTocFilePathForDomain(domain, auth)\n        self.log.info(\"tocFilePath: \" + tocFilePath)\n        if not fileUtil.isFile(tocFilePath):\n            tocUtil.createTocFile(tocFilePath)\n            if self.userid > 0:\n                # setup the permision to grant this user exclusive write access\n                # and public read for everyone\n                
self.setDefaultAcl()\n    \n        filePath = fileUtil.getFilePath(domain, auth)\n\n        # convert any \"%2E\" substrings with \".\" (since dot isn't allowed for domain name)\n        filePath = self.nameDecode(filePath)  \n         \n        if checkExists:\n            while True:\n                if fileUtil.isFile(filePath):\n                    break\n                # Unfortunately the host query parameter substitues '/' for \"%2E\",\n                # so check to see if any slashes should really be dots.\n                # clients should prefer using the host header if this is an issue\n                self.log.info(\"filePath: \" + filePath + \" not found\")\n                host_query = self.get_query_argument(\"host\", default=None)\n                if host_query is None:\n                    # If using host header, we don't need to guess about the %2E substitution\n                    break  \n                if domain.find('.') > -1:\n                    domain = domain.replace('.', '%2E', 1)\n                    try:\n                        filePath = fileUtil.getFilePath(domain, auth)\n                    except HTTPError:\n                        self.log.info(\"invalid domain, ignoring\")\n                        break\n                    filePath = self.nameDecode(filePath)\n                else:\n                    break\n            self.log.info(\"verifyFile: \" + filePath)    \n            fileUtil.verifyFile(filePath)  # throws exception if not found  \n        \n        return filePath\n\n    def convertExternalPath(self, path_name):\n        \"\"\" convert external path returned by h5db to h5domain \n        convention\n        Note:\n        The external path might be a unix posix path or a HDF Server domain name.  
\n        Apply some heuristics to make a best guess at which it is.\n        \"\"\"\n        server_domain = config.get(\"domain\")\n        dns_suffixes = [\".org\", \".edu\", \".com\", \".gov\", \".net\", \".mil\", server_domain]\n        hdf5_extension = config.get(\"hdf5_ext\")\n        parent_domain = self.domain\n        n = parent_domain.find('.')\n        if n > 0:\n            parent_domain = self.domain[n:]\n        h5domain = None\n        if path_name.find('/') == -1:\n            if path_name.find('.') == -1:\n                # no slash or dot, tack on the dns path relative to the source domain\n                h5domain = path_name + parent_domain\n            else:\n                # has a dot, no slashes\n                if path_name.endswith(hdf5_extension):\n                    # strip off extension and prepend to front of domain\n                    h5domain = path_name[:-len(hdf5_extension)] + parent_domain\n                else:\n                    for dns_suffix in dns_suffixes:\n                        if path_name.endswith(dns_suffix):\n                            # looks like absoutle DNS path, return that\n                            h5domain = path_name\n                            break\n                    \n                    if h5domain is None:\n                        # if we get to here, assume it a relative DNS path\n                        if path_name.endswith(hdf5_extension):\n                            h5domain = path_name[:-len(hdf5_extension)] + parent_domain\n                        else:\n                            h5domain = path_name + parent_domain\n        else:\n            # assume relative or absolute Unix file path\n            if path_name.startswith('/'):\n                h5domain = fileUtil.getDomain(path_name)\n            else:\n                # relative posix file path\n                parts = path_name.split('/')\n                basename = parts[-1]\n                if 
basename.endswith(hdf5_extension):\n                    basename = basename[:-len(hdf5_extension)]\n                    parts[-1] = basename\n\n                h5domain = parent_domain[1:]  # don't include first dot\n                for part in parts:\n                    if part:\n                        h5domain = part + '.' + h5domain                    \n        \n        h5domain = self.nameEncode(h5domain)\n        return h5domain\n        \n    def isWritable(self, filePath):\n        \"\"\"Helper method - raise 403 error if given file path is not writable\n        \"\"\"\n        fileUtil.verifyFile(filePath, writable=True)\n        \n    def isTocFilePath(self, filePath):\n        \"\"\"Helper method - return True if this is a TOC file apth\n        \"\"\"\n        if tocUtil.isTocFilePath(filePath):\n            return True\n        else:\n            return False\n\n  \n    def nameDecode(self, name):\n        \"\"\"\n        Helper function - convert url-encoded name to orignal format\n        \"\"\"\n        name =  name.replace('%2E', '.')\n        return name\n\n    def nameEncode(self, name):\n      \n        \"\"\"\n        Helper function - convert name to url-friendly format\n        Replaces all non-alphanumeric characters with '%<ascii_hex>'\n        \"\"\"\n         \n        valid_chars = ['-', '.', '_', '~', ':', '/', '?', '#', '[', ']', '@', '!', '$', '&', \"'\", '(', ')', '*', '+', ',', ';', '=']\n        out = []\n        for ch in name:\n            if ch.isalnum():\n                out.append(ch)\n            elif ch == ' ':\n                out.append('+')\n            elif ch == '%':\n                # pass through encoded chars ('%xx' where xx are hexidecimal values)\n                out.append(ch)\n            elif ch in valid_chars:\n                # other valid url chars\n                out.append(ch)\n            else:\n                hex = format(ord(ch), '02X')\n                out.append('%' + hex)\n         \n        
return ''.join(out)\n            \n\n    def getRequestId(self):\n        \"\"\"\n        Helper method - return request uuid from request URI\n        URI' are of the form:\n            /groups/<uuid>/xxx\n            /datasets/<uuid>/xxx\n            /datatypes/<uuid>/xxx\n        extract the <uuid> and return it.\n        Throw 500 error is the URI is not in the above form\n        \"\"\"\n    \n\n        uri = self.request.path\n\n        if uri.startswith('/groups/'):\n            uri = uri[len('/groups/'):]  # get stuff after /groups/\n        elif uri.startswith('/datasets/'):\n            uri = uri[len('/datasets/'):]  # get stuff after /datasets/\n        elif uri.startswith('/datatypes/'):\n            uri = uri[len('/datatypes/'):]  # get stuff after /datatypes/\n        else:\n\n            #msg = \"unexpected uri: \" + uri\n            #self.log.error(msg)\n            #raise HTTPError(500, reason=msg)\n\n            return None\n        npos = uri.find('/')\n        if npos < 0:\n            uuid = uri\n        elif npos == 0:\n            msg = \"Bad Request: uri is invalid\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        else:\n            uuid = uri[:npos]\n\n        self.log.info('got uuid: [' + uuid + ']')\n\n        return uuid\n        \n    \"\"\"\n    Get requested content type.  
Returns either \"binary\" if the accept header is \n    octet stream, otherwise json.\n    Currently does not support q fields.\n    \"\"\"\n    def getAcceptType(self):\n        content_type = self.request.headers.get('Accept')\n        if content_type:\n            self.log.info(\"CONTENT_TYPE:\" + content_type)\n        if content_type == \"application/octet-stream\":\n            return \"binary\"\n        else:\n            return \"json\"       \n\nclass LinkCollectionHandler(BaseHandler):\n    def get(self):\n        self.baseHandler()\n\n        # Get optional query parameters\n        limit = self.get_query_argument(\"Limit\", 0)\n        if type(limit) is not int:\n            try:\n                limit = int(limit)\n            except ValueError:\n                msg = \"Bad Request: Expected int type for limit\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n        marker = self.get_query_argument(\"Marker\", None)\n\n        response = {}\n\n        items = None\n        rootUUID = None\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                current_user_acl = db.getAcl(self.reqUuid, self.userid)\n                self.verifyAcl(current_user_acl, 'read')  # throws exception is unauthorized\n                items = db.getLinkItems(self.reqUuid, marker=marker, limit=limit)\n\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        # got everything we need, put together the response\n        links = []\n        hrefs = []\n        \n        hostQuery = ''\n        if self.get_query_argument(\"host\", default=None):\n            hostQuery = \"?host=\" + self.get_query_argument(\"host\") \n\n        hrefs.append({\n            'rel': 'self',\n            'href': 
class LinkHandler(BaseHandler):
    def getName(self, uri):
        """Helper method - extract the link name from a request uri.

        uri should be in the form: /group/<uuid>/links/<name>; returns the
        url-unescaped <name>.  Raises HTTPError 500 if '/links/' is absent
        (routing should make that impossible) and 400 for a missing name or
        a name containing '/'.
        """
        npos = uri.find('/links/')
        if npos < 0:
            # shouldn't be possible to get here
            msg = "Internal Server Error: Unexpected uri"
            self.log.error(msg)
            raise HTTPError(500, reason=msg)
        if npos+len('/links/') >= len(uri):
            # no name specified
            msg = "Bad Request: no name specified"
            self.log.info(msg)
            raise HTTPError(400, reason=msg)
        linkName = uri[npos+len('/links/'):]
        if linkName.find('/') >= 0:
            # can't have '/' in link name
            msg = "Bad Request: invalid linkname, '/' not allowed"
            self.log.info(msg)
            raise HTTPError(400, reason=msg)
        npos = linkName.rfind('?')
        if npos >= 0:
            # trim off the query params
            linkName = linkName[:npos]

        linkName = url_unescape(linkName)
        return linkName

    def get(self):
        """GET /groups/<id>/links/<name> - return JSON for the named link."""
        self.baseHandler()

        linkName = self.getName(self.request.path)

        self.log.info("linkName:["+linkName+"]")

        response = {}

        rootUUID = None
        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                acl = db.getAcl(self.reqUuid, self.userid)
                self.verifyAcl(acl, 'read')  # throws exception if unauthorized
                item = db.getLinkItemByUuid(self.reqUuid, linkName)
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            status = errNoToHttpStatus(e.errno)
            raise HTTPError(status, reason=e.strerror)

        response['lastModified'] = unixTimeToUTC(item['mtime'])
        response['created'] = unixTimeToUTC(item['ctime'])
        # these keys are surfaced elsewhere in the response (or not at all)
        for key in ('mtime', 'ctime', 'href'):
            if key in item:
                del item[key]

        # replace 'file' key by 'h5domain' if present
        if 'file' in item:
            h5domain = item['file']
            del item['file']
            item['h5domain'] = self.convertExternalPath(h5domain)

        response['link'] = item

        hrefs = []
        hrefs.append({
            'rel': 'self',
            'href': self.getHref('groups/' + self.reqUuid + '/links/' + url_escape(linkName))
        })
        hrefs.append({
            'rel': 'root',
            'href': self.getHref( 'groups/' + rootUUID)
        })
        hrefs.append({
            'rel': 'home', 'href': self.getHref('')
        })
        hrefs.append({
            'rel': 'owner',
            'href': self.getHref('groups/' + self.reqUuid)
        })

        target = None
        if item['class'] == 'H5L_TYPE_HARD':
            target = self.getHref(item['collection'] + '/' + item['id'])
        elif item['class'] == 'H5L_TYPE_SOFT':
            target = self.getHref('/#h5path(' + item['h5path'] + ')')
        elif item['class'] == 'H5L_TYPE_EXTERNAL':
            if item['h5domain'].endswith(config.get('domain')):
                # fix: previously referenced the loose local 'h5domain' which
                # is only bound when 'file' was present in the db item; use
                # the canonical item['h5domain'] instead
                target = self.getExternalHref(item['h5domain'], item['h5path'])

        if target:
            hrefs.append({'rel': 'target', 'href': target})

        response['hrefs'] = hrefs
        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))

    def put(self):
        """PUT - create a new link.

        Patterns are:
            PUT /groups/<id>/links/<name> {id: <id> }
            PUT /groups/<id>/links/<name> {h5path: <path> }
            PUT /groups/<id>/links/<name> {h5path: <path>, h5domain: <href> }
        Responds 201 on success, 409 if the name already exists, 403 for
        TOC domains, 400 for bad request bodies.
        """
        self.baseHandler()

        linkName = self.getName(self.request.path)

        body = None
        try:
            body = json_decode(self.request.body)
        except ValueError as e:
            # fix: was 'e.message' (removed in Python 3) and a bare
            # 'log.info' (NameError) — use str(e) and self.log
            msg = "JSON Parser Error: " + str(e)
            self.log.info(msg)
            raise HTTPError(400, reason=msg)

        childUuid = None
        h5path = None
        h5domain = None
        filename = None   # fake filename

        if "id" in body:
            childUuid = body["id"]
            if childUuid is None or len(childUuid) == 0:
                msg = "Bad Request: id not specified"
                self.log.info(msg)
                raise HTTPError(400, reason=msg)
        elif "h5path" in body:
            # todo
            h5path = body["h5path"]
            if h5path is None or len(h5path) == 0:
                raise HTTPError(400)

            # if h5domain is present, this will be an external link
            if "h5domain" in body:
                h5domain = body["h5domain"]
        else:
            msg = "Bad request: missing required body keys"
            self.log.info(msg)
            # fix: keyword was misspelled 'reasoln', which raised TypeError
            raise HTTPError(400, reason=msg)

        if self.isTocFilePath(self.filePath):
            msg = "Forbidden: links can not be directly created in TOC domain"
            self.log.info(msg)
            raise HTTPError(403, reason=msg)

        response = {}

        rootUUID = None
        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                acl = db.getAcl(self.reqUuid, self.userid)
                self.verifyAcl(acl, 'create')  # throws exception if unauthorized
                try:
                    existingItem = db.getLinkItemByUuid(self.reqUuid, linkName)
                    if existingItem:
                        # link already exists
                        msg = "Unable to create link (Name already exists)"
                        self.log.info(msg)
                        raise HTTPError(409, reason=msg)
                except IOError as e:
                    # link not found, so we can add one with this name
                    pass

                if childUuid:
                    db.linkObject(self.reqUuid, childUuid, linkName)
                elif h5domain:
                    db.createExternalLink(self.reqUuid, h5domain, h5path, linkName)
                elif h5path:
                    db.createSoftLink(self.reqUuid, h5path, linkName)

        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            status = errNoToHttpStatus(e.errno)
            raise HTTPError(status, reason=e.strerror)

        hrefs = []

        hrefs.append({
            'rel': 'self',
            'href': self.getHref('groups/' + self.reqUuid + '/links/' + url_escape(linkName))
        })
        hrefs.append({
            'rel': 'root',
            'href': self.getHref('groups/' + rootUUID)
        })
        hrefs.append({
            'rel': 'home',
            'href': self.getHref('')
        })
        hrefs.append({
            'rel': 'owner', 'href': self.getHref('groups/' + self.reqUuid) })
        response['hrefs'] = hrefs

        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))
        self.set_status(201)

    def delete(self):
        """DELETE /groups/<id>/links/<name> - remove the named link.

        Forbidden (403) for TOC domains; requires 'delete' permission.
        """
        self.baseHandler()

        linkName = self.getName(self.request.path)

        response = {}
        rootUUID = None

        self.isWritable(self.filePath)
        if self.isTocFilePath(self.filePath):
            msg = "Forbidden: links can not be directly modified in TOC domain"
            self.log.info(msg)
            raise HTTPError(403, reason=msg)
        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                acl = db.getAcl(self.reqUuid, self.userid)
                self.verifyAcl(acl, 'delete')  # throws exception if unauthorized
                db.unlinkItem(self.reqUuid, linkName)
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            status = errNoToHttpStatus(e.errno)
            raise HTTPError(status, reason=e.strerror)

        hrefs = []

        hrefs.append({
            'rel': 'root',
            'href': self.getHref('groups/' + rootUUID)
        })
        hrefs.append({'rel': 'home', 'href': self.getHref('')})
        hrefs.append({
            'rel': 'owner', 'href': self.getHref('groups/' + self.reqUuid)})

        response['hrefs'] = hrefs
        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))
class AclHandler(BaseHandler):
    def getRequestCollectionName(self):
        """Helper - return the collection name from the request path.

        Request is in the form /(datasets|groups|datatypes)/<id>/acls(/<username>),
        or /acls(/<username>) for the domain acl (which maps to 'groups').
        Raises HTTPError 400 for a malformed uri and 500 if an unexpected
        collection name is seen (routing should prevent that).
        """
        uri = self.request.path

        npos = uri.find('/')
        if npos < 0:
            self.log.info("bad uri")
            raise HTTPError(400)
        if uri.startswith('/acls/'):
            # domain request - return group collection
            return 'groups'

        uri = uri[(npos+1):]

        npos = uri.find('/')  # second '/'
        if npos < 0:
            # uri is "/acls"
            return "groups"
        col_name = uri[:npos]

        self.log.info('got collection name: [' + col_name + ']')
        if col_name not in ('datasets', 'groups', 'datatypes'):
            msg = "Internal Server Error: collection name unexpected"
            self.log.error(msg)
            raise HTTPError(500, reason=msg)   # shouldn't get routed here in this case

        return col_name

    def getName(self):
        """Helper - return the username from a uri of the form
        /group/<uuid>/acls/<username>, or None for the default domain acl
        (uri ending in '/acls').  Query params are stripped; the name is
        NOT url-unescaped here.
        """
        uri = self.request.path

        if uri.endswith('/acls'):
            return None  # default domain acl
        npos = uri.find('/acls/')
        if npos < 0:
            # shouldn't be possible to get here
            msg = "Internal Server Error: Unexpected uri"
            self.log.error(msg)
            raise HTTPError(500, reason=msg)
        if npos+len('/acls/') >= len(uri):
            # no name specified
            msg = "Bad Request: no name specified"
            self.log.info(msg)
            raise HTTPError(400, reason=msg)
        userName = uri[npos+len('/acls/'):]
        if userName.find('/') >= 0:
            # can't have '/' in link name
            msg = "Bad Request: invalid linkname, '/' not allowed"
            self.log.info(msg)
            raise HTTPError(400, reason=msg)
        npos = userName.rfind('?')
        if npos >= 0:
            # trim off the query params
            userName = userName[:npos]
        return userName

    def convertUserIdToUserName(self, acl_in):
        """
        convertUserIdToUserName - replace userids with username.

        Accepts a single acl dict or a list/tuple of them; returns the same
        shape with 'userid' replaced by 'userName' and all other values
        coerced to booleans.
        """
        acl_out = None
        if type(acl_in) in (list, tuple):
            # convert list to list
            acl_out = []
            for item in acl_in:
                acl_out.append(self.convertUserIdToUserName(item))
        else:
            acl_out = {}
            for key in acl_in.keys():
                if key == 'userid':
                    # convert userid to username; userid 0 is the default acl
                    userid = acl_in['userid']

                    user_name = '???'
                    if userid == 0:
                        user_name = 'default'
                    else:
                        user_name = auth.getUserName(userid)
                        if user_name is None:
                            self.log.warning("user not found for userid: " + str(userid))
                    acl_out['userName'] = user_name
                else:
                    value = acl_in[key]
                    acl_out[key] = True if value else False
        return acl_out

    def get(self):
        """GET acls - return acl(s) for an object or the domain.

        With a username in the path, returns that user's acl; otherwise all
        acls.  Requires 'readACL' permission.  404 if the username is
        unknown.
        """
        self.baseHandler()

        req_uuid = None
        if not self.request.path.startswith("/acls"):
            # get UUID for object unless this is a get on domain acl
            req_uuid = self.getRequestId()

        rootUUID = None
        # NOTE(review): return value unused; call kept in case getFilePath
        # performs validation side effects — confirm before removing
        filePath = self.getFilePath(self.domain)
        userName = self.getName()

        col_name = self.getRequestCollectionName()

        req_userid = None
        if userName:
            if userName == 'default':
                req_userid = 0
            else:
                req_userid = auth.getUserId(userName)
                if req_userid is None:
                    # username not found
                    msg = "username does not exist"
                    self.log.info(msg)
                    raise HTTPError(404, reason=msg)

        acl = None
        current_user_acl = None
        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                if req_uuid:
                    obj_uuid = req_uuid
                else:
                    obj_uuid = rootUUID

                current_user_acl = db.getAcl(obj_uuid, self.userid)
                self.verifyAcl(current_user_acl, 'readACL')  # throws exception if unauthorized
                if req_userid is None:
                    acl = db.getAcls(obj_uuid)
                else:
                    acl = db.getAcl(obj_uuid, req_userid)

        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            status = errNoToHttpStatus(e.errno)
            raise HTTPError(status, reason=e.strerror)

        response = {}
        acl = self.convertUserIdToUserName(acl)

        if userName is None:
            userName = ''  # for string concat in the hrefs
            response['acls'] = acl
        else:
            response['acl'] = acl

        hrefs = []

        if current_user_acl:
            if userName:
                hrefs.append({
                    'rel': 'self',
                    'href': self.getHref(col_name + '/' + obj_uuid + '/acls/' + url_escape(userName))
                })
            else:
                hrefs.append({
                    'rel': 'self',
                    'href': self.getHref(col_name + '/' + obj_uuid + '/acls')
                })

        else:
            hrefs.append({
                'rel': 'self',
                'href': self.getHref(col_name + '/' + obj_uuid + '/acls')
            })
        hrefs.append({
            'rel': 'root',
            'href': self.getHref('groups/' + rootUUID)
        })
        hrefs.append({'rel': 'home', 'href': self.getHref('')})
        hrefs.append({
            'rel': 'owner',
            'href': self.getHref(col_name + '/' + obj_uuid)
        })

        response['hrefs'] = hrefs
        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))

    def put(self):
        """PUT - create/update an acl.

        Patterns are:
            PUT /group/<id>/acls/<name> {'read': True, 'write': False }
            PUT /acls/<name> {'read'... }
        Requires 'updateACL' permission; responds 201 on success.
        """
        self.baseHandler()

        req_uuid = None
        if not self.request.path.startswith("/acls/"):
            req_uuid = self.getRequestId()
        col_name = self.getRequestCollectionName()
        # fix: check for a missing name BEFORE unescaping — previously
        # url_unescape(None) raised before the 400 check could fire
        userName = self.getName()

        if userName is None or len(userName) == 0:
            msg = "Bad Request: username not provided"
            self.log.info(msg)
            raise HTTPError(400, reason=msg)
        userName = url_unescape(userName)

        req_userid = None   # this is the userid of the acl we'll be updating
        # self.userid is the userid of the requestor
        if userName == 'default':
            req_userid = 0
        else:
            req_userid = auth.getUserId(userName)

        if req_userid is None:
            msg = "Bad Request: username not found"
            self.log.info(msg)
            raise HTTPError(400, reason=msg)

        body = None
        try:
            body = json_decode(self.request.body)
        except ValueError as e:
            # fix: was 'e.message', which does not exist in Python 3
            msg = "JSON Parser Error: " + str(e)
            self.log.info(msg)
            raise HTTPError(400, reason=msg)

        acl = {}
        acl['userid'] = req_userid
        for key in ('create', 'read', 'update',
                    'delete', 'readACL', 'updateACL'):
            if key in body:
                acl[key] = 1 if body[key] else 0
        if len(acl) == 1:
            msg = "Bad Request: no acl permissions found in request body"
            self.log.info(msg)
            raise HTTPError(400, reason=msg)

        response = {}

        rootUUID = None
        obj_uuid = None
        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                if req_uuid is None:
                    obj_uuid = rootUUID
                else:
                    obj_uuid = req_uuid
                current_user_acl = db.getAcl(obj_uuid, self.userid)
                self.verifyAcl(current_user_acl, 'updateACL')  # throws exception if unauthorized
                db.setAcl(obj_uuid, acl)
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            status = errNoToHttpStatus(e.errno)
            raise HTTPError(status, reason=e.strerror)

        hrefs = []

        hrefs.append({
            'rel': 'self',
            'href': self.getHref(col_name + '/' + obj_uuid + '/acls/' + url_escape(userName))
        })
        hrefs.append({
            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})
        hrefs.append({'rel': 'home', 'href': self.getHref('') })
        hrefs.append({
            'rel': 'owner',
            'href': self.getHref(col_name + '/' + obj_uuid)
        })

        response['hrefs'] = hrefs

        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))
        self.set_status(201)
class TypeHandler(BaseHandler):
    def get(self):
        """GET /datatypes/<id> - return a JSON description of a committed
        datatype: its type, timestamps, attribute count, and related hrefs.
        Requires 'read' permission.
        """
        self.baseHandler()

        if not self.reqUuid:
            msg = "Bad Request: id is not specified"
            self.log.info(msg)
            raise HTTPError(400, reason=msg)

        rootUUID = None
        item = None
        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                # verifyAcl raises if the requestor lacks read access
                self.verifyAcl(db.getAcl(self.reqUuid, self.userid), 'read')
                item = db.getCommittedTypeItemByUuid(self.reqUuid)
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            raise HTTPError(errNoToHttpStatus(e.errno), reason=e.strerror)

        # assemble the response
        hrefs = [
            {'rel': 'self',
             'href': self.getHref('datatypes/' + self.reqUuid)},
            {'rel': 'root', 'href': self.getHref('groups/' + rootUUID)},
            {'rel': 'attributes',
             'href': self.getHref('datatypes/' + self.reqUuid + '/attributes')},
            {'rel': 'home', 'href': self.getHref('')},
        ]

        response = {}
        response['id'] = self.reqUuid
        response['type'] = h5json.getTypeResponse(item['type'])
        response['created'] = unixTimeToUTC(item['ctime'])
        response['lastModified'] = unixTimeToUTC(item['mtime'])
        response['attributeCount'] = item['attributeCount']
        response['hrefs'] = hrefs

        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))

    def delete(self):
        """DELETE /datatypes/<id> - remove the committed datatype.
        Requires a writable file and 'delete' permission.
        """
        self.baseHandler()

        self.isWritable(self.filePath)
        rootUUID = None
        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                # verifyAcl raises if the requestor lacks delete access
                self.verifyAcl(db.getAcl(self.reqUuid, self.userid), 'delete')
                db.deleteObjectByUuid('datatype', self.reqUuid)
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            raise HTTPError(errNoToHttpStatus(e.errno), reason=e.strerror)

        # assemble the response
        hrefs = [
            {'rel': 'self', 'href': self.getHref('datatypes')},
            {'rel': 'home', 'href': self.getHref('')},
            {'rel': 'root', 'href': self.getHref('groups/' + rootUUID)},
        ]

        response = {'hrefs': hrefs}

        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))
class DatatypeHandler(BaseHandler):
    def get(self):
        """GET /datasets/<id>/type - return the type of the given dataset.
        Requires 'read' permission.
        """
        self.baseHandler()

        rootUUID = None
        item = None
        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                # verifyAcl raises if the requestor lacks read access
                self.verifyAcl(db.getAcl(self.reqUuid, self.userid), 'read')
                item = db.getDatasetTypeItemByUuid(self.reqUuid)
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            raise HTTPError(errNoToHttpStatus(e.errno), reason=e.strerror)

        # assemble the response
        hrefs = [
            {'rel': 'self',
             'href': self.getHref('datasets/' + self.reqUuid + '/type')},
            {'rel': 'owner', 'href': self.getHref('datasets/' + self.reqUuid)},
            {'rel': 'root', 'href': self.getHref('groups/' + rootUUID)},
        ]

        response = {}
        response['type'] = item['type']
        response['hrefs'] = hrefs

        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))
class ShapeHandler(BaseHandler):

    def get(self):
        """GET /datasets/<id>/shape - return the shape of the dataset along
        with creation/modification times.  Requires 'read' permission.
        """
        self.baseHandler()

        response = {}
        hrefs = []
        rootUUID = None
        item = None

        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                acl = db.getAcl(self.reqUuid, self.userid)
                self.verifyAcl(acl, 'read')  # throws exception if unauthorized
                item = db.getDatasetItemByUuid(self.reqUuid)
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            status = errNoToHttpStatus(e.errno)
            raise HTTPError(status, reason=e.strerror)

        # got everything we need, put together the response
        hrefs.append({
            'rel': 'self', 'href': self.getHref('datasets/' + self.reqUuid)})
        hrefs.append({
            'rel': 'owner', 'href': self.getHref('datasets/' + self.reqUuid)})
        hrefs.append({
            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})
        shape = item['shape']
        response['shape'] = shape
        response['created'] = unixTimeToUTC(item['ctime'])
        response['lastModified'] = unixTimeToUTC(item['mtime'])
        response['hrefs'] = hrefs

        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))

    def put(self):
        """PUT /datasets/<id>/shape - resize the dataset.

        Request body must contain a 'shape' key holding an int or a
        list/tuple of non-negative ints.  Requires a writable file and
        'update' permission.  Responds 201 on success.
        """
        self.baseHandler()

        self.isWritable(self.filePath)

        response = {}
        hrefs = []
        rootUUID = None
        body = None
        try:
            body = json_decode(self.request.body)
        except ValueError as e:
            # fix: was 'e.message', which does not exist in Python 3
            msg = "JSON Parser Error: " + str(e)
            self.log.info(msg)
            raise HTTPError(400, reason=msg)

        if "shape" not in body:
            msg = "Bad Request: Shape not specified"
            self.log.info(msg)
            raise HTTPError(400, reason=msg)  # missing shape

        shape = body["shape"]
        if type(shape) == int:
            # promote a scalar extent to a 1-d shape
            dim1 = shape
            shape = [dim1]
        elif type(shape) == list or type(shape) == tuple:
            pass  # can use as is
        else:
            msg = "Bad Request: invalid shape argument"
            self.log.info(msg)
            raise HTTPError(400, reason=msg)

        # validate shape
        for extent in shape:
            if type(extent) != int:
                msg = "Bad Request: invalid shape type (expecting int)"
                self.log.info(msg)
                raise HTTPError(400, reason=msg)
            if extent < 0:
                msg = "Bad Request: invalid shape (negative extent)"
                self.log.info(msg)
                raise HTTPError(400, reason=msg)

        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                acl = db.getAcl(self.reqUuid, self.userid)
                self.verifyAcl(acl, 'update')  # throws exception if unauthorized
                db.resizeDataset(self.reqUuid, shape)
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            status = errNoToHttpStatus(e.errno)
            raise HTTPError(status, reason=e.strerror)

        self.log.info("resize OK")
        # put together the response
        hrefs.append({
            'rel': 'self', 'href': self.getHref('datasets/' + self.reqUuid)})
        hrefs.append({
            'rel': 'owner', 'href': self.getHref('datasets/' + self.reqUuid)})
        hrefs.append({
            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})
        response['hrefs'] = hrefs

        self.set_status(201)  # resource created
        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))
class DatasetHandler(BaseHandler):

    def getDatasetNumElements(self, shape_item):
        """Helper - return the number of elements described by shape_item.

        Returns 1 for H5S_SCALAR, 0 for anything that is not H5S_SIMPLE,
        otherwise the product of the dims (1 for rank 0).
        """
        if shape_item['class'] == 'H5S_SCALAR':
            return 1
        elif shape_item['class'] != 'H5S_SIMPLE':
            return 0

        dims = shape_item['dims']
        rank = len(dims)
        if rank == 0:
            return 1

        count = 1
        for i in range(rank):
            count *= dims[i]
        return count

    def getPreviewQuery(self, shape_item):
        """Helper method - return a 'select=' query string for a
        "reasonable" size data preview selection (no more than about 100
        elements).  Callers are expected to only invoke this for datasets
        large enough to need a preview.
        """
        # fix: docstring previously claimed this could return None for
        # small datasets; the implementation always returns a selection
        # (the <=100-element check lives in get()).
        select = "select=["

        dims = shape_item['dims']
        rank = len(dims)

        ncols = dims[rank-1]
        if rank > 1:
            nrows = dims[rank-2]
        else:
            nrows = 1

        # use some rough heuristics to define the selection
        # aim to return no more than 100 elements
        if ncols > 100:
            ncols = 100
        if nrows > 100:
            nrows = 100
        if nrows*ncols > 100:
            if nrows > ncols:
                nrows = 100 // ncols
            else:
                ncols = 100 // nrows

        for i in range(rank):
            if i == rank-1:
                select += "0:" + str(ncols)
            elif i == rank-2:
                select += "0:" + str(nrows) + ","
            else:
                select += "0:1,"
        select += "]"
        return select

    def get(self):
        """GET /datasets/<id> - return a JSON description of the dataset:
        type, shape, creation properties, timestamps, attribute count, and
        a data (or preview) href.  Requires 'read' permission.
        """
        self.baseHandler()

        response = {}
        hrefs = []
        rootUUID = None
        item = None
        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                acl = db.getAcl(self.reqUuid, self.userid)
                self.verifyAcl(acl, 'read')  # throws exception if unauthorized
                item = db.getDatasetItemByUuid(self.reqUuid)
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            status = errNoToHttpStatus(e.errno)
            raise HTTPError(status, reason=e.strerror)

        # got everything we need, put together the response
        count = self.getDatasetNumElements(item['shape'])

        if count <= 100:
            # small number of values, provide link to entire dataset
            hrefs.append({
                'rel': 'data',
                'href': self.getHref('datasets/' + self.reqUuid + '/value')
            })
        else:
            # large number of values, create preview link
            previewQuery = self.getPreviewQuery(item['shape'])
            hrefs.append({
                'rel': 'preview',
                'href': self.getHref('datasets/' + self.reqUuid + '/value', query=previewQuery)
            })

        hrefs.append({
            'rel': 'self', 'href': self.getHref('datasets/' + self.reqUuid)})
        hrefs.append({
            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})
        hrefs.append({
            'rel': 'attributes',
            'href': self.getHref('datasets/' + self.reqUuid + '/attributes')
        })

        hrefs.append({'rel': 'home', 'href': self.getHref('')})
        response['id'] = self.reqUuid
        typeItem = item['type']
        response['type'] = h5json.getTypeResponse(typeItem)
        response['shape'] = item['shape']

        if 'creationProperties' in item:
            response['creationProperties'] = item['creationProperties']
        response['created'] = unixTimeToUTC(item['ctime'])
        response['lastModified'] = unixTimeToUTC(item['mtime'])
        response['attributeCount'] = item['attributeCount']
        response['hrefs'] = hrefs

        self.set_header('Content-Type', 'application/json')

        json_rsp = json_encode(response)

        self.write(json_rsp)

    def delete(self):
        """DELETE /datasets/<id> - remove the dataset.
        Requires a writable file and 'delete' permission.
        """
        self.baseHandler()

        self.isWritable(self.filePath)

        response = {}
        hrefs = []
        rootUUID = None

        try:
            with Hdf5db(self.filePath, app_logger=self.log) as db:
                rootUUID = db.getUUIDByPath('/')
                acl = db.getAcl(self.reqUuid, self.userid)
                self.verifyAcl(acl, 'delete')  # throws exception if unauthorized
                db.deleteObjectByUuid('dataset', self.reqUuid)
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            status = errNoToHttpStatus(e.errno)
            raise HTTPError(status, reason=e.strerror)

        # write the response
        # fix: previously built hrefs by hand from request.protocol/host plus
        # a duplicated hostQuery computation; use the getHref helper for
        # consistency with every other handler in this file
        hrefs.append({'rel': 'self', 'href': self.getHref('datasets')})
        hrefs.append({
            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})
        hrefs.append({'rel': 'home', 'href': self.getHref('')})
        response['hrefs'] = hrefs

        self.set_header('Content-Type', 'application/json')
        self.write(json_encode(response))
self.reqUuid)\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        # write the response\n        href = self.request.protocol + '://' + self.request.host + '/'\n        hostQuery = ''\n        if self.get_query_argument(\"host\", default=None):\n            hostQuery = \"?host=\" + self.get_query_argument(\"host\")\n        hrefs.append({'rel': 'self', 'href': href + 'datasets' + hostQuery})\n        hrefs.append({\n            'rel': 'root', 'href': href + 'groups/' + rootUUID + hostQuery})\n        hrefs.append({'rel': 'home', 'href': href + hostQuery})\n        response['hrefs'] = hrefs\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n\n\nclass ValueHandler(BaseHandler):\n\n    def getSliceQueryParam(self, dim, extent):\n        \"\"\"\n        Helper method - return slice for dim based on query params\n\n        Query arg should be in the form: [<dim1>, <dim2>, ... 
, <dimn>]\n         brackets are optional for one dimensional arrays.\n         Each dimension, valid formats are:\n            single integer: n\n            start and end: n:m\n            start, end, and stride: n:m:s\n        \"\"\"\n        \n        # Get optional query parameters for given dim\n        self.log.info(\"getSliceQueryParam: \" + str(dim) + \", \" + str(extent))\n        query = self.get_query_argument(\"select\", default='ALL')\n        if query == 'ALL':\n            # just return a slice for the entire dimension\n            self.log.info(\"getSliceQueryParam: return default\")\n            return slice(0, extent)\n\n        self.log.info(\"select query value: [\" + query + \"]\")\n\n        if not query.startswith('['):\n            msg = \"Bad Request: selection query missing start bracket\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        if not query.endswith(']'):\n            msg = \"Bad Request: selection query missing end bracket\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n\n        # now strip out brackets\n        query = query[1:-1]\n\n        query_array = query.split(',')\n        if dim > len(query_array):\n            msg = \"Not enough dimensions supplied to query argument\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        dim_query = query_array[dim].strip()\n        start = 0\n        stop = extent\n        step = 1\n        if dim_query.find(':') < 0:\n            # just a number - return start = stop for this value\n            try:\n                start = int(dim_query)\n            except ValueError:\n                msg = \"Bad Request: invalid selection parameter (can't convert to int) for dimension: \" + str(dim)\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            stop = start\n        elif dim_query == ':':\n            # select everything\n            pass\n 
       else:\n            fields = dim_query.split(\":\")\n            if len(fields) > 3:\n                msg = \"Bad Request: Too many ':' seperators for dimension: \" + str(dim)\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            try:\n                if fields[0]:\n                    start = int(fields[0])\n                if fields[1]:\n                    stop = int(fields[1])\n                if len(fields) > 2 and fields[2]:\n                    step = int(fields[2])\n            except ValueError:\n                msg = \"Bad Request: invalid selection parameter (can't convert to int) for dimension: \" + str(dim)\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n\n        if start < 0 or start > extent:\n            msg = \"Bad Request: Invalid selection start parameter for dimension: \" + str(dim)\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        if stop > extent:\n            msg = \"Bad Request: Invalid selection stop parameter for dimension: \" + str(dim)\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        if step <= 0:\n            msg = \"Bad Request: invalid selection step parameter for dimension: \" + str(dim)\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        s = slice(start, stop, step)\n        self.log.info(\n            \"dim query[\" + str(dim) + \"] returning: start: \" +\n            str(start) + \" stop: \" + str(stop) + \" step: \" + str(step))\n        return s\n\n    def getHyperslabSelection(self, dsetshape, start, stop, step):\n        \"\"\"\n        Get slices given lists of start, stop, step values\n        \"\"\"\n        rank = len(dsetshape)\n        if start:\n            if type(start) is not list:\n                start = [start]\n            if len(start) != rank:\n                msg = \"Bad Request: start array length not equal to 
dataset rank\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            for dim in range(rank):\n                if start[dim] < 0 or start[dim] >= dsetshape[dim]:\n                    msg = \"Bad Request: start index invalid for dim: \" + str(dim)\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)\n        else:\n            start = []\n            for dim in range(rank):\n                start.append(0)\n\n        if stop:\n            if type(stop) is not list:\n                stop = [stop]\n            if len(stop) != rank:\n                msg = \"Bad Request: stop array length not equal to dataset rank\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            for dim in range(rank):\n                if stop[dim] <= start[dim] or stop[dim] > dsetshape[dim]:\n                    msg = \"Bad Request: stop index invalid for dim: \" + str(dim)\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)\n        else:\n            stop = []\n            for dim in range(rank):\n                stop.append(dsetshape[dim])\n\n        if step:\n            if type(step) is not list:\n                step = [step]\n            if len(step) != rank:\n                msg = \"Bad Request: step array length not equal to dataset rank\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            for dim in range(rank):\n                if step[dim] <= 0 or step[dim] > dsetshape[dim]:\n                    msg = \"Bad Request: step index invalid for dim: \" + str(dim)\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)\n        else:\n            step = []\n            for dim in range(rank):\n                step.append(1)\n\n        slices = []\n        for dim in range(rank):\n            try:\n                s = 
slice(int(start[dim]), int(stop[dim]), int(step[dim]))\n            except ValueError:\n                msg = \"Bad Request: invalid start/stop/step value\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            slices.append(s)\n        return tuple(slices)\n\n    def get(self):\n        self.baseHandler()\n         \n        request_content_type = self.getAcceptType()\n        response_content_type = \"json\"\n        self.log.info(\"contenttype:\" + request_content_type)\n         \n        response = {}\n        hrefs = []\n        rootUUID = None\n        item = None\n        item_shape = None\n        rank = None\n        item_type = None\n        values = None\n        indexes = None\n        slices = []\n        query_selection = self.get_query_argument(\"query\", default=None)\n        limit = self.get_query_argument(\"Limit\", default=None)\n        if limit:\n            try:\n                limit = int(limit)  # convert to int\n            except ValueError as e:\n                msg = \"invalid Limit: \" + e.message\n                log.info(msg)\n                raise HTTPError(400, msg)\n                \n        if query_selection:\n            self.log.info(\"query: \" + query_selection)\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(self.reqUuid, self.userid)\n                self.verifyAcl(acl, 'read')  # throws exception is unauthorized\n                item = db.getDatasetItemByUuid(self.reqUuid)\n                item_type = item['type']\n                \n                if item_type['class'] == 'H5T_OPAQUE':\n                    # TODO - support for returning OPAQUE data...\n                    msg = \"Not Implemented: GET OPAQUE data not supported\"\n                    self.log.info(msg)\n                    raise HTTPError(501, reason=msg)  # Not implemented\n                
elif item_type['class'] != 'H5T_COMPOUND' and query_selection:\n                    msg = \"Bad Request: query selection is only supported for compound types\"\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)\n            \n                \n                item_shape = item['shape']\n                if item_shape['class'] == 'H5S_NULL':\n                    pass   # don't return a value\n                elif item_shape['class'] == 'H5S_SCALAR':\n                    if query_selection:\n                        msg = \"Bad Request: query selection not valid with scalar dataset\"\n                        self.log.info(msg)\n                        raise HTTPError(400, reason=msg)\n                    values = db.getDatasetValuesByUuid(self.reqUuid, Ellipsis)\n                elif item_shape['class'] == 'H5S_SIMPLE':\n                    dims = item_shape['dims']\n                    rank = len(dims)\n                    if query_selection and rank != 1:\n                        msg = \"Bad Request: query selection is only supported for \"\n                        msg += \"one dimensional datasets\"\n                        self.log.info(msg)\n                        raise HTTPError(400, reason=msg)\n                    nelements = 1\n                    for dim in range(rank):\n                        dim_slice = self.getSliceQueryParam(dim, dims[dim])\n                        self.log.info(\"dim_size[{}]: {}\".format(dim, dim_slice))\n                        nelements *= (dim_slice.stop - dim_slice.start)\n                        slices.append(dim_slice)\n                    if query_selection:\n                        start = slices[0].start\n                        stop = slices[0].stop\n                        step = slices[0].step\n                        (indexes, values) = db.doDatasetQueryByUuid(self.reqUuid, query_selection, start=start, stop=stop, step=step, limit=limit)\n                    else:\n               
         if request_content_type == \"binary\":\n                            self.log.info(\"nelements:\" + str(nelements))\n                            itemSize = h5json.getItemSize(item_type)\n                            self.log.info(\"itemSize: \" + str(itemSize))\n                            if itemSize != \"H5T_VARIABLE\" and nelements > 1:\n                                response_content_type = \"binary\"\n                       \n                        self.log.info(\"response_content_type: \" + response_content_type)\n                        values = db.getDatasetValuesByUuid(\n                            self.reqUuid, tuple(slices), format=response_content_type)      \n                         \n                else:\n                    msg = \"Internal Server Error: unexpected shape class: \" + shape['class']\n                    self.log.error(msg)\n                    raise HTTPError(500, reason=msg)\n\n                rootUUID = db.getUUIDByPath('/')\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n         \n        # got everything we need, put together the response\n        \n        if response_content_type == \"binary\":\n            # binary transfer, just write the bytes and return\n            self.log.info(\"writing binary stream\")\n            self.set_header('Content-Type', 'application/octet-stream')\n            self.write(values)\n            return\n            \n        if request_content_type == \"binary\":\n            #unable to return binary data\n            self.log.info(\"requested binary response, but returning JSON instead\")\n            \n        \n        selfQuery = []\n        if self.get_query_argument(\"select\", default=''):\n            selfQuery.append('select=' + self.get_query_argument(\"select\"))\n        if self.get_query_argument(\"query\", 
default=''):     \n            selfQuery.append('query=' + self.get_query_argument(\n                \"select\", default=''))\n\n        if values is not None:\n            response['value'] = values\n        else:\n            response['value'] = None\n            \n        if indexes is not None:\n            response['index'] = indexes\n\n        hrefs.append({\n            'rel': 'self',\n            'href': self.getHref('datasets/' + self.reqUuid + '/value', query=selfQuery)\n        })\n        hrefs.append({\n            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})\n        hrefs.append({\n            'rel': 'owner', 'href': self.getHref('datasets/' + self.reqUuid)})\n        hrefs.append({\n            'rel': 'home', 'href': self.getHref('')})\n        response['hrefs'] = hrefs\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n\n    def post(self):\n        self.baseHandler()\n         \n        body = None\n        try:\n            body = json_decode(self.request.body)\n        except ValueError as e:\n            msg = \"JSON Parser Error: \" + e.message\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        self.log.info(\"type body: {}\".format(type(body)))\n\n        if \"points\" not in body:\n            msg = \"Bad Request: value post request without points in body\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        \n        #self.log.info(\"points type: {}\".format(type(points)))\n        self.log.info(\"body type: {}\".format(type(body)))\n        self.log.info(\"body keys: {}\".format(list(body.keys())))\n        points = body['points']\n        \n        if type(points) != list:\n            msg = \"Bad Request: expecting list of points, got: {}\".format(type(points))\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n\n        response = {}\n        hrefs = []\n        
rootUUID = None\n        item = None\n        values = None\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(self.reqUuid, self.userid)\n                self.verifyAcl(acl, 'read')  # throws exception is unauthorized\n                item = db.getDatasetItemByUuid(self.reqUuid)\n                shape = item['shape']\n                if shape['class'] == 'H5S_SCALAR':\n                    msg = \"Bad Request: point selection is not supported on scalar datasets\"\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)\n                if shape['class'] == 'H5S_NULL':\n                    msg = \"Bad Request: point selection is not supported on Null Space datasets\"\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)\n                \n                rank = len(shape['dims'])\n\n                for point in points:\n                    if rank == 1 and type(point) != int:\n                        msg = \"Bad Request: elements of points should be int type for datasets of rank 1\"\n                        self.log.info(msg)\n                        raise HTTPError(400, reason=msg)\n                    elif rank > 1 and type(point) != list:\n                        msg = \"Bad Request: elements of points should be list type for datasets of rank >1\"\n                        self.log.info(msg)\n                        raise HTTPError(400, reason=msg)\n                        if len(point) != rank:\n                            msg = \"Bad Request: one or more points have a missing coordinate value\"\n                            self.log.info(msg)\n                            raise HTTPError(400, reason=msg)\n\n                values = db.getDatasetPointSelectionByUuid(self.reqUuid, points)\n\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + 
\" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        # got everything we need, put together the response\n        \n        response['value'] = values\n\n        hrefs.append({\n            'rel': 'self',\n            'href': self.getHref('datasets/' + self.reqUuid + '/value')\n        })\n        hrefs.append({\n            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})\n        hrefs.append({\n            'rel': 'owner', 'href': self.getHref('datasets/' + self.reqUuid)})\n        hrefs.append({'rel': 'home',  'href': self.getHref('')})\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n\n    def put(self):\n        self.baseHandler()\n         \n        points = None\n        start = None\n        stop = None\n        step = None\n        body = None\n        format = \"json\"\n        data = None\n        \n        try:\n            body = json_decode(self.request.body)\n        except ValueError as e:\n            try:\n                msg = \"JSON Parser Error: \" + e.message\n            except AttributeError:\n                msg = \"JSON Parser Error\"\n            log.info(msg)\n            raise HTTPError(400, reason=msg)\n\n        if \"value\" in body:\n            data = body[\"value\"]\n            format = \"json\"\n        elif \"value_base64\" in body:\n            base64_data = body[\"value_base64\"]\n            base64_data = base64_data.encode(\"ascii\")\n            data = base64.b64decode(base64_data)\n            format = \"binary\"\n            \n        else:\n            msg = \"Bad Request: Value not specified\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)  # missing data     \n\n        if \"points\" in body:\n            points = body['points']\n            if type(points) != list:\n                msg = \"Bad Request: expecting list of points\"\n         
       self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            if 'start' in body or 'stop' in body or 'step' in body:\n                msg = \"Bad Request: can use hyperslab selection and points selection in one request\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            if len(points) > len(data):\n                msg = \"Bad Request: more points provided than values\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n        else:\n            # hyperslab selection\n            if 'start' in body:\n                start = body['start']\n            if 'stop' in body:\n                stop = body['stop']\n            if 'step' in body:\n                step = body['step']\n         \n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(self.reqUuid, self.userid)\n                self.verifyAcl(acl, 'update')  # throws exception is unauthorized\n                item = db.getDatasetItemByUuid(self.reqUuid)\n                item_type = item['type']\n               \n                dims = None\n                if 'shape' not in item:\n                    msg = \"Unexpected error, shape information not found\"\n                    self.log.info(msg)\n                    raise HTTPError(500, reason=msg)\n                datashape = item['shape']\n                if datashape['class'] == 'H5S_NULL':\n                    msg = \"Bad Request: PUT value can't be used with Null Space datasets\"\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)  # missing data\n                    \n                if format == \"binary\":\n                    item_size = h5json.getItemSize(item_type)\n                    if item_size == \"H5T_VARIABLE\":\n                        msg = \"binary data cannot be used with 
variable length types\"\n                        self.log.info(msg)\n                        raise HTTPError(400, reason=msg)  # need to use json\n                         \n                if datashape['class'] == 'H5S_SIMPLE':\n                    dims = datashape['dims']\n                elif datashape['class'] == 'H5S_SCALAR':\n                    if start is not None or stop is not None or step is not None:\n                        msg = \"Bad Request: start/stop/step option can't be used with Scalar Space datasets\"\n                        self.log.info(msg)\n                        raise HTTPError(400, reason=msg)  # missing data           \n                    elif points:\n                        msg = \"Bad Request: Point selection can't be used with scalar datasets\"\n                        self.log.info(msg)\n                        raise HTTPError(400, reason=msg)  # missing data\n                  \n                if points is not None:\n                    # write point selection\n                    db.setDatasetValuesByPointSelection(self.reqUuid, data, points, format=format)\n                     \n                else:\n                    slices = None\n                    if dims is not None:          \n                        slices = self.getHyperslabSelection(\n                            dims, start, stop, step)\n                    # todo - check that the types are compatible\n                    db.setDatasetValuesByUuid(self.reqUuid, data, slices, format=format)\n                     \n                    \n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        self.log.info(\"value put succeeded\")\n\n\nclass AttributeHandler(BaseHandler):\n\n    # convert embedded list (list of lists) to tuples\n    def convertToTuple(self, data):\n        if type(data) == list or 
type(data) == tuple:\n            sublist = []\n            for e in data:\n                sublist.append(self.convertToTuple(e))\n            return tuple(sublist)\n        else:\n            return data\n\n    def getRequestName(self):\n        # request is in the form /(datasets|groups|datatypes)/<id>/attributes(/<name>),\n        # return <name>\n        # return None if the uri doesn't end with \".../<name>\"\n        uri = self.request.path\n        name = None\n        npos = uri.rfind('/attributes')\n        if npos <= 0:\n            msg = \"Bad Request: URI is invalid\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        uri = uri[npos+len('/attributes'):]\n        if uri[0:1] == '/':\n            uri = uri[1:]\n            if len(uri) > 0:\n                # strip off any query param\n                npos = uri.rfind('?')\n                if npos > 0:\n                    uri = uri[:npos]\n                name = url_unescape(uri)  # todo: handle possible query string?\n                self.log.info('got name: [' + name + ']')\n\n        return name\n\n    def getRequestCollectionName(self):\n        # request is in the form /(datasets|groups|datatypes)/<id>/attributes(/<name>),\n        # return datasets | groups | datatypes\n        uri = self.request.path\n\n        npos = uri.find('/')\n        if npos < 0:\n            log.info(\"bad uri\")\n            raise HTTPError(400)\n        uri = uri[(npos+1):]\n        npos = uri.find('/')  # second '/'\n        col_name = uri[:npos]\n\n        self.log.info('got collection name: [' + col_name + ']')\n        if col_name not in ('datasets', 'groups', 'datatypes'):\n            msg = \"Internal Server Error: collection name unexpected\"\n            self.log.error(msg)\n            raise HTTPError(500, reason=msg)   # shouldn't get routed here in this case\n\n        return col_name\n\n    def get(self):\n        self.baseHandler()\n         \n        col_name = 
self.getRequestCollectionName()\n        attr_name = self.getRequestName()\n\n        response = {}\n        hrefs = []\n        rootUUID = None\n        items = []\n        # Get optional query parameters\n        limit = self.get_query_argument(\"Limit\", 0)\n        if type(limit) is not int:\n            try:\n                limit = int(limit)\n            except ValueError:\n                log.info(\"expected int type for limit\")\n                raise HTTPError(400)\n        marker = self.get_query_argument(\"Marker\", None)\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(self.reqUuid, self.userid)\n                self.verifyAcl(acl, 'read')  # throws exception is unauthorized\n                if attr_name is not None:\n                    item = db.getAttributeItem(col_name, self.reqUuid, attr_name)\n                    items.append(item)\n                else:\n                    # get all attributes (but without data)\n                    items = db.getAttributeItems(col_name, self.reqUuid, marker, limit)\n\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        # got everything we need, put together the response\n        owner_uri = col_name + '/' + self.reqUuid \n        self_uri = owner_uri + '/attributes'\n        if attr_name is not None:\n            self_uri += '/' + url_escape(attr_name)\n\n        hostQuery = ''\n        if self.get_query_argument(\"host\", default=None):\n            hostQuery = \"?host=\" + self.get_query_argument(\"host\")\n\n        responseItems = []\n        for item in items:\n            responseItem = {}\n            responseItem['name'] = item['name']\n            typeItem = item['type']\n            responseItem['type'] = 
h5json.getTypeResponse(typeItem)\n            responseItem['shape'] = item['shape']\n            responseItem['created'] = unixTimeToUTC(item['ctime'])\n            responseItem['lastModified'] = unixTimeToUTC(item['mtime'])\n            if not attr_name or typeItem['class'] == 'H5T_OPAQUE':\n                pass  # TODO - send data for H5T_OPAQUE's\n            elif 'value' in item:\n                responseItem['value'] = item['value']\n            else:\n                responseItem['value'] = None\n            if attr_name is None:\n                # add an href to the attribute\n                responseItem['href'] = self.getHref(self_uri + '/' + url_escape(item['name']))\n\n            responseItems.append(responseItem)\n\n        hrefs.append({'rel': 'self', 'href': self.getHref(self_uri)})\n        hrefs.append({'rel': 'owner', 'href': self.getHref(owner_uri)})\n        hrefs.append({'rel': 'root', 'href': self.getHref('/groups/' + rootUUID)})\n        hrefs.append({'rel': 'home', 'href': self.getHref('')})\n\n        if attr_name is None:\n            # specific attribute response\n            response['attributes'] = responseItems\n        else:\n            if len(responseItems) == 0:\n                # should have raised exception earlier\n                log.error(\"attribute not found: \" + attr_name)\n                raise HTTPError(404)\n            responseItem = responseItems[0]\n            for k in responseItem:\n                response[k] = responseItem[k]\n\n        response['hrefs'] = hrefs\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n\n    def put(self):\n        self.baseHandler()\n         \n        col_name = self.getRequestCollectionName()\n        attr_name = self.getRequestName()\n        if attr_name is None:\n            msg = \"Bad Request: attribute name not supplied\"\n            log.info(msg)\n            raise HTTPError(400, reason=msg)\n        \n        body = 
None\n        try:\n            body = json_decode(self.request.body)\n        except ValueError as e:\n            msg = \"JSON Parser Error\"\n            try:\n                msg += \": \" + e.message\n            except AttributeError:\n                pass # no message property\n          \n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n\n        if \"type\" not in body:\n            self.log.info(\"Type not supplied\")\n            raise HTTPError(400)  # missing type\n\n        dims = ()  # default as empty tuple (will create a scalar attribute)\n        if \"shape\" in body:\n            shape = body[\"shape\"]\n            if type(shape) == int:\n                dims = [shape]\n            elif type(shape) == list or type(shape) == tuple:\n                dims = shape  # can use as is\n            elif type(shape) in (str, unicode) and shape == 'H5S_NULL':\n                dims = None\n            else:\n                msg = \"Bad Request: shape is invalid!\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n        datatype = body[\"type\"]\n\n        # validate shape\n        if dims:\n            for extent in dims:\n                if type(extent) != int:\n                    msg = \"Bad Request: invalid shape type\"\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)\n                if extent < 0:\n                    msg = \"Bad Request: invalid shape (negative extent)\"\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)\n\n        # convert list values to tuples (otherwise h5py is not happy)\n        data = None\n\n        if dims is not None:\n            if \"value\" not in body:\n                msg = \"Bad Request: value not specified\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)  # missing value\n            value = body[\"value\"]\n\n            
data = self.convertToTuple(value)\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(self.reqUuid, self.userid)\n                self.verifyAcl(acl, 'create')  # throws exception is unauthorized\n                attribute_exist = True\n                try:\n                    db.getAttributeItem(col_name, self.reqUuid, attr_name)\n                except IOError:\n                    attribute_exist = False  \n                if attribute_exist:\n                    self.log.info(\"attribute {} already exist\".format(attr_name))\n                    raise HTTPError(409, \"Attribute already exist\")\n                db.createAttribute(\n                    col_name, self.reqUuid, attr_name, dims, datatype, data)\n                rootUUID = db.getUUIDByPath('/')\n\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        response = {}\n\n        # got everything we need, put together the response\n        root_href = self.getHref('groups/' + rootUUID)\n        owner_href = self.getHref(col_name + '/' + self.reqUuid)\n        self_href = owner_href + '/attributes'\n        if attr_name is not None:\n            self_href = self.getHref(col_name + '/' + self.reqUuid + '/' + attr_name)\n        else:\n            self_href = self.getHref(col_name + '/' + self.reqUuid)\n         \n        hrefs = []\n        hrefs.append({'rel': 'self',   'href': self_href})\n        hrefs.append({'rel': 'owner',  'href': owner_href})\n        hrefs.append({'rel': 'root',   'href': root_href})\n        response['hrefs'] = hrefs\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n        self.set_status(201)  # resource created\n\n    def delete(self):\n     
   self.baseHandler()\n         \n        col_name = self.getRequestCollectionName()\n        attr_name = self.getRequestName()\n        if attr_name is None:\n            msg = \"Bad Request: attribute name not specified\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        filePath = self.getFilePath(self.domain)\n        self.isWritable(self.filePath)\n\n        response = {}\n        hrefs = []\n        rootUUID = None\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(self.reqUuid, self.userid)\n                self.verifyAcl(acl, 'delete')  # throws exception is unauthorized\n                db.deleteAttribute(col_name, self.reqUuid, attr_name)\n\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        # got everything we need, put together the response\n        \n        root_href = self.getHref('groups/' + rootUUID)\n        owner_href = self.getHref(col_name + '/' + self.reqUuid)\n        self_href = self.getHref(col_name + '/' + self.reqUuid + '/attributes')\n        home_href = self.getHref('')\n\n        hrefs.append({'rel': 'self', 'href': self_href})\n        hrefs.append({'rel': 'owner', 'href': owner_href})\n        hrefs.append({'rel': 'root', 'href': root_href})\n        hrefs.append({'rel': 'home', 'href': home_href})\n        response['hrefs'] = hrefs\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n\n        self.log.info(\"Attribute delete succeeded\")\n\n\nclass GroupHandler(BaseHandler):\n\n    def get(self):\n        self.baseHandler()\n        \n        response = {}\n\n        hrefs = []\n        links = []\n        rootUUID = None\n        item = None\n        
include_links = self.get_query_argument(\"include_links\", 0)\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(self.reqUuid, self.userid)\n                self.verifyAcl(acl, 'read')  # throws exception is unauthorized\n                item = db.getGroupItemByUuid(self.reqUuid)\n                if include_links:\n                    # TBD: add marker & limit options for pagination\n                    links = db.getLinkItems(self.reqUuid)\n\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        # got everything we need, put together the response\n         \n        hrefs.append({\n            'rel': 'self',\n            'href': self.getHref('groups/' + self.reqUuid)\n        })\n        hrefs.append({\n            'rel': 'links',\n            'href': self.getHref('groups/' + self.reqUuid + '/links')\n        })\n        hrefs.append({\n            'rel': 'root', \n            'href': self.getHref('groups/' + rootUUID)\n        })\n        hrefs.append({\n            'rel': 'home',\n            'href': self.getHref('')\n        })\n        hrefs.append({\n            'rel': 'attributes',\n            'href': self.getHref('groups/' + self.reqUuid + '/attributes')\n        })\n        response['id'] = self.reqUuid\n        response['created'] = unixTimeToUTC(item['ctime'])\n        response['lastModified'] = unixTimeToUTC(item['mtime'])\n        response['attributeCount'] = item['attributeCount']\n        response['linkCount'] = item['linkCount']\n        response['hrefs'] = hrefs\n        if links:\n\n            hostQuery = ''\n            if self.get_query_argument(\"host\", default=None):\n                hostQuery = \"?host=\" + self.get_query_argument(\"host\")\n            
response[\"links\"] = []\n            for item in links:\n                link_item = {}\n                link_item['class'] = item['class']\n                link_item['title'] = item['title']\n                link_item['href'] = item['href'] = self.href + '/groups/' + self.reqUuid + '/links/' + self.nameEncode(item['title']) + hostQuery\n                if item['class'] == 'H5L_TYPE_HARD':\n                    link_item['id'] = item['id']\n                    link_item['collection'] = item['collection']\n                    link_item['target'] = self.href + '/' + item['collection'] + '/' + item['id'] + hostQuery\n                elif item['class'] == 'H5L_TYPE_SOFT':\n                    link_item['h5path'] = item['h5path']\n                elif item['class'] == 'H5L_TYPE_EXTERNAL':\n                    link_item['h5path'] = item['h5path']\n                    link_item['h5domain'] = self.convertExternalPath(item['file'])\n                    if link_item['h5domain'].endswith(config.get('domain')):\n                        link_item['target'] = self.getExternalHref(link_item['h5domain'], link_item['h5path'])\n                response[\"links\"].append(link_item)\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n\n    def delete(self):\n        self.baseHandler()\n         \n        self.isWritable(self.filePath)\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(self.reqUuid, self.userid)\n                self.verifyAcl(acl, 'delete')  # throws exception is unauthorized\n                db.deleteObjectByUuid('group', self.reqUuid)\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        response = {}\n        hrefs = []\n\n        # 
write the response\n         \n        hrefs.append({'rel': 'self', 'href': self.getHref('groups')})\n        hrefs.append({\n            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})\n        hrefs.append({'rel': 'home', 'href': self.getHref('')})\n        response['hrefs'] = hrefs\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n\n\nclass GroupCollectionHandler(BaseHandler):\n\n    def get(self):\n        self.baseHandler()\n         \n        rootUUID = None\n\n        # Get optional query parameters\n        limit = self.get_query_argument(\"Limit\", 0)\n        if type(limit) is not int:\n            try:\n                limit = int(limit)\n            except ValueError:\n                log.info(\"expected int type for limit\")\n                raise HTTPError(400)\n        marker = self.get_query_argument(\"Marker\", None)\n\n        response = {}\n\n        items = None\n        hrefs = []\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(rootUUID, self.userid)\n                self.verifyAcl(acl, 'read')  # throws exception is unauthorized\n                items = db.getCollection(\"groups\", marker, limit)\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        # write the response\n        response['groups'] = items\n         \n        hrefs.append({\n            'rel': 'self', 'href': self.getHref('groups')})\n        hrefs.append({\n            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})\n        hrefs.append({\n            'rel': 'home', 'href': self.getHref('')})\n        response['hrefs'] = hrefs\n\n        self.set_header('Content-Type', 'application/json')\n        
self.write(json_encode(response))\n\n    def post(self):\n        self.baseHandler()\n         \n        if self.request.path != '/groups':\n            msg = \"Method Not Allowed: bad group post request: \" + self.request.path\n            self.log.info(msg)\n            raise HTTPError(405, reason=msg)  # Method not allowed\n\n        parent_group_uuid = None\n        link_name = None\n\n        body = {}\n        if self.request.body:\n            try:\n                body = json_decode(self.request.body)\n            except ValueError as e:\n                msg = \"JSON Parser Error: \" + e.message\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n\n        if \"link\" in body:\n            link_options = body[\"link\"]\n            if \"id\" not in link_options or \"name\" not in link_options:\n                msg = \"Bad Request: missing link parameter\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            parent_group_uuid = link_options[\"id\"]\n            link_name = link_options[\"name\"]\n            self.log.info(\n                \"add link to: \" + parent_group_uuid + \" with name: \" + link_name)\n\n         \n        self.isWritable(self.filePath)\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                current_user_acl = db.getAcl(rootUUID, self.userid)\n\n                self.verifyAcl(current_user_acl, 'create')  # throws exception is unauthorized\n                if parent_group_uuid:\n                    # verify no link already exists before creating a new group\n                    link_exists = False\n                    try:\n                        item = db.getLinkItemByUuid(parent_group_uuid, link_name)\n                        if item:\n                            link_exists = True\n                    except IOError:\n                        pass # ok, link 
not found\n                    if link_exists:\n                        self.log.info(\"Link already exists\")\n                        raise HTTPError(409, \"Link already exists\")\n\n                grpUUID = db.createGroup()\n                item = db.getGroupItemByUuid(grpUUID)\n                # if link info is provided, link the new group\n                if parent_group_uuid:\n                    # link the new dataset\n                    db.linkObject(parent_group_uuid, grpUUID, link_name)\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        href = self.request.protocol + '://' + self.domain\n        self.set_header('Location', href + '/groups/' + grpUUID)\n        self.set_header('Content-Location', href + '/groups/' + grpUUID)\n\n        # got everything we need, put together the response\n        response = {}\n        hrefs = []\n         \n        hrefs.append({\n            'rel': 'self', 'href': self.getHref('groups/' + grpUUID)})\n        hrefs.append({\n            'rel': 'links',\n            'href': self.getHref('groups/' + grpUUID + '/links')\n        })\n        hrefs.append({\n            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})\n        hrefs.append({\n            'rel': 'home', 'href': self.getHref('')})\n        hrefs.append({\n            'rel': 'attributes',\n            'href': self.getHref('groups/' + grpUUID + '/attributes')\n        })\n        response['id'] = grpUUID\n        response['created'] = unixTimeToUTC(item['ctime'])\n        response['lastModified'] = unixTimeToUTC(item['mtime'])\n        response['attributeCount'] = item['attributeCount']\n        response['linkCount'] = item['linkCount']\n        response['hrefs'] = hrefs\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n       
 self.set_status(201)  # resource created\n\n\nclass DatasetCollectionHandler(BaseHandler):\n\n    def get(self):\n        self.baseHandler()\n\n        # Get optional query parameters\n        limit = self.get_query_argument(\"Limit\", 0)\n        if type(limit) is not int:\n            try:\n                limit = int(limit)\n            except ValueError:\n                msg = \"Bad Request: expected int type for limit\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n        marker = self.get_query_argument(\"Marker\", None)\n\n        response = {}\n        hrefs = []\n        rootUUID = None\n\n        items = None\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(rootUUID, self.userid)\n                self.verifyAcl(acl, 'read')  # throws exception is unauthorized\n                items = db.getCollection(\"datasets\", marker, limit)\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        # write the response\n        response['datasets'] = items\n         \n        hrefs.append({'rel': 'self', 'href': self.getHref('datasets')})\n        hrefs.append({\n            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})\n        hrefs.append({'rel': 'home', 'href': self.getHref('')})\n        response['hrefs'] = hrefs\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n\n    def post(self):\n        self.baseHandler()\n\n        if self.request.path != '/datasets':\n            msg = \"Method not Allowed: invalid datasets post request\"\n            log.info(msg)\n            raise HTTPError(405, reason=msg)  # Method not allowed\n\n        self.isWritable(self.filePath)\n        
dims = None\n        group_uuid = None\n        link_name = None\n\n        body = {}\n        if self.request.body:\n            try:\n                body = json_decode(self.request.body)\n            except ValueError as e:\n                msg = \"JSON Parser Error: \" + e.message\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n                \n\n        if \"type\" not in body:\n            msg = \"Bad Request: Type not specified\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)  # missing type\n\n        if \"shape\" in body:\n            shape = body[\"shape\"]\n            if type(shape) == int:\n                dims = [shape]\n            elif type(shape) == list or type(shape) == tuple:\n                dims = shape  # can use as is\n            elif type(shape) in (str, unicode) and shape == 'H5S_NULL':\n                dims = None\n            else:\n                msg = \"Bad Request: shape is invalid\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n        else:\n            dims = ()  # empty tuple\n\n        if \"link\" in body:\n            link_options = body[\"link\"]\n            if \"id\" not in link_options or \"name\" not in link_options:\n                msg = \"Bad Request: No 'name' or 'id' not specified\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n\n            group_uuid = link_options[\"id\"]\n            link_name = link_options[\"name\"]\n            self.log.info(\"add link to: \" + group_uuid + \" with name: \" + link_name)\n\n        datatype = body[\"type\"]\n\n        maxdims = None\n        if \"maxdims\" in body:\n            maxdims = body[\"maxdims\"]\n            if type(maxdims) == int:\n                dim1 = maxdims\n                maxdims = [dim1]\n            elif type(maxdims) == list or type(maxdims) == tuple:\n                pass  # can use as is\n            
else:\n                msg = \"Bad Request: maxdims is invalid\"\n                log.info(msg)\n                raise HTTPError(400, reason=msg)\n\n        # validate shape\n        if dims:\n            for extent in dims:\n                if type(extent) != int:\n                    msg = \"Bad Request: Invalid shape type\"\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)\n                if extent < 0:\n                    msg = \"Bad Request: shape dimension is negative\"\n                    self.log.info(\"msg\")\n                    raise HTTPError(400, reason=msg)\n\n        if maxdims:\n            if dims is None:\n                # can't use maxdims with null_space dataset\n                msg = \"Bad Request: maxdims not valid for H5S_NULL dataspace\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n\n            if len(maxdims) != len(dims):\n                msg = \"Bad Request: maxdims array length must equal shape array length\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            for i in range(len(dims)):\n                maxextent = maxdims[i]\n                if maxextent != 0 and maxextent < dims[i]:\n                    msg = \"Bad Request: maxdims extent can't be smaller than shape extent\"\n                    self.log.info(msg)\n                    raise HTTPError(400, reason=msg)\n                if maxextent == 0:\n                    maxdims[i] = None  # this indicates unlimited\n\n        creationProps = None\n        if \"creationProperties\" in body:\n            creationProps = body[\"creationProperties\"]\n        item = None\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(rootUUID, self.userid)\n                self.verifyAcl(acl, 'create')  # throws exception is unauthorized\n              
  # verify the link perm as well\n                if group_uuid and group_uuid != rootUUID:\n                    acl = db.getAcl(group_uuid, self.userid)\n                    self.verifyAcl(acl, 'create')  # throws exception is unauthorized\n                # verify the link name doesn't already exists\n                if group_uuid:\n                    # verify no link already exists before creating a new group\n                    link_exists = False\n                    try:\n                        item = db.getLinkItemByUuid(group_uuid, link_name)\n                        if item:\n                            link_exists = True\n                    except IOError:\n                        pass # ok, link not found\n                    if link_exists:\n                        self.log.info(\"Link already exists\")\n                        raise HTTPError(409, \"Link already exists\")\n\n                item = db.createDataset(datatype, dims, maxdims, creation_props=creationProps)\n                if group_uuid:\n                    # link the new dataset\n                    db.linkObject(group_uuid, item['id'], link_name)\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        response = {}\n\n        # got everything we need, put together the response\n        hrefs = []\n         \n        hrefs.append({\n            'rel': 'self',\n            'href': self.getHref('datasets/' + item['id'])\n        })\n        hrefs.append({\n            'rel': 'root',\n            'href': self.getHref('groups/' + rootUUID)\n        })\n        hrefs.append({\n            'rel': 'attributes',\n            'href': self.getHref('datasets/' + item['id'] + '/attributes')\n        })\n        hrefs.append({\n            'rel': 'value',\n            'href': self.getHref('datasets/' + item['id'] + '/value')})\n     
   response['id'] = item['id']\n        response['attributeCount'] = item['attributeCount']\n        response['hrefs'] = hrefs\n        response['created'] = unixTimeToUTC(item['ctime'])\n        response['lastModified'] = unixTimeToUTC(item['mtime'])\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n        self.set_status(201)  # resource created\n\n\nclass TypeCollectionHandler(BaseHandler):\n    def get(self):\n        self.baseHandler()\n         \n        # Get optional query parameters\n        limit = self.get_query_argument(\"Limit\", 0)\n        if type(limit) is not int:\n            try:\n                limit = int(limit)\n            except ValueError:\n                msg = \"Bad Request: expected int type for Limit\"\n                log.info(msg)\n                raise HTTPError(400, reason=msg)\n        marker = self.get_query_argument(\"Marker\", None)\n\n        response = {}\n        hrefs = []\n        rootUUID = None\n\n        items = None\n        try:\n            with Hdf5db(self.filePath) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(rootUUID, self.userid)\n                self.verifyAcl(acl, 'read')  # throws exception is unauthorized\n                items = db.getCollection(\"datatypes\", marker, limit)\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        # write the response\n        response['datatypes'] = items\n\n        hrefs.append({\n            'rel': 'self',\n            'href': self.getHref('datatypes')\n        })\n        hrefs.append({\n            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})\n        hrefs.append({'rel': 'home', 'href': self.getHref('')})\n        response['hrefs'] = hrefs\n\n        self.set_header('Content-Type', 
'application/json')\n        self.write(json_encode(response))\n\n    def post(self):\n        self.baseHandler()\n\n        if self.request.path != '/datatypes':\n            msg = \"Method not Allowed: invalid URI\"\n            log.info(msg)\n            raise HTTPError(405, reason=msg)  # Method not allowed\n\n        \n        self.isWritable(self.filePath)\n\n        body = None\n        try:\n            body = json_decode(self.request.body)\n        except ValueError as e:\n            msg = \"JSON Parser Error: \" + e.message\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n\n        parent_group_uuid = None\n        link_name = None\n\n        if \"type\" not in body:\n            msg = \"Type not specified\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)  # missing type\n\n        if \"link\" in body:\n            link_options = body[\"link\"]\n            if \"id\" not in link_options or \"name\" not in link_options:\n                msg = \"Bad Request: missing link parameter\"\n                self.log.info(msg)\n                raise HTTPError(400, reason=msg)\n            parent_group_uuid = link_options[\"id\"]\n            link_name = link_options[\"name\"]\n            self.log.info(\n                \"add link to: \" + parent_group_uuid + \" with name: \" + link_name)\n\n        datatype = body[\"type\"]\n\n        item = None\n        rootUUID = None\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(rootUUID, self.userid)\n                self.verifyAcl(acl, 'create')  # throws exception is unauthorized\n                if parent_group_uuid:\n                    # verify no link already exists before creating a new group\n                    link_exists = False\n                    try:\n                        item = db.getLinkItemByUuid(parent_group_uuid, link_name)\n 
                       if item:\n                            link_exists = True\n                    except IOError:\n                        pass # ok, link not found\n                    if link_exists:\n                        self.log.info(\"Link already exists\")\n                        raise HTTPError(409, \"Link already exists\")\n                item = db.createCommittedType(datatype)\n                # if link info is provided, link the new group\n                if parent_group_uuid:\n                    # link the new dataset\n                    db.linkObject(parent_group_uuid, item['id'], link_name)\n\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        response = {}\n\n        # got everything we need, put together the response\n        hrefs = []\n         \n        hrefs.append({\n            'rel': 'self',\n            'href': self.getHref('datatypes/' + item['id'])\n        })\n        hrefs.append({\n            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})\n        hrefs.append({\n            'rel': 'attributes',\n            'href': self.getHref('datatypes/' + item['id'] + '/attributes')\n        })\n        response['id'] = item['id']\n        response['attributeCount'] = 0\n        response['hrefs'] = hrefs\n        response['created'] = unixTimeToUTC(item['ctime'])\n        response['lastModified'] = unixTimeToUTC(item['mtime'])\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n        self.set_status(201)  # resource created\n\n\nclass RootHandler(BaseHandler):\n     \n    def getRootResponse(self, filePath):\n        acl = None\n        # used by GET / and PUT /\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n       
         acl = db.getAcl(rootUUID, self.userid)\n\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        self.verifyAcl(acl, 'read')  # throws exception is unauthorized\n\n        # generate response\n        hrefs = []\n         \n        hrefs.append({\n            'rel': 'self', 'href': self.getHref('')})\n        hrefs.append({\n            'rel': 'database', 'href': self.getHref('datasets')})\n        hrefs.append({'rel': 'groupbase', 'href': self.getHref('groups')})\n        hrefs.append({\n            'rel': 'typebase', 'href': self.getHref('datatypes')})\n        hrefs.append({\n            'rel': 'root', 'href': self.getHref('groups/' + rootUUID)})\n\n        response = {}\n        response['created'] = unixTimeToUTC(op.getctime(filePath))\n        response['lastModified'] = unixTimeToUTC(op.getmtime(filePath))\n        response['root'] = rootUUID\n        response['hrefs'] = hrefs\n\n        return response\n\n    def get(self):\n         \n        self.baseHandler()  \n        \"\"\"\n        self.log.info(\"header keys...\")\n        for k in self.request.headers.keys():\n            self.log.info(\"header[\" + k + \"]: \" + self.request.headers[k])\n        self.log.info('remote_ip: ' + self.request.remote_ip)\n        \"\"\"\n        try:\n            response = self.getRootResponse(self.filePath)\n        except HTTPError as e:\n            if e.status_code == 401:\n                # no user provied, just return 401 response\n                return\n            raise e  # re-throw the exception\n\n        root_uuid = response['root']\n \n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n\n    def put(self):\n        self.baseHandler(checkExists=False)     \n        new_domain_policy = config.get(\"new_domain_policy\")    \n        
if new_domain_policy:\n            # should be one of ANON, AUTH, NEVER\n            if new_domain_policy.upper() == \"NEVER\":\n                msg = \"Forbidden: new domains not allowed\"\n                self.log.info(msg)\n                raise HTTPError(403, reason=msg)\n            elif new_domain_policy.upper() == \"AUTH\" and self.userid <= 0:\n                msg = \"Unauthorized\"\n                self.log.info(msg)\n                raise HTTPError(401, reason=msg)\n             \n\n        self.log.info(\"filePath: \" + self.filePath)\n        \n        if self.filePath is not None and fileUtil.isFile(self.filePath):\n            # the file already exists\n            msg = \"Conflict: resource exists: \" + self.filePath\n            self.log.info(msg)\n            raise HTTPError(409, reason=msg)  # Conflict - is this the correct code?\n             \n        if self.filePath is not None and self.isTocFilePath(self.filePath):\n            msg = \"Forbidden: invalid resource\"\n            self.log.info(msg)\n            raise HTTPError(403, reason=msg)  # Forbidden - TOC file\n        \n        if self.filePath is None:\n            msg = \"domain not valid\"\n            self.log.info(msg)\n            raise HTTPError(400, reason=msg)\n        \n        self.log.info(\"FilePath: \" + self.filePath)     \n        # create directories as needed\n        fileUtil.makeDirs(op.dirname(self.filePath))\n        self.log.info(\"creating file: [\" + self.filePath + \"]\")\n\n        try:\n            Hdf5db.createHDF5File(self.filePath)\n        except IOError as e:\n            self.log.info(\n                \"IOError creating new HDF5 file: \" + str(e.errno) + \" \" + e.strerror)\n            raise HTTPError(\n                500, \"Unexpected error: unable to create collection\")\n\n        response = self.getRootResponse(self.filePath)\n        \n        try:\n            tocUtil.addTocEntry(self.domain, self.filePath, userid=self.userid)        \n        
except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        self.set_header('Content-Type', 'application/json')\n        self.write(json_encode(response))\n        self.set_status(201)  # resource created\n\n    def delete(self):\n        self.baseHandler()\n         \n        self.isWritable(self.filePath)\n\n        if not op.isfile(self.filePath):\n            # file not there\n            msg = \"Not found: resource does not exist\"\n            self.log.info(msg)\n            raise HTTPError(404, reason=msg)  # Not found\n\n        # don't use os.access since it will always return OK if uid is root\n        if not os.stat(self.filePath).st_mode & 0o200:\n            # file is read-only\n            msg = \"Forbidden: Resource is read-only\"\n            self.log.info(msg)\n            raise HTTPError(403, reason=msg)  # Forbidden\n\n        if self.isTocFilePath(self.filePath):\n            msg = \"Forbidden: Resource is read-only\"\n            self.log.info(msg)\n            raise HTTPError(403, reason=msg)  # Forbidden - TOC file\n\n        try:\n            with Hdf5db(self.filePath, app_logger=self.log) as db:\n                rootUUID = db.getUUIDByPath('/')\n                acl = db.getAcl(rootUUID, self.userid)\n                self.verifyAcl(acl, 'delete')  # throws exception is unauthorized\n        except IOError as e:\n            self.log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n            status = errNoToHttpStatus(e.errno)\n            raise HTTPError(status, reason=e.strerror)\n\n        try:\n            tocUtil.removeTocEntry(self.domain, self.filePath, userid=self.userid)\n        except IOError as ioe:\n            # This exception may happen if the file has been imported directly\n            # after toc creation\n            self.log.warn(\"IOError removing toc 
entry\")\n\n        try:\n            os.remove(self.filePath)\n        except IOError as ioe:\n            self.log.info(\n                \"IOError deleting HDF5 file: \" + str(ioe.errno) + \" \" + ioe.strerror)\n            raise HTTPError(\n                500, \"Unexpected error: unable to delete collection\")\n\n\nclass InfoHandler(RequestHandler):\n\n    def get(self):\n        log = logging.getLogger(\"h5serv\")\n        log.info('InfoHandler.get ' + self.request.host)\n        log.info('remote_ip: ' + self.request.remote_ip)\n\n        greeting = \"Welcome to h5serv!\"\n        about = \"h5serv is a webservice for HDF5 data\"\n        doc_href = \"http://h5serv.readthedocs.org\"\n        h5serv_version = \"0.2\"\n        response = Hdf5db.getVersionInfo()\n        response['name'] = \"h5serv\"\n        response['greeting'] = greeting\n        response['about'] = about\n        response['documentation'] = doc_href\n        response['h5serv_version'] = h5serv_version\n\n        accept_type = ''\n        if 'accept' in self.request.headers:\n            accept = self.request.headers['accept']\n            # just extract the first type and not worry about q values for now...\n            accept_values = accept.split(',')\n            accept_types = accept_values[0].split(';')\n            accept_type = accept_types[0]\n            # print 'accept_type:', accept_type\n        if accept_type == 'text/html':\n            self.set_header('Content-Type', 'text/html')\n            htmlText = \"<html><body><h1>\" + response['greeting'] + \"</h1>\"\n            htmlText += \"<h2>\" + response['about'] + \"</h2>\"\n            htmlText += \"<h2>Documentation: <a href=\" + response['documentation'] + \"> h5serv documentation </a></h2>\"\n            htmlText += \"<h2>server version: \" + response['h5serv_version'] + \"</h2>\"\n            htmlText += \"<h2>h5py version: \" + response['h5py_version'] + \"</h2>\"\n            htmlText += \"<h2>hdf5 version: \" + 
response['hdf5_version'] + \"</h2>\"\n            htmlText += \"</body></html>\"\n            self.write(htmlText)\n        else:\n            self.set_header('Content-Type', 'application/json')\n            self.write(json_encode(response))\n\n\ndef sig_handler(sig, frame):\n    log = logging.getLogger(\"h5serv\")\n    log.warning('Caught signal: %s', sig)\n    IOLoop.instance().add_callback(shutdown)\n\n\ndef shutdown():\n    log = logging.getLogger(\"h5serv\")\n    MAX_WAIT_SECONDS_BEFORE_SHUTDOWN = 2\n    log.info('Stopping http server')\n\n    log.info(\n        'Will shutdown in %s seconds ...', MAX_WAIT_SECONDS_BEFORE_SHUTDOWN)\n    io_loop = tornado.ioloop.IOLoop.instance()\n\n    deadline = time.time() + MAX_WAIT_SECONDS_BEFORE_SHUTDOWN\n\n    def stop_loop():\n        now = time.time()\n        if now < deadline:\n            io_loop.add_timeout(now + 1, stop_loop)\n        else:\n            io_loop.stop()\n            log.info('Shutdown')\n    stop_loop()\n\n    log.info(\"closing db\")\n\n\ndef make_app():\n    static_url = config.get('static_url')\n    static_path = config.get('static_path')\n    settings = {} \n    config_debug = config.get('debug')\n    if type(config_debug) is str:\n        if config_debug[0] in ('T', 't'):\n            settings[\"debug\"] = True\n        else:\n            settings[\"debug\"] = False\n    else:\n        settings[\"debug\"] = config_debug\n     \n    favicon_path = \"favicon.ico\"\n    print(\"favicon_path:\", favicon_path)\n    print('Static content in the path:' + static_path +\n          \" will be displayed via the url: \" + static_url)\n    print('isdebug:', settings['debug'])\n\n    app = Application([\n        url(r\"/datasets/.*/type\", DatatypeHandler),\n        url(r\"/datasets/.*/shape\", ShapeHandler),\n        url(r\"/datasets/.*/attributes/.*\", AttributeHandler),\n        url(r\"/datasets/.*/acls/.*\", AclHandler),\n        url(r\"/datasets/.*/acls\", AclHandler),\n        
url(r\"/groups/.*/attributes/.*\", AttributeHandler),\n        url(r\"/groups/.*/acls/.*\", AclHandler),\n        url(r\"/groups/.*/acls\", AclHandler),\n        url(r\"/datatypes/.*/attributes/.*\", AttributeHandler),\n        url(r\"/datasets/.*/attributes\", AttributeHandler),\n        url(r\"/groups/.*/attributes\", AttributeHandler),\n        url(r\"/datatypes/.*/attributes\", AttributeHandler),\n        url(r\"/datatypes/.*/acls/.*\", AclHandler),\n        url(r\"/datatypes/.*/acls\", AclHandler),\n        url(r\"/datatypes/.*\", TypeHandler),\n        url(r\"/datatypes/\", TypeHandler),\n        url(r\"/datatypes\\?.*\", TypeCollectionHandler),\n        url(r\"/datatypes\", TypeCollectionHandler),\n        url(r\"/datasets/.*/value\", ValueHandler),\n        url(r\"/datasets/.*/value\\?.*\", ValueHandler),\n        url(r\"/datasets/.*\", DatasetHandler),\n        url(r\"/datasets/\", DatasetHandler),\n        url(r\"/datasets\\?.*\", DatasetCollectionHandler),\n        url(r\"/datasets\", DatasetCollectionHandler),\n        url(r\"/groups/.*/links/.*\", LinkHandler),\n        url(r\"/groups/.*/links\\?.*\", LinkCollectionHandler),\n        url(r\"/groups/.*/links\", LinkCollectionHandler),\n        url(r\"/groups/\", GroupHandler),\n        url(r\"/groups/.*\", GroupHandler),\n        url(r\"/groups\\?.*\", GroupCollectionHandler),\n        url(r\"/groups\", GroupCollectionHandler),\n        url(r\"/info\", InfoHandler),\n        url(static_url, tornado.web.StaticFileHandler, {'path': static_path}),\n        url(r\"/(favicon\\.ico)\", tornado.web.StaticFileHandler, {'path': favicon_path}),\n        url(r\"/acls/.*\", AclHandler),\n        url(r\"/acls\", AclHandler),\n        url(r\"/\", RootHandler),\n        url(r\".*\", DefaultHandler)\n    ],  **settings)\n    return app\n\n# \n# update TOC when files are added via some out of process method \n# (e.g. 
scp to the server)\n#\ndef updateToc(filepath):\n    log = logging.getLogger(\"h5serv\")\n    log.info(\"updateToc(%s)\", filepath)\n    if os.name == 'nt':\n        filepath = filepath.replace('\\\\', '/')  # match HDF5 convention\n    hdf5_ext = config.get('hdf5_ext')  \n    if not filepath.endswith(hdf5_ext):\n        log.info(\"ignoring non-HDF5 file added to data directory\")\n        return\n    \n    if filepath.endswith(config.get('toc_name')):\n        log.info(\"ignore toc file creation\")\n        return \n     \n    base_domain = fileUtil.getDomain(filepath)\n    log.info(\"base domain: \" + base_domain)\n    \n    try:\n        if fileUtil.isFile(filepath): \n            tocUtil.addTocEntry(base_domain, filepath) \n        else:\n            tocUtil.removeTocEntry(base_domain, filepath)\n    except IOError as e:\n        log.info(\"periodic callback: unable to update toc\")\n        \n    \n        \n#\n# Background processing callback\n#\ndef periodicCallback():\n    # callback for background processing\n    log = logging.getLogger(\"h5serv\")\n    #log.info(\"periodicCallback\")\n    # check event queue\n    while not event_queue.empty():\n        item = event_queue.get()\n        log.info(\"process_queue, got: %s\", item)\n        # just add file events for now\n        updateToc(item)\n    \ndef main():\n    # create logger\n    log = logging.getLogger(\"h5serv\")\n    log_file = config.get(\"log_file\")\n    log_level = config.get(\"log_level\")\n\n    # add file handler if given in config\n    if log_file:\n        print(\"Using logfile: \", log_file)\n        # set daily rotating log\n        \n        handler = logging.handlers.TimedRotatingFileHandler(\n            log_file,\n            when=\"midnight\",\n            interval=1,\n            backupCount=0,\n            utc=True)\n  \n        # add formatter to handler\n        # create formatter\n        formatter = logging.Formatter(\n            
\"%(asctime)s:%(levelname)s:%(filename)s:%(lineno)d::%(message)s\")\n        handler.setFormatter(formatter)\n        # add handler to logger\n        log.addHandler(handler)\n    else:\n        print(\"No logfile\")\n        \n    # add default logger (to stdout)\n    handler = logging.StreamHandler(sys.stdout)\n    # create formatter\n    formatter = logging.Formatter(\n        \"%(levelname)s:%(filename)s:%(lineno)d::%(message)s\")\n    handler.setFormatter(formatter)\n    log.addHandler(handler)\n    log.propagate = False  # otherwise, we'll get repeated lines\n    \n    password_uri = \"none\"\n    x = \"password_uri\"\n    if x.upper() in os.environ:\n        password_uri = os.environ[x.upper()]\n    password_uri = config.get(\"password_uri\")\n    print(\"password_uri config:\", password_uri)   \n    \n    # log levels: ERROR, WARNING, INFO, DEBUG, or NOTSET\n    if not log_level or log_level == \"NOTSET\":\n        log.setLevel(logging.NOTSET)\n    if log_level == \"ERROR\":\n        print(\"Setting log level to: ERROR\")\n        log.setLevel(logging.ERROR)\n    elif log_level == \"WARNING\":\n        print(\"Setting log level to: WARNING\")\n        log.setLevel(logging.WARNING)\n    elif log_level == \"INFO\":\n        print(\"Setting log level to: INFO\")\n        log.setLevel(logging.INFO)\n    elif log_level == \"DEBUG\":\n        print(\"Setting log level to: DEBUG\")\n        log.setLevel(logging.DEBUG)\n    else:\n        print(\"No logging!\")\n        log.setLevel(logging.NOTSET)  \n    \n    log.info(\"log test\")\n    \n    app = make_app()\n    domain = config.get(\"domain\")\n    print(\"domain:\", domain)\n    \n    \n    ssl_cert = config.get('ssl_cert')\n    if ssl_cert:\n        print(\"ssl_cert:\", ssl_cert)\n    ssl_key = config.get('ssl_key')\n    if ssl_key:\n        print(\"ssl_key:\", ssl_key)\n    ssl_port = config.get('ssl_port')\n    if ssl_port:\n        print(\"ssl_port:\", ssl_port)\n    \n    #\n    # Setup listener for 
changes in the file system\n    #\n    data_path = config.get('datapath')\n    global event_queue\n    event_queue = Queue()\n    # implemented in h5watchdog.py\n    background_timeout = int(config.get(\"background_timeout\"))\n    if background_timeout:\n        print(\"Setting watchdog on: \", data_path)\n        h5observe(data_path, event_queue)\n        tornado.ioloop.PeriodicCallback(periodicCallback, 1000).start()\n        \n    # \n    # Insantiate auth class\n    #\n    global auth\n    auth = getAuthClient()\n     \n    if ssl_cert and op.isfile(ssl_cert) and ssl_key and op.isfile(ssl_key) and ssl_port:\n        ssl_cert_pwd = config.get('ssl_cert_pwd')\n        ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)\n        ssl_ctx.load_cert_chain(ssl_cert, keyfile=ssl_key, password=ssl_cert_pwd)\n        ssl_server = tornado.httpserver.HTTPServer(app, ssl_options=ssl_ctx)\n        ssl_server.listen(ssl_port)\n        msg = \"Running SSL on port: \" + str(ssl_port) + \" (SSL)\"\n    else:\n        server = tornado.httpserver.HTTPServer(app, xheaders=True)\n        port = int(config.get('port'))\n        server.listen(port)\n        msg = \"Starting event loop on port: \" + str(port)\n        \n\n    signal.signal(signal.SIGTERM, sig_handler)\n    signal.signal(signal.SIGINT, sig_handler)\n    log.info(\"INITIALIZING...\")\n    log.info(msg)\n    print(msg)\n\n    IOLoop.current().start()\n"
  },
  {
    "path": "h5serv/authFile.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\n\nimport six\n\nif six.PY3:\n    unicode = str\n    \nimport os.path as op\nimport time\nimport logging\nimport h5py\n\nfrom tornado.web import HTTPError\n\nfrom h5serv.passwordUtil import encrypt_pwd, to_string\n\ncache_expire_time = 10.0  # ten seconds\n\nclass AuthClient(object):\n\n    def __init__(self, filepath):\n        self.log = logging.getLogger(\"h5serv\")\n        self.log.info(\"AuthFile class init(\" + filepath + \")\")\n        self.filepath = filepath\n        self.username_cache = {}\n        self.userid_cache = {}\n         \n\n    \"\"\"\n    Password util helper functions\n    \"\"\"\n\n\n    def getUserInfo(self, user_name):\n        \"\"\"\n        getUserInfo: return user data\n        \"\"\"\n         \n        userid = None\n\n        if not user_name:\n            return None\n            \n        self.log.info(\"Auth.getUserInfo: [\" + to_string(user_name) + \"]\")\n        \n        if user_name in self.username_cache:\n            item = self.username_cache[user_name]\n            if item['timestamp'] - time.time() > cache_expire_time:\n                
self.log.info(\"Auth-cache expired\")\n                # delete the entry and re-fetch below\n                del self.username_cache[user_name]\n            else:\n                self.log.info(\"Auth-got cache value\")\n                data = item['data']\n                return data\n                    \n       \n        # verify file exists and is writable\n        if not op.isfile(self.filepath):\n            self.log.error(\"password file is missing\")\n            raise HTTPError(500, message=\"bad configuration\")\n\n        if not h5py.is_hdf5(self.filepath):\n            self.log.error(\"password file is invalid\")\n            raise HTTPError(500, message=\"bad configuration\")\n\n        with h5py.File(self.filepath, 'r') as f:\n            if user_name not in f.attrs:\n                return None\n            data = f.attrs[user_name]\n            \n        # add to cache \n        self.log.info(\"Auth - added to cache\")\n        item = {}\n        timestamp = time.time()\n        item['timestamp'] = timestamp\n        item['data'] = data\n        self.username_cache[user_name] = item\n        item = {}\n        item['timestamp'] = timestamp\n        item['username'] = user_name\n        userid = data['userid']\n        self.userid_cache[userid] = item\n        \n        return data\n\n\n    def getUserId(self, user_name):\n        \"\"\"\n        getUserId: get id for given user name\n        \"\"\"\n        self.log.info(\"Auth.getUserId: [\" + user_name + \"]\")\n        data = self.getUserInfo(user_name)\n        userid = None\n        if data is not None:\n            userid = data['userid']\n        return userid\n\n\n    def getUserName(self, userid):\n        \"\"\"\n        getUserName: return user name for given user id\n        #todo: may need to be optimized to support large number of users\n        \"\"\"\n\n        self.log.info(\"Auth.getUserName: [\" + str(userid) + \"]\")\n        \n        if userid in self.userid_cache:\n           
 item = self.userid_cache[userid]\n            if item['timestamp'] - time.time() > cache_expire_time:\n                # delete the entry and re-fetch below\n                self.log.info(\"Auth-cache expired\")\n                del self.userid_cache[userid]\n            else:\n                self.log.info(\"Auth-got cache value\")\n                username = item['username']\n                return to_string(username)\n         \n        # verify file exists and is writable\n        if not op.isfile(self.filepath):\n            self.log.error(\"password file is missing\")\n            raise HTTPError(500, message=\"bad configuration\")\n\n        if not h5py.is_hdf5(self.filepath):\n            self.log.error(\"password file is invalid\")\n            raise HTTPError(500, message=\"bad configuration\")\n\n        user_name = None\n        with h5py.File(self.filepath, 'r') as f:\n            for attr_name in f.attrs:\n                attr = f.attrs[attr_name]\n                if attr['userid'] == userid:\n                    user_name = to_string(attr_name)\n        \n        self.log.info(\"Auth-add to cachecache\")\n        item = {}\n        item['timestamp'] = time.time()\n        item['username'] = user_name\n        self.userid_cache[userid] = item\n        \n        return user_name\n\n\n    def validateUserPassword(self, user_name, password):\n        \"\"\"\n        validateUserPassword: verify user and password.\n            throws exception if not valid\n        \"\"\"\n\n        if not user_name:\n            self.log.info('validateUserPassword - null user')\n            raise HTTPError(401, message=\"provide user name and password\")\n        if not password:\n            self.log.info('isPasswordValid - null password')\n            raise HTTPError(401, message=\"provide  password\")\n        data = self.getUserInfo(user_name)\n\n        if data is None:\n            self.log.info(\"user not found\")\n            raise HTTPError(401, 
message=\"provide user and password\")\n\n        userid = None\n        if data['pwd'] == encrypt_pwd(password):\n            self.log.info(\"user  password validated\")\n            userid = data['userid']\n        else:\n            self.log.info(\"user password is not valid\")\n            raise HTTPError(401, message=\"invalid user name/password\")\n\n        return userid\n"
  },
  {
    "path": "h5serv/authMongo.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\n\nimport six\n\nif six.PY3:\n    unicode = str\n    \nimport os.path as op\nimport time\nimport hashlib\nimport logging\nfrom pymongo import MongoClient\n\nfrom tornado.web import HTTPError\n\nimport h5serv.config as config\nfrom h5serv.passwordUtil import encrypt_pwd, to_string, to_bytes\n\ncache_expire_time = 10.0  # ten seconds\n\nclass AuthClient(object):\n\n    def __init__(self, mongouri):\n        self.log = logging.getLogger(\"h5serv\")\n        self.log.info(\"AuthMongo class init(\" + mongouri + \")\")\n        self.client = MongoClient(mongouri)\n        db_name = config.get('mongo_dbname')\n        self.db = self.client[db_name]\n        self.username_cache = {}\n        self.userid_cache = {}\n         \n\n    \"\"\"\n    Password util helper functions\n    \"\"\"\n\n\n    def getUserInfo(self, user_name):\n        \"\"\"\n        getUserInfo: return user data\n        \"\"\"\n         \n        userid = None\n\n        if not user_name:\n            return None\n            \n        self.log.info(\"Auth.getUserInfo: [\" + to_string(user_name) + \"]\")\n        \n        
if user_name in self.username_cache:\n            item = self.username_cache[user_name]\n            if item['timestamp'] - time.time() > cache_expire_time:\n                self.log.info(\"Auth-cache expired\")\n                # delete the entry and re-fetch below\n                del self.username_cache[user_name]\n            else:\n                self.log.info(\"Auth-got cache value\")\n                data = item['data']\n                return data\n                    \n        # mongodb lookup\n        self.log.info(\"mongo query\")\n        users = self.db[\"users\"]\n        data = users.find_one({\"username\": to_string(user_name)})\n         \n        if data is None:\n            return None\n            \n        # add to cache \n        self.log.info(\"Auth - added to cache\")\n        item = {}\n        timestamp = time.time()\n        item['timestamp'] = timestamp\n        item['data'] = data\n        self.username_cache[user_name] = item\n        item = {}\n        item['timestamp'] = timestamp\n        item['username'] = user_name\n        userid = data['userid']\n        self.userid_cache[userid] = item\n        \n        return data\n\n\n    def getUserId(self, user_name):\n        \"\"\"\n        getUserId: get id for given user name\n        \"\"\"\n        self.log.info(\"Auth.getUserId: [\" + user_name + \"]\")\n        data = self.getUserInfo(user_name)\n        userid = None\n        if data is not None:\n            userid = data['userid']\n        return userid\n\n\n    def getUserName(self, userid):\n        \"\"\"\n        getUserName: return user name for given user id\n        #todo: may need to be optimized to support large number of users\n        \"\"\"\n\n        self.log.info(\"Auth.getUserName: [\" + str(userid) + \"]\")\n        \n        if userid in self.userid_cache:\n            item = self.userid_cache[userid]\n            if item['timestamp'] - time.time() > cache_expire_time:\n                # delete the entry and 
re-fetch below\n                self.log.info(\"Auth-cache expired\")\n                del self.userid_cache[userid]\n            else:\n                self.log.info(\"Auth-got cache value\")\n                username = item['username']\n                return to_string(username)\n        \n        \n        # mongodb lookup\n        users = self.db[\"users\"]\n        data = users.find_one({\"userid\": userid})\n        if data is None:\n            return None\n        user_name = data[\"username\"]\n             \n        self.log.info(\"Auth-add to cachecache\")\n        item = {}\n        item['timestamp'] = time.time()\n        item['username'] = user_name\n        self.userid_cache[userid] = item\n        \n        return user_name\n\n\n    def validateUserPassword(self, user_name, password):\n        \"\"\"\n        validateUserPassword: verify user and password.\n            throws exception if not valid\n        \"\"\"\n\n        if not user_name:\n            self.log.info('validateUserPassword - null user')\n            raise HTTPError(401, message=\"provide user name and password\")\n        if not password:\n            self.log.info('isPasswordValid - null password')\n            raise HTTPError(401, message=\"provide  password\")\n        data = self.getUserInfo(user_name)\n\n        if data is None:\n            self.log.info(\"user not found\")\n            raise HTTPError(401, message=\"provide user and password\")\n\n        userid = None\n        saved_password = to_bytes(data['password'])\n        if saved_password == encrypt_pwd(password):\n            self.log.info(\"user  password validated\")\n            userid = data['userid']\n        else:\n            self.log.info(\"user password is not valid\")\n            raise HTTPError(401, message=\"invalid user name/password\")\n\n        return userid\n"
  },
  {
    "path": "h5serv/config.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport os\nimport sys\n\n__all__ = ['get', 'update']\n\n_cfgDefault = {\n    'port':   5000,\n    'debug':  True,\n    'datapath': 'data',\n    'public_dir': ['public', 'test'],\n    'domain':  'hdfgroup.org',\n    'hdf5_ext': '.h5',\n    'toc_name': '.toc.h5',\n    'home_dir': 'home',\n    'ssl_port': 6050,\n    'ssl_cert': '',  # certs/data.hdfgroup.org.crt',  # add relative path to cert for SSL\n    'ssl_key':  '',  # certs/data.hdfgroup.org.key',  # add relative path to cert key for SSL\n    'ssl_cert_pwd': '',\n    'password_uri': 'util/admin/passwd.h5',\n    #'password_uri': 'mongodb://mongo:27017',\n    'mongo_dbname': 'hdfdevtest',\n    'static_url': r'/views/(.*)',\n    'static_path': 'static',\n    'cors_domain': '*',  # set to None to disallow CORS (cross-origin resource sharing)\n    'log_file': 'h5serv.log',\n    'log_level': 'INFO', # ERROR, WARNING, INFO, DEBUG, or NOTSET,\n    'background_timeout': 1000,  # (ms) set to 0 to disable background processing\n    'new_domain_policy': 'ANON',  # Ability to create domains (files) on serv: ANON - anonymous users ok, AUTH - only 
authenticated, NEVER - never allow \n    'allow_noauth': True  # Allow anonymous requests (i.e. without auth header)\n}\n\ndef get(x):\n    # see if there is a command-line override\n    option = '--'+x+'='\n    val = None\n    for i in range(1, len(sys.argv)):\n        #print i, sys.argv[i]\n        if sys.argv[i].startswith(option):\n            # found an override\n            arg = sys.argv[i]\n            val = arg[len(option):]  # return text after option string\n    # see if there are an environment variable override\n    if val is None and x.upper() in os.environ:\n        val = os.environ[x.upper()]\n    # if no command line or env override, just get the cfg value\n    if val is None and x in _cfgDefault:\n        val = _cfgDefault[x]\n    if isinstance(val, str):\n        # convert True/False strings to booleans\n        if val.upper() in (\"T\", \"TRUE\"):\n            val = True \n        elif val.upper() in (\"F\", \"FALSE\"):\n            val = False  \n    return val\n\ndef update(d):\n    _cfgDefault.update(d)\n"
  },
  {
    "path": "h5serv/fileUtil.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\n\"\"\"File util helper functions (primarily from mapping files to domains\nand vice-versa).\n\n\"\"\"\n\nimport os\nimport os.path as op\nimport logging\n\nfrom tornado.web import HTTPError\n\nfrom h5py import is_hdf5\nimport h5serv.config as config\nfrom h5serv.passwordUtil import getAuthClient\n\ndef getFileModCreateTimes(filePath):\n    (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(filePath)\n    return (mtime, ctime)\n\n\ndef isIPAddress(s):\n    \"\"\"Return True if the string looks like an IP address:\n        n.n.n.n where n is between 0 and 255 \"\"\"\n    \n    parts = s.split('.')\n    \n    if len(parts) == 1:\n        # treat as IP address for names like \"localhost\" or other one-word names\n        # that may get mapped to IP address via /etc/hosts entries\n        return True\n    if len(parts) != 4:\n        return False\n    for part in parts:\n        try:\n            n = int(part)\n            if n < 0 or n > 255:\n                return False\n        except ValueError:\n            return False\n    return True\n    \n# Convert windows 
style path names to posxipaths\n#\n# todo: any edge cases this doesn't handle?\ndef posixpath(filepath):\n     \n     if os.name == 'nt':\n        pp = filepath.replace('\\\\', '/')\n     else:\n        pp = filepath\n     return pp\n     \n# Join to pathnames and convert to posix style\n#\n# todo: any edge cases this doesn't handle?\ndef join(path, paths):\n     pp = op.join(path, paths)\n     if os.name == 'nt':\n        pp = posixpath(pp)\n      \n     return pp\n\ndef getFilePath(host_value, auth=None):\n    # logging.info('getFilePath[' + host_value + ']')\n    # strip off port specifier (if present)\n    npos = host_value.rfind(':')\n    if npos > 0:\n        host = host_value[:npos]\n    else:\n        host = host_value\n\n    topdomain = config.get('domain')\n    \n    # check to see if this is an ip address\n    if isIPAddress(host):\n        host = topdomain  # use topdomain\n\n    if host.lower() == topdomain:\n        # if host is the same as topdomain, return toc path\n        # filePath = getTocFilePath()\n        filePath = config.get('datapath')\n        filePath = join(filePath, config.get('toc_name') )\n        return filePath\n    print(\"host:\", host, \"topdomain:\", topdomain)\n    if len(host) <= len(topdomain) or host[-len(topdomain):].lower() != topdomain:\n        msg = \"top-level domain is not valid\"\n        print(msg)\n        raise HTTPError(403, message=msg)\n\n    if host[-(len(topdomain) + 1)] != '.':\n        # there needs to be a dot separator\n        raise HTTPError(400, message='domain name is not valid')\n\n    host = host[:-(len(topdomain)+1)]   # strip off top domain part\n\n    if len(host) == 0 or host[0] == '.' or host[-1] == '.':\n        # needs a least one character (which can't be '.', or have '.' 
as first or last char)\n        raise HTTPError(400, message='domain name is not valid')\n\n    dns_path = host.split('.')\n    dns_path.reverse()  # flip to filesystem ordering\n    filePath = config.get('datapath')\n    num_parts = 0\n    for field in dns_path:      \n        if len(field) == 0:   \n            raise HTTPError(400)  # Bad syntax\n        \n        filePath = join(filePath, field)\n        num_parts += 1\n\n    # check to see if this is the user's home domain\n    if num_parts == 2 and dns_path[0] == config.get('home_dir'):\n        if auth is None:\n            auth = getAuthClient\n        user_info = auth.getUserInfo(dns_path[1])\n        if user_info is None:\n            raise HTTPError(404)  # not found\n        makeDirs(filePath)  # add user directory if it doesn't exist\n        filePath = join(filePath, config.get('toc_name') )\n    else:    \n        filePath += config.get('hdf5_ext')   # add extension\n     \n    #print('getFilePath[' + host + '] -> \"' + filePath + '\"')\n\n    return filePath\n\n# \n# Return filepath to TOC file - either the public toc file or the per\n# user TOC file (if the dns path includes the \"home\" directory).\n# For the later, method will throw 404 if the user is not registered.\n#    \ndef getTocFilePathForDomain(host_value, auth=None):\n    \"\"\" Return toc file path for given domain value.\n        Will return path \"../data/.toc.h5\" for public domains or\n        \"../data/home/<user>/.toc.h5\" for user domains.\n    \"\"\"\n    # logging.info('getFilePath[' + host_value + ']')\n    # strip off port specifier (if present)\n    npos = host_value.rfind(':')\n    if npos > 0:\n        host = host_value[:npos]\n    else:\n        host = host_value\n\n    topdomain = config.get('domain')\n\n    # check to see if this is an ip address\n    if isIPAddress(host):\n        host = topdomain  # use topdomain\n\n    if host.lower() == topdomain:\n        # if host is the same as topdomain, return toc path\n        
# filePath = getTocFilePath()\n        filePath = config.get('datapath')\n        filePath = join(filePath, config.get('toc_name') )\n        return filePath\n\n    if len(host) <= len(topdomain) or host[-len(topdomain):].lower() != topdomain:\n        host = topdomain  # use topdomain\n    else:\n        if host[-(len(topdomain) + 1)] != '.':\n            # there needs to be a dot separator\n            raise HTTPError(400, message='domain name is not valid')\n\n        host = host[:-(len(topdomain)+1)]   # strip off top domain part\n\n        if len(host) == 0 or host[0] == '.' or host[-1] == '.':\n            # needs a least one character (which can't be '.', or have '.' as first or last char)\n            raise HTTPError(400, message='domain name is not valid')\n\n    dns_path = host.split('.')\n    dns_path.reverse()  # flip to filesystem ordering\n    filePath = config.get('datapath')\n    \n    if dns_path[0] == config.get('home_dir'):\n        filePath = join(filePath, config.get('home_dir'))\n        filePath = join(filePath, dns_path[1])\n        if auth is None:\n            auth = getAuthClient()\n        user_info = auth.getUserInfo(dns_path[1])\n        if user_info is None:\n            raise HTTPError(404)  # not found\n        makeDirs(filePath)  # add user directory if it doesn't exist\n        filePath = join(filePath, config.get('toc_name'))\n        #print(\"return user toc filepath\")\n    else:\n        # not home dir, just return top-level toc\n        filePath = join(filePath, config.get('toc_name'))\n        #print(\"return default toc filepath\")\n\n    return filePath\n\n#\n# If the filePath passed references the user's home directory, return a path relative \n# to the base location of the user's toc file.  
Otherwise returns the path relative to\n# the base data directory\n#\ndef getUserFilePath(file_path):\n    data_path = config.get('datapath')\n    file_path = file_path[len(data_path):]  # strip off base data path\n    if len(file_path) > 1 and file_path[0] == '/':\n        file_path = file_path[1:]  # don't include first slash if preseent- messes up the split\n    path_names = file_path.split('/')\n     \n    if path_names[0] == config.get('home_dir') and len(path_names) > 1:\n        # return a path relative to user's base dir\n        file_path = '/'  \n        path_names = path_names[2:]  # skip home, userid\n        for path_name in path_names:\n            file_path = op.join(file_path, path_name)\n        \n    return file_path\n \ndef getDomain(file_path, base_domain=None):\n    # Get domain given a file path\n    \n    data_path = op.normpath(config.get('datapath'))  # base path for data directory\n    data_path = posixpath(data_path)\n    file_path = posixpath(file_path)\n    hdf5_ext = config.get(\"hdf5_ext\")\n    if op.isabs(file_path):\n        # compare with absolute path if we're given an absolute path\n        data_path = posixpath(op.abspath(data_path))\n    \n    if file_path == data_path:\n        return config.get('domain')\n            \n    if file_path.endswith(hdf5_ext):\n        domain = op.basename(file_path)[:-(len(hdf5_ext))]\n    else:\n        domain = op.basename(file_path)\n\n    # replace dots with %2E in basename\n    domain = domain.replace('.', '%2E')\n\n    dirname = op.dirname(file_path)\n    \n    while len(dirname) > 1 and dirname != data_path:\n        domain += '.'\n        domain += op.basename(dirname)\n        if len(op.dirname(dirname)) >= len(dirname):\n            break\n        dirname = op.dirname(dirname)\n     \n    domain += '.'\n    if base_domain:\n        domain += base_domain\n    else:\n        domain += config.get('domain')\n\n    return domain\n\ndef verifyFile(filePath, writable=False):\n    \"\"\" 
verify given file exists and is an HDF5 file\n    \"\"\"\n    log = logging.getLogger(\"h5serv\")\n    log.info(\"verifyFile('\" + filePath + \"', \" + str(writable) + \")\")\n    if not op.isfile(filePath):\n        log.info(\"not a file\")\n        raise HTTPError(404)  # not found\n    if not is_hdf5(filePath):\n        log.info('this is not a hdf5 file!')\n        raise HTTPError(404)\n    if writable and not os.access(filePath, os.W_OK):\n        log.warning('attempting update of read-only file')\n        raise HTTPError(403)\n        \ndef isFile(filePath):\n    \"\"\" verify given file exists and is an HDF5 file\n    \"\"\"\n    if not op.isfile(filePath):\n        return False\n    if not is_hdf5(filePath):\n        # logging.warning('this is not a hdf5 file!')\n        return False\n    return True\n     \n\n\ndef makeDirs(filePath):\n    # Make any directories along path as needed\n    if len(filePath) == 0 or op.isdir(filePath):\n        return\n    dirname = op.dirname(filePath)\n\n    if len(dirname) >= len(filePath):\n        return\n    makeDirs(dirname)  # recursive call\n    os.mkdir(filePath)  # should succeed since parent directory is created\n"
  },
  {
    "path": "h5serv/h5watchdog.py",
    "content": "import sys\nimport time\nimport os.path as op\nimport logging\n\nfrom watchdog.observers import Observer\nfrom watchdog.events import FileSystemEventHandler\n\nclass H5EventHandler(FileSystemEventHandler):\n    \"\"\"Put create  events inteo queue.\"\"\"\n\n    def __init__(self, event_queue):\n        self.log = logging.getLogger(\"h5serv\")\n        self.event_queue = event_queue\n        \n    def on_moved(self, event):\n        super(H5EventHandler, self).on_moved(event)\n\n        what = 'directory' if event.is_directory else 'file'\n        self.log.info(\"H5EventHandler -- Moved %s: from %s to %s\", what, event.src_path,\n                     event.dest_path)\n\n    def on_created(self, event):\n        super(H5EventHandler, self).on_created(event)\n\n        what = 'directory' if event.is_directory else 'file'\n        self.log.info(\"H5EventHandler -- Created %s: %s\", what, event.src_path)\n        \n        # ignore directories\n        if not op.isdir(event.src_path):\n            self.event_queue.put(event.src_path)\n\n    def on_deleted(self, event):\n        super(H5EventHandler, self).on_deleted(event)\n\n        what = 'directory' if event.is_directory else 'file'\n        self.log.info(\"H5EventHandler -- Deleted %s: %s\", what, event.src_path)\n        if not op.isdir(event.src_path):\n            self.event_queue.put(event.src_path)\n\n    def on_modified(self, event):\n        super(H5EventHandler, self).on_modified(event)\n\n        what = 'directory' if event.is_directory else 'file'\n        self.log.info(\"H5EventHandler -- Modified %s: %s\", what, event.src_path)\n\n#\n# Watch file system at location data_path and add any file create events to the event_queue\n# Call at application startup\n#\ndef h5observe(data_path, event_queue):\n    event_handler = H5EventHandler(event_queue)\n    observer = Observer()\n    observer.schedule(event_handler, data_path, recursive=True)\n    observer.start()"
  },
  {
    "path": "h5serv/httpErrorUtil.py",
    "content": "import errno\n\n\ndef errNoToHttpStatus(error_code):\n    \"\"\"Convert IOError error numbers to HTTP equivalent status codes.\"\"\"\n    httpStatus = 500\n    if error_code == errno.EINVAL:  # formerly EBADMSG\n        httpStatus = 400  # bad request\n    elif error_code == errno.EACCES:\n        httpStatus = 401   # unauthorized\n    elif error_code == errno.EPERM:\n        httpStatus = 403  # forbidden\n    elif error_code == errno.ENXIO:\n        httpStatus = 404  # Not Found\n    elif error_code == errno.EEXIST:\n        httpStatus = 409   # conflict\n    elif error_code == errno.ENOENT:  # formerly EIDRM\n        httpStatus = 410   # Gone\n    elif error_code == errno.EIO:\n        httpStatus = 500   # Internal Error\n    elif error_code == errno.ENOSYS:\n        httpStatus = 501   # Not implemented\n\n    return httpStatus\n"
  },
  {
    "path": "h5serv/passwordUtil.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\n\nimport six\n\nif six.PY3:\n    unicode = str    \n \nimport hashlib\nimport logging\nimport h5serv.config as config\n \n\n\"\"\"\n Password util helper functions\n\"\"\"\n\n    \ndef to_string(data):\n    if six.PY3:           \n        if type(data) is bytes:\n            return data.decode('utf-8')\n        else:\n            return data\n    else:\n        return data\n        \ndef to_bytes(data):\n    if six.PY3:\n        if type(data) is unicode:\n            return data.encode('utf-8')\n        else:\n            return data\n    else:\n        return data\n        \ndef encrypt_pwd(passwd):\n    \"\"\"\n     One way password encryptyion\n    \"\"\"\n    encrypted = hashlib.sha224(passwd).hexdigest()\n    \n    return to_bytes(encrypted)\n    \ndef getAuthClient():\n    log = logging.getLogger(\"h5serv\")\n    log.info(\"getAuthClient\")\n    password_uri = config.get(\"password_uri\")\n    log.info(\"password_uri:\" + password_uri)\n     \n    auth = None\n    if password_uri.startswith(\"mongo\"):\n        # use mongodb user db\n        from h5serv.authMongo import 
AuthClient\n        auth = AuthClient(password_uri)\n    else:\n        # use HDF5 file-based user db\n        from h5serv.authFile import AuthClient\n        auth = AuthClient(password_uri)\n        \n    return auth\n"
  },
  {
    "path": "h5serv/timeUtil.py",
    "content": "from datetime import datetime\nimport pytz\n\n\ndef unixTimeToUTC(timestamp):\n    \"\"\"Convert unix timestamp (seconds since Jan 1, 1970, to ISO-8601\n    compatible UTC time string.\n\n    \"\"\"\n    utc = pytz.utc\n    dtTime = datetime.fromtimestamp(int(timestamp), utc)\n    iso_str = dtTime.isoformat()\n    # isoformat returns a string like this:\n    # '2014-10-30T04:25:21+00:00'\n    # strip off the '+00:00' and replace\n    # with 'Z' (both are ISO-8601 compatible)\n    npos = iso_str.rfind('+')\n    iso_z = iso_str[:npos] + 'Z'\n    return iso_z\n"
  },
  {
    "path": "h5serv/tocUtil.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport os\nimport os.path as op\nimport re\nfrom tornado.web import HTTPError\nimport logging\n\nimport h5py\nimport h5serv.config as config\nimport h5serv.fileUtil as fileUtil\nfrom h5json import Hdf5db\n\n\"\"\"\n TOC (Table of contents) util helper functions\n Creates a directory listing in the form of an HDF5 file\n\"\"\"\n\n\ndef getTocFilePath(user=None):\n    datapath = config.get('datapath')\n    if user is None:\n        #print(\"get default toc\")\n        toc_file_path = fileUtil.join(datapath, config.get('toc_name'))\n    else:\n        #print(\"get user toc\")\n        toc_file_path = fileUtil.join(datapath, config.get('home_dir'))\n        toc_file_path = fileUtil.join(toc_file_path, config.get('toc_name'))\n \n    return toc_file_path\n\n\ndef isTocFilePath(filePath):\n    datapath = config.get('datapath')\n    toc_file_path = fileUtil.join(datapath, config.get('toc_name'))\n    if filePath == toc_file_path:\n        isTocFilePath = True\n    else:\n        isTocFilePath = False\n    return isTocFilePath\n    \n    \n\n\"\"\"\nhelper - get group uuid of hardlink, or None 
if no link\n\"\"\"\ndef getSubgroupId(db, group_uuid, link_name):\n    #print(\"link_name:\", link_name)    \n    subgroup_uuid = None\n    try:\n        item = db.getLinkItemByUuid(group_uuid, link_name)\n        if item['class'] != 'H5L_TYPE_HARD':\n            return None\n        if item['collection'] != 'groups':\n            return None\n        subgroup_uuid = item['id']\n    except IOError:\n        # link_name doesn't exist, return None\n        pass\n\n    return subgroup_uuid\n        \n\"\"\"\nUpdate toc with new filename\n\"\"\"\ndef addTocEntry(domain, filePath,  userid=None):\n    \"\"\"\n    Helper method - update TOC when a domain is created\n    If userid is provide, the acl will be checked to ensure userid has permissions\n    to modify the object.\n    \"\"\"\n    log = logging.getLogger(\"h5serv\")\n    hdf5_ext = config.get('hdf5_ext')\n    dataPath = config.get('datapath')\n    log.info(\"addTocEntry - domain: \" + domain + \" filePath: \" + filePath)\n    if not filePath.startswith(dataPath):\n        log.error(\"unexpected filepath: \" + filePath)\n        raise HTTPError(500)\n    filePath = fileUtil.getUserFilePath(filePath)   \n    tocFile = fileUtil.getTocFilePathForDomain(domain)\n    log.info(\"tocFile: \" + tocFile)\n    acl = None\n\n    try:         \n        with Hdf5db(tocFile, app_logger=log) as db:\n            group_uuid = db.getUUIDByPath('/')\n            pathNames = filePath.split('/')\n            for linkName in pathNames:\n                if not linkName:\n                    continue\n                if linkName.endswith(hdf5_ext):\n                    linkName = linkName[:-(len(hdf5_ext))]\n                    print(\"linkName:\", linkName)\n                    if userid is not None:\n                        acl = db.getAcl(group_uuid, userid)\n                        if not acl['create']:\n                            self.log.info(\"unauthorized access to group:\" + group_uuid)\n                            raise 
IOError(errno.EACCES)  # unauthorized\n                    log.info(\"createExternalLink -- uuid %s, domain: %s, linkName: %s\", group_uuid, domain, linkName)\n                    db.createExternalLink(group_uuid, domain, '/', linkName)\n                else:\n                    subgroup_uuid = getSubgroupId(db, group_uuid, linkName)\n                    if subgroup_uuid is None:\n                        if userid is not None:\n                            acl = db.getAcl(group_uuid, userid)\n                            if not acl['create']:\n                                self.log.info(\"unauthorized access to group:\" + group_uuid)\n                                raise IOError(errno.EACCES)  # unauthorized\n                        # create subgroup and link to parent group\n                        subgroup_uuid = db.createGroup()\n                        # link the new group\n                        log.info(\"linkObject -- uuid: %s, subgroup_uuid: %s, linkName: %s\", group_uuid, subgroup_uuid, linkName)\n                        db.linkObject(group_uuid, subgroup_uuid, linkName)\n                    group_uuid = subgroup_uuid \n\n    except IOError as e:\n        log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n        raise e\n\n\"\"\"\nHelper method - update TOC when a domain is deleted\n\"\"\"\ndef removeTocEntry(domain, filePath, userid=None):\n    log = logging.getLogger(\"h5serv\")\n    hdf5_ext = config.get('hdf5_ext')\n    dataPath = config.get('datapath')\n\n    if not filePath.startswith(dataPath):\n        log.error(\"unexpected filepath: \" + filePath)\n        raise HTTPError(500)\n    filePath = fileUtil.getUserFilePath(filePath)   \n    tocFile = fileUtil.getTocFilePathForDomain(domain)\n    log.info(\"removeTocEntry - domain: \" + domain + \" filePath: \" + filePath + \" tocfile: \" + tocFile)\n    pathNames = filePath.split('/')\n    log.info(\"pathNames: \" + str(pathNames))\n\n    try:\n        with Hdf5db(tocFile, app_logger=log) 
as db:\n            group_uuid = db.getUUIDByPath('/')\n            log.info(\"group_uuid:\" + group_uuid)\n                           \n            for linkName in pathNames:\n                if not linkName:\n                    continue\n                log.info(\"linkName:\" + linkName)\n                if linkName.endswith(hdf5_ext):\n                    linkName = linkName[:-(len(hdf5_ext))]\n                    log.info(\"unklink \" + group_uuid + \", \" + linkName)\n                    db.unlinkItem(group_uuid, linkName)\n                else:\n                    subgroup_uuid = getSubgroupId(db, group_uuid, linkName)\n                    if subgroup_uuid is None:\n                        msg = \"Didn't find expected subgroup: \" + group_uuid\n                        log.error(msg)\n                        raise HTTPError(500, reason=msg)\n                    group_uuid = subgroup_uuid\n\n    except IOError as e:\n        log.info(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n        raise e\n\n\"\"\"\nCreate a populate TOC file if not present\n\"\"\"            \ndef createTocFile(datapath):\n    log = logging.getLogger(\"h5serv\")\n    log.info(\"createTocFile(\" + datapath + \")\")\n    data_dir = fileUtil.posixpath(op.normpath(config.get('datapath')))\n    home_dir = fileUtil.join(data_dir, config.get(\"home_dir\"))\n    log.info(\"home dir: \" + home_dir)\n    if datapath.startswith(home_dir):\n        log.info(\"user toc\")\n        user_toc = True\n    else:\n        log.info(\"system toc\")\n        user_toc = False\n    \n    if datapath.endswith(config.get('toc_name')):\n        toc_dir = fileUtil.posixpath(op.normpath(op.dirname(datapath)))\n        toc_file = datapath\n    else:\n        toc_dir = fileUtil.posixpath(op.normpath(datapath))\n        toc_file = fileUtil.join(toc_dir, config.get(\"toc_name\"))\n   \n           \n    log.info(\"toc_dir:[\" + toc_dir + \"]\")\n    log.info(\"data_dir:[\" + data_dir + \"]\") \n    
log.info(\"home_dir:[\" + home_dir + \"]\")\n    log.info(\"check toc with path: \" + toc_file)    \n    if op.exists(toc_file):\n        msg = \"toc file already exists\"\n        log.warn(msg)\n        raise IOError(msg)\n        \n    base_domain = fileUtil.getDomain(toc_dir)\n    log.info(\"base domain: \" + base_domain)\n    \n    #if os.name == 'nt':\n    #    toc_dir = toc_dir.replace('\\\\', '/')  # use unix style to map to HDF5 convention\n    \n    hdf5_ext = config.get('hdf5_ext')  \n    \n    f = h5py.File(toc_file, 'w')\n     \n    for root, subdirs, files in os.walk(toc_dir):\n        root = fileUtil.posixpath(root)\n        log.info( \"toc walk: \" + root)\n        \n        if toc_dir == data_dir:\n            log.info(fileUtil.join(toc_dir, home_dir))\n            if root.startswith(home_dir):\n                log.info(\"skipping home dir\")\n                continue\n         \n        grppath = root[len(toc_dir):]\n        if not grppath:\n            grppath = '/'\n        if grppath[-1] == '.':\n            grppath = grppath[:-1]\n        log.info(\"grppath: \" + grppath)\n         \n        if os.name == 'nt':\n            grppath = grppath.replace('\\\\', '/')  # match HDF5 convention\n        grp = None\n        if grppath == '/':\n            grp = f['/']  # use root group\n         \n        domainpath = fileUtil.getDomain(grppath, base_domain=base_domain)\n        log.info(\"grppath: \" + grppath)\n        log.info(\"base_domain: \" + base_domain)\n        log.info(\"domainpath: \" + domainpath)\n        for filename in os.listdir(root):\n            log.info(\"walk, file: \" + filename)\n            if filename[0] == '.':\n                log.info(\"skip hidden\")\n                continue  # skip 'hidden' files\n            \n            filepath = fileUtil.join(root, filename)\n            log.info(\"walk, filepath: \" + filepath)\n            link_target = '/'\n            \n            if op.islink(filepath):\n                
log.info(\"symlink: \" + filepath)\n                # todo - quick hack for now to set a symlink with to sub-folder of data dir\n                # todo - revamp to use os.readlink and do the proper thing with the link value\n                filedomain = config.get('domain')\n                link_target += filename\n                log.info(\"setting symbolic link domainpath to: \" + filedomain + \" target: /\" + filename)\n            else:\n                ext_len = len(hdf5_ext)\n                if len(filename) < ext_len or filename[-ext_len:] != hdf5_ext:\n                    log.info(\"skip non-hdf5 extension\")\n                    continue\n                if not h5py.is_hdf5(filepath):\n                    log.info(\"skip non-hdf5 file\")\n                    continue\n                filename = filename[:-ext_len]\n                # replace any dots with '%2E' to disambiguate from domain seperators\n                filename_encoded = filename.replace('.', '%2E')\n                log.info(\"filename (noext): \" + filename)\n                if domainpath[0] == '.':        \n                    filedomain = filename_encoded + domainpath\n                else:\n                    filedomain = filename_encoded + '.' 
+ domainpath\n                    \n            # create the grp at grppath if it doesn't exist\n            if not grp:\n                log.info(\"tocfile - create_group: \" + grppath)\n                grp = f.create_group(grppath)           \n                \n            # verify that we can convert the domain back to a file path\n            log.info(\"filedomain: \" + filedomain)\n            try:\n                fileUtil.getFilePath(filedomain)\n                # ok - add the external link\n                log.info(\"tocFile - ExternalLink: \" + domainpath)\n                grp[filename] = h5py.ExternalLink(filedomain, link_target)\n            except HTTPError:\n                log.info(\"file path: [\" + filepath + \"] is not valid dns name, ignoring\")\n"
  },
  {
    "path": "setup.py",
    "content": "\"\"\"A setuptools based setup module for h5serv.\n\nSee:\nhttps://packaging.python.org/en/latest/distributing.html\nhttps://github.com/pypa/sampleproject\n\"\"\"\n\n# create universal wheel:  python setup.py bdist_wheel --universal\n# after install the wheel, run server with:\n#$ python h5serv --log_file=<log_dir>/h5serv.log --datapath=<data_dir>\n# where log_dir is the full path to the desired directory for log file output,\n# and datapath is full path to the desired data directory.\n#\n\n# Always prefer setuptools over distutils\nfrom setuptools import setup, find_packages\n# To use a consistent encoding\nfrom codecs import open\nfrom os import path\n\nhere = path.abspath(path.dirname(__file__))\n\n# Get the long description from the README file\nwith open(path.join(here, 'README.rst'), encoding='utf-8') as f:\n    long_description = f.read()\n\nsetup(\n    name='h5serv',\n\n    # Versions should comply with PEP440.  For a discussion on single-sourcing\n    # the version across setup.py and the project code, see\n    # https://packaging.python.org/en/latest/single_source_version.html\n    version='1.2.0',\n\n    description='HDF REST Server',\n    long_description=long_description,\n\n    # The project's main homepage.\n    url='https://github.com/HDFGroup/h5serv',\n\n    # Author details\n    author='John Readey',\n    author_email='jreadey@hdfgroup.org',\n\n    # Choose your license\n    license='BSD',\n\n    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers\n    classifiers=[\n        # How mature is this project? 
Common values are\n        #   3 - Alpha\n        #   4 - Beta\n        #   5 - Production/Stable\n        'Development Status :: 5 - Production/Stable',\n\n        # Indicate who your project is intended for\n        'Intended Audience :: Developers',\n        'Topic :: Software Development :: Build Tools',\n\n        # Pick your license as you wish (should match \"license\" above)\n        'License :: OSI Approved :: BSD License',\n\n        # Specify the Python versions you support here. In particular, ensure\n        # that you indicate whether you support Python 2, Python 3 or both.\n        'Programming Language :: Python :: 2',\n        'Programming Language :: Python :: 2.7',\n        'Programming Language :: Python :: 3',\n    ],\n\n    # What does your project relate to?\n    keywords='json hdf5 numpy array data',\n\n    # You can just specify the packages manually here if your project is\n    # simple. Or you can use find_packages().\n    packages=('h5serv',),\n\n    # Alternatively, if you want to distribute just a my_module.py, uncomment\n    # this:\n    #   py_modules=[\"my_module\"],\n\n    # List run-time dependencies here.  These will be installed by pip when\n    # your project is installed. For an analysis of \"install_requires\" vs pip's\n    # requirements files see:\n    # https://packaging.python.org/en/latest/requirements.html\n    install_requires=['numpy>=1.10.4', 'h5py>=2.5', 'h5json>=1.1', \n        'watchdog>=0.8.3', 'tornado>=4.2.1', 'requests>=2.10.0', \n        'pyzmq>=14.7.0', 'pytz'],\n\n    # List additional groups of dependencies here (e.g. development\n    # dependencies). You can install these using the following syntax,\n    # for example:\n    # $ pip install -e .[dev,test]\n    extras_require={\n        'dev': ['check-manifest'],\n        'test': ['coverage'],\n    },\n\n    # If there are data files included in your packages that need to be\n    # installed, specify them here.  
If using Python 2.6 or less, then these\n    # have to be included in MANIFEST.in as well.\n    package_data={\n        'h5serv': ['data/*',]\n    },\n\n    # Although 'package_data' is the preferred approach, in some case you may\n    # need to place data files outside of your packages. See:\n    # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa\n    # In this case, 'data_file' will be installed into '<sys.prefix>/my_data'\n    #data_files=[('my_data', ['data/data_file'])],\n\n    # To provide executable scripts, use entry points in preference to the\n    # \"scripts\" keyword. Entry points provide cross-platform support and allow\n    # pip to create the appropriate form of executable for the target platform.\n    entry_points={\n        'console_scripts': [\n            'h5serv = h5serv.app:main'\n        ]\n    },\n    #scripts=['server/app.py', 'util/admin/import_file.py'],\n)\n"
  },
  {
    "path": "test/aws/config.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nfrom h5serv.config import *\n\ncfg = {\n    'server': 'data.hdfgroup.org',\n    'port':   7258,  # HTTPS port\n    'domain':   'test.data.hdfgroup.org',\n    'hdf5_ext': '.h5'\n}\nupdate(cfg)\n"
  },
  {
    "path": "test/aws/roottest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport unittest\nimport json\nimport base64\n\nclass RootTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(RootTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'https://' + config.get('server') + ':' + str(config.get('port'))\n        #self.endpoint = \"https://data.hdfgroup.org:7258\"\n    \n    def testGetInfo(self):\n    \n        req = self.endpoint + \"/info\"\n        rsp = requests.get(req, verify=False)\n        self.failUnlessEqual(rsp.status_code, 200)\n        self.failUnlessEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('h5serv_version' in rspJson)\n            \n    def testGetDomain(self):\n        domain = 'tall.' 
+ config.get('domain')  \n        \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers, verify=False)\n        self.failUnlessEqual(rsp.status_code, 200)\n        self.failUnlessEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n         \n        \n        \nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "test/integ/acltest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\nimport base64\n\nno_perm = { 'read': False, 'create': False, 'update': False, \n             'delete': False, 'readACL': False, 'updateACL': False }\nreadonly_perm = { 'read': True, 'create': False, 'update': False, \n             'delete': False, 'readACL': False, 'updateACL': False }\nallaccess_perm = { 'read': True, 'create': True, 'update': True, \n             'delete': True, 'readACL': True, 'updateACL': True }\n\nclass AclTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(AclTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))\n        self.domain = None  \n        self.user1 = {'username':'test_user1', 'password':'test'}\n        self.user2 = {'username':'test_user2', 'password':'test'}\n         \n        \n    def getHeaders(self, user=None):\n        headers = {'host': self.domain}\n        if user is not None:\n            # if user is supplied, add the auth header\n            
headers['Authorization'] = helper.getAuthString(user['username'], user['password'])\n        return headers\n              \n        \n    def getUUIDByPath(self, path):\n        username = self.user1['username']\n        password = self.user1['password']\n        \n        obj_uuid = helper.getUUIDByPath(self.domain, path, user=username, password=password)\n        return obj_uuid\n        \n        \n    def setupAcls(self):\n         \n          \n        rootUUID = self.getUUIDByPath('/')\n        self.assertTrue(helper.validateId(rootUUID))\n        \n        headers = self.getHeaders()\n           \n        # set allaccess acl for test_user2\n        payload = allaccess_perm \n        req = self.endpoint + \"/acls/test_user2\"\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        \n        if rsp.status_code == 401:\n            # acl is already setup by another test, return\n            return\n            \n        self.assertEqual(rsp.status_code, 201)      \n        \n        # set read-only acl for test_user1\n        payload =  readonly_perm \n        req = self.endpoint + \"/acls/test_user1\"\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # set default acl for domain\n        payload =  no_perm \n        req = self.endpoint + \"/acls/default\"\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201) \n        \n        # try - again - should report authorizationis required now\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 401) \n         \n           \n            \n    def testGetDomainDefaultAcls(self):\n        self.domain = 'tall.' 
+ config.get('domain')   \n        req = self.endpoint + \"/acls\"\n        headers = self.getHeaders()\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('acls' in rspJson)\n        \n    def testGetDomainAcls(self):\n        self.domain = 'tall_acl.' + config.get('domain')  \n        self.setupAcls()\n        self.assertEqual(self.domain, 'tall_acl.' + config.get('domain')  )\n         \n        headers = self.getHeaders()    \n        \n        # read domain acls\n        req = self.endpoint + \"/acls\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # needs Authorization\n        \n        # try with test_user1\n        headers = self.getHeaders(self.user1)   \n        req = self.endpoint + \"/acls\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # unAuthorization - test_user1 only has read access\n        \n        # try with test_user2\n        headers = self.getHeaders(self.user2)  \n        req = self.endpoint + \"/acls\"\n        rsp = requests.get(req, headers=headers)\n    \n        self.assertEqual(rsp.status_code, 200)   \n        \n        rspJson = json.loads(rsp.text)\n        self.assertTrue('acls' in rspJson)\n        acls = rspJson['acls']\n        self.assertEqual(len(acls), 3)\n        \n        # get acl for a particular user\n        headers = self.getHeaders(self.user2)  \n        req = self.endpoint + \"/acls/\" + self.user1['username']\n        rsp = requests.get(req, headers=headers)\n    \n        self.assertEqual(rsp.status_code, 200)   \n        \n        rspJson = json.loads(rsp.text)\n        self.assertTrue('acl' in rspJson)\n        acl = rspJson['acl']\n        self.assertEqual(len(acl.keys()), 7)\n        \n    def testPutDomain(self):\n        
\n        self.domain = 'new_domain.test_user1.' + config.get('home_domain')  \n        \n        headers = self.getHeaders()    \n        \n        # put domain in user home folder\n        req = self.endpoint + \"/\"\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)  \n        # todo - above should fail with 401 - need authorization\n                \n        \n    def testAttributes(self):\n        self.domain = 'tall_acl.' + config.get('domain')  \n        self.setupAcls()\n        rootUUID = self.getUUIDByPath('/')\n        self.assertTrue(helper.validateId(rootUUID))\n        \n        # create attribute\n        headers = self.getHeaders()  \n        payload = {'type': 'H5T_STD_I32LE', 'value': 42}    \n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/a1\"\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # auth needed\n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # not authorized\n        \n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # OK\n        \n        # read group attribute\n        headers = self.getHeaders()\n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/a1\"  \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # un-authorized                \n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # OK\n        rspJson = json.loads(rsp.text)  \n        self.assertEqual(rspJson['value'], 42)  \n               \n        # delete attribute\n        headers = 
self.getHeaders()\n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/\" + 'a1'\n        rsp = requests.delete(req, headers=headers)     \n        self.assertEqual(rsp.status_code, 401)  # auth needed\n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # not authorized\n        \n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200) \n        \n    def testDataset(self):\n        self.domain = 'tall_acl.' + config.get('domain')  \n        self.setupAcls()\n        rootUUID = self.getUUIDByPath('/')\n        self.assertTrue(helper.validateId(rootUUID))\n            \n        # create dataset\n        headers = self.getHeaders()  \n        payload = {'type': 'H5T_STD_I32LE' }    \n        req = self.endpoint + \"/datasets\"  \n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # auth needed\n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # not authorized\n        \n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # OK\n        \n        rspJson = json.loads(rsp.text)  \n        dataset_uuid = rspJson['id']\n        \n        # read dataset  \n        headers = self.getHeaders()\n        req = self.endpoint + \"/datasets/\" + dataset_uuid  \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # un-authorized                    \n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.get(req, headers=headers)\n        
self.assertEqual(rsp.status_code, 200)  # OK\n\n        # read dataset acls\n        req += \"/acls\"\n        rsp = requests.get(req, headers=headers)  \n        self.assertEqual(rsp.status_code, 403)  # test_user1 doesn't have readACL permission\n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text) \n        self.assertTrue(\"acls\" in rspJson)\n        acls = rspJson[\"acls\"]\n        self.assertEqual(len(acls), 0)  # empty list of acls\n        \n        # delete dataset\n        headers = self.getHeaders()\n        req = self.endpoint + \"/datasets/\" + dataset_uuid  \n        rsp = requests.delete(req, headers=headers)     \n        self.assertEqual(rsp.status_code, 401)  # auth needed\n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # not authorized\n        \n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # OK\n        \n    def testValue(self):\n        self.domain = 'tall_acl.' 
+ config.get('domain')  \n        self.setupAcls()\n        \n        dset_uuid = self.getUUIDByPath('/g1/g1.1/dset1.1.1')  \n        self.assertTrue(helper.validateId(dset_uuid))   \n        \n        # read value\n        headers = self.getHeaders()\n        req = self.endpoint + \"/datasets/\" + dset_uuid + \"/value\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # auth needed\n        \n        headers = self.getHeaders(user=self.user1)\n        req = self.endpoint + \"/datasets/\" + dset_uuid + \"/value\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # OK\n        \n        # point selection\n        points = []\n        for i in range(10):\n            points.append((i,i))  # get diagonal\n        req = self.endpoint + \"/datasets/\" + dset_uuid + \"/value\" \n        payload = {'points': points}\n        headers = self.getHeaders()    \n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # auth needed\n        \n        # write values\n        data = []\n        for i in range(10):\n            row = []\n            for j in range(10):\n                row.append(j*10 + i)\n            data.append(row)\n        req = self.endpoint + \"/datasets/\" + dset_uuid + \"/value\" \n        payload = { 'value': data }\n        headers = self.getHeaders()\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # auth needed\n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # not authorized\n        \n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # OK\n        \n  
  def testDatatypes(self):\n        self.domain = 'tall_acl.' + config.get('domain')  \n        self.setupAcls()\n        \n        payload = {'type': 'H5T_IEEE_F32LE'}\n        req = self.endpoint + \"/datatypes\"\n        \n        # test create\n        headers = self.getHeaders()\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # auth needed  \n         \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # not authorized\n        \n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # created\n        rspJson = json.loads(rsp.text) \n        type_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(type_uuid))\n        \n        # test read\n        req = self.endpoint + \"/datatypes/\" + type_uuid \n        headers = self.getHeaders()\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # auth needed  \n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # OK\n\n        # read dataset acls\n        req += \"/acls\"\n        rsp = requests.get(req, headers=headers)  \n        self.assertEqual(rsp.status_code, 403)  # test_user1 doesn't have readACL permission\n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text) \n        self.assertTrue(\"acls\" in rspJson)\n        acls = rspJson[\"acls\"]\n        self.assertEqual(len(acls), 0)  # empty list of acls\n           \n        # test delete\n        headers = self.getHeaders()\n        req = 
self.endpoint + \"/datatypes/\" + type_uuid \n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # auth needed\n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # un authorized\n        \n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # OK\n         \n    def testGroups(self):\n        self.domain = 'tall_acl.' + config.get('domain')  \n        self.setupAcls()\n        \n        g1_uuid = self.getUUIDByPath('/g1')\n       \n        self.assertTrue(helper.validateId(g1_uuid))\n        \n        # read group g1\n        headers = self.getHeaders()\n        req = self.endpoint + \"/groups/\" + g1_uuid\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # needs Authorization\n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n\n        # read group acls\n        req += \"/acls\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # test_user1 doesn't have readACL permission\n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text) \n        self.assertTrue(\"acls\" in rspJson)\n        acls = rspJson[\"acls\"]\n        self.assertEqual(len(acls), 0)  # empty list of acls\n         \n        # read links\n        headers = self.getHeaders()\n        req = self.endpoint + \"/groups/\" + g1_uuid + '/links'\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # needs Authorization\n        headers = self.getHeaders(user=self.user1)\n        
rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # OK\n        \n        # read link\n        headers = self.getHeaders()\n        req = self.endpoint + \"/groups/\" + g1_uuid + '/links/g1.1'\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # needs Authorization\n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200) # OK\n        \n        # create group\n        headers = self.getHeaders()\n        req = self.endpoint + \"/groups\"\n        rsp = requests.post(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # needs Authorization\n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.post(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # un-authorized\n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.post(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # Created\n        rspJson = json.loads(rsp.text)\n        grp_uuid = rspJson['id']\n        \n        # add link\n        headers = self.getHeaders()\n        payload = { \"id\": grp_uuid }\n        req = self.endpoint + \"/groups/\" + g1_uuid + '/links/new_group'\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # needs Authorization\n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 403) # un-authorized\n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201) # created\n        \n        # delete link\n        headers = self.getHeaders()\n        req = self.endpoint + \"/groups/\" + g1_uuid + 
'/links/new_group'\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # needs Authorization\n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403) # un-authorized\n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200) # OK\n               \n        # delete group\n        headers = self.getHeaders()\n        req = self.endpoint + \"/groups/\" + grp_uuid\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # needs Authorization\n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # un-authorized\n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # OK\n        \n    def testRoot(self):\n        self.domain = 'tall_acl_delete.' 
+ config.get('domain')  \n        self.setupAcls()\n        \n        # read domain resource\n        headers = self.getHeaders()\n        req = self.endpoint + \"/\" \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # needs Authorization\\\n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # delete domain!\n        headers = self.getHeaders()\n        req = self.endpoint + \"/\" \n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # needs Authorization\n        \n        # try malformed auth string \n        headers['Authorization'] = \"Basic \" + \"xxx123\"\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # bad auth header\n        \n        # try invalid password\n        headers['Authorization'] = helper.getAuthString(\"test_user1\", \"notmypassword\")\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 401)  # need valid auth header     \n        \n        headers = self.getHeaders(user=self.user1)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # not authorized \n        \n        headers = self.getHeaders(user=self.user2)\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # OK        \n     \n        \nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "test/integ/attributetest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\n\nclass AttributeTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(AttributeTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))    \n       \n    def testGetGroupAttr(self):\n        for domain_name in ('tall', 'tall_ro'):\n            domain = domain_name + '.' 
+ config.get('domain') \n            rootUUID = helper.getRootUUID(domain)\n            req = helper.getEndpoint() + \"/groups/\" + rootUUID + \"/attributes/attr1\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(rspJson['name'], 'attr1')\n            self.assertTrue('type' in rspJson)\n            type = rspJson['type']\n            self.assertEqual(type['class'], 'H5T_INTEGER')\n            self.assertEqual(type['base'], 'H5T_STD_I8LE')\n            self.assertTrue('shape' in rspJson)\n            shape = rspJson['shape']\n            self.assertEqual(shape['class'], 'H5S_SIMPLE')\n            self.assertEqual(len(shape['dims']), 1)\n            self.assertEqual(shape['dims'][0], 10) \n            self.assertTrue('maxdims' not in shape) \n            data = rspJson['value'] \n            self.assertEqual(len(data), 10)\n            # data should be the array [97, 98, 99, ..., 105, 0]\n            expected = list(range(97, 107))\n            expected[9] = 0\n            self.assertEqual(data, expected) \n            self.assertEqual(len(rspJson['hrefs']), 4)\n            \n    def testGetDatasetAttr(self):\n        for domain_name in ('tall',  'tall_ro'):\n            domain = domain_name + '.' 
+ config.get('domain') \n            rootUUID = helper.getRootUUID(domain)\n            # get dataset uuid at path: 'g1/g1.1/dset1.1.1'\n            req = helper.getEndpoint() + \"/groups/\" + rootUUID + \"/links/g1\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertTrue('link' in rspJson)\n            link = rspJson['link']\n            g1UUID = link['id']\n            req = helper.getEndpoint() + \"/groups/\" + g1UUID + \"/links/g1.1\"\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertTrue('link' in rspJson)\n            link = rspJson['link']\n            g11UUID = link['id']\n            req = helper.getEndpoint() + \"/groups/\" + g11UUID + \"/links/dset1.1.1\"\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertTrue('link' in rspJson)\n            link = rspJson['link']\n            dset111UUID = link['id']\n            \n            req = helper.getEndpoint() + \"/datasets/\" + dset111UUID + \"/attributes/attr1\"\n            rsp = requests.get(req, headers=headers)\n            rspJson = json.loads(rsp.text)\n           \n            self.assertEqual(rspJson['name'], 'attr1')\n            self.assertTrue('type' in rspJson)\n            type = rspJson['type']\n            self.assertEqual(type['class'], 'H5T_INTEGER')\n            self.assertEqual(type['base'], 'H5T_STD_I8LE')\n            self.assertTrue('shape' in rspJson)\n            shape = rspJson['shape']\n            self.assertEqual(shape['class'], 'H5S_SIMPLE')\n            self.assertEqual(len(shape['dims']), 1)\n            self.assertEqual(shape['dims'][0], 27) \n            
self.assertTrue('maxdims' not in shape) \n            data = rspJson['value'] \n            self.assertEqual(len(data), 27)\n            # first value is 49           \n            self.assertEqual(data[0], 49) \n            self.assertEqual(len(rspJson['hrefs']), 4)\n            \n    def testGetAll(self):\n        for domain_name in ('tall', 'tall_ro'):\n            domain = domain_name + '.' + config.get('domain') \n            rootUUID = helper.getRootUUID(domain)\n            req = helper.getEndpoint() + \"/groups/\" + rootUUID + \"/attributes\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(len(rspJson['hrefs']), 4)\n            attrsJson = rspJson['attributes']\n            self.assertEqual(len(attrsJson), 2)\n            self.assertEqual(attrsJson[0]['name'], 'attr1')\n            self.assertEqual(attrsJson[1]['name'], 'attr2')\n            self.assertFalse('value' in attrsJson[0])\n            \n    def testGetBatch(self):\n        domain = 'attr1k.' 
+ config.get('domain')   \n        rootUUID = helper.getRootUUID(domain)     \n        req = helper.getEndpoint() + \"/groups/\" + rootUUID + \"/attributes\"\n        headers = {'host': domain}\n        params = {'Limit': 50 }\n        names = set()\n        # get attributes in 20 batches of 50 links each\n        lastName = None\n        for batchno in range(20):\n            if lastName:\n                params['Marker'] = lastName\n            rsp = requests.get(req, headers=headers, params=params)\n            self.assertEqual(rsp.status_code, 200)\n            if rsp.status_code != 200:\n                break\n            rspJson = json.loads(rsp.text)\n            attrs = rspJson['attributes']\n            self.assertEqual(len(attrs) <= 50, True)\n            for attr in attrs:\n                lastName = attr['name']\n                names.add(lastName)\n            if len(attrs) == 0:\n                break\n        self.assertEqual(len(names), 1000)  # should get 1000 unique attributes\n        \n    def testGetNullSpace(self):\n        domain = \"null_space_attr.\" + config.get('domain') \n        rootUUID = helper.getRootUUID(domain)\n        req = helper.getEndpoint() + \"/groups/\" + rootUUID + \"/attributes/attr1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(rspJson['name'], 'attr1')\n        self.assertTrue('type' in rspJson)\n        type = rspJson['type']\n        self.assertEqual(type['class'], 'H5T_FLOAT')\n        self.assertEqual(type['base'], 'H5T_IEEE_F64LE')\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_NULL')\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(value, None)\n        self.assertEqual(len(rspJson['hrefs']), 4)\n        \n    def 
testGetCompound(self):\n        for domain_name in ('compound_attr', ):\n            domain = domain_name + '.' + config.get('domain') \n            rootUUID = helper.getRootUUID(domain)\n            req = helper.getEndpoint() + \"/groups/\" + rootUUID + \"/attributes/weather\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(rspJson['name'], 'weather')\n            shape = rspJson['shape']\n            self.assertEqual(shape['class'], 'H5S_SIMPLE')\n            self.assertEqual(len(shape['dims']), 1)\n            self.assertEqual(shape['dims'][0], 1) \n            typeItem = rspJson['type']\n            self.assertEqual(typeItem['class'], 'H5T_COMPOUND')\n            self.assertEqual(len(typeItem['fields']), 4)\n            fields = typeItem['fields']\n            field0 = fields[0]\n            self.assertEqual(field0['name'], 'time')\n            field0Type = field0['type']\n            self.assertEqual(field0Type['class'], 'H5T_INTEGER')\n            self.assertEqual(field0Type['base'], 'H5T_STD_I64LE')\n            field1 = fields[1]\n            self.assertEqual(field1['name'], 'temp')\n            field1Type = field1['type']\n            self.assertEqual(field1Type['class'], 'H5T_INTEGER')\n            self.assertEqual(field1Type['base'], 'H5T_STD_I64LE')\n            field2 = fields[2]\n            self.assertEqual(field2['name'], 'pressure')\n            field2Type = field2['type']\n            self.assertEqual(field2Type['class'], 'H5T_FLOAT')\n            self.assertEqual(field2Type['base'], 'H5T_IEEE_F64LE')\n            field3 = fields[3]\n            self.assertEqual(field3['name'], 'wind')\n            field3Type = field3['type']\n            self.assertEqual(field3Type['class'], 'H5T_STRING')\n            self.assertEqual(field3Type['charSet'], 'H5T_CSET_ASCII')\n            
self.assertEqual(field3Type['length'], 6)\n            self.assertEqual(field3Type['strPad'], 'H5T_STR_NULLPAD')\n            \n    def testGetCompoundArray(self):\n        for domain_name in ('compound_array_attr', ):\n            domain = domain_name + '.' + config.get('domain') \n            root_uuid = helper.getRootUUID(domain)\n            dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n            req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/attributes/A1\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(rspJson['name'], 'A1')\n            shape = rspJson['shape']\n            self.assertEqual(shape['class'], 'H5S_SCALAR')\n            typeItem = rspJson['type']\n            self.assertEqual(typeItem['class'], 'H5T_COMPOUND')\n            self.assertEqual(len(typeItem['fields']), 2)\n            fields = typeItem['fields']\n            field0 = fields[0]\n            self.assertEqual(field0['name'], 'temp')\n            field0Type = field0['type']\n            self.assertEqual(field0Type['class'], 'H5T_FLOAT')\n            self.assertEqual(field0Type['base'], 'H5T_IEEE_F64LE')\n            field1 = fields[1]\n            self.assertEqual(field1['name'], '2x2')\n            field1Type = field1['type']\n            self.assertEqual(field1Type['class'], 'H5T_ARRAY')\n            self.assertEqual(field1Type['dims'], [2, 2])\n            baseType = field1Type['base']\n            self.assertEqual(baseType['class'], 'H5T_FLOAT')\n            self.assertEqual(baseType['base'], 'H5T_IEEE_F32LE')\n             \n            \n    def testGetCommitted(self):\n        domain = 'committed_type.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        req = helper.getEndpoint() + \"/groups/\" + root_uuid + \"/attributes/attr1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SCALAR')\n        self.assertTrue('dims' not in shape)\n        typeItem = rspJson['type']  # returns '/datatypes/<uuid>'\n        npos = typeItem.rfind('/')\n        type_uuid = typeItem[(npos+1):]\n        self.assertTrue(helper.validateId(type_uuid))\n             \n    def testGetArray(self):\n        domain = 'array_attr.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/attributes/A1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 4) \n        typeItem = rspJson['type']    \n        self.assertEqual(typeItem['class'], 'H5T_ARRAY')\n        self.assertTrue('dims' in typeItem)\n        typeShape = typeItem['dims']\n        self.assertEqual(len(typeShape), 2)\n        self.assertEqual(typeShape[0], 3)\n        self.assertEqual(typeShape[1], 5)\n        typeBase = typeItem['base']\n        self.assertEqual(typeBase['class'], 'H5T_INTEGER')\n        self.assertEqual(typeBase['base'], 'H5T_STD_I64LE')\n        self.assertTrue('value' in rspJson)\n         \n        value = rspJson['value']\n 
       self.assertEqual(len(value), 4)   \n        elem = value[0] # elem should be a 3x5 array \n        self.assertEqual(len(elem), 3)\n        self.assertEqual(elem[2], [0, -2, -4, -6, -8])\n\n    def testGetBool(self):\n        domain = 'bool_attr.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        req = helper.getEndpoint() + \"/groups/\" + root_uuid + \"/attributes/attr1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 4)  \n        typeItem = rspJson['type']\n        \n        self.assertEqual(typeItem['class'], 'H5T_ENUM')\n        typeBase = typeItem['base']\n        self.assertEqual(typeBase['class'], 'H5T_INTEGER')\n        self.assertEqual(typeBase['base'], 'H5T_STD_I8LE')\n        self.assertTrue('mapping' in typeItem)\n        mapping = typeItem['mapping']\n        self.assertEqual(len(mapping), 2)\n        self.assertEqual(mapping['FALSE'], 0)\n        self.assertEqual(mapping['TRUE'], 1)\n        \n    def testGetVLenString(self):\n        domain = 'vlen_string_attr.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/attributes/A1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 4) \n        typeItem = rspJson['type']   \n        self.assertEqual(typeItem['class'], 'H5T_STRING')\n        self.assertEqual(typeItem['charSet'], 'H5T_CSET_ASCII')\n        self.assertEqual(typeItem['length'], 'H5T_VARIABLE')\n        self.assertEqual(typeItem['strPad'], 'H5T_STR_NULLTERM')\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 4) \n        self.assertEqual(value[0], \"Parting\")\n        self.assertEqual(value[1], \"is such\")\n        self.assertEqual(value[2], \"sweet\")\n        self.assertEqual(value[3], \"sorrow.\")\n        \n    def testGetFixedString(self):\n        domain = 'fixed_string_attr.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/attributes/A1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 4)  \n        typeItem = rspJson['type']\n        \n        self.assertEqual(typeItem['class'], 'H5T_STRING')\n        self.assertEqual(typeItem['charSet'], 'H5T_CSET_ASCII')\n        self.assertEqual(typeItem['length'], 7)\n        self.assertEqual(typeItem['strPad'], 'H5T_STR_NULLPAD')\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 4) \n        self.assertEqual(value[0], \"Parting\")\n        self.assertEqual(value[1], \"is such\")\n        self.assertEqual(value[2], \"sweet\")\n        self.assertEqual(value[3], \"sorrow.\")\n        \n    def testGetEnum(self):\n        domain = 'enum_attr.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/attributes/A1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 2)\n        self.assertEqual(shape['dims'][0], 4)\n        self.assertEqual(shape['dims'][1], 7) \n        typeItem = rspJson['type']\n        \n        self.assertEqual(typeItem['class'], 'H5T_ENUM')\n        baseType = typeItem['base']\n        self.assertEqual(baseType['class'], 'H5T_INTEGER')\n        self.assertEqual(baseType['base'], 'H5T_STD_I16BE')\n        self.assertTrue('mapping' in typeItem)\n        mapping = typeItem['mapping']\n        self.assertEqual(len(mapping), 4)\n        self.assertEqual(mapping['SOLID'], 0)\n        self.assertEqual(mapping['LIQUID'], 1)\n        self.assertEqual(mapping['GAS'], 2)\n        self.assertEqual(mapping['PLASMA'], 3)\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 4) \n        self.assertEqual(value[1][2], mapping['GAS'])\n        \n    def testGetVlen(self):\n        domain = 'vlen_attr.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/attributes/A1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 2)\n        typeItem = rspJson['type']\n        \n        self.assertEqual(typeItem['class'], 'H5T_VLEN')\n        baseType = typeItem['base']\n        self.assertEqual(baseType['class'], 'H5T_INTEGER')\n        self.assertEqual(baseType['base'], 'H5T_STD_I32LE')\n        #verify data returned\n        value = rspJson['value']\n        self.assertEqual(len(value), 2)\n        self.assertEqual(len(value[1]), 12)\n        self.assertEqual(value[1][11], 144)\n        \n    def testGetOpaque(self):\n        domain = 'opaque_attr.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/attributes/A1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 4) \n        typeItem = rspJson['type']   \n        \n        self.assertEqual(typeItem['class'], 'H5T_OPAQUE')\n        self.assertEqual(typeItem['size'], 7)\n        self.assertTrue('value' not in rspJson)  # opaque data is not supported yet\n        \n    def testGetObjectReference(self):\n        domain = 'objref_attr.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2') \n        g1_uuid = helper.getUUID(domain, root_uuid, 'G1') \n        req = helper.getEndpoint() + \"/datasets/\" + ds1_uuid + \"/attributes/A1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 2)  \n        typeItem = rspJson['type']     \n        self.assertEqual(typeItem['class'], 'H5T_REFERENCE')\n        self.assertEqual(typeItem['base'], 'H5T_STD_REF_OBJ')\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        
self.assertEqual(len(value), 2)\n        self.assertEqual(value[0], 'groups/' + g1_uuid)\n        self.assertEqual(value[1], 'datasets/' + ds2_uuid)\n        \n    def testGetRegionReference(self):\n        domain = 'regionref_attr.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2') \n        req = helper.getEndpoint() + \"/datasets/\" + ds1_uuid + \"/attributes/A1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 2) \n        typeItem = rspJson['type']     \n        self.assertEqual(typeItem['class'], 'H5T_REFERENCE')\n        self.assertEqual(typeItem['base'], 'H5T_STD_REF_DSETREG')\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 2)\n        value = rspJson['value']\n        self.assertEqual(len(value), 2)\n        ref0 = value[0]\n        self.assertEqual(ref0['select_type'], 'H5S_SEL_POINTS')\n        self.assertEqual(ref0['id'], ds2_uuid)\n        points = ref0['selection']\n        self.assertEqual(len(points), 4)\n        self.assertEqual(points[0], [0, 1])\n        self.assertEqual(points[1], [2,11])\n        self.assertEqual(points[2], [1, 0])\n        self.assertEqual(points[3], [2, 4])\n        \n        ref1 = value[1]\n        self.assertEqual(ref1['select_type'], 'H5S_SEL_HYPERSLABS')\n        self.assertEqual(ref1['id'], ds2_uuid)\n        hyperslabs = ref1['selection'] \n        self.assertEqual(len(hyperslabs), 4)\n        self.assertEqual(hyperslabs[0][0], [0, 0])\n        
self.assertEqual(hyperslabs[0][1], [1, 3])\n        self.assertEqual(hyperslabs[1][0], [0, 11])\n        self.assertEqual(hyperslabs[1][1], [1, 14])\n        self.assertEqual(hyperslabs[2][0], [2, 0])\n        self.assertEqual(hyperslabs[2][1], [3, 3])\n        self.assertEqual(hyperslabs[3][0], [2, 11])\n        self.assertEqual(hyperslabs[3][1], [3, 14])\n        \n            \n    def testGetScalar(self):\n        domain = 'scalar.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        req = helper.getEndpoint() + \"/groups/\" + root_uuid + \"/attributes/attr1\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SCALAR')\n        self.assertTrue('dims' not in shape)\n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['class'], 'H5T_INTEGER')\n        self.assertEqual(typeItem['base'], 'H5T_STD_I64LE')\n        data = rspJson['value'] \n        self.assertEqual(type(data), int)\n        self.assertEqual(data, 42)\n    \n    def testGetScalarString(self):\n        domain = 'scalar.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        # now try reading a scalar string\n        req = helper.getEndpoint() + \"/groups/\" + root_uuid + \"/attributes/attr2\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SCALAR')\n        self.assertTrue('dims' not in shape)\n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['class'], 'H5T_STRING')\n        self.assertEqual(typeItem['charSet'], 'H5T_CSET_ASCII')\n        self.assertEqual(typeItem['length'], 'H5T_VARIABLE')\n        self.assertEqual(typeItem['strPad'], 'H5T_STR_NULLTERM')\n        data = rspJson['value'] \n        self.assertEqual(data, \"hello\")   \n\n    def testGetDimensionScale(self):\n        domain = 'dim_scale.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        dset_uuid = helper.getUUID(domain, root_uuid, 'temperatures') \n        scale_x_uuid = helper.getUUID(domain, root_uuid, 'scale_x') \n        scale_y_uuid = helper.getUUID(domain, root_uuid, 'scale_y') \n        scale_z_uuid = helper.getUUID(domain, root_uuid, 'scale_z') \n        # now try reading the dimension list attribute\n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/attributes/DIMENSION_LIST\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertTrue('dims'  in shape)\n        dims = shape['dims']\n        self.assertEqual(len(dims), 1)\n        self.assertEqual(dims[0], 3)\n\n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['class'], 'H5T_VLEN')\n        baseType = 
typeItem['base']\n        self.assertEqual(baseType['class'], 'H5T_REFERENCE')\n        self.assertEqual(baseType['base'], 'H5T_STD_REF_OBJ')\n        data = rspJson['value'] \n        self.assertEqual(len(data), 3)\n        self.assertEqual(data[0], ['datasets/' + scale_x_uuid])\n        self.assertEqual(data[1], ['datasets/' + scale_y_uuid])\n        self.assertEqual(data[2], ['datasets/' + scale_z_uuid])\n\n        # read the x dimenscale and verify it refernces the temperature dataset\n        req = helper.getEndpoint() + \"/datasets/\" + scale_x_uuid + \"/attributes/REFERENCE_LIST\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['class'], 'H5T_COMPOUND')\n        fields = typeItem['fields']\n        self.assertEqual(len(fields), 2)\n        refType = fields[0][\"type\"]\n        self.assertEqual(refType[\"class\"], 'H5T_REFERENCE')\n        intType = fields[1][\"type\"]\n        self.assertEqual(intType[\"class\"], 'H5T_INTEGER')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertTrue('dims'  in shape)\n        dims = shape['dims']\n        self.assertEqual(len(dims), 1)\n        self.assertEqual(dims[0], 1)\n        data = rspJson['value']\n        elem = data[0]\n       \n        self.assertEqual(len(elem), 2)  # two fields of a compound type\n        self.assertEqual(elem[0], 'datasets/' + dset_uuid) #  reference primary dataset\n        self.assertEqual(elem[1], 0)  # first dimension    \n        \n    def testPut(self):\n        domain = 'tall_updated.' 
+ config.get('domain') \n        attr_name = 'attr3'\n        rootUUID = helper.getRootUUID(domain) \n        headers = {'host': domain}\n           \n        payload = {'type': 'H5T_IEEE_F32LE', 'shape': (1,), 'value': (3.12,)}\n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n        \n        # do a get and verify the space is simple\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # get attribute\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        dims = shape['dims']\n        self.assertEqual(len(dims), 1)\n        self.assertEqual(dims[0], 1)\n\n        # try creating the attribute again, should return 409\n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 409)  # conflict\n        \n\n        \n    def testPutScalar(self):\n        domain = 'tall_updated.' 
+ config.get('domain') \n        attr_name = 'attr4'\n        rootUUID = helper.getRootUUID(domain) \n        headers = {'host': domain}\n           \n        payload = {'type': 'H5T_STD_I32LE', 'value': 42}\n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n        # do a get and verify the space is scalar\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # get attribute\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SCALAR') \n        \n    def testPutList(self):\n        domain = 'tall_updated.' + config.get('domain') \n        attr_name = 'attr5'\n        rootUUID = helper.getRootUUID(domain) \n        headers = {'host': domain}\n        data = list(range(10))\n           \n        payload = {'type': 'H5T_STD_I32LE', 'shape': (10,), 'value': data}\n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n        \n        # do a get and verify the space has 10 elements\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # get attribute\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        dims = shape['dims']\n        self.assertEqual(len(dims), 1)\n        self.assertEqual(dims[0], 10)  \n        \n    def testPutFixedString(self):\n        domain = 'tall_updated.' 
+ config.get('domain') \n        attr_name = 'attr6'\n        rootUUID = helper.getRootUUID(domain) \n        headers = {'host': domain}\n        data = \"Hello, I'm a fixed-width string!\"\n        str_type = { 'charSet':   'H5T_CSET_ASCII', \n                     'class':  'H5T_STRING', \n                     'strPad': 'H5T_STR_NULLPAD', \n                     'length': 40}\n                      \n        payload = {'type': str_type, 'shape': (1,), 'value': data}\n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n        \n    def testPutVariableString(self):\n        domain = 'tall_updated.' + config.get('domain') \n        attr_name = 'attr7'\n        rootUUID = helper.getRootUUID(domain) \n        headers = {'host': domain}\n        data = [\"Hypermedia\", \"as\", \"the\", \"engine\", \"of\", \"state.\"]\n        str_type = { 'charSet':   'H5T_CSET_ASCII', \n                     'class':  'H5T_STRING', \n                     'strPad': 'H5T_STR_NULLPAD', \n                     'length': 'H5T_VARIABLE'}\n                      \n        payload = {'type': str_type, 'shape': (6,), 'value': data}\n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n        \n    def testPutNullSpace(self):\n        domain = 'tall_updated.' 
+ config.get('domain') \n        attr_name = 'attr8'\n        rootUUID = helper.getRootUUID(domain) \n        headers = {'host': domain}\n           \n        payload = {'type': 'H5T_STD_I32LE', 'shape': 'H5S_NULL'}\n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n        # do a get and verify the space is scalar\n        \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # get attribute\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_NULL')\n\n    def testPutObjReference(self):\n        domain = 'tall_updated.' + config.get('domain')\n        attr_name = 'attr9'\n        root_uuid = helper.getRootUUID(domain)\n        g2_uuid = helper.getUUID(domain, root_uuid, 'g2') \n        d22_uuid = helper.getUUID(domain, g2_uuid, 'dset2.2') \n        \n        headers = {'host': domain}\n         \n        datatype = {'class': 'H5T_REFERENCE', 'base': 'H5T_STD_REF_OBJ' }\n        \n        value = ('groups/' + g2_uuid, '', 'datasets/' + d22_uuid) \n        payload = {'type': datatype, 'shape': 3, 'value': value}\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n        \n    def testPutRegionReference(self):\n        domain = 'tall_updated.' 
+ config.get('domain')\n        attr_name = 'attr10'\n        root_uuid = helper.getRootUUID(domain)\n        g1_uuid = helper.getUUID(domain, root_uuid, 'g1') \n        g11_uuid = helper.getUUID(domain, g1_uuid, 'g1.1') \n        d111_uuid = helper.getUUID(domain, g11_uuid, 'dset1.1.1') \n        \n        headers = {'host': domain}\n         \n        datatype = {'class': 'H5T_REFERENCE', 'base': 'H5T_STD_REF_DSETREG' }\n        \n        region_ref = { }\n        region_ref['id'] = d111_uuid\n        region_ref['select_type'] = 'H5S_SEL_HYPERSLABS'\n        region_ref['selection'] = (((0,0),(1,1)),((2,2),(4,4)), ((5,5),(10,10)))\n        \n        point_ref = { }\n        point_ref['id'] = d111_uuid\n        point_ref['select_type'] = 'H5S_SEL_POINTS'\n        point_ref['selection'] = ((0,0),(1,1),(2,2),(3,3),(4,4),(5,5),(6,6),(7,7),(8,8),(9,9))\n        \n        all_ref = {}\n        all_ref['id'] = d111_uuid\n        all_ref['select_type'] = 'H5S_SEL_ALL'\n        \n        none_ref = {}\n        none_ref['id'] = d111_uuid\n        none_ref['select_type'] = 'H5S_SEL_NONE'\n        \n        value = ( region_ref , point_ref, all_ref, none_ref ) \n         \n        payload = {'type': datatype, 'shape': 4, 'value': value}\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n          \n        \n    def testPutCompound(self):\n        domain = 'tall_updated.' 
+ config.get('domain')\n        attr_name = 'attr_compound'\n        root_uuid = helper.getRootUUID(domain)\n        headers = {'host': domain}\n        \n        fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, \n                    {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}) \n        datatype = {'class': 'H5T_COMPOUND', 'fields': fields }\n        \n        value = ((55, 32.34), (59, 29.34)) \n        payload = {'type': datatype, 'shape': 2, 'value': value}\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n \n    \"\"\"\n    tbd - fix issue passing attribute data       \n    def testPutCompoundArray(self):\n        domain = 'tall_updated.' + config.get('domain')\n        attr_name = 'attr_compound_array'\n        root_uuid = helper.getRootUUID(domain)\n        headers = {'host': domain}\n        \n        fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, \n                    {'name': '2x2', 'type': { 'class': 'H5T_ARRAY', 'dims': [2,2],\n                    'base': 'H5T_IEEE_F32LE'} }) \n        datatype = {'class': 'H5T_COMPOUND', 'fields': fields }\n        \n        value = ((3.14, ((55.0, 32.34), (59.0, 29.34))),\n                 (6.28, ((110.0, 64.68), (118.0, 58.68)))) \n        payload = {'type': datatype, 'shape': 2 'value': 0}\n        print \"payload:\", json.dumps(payload)\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n    \"\"\"    \n        \n    def testPutCommittedType(self):\n        domain = 
'tall_updated.' + config.get('domain')\n        attr_name = 'attr_committed'\n        root_uuid = helper.getRootUUID(domain)\n        headers = {'host': domain}\n        \n        # create the datatype\n        payload = {'type': 'H5T_IEEE_F32LE'}\n        req = self.endpoint + \"/datatypes\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create datatype\n        rspJson = json.loads(rsp.text)\n        dtype_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dtype_uuid))\n         \n        # link new datatype as 'dtype1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dtype1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {'id': dtype_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # create the attribute using the type created above\n        value = []\n        for i in range(10):\n            value.append(i*0.5) \n        payload = {'type': dtype_uuid, 'shape': 10, 'value': value}\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(len(rspJson['hrefs']), 3)\n        \n    def testPutDimensionScale(self):\n        domain = 'dim_scale_updated.' 
+ config.get('domain')\n        root_uuid = helper.getRootUUID(domain)\n        headers = {'host': domain}\n       \n        dset_uuid = helper.getUUID(domain, root_uuid, 'temperatures')\n        \n        scale_x_uuid = helper.getUUID(domain, root_uuid, 'scale_x') \n        scale_y_uuid = helper.getUUID(domain, root_uuid, 'scale_y') \n        scale_z_uuid = helper.getUUID(domain, root_uuid, 'scale_z') \n        \n        # attach a dimension_list attribute to temperatures dataset\n        reftype = {'class': 'H5T_REFERENCE', 'base': 'H5T_STD_REF_OBJ' }\n        \n        attr_name = \"DIMENSION_LIST\"\n        vlen_type = {'class': 'H5T_VLEN', 'base': reftype }\n        value = []\n        for item_uuid in (scale_x_uuid, scale_y_uuid, scale_z_uuid):\n            obj_ref = 'datasets/' + item_uuid\n            vlen_item = (obj_ref,)\n            value.append(vlen_item)\n            \n         \n        payload = {'type': vlen_type, 'shape': 3, 'value': value}\n        req = self.endpoint + \"/datasets/\" + dset_uuid + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create attribute\n        \n        \n    def testPutInvalid(self):\n        domain = 'tall_updated.' + config.get('domain') \n        attr_name = 'attr_invalid'\n        rootUUID = helper.getRootUUID(domain) \n        headers = {'host': domain}\n        # attempt to pass in a string directly (which is not valid JSON)\n        payload = \"{'type': 'H5T_IEEE_F32LE', 'shape': (0,), 'value': 3.12}\"\n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/\" + attr_name\n        rsp = requests.put(req, data=payload, headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # Bad Request \n         \n    def testDelete(self):\n        domain = 'tall_updated.' 
+ config.get('domain') \n        attr_name = 'attr1'\n        rootUUID = helper.getRootUUID(domain) \n        headers = {'host': domain}\n           \n        req = self.endpoint + \"/groups/\" + rootUUID + \"/attributes/\" + attr_name\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # delete attribute\n        \n    def testGetInvalidName(self):\n        domain = 'tall.' + config.get('domain')\n        rootUUID = helper.getRootUUID(domain)\n        req = helper.getEndpoint() + \"/groups/\" + rootUUID + \"/attributes/no_attr_here\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        \n                         \n            \nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "test/integ/config.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nfrom h5serv.config import *\n\ncfg = {\n    'server': '127.0.0.1',\n    'home_domain': 'home.hdfgroup.org',\n    'port':   5000,\n    'domain':   'test.hdfgroup.org',\n    'hdf5_ext': '.h5',\n    'home_dir': 'home'\n}\nupdate(cfg)\n"
  },
  {
    "path": "test/integ/datasettest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\n\nclass DatasetTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(DatasetTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))    \n       \n    def testGet(self):\n        domain = 'tall.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        g2_uuid = helper.getUUID(domain, root_uuid, 'g2')\n        dset21_uuid = helper.getUUID(domain, g2_uuid, 'dset2.1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset21_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n      \n        self.assertTrue('type' in rspJson)\n        type_json = rspJson['type']\n        self.assertEqual(type_json['class'], 'H5T_FLOAT')\n        self.assertEqual(type_json['base'], 'H5T_IEEE_F32BE')\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 10)  \n        self.assertTrue('maxdims' not in shape)\n        \n    def testGetResizable(self):\n        domain = 'resizable.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        resizable_1d_uuid = helper.getUUID(domain, root_uuid, 'resizable_1d') \n        req = helper.getEndpoint() + \"/datasets/\" + resizable_1d_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        type_json = rspJson['type']\n        self.assertEqual(type_json['class'], 'H5T_INTEGER')\n        self.assertEqual(type_json['base'], 'H5T_STD_I64LE')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 10)  \n        self.assertEqual(shape['maxdims'][0], 20)\n        \n        resizable_2d_uuid = helper.getUUID(domain, root_uuid, 'resizable_2d') \n        req = helper.getEndpoint() + \"/datasets/\" + resizable_2d_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        type_json = rspJson['type']\n        self.assertEqual(type_json['class'], 'H5T_INTEGER')\n        self.assertEqual(type_json['base'], 'H5T_STD_I64LE')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 2)\n        self.assertEqual(shape['dims'][1], 10)  \n        self.assertEqual(shape['maxdims'][1], 20)\n        \n        unlimited_1d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_1d') \n        req = helper.getEndpoint() + \"/datasets/\" + unlimited_1d_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        type_json = rspJson['type']\n        
self.assertEqual(type_json['class'], 'H5T_INTEGER')\n        self.assertEqual(type_json['base'], 'H5T_STD_I64LE')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 10)  \n        self.assertEqual(shape['maxdims'][0], 0)\n        \n        unlimited_2d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_2d') \n        req = helper.getEndpoint() + \"/datasets/\" + unlimited_2d_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        type_json = rspJson['type']\n        self.assertEqual(type_json['class'], 'H5T_INTEGER')\n        self.assertEqual(type_json['base'], 'H5T_STD_I64LE')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 2)\n        self.assertEqual(shape['dims'][1], 10)  \n        self.assertEqual(shape['maxdims'][1], 0)\n        \n    def testGetScalar(self):\n        domain = 'scalar.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '0d') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        type_json = rspJson['type']\n        self.assertEqual(type_json['class'], 'H5T_INTEGER')\n        self.assertEqual(type_json['base'], 'H5T_STD_I32LE')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SCALAR')\n        self.assertTrue('dims' not in shape)\n        self.assertTrue('maxdims' not in shape)\n        \n    def testGetScalarString(self):\n        domain = 'scalar.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '0ds') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        type_json = rspJson['type']\n        self.assertEqual(type_json['class'], 'H5T_STRING')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SCALAR')\n        self.assertTrue('dims' not in shape)\n        self.assertTrue('maxdims' not in shape)\n        \n    def testGetSimpleOneElement(self):\n        domain = 'scalar.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '1d') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        type_json = rspJson['type']\n        self.assertEqual(type_json['class'], 'H5T_INTEGER')\n        self.assertEqual(type_json['base'], 'H5T_STD_I32LE')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertTrue('dims' in shape)\n        self.assertEqual(shape['dims'][0], 1) \n        \n    def testGetSimpleOneElementString(self):\n        domain = 'scalar.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '1ds') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        type = rspJson['type']\n        self.assertEqual(type['class'], 'H5T_STRING')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertTrue('dims' in shape)\n        self.assertEqual(shape['dims'][0], 1) \n        \n        \n    def testGetNullSpace(self):\n        domain = 'null_space_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        type = rspJson['type']\n        self.assertEqual(type['class'], 'H5T_INTEGER')\n        self.assertEqual(type['base'], 'H5T_STD_I32LE')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_NULL')\n        self.assertTrue('dims' not in shape)\n        self.assertTrue('maxdims' not in shape)\n       \n    def testGetCompound(self):\n        domain = 'compound.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 72)  \n        typeItem = rspJson['type']   \n        self.assertEqual(typeItem['class'], 'H5T_COMPOUND')\n        self.assertTrue('fields' in typeItem)\n        fields = typeItem['fields']\n        self.assertEqual(len(fields), 5)\n        timeField = fields[1]\n        self.assertEqual(timeField['name'], 'time')\n        self.assertTrue('type' in timeField)\n        timeFieldType = timeField['type']\n        self.assertEqual(timeFieldType['class'], 'H5T_STRING')\n        self.assertEqual(timeFieldType['charSet'], 'H5T_CSET_ASCII')\n        
self.assertEqual(timeFieldType['length'], 6)\n        self.assertEqual(timeFieldType['strPad'], 'H5T_STR_NULLPAD')\n        tempField = fields[2]\n        self.assertEqual(tempField['name'], 'temp')\n        tempFieldType = tempField['type']\n        self.assertEqual(tempFieldType['class'], 'H5T_INTEGER')\n        self.assertEqual(tempFieldType['base'], 'H5T_STD_I64LE')\n        \n    def testGetCompoundArray(self):\n        for domain_name in ('compound_array_dset', ):\n            domain = domain_name + '.' + config.get('domain') \n            root_uuid = helper.getRootUUID(domain)\n            dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n            req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            shape = rspJson['shape']\n            self.assertEqual(shape['class'], 'H5S_SIMPLE')\n            self.assertEqual(len(shape['dims']), 10)\n            typeItem = rspJson['type']\n            self.assertEqual(typeItem['class'], 'H5T_COMPOUND')\n            self.assertEqual(len(typeItem['fields']), 2)\n            fields = typeItem['fields']\n            field0 = fields[0]\n            self.assertEqual(field0['name'], 'temp')\n            field0Type = field0['type']\n            self.assertEqual(field0Type['class'], 'H5T_FLOAT')\n            self.assertEqual(field0Type['base'], 'H5T_IEEE_F64LE')\n            field1 = fields[1]\n            self.assertEqual(field1['name'], '2x2')\n            field1Type = field1['type']\n            self.assertEqual(field1Type['class'], 'H5T_ARRAY')\n            self.assertEqual(field1Type['dims'], [2, 2])\n            baseType = field1Type['base']\n            self.assertEqual(baseType['class'], 'H5T_FLOAT')\n            self.assertEqual(baseType['base'], 'H5T_IEEE_F32LE')\n        \n    def 
testGetCompoundCommitted(self):\n        domain = 'compound_committed.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 72)  \n        typeItem = rspJson['type']   \n        self.assertEqual(typeItem['class'], 'H5T_COMPOUND')\n        self.assertTrue('fields' in typeItem)\n        fields = typeItem['fields']\n        self.assertEqual(len(fields), 3)\n        timeField = fields[1]\n        self.assertEqual(timeField['name'], 'time')\n        self.assertTrue('type' in timeField)\n        timeFieldType = timeField['type']\n        self.assertEqual(timeFieldType['class'], 'H5T_STRING')\n        self.assertEqual(timeFieldType['charSet'], 'H5T_CSET_ASCII')\n        self.assertEqual(timeFieldType['length'], 6)\n        self.assertEqual(timeFieldType['strPad'], 'H5T_STR_NULLPAD')\n        tempField = fields[2]\n        self.assertEqual(tempField['name'], 'temp')\n        tempFieldType = tempField['type']\n        self.assertEqual(tempFieldType['class'], 'H5T_INTEGER')\n        self.assertEqual(tempFieldType['base'], 'H5T_STD_I32LE')\n        \n    def testGetCompoundArrayFields(self):\n        # compound where the fields are array type\n        domain = 'tstr.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'comp1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 2)\n        self.assertEqual(shape['dims'][0], 3) \n        self.assertEqual(shape['dims'][1], 6)   \n        typeItem = rspJson['type'] \n        self.assertEqual(typeItem['class'], 'H5T_COMPOUND')\n        self.assertTrue('fields' in typeItem)\n        fields = typeItem['fields']\n        self.assertEqual(len(fields), 2)\n        intField = fields[0]\n        self.assertEqual(intField['name'], 'int_array')\n        self.assertTrue('type' in intField)\n        intFieldType = intField['type']\n        self.assertEqual(intFieldType['class'], 'H5T_ARRAY')\n        intFieldTypeDims = intFieldType['dims']\n        self.assertEqual(len(intFieldTypeDims), 2)\n        self.assertEqual(intFieldTypeDims[0], 8)\n        self.assertEqual(intFieldTypeDims[1], 10)\n        self.assertTrue('base' in intFieldType)\n        intFieldTypeBase = intFieldType['base']\n        self.assertEqual(intFieldTypeBase['class'], 'H5T_INTEGER')\n        self.assertEqual(intFieldTypeBase['base'], 'H5T_STD_I32BE')\n        \n        strField = fields[1]\n        self.assertEqual(strField['name'], 'string')\n        self.assertTrue('type' in strField)\n        strFieldType = strField['type']\n        self.assertEqual(strFieldType['class'], 'H5T_ARRAY')\n        strFieldTypeDims = strFieldType['dims']\n        self.assertEqual(len(strFieldTypeDims), 2)\n        self.assertEqual(strFieldTypeDims[0], 3)\n        
self.assertEqual(strFieldTypeDims[1], 4)\n        self.assertTrue('base' in strFieldType)\n        strFieldTypeBase = strFieldType['base']\n        \n        self.assertEqual(strFieldTypeBase['class'], 'H5T_STRING')\n        self.assertEqual(strFieldTypeBase['charSet'], 'H5T_CSET_ASCII')\n        self.assertEqual(strFieldTypeBase['length'], 32)\n        # todo - fix, cf https://github.com/HDFGroup/h5serv/issues/20\n        #self.assertEqual(strFieldTypeBase['strPad'], 'H5T_STR_SPACEPAD')\n        \n    def testGetCommitted(self):\n        domain = 'committed_type.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 4)  \n        typeItem = rspJson['type']  # returns '/datatypes/<uuid>'\n        npos = typeItem.rfind('/')\n        type_uuid = typeItem[(npos+1):]\n        self.assertTrue(helper.validateId(type_uuid))\n        \n    def testGetArray(self):\n        domain = 'array_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 4)   \n        typeItem = rspJson['type']\n        \n        self.assertEqual(typeItem['class'], 'H5T_ARRAY')\n        self.assertTrue('dims' in typeItem)\n        typeShape = typeItem['dims']\n        self.assertEqual(len(typeShape), 2)\n        self.assertEqual(typeShape[0], 3)\n        self.assertEqual(typeShape[1], 5)\n        typeItemBase = typeItem['base']\n        self.assertEqual(typeItemBase['class'], 'H5T_INTEGER')\n        self.assertEqual(typeItemBase['base'], 'H5T_STD_I64LE')\n        \n    def testGetFixedString(self):\n        domain = 'fixed_string_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 4)   \n        typeItem = rspJson['type']\n        \n        self.assertEqual(typeItem['class'], 'H5T_STRING')\n        self.assertEqual(typeItem['charSet'], 'H5T_CSET_ASCII')\n        self.assertEqual(typeItem['length'], 7)\n        self.assertEqual(typeItem['strPad'], 'H5T_STR_NULLPAD')\n        \n    def testGetEnum(self):\n        domain = 'enum_dset.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 2)\n        self.assertEqual(shape['dims'][0], 4)  \n        self.assertEqual(shape['dims'][1], 7)\n        typeItem = rspJson['type']\n        \n        self.assertEqual(typeItem['class'], 'H5T_ENUM')\n        typeBase = typeItem['base']\n        self.assertEqual(typeBase['class'], 'H5T_INTEGER')\n        self.assertEqual(typeBase['base'], 'H5T_STD_I16BE')\n        self.assertTrue('mapping' in typeItem)\n        mapping = typeItem['mapping']\n        
self.assertEqual(len(mapping), 4)\n        self.assertEqual(mapping['SOLID'], 0)\n        self.assertEqual(mapping['LIQUID'], 1)\n        self.assertEqual(mapping['GAS'], 2)\n        self.assertEqual(mapping['PLASMA'], 3)\n\n    def testGetBool(self):\n        domain = 'bool_dset.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 4)  \n        typeItem = rspJson['type']\n        \n        self.assertEqual(typeItem['class'], 'H5T_ENUM')\n        typeBase = typeItem['base']\n        self.assertEqual(typeBase['class'], 'H5T_INTEGER')\n        self.assertEqual(typeBase['base'], 'H5T_STD_I8LE')\n        self.assertTrue('mapping' in typeItem)\n        mapping = typeItem['mapping']\n        self.assertEqual(len(mapping), 2)\n        self.assertEqual(mapping['FALSE'], 0)\n        self.assertEqual(mapping['TRUE'], 1)\n         \n        \n    def testGetVlen(self):\n        domain = 'vlen_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 2)   \n        typeItem = rspJson['type']\n        \n        self.assertEqual(typeItem['class'], 'H5T_VLEN')\n        typeBase = typeItem['base']\n        self.assertEqual(typeBase['class'], 'H5T_INTEGER')\n        self.assertEqual(typeBase['base'], 'H5T_STD_I32LE')\n        \n    def testGetOpaque(self):\n        domain = 'opaque_dset.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 4)    \n        typeItem = rspJson['type']\n        \n        self.assertEqual(typeItem['class'], 'H5T_OPAQUE')\n        self.assertEqual(typeItem['size'], 7)\n        \n    def testGetObjReference(self):\n        domain = 'objref_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 2)   \n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['class'], 'H5T_REFERENCE')\n        self.assertEqual(typeItem['base'], 'H5T_STD_REF_OBJ')\n        \n    def testGetNullObjReference(self):\n        domain = 'null_objref_dset.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 1)   \n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['class'], 'H5T_REFERENCE')\n        self.assertEqual(typeItem['base'], 'H5T_STD_REF_OBJ')\n        \n    def testGetRegionReference(self):\n        domain = 'regionref_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 2)  \n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['class'], 'H5T_REFERENCE')\n        self.assertEqual(typeItem['base'], 'H5T_STD_REF_DSETREG')\n        \n    def testGetFillValueProp(self):\n        domain = 'fillvalue.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid  \n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('creationProperties' in rspJson)\n        creationProps = rspJson['creationProperties']\n        self.assertTrue('fillValue' in creationProps)   \n        self.assertEqual(creationProps['fillValue'], 42)\n        \n    def testGetCreationProps(self):\n        \n        domain = 'dset_gzip.' 
+ config.get('domain')  \n        headers = {'host': domain}\n        root_uuid = helper.getRootUUID(domain)\n        \n        # dset1\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid  \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('creationProperties' in rspJson)\n        creationProps = rspJson['creationProperties']\n        self.assertTrue('fillTime' in creationProps)\n        self.assertEqual(creationProps['fillTime'], 'H5D_FILL_TIME_ALLOC')\n        self.assertTrue('layout' in creationProps)\n        layout = creationProps['layout']\n        self.assertEqual(layout['class'], 'H5D_CHUNKED')\n        self.assertEqual(layout['dims'], [100, 100])\n        self.assertTrue('allocTime' in creationProps)\n        self.assertEqual(creationProps['allocTime'], 'H5D_ALLOC_TIME_INCR')\n        self.assertTrue('filters' in creationProps)\n        filters = creationProps['filters']\n        self.assertEqual(len(filters), 1)\n        deflate_filter = filters[0]\n        self.assertTrue('id' in deflate_filter)\n        self.assertEqual(deflate_filter['id'], 1)\n        self.assertTrue('class' in deflate_filter)\n        self.assertEqual(deflate_filter['class'], 'H5Z_FILTER_DEFLATE')\n        self.assertTrue('level' in deflate_filter)\n        self.assertEqual(deflate_filter['level'], 9)\n        self.assertTrue('name' in deflate_filter)\n        self.assertEqual(deflate_filter['name'], 'deflate')\n        \n        # dset2\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset2') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid  \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('creationProperties' in rspJson)\n        creationProps = 
rspJson['creationProperties']\n        self.assertTrue('fillTime' in creationProps)\n        self.assertEqual(creationProps['fillTime'], 'H5D_FILL_TIME_ALLOC')\n        self.assertTrue('layout' in creationProps)\n        layout = creationProps['layout']\n        self.assertEqual(layout['class'], 'H5D_CHUNKED')\n        self.assertEqual(layout['dims'], [100, 100])\n        self.assertTrue('allocTime' in creationProps)\n        self.assertEqual(creationProps['allocTime'], 'H5D_ALLOC_TIME_INCR')\n        self.assertTrue('filters' in creationProps)\n        \n        filters = creationProps['filters']\n        self.assertEqual(len(filters), 2)\n        \n        shuffle_filter = filters[0]\n        self.assertTrue('id' in shuffle_filter)\n        self.assertEqual(shuffle_filter['id'], 2)\n        self.assertTrue('class' in shuffle_filter)\n        self.assertEqual(shuffle_filter['class'], 'H5Z_FILTER_SHUFFLE')\n        self.assertTrue('name' in shuffle_filter)\n        self.assertEqual(shuffle_filter['name'], 'shuffle')\n        \n        deflate_filter = filters[1]\n        self.assertTrue('id' in deflate_filter)\n        self.assertEqual(deflate_filter['id'], 1)\n        self.assertTrue('class' in deflate_filter)\n        self.assertEqual(deflate_filter['class'], 'H5Z_FILTER_DEFLATE')\n        self.assertTrue('level' in deflate_filter)\n        self.assertEqual(deflate_filter['level'], 9)\n        self.assertTrue('name' in deflate_filter)\n        self.assertEqual(deflate_filter['name'], 'deflate')\n        \n        # dset3\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset3') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid  \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('creationProperties' in rspJson)\n        creationProps = rspJson['creationProperties']\n        self.assertTrue('fillTime' in creationProps)\n        
self.assertEqual(creationProps['fillTime'], 'H5D_FILL_TIME_ALLOC')\n        self.assertTrue('layout' in creationProps)\n        layout = creationProps['layout']\n        self.assertEqual(layout['class'], 'H5D_CHUNKED')\n        self.assertEqual(layout['dims'], [100, 100])\n        self.assertTrue('allocTime' in creationProps)\n        self.assertEqual(creationProps['allocTime'], 'H5D_ALLOC_TIME_INCR')\n        self.assertTrue('filters' in creationProps)\n        \n        filters = creationProps['filters']\n        self.assertEqual(len(filters), 3)\n        \n        fletcher_filter = filters[0]\n        self.assertTrue('id' in fletcher_filter)\n        self.assertEqual(fletcher_filter['id'], 3)\n        self.assertTrue('class' in fletcher_filter)\n        self.assertEqual(fletcher_filter['class'], 'H5Z_FILTER_FLETCHER32')\n        self.assertTrue('name' in fletcher_filter)\n        self.assertEqual(fletcher_filter['name'], 'fletcher32')\n        \n        shuffle_filter = filters[1]\n        self.assertTrue('id' in shuffle_filter)\n        self.assertEqual(shuffle_filter['id'], 2)\n        self.assertTrue('class' in shuffle_filter)\n        self.assertEqual(shuffle_filter['class'], 'H5Z_FILTER_SHUFFLE')\n        self.assertTrue('name' in shuffle_filter)\n        self.assertEqual(shuffle_filter['name'], 'shuffle')\n        \n        deflate_filter = filters[2]\n        self.assertTrue('id' in deflate_filter)\n        self.assertEqual(deflate_filter['id'], 1)\n        self.assertTrue('class' in deflate_filter)\n        self.assertEqual(deflate_filter['class'], 'H5Z_FILTER_DEFLATE')\n        self.assertTrue('level' in deflate_filter)\n        self.assertEqual(deflate_filter['level'], 9)\n        self.assertTrue('name' in deflate_filter)\n        self.assertEqual(deflate_filter['name'], 'deflate')\n        \n    def testGetFilters(self):\n        #\n        # map of filter properties we expect to get\n        #\n        filter_props = {\"h5ex_d_checksum\": [{'id': 
3},], \n            \"h5ex_d_gzip\":  [{'id': 1, 'level': 9},], \n            \"h5ex_d_nbit\":  [{'id': 5},],\n            \"h5ex_d_shuffle\":  [{'id': 2}, {'id': 1, 'level': 9}], \n            \"h5ex_d_sofloat\":  [{'id': 6},], \n            \"h5ex_d_soint\":  [{'id': 6, 'scaleType': 'H5Z_SO_INT'},],\n            \"h5ex_d_unlimgzip\":  [{'id': 1, 'level': 9},] }\n            \n            \n        for domain_val in filter_props.keys():\n            domain = domain_val + '.' + config.get('domain')  \n            #print \"domain\", domain_val\n            headers = {'host': domain}\n            root_uuid = helper.getRootUUID(domain)\n        \n            dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n            req = helper.getEndpoint() + \"/datasets/\" + dset_uuid  \n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertTrue('creationProperties' in rspJson)\n            creationProps = rspJson['creationProperties']\n            self.assertTrue('filters' in creationProps)\n            filters = creationProps['filters']\n            num_filters = len(filters)\n            ref_vals = filter_props[domain_val]\n            # check we got the expected number of filters\n            self.assertEqual(num_filters, len(ref_vals))\n            \n            for i in range(num_filters):\n                #print \"filter:\", i\n                filter_prop = filters[i]\n                #print \"filter_prop\", filter_prop\n                ref_val = ref_vals[i]\n                # check filter property values are correct\n                for k in ref_val.keys():\n                    #print \"checking key:\", k\n                    self.assertTrue(k in filter_prop)\n                    self.assertEqual(filter_prop[k], ref_val[k])\n             \n        \n        \n    def testPost(self):\n        domain = 'newdset.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        payload = {'type': 'H5T_IEEE_F32LE', 'shape': 10}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # verify we can read the dataset back\n        req = self.endpoint + \"/datasets/\" + dset_uuid\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        \n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        # verify type class is float\n        rsp_type = rspJson['type']\n        self.assertEqual(rsp_type['class'], 'H5T_FLOAT')\n        \n    def testPostScalar(self):\n        domain = 'newscalar.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        str_type = { 'charSet':   'H5T_CSET_ASCII', \n                     'class':  'H5T_STRING', \n                     'strPad': 'H5T_STR_NULLPAD', \n                     'length': 40}\n        payload = {'type': str_type}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # verify the dataspace is scalar\n        req = self.endpoint + \"/datasets/\" + dset_uuid\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SCALAR')\n        # verify type class is string\n        rsp_type = rspJson['type']\n        self.assertEqual(rsp_type['class'], 'H5T_STRING')\n    \n        \n    def testPostNullSpace(self):\n        domain = 'newnullspace.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        payload = {'type': 'H5T_IEEE_F32LE', 'shape': 'H5S_NULL'}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # verify the dataspace is has a null dataspace\n        req = self.endpoint + \"/datasets/\" + dset_uuid\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_NULL')\n        # verify type class is string\n        type_json = rspJson['type']\n        self.assertEqual(type_json['class'], 'H5T_FLOAT')\n    \n         \n    def testPostZeroDim(self):\n        domain = 'new0d.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        payload = {'type': 'H5T_STD_I32LE', 'shape': (1,)}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # verify the dataspace is one dimensional/one-element\n        req = self.endpoint + \"/datasets/\" + dset_uuid\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        \n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 1)  \n        \n        \n    def testPostTypes(self):\n        domain = 'datatypes.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        root_uuid = helper.getRootUUID(domain)\n        \n        # todo - add 8-bit types to list:\n        #  'H5T_STD_I8',   'H5T_STD_U8'\n        # See https://github.com/HDFGroup/h5serv/issues/51\n        \n        datatypes = ( 'H5T_STD_I16',  'H5T_STD_U16',    \n                      'H5T_STD_I32',  'H5T_STD_U32',   \n                      'H5T_STD_I64',  'H5T_STD_U64',  \n                      'H5T_IEEE_F32', 'H5T_IEEE_F64' )\n                      \n        endianess = ('LE', 'BE')\n        \n        for datatype in datatypes:\n            for endian in endianess:  \n                payload = {'type': datatype+endian, 'shape': 10}\n                req = self.endpoint + \"/datasets\"\n                rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n                self.assertEqual(rsp.status_code, 201)  # create dataset\n                rspJson = json.loads(rsp.text)\n                dset_uuid = rspJson['id']\n                self.assertTrue(helper.validateId(dset_uuid))\n         \n                # link new dataset using the type name\n                name = datatype + endian\n                req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n                payload = {\"id\": dset_uuid}\n                headers = {'host': domain}\n                rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n                self.assertEqual(rsp.status_code, 201)\n            \n                # Do a GET on the datasets we just created\n                req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n                rsp = requests.get(req, headers=headers)\n                self.assertEqual(rsp.status_code, 200)\n                rspJson = json.loads(rsp.text)\n                # verify 
the type\n                self.assertTrue('type' in rspJson)\n                type_json = rspJson['type']\n                self.assertTrue(type_json['class'] in ('H5T_FLOAT', 'H5T_INTEGER'))\n                self.assertEqual(type_json['base'], datatype+endian)      \n                     \n    def testPostCompoundType(self):\n        domain = 'compound.datasettest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        root_uuid = helper.getRootUUID(domain)\n        \n        fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, \n                    {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}) \n        datatype = {'class': 'H5T_COMPOUND', 'fields': fields }\n        payload = {'type': datatype, 'shape': 10}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link the new dataset \n        name = \"dset\"\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n    def testPostCompoundArrayVLenStringType(self):\n        domain = 'compound_array_vlen_string.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        root_uuid = helper.getRootUUID(domain)\n        \n        fields = [ {\"type\": {\"class\": \"H5T_INTEGER\", \"base\": \"H5T_STD_U64BE\"}, \"name\": \"VALUE1\"}, \n                   {\"type\": {\"class\": \"H5T_FLOAT\", \"base\": \"H5T_IEEE_F64BE\"}, \"name\": \"VALUE2\"}, \n                   {\"type\": {\"class\": \"H5T_ARRAY\", \"dims\": [8], \"base\": \n                         {\"class\": \"H5T_STRING\", \"charSet\": \"H5T_CSET_ASCII\",\n                          \"strPad\": \"H5T_STR_NULLTERM\", \"length\": \"H5T_VARIABLE\"}}, \"name\": \"VALUE3\"}]\n                           \n        datatype = {'class': 'H5T_COMPOUND', 'fields': fields }\n        payload = {'type': datatype, 'shape': 5}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link the new dataset \n        name = \"dset\"\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n    def testPostCompoundFillValue(self):\n        domain = 'compound_fillvalue.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        root_uuid = helper.getRootUUID(domain)\n        \n        fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, \n                    {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}) \n        datatype = {'class': 'H5T_COMPOUND', 'fields': fields }\n        payload = {'type': datatype, 'shape': 10}\n        payload['creationProperties'] = {'fillValue': [42, 3.12] }\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link the new dataset \n        name = \"dset\"\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n    def testPostCompoundArray(self):\n        domain = 'compound_array.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        root_uuid = helper.getRootUUID(domain)\n        \n        fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, \n                    {'name': '2x2', 'type': { 'class': 'H5T_ARRAY', 'dims': [2,2],\n                    'base': 'H5T_IEEE_F32LE'} }) \n        datatype = {'class': 'H5T_COMPOUND', 'fields': fields }\n        \n         \n        payload = {'type': datatype, 'shape': 2 }\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link the new dataset \n        name = \"dset\"\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n    def testPostCommittedType(self):\n        domain = 'committedtype.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        # create the datatype\n        payload = {'type': 'H5T_IEEE_F32LE'}\n        req = self.endpoint + \"/datatypes\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create datatype\n        rspJson = json.loads(rsp.text)\n        dtype_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dtype_uuid))\n         \n        # link new datatype as 'dtype1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dtype1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {'id': dtype_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # create the dataset\n        payload = {'type': dtype_uuid, 'shape': [10, 10]}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n\n        # Verify the dataset type\n        req = self.endpoint + \"/datasets/\" + dset_uuid + \"/type\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n     
   rspJson = json.loads(rsp.text)\n        self.assertTrue(\"type\" in rspJson)\n        rsp_type = rspJson[\"type\"]\n        self.assertEqual(rsp_type[\"base\"], 'H5T_IEEE_F32LE')\n        self.assertEqual(rsp_type[\"class\"], 'H5T_FLOAT')\n\n\n    def testPostObjReference(self):\n        domain = 'objref.datasettest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n\n        datatype = {'class': 'H5T_REFERENCE', 'base': 'H5T_STD_REF_OBJ' }\n        payload = {'type': datatype, 'shape': (1,)}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n\n        \n    def testPostArray(self):\n        domain = 'newarraydset.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        datatype = {'class': 'H5T_ARRAY', 'base': 'H5T_STD_I64LE', 'dims': (3, 5) }\n        \n        payload = {'type': datatype, 'shape': 10}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        \n    def testPostResizable(self):\n        domain = 'resizabledset.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        payload = {'type': 'H5T_IEEE_F32LE', 'shape': 10, 'maxdims': 20}\n        payload['creationProperties'] = {'fillValue': 3.12 }\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'resizable'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'resizable'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # verify type and shape\n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        type_json = rspJson['type']\n        self.assertEqual(type_json['class'], 'H5T_FLOAT')\n        self.assertEqual(type_json['base'], 'H5T_IEEE_F32LE')\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        \n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 10)  \n        self.assertTrue('maxdims' in shape)\n        self.assertEqual(shape['maxdims'][0], 20)\n        \n        # create a datataset with unlimited dimension\n        payload = {'type': 'H5T_IEEE_F32LE', 'shape': 10, 'maxdims': 0}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, 
data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'resizable'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'unlimited'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)     \n        \n    def testPostInvalidType(self):\n        domain = 'tall.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        payload = {'type': 'badtype', 'shape': 10}\n        headers = {'host': domain}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n    def testPostInvalidShape(self):\n        domain = 'tall.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        payload = {'type': 'H5T_STD_I32LE', 'shape': -5}\n        headers = {'host': domain}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n    def testPostNoBody(self):\n        domain = 'tall.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        headers = {'host': domain}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n    def testPostWithLink(self):\n        domain = 'newdsetwithlink.datasettest.' 
+ config.get('domain')\n        \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        root_uuid = helper.getRootUUID(domain)\n        \n        type_vstr = {\"charSet\": \"H5T_CSET_ASCII\", \n            \"class\": \"H5T_STRING\", \n            \"strPad\": \"H5T_STR_NULLTERM\", \n            \"length\": \"H5T_VARIABLE\" } \n        payload = {'type': type_vstr, 'shape': 10,\n             'link': {'id': root_uuid, 'name': 'linked_dset'} }\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n        \n    def testPostCreationProps(self):\n        domain = 'newdset_creationprops.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        creation_props = { 'allocTime': 'H5D_ALLOC_TIME_INCR',\n                           'fillTime': 'H5D_FILL_TIME_NEVER',\n                           'layout': {'class': 'H5D_CHUNKED', 'dims': [10, 10] }}\n        payload = {'type': 'H5T_IEEE_F32LE', 'shape': (100, 100), 'creationProperties': creation_props }\n                                                           \n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # read back the dataset and verify the creation props are returned\n        req = self.endpoint + \"/datasets/\" + dset_uuid\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('creationProperties' in rspJson)\n        creationProps = rspJson['creationProperties']\n        \n        self.assertTrue('allocTime' in creationProps)\n        self.assertEqual(creationProps['allocTime'], 'H5D_ALLOC_TIME_INCR')\n        self.assertTrue('fillTime' in creationProps)\n        self.assertEqual(creationProps['fillTime'], 'H5D_FILL_TIME_NEVER')\n        
self.assertTrue('layout' in creationProps)\n        layout = creationProps['layout']\n        self.assertTrue('class' in layout)\n        self.assertEqual(layout['class'], 'H5D_CHUNKED')\n        self.assertTrue('dims' in layout)\n        self.assertEqual(layout['dims'], [10, 10])\n        self.assertEqual(len(creationProps.keys()), 3)  # just return what we set\n    \n    def testInvalidCreationProps(self):\n        domain = 'newdset_badcreationprops.datasettest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        creation_props = { 'layout': {'class': 'H5D_CHUNKED', 'dims': [200, 200] }}\n        payload = {'type': 'H5T_IEEE_F32LE', 'shape': (100, 100), 'creationProperties': creation_props }\n                                                           \n        req = self.endpoint + \"/datasets\"\n        # should fail because the chunk dimension is larger than the dataset dimensions\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # bad request\n                 \n        \n    def testPostDeflateFilter(self):\n        domain = 'newdset_gzip.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        filters = [ { 'id': 1, 'level': 9 }, ]  # deflate filter (gzip)\n        creation_props = { 'layout': {'class': 'H5D_CHUNKED', 'dims': [100, 100] }, 'filters': filters }  \n        payload = {'type': 'H5T_IEEE_F32LE', 'shape': (1000, 1000), 'creationProperties': creation_props }\n                                                           \n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # read back the dataset and verify the creation props are returned\n        req = self.endpoint + \"/datasets/\" + dset_uuid\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('creationProperties' in rspJson)\n        creationProps = rspJson['creationProperties']\n        \n        self.assertTrue('filters' in creationProps)\n        filters = creationProps['filters']\n        self.assertEqual(len(filters), 1)\n        filter_prop = filters[0]\n        self.assertTrue('id' in filter_prop)\n        self.assertEqual(filter_prop['id'], 1)\n        self.assertTrue('class' in 
filter_prop)\n        self.assertEqual(filter_prop['class'], 'H5Z_FILTER_DEFLATE')\n        self.assertTrue('level' in filter_prop)\n        self.assertEqual(filter_prop['level'], 9)\n        self.assertTrue('layout' in creationProps)\n        # should see chunks returned, even though it was specified in creation\n        layout = creationProps['layout']\n        self.assertTrue('class' in layout)\n        self.assertEqual(layout['class'], 'H5D_CHUNKED')\n        self.assertTrue('dims' in layout)\n         \n        \n    def testPostLZFFilter(self):\n        domain = 'newdset_lzf.datasettest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        filters = [ { 'id': 32000}, ]  # LZF filter \n        creation_props = { 'filters': filters }  \n        payload = {'type': 'H5T_IEEE_F32LE', 'shape': (1000, 1000), 'creationProperties': creation_props }\n                                                           \n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # read back the dataset and verify the creation props are returned\n        req = self.endpoint + \"/datasets/\" + dset_uuid\n        rsp = requests.get(req, 
headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('creationProperties' in rspJson)\n        creationProps = rspJson['creationProperties']\n        \n        \n        self.assertTrue('filters' in creationProps)\n        filters = creationProps['filters']\n        self.assertEqual(len(filters), 1)\n        filter_prop = filters[0]\n        self.assertTrue('id' in filter_prop)\n        self.assertEqual(filter_prop['id'], 32000)\n        self.assertTrue('class' in filter_prop)\n        self.assertEqual(filter_prop['class'], 'H5Z_FILTER_LZF')\n        self.assertTrue('level' not in filter_prop)\n        \n        self.assertTrue('layout' in creationProps)\n        # should see chunks returned, even though it was specified in creation\n        layout = creationProps['layout']\n        self.assertTrue('class' in layout)\n        self.assertEqual(layout['class'], 'H5D_CHUNKED')\n        self.assertTrue('dims' in layout)\n        \n    def testPostSZIPFilter(self):\n        domain = 'newdset_szip.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        filters = [ { 'id': 4, 'bitsPerPixel': 8, 'coding': 'H5_SZIP_EC_OPTION_MASK',\n            'pixelsPerBlock': 32, 'pixelsPerScanline': 100}, ]  # SZIP filter \n        creation_props = { 'layout': {'class': 'H5D_CHUNKED', 'dims': (100, 100) }, 'filters': filters }  \n        payload = {'type': 'H5T_IEEE_F32LE', 'shape': (1000, 1000), 'creationProperties': creation_props }\n                                                              \n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n         \n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link new dataset as 'dset1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dset1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # read back the dataset and verify the creation props are returned\n        req = self.endpoint + \"/datasets/\" + dset_uuid\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('creationProperties' in rspJson)\n        creationProps = rspJson['creationProperties']\n        \n        \n        self.assertTrue('filters' in creationProps)\n        filters = creationProps['filters']\n        self.assertEqual(len(filters), 1)\n        filter_prop = filters[0]\n        
self.assertTrue('id' in filter_prop)\n        self.assertEqual(filter_prop['id'], 4)\n        self.assertTrue('class' in filter_prop)\n        self.assertEqual(filter_prop['class'], 'H5Z_FILTER_SZIP')\n        self.assertTrue('level' not in filter_prop)\n        self.assertTrue('bitsPerPixel' in filter_prop)\n        self.assertEqual(filter_prop['bitsPerPixel'], 8)\n        self.assertTrue('coding' in filter_prop)\n        self.assertEqual(filter_prop['coding'], 'H5_SZIP_EC_OPTION_MASK')\n        \n        self.assertTrue('layout' in creationProps)\n        # should see chunks returned, even though it was specified in creation\n        layout = creationProps['layout']\n        self.assertTrue('class' in layout)\n        self.assertEqual(layout['class'], 'H5D_CHUNKED')\n        self.assertTrue('dims' in layout)\n              \n       \n    def testDelete(self):\n        domain = 'tall_dset112_deleted.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        g1_uuid = helper.getUUID(domain, root_uuid, 'g1')\n        self.assertTrue(helper.validateId(g1_uuid))\n        g11_uuid = helper.getUUID(domain, g1_uuid, 'g1.1')\n        self.assertTrue(helper.validateId(g11_uuid))\n        d112_uuid = helper.getUUID(domain, g11_uuid, 'dset1.1.2')\n        self.assertTrue(helper.validateId(d112_uuid))\n        req = self.endpoint + \"/datasets/\" + d112_uuid\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # verify that a GET on the dataset fails\n        req = helper.getEndpoint() + \"/datasets/\" + d112_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 410)\n    \n        \n    def testDeleteRootChild(self):\n        # test delete with a dset that is child of root\n        domain = 'scalar_1d_deleted.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '1d')\n        self.assertTrue(helper.validateId(dset_uuid))\n        req = self.endpoint + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        # verify that a GET on the dataset succeeds\n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # now delete the dataset\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # verify that a GET on the dataset fails\n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 410)\n        \n    def testDeleteAnonymous(self):\n        # test delete works with anonymous dataset\n        domain = 'tall_dset22_deleted.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        g2_uuid = helper.getUUID(domain, root_uuid, 'g2')\n        self.assertTrue(helper.validateId(g2_uuid))\n        d22_uuid = helper.getUUID(domain, g2_uuid, 'dset2.2')\n        self.assertTrue(helper.validateId(d22_uuid))\n        \n        # delete g2, that will make dataset anonymous\n        req = self.endpoint + \"/groups/\" + g2_uuid\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # verify that a GET on the dataset succeeds still\n        req = helper.getEndpoint() + \"/datasets/\" + d22_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # delete dataset...\n        req = self.endpoint + \"/datasets/\" + d22_uuid\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # verify that a GET on the dataset fails\n        req = helper.getEndpoint() + \"/datasets/\" + d22_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 410)\n        \n    def testDeleteBadUUID(self):\n        domain = 'tall_dset112_deleted.' + config.get('domain')    \n        req = self.endpoint + \"/datasets/dff53814-2906-11e4-9f76-3c15c2da029e\"\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        \n    def testGetCollection(self):\n        for domain_name in ('tall', 'tall_ro'):\n            domain = domain_name + '.' 
+ config.get('domain')    \n            req = self.endpoint + \"/datasets\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            datasetIds = rspJson[\"datasets\"]\n            \n            self.assertEqual(len(datasetIds), 4)\n            for uuid in datasetIds:\n                self.assertTrue(helper.validateId(uuid))\n                \n    def testGetCollectionBatch(self):\n        domain = 'dset1k.' + config.get('domain')   \n        req = self.endpoint + \"/datasets\" \n        headers = {'host': domain}\n        params = {'Limit': 50 }\n        uuids = set()\n        # get ids in 20 batches of 50 links each\n        last_uuid = None\n        for batchno in range(20):\n            if last_uuid:\n                params['Marker'] = last_uuid\n            rsp = requests.get(req, headers=headers, params=params)\n            self.assertEqual(rsp.status_code, 200)\n            if rsp.status_code != 200:\n                break\n            rspJson = json.loads(rsp.text)\n            dsetIds = rspJson['datasets']\n            self.assertEqual(len(dsetIds) <= 50, True)\n            for dsetId in dsetIds:\n                uuids.add(dsetId)\n                last_uuid = dsetId\n            if len(dsetIds) == 0:\n                break\n        self.assertEqual(len(uuids), 1000)  # should get 1000 unique uuid's \n        \nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "test/integ/datasettypetest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\n\nclass DatasetTypeTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(DatasetTypeTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))    \n       \n    def testGet(self):\n        domain = 'tall.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        g2_uuid = helper.getUUID(domain, root_uuid, 'g2')\n        dset21_uuid = helper.getUUID(domain, g2_uuid, 'dset2.1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset21_uuid + '/type'\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['base'], 'H5T_IEEE_F32BE') \n        self.assertEqual(typeItem['class'], 'H5T_FLOAT') \n     \n        \n    def testGetScalar(self):\n        domain = 'scalar.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '0d') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + '/type'\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['base'], 'H5T_STD_I32LE') \n        self.assertEqual(typeItem['class'], 'H5T_INTEGER') \n       \n    def testGetCompound(self):\n        domain = 'compound.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + '/type'\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['class'], 'H5T_COMPOUND')\n        self.assertTrue('fields' in typeItem)\n        fields = typeItem['fields']\n        self.assertEqual(len(fields), 5)\n        timeField = fields[1]\n        self.assertEqual(timeField['name'], 'time')\n        self.assertTrue('type' in timeField)\n        timeFieldType = timeField['type']\n        self.assertEqual(timeFieldType['class'], 'H5T_STRING')\n        self.assertEqual(timeFieldType['charSet'], 'H5T_CSET_ASCII')\n        self.assertEqual(timeFieldType['length'], 6)\n        self.assertEqual(timeFieldType['strPad'], 'H5T_STR_NULLPAD')\n    \n        \nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "test/integ/datatypetest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\n\nclass DatatypeTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(DatatypeTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))    \n       \n    def testGet(self):\n        domain = 'namedtype.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        dtype_uuid = helper.getUUID(domain, root_uuid, 'dtype_simple')\n        self.assertTrue(helper.validateId(dtype_uuid))\n         \n        req = helper.getEndpoint() + \"/datatypes/\" + dtype_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertEqual(rspJson['id'], dtype_uuid)\n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['class'], 'H5T_FLOAT')\n        self.assertEqual(typeItem['base'], 'H5T_IEEE_F32LE')\n        self.assertEqual(rspJson['attributeCount'], 1)\n       \n    def testGetCompound(self):\n        domain = 'namedtype.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        dtype_uuid = helper.getUUID(domain, root_uuid, 'dtype_compound') \n        req = helper.getEndpoint() + \"/datatypes/\" + dtype_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        typeItem = rspJson['type']\n        self.assertEqual(typeItem['class'], 'H5T_COMPOUND')\n        self.assertTrue('fields' in typeItem)\n        fields = typeItem['fields']\n        self.assertEqual(len(fields), 2)\n        tempField = fields[0]\n        self.assertEqual(tempField['name'], 'temp')\n        tempFieldType = tempField['type']\n        self.assertEqual(tempFieldType['class'], 'H5T_INTEGER')\n        self.assertEqual(tempFieldType['base'], 'H5T_STD_I32LE')\n        pressureField = fields[1]\n        self.assertEqual(pressureField['name'], 'pressure')\n        pressureFieldType = pressureField['type']\n        self.assertEqual(pressureFieldType['class'], 'H5T_FLOAT')\n        self.assertEqual(pressureFieldType['base'], 'H5T_IEEE_F32LE')\n    \n    def testPost(self):\n        domain = 
'newdtype.datatypetest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        payload = {'type': 'H5T_IEEE_F32LE'}\n        req = self.endpoint + \"/datatypes\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create datatype\n        rspJson = json.loads(rsp.text)\n        dtype_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dtype_uuid))\n         \n        # link new dataset as 'dtype1'\n        root_uuid = helper.getRootUUID(domain)\n        name = 'dtype1'\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {'id': dtype_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n    def testPostWithLink(self):\n        # test PUT_root\n        domain = 'newlinkedtype.datatypetest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)  \n        \n        root_uuid = helper.getRootUUID(domain)\n        \n        payload = { \n            'type': 'H5T_IEEE_F64LE', \n            'link': {'id': root_uuid, 'name': 'linked_dtype'} \n        }\n         \n        req = self.endpoint + \"/datatypes\"\n        headers = {'host': domain}\n        # create a new group\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201) \n        rspJson = json.loads(rsp.text)\n        self.assertEqual(rspJson[\"attributeCount\"], 0)\n        self.assertTrue(helper.validateId(rspJson[\"id\"]) ) \n        \n        \n    def testPostTypes(self):\n        domain = 'datatypes.datatypetest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        root_uuid = helper.getRootUUID(domain)\n        \n        # list of types supported\n        datatypes = ( 'H5T_STD_I8LE',   'H5T_STD_U8LE',  \n                      'H5T_STD_I16LE',  'H5T_STD_U16LE',    \n                      'H5T_STD_I32LE',  'H5T_STD_U32LE',   \n                      'H5T_STD_I64LE',  'H5T_STD_U64LE',  \n                      'H5T_IEEE_F32LE', 'H5T_IEEE_F64LE' )\n                     \n               #todo: check on  'vlen_bytes', 'vlen_unicode'\n        for datatype in datatypes:  \n            payload = {'type': datatype}\n            req = self.endpoint + \"/datatypes\"\n            rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n            self.assertEqual(rsp.status_code, 201)  # create datatypes\n            rspJson = json.loads(rsp.text)\n            dtype_uuid = rspJson['id']\n            
self.assertTrue(helper.validateId(dtype_uuid))\n         \n            # link new datatype using the type name\n            name = datatype\n            req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n            payload = {\"id\": dtype_uuid}\n            headers = {'host': domain}\n            rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n            self.assertEqual(rsp.status_code, 201)\n            \n    def testPostCompoundType(self):\n        domain = 'compound.datatypetest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        root_uuid = helper.getRootUUID(domain)\n        fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, \n                    {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}) \n        datatype = {'class': 'H5T_COMPOUND', 'fields': fields }\n        payload = {'type': datatype}\n        req = self.endpoint + \"/datatypes\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create datatype\n        rspJson = json.loads(rsp.text)\n        dtype_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dtype_uuid))\n         \n        # link the new datatype \n        name = \"dtype_compound\"\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dtype_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n    \n    \"\"\"\n    This test fails due to h5py issue #540: https://github.com/h5py/h5py/issues/540\n    Commenting out for now.\n        \n    def testPostVLenStringType(self):\n        domain = 'vlenstr.datatypetest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        root_uuid = helper.getRootUUID(domain)\n        data_type = { 'charSet':   'H5T_CSET_ASCII', \n                     'class':  'H5T_STRING', \n                     'strPad': 'H5T_STR_NULLPAD', \n                     'length': 'H5T_VARIABLE'}\n                     \n        payload = {'type': data_type}\n        req = self.endpoint + \"/datatypes\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create datatype\n        rspJson = json.loads(rsp.text)\n        dtype_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dtype_uuid))\n         \n        # link the new datatype \n        name = \"dtype_vlenstr\"\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dtype_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n    \"\"\"\n         \n    def testPostInvalidType(self):\n        domain = 'tall.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        payload = {'type': 'badtype'}\n        headers = {'host': domain}\n        req = self.endpoint + \"/datatypes\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n    def testDelete(self):\n        domain = 'namedtype_deleted.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        dtype_uuid = helper.getUUID(domain, root_uuid, 'dtype_simple')\n        self.assertTrue(helper.validateId(dtype_uuid))\n         \n        req = helper.getEndpoint() + \"/datatypes/\" + dtype_uuid\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # verify that it's gone\n        req = helper.getEndpoint() + \"/datatypes/\" + dtype_uuid\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 410)\n        \n    def testGetCollection(self):\n        domain = 'namedtype.' + config.get('domain') \n        req = self.endpoint + \"/datatypes\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        datatypeIds = rspJson[\"datatypes\"]\n            \n        self.assertEqual(len(datatypeIds), 2)\n        for uuid in datatypeIds:\n            self.assertTrue(helper.validateId(uuid))\n            \n    def testGetCollectionBatch(self):\n        domain = 'type1k.' 
+ config.get('domain')   \n        req = self.endpoint + \"/datatypes\" \n        headers = {'host': domain}\n        params = {'Limit': 50 }\n        uuids = set()\n        # get ids in 20 batches of 50 links each\n        last_uuid = None\n        for batchno in range(20):\n            if last_uuid:\n                params['Marker'] = last_uuid\n            rsp = requests.get(req, headers=headers, params=params)\n            self.assertEqual(rsp.status_code, 200)\n            if rsp.status_code != 200:\n                break\n            rspJson = json.loads(rsp.text)\n            typeIds = rspJson[\"datatypes\"]\n            self.assertEqual(len(typeIds) <= 50, True)\n            for typeId in typeIds:\n                uuids.add(typeId)\n                last_uuid = typeId\n            if len(typeIds) == 0:\n                break\n        self.assertEqual(len(uuids), 1000)  # should get 1000 unique uuid's \n        \n    \n     \nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "test/integ/dirtest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\nimport os\nimport time\nfrom shutil import copyfile\nfrom tornado.escape import url_escape\n\nclass DirTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(DirTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))\n        self.user1 = {'username':'test_user1', 'password':'test'}\n    \n        \n    def testGetToc(self):  \n        domain = config.get('domain')  \n        if domain.startswith('test.'):\n            domain = domain[5:]\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n        root_uuid = rspJson['root']\n        req = self.endpoint + \"/groups/\" + root_uuid \n        rsp = requests.get(req, headers=headers)\n        
self.assertEqual(rsp.status_code, 200)\n        # get top-level links\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"links\" in rspJson)\n        links = rspJson[\"links\"]\n         \n        home_dir = config.get(\"home_dir\")\n        for item in links:\n            if item['title'] == home_dir:\n                self.assertTrue(False)  # should not see home dir from root toc\n\n        # get group uuid that maps to \"test\" sub-directory\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/test\" \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"link\" in rspJson)\n        link = rspJson['link']\n        group_uuid = link['id']\n\n        # verify we see \"tall\" under links\n        name = \"tall\"\n        req = self.endpoint + \"/groups/\" + group_uuid + \"/links/\" + name \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"link\" in rspJson)\n        link = rspJson['link']\n        self.assertEqual(link['class'], 'H5L_TYPE_EXTERNAL')\n        self.assertEqual(link['title'], name)\n        self.assertEqual(link['h5path'], '/')\n        self.assertEqual(link['h5domain'], name + '.test.' 
+ domain)\n\n        # verify that \"filename with space\" shows up properly url encoded\n        name = \"filename with space\"\n        name_escaped = url_escape(name)\n        req = self.endpoint + \"/groups/\" + group_uuid + \"/links/\" + name \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"link\" in rspJson)\n        link = rspJson['link']\n        self.assertEqual(link['class'], 'H5L_TYPE_EXTERNAL')\n        self.assertEqual(link['title'], name)\n        self.assertEqual(link['h5path'], '/')\n        self.assertEqual(link['h5domain'], name_escaped + '.test.' + domain)\n         \n        # get all the links in the test group\n        req = self.endpoint + \"/groups/\" + group_uuid + \"/links\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"links\" in rspJson)\n        links = rspJson[\"links\"]\n        tall_link = None         # normal link\n        file_space_link = None   # link that contains a space  \n        file_dot_link = None     # link that contains a dot\n        for link in links:\n            self.assertTrue(\"title\" in link)\n            self.assertTrue(\"class\" in link)\n            if link['title'] == \"tall\":\n                tall_link = link\n            elif link['title'] == \"filename with space\":\n                file_space_link = link              \n            elif link['title'] == \"tall.dots.need.to.be.encoded\":\n                file_dot_link = link\n\n        self.assertTrue(tall_link is not None)\n        name = \"tall\"\n        link = tall_link\n        self.assertEqual(link['class'], 'H5L_TYPE_EXTERNAL')\n        self.assertEqual(link['title'], name)\n        self.assertEqual(link['h5path'], '/')\n        self.assertEqual(link['h5domain'], name + '.test.' 
+ domain)\n        href = \"groups/\" + group_uuid + \"/links/\" + name\n        self.assertTrue(link['href'].endswith(href))\n\n        self.assertTrue(file_space_link is not None)\n        name = \"filename with space\"\n        link = file_space_link\n        self.assertEqual(link['class'], 'H5L_TYPE_EXTERNAL')\n        self.assertEqual(link['title'], name)\n        self.assertEqual(link['h5path'], '/')\n        self.assertEqual(link['h5domain'], url_escape(name) + '.test.' + domain)\n        href = \"groups/\" + group_uuid + \"/links/\" + url_escape(name)\n        self.assertTrue(link['href'].endswith(href))\n\n        self.assertTrue(file_dot_link is not None)\n        name = \"tall.dots.need.to.be.encoded\"\n        name_encoded = name.replace('.', '%2E')\n    \n        link = file_dot_link\n        self.assertEqual(link['class'], 'H5L_TYPE_EXTERNAL')\n        self.assertEqual(link['title'], name)\n        self.assertEqual(link['h5path'], '/')\n        self.assertEqual(link['h5domain'], name_encoded + '.test.' + domain)\n        href = \"groups/\" + group_uuid + \"/links/\" + name\n        self.assertTrue(link['href'].endswith(href))\n\n         \n\n        \n    def testGetUserToc(self):  \n        domain = config.get('domain')\n        if domain.startswith('test.'):\n            domain = domain[5:]  # backup over the test part\n      \n        home_dir = config.get(\"home_dir\")\n        user_domain = self.user1['username'] + '.' + home_dir  + '.' 
+ domain\n        req = self.endpoint + \"/\"\n        headers = {'host': user_domain}\n        # this should get the users .toc file\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n        root_uuid = rspJson['root']\n        req = self.endpoint + \"/groups/\" + root_uuid \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        if os.name == 'nt':\n            return # symbolic links used below are not supported on Windows\n            \n        # get link to 'public' folder\n        req =  self.endpoint + \"/groups/\" + root_uuid + \"/links/public\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"link\" in rspJson)\n        link_json = rspJson[\"link\"]\n        self.assertEqual(link_json[\"class\"], \"H5L_TYPE_EXTERNAL\")\n        self.assertEqual(link_json[\"title\"], \"public\")\n        self.assertEqual(link_json[\"h5domain\"], domain) \n        self.assertEqual(link_json[\"h5path\"], \"/public\") \n        \n        # get link to 'tall' file\n        req =  self.endpoint + \"/groups/\" + root_uuid + \"/links/tall\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"link\" in rspJson)\n        link_json = rspJson[\"link\"]\n        self.assertEqual(link_json[\"class\"], \"H5L_TYPE_EXTERNAL\")\n        self.assertEqual(link_json[\"title\"], \"tall\")\n        self.assertEqual(link_json[\"h5domain\"], \"tall.\" + user_domain)\n\n        \n    def testPutUserDomain(self):  \n        domain = config.get('domain')\n        home_dir = 
config.get(\"home_dir\")\n        if domain.startswith('test.'):\n            domain = domain[5:]  # backup over the test part\n      \n        user_domain = self.user1['username'] + '.' + home_dir + '.' + domain\n        \n        # this should get the users .toc file\n        headers = {'host': user_domain }\n        req = self.endpoint + '/'\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n        toc_root_uuid = rspJson['root']\n        req = self.endpoint + \"/groups/\" + toc_root_uuid \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n       \n        # verify that \"myfile\" doesn't exist yet\n        user_file = \"myfile.\" + user_domain\n        req = self.endpoint + \"/\"\n        headers = {'host': user_file}\n        #verify that the domain doesn't exist yet\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        \n        # do a put on \"myfile\"\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # now the domain should exist  \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # go back to users toc and get \"/myfile\" link\n        headers = {'host': user_domain }\n        req = self.endpoint + \"/groups/\" + toc_root_uuid + \"/links/myfile\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        link = rspJson['link']\n         \n        self.assertTrue('class' in link)\n        self.assertEqual(link['class'], \"H5L_TYPE_EXTERNAL\")\n        self.assertTrue('h5path' in link)\n        self.assertEqual(link['h5path'], \"/\")\n        self.assertTrue('h5domain' in link)\n        
self.assertEqual(link['h5domain'], \"myfile.\" + user_domain)\n                \n        \n    def testDeleteUserDomain(self):  \n        domain = config.get('domain')\n        home_dir = config.get(\"home_dir\")\n        if domain.startswith('test.'):\n            domain = domain[5:]  # backup over the test part\n      \n        user_domain = self.user1['username'] + '.' + home_dir + '.' + domain\n        \n        # this should get the users .toc file\n        headers = {'host': user_domain }\n        req = self.endpoint + '/'\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n        toc_root_uuid = rspJson['root']\n        req = self.endpoint + \"/groups/\" + toc_root_uuid \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # \"tall_deleteme\" should be a link\n        req = req + \"/link/tall_deleteme\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # And we should be able to query directly\n        user_file = \"tall_deleteme.\" + user_domain\n        req = self.endpoint + \"/\"\n        headers = {'host': user_file}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # Delete \"tall_deleteme\"  \n        user_file = \"tall_deleteme.\" + user_domain\n        req = self.endpoint + \"/\"\n        headers = {'host': user_file}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # link in user TOC should be removed\n        req = self.endpoint + \"/groups/\" + toc_root_uuid +  \"/link/tall_deleteme\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        \n         \n        \n    def testNoHostHeader(self):\n        
req = self.endpoint + \"/\"\n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n                   \n    \n    def testPutDomain(self): \n        domain_name = \"dirtest_putdomain\"\n        \n        # get toc root uuid\n        req = self.endpoint + \"/\"\n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n        toc_root_uuid = rspJson['root']\n        \n        # get toc 'test' group uuid\n        req = self.endpoint + \"/groups/\" + toc_root_uuid \n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        req = self.endpoint + \"/groups/\" + toc_root_uuid + \"/links/test\" \n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"link\" in rspJson)\n        link = rspJson['link']\n        test_group_uuid = link['id']\n        \n                 \n        # verify that the domain name is not present\n        req = self.endpoint + \"/groups/\" + test_group_uuid + \"/links/\" + domain_name \n        rsp = requests.get(req)\n        self.assertTrue(rsp.status_code in (404, 410))\n        \n        # create a new domain\n        domain = domain_name + '.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        rspJson = json.loads(rsp.text)\n         \n        # external link should exist now\n        req = self.endpoint + \"/groups/\" + test_group_uuid + \"/links/\" + domain_name \n         \n        rsp = requests.get(req)\n       \n        self.assertEqual(rsp.status_code, 200)\n         \n        # delete the domain\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # external link should be gone\n        req = self.endpoint + \"/groups/\" + test_group_uuid + \"/links/\" + domain_name \n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 410)  \n        \n    def testWatchdog(self):\n        domain_name = \"dirtest_watchdogadd\"\n        \n        # get toc root uuid\n        req = self.endpoint + \"/\"\n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n        toc_root_uuid = rspJson['root']\n        \n        # get toc 'test' group uuid\n        req = self.endpoint + \"/groups/\" + toc_root_uuid \n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        req = self.endpoint + \"/groups/\" + toc_root_uuid + \"/links/test\" \n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"link\" in rspJson)\n        link = rspJson['link']\n        test_group_uuid = link['id']\n                  \n        # verify that the domain name is not present\n        req = self.endpoint + \"/groups/\" + test_group_uuid + \"/links/\" + domain_name \n        rsp = requests.get(req)\n        
self.assertTrue(rsp.status_code in (404, 410))\n        \n        # copy file to target domain\n        src_file = \"../test_files/tall.h5\"\n        des_file = \"../../data/test/\" + domain_name + \".h5\"\n        copyfile(src_file, des_file)\n        \n        # sleep to give the watchdog time to update the toc\n        time.sleep(2)  \n         \n        # external link should exist now\n        req = self.endpoint + \"/groups/\" + test_group_uuid + \"/links/\" + domain_name \n         \n        rsp = requests.get(req)\n       \n        self.assertEqual(rsp.status_code, 200)\n              \n        # delete the file\n        os.remove(des_file)\n        # sleep to give the watchdog time to update the toc\n        time.sleep(2)\n          \n        # external link should be gone\n        req = self.endpoint + \"/groups/\" + test_group_uuid + \"/links/\" + domain_name \n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 410)    \n          \n    def testDeleteToc(self):\n        #test DELETE toc\n        req = self.endpoint + \"/\"\n        rsp = requests.delete(req)\n        self.assertEqual(rsp.status_code, 403)\n        \n    def testPutToc(self):\n        # test PUT toc\n        req = self.endpoint + \"/\"\n        rsp = requests.put(req)\n        # status code be Forbiden or Conflict based on TOC file\n        # existing or not\n        self.assertTrue(rsp.status_code in (403, 409))\n        \n    def testDeleteRoot(self):\n        req = self.endpoint + \"/\"\n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n        root_uuid = rspJson['root']\n        req = self.endpoint + \"/groups/\" + root_uuid \n        rsp = requests.delete(req)\n        self.assertEqual(rsp.status_code, 403)\n        \n    def testPutLink(self):\n        req = self.endpoint + \"/\"\n        rsp = requests.get(req)\n        
self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n        root_uuid = rspJson['root']\n        name = 'dirtest.testPutLink'\n        req = helper.getEndpoint() + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"h5path\": \"somewhere\"}\n        # verify softlink does not exist\n        rsp = requests.get(req, data=json.dumps(payload))\n        self.assertEqual(rsp.status_code, 404)\n        # make request\n        rsp = requests.put(req, data=json.dumps(payload))\n        self.assertEqual(rsp.status_code, 403)\n        \n    def testDeleteLink(self):\n        req = self.endpoint + \"/\"\n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n        root_uuid = rspJson['root']\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/test\" \n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        rsp = requests.delete(req)  # try to delete the link\n        self.assertEqual(rsp.status_code, 403)\n        \n        \nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "test/integ/grouptest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\n\nclass GroupTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(GroupTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))\n       \n    def testGet(self):\n        for domain_name in ('tall', 'tall_ro'):\n            domain = domain_name + '.' 
+ config.get('domain')    \n            req = self.endpoint + \"/\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            rootUUID = rspJson[\"root\"]\n            self.assertTrue(helper.validateId(rootUUID))\n        \n            req = self.endpoint + \"/groups/\" + rootUUID\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rspJson[\"linkCount\"], 2)\n            self.assertEqual(rspJson[\"attributeCount\"], 2)\n            self.assertFalse(\"links\" in rspJson)\n            \n            \n    def testGetInvalidUUID(self):\n        for domain_name in ('tall', 'tall_ro'):\n            domain = domain_name + '.' + config.get('domain')    \n            req = self.endpoint + \"/\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            import uuid\n            bad_uuid = str(uuid.uuid1())    \n            req = self.endpoint + \"/groups/\" + bad_uuid\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 404)\n             \n            \n    def testGetWithHostQuery(self):\n        for domain_name in ('tall',):\n            domain = domain_name + '.' 
+ config.get('domain')    \n            req = self.endpoint + \"/?host=\" + domain\n            rsp = requests.get(req)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            rootUUID = rspJson[\"root\"]\n            self.assertTrue(helper.validateId(rootUUID))\n        \n            req = self.endpoint + \"/groups/\" + rootUUID + \"?host=\" + domain\n            rsp = requests.get(req)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(rspJson[\"linkCount\"], 2)\n            self.assertEqual(rspJson[\"attributeCount\"], 2)\n            self.assertEqual(rsp.status_code, 200)\n\n    def testGetWithLinks(self):\n        for domain_name in ('tall',):\n            domain = domain_name + '.' + config.get('domain')    \n            req = self.endpoint + \"/\"\n            headers = {'host': domain}\n            \n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            rootUUID = rspJson[\"root\"]\n            self.assertTrue(helper.validateId(rootUUID))\n        \n            req = self.endpoint + \"/groups/\" + rootUUID\n            params = {'include_links': True }\n            rsp = requests.get(req, params=params, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rspJson[\"linkCount\"], 2)\n            self.assertEqual(rspJson[\"attributeCount\"], 2)\n            self.assertTrue(\"links\" in rspJson)\n            links = rspJson[\"links\"]\n            self.assertEqual(len(links), 2)\n            for link in links:\n                self.assertTrue(\"collection\" in link)\n                self.assertTrue(link[\"collection\"], 'groups')\n                self.assertTrue(\"class\" in link)\n        
        self.assertEqual(link[\"class\"], 'H5L_TYPE_HARD')\n                self.assertTrue(\"id\" in link)\n                self.assertTrue(\"title\" in link)\n                self.assertTrue(\"href\" in link)\n            \n          \n    def testPost(self):\n        # test PUT_root\n        domain = 'testGroupPost.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)   \n        req = self.endpoint + \"/groups\"\n        headers = {'host': domain}\n        # create a new group\n        rsp = requests.post(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) \n        rspJson = json.loads(rsp.text)\n        self.assertEqual(rspJson[\"linkCount\"], 0)\n        self.assertEqual(rspJson[\"attributeCount\"], 0)\n        self.assertTrue(helper.validateId(rspJson[\"id\"]) ) \n       \n        \n    def testPostWithLink(self):\n        # test PUT_root\n        domain = 'testGroupPostWithLink.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)  \n        \n        root_uuid = helper.getRootUUID(domain)\n        \n        payload = { 'link': {'id': root_uuid, 'name': 'linked_dset'} }\n         \n        req = self.endpoint + \"/groups\"\n        headers = {'host': domain}\n        # create a new group\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201) \n        rspJson = json.loads(rsp.text)\n        self.assertEqual(rspJson[\"linkCount\"], 0)\n        self.assertEqual(rspJson[\"attributeCount\"], 0)\n        self.assertTrue(helper.validateId(rspJson[\"id\"]) ) \n        # try repeat post with same link - should return 409\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 409) \n       \n    def testBadPost(self):\n        domain = 'tall.' + config.get('domain')    \n        req = self.endpoint + \"/groups/dff53814-2906-11e4-9f76-3c15c2da029e\"\n        headers = {'host': domain}\n        rsp = requests.post(req, headers=headers)\n        # post is not allowed to provide uri, so should fail\n        self.assertEqual(rsp.status_code, 405) \n        \n    def testDelete(self):\n        domain = 'tall_g2_deleted.' 
+ config.get('domain')  \n        rootUUID = helper.getRootUUID(domain)\n        helper.validateId(rootUUID)\n        g2UUID = helper.getUUID(domain, rootUUID, 'g2')\n        self.assertTrue(helper.validateId(g2UUID))\n        req = self.endpoint + \"/groups/\" + g2UUID\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"hrefs\" in rspJson)\n        # do a GET, should return 410 (GONE)\n        req = self.endpoint + \"/groups/\" + g2UUID\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 410)\n        \n    def testDeleteAnonymous(self):\n        # Test deleting anonymous (not linked) group\n        domain = 'testGroupDelete.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)   \n        req = self.endpoint + \"/groups\"\n        headers = {'host': domain}\n        # create a new group\n        rsp = requests.post(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) \n        rspJson = json.loads(rsp.text)\n        uuid = rspJson[\"id\"]\n        self.assertTrue(helper.validateId(uuid))   \n        \n        req = self.endpoint + \"/groups/\" + uuid\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # do a GET, should return 410 (GONE)\n        req = self.endpoint + \"/groups/\" + uuid\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 410)\n            \n        \n    def testDeleteBadUUID(self):\n        domain = 'tall_g2_deleted.' 
+ config.get('domain')    \n        req = self.endpoint + \"/groups/dff53814-2906-11e4-9f76-3c15c2da029e\"\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        \n    def testDeleteRoot(self):\n        domain = 'tall.' + config.get('domain')    \n        headers = {'host': domain}\n        req = self.endpoint + \"/\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        rootUUID = rspJson[\"root\"]\n        req = self.endpoint + \"/groups/\" + rootUUID\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)\n        \n    def testGetCollection(self):\n        for domain_name in ('tall', 'tall_ro'):\n            domain = domain_name + '.' + config.get('domain')    \n            req = self.endpoint + \"/groups\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            groupIds = rspJson[\"groups\"]\n            \n            self.assertEqual(len(groupIds), 5)\n            for uuid in groupIds:\n                self.assertTrue(helper.validateId(uuid))\n                \n    def testGetCollectionBatch(self):\n        domain = 'group1k.' 
+ config.get('domain')   \n        req = self.endpoint + \"/groups\" \n        headers = {'host': domain}\n        params = {'Limit': 50 }\n        uuids = set()\n        # get ids in 20 batches of 50 links each\n        last_uuid = None\n        for batchno in range(20):\n            if last_uuid:\n                params['Marker'] = last_uuid\n            rsp = requests.get(req, headers=headers, params=params)\n            self.assertEqual(rsp.status_code, 200)\n            if rsp.status_code != 200:\n                break\n            rspJson = json.loads(rsp.text)\n            groupIds = rspJson['groups']\n            self.assertEqual(len(groupIds) <= 50, True)\n            for groupId in groupIds:\n                uuids.add(groupId)\n                last_uuid = groupId\n            if len(groupIds) == 0:\n                break\n        self.assertEqual(len(uuids), 1000)  # should get 1000 unique uuid's    \n    \n       \nif __name__ == '__main__':\n    unittest.main()"
  },
  {
    "path": "test/integ/helper.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport six\nif six.PY3:\n    unicode = str\n    \nimport requests\nimport config\nimport unittest\nimport json\nimport base64\n\n\n\n\"\"\"\n    Helper function - get endpoint we'll send http requests to \n\"\"\" \ndef getEndpoint():\n    endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))\n    return endpoint\n\n\"\"\"\nHelper function - return true if the parameter looks like a UUID\n\"\"\"\ndef validateId(id):\n    if type(id) != str and type(id) != unicode: \n        # should be a string\n        return False\n    if len(id) != 36:\n        # id's returned by uuid.uuid1() are always 36 chars long\n        return False\n    return True\n   \n\"\"\"\nHelper function - get auth string\n\"\"\"\ndef getAuthString(user, password):   \n    auth_string = user + ':' + password\n    auth_string = auth_string.encode('utf-8')\n    auth_string = base64.b64encode(auth_string)\n    auth_string = b\"Basic \" + auth_string\n    return auth_string\n        \n\n\"\"\"\nHelper function - get root uuid  \n\"\"\" \ndef getRootUUID(domain, user=None, password=None):\n    req = 
getEndpoint() + \"/\"\n    headers = {'host': domain}\n    if user is not None:\n        # if user is supplied, add the auth header\n        headers['Authorization'] = getAuthString(user, password)\n    rsp = requests.get(req, headers=headers)\n    rootUUID = None\n    if rsp.status_code == 200:\n        rspJson = json.loads(rsp.text)\n        rootUUID = rspJson[\"root\"]\n    return rootUUID\n           \n\"\"\"\nHelper function - get uuid given parent group uuid and link name\n\"\"\"\ndef getUUID(domain, parentUuid, name):\n    if type(name) != str or len(name) == 0:\n        return None\n    req = getEndpoint() + \"/groups/\" + parentUuid + \"/links/\" + name\n    headers = {'host': domain}\n    rsp = requests.get(req, headers=headers)\n    tgtUuid = None\n    if rsp.status_code == 200:\n        rspJson = json.loads(rsp.text)\n        target = rspJson['link']\n        if target['class'] != 'H5L_TYPE_HARD':\n            # soft/external links\n            return None\n        tgtUuid = target['id']\n\n    return tgtUuid\n\"\"\"\nHelper function - get uuid for a given path\n\"\"\"\ndef getUUIDByPath(domain, path, user=None, password=None):\n    if path[0] != '/':\n        raise KeyError(\"only abs paths\") # only abs paths\n            \n    parent_uuid = getRootUUID(domain, user=user, password=password)  \n     \n    if path == '/':\n        return parent_uuid\n            \n    headers = {'host': domain}\n    if user is not None:\n        # if user is supplied, add the auth header\n        headers['Authorization'] = getAuthString(user, password)\n            \n    # make a fake tgt_json to represent 'link' to root group\n    tgt_json = {'collection': \"groups\", 'class': \"H5L_TYPE_HARD\", 'id': parent_uuid }\n    tgt_uuid = None\n            \n    names = path.split('/')         \n                      \n    for name in names:\n        if not name: \n            continue\n        if parent_uuid is None:\n            raise KeyError(\"not found\")\n                
 \n        req = getEndpoint() + \"/groups/\" + parent_uuid + \"/links/\" + name\n        rsp = requests.get(req, headers=headers)\n        if rsp.status_code != 200:\n            raise KeyError(\"not found\")\n        rsp_json = json.loads(rsp.text)    \n        tgt_json = rsp_json['link']\n            \n        if tgt_json['class'] == 'H5L_TYPE_HARD':\n            #print \"hard link, collection:\", link_json['collection']\n            if tgt_json['collection'] == 'groups':\n                parent_uuid = tgt_json['id']    \n            else:\n                parent_uuid = None\n            tgt_uuid = tgt_json['id']\n        else:\n            raise KeyError(\"non-hard link\")\n    return tgt_uuid\n            \n\"\"\"\nHelper function - create an anonymous group\n\"\"\"    \ndef createGroup(domain):\n    # test PUT_root\n    req = getEndpoint() + \"/groups\"\n    headers = {'host': domain}\n    # create a new group\n    rsp = requests.post(req, headers=headers)\n    if rsp.status_code != 201:\n        return None\n    rspJson = json.loads(rsp.text)\n    id = rspJson[\"id\"] \n    return id\n        \n\"\"\"\nHelper function - link given object/name\n\"\"\"\ndef linkObject(domain, objUuid, name, parentUuid=None):\n    if parentUuid == None:\n        # use root as parent if not specified\n        parentUuid = getRootUUID(domain)\n    req = getEndpoint() + \"/groups/\" + parentUuid + \"/links/\" + name \n    payload = {\"id\": objUuid}\n    headers = {'host': domain}\n    rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n    if rsp.status_code == 201:\n        return True\n    else: \n        return False\n        \n\"\"\"\nHelper function - return data from dataset\n\"\"\"\ndef readDataset(domain, dsetUuid):\n    req = getEndpoint() + \"/datasets/\" + dsetUuid + \"/value\"\n    headers = {'host': domain}\n    rsp = requests.get(req, headers=headers)\n    if rsp.status_code != 200:\n        return None\n    rspJson = json.loads(rsp.text)\n    data = 
rspJson['value']\n    return data\n    \n\"\"\"\nHelper function - convert name to url-friendly format\n  Replaces all non-alphanumeric characters with '%<ascii_hex>'\n\"\"\"\ndef nameEncode(name):\n    out = []\n    for ch in name:\n        if ch.isalnum():\n            out.append(ch)\n        elif ch == ' ':\n            out.append('+')\n        else:\n            hex = format(ord(ch), '02X')\n            out.append('%' + hex)\n    return ''.join(out)\n         \n    \n            \n"
  },
  {
    "path": "test/integ/linktest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport unittest\nimport helper\nimport json\nimport logging\n\nclass LinkTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(LinkTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))\n       \n    def testGetHard(self):\n        logging.info(\"LinkTest.testGetHard\")\n        for domain_name in ('tall', 'tall_ro'):\n            g1_uuid = None\n            domain = domain_name + '.' 
+ config.get('domain')   \n            root_uuid = helper.getRootUUID(domain)     \n            req = self.endpoint + \"/groups/\" + root_uuid + \"/links/g1\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertTrue(\"created\" in rspJson)\n            self.assertTrue(\"lastModified\" in rspJson)\n            self.assertTrue('link' in rspJson)\n            target = rspJson['link']\n            self.assertTrue(helper.validateId(target['id']))\n            self.assertEqual(target['class'], 'H5L_TYPE_HARD')\n            self.assertEqual(target['title'], 'g1')\n            self.assertEqual(target['collection'], 'groups')\n            \n    def testGetMising(self):\n        logging.info(\"LinkTest.testGetMissing\")\n        for domain_name in ('tall', 'tall_ro'):\n            g1_uuid = None\n            domain = domain_name + '.' + config.get('domain')   \n            root_uuid = helper.getRootUUID(domain)     \n            req = self.endpoint + \"/groups/\" + root_uuid + \"/links/not_a_link\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 404)\n             \n            \n    def testGetSoft(self):\n        logging.info(\"LinkTest.testGetSoft\")\n        for domain_name in ('tall', 'tall_ro'):\n            g1_uuid = None\n            domain = domain_name + '.' 
+ config.get('domain')   \n            root_uuid = helper.getRootUUID(domain)\n            g1_uuid = helper.getUUID(domain, root_uuid, 'g1')\n            g12_uuid = helper.getUUID(domain, g1_uuid, 'g1.2')\n            g121_uuid = helper.getUUID(domain, g12_uuid, 'g1.2.1')\n            req = self.endpoint + \"/groups/\" + g121_uuid + \"/links/slink\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertTrue(\"created\" in rspJson)\n            self.assertTrue(\"lastModified\" in rspJson)\n            target = rspJson['link']\n            self.assertEqual(target['h5path'], 'somevalue')\n            self.assertEqual(target['class'], 'H5L_TYPE_SOFT')\n            self.assertEqual(target['title'], 'slink')\n            self.assertTrue('collection' not in target)\n            \n    def testGetExternal(self):\n        logging.info(\"LinkTest.testGetExternal\")\n        for domain_name in ('tall', 'tall_ro'):\n            g1_uuid = None\n            domain = domain_name + '.' + config.get('domain')   \n            root_uuid = helper.getRootUUID(domain)\n            g1_uuid = helper.getUUID(domain, root_uuid, 'g1')\n            g12_uuid = helper.getUUID(domain, g1_uuid, 'g1.2')\n            req = self.endpoint + \"/groups/\" + g12_uuid + \"/links/extlink\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertTrue(\"created\" in rspJson)\n            self.assertTrue(\"lastModified\" in rspJson)\n            target = rspJson['link'] \n            # self.assertEqual(target, \"http://somefile/#h5path(somepath)\")\n            expected_h5domain = 'somefile' + '.' 
+ config.get('domain') \n            self.assertEqual(target['class'], 'H5L_TYPE_EXTERNAL')\n            self.assertEqual(target['h5domain'], expected_h5domain)\n            self.assertEqual(target['h5path'], 'somepath')\n            self.assertEqual(target['title'], 'extlink')\n            self.assertTrue('collection' not in target)\n\n    def testGetExternalLinkDomain(self):\n        logging.info(\"LinkTest.testExternalLinkDomain\")\n        domain = \"link_example.\" + config.get('domain')   \n        root_uuid = helper.getRootUUID(domain)\n        headers = {'host': domain}\n        # test file has seven external links in the root group that should all\n        # map to the same external file in either the same directory or a\n        # a subdirectory \"subdir\"\n        expected_curdir = \"tall.\" + config.get('domain') \n        expected_subdir = \"tall.subdir.\" + config.get('domain')  \n        expected_h5path = \"g1/g1.1\"\n        for i in range(7):\n            external_link_name = \"external_link\" + str(i+1)\n            req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + external_link_name\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertTrue(\"created\" in rspJson)\n            self.assertTrue(\"lastModified\" in rspJson)\n            self.assertTrue(\"link\" in rspJson)\n            target = rspJson['link'] \n            self.assertTrue(\"h5path\" in target)\n            self.assertEqual(target[\"h5path\"], expected_h5path)\n            self.assertTrue(\"h5domain\" in target)\n            h5domain = target[\"h5domain\"]\n            if i < 4:\n                # these links map to a file in the same directory\n                self.assertEqual(h5domain, expected_curdir)\n            else:\n                # these map to a file in \"subdir\"\n                self.assertEqual(h5domain, expected_subdir)\n\n        # get 
all the links in one request and very the external filename\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links\"  \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"links\" in rspJson)\n        links = rspJson[\"links\"]\n        external_link_count = 0\n        for link in links:\n            if link[\"class\"] != 'H5L_TYPE_EXTERNAL':\n                continue\n            \n            self.assertTrue(\"title\" in link)\n            title = link[\"title\"]\n            if not title.startswith(\"external_link\"):\n                continue\n            external_link_count += 1\n            link_no = int(title[-1])\n            self.assertTrue(\"h5path\" in link)\n            self.assertEqual(link[\"h5path\"], expected_h5path)\n            self.assertTrue(\"h5domain\" in link)\n            if link_no < 5:\n                self.assertEqual(link[\"h5domain\"], expected_curdir)\n            else:\n                self.assertEqual(link[\"h5domain\"], expected_subdir)\n \n\n\n            \n    def testGetUDLink(self):\n        logging.info(\"LinkTest.testGetUDLink\")\n        domain_name = 'tall_with_udlink'    \n        domain = domain_name + '.' 
+ config.get('domain')   \n        root_uuid = helper.getRootUUID(domain)\n        g2_uuid = helper.getUUID(domain, root_uuid, 'g2')\n        req = self.endpoint + \"/groups/\" + g2_uuid + \"/links/udlink\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"created\" in rspJson)\n        self.assertTrue(\"lastModified\" in rspJson)\n        target = rspJson['link']\n        self.assertEqual(target['class'], 'H5L_TYPE_USER_DEFINED')\n        self.assertEqual(target['title'], 'udlink')\n        \n    def testGetLinks(self):\n        logging.info(\"LinkTest.testGetLinks\")\n        for domain_name in ('tall', 'tall_ro'):\n            g1_uuid = None\n            domain = domain_name + '.' + config.get('domain')   \n            root_uuid = helper.getRootUUID(domain) \n            g1_uuid = helper.getUUID(domain, root_uuid, 'g1')\n            g12_uuid = helper.getUUID(domain, g1_uuid, 'g1.2')    \n            req = self.endpoint + \"/groups/\" + g12_uuid + \"/links\"\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertTrue(\"links\" in rspJson)\n            links = rspJson[\"links\"]\n            self.assertEqual(len(links), 2)\n            for link in links:\n                self.assertTrue(\"title\" in link)\n                self.assertTrue(\"class\" in link)\n                \n    \n    def testGetBatch(self):\n        logging.info(\"LinkTest.testGetBatch\")\n        domain = 'group1k.' 
+ config.get('domain')   \n        root_uuid = helper.getRootUUID(domain)     \n        req = helper.getEndpoint() + \"/groups/\" + root_uuid + \"/links\"\n        headers = {'host': domain}\n        params = {'Limit': 50 }\n        names = set()\n        # get links in 20 batches of 50 links each\n        lastName = None\n        for batchno in range(20):\n            if lastName:\n                params['Marker'] = lastName\n            rsp = requests.get(req, headers=headers, params=params)\n            self.assertEqual(rsp.status_code, 200)\n            if rsp.status_code != 200:\n                break\n            rspJson = json.loads(rsp.text)\n            links = rspJson['links']\n            self.assertEqual(len(links) <= 50, True)\n            for link in links:\n                lastName = link['title']\n                names.add(lastName)\n            if len(links) == 0:\n                break\n        self.assertEqual(len(names), 1000)  # should get 1000 unique links\n    \n    \n    #Fix - This needs to be made more efficient - when deleting links, the code now\n    # searches all objects to see if the linked target needs to be made anonymous or not.\n    # idea: keep back pointers for all links?\n    # Tracked as Issue #12 in Github\n    \"\"\"    \n    def testMoveLinks(self):\n        logging.info(\"LinkTest.testMoveLinks\")\n        domain = 'group1k_updated.' 
+ config.get('domain')   \n        root_uuid = helper.getRootUUID(domain)  \n        \n        # create a new subgroup to move others to\n        targetGroupId = helper.createGroup(domain)\n         \n           \n        req = helper.getEndpoint() + \"/groups/\" + root_uuid + \"/links\"\n        headers = {'host': domain}\n        params = {'Limit': 100 }\n        names = set()\n        # get links in batches of 100 links each\n        count = 0\n        while True:\n            print 'count:', count\n            rsp = requests.get(req, headers=headers, params=params)\n            self.assertEqual(rsp.status_code, 200)\n            if rsp.status_code != 200:\n                break\n            rspJson = json.loads(rsp.text)\n            links = rspJson['links']\n            \n            if len(links) == 0:\n                break\n            count += len(links)\n            for link in links:\n                # delete link\n                del_req = helper.getEndpoint() + \"/groups/\" + root_uuid + \"/links/\" + link['title']\n                rsp = requests.delete(del_req, headers=headers)\n                self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(count, 1000)  # should get 1000 unique links\n    \"\"\"\n        \n    def testGetBadParam(self):\n        logging.info(\"LinkTest.testGetBatchBadParam\")\n        domain = 'tall.' + config.get('domain')   \n        root_uuid = helper.getRootUUID(domain)     \n        req = helper.getEndpoint() + \"/groups/\" + root_uuid + \"/links\"\n        headers = {'host': domain}\n        params = {'Limit': 'abc' }\n        rsp = requests.get(req, headers=headers, params=params)\n        self.assertEqual(rsp.status_code, 400)\n    \n        \n    def testPut(self):\n        logging.info(\"LinkTest.testPut\")\n        domain = 'tall_updated.' 
+ config.get('domain') \n        grpId = helper.createGroup(domain)\n        rootId = helper.getRootUUID(domain)   \n        name = 'g3'\n        req = helper.getEndpoint() + \"/groups/\" + rootId + \"/links/\" + name \n        payload = {\"id\": grpId}\n        headers = {'host': domain}\n        rsp = requests.get(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 404)  # link doesn't exist\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        rsp = requests.get(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # it's there now!\n        # make a request second time (verify idempotent)\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 409)  # status - conflict, already exists\n        # now try with a different payload\n        grpId2 = helper.createGroup(domain)\n        payload[\"id\"] = grpId2\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 409)\n        \n        \n    def testPutNameWithSpaces(self):\n        logging.info(\"LinkTest.testPutNameWithSpaces\")\n        domain = 'tall_updated.' 
+ config.get('domain') \n        grpId = helper.createGroup(domain)\n        rootId = helper.getRootUUID(domain)   \n        name = 'name with spaces'\n        req = helper.getEndpoint() + \"/groups/\" + rootId + \"/links/\" + name \n        payload = {\"id\": grpId}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        # verify we can read the link back\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"link\" in rspJson)\n        link = rspJson[\"link\"]\n        self.assertTrue(\"title\" in link)\n        self.assertEqual(link[\"title\"], name)\n        self.assertTrue(\"class\" in link)\n        self.assertEqual(link[\"class\"], \"H5L_TYPE_HARD\")\n            \n        \n    def testPutBadReqId(self):\n        logging.info(\"LinkTest.testPutBadReqId\")\n        domain = 'tall_updated.' + config.get('domain') \n        grpId = helper.createGroup(domain)\n        badReqId  = 'b2771194-347f-11e4-bb67-3c15c2da029e' # doesn't exist in tall.h5\n        name = 'g3'\n        req = helper.getEndpoint() + \"/groups/\" + badReqId + \"/links/\" + name \n        payload = {\"id\": grpId}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        \n    def testPutBadLinkId(self):\n        logging.info(\"LinkTest.testPutBadLinkId\")\n        domain = 'tall_updated.' 
+ config.get('domain') \n        grpId = helper.createGroup(domain)\n        rootId = helper.getRootUUID(domain)  \n        badLinkId  = 'b2771194-347f-11e4-bb67-3c15c2da029e' # doesn't exist in tall.h5\n        name = 'badid'\n        req = helper.getEndpoint() + \"/groups/\" + rootId + \"/links/\" + name \n        payload = {\"id\": badLinkId}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        \n    def testPutNoName(self):\n        logging.info(\"LinkTest.testPutNoName\")\n        domain = 'tall_updated.' + config.get('domain') \n        grpId = helper.createGroup(domain)\n        rootId = helper.getRootUUID(domain)   \n        req = helper.getEndpoint() + \"/groups/\" + rootId + \"/links/\"  \n        payload = {\"id\": grpId}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n    def testPutBadName(self):\n        logging.info(\"LinkTest.testPutBadName\")\n        domain = 'tall_updated.' + config.get('domain') \n        grpId = helper.createGroup(domain)\n        rootId = helper.getRootUUID(domain)   \n        name = 'bad/name'  # forward slash not allowed\n        req = helper.getEndpoint() + \"/groups/\" + rootId + \"/links/\" + name \n        payload = {\"id\": grpId}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n    def testPutSoftLink(self):\n        logging.info(\"LinkTest.testPutSoftLink\")\n        domain = 'tall_updated.' 
+ config.get('domain') \n        grpId = helper.createGroup(domain)\n        rootId = helper.getRootUUID(domain)   \n        name = 'softlink'\n        req = helper.getEndpoint() + \"/groups/\" + rootId + \"/links/\" + name \n        payload = {\"h5path\": \"somewhere\"}\n        headers = {'host': domain}\n        # verify softlink does not exist\n        rsp = requests.get(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        # make request\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        # verify link is created\n        rsp = requests.get(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # verify idempotent\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 409)\n        \n    def testPutExternalLink(self):\n        logging.info(\"LinkTest.testPutExternalLink\")\n        domain = 'tall_updated.' + config.get('domain') \n        target_domain = 'external_target.' 
+ config.get('domain')  \n        target_path = '/dset1'\n        grpId = helper.createGroup(domain)\n        rootId = helper.getRootUUID(domain)   \n        name = 'extlink'\n        req = helper.getEndpoint() + \"/groups/\" + rootId + \"/links/\" + name \n        payload = {\"h5path\": target_path, \"h5domain\": target_domain}\n        headers = {'host': domain}\n        # verify extlink does not exist\n        rsp = requests.get(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        # make request\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201) \n        # verify link is created\n        rsp = requests.get(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # verify that it is an external link\n        rspJson = json.loads(rsp.text)   \n        target = rspJson['link']\n              \n        self.assertEqual(target['class'], 'H5L_TYPE_EXTERNAL')\n        self.assertEqual(target['h5domain'], target_domain)\n        self.assertEqual(target['h5path'], target_path)\n            \n          \n    def testPutExternalMissingPath(self):\n        logging.info(\"LinkTest.testPutExternalMissingPath\")\n        fakeId = \"14bfeeb8-68b1-11e4-a69a-3c15c2da029e\"\n        domain = 'tall_updated.' + config.get('domain') \n        external_domain = 'external_target.' 
+ config.get('domain') \n        grpId = helper.createGroup(domain)\n        rootId = helper.getRootUUID(domain)   \n        name = 'extlinkid'\n        req = helper.getEndpoint() + \"/groups/\" + rootId + \"/links/\" + name \n        payload = {\"h5domain\": external_domain}\n        headers = {'host': domain}\n        # verify extlink does not exist\n        rsp = requests.get(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        # make request\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)     \n        \n    def testDelete(self):\n        logging.info(\"LinkTest.testDelete\")\n        domain = 'tall_updated.' + config.get('domain') \n        grpId = helper.createGroup(domain)\n        rootId = helper.getRootUUID(domain)   \n        name = 'deleteme'\n        req = helper.getEndpoint() + \"/groups/\" + rootId + \"/links/\" + name \n        payload = {\"id\": grpId}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # now remove the link\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n        # get should fail\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 410)   \n        \n        # Group should still be accessible via uuid\n        req = self.endpoint + \"/groups/\" + grpId\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n    \n       \nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "test/integ/makeattr.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport h5py\nf = h5py.File(\"attr1k.h5\", \"w\")\n\nfor i in range(1000):\n    name = 'a{:04d}'.format(i)\n    f.attrs[name] = \"this is attribute: \" + str(i)\nf.close()\n \n\n\n\n\n"
  },
  {
    "path": "test/integ/makegroups.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport h5py\nf = h5py.File(\"group1k.h5\", \"w\")\nfor i in range(1000):\n    name = 'g{:04d}'.format(i)\n    f.create_group(name)\nf.close()\n \n\n\n\n\n"
  },
  {
    "path": "test/integ/roottest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\nimport base64\n\nclass RootTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(RootTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))\n    \n    def testGetInfo(self):\n    \n        req = self.endpoint + \"/info\"\n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('h5serv_version' in rspJson)\n            \n    def testGetDomain(self):\n        domain = 'tall.' 
+ config.get('domain')   \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        root_uuid = rspJson[\"root\"]\n        helper.validateId(root_uuid)\n         \n        # try again with query arg\n        req = self.endpoint + \"/?host=\" + domain\n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        helper.validateId(rspJson[\"root\"])\n        self.assertEqual(root_uuid, rspJson[\"root\"])\n        \n    def testGetReadOnly(self):\n        domain = 'tall_ro.' + config.get('domain')    \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        helper.validateId(rspJson[\"root\"])\n        \n    def testGetToc(self):  \n        domain = config.get('domain')  \n        if domain.startswith('test.'):\n            domain = domain[5:]\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('root' in rspJson)\n        \n    def testGetNotFound(self):\n        domain = 'doesnotexist.' 
+ config.get('domain')    \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        \n    def testWrongTopLevelDomain(self):\n        domain = \"www.baddomain.org\"    \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)  # 403 == Forbidden\n        \n    def testInvalidDomain(self):\n        # can't be just a bare top-level domain\n        domain = config.get('domain')  \n        # get top-level domain. e.g.: 'test.hdf.io' -> 'hdf.io'\n        npos = domain.find('.')\n        topdomain = domain[npos+1:] \n         \n        domain = 'two.dots..are.bad.' + config.get('domain')   \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # 400 == bad syntax\n        \n        domain = 'missingenddot' + topdomain   \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # 400 == bad syntax\n        \n        # just a dot is no good\n        domain = '.' + topdomain  \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # 400 == bad syntax\n        \n        domain =  '.dot.in.front.is.bad.' + config.get('domain')   \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # 400 == bad syntax\n        \n        domain = 'tall.dots.need.to.be.encoded.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 404)  # 404 == not found were expected\n        \n        \n    def testDomainWithSpaces(self):\n        domain = 'filename with space.' + config.get('domain')    \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        \n    def testGetSubdomain(self): \n        domain = 'zerodim.subdir.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        \n    def testPutSubdomain(self): \n        domain = 'newfile.newsubdir.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        rspJson = json.loads(rsp.text)\n        \n    def testPutSubSubdomain(self): \n        domain = 'newfile.newsubsubdir.newsubdirparent.' + config.get('domain')        \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        rspJson = json.loads(rsp.text)\n        href = (rspJson[\"hrefs\"][0])[u\"href\"]\n        self.assertEqual(href, \"http://\" + domain + \"/\")\n               \n    def testDelete(self):\n        #test DELETE_root\n        domain = 'deleteme.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        \n    def testDeleteReadonly(self):\n        #test DELETE_root\n        domain = 'readonly.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 403)\n        \n    def testDeleteNotFound(self):\n        domain = 'doesnotexist.' + config.get('domain')    \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 404)\n        \n    def testDeleteSubSubdomain(self): \n        domain = 'deleteme.subdir.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.delete(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n\n    def testPut(self):\n        # test PUT_root\n        domain = 'newfile.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        rspJson = json.loads(rsp.text)\n        for k in (\"root\", \"hrefs\", \"created\", \"lastModified\"):\n            self.assertTrue(k in rspJson)\n        # verify that putting the same domain again fails with a 409 error\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 409)\n        \n    def testGetDomainWithDot(self):\n        domain = helper.nameEncode('tall.dots.need.to.be.encoded') + '.'  
+ config.get('domain') \n        \n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        helper.validateId(rspJson[\"root\"])    \n        \n        # try using host as query argument\n        req = self.endpoint + \"/?host=\" + domain\n        rsp = requests.get(req)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        helper.validateId(rspJson[\"root\"]) \n        \n        \n    def testPutNameWithDot(self):\n        # test PUT_root\n        domain = helper.nameEncode('new.file') + '.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        rspJson = json.loads(rsp.text)\n        \nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "test/integ/setupdata.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport sys\nimport os\nimport stat\nfrom shutil import copyfile\nimport h5py\nimport numpy as np\n\nSRC = \"../test_files\"\nDES = \"../../data/test\"\n\n# files to be copied into test directory\ntestfiles = {\n    'tall.h5': ('.',  'tall_updated.h5', 'tall_ro.h5', 'tall_g2_deleted.h5', \n            'tall_dset112_deleted.h5', 'tall_dset22_deleted.h5', 'tall_acl.h5', \n            'tall_acl_delete.h5', 'tall.dots.need.to.be.encoded.h5', 'subdir/tall.h5'),\n    'tall_with_udlink.h5': ('.',),\n    'scalar.h5': ('.', 'scalar_1d_deleted.h5',),\n    'namedtype.h5': ('.', 'namedtype_deleted.h5'),\n    'resizable.h5': ('.', 'resized.h5'),\n    'notahdf5file.h5': ('.',),\n    'zerodim.h5': ('filename with space.h5', 'deleteme.h5', 'readonly.h5', 'subdir',\n    'subdir/deleteme.h5', 'subdir/subdir/deleteme.h5'),\n    'group1k.h5': ('.', 'group1k_updated.h5'),\n    'attr1k.h5': ('.',),\n    'type1k.h5': ('.',),\n    'dset1k.h5': ('.',),\n    'fillvalue.h5': ('.'),\n    'null_space_dset.h5': ('.'),\n    'compound.h5': ('.',),\n    'compound_attr.h5': ('.',),\n    'compound_array_attr.h5': 
('.',),\n    'compound_array_dset.h5': ('.',),\n    'compound_committed.h5': ('.',),\n    'arraytype.h5': ('.',),\n    'array_attr.h5': ('.',),\n    'array_dset.h5': ('.',),\n    'bitfield_attr.h5': ('.',),\n    'bitfield_dset.h5': ('.',),\n    'dim_scale.h5': ('.',),\n    'dim_scale_data.h5': ('.', 'dim_scale_updated.h5'),\n    'dset_gzip.h5': ('.',),\n    'enum_attr.h5': ('.',),\n    'enum_dset.h5': ('.',),\n    'fixed_string_attr.h5': ('.',),\n    'fixed_string_dset.h5': ('.',),\n    'h5ex_d_alloc.h5': ('.',),\n    'h5ex_d_checksum.h5': ('.',),\n    'h5ex_d_chunk.h5': ('.',),\n    'h5ex_d_compact.h5': ('.',),\n    'h5ex_d_extern.h5': ('.',),\n    'h5ex_d_fillval.h5': ('.',),\n    'h5ex_d_gzip.h5': ('.',),\n    'h5ex_d_hyper.h5': ('.',),\n    'h5ex_d_nbit.h5': ('.',),\n    'h5ex_d_rdwr.h5': ('.',),\n    'h5ex_d_shuffle.h5': ('.',),\n    'h5ex_d_sofloat.h5': ('.',),\n    'h5ex_d_soint.h5': ('.',),\n    'h5ex_d_transform.h5': ('.',),\n    'h5ex_d_unlimadd.h5': ('.',),\n    'h5ex_d_unlimgzip.h5': ('.',),\n    'h5ex_d_hyper.h5': ('.',),\n    'link_example.h5': ('.',),\n    'objref_attr.h5': ('.',),\n    'objref_dset.h5': ('.', 'objref_dset_updated.h5'),\n    'null_objref_dset.h5': ('.',),\n    'regionref_attr.h5': ('.',),\n    'regionref_dset.h5': ('.', 'regionref_dset_updated.h5'),    \n    'vlen_attr.h5': ('.',),\n    'vlen_dset.h5': ('.',),\n    'vlen_string_attr.h5': ('.',),\n    'vlen_string_dset.h5': ('.',),\n    'opaque_attr.h5': ('.',),\n    'opaque_dset.h5': ('.',),\n    'committed_type.h5': ('.',),\n    'tstr.h5': ('.',),\n    'null_space_attr.h5': ('.',),\n    'bool_dset.h5': ('.',),\n    'bool_attr.h5': ('.',)\n}\n\n# files that will get set as read-only\nread_only_files = ( 'tall_ro.h5', 'readonly.h5')\n\n\n\"\"\"\nCreate test accounts\n - add test_user1 and test_user2 if they don't exist already\n\"\"\"\n\n\ndef addTestAccount(user_id):\n    password_file = \"passwd.h5\" \n    cwd = os.getcwd()\n    src_dir = os.path.abspath(SRC)\n    
os.chdir('../../util/admin')\n    if not os.path.isfile(password_file):    \n        os.system('python makepwd_file.py')\n              \n    add_user_script = 'python update_pwd.py' \n    add_user_script += ' -f ' + password_file  \n    os.system(add_user_script + ' -a -u ' + user_id + ' -p test')\n    home_dir = \"../../data/home\"\n    \n    \n    if not os.path.isdir(home_dir):\n        os.mkdir(home_dir)\n    os.chdir(home_dir)\n    \n    # clean out any old files\n    if os.path.isdir(user_id):\n        removeFilesFromDir(user_id)\n    else:\n        # create user home directory   \n        os.mkdir(user_id)\n        \n    os.chdir(user_id)\n    \n    print(\"cwd:\", os.getcwd())\n    # link to \"public\" directory\n    # create symlink to public directory\n    public_dir = \"../../public\"\n    if os.name != 'nt':\n        if not os.path.isdir(public_dir):\n            print(\"create public dir\")\n            os.mkdir(public_dir)\n        if not os.path.islink('public'):\n            print(\"create symlink\")\n            os.symlink(public_dir, \"public\")\n    copyfile(src_dir + '/tall.h5', 'tall.h5')\n    copyfile(src_dir + '/tall.h5', 'tall_deleteme.h5') \n    \n    os.chdir(cwd)\n    \ndef addTestAccounts():\n    for test_user in ('test_user1', 'test_user2'):\n        addTestAccount(test_user)\n    \n    \n    \n\"\"\"\nMake a testfile with 1000 sub-groups\n\"\"\"\ndef makeGroup1k():\n    file_path = SRC + \"/group1k.h5\"\n    if os.path.exists(file_path):\n        return # don't waste time re-creating\n    print('makeGroup1k')\n    f = h5py.File(file_path, \"w\")\n    for i in range(1000):\n        name = 'g{:04d}'.format(i)\n        f.create_group(name)\n    f.close()\n \n\"\"\"\nMake a testfile with 1000 attributes\n\"\"\"\ndef makeAttr1k():\n    file_path = SRC + \"/attr1k.h5\" \n    if os.path.exists(file_path):\n        return # don't waste time re-creating  \n    print('makeAttr1k()')\n    f = h5py.File(file_path, \"w\")\n    for i in 
range(1000):\n        name = 'a{:04d}'.format(i)\n        f.attrs[name] = \"this is attribute: \" + str(i)\n    f.close()\n    \n\"\"\"\nMake a testfile with 1000 types\n\"\"\"\ndef makeType1k():\n    file_path = SRC + \"/type1k.h5\" \n    if os.path.exists(file_path):\n        return # don't waste time re-creating  \n    f = h5py.File(file_path, \"w\")\n    for i in range(1000):\n        name = 'S{:04d}'.format(i+1)\n        f[name] = np.dtype(name)  #create fixed length string\n    f.close()\n    \n\"\"\"\nMake a testfile with 1000 datasets\n\"\"\"\ndef makeDataset1k():\n    file_path = SRC + \"/dset1k.h5\" \n    if os.path.exists(file_path):\n        return # don't waste time re-creating  \n    f = h5py.File(file_path, \"w\")\n    for i in range(1000):\n        name = 'd{:04d}'.format(i+1)\n        dim = i+1\n        f.create_dataset(name, (dim,), dtype=np.int32)\n    f.close()\n\n\"\"\"\nMake a testfile with external links\n\"\"\"\ndef makeExternalLinks():\n    file_path = SRC + \"/link_example.h5\"\n    if os.path.exists(file_path):\n        return # don't waste time re-creating  \n    tgt_link_path = os.path.abspath(DES)  # for absolute paths in link\n    tgt_link_path += \"/tall.h5\"\n    f = h5py.File(file_path, 'w')\n    f.create_group('g1')\n    f.create_group('g1/g1.1')\n    f['soft_link'] = h5py.SoftLink('g1')\n    f['external_link1'] = h5py.ExternalLink('tall.h5', 'g1/g1.1')\n    f['external_link2'] = h5py.ExternalLink('tall', 'g1/g1.1')\n    f['external_link3'] = h5py.ExternalLink('tall.test.hdfgroup.org', 'g1/g1.1')\n    f['external_link4'] = h5py.ExternalLink(tgt_link_path, 'g1/g1.1')\n    f['external_link5'] = h5py.ExternalLink('tall.subdir.test.hdfgroup.org', 'g1/g1.1')\n    f['external_link6'] = h5py.ExternalLink('tall.subdir', 'g1/g1.1')\n    f['external_link7'] = h5py.ExternalLink('subdir/tall.h5', 'g1/g1.1')\n    f.close()\n\n\n\"\"\"\nRemove files from given directory\n\"\"\"    \ndef removeFilesFromDir(dir_name):\n    print('remove files', 
dir_name)\n    if not os.path.isdir(dir_name):\n        print(\"expected\", dir_name, \"to be a directory\")\n        sys.exit()\n    for file_name in os.listdir(dir_name):\n        file_path = os.path.join(dir_name, file_name)\n        try:\n            if os.path.isdir(file_path):\n                if os.path.islink(file_path):\n                    os.unlink(file_path)  # just remove the link\n                else:\n                    removeFilesFromDir(file_path)\n                    os.rmdir(file_path)\n            else:\n                if os.path.isfile(file_path):\n                    # check for read-only\n                    if (os.stat(file_path).st_mode & stat.S_IWUSR) == 0:\n                        # make read-write\n                        os.chmod(file_path, 0O666)\n                    os.unlink(file_path)\n        except Exception as e:\n            print(e)\n    \n\"\"\"\nmain\n\"\"\"\n# verify we are in the right place and the correct argument has been passed\nif len(sys.argv) > 1 and sys.argv[1] == '-h':\n    print(\"this script will remove all files from ../../data/test and repopulate using files from ../../testdata\")\n    sys.exit(); \n    \nif not os.path.exists(SRC):\n    print(\"run this from the integ test directory!\")\n    sys.exit()\n    \nif not  os.path.exists(DES):\n    # create the data/test directory if it doesn't exist\n    os.mkdir(DES)\n   \n   \n# create test accounts\naddTestAccounts()\n    \n# create group1k.h5 (if not created before)\nmakeGroup1k()\n\n# create attr1k.h5 (if not created before)\nmakeAttr1k()\n\n# create type1k.h5 (if not created before)\nmakeType1k()\n\n# create dset1k.h5 (if not created before)\nmakeDataset1k()\n\n# create link_example.h5 (if not created before))\nmakeExternalLinks()\n\nremoveFilesFromDir(DES)\n\n\ntest_dirs = ('.', 'subdir', 'subdir/subdir')\nfor dir_name in test_dirs:\n    tgt_dir = DES\n    if dir_name != '.':\n        tgt_dir += '/' + dir_name\n    if not os.path.exists(tgt_dir):\n        
os.mkdir(tgt_dir)\n  \nfor file_name in testfiles:\n    for tgt in testfiles[file_name]:\n        src = SRC + '/' + file_name\n        des = DES + '/'\n        if tgt == '.':\n            # copy to DES\n            des += file_name\n        else:\n            des += tgt\n            if os.path.isdir(des):\n                # copy to directory\n                des += '/'\n                des += file_name\n            \n        print('copyfile(\"'+file_name+'\", \"'+des+'\")')  \n        copyfile(src, des) \n        \nfor file_name in read_only_files:\n    file_path = DES + '/' + file_name\n    print('chmod', file_path)\n    os.chmod(file_path, 0O444)\n\n    \n\n\n\n"
  },
  {
    "path": "test/integ/shapetest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\n\nclass ShapeTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(ShapeTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))    \n       \n    def testGet(self):\n        domain = 'tall.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        g2_uuid = helper.getUUID(domain, root_uuid, 'g2')\n        dset21_uuid = helper.getUUID(domain, g2_uuid, 'dset2.1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset21_uuid + \"/shape\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(shape['class'], 'H5S_SIMPLE')\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 10)  \n        self.assertTrue('maxdims' not in shape)  # not re-sizeable\n        \n    def testGetResizable(self):\n        domain = 'resizable.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        resizable_1d_uuid = helper.getUUID(domain, root_uuid, 'resizable_1d') \n        req = helper.getEndpoint() + \"/datasets/\" + resizable_1d_uuid + \"/shape\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 10)  \n        self.assertEqual(shape['maxdims'][0], 20)  \n        \n        resizable_2d_uuid = helper.getUUID(domain, root_uuid, 'resizable_2d') \n        req = helper.getEndpoint() + \"/datasets/\" + resizable_2d_uuid + \"/shape\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(len(shape['dims']), 2)\n        self.assertEqual(shape['dims'][1], 10)  
\n        self.assertTrue('maxdims'  in shape)  # is re-sizeable!\n        self.assertEqual(shape['maxdims'][1], 20)\n        \n        unlimited_1d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_1d') \n        req = helper.getEndpoint() + \"/datasets/\" + unlimited_1d_uuid + \"/shape\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 10)  \n        self.assertTrue('maxdims' in shape)  # is re-sizeable\n        self.assertEqual(shape['maxdims'][0], 0)\n        \n        unlimited_2d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_2d') \n        req = helper.getEndpoint() + \"/datasets/\" + unlimited_2d_uuid + \"/shape\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(len(shape['dims']), 2)\n        self.assertEqual(shape['dims'][1], 10)  \n        self.assertTrue('maxdims' in shape)  # is re-sizeable\n        self.assertEqual(shape['maxdims'][1], 0)\n            \n       \n    def testPutResizable(self):\n        domain = 'resized.' 
+ config.get('domain')\n        headers = {'host': domain}\n        root_uuid = helper.getRootUUID(domain)\n        resizable_1d_uuid = helper.getUUID(domain, root_uuid, 'resizable_1d') \n        req = helper.getEndpoint() + \"/datasets/\" + resizable_1d_uuid + \"/shape\"\n        \n        # get the existing shape\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 10)  \n        self.assertEqual(shape['maxdims'][0], 20)  \n        \n        # modify shape by setting extent to maxdims \n        payload = { 'shape': 20 }\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # get the shape again\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 20)  \n        self.assertTrue('maxdims' not in shape)   \n        \n        # two-dimensional - verify existing shape\n        resizable_2d_uuid = helper.getUUID(domain, root_uuid, 'resizable_2d') \n        req = helper.getEndpoint() + \"/datasets/\" + resizable_2d_uuid + \"/shape\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(len(shape['dims']), 2)\n        self.assertEqual(shape['dims'][0], 10)\n        self.assertEqual(shape['dims'][1], 10)  \n        
self.assertTrue('maxdims' in shape)  # is re-sizeable\n        self.assertEqual(shape['maxdims'][0], 10)\n        self.assertEqual(shape['maxdims'][1], 20)\n        \n        # modify shape by setting extent to maxdims \n        payload = { 'shape': [10, 20] }\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        # verify the changed shape\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(len(shape['dims']), 2)\n        self.assertEqual(shape['dims'][0], 10)\n        self.assertEqual(shape['dims'][1], 20)  \n        self.assertTrue('maxdims' not in shape)  \n               \n        unlimited_1d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_1d') \n        req = helper.getEndpoint() + \"/datasets/\" + unlimited_1d_uuid + \"/shape\"\n        payload = { 'shape': 25 }\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n        \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(len(shape['dims']), 1)\n        self.assertEqual(shape['dims'][0], 25)  \n        self.assertTrue('maxdims'  in shape)  # is re-sizeable\n        self.assertEqual(shape['maxdims'][0], 0)\n        \n        unlimited_2d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_2d') \n        req = helper.getEndpoint() + \"/datasets/\" + unlimited_2d_uuid + \"/shape\"\n        payload = { 'shape': [10, 25] }\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n      
  self.assertEqual(rsp.status_code, 201)\n        \n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('shape' in rspJson)\n        shape = rspJson['shape']\n        self.assertEqual(len(shape['dims']), 2)\n        self.assertEqual(shape['dims'][0], 10)  \n        self.assertEqual(shape['maxdims'][0], 10)\n        self.assertEqual(shape['dims'][1], 25)  \n        self.assertTrue('maxdims'  in shape)  # is re-sizeable\n        self.assertEqual(shape['maxdims'][1], 0)   \n        \n        \n        \n    def testPutInvalidShape(self):\n        domain = 'resized.' + config.get('domain')\n        headers = {'host': domain}\n        root_uuid = helper.getRootUUID(domain)\n        resizable_1d_uuid = helper.getUUID(domain, root_uuid, 'resizable_1d') \n        req = helper.getEndpoint() + \"/datasets/\" + resizable_1d_uuid + \"/shape\"\n        payload = { 'shape': [20, 10] }  # wrong rank\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n        payload = { 'shape': 8 }  # try to shrink\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)        \n                  \n        resizable_2d_uuid = helper.getUUID(domain, root_uuid, 'resizable_2d') \n        req = helper.getEndpoint() + \"/datasets/\" + resizable_2d_uuid + \"/shape\"\n        payload = { 'shape': [12, 20] }  # try to extend non-extendable dimension\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n     \n    \n        \nif __name__ == '__main__':\n    unittest.main()"
  },
  {
    "path": "test/integ/spidertest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\n\nclass SpiderTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(SpiderTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))\n        self.verifiedhrefs = set()\n        self.unverifiedhrefs = set()  \n        self.headers = {} \n        \n    def validateHrefs(self, href):\n        self.verifiedhrefs.add(href)\n        # convert to local endpoint\n        domain = config.get('domain')\n        npos = href.find(domain)\n        if npos > 0:\n            req = self.endpoint + href[(npos+len(domain)):]\n        else:\n            req = href\n        \n        rsp = requests.get(req, headers=self.headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['content-type'], 'application/json')\n        rspJson = json.loads(rsp.text)\n        self.assertTrue(\"hrefs\" in rspJson)\n        hrefs = rspJson[\"hrefs\"]\n        self.assertTrue(len(hrefs) > 0)\n        links = {}\n    
    for link in hrefs:\n            self.assertTrue('href' in link)\n            self.assertTrue('rel' in link)\n            rel = link['rel']\n            url = link['href']\n            self.assertTrue(rel not in links)\n            links[rel] = url\n            if url in self.verifiedhrefs:\n                continue\n            self.unverifiedhrefs.add(url)\n        self.assertTrue('self' in links)\n        self.assertTrue('root' in links)\n        \n        while len(self.unverifiedhrefs) > 0:\n            link = self.unverifiedhrefs.pop()\n            self.validateHrefs(link)     \n        \n       \n    def testHateoas(self):\n        domains = ('tall', 'tall_ro', 'group1k')\n        for name in domains:     \n            domain = name + '.' + config.get('domain') \n            self.verifiedhrefs.clear()\n            self.unverifiedhrefs.clear()\n            req = self.endpoint + \"/\"\n            self.headers = {'host': domain}\n            self.validateHrefs(self.endpoint + \"/\")\n         \n        \n        \nif __name__ == '__main__':\n    unittest.main()"
  },
  {
    "path": "test/integ/valuetest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\n\nimport six\nimport requests\nimport config\nimport helper\nimport unittest\nimport json\nimport base64\n \n\nclass ValueTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(ValueTest, self).__init__(*args, **kwargs)\n        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))  \n    \n    \"\"\"\n     Test 32-bit memory word at given offset from value against expected.\n     Expected must be less than 256.\n    \"\"\"  \n    def compareWord32(self, value, offset, expected):\n        if six.PY3:\n            self.assertEqual(value[offset+0], 0)\n            self.assertEqual(value[offset+1], 0)\n            self.assertEqual(value[offset+2], 0)\n            self.assertEqual(value[offset+3], expected)\n        else:\n            self.assertEqual(ord(value[offset+0]), 0)\n            self.assertEqual(ord(value[offset+1]), 0)\n            self.assertEqual(ord(value[offset+2]), 0)\n            self.assertEqual(ord(value[offset+3]), expected)\n              \n       \n    def testGet(self):\n        for domain_name in ('tall', 
'tall_ro'):\n            domain = domain_name + '.' + config.get('domain') \n            rootUUID = helper.getRootUUID(domain)\n            g1UUID = helper.getUUID(domain, rootUUID, 'g1')\n            g11UUID = helper.getUUID(domain, g1UUID, 'g1.1')\n               \n            # rank 1 dataset\n            dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') \n            req = helper.getEndpoint() + \"/datasets/\" + dset112UUID\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(rspJson['id'], dset112UUID)\n            typeItem = rspJson['type']  \n            self.assertEqual(typeItem['base'], 'H5T_STD_I32BE')\n            shape = rspJson['shape']\n            self.assertEqual(shape['class'], 'H5S_SIMPLE')\n            self.assertEqual(len(shape['dims']), 1)\n            self.assertEqual(shape['dims'][0], 20)  \n            req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\"\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n            rspJson = json.loads(rsp.text)\n            data = rspJson['value'] \n            self.assertEqual(len(data), 20)\n            for i in range(20):\n                self.assertEqual(data[i], i)\n        \n            # rank 2 dataset\n            dset111UUID = helper.getUUID(domain, g11UUID, 'dset1.1.1') \n            req = helper.getEndpoint() + \"/datasets/\" + dset111UUID\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            \n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(rspJson['id'], dset111UUID)\n            typeItem = rspJson['type']  \n            
self.assertEqual(typeItem['base'], 'H5T_STD_I32BE')\n            shape = rspJson['shape']\n            self.assertEqual(shape['class'], 'H5S_SIMPLE')\n            self.assertEqual(len(shape['dims']), 2)\n            self.assertEqual(shape['dims'][0], 10) \n            self.assertEqual(shape['dims'][1], 10)    \n            req = helper.getEndpoint() + \"/datasets/\" + dset111UUID + \"/value\"\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n            rspJson = json.loads(rsp.text)\n            data = rspJson['value'] \n            self.assertEqual(len(data), 10)  \n            for i in range(10):\n                arr = data[i]\n                self.assertEqual(len(arr), 10)\n                for j in range(10):\n                    self.assertEqual(arr[j], i*j)\n                    \n    def testGetBinary(self):\n        for domain_name in ('tall', 'tall_ro'):\n            domain = domain_name + '.' 
+ config.get('domain') \n            rootUUID = helper.getRootUUID(domain)\n            g1UUID = helper.getUUID(domain, rootUUID, 'g1')\n            g11UUID = helper.getUUID(domain, g1UUID, 'g1.1')\n               \n            # rank 1 dataset\n            dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') \n            req = helper.getEndpoint() + \"/datasets/\" + dset112UUID\n            headers = {'host': domain}\n            headers_binary = {'host': domain, 'accept': \"application/octet-stream\"}\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(rspJson['id'], dset112UUID)\n            typeItem = rspJson['type']  \n            self.assertEqual(typeItem['base'], 'H5T_STD_I32BE')\n            shape = rspJson['shape']\n            self.assertEqual(shape['class'], 'H5S_SIMPLE')\n            self.assertEqual(len(shape['dims']), 1)\n            self.assertEqual(shape['dims'][0], 20)  \n            req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\"\n             \n            rsp = requests.get(req, headers=headers_binary)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/octet-stream\")\n            \n            data = rsp.content\n            self.assertEqual(len(data), 80)\n            for i in range(20):\n                self.compareWord32(data, i*4, i)\n                   \n            # rank 2 dataset\n            dset111UUID = helper.getUUID(domain, g11UUID, 'dset1.1.1') \n            req = helper.getEndpoint() + \"/datasets/\" + dset111UUID\n            headers = {'host': domain}\n            rsp = requests.get(req, headers=headers)\n            \n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertEqual(rspJson['id'], dset111UUID)\n            typeItem = 
rspJson['type']  \n            self.assertEqual(typeItem['base'], 'H5T_STD_I32BE')\n            shape = rspJson['shape']\n            self.assertEqual(shape['class'], 'H5S_SIMPLE')\n            self.assertEqual(len(shape['dims']), 2)\n            self.assertEqual(shape['dims'][0], 10) \n            self.assertEqual(shape['dims'][1], 10)    \n            req = helper.getEndpoint() + \"/datasets/\" + dset111UUID + \"/value\"\n            rsp = requests.get(req, headers=headers_binary)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/octet-stream\")\n            data = rsp.content\n            self.assertEqual(len(data), 400)\n            row_offset = 0\n           \n            for i in range(10):\n                col_offset = 0\n                for j in range(10):\n                    # 4 byte integers, little indian\n                    self.compareWord32(data, row_offset+col_offset, i*j)\n                    col_offset += 4\n                row_offset += col_offset\n                \n        \n    def testGetSelection(self):\n        for domain_name in ('tall', 'tall_ro'):\n            domain = domain_name + '.' 
+ config.get('domain')  \n            headers = {'host': domain}\n            rootUUID = helper.getRootUUID(domain)\n            g1UUID = helper.getUUID(domain, rootUUID, 'g1')\n            g11UUID = helper.getUUID(domain, g1UUID, 'g1.1')\n               \n            # rank 1 dataset\n            dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') \n         \n            # dataset has shape (20,) and type 'int32'\n        \n            # get values starting at index 2\n            req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             \"?select=[2:]\"\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n            rspJson = json.loads(rsp.text)\n            data = rspJson['value']  # should be [2, 3, 4, ..., 19]\n            self.assertEqual(len(data), 18)\n            self.assertEqual(data, list(range(2, 20)))\n        \n            # get values starting at index 2 with stop of 10\n            req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             \"?select=[2:10]\"\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            data = rspJson['value']  # should be [2, 3, 4, ..., 9]\n            self.assertEqual(len(data), 8)\n            self.assertEqual(data, list(range(2, 10)))\n        \n            # get values starting at index 2 with stop of 10, and stride of 2\n            req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             \"?select=[2:10:2]\"\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n            rspJson = json.loads(rsp.text)\n            data = rspJson['value']  # 
should be [2, 4, 6, 8]\n            self.assertEqual(len(data), 4)\n            self.assertEqual(data, list(range(2, 9, 2)))\n        \n            # rank 2 dataset\n            dset111UUID = helper.getUUID(domain, g11UUID, 'dset1.1.1') \n         \n            # dataset has shape (10,10) and type 'int32'\n        \n            # get rows 2, 3, 4, and 5\n            req = helper.getEndpoint() + \"/datasets/\" + dset111UUID + \"/value\" + \\\n             \"?select=[:,2:6]\"\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n            rspJson = json.loads(rsp.text)\n            data = rspJson['value']   \n            self.assertEqual(len(data), 10)  \n            for i in range(10):\n                arr = data[i]\n                self.assertEqual(len(arr), 4)\n                for j in range(4):\n                    self.assertEqual(arr[j], i*(j+2))\n                \n            # get 2d subregion with stride\n            req = helper.getEndpoint() + \"/datasets/\" + dset111UUID + \"/value\" + \\\n             \"?select=[1:9,1:9:2]\"\n            rsp = requests.get(req, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n            rspJson = json.loads(rsp.text)\n            data = rspJson['value']   \n          \n            self.assertEqual(len(data), 8)  \n            for i in range(8):\n                arr = data[i]\n                self.assertEqual(len(arr), 4)\n                for j in range(4):\n                    self.assertEqual(arr[j], (i+1)*(j*2+1))\n                    \n    def testGetSelectionBinary(self):\n        for domain_name in ('tall', ):\n            domain = domain_name + '.' 
+ config.get('domain')  \n            headers = {'host': domain}\n            headers_binary = {'host': domain, 'accept': \"application/octet-stream\"}\n            rootUUID = helper.getRootUUID(domain)\n            g1UUID = helper.getUUID(domain, rootUUID, 'g1')\n            g11UUID = helper.getUUID(domain, g1UUID, 'g1.1')\n               \n            # rank 1 dataset\n            dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') \n         \n            # dataset has shape (20,) and type 'int32'\n        \n            # get values starting at index 2\n            req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             \"?select=[2:]\"\n            rsp = requests.get(req, headers=headers_binary)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/octet-stream\")\n            \n            # content should be [2, 3, 4, ..., 19]\n            data = rsp.content\n            self.assertEqual(len(data), 18*4)  # 18 elements with 4 bytes per element\n            for i in range(18):\n                self.compareWord32(data, i*4, i+2)\n                     \n            # get values starting at index 2 with stop of 10\n            req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             \"?select=[2:10]\"\n            rsp = requests.get(req, headers=headers_binary)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/octet-stream\")\n             \n            data = rsp.content  # should be [2, 3, 4, ..., 9]\n            self.assertEqual(len(data), 8*4)\n            for i in range(8):\n                self.compareWord32(data, i*4, i+2)\n                 \n            \n            # get values starting at index 2 with stop of 10, and stride of 2\n            req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             
\"?select=[2:10:2]\"\n            rsp = requests.get(req, headers=headers_binary)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/octet-stream\")\n            data = rsp.content\n            # should be [2, 4, 6, 8]\n            self.assertEqual(len(data), 4*4)\n            for i in range(4):\n                offset = i*4\n                self.compareWord32(data, offset, (i*2)+2)\n                 \n        \n            # rank 2 dataset\n            dset111UUID = helper.getUUID(domain, g11UUID, 'dset1.1.1') \n         \n            # dataset has shape (10,10) and type 'int32'\n            # get rows 2, 3, 4, and 5\n            req = helper.getEndpoint() + \"/datasets/\" + dset111UUID + \"/value\" + \\\n             \"?select=[:,2:6]\"\n            rsp = requests.get(req, headers=headers_binary)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/octet-stream\")\n            data = rsp.content\n            self.assertEqual(len(data), 4*10*4)\n            row_offset = 0\n            for i in range(10):\n                col_offset = 0\n                for j in range(4):\n                    # 4 byte integers, little endian\n                    self.compareWord32(data, row_offset+col_offset, i*(j+2))\n                     \n                    col_offset += 4\n                row_offset += col_offset\n                    \n            # get 2d subregion with stride\n            req = helper.getEndpoint() + \"/datasets/\" + dset111UUID + \"/value\" + \\\n             \"?select=[1:9,1:9:2]\"\n            rsp = requests.get(req, headers=headers_binary)\n            self.assertEqual(rsp.status_code, 200)\n            self.assertEqual(rsp.headers['Content-Type'], \"application/octet-stream\")\n            data = rsp.content\n            self.assertEqual(len(data), 8*4*4)\n            row_offset = 0\n            for i in 
range(8):\n                col_offset = 0\n                for j in range(4):\n                    # 4 byte integers, little endian\n                    self.compareWord32(data, row_offset+col_offset, (i+1)*(j*2+1))\n                    col_offset += 4\n                row_offset += col_offset\n            \n                \n    def testGetSelectionBadQuery(self):\n        domain = 'tall.' + config.get('domain')  \n        headers = {'host': domain}\n        rootUUID = helper.getRootUUID(domain)\n        g1UUID = helper.getUUID(domain, rootUUID, 'g1')\n        g11UUID = helper.getUUID(domain, g1UUID, 'g1.1')\n               \n        # rank 1 dataset\n        dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') \n    \n        # selection value without brackets is rejected\n        req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             \"?select=abc\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n        # not a number\n        req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             \"?select=[a:b:c]\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n        # start is negative\n        req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             \"?select=[-1:3]\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n        # stop past extent\n        req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             \"?select=[1:25]\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n        \n        # pass in 0 step\n        req = helper.getEndpoint() + \"/datasets/\" + dset112UUID + \"/value\" + \\\n             \"?select=[1:2:0]\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)  \n      
  \n    def testGetScalar(self):\n        domain = 'scalar.' + config.get('domain')\n        headers = {'host': domain}  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '0d') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n        rspJson = json.loads(rsp.text)\n        data = rspJson['value'] \n        self.assertEqual(data, 42)\n        \n    def testGetNullSpace(self):\n        domain = 'null_space_dset.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('value' in rspJson)\n        data = rspJson['value'] \n        self.assertEqual(data, None)\n         \n        \n    def testGetScalarString(self):\n        domain = 'scalar.' 
+ config.get('domain')  \n        headers = {'host': domain}\n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '0ds') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n        rspJson = json.loads(rsp.text)\n        data = rspJson['value'] \n        self.assertEqual(data, \"hello\")\n        \n    def testGetScalarStringBinary(self):\n        domain = 'scalar.' + config.get('domain')  \n        headers = {'host': domain}\n        headers_binary = {'host': domain, 'accept': \"application/octet-stream\"}\n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '0ds') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        rsp = requests.get(req, headers=headers_binary)\n        self.assertEqual(rsp.status_code, 200)\n        # requested binary, but got json (because it's a variable length string)\n        self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n        rspJson = json.loads(rsp.text)\n        data = rspJson['value'] \n        self.assertEqual(data, \"hello\")\n        \n    def testGetSimpleOneElement(self):\n        domain = 'scalar.' 
+ config.get('domain') \n        headers = {'host': domain} \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '1d') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid\n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        data = rspJson['value'] \n        self.assertEqual(data, [42,])\n        \n    def testGetSimpleOneElementString(self):\n        domain = 'scalar.' + config.get('domain') \n        headers = {'host': domain} \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, '1ds') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        data = rspJson['value'] \n        self.assertEqual(data, [\"hello\",])\n        \n        \n    def testGetCompound(self):\n        domain = 'compound.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n        rspJson = json.loads(rsp.text)\n        data = rspJson['value'] \n        self.assertEqual(len(data), 72)\n        first = data[0]\n        self.assertEqual(len(first), 5)\n        self.assertEqual(first[0], 24) \n        self.assertEqual(first[1], \"13:53\")  \n        # get first element via selection query\n        # get values starting at index 2\n        req += \"?select=[0:1]\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n        rspJson = json.loads(rsp.text)\n        data = rspJson['value'] \n        self.assertEqual(len(data), 1)\n        first = data[0]\n        self.assertEqual(len(first), 5)\n        self.assertEqual(first[0], 24) \n        self.assertEqual(first[1], \"13:53\")\n        \n    def testGetCompoundBinary(self):\n        domain = 'compound.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        headers_binary = {'host': domain, 'accept': \"application/octet-stream\"}\n        rsp = requests.get(req, headers=headers_binary)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['Content-Type'], \"application/octet-stream\")\n        data = rsp.content\n        self.assertEqual(len(data) // 36, 72 )\n        \n \n        # get first element via selection query\n        # get values starting at index 2\n        req += \"?select=[0:1]\"\n        rsp = requests.get(req, headers=headers_binary)\n        self.assertEqual(rsp.status_code, 200)\n        # just one element, so expect json response\n        self.assertEqual(rsp.headers['Content-Type'], \"application/json\")\n        rspJson = json.loads(rsp.text)\n        data = rspJson['value'] \n        self.assertEqual(len(data), 1)\n        first = data[0]\n        self.assertEqual(len(first), 5)\n        self.assertEqual(first[0], 24) \n        self.assertEqual(first[1], \"13:53\")\n       \n        \n    def testGetCommitted(self):\n        domain = 'committed_type.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        data = rspJson['value'] \n        self.assertEqual(len(data), 4)\n         \n        \n    def testGetArray(self):\n        domain = 'array_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 4)  # four dataset elements (each an array)\n        self.assertEqual(len(value[0]), 3)  # 3x5 array shape\n        self.assertEqual(len((value[0])[0]), 5)  # 3x5 array shape\n        self.assertEqual(value[0][2][4], -8)  # pull out a value from the array\n        \n    def testGetVLenString(self):\n        domain = 'vlen_string_dset.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 4) \n        self.assertEqual(value[0], \"Parting\")\n        self.assertEqual(value[1], \"is such\")\n        self.assertEqual(value[2], \"sweet\")\n        self.assertEqual(value[3], \"sorrow.\")\n        \n    def testGetFixedString(self):\n        domain = 'fixed_string_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n       \n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 4) \n        self.assertEqual(value[0], \"Parting\")\n        self.assertEqual(value[1], \"is such\")\n        self.assertEqual(value[2], \"sweet\")\n        self.assertEqual(value[3], \"sorrow.\")\n\n    def testGetFixedStringBinary(self):\n        domain = 'fixed_string_dset.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain, 'accept': \"application/octet-stream\"}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        self.assertEqual(rsp.headers['Content-Type'], \"application/octet-stream\")\n        data = rsp.content\n        self.assertEqual(data, b\"Partingis suchsweet\\x00\\x00sorrow.\")\n         \n        \n    def testGetEnum(self):\n        domain = 'enum_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 4) \n        self.assertEqual(value[1][2], 2)\n        \n    def testGetVlen(self):\n        domain = 'vlen_dset.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 2)\n        self.assertEqual(len(value[1]), 12)\n        self.assertEqual(value[1][11], 144)\n        \n    def testGetOpaque(self):\n        domain = 'opaque_dset.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        # get for Opaque data is not supported yet.  Check that the call returns 501\n        self.assertEqual(rsp.status_code, 501)\n        \n    def testGetObjectReference(self):\n        domain = 'objref_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2') \n        g1_uuid = helper.getUUID(domain, root_uuid, 'G1') \n        req = helper.getEndpoint() + \"/datasets/\" + ds1_uuid  + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n         \n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 2)\n        self.assertEqual(value[0], 'groups/' + g1_uuid)\n        self.assertEqual(value[1], 'datasets/' + ds2_uuid)\n        \n    def testGetNullObjReference(self):\n        domain = 'null_objref_dset.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 1)\n        self.assertEqual(value[0], \"null\")\n        \n    def testGetRegionReference(self):\n        domain = 'regionref_dset.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2')\n        req = helper.getEndpoint() + \"/datasets/\" + ds1_uuid + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('value' in rspJson) \n        value = rspJson['value']\n        self.assertEqual(len(value), 2)\n        ref0 = value[0]\n        self.assertEqual(ref0['select_type'], 'H5S_SEL_POINTS')\n        self.assertEqual(ref0['id'], ds2_uuid)\n        points = ref0['selection']\n        self.assertEqual(len(points), 4)\n        self.assertEqual(points[0], [0, 1])\n        self.assertEqual(points[1], [2,11])\n        self.assertEqual(points[2], [1, 0])\n        self.assertEqual(points[3], [2, 4])\n        \n        ref1 = value[1]\n        self.assertEqual(ref1['select_type'], 'H5S_SEL_HYPERSLABS')\n        self.assertEqual(ref1['id'], ds2_uuid)\n        hyperslabs = ref1['selection'] \n        self.assertEqual(len(hyperslabs), 4)\n        self.assertEqual(hyperslabs[0][0], [0, 0])\n        self.assertEqual(hyperslabs[0][1], [1, 3])\n        self.assertEqual(hyperslabs[1][0], [0, 11])\n        self.assertEqual(hyperslabs[1][1], [1, 14])\n        self.assertEqual(hyperslabs[2][0], [2, 0])\n        self.assertEqual(hyperslabs[2][1], [3, 3])\n        self.assertEqual(hyperslabs[3][0], [2, 11])\n        self.assertEqual(hyperslabs[3][1], [3, 14])\n\n    def testGetFillValue(self):\n        domain = 'fillvalue.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n\n        # create a new dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': 10}\n        payload['creationProperties'] = {'fillValue': 42 }\n        req = self.endpoint + \"/datasets\"\n        headers = {'host': domain}\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset_uuid = rspJson['id']\n        self.assertTrue(helper.validateId(dset_uuid))\n         \n        # link the new dataset \n        name = \"dset_new\"\n        req = self.endpoint + \"/groups/\" + root_uuid + \"/links/\" + name \n        payload = {\"id\": dset_uuid}\n        headers = {'host': domain}\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)\n\n        # retrieve the values\n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        data = rspJson['value'] \n        self.assertEqual(data, [42,]*10)\n\n        \n        \n    #\n    # Query tests\n    #\n    \n    def testQuery(self):\n        domain = 'compound.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        req += \"?query=date == 23\"  # values where date field = 23\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('hrefs' in rspJson)\n        self.assertTrue('index' in rspJson)\n        index = rspJson['index']\n        self.assertEqual(len(index), 24)\n        self.assertEqual(index[0], 14)\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 24)\n        item = value[0]\n        self.assertEqual(len(item), 5)\n        self.assertEqual(item[0], 23)\n        \n    \"\"\"    \n    def testsnp(self):\n        limit = 20\n        domain = 'snp500.demo.hdfgroup.org'  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(root_uuid is not None)\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        req += \"?query=symbol == 'AAPL'&Limit=\" + str(limit) # values where date field = 23\n        print req\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        print rspJson\n    \"\"\"    \n    \n    \n    def testQueries(self):\n        # use '%26' rather than '&' since otherwise it will be \n        # interpreted as a http query param seperator\n        queries = { \"date == 23\": 24,\n                    \"wind == b'W 5'\": 3,\n                    \"temp > 61\": 53,\n                    \"(date >=22) %26 (date <= 24)\": 62,\n                    \"(date == 21) %26 (temp > 70)\": 4,\n                    
\"(wind == b'E 7') | (wind == b'S 7')\": 7 }\n       \n        #queries = {    \"(date == 21) %26 (temp >= 72)\": 4 }\n        domain = 'compound.' + config.get('domain') \n        headers = {'host': domain} \n        root_uuid = helper.getRootUUID(domain)\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        for key in queries.keys():\n            query = req + \"?query=\" + key\n            rsp = requests.get(query, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            self.assertTrue('hrefs' in rspJson)\n            self.assertTrue('index' in rspJson)\n            index = rspJson['index']\n            self.assertTrue(len(index), queries[key])\n            self.assertTrue('value' in rspJson)\n            value = rspJson['value']\n            self.assertEqual(len(value), queries[key])\n            \n    def testQuerySelection(self):\n        domain = 'compound.' 
+ config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        req += \"?query=date == 23\"  # values where date field = 23\n        req += \"&select=[10:20]\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('hrefs' in rspJson)\n        self.assertTrue('index' in rspJson)\n        index = rspJson['index']\n        self.assertEqual(len(index), 6)\n        self.assertEqual(index[0], 14)\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 6)\n        item = value[0]\n        self.assertEqual(len(item), 5)\n        self.assertEqual(item[0], 23)\n        \n    def testQueryBatch(self):\n        domain = 'compound.' + config.get('domain')  \n        headers = {'host': domain}\n        root_uuid = helper.getRootUUID(domain)\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"  \n        start = 0\n        stop = 72\n        count = 0\n        count = req_count=0\n        limit = 10\n        req += \"?query=date == 23\"     # values where date field = 23\n        req += \"&Limit=\" + str(limit)  # return no more than 10 results at a time\n        for i in range(50):\n            sreq = req+\"&select=[\" + str(start) + \":\" + str(stop) + \"]\" \n            rsp = requests.get(sreq, headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            req_count += 1\n            rspJson = json.loads(rsp.text)\n            self.assertTrue('hrefs' in rspJson)\n            self.assertTrue('index' in rspJson)\n            index = rspJson['index']\n            self.assertTrue(len(index) <= limit)\n            
self.assertTrue('value' in rspJson)\n            value = rspJson['value']\n            self.assertEqual(len(value), len(index))\n            count += len(index)\n            if len(index) < limit:\n                break  # no more results\n            start = index[-1] + 1  # start at next index\n        self.assertEqual(count, 24)\n        self.assertEqual(req_count, 3)\n            \n         \n        \n    def testBadQuery(self):\n        domain = 'compound.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        dset_uuid = helper.getUUID(domain, root_uuid, 'dset') \n        req = helper.getEndpoint() + \"/datasets/\" + dset_uuid + \"/value\"\n        req += \"?query=foobar\"  \n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 400)\n         \n          \n         \n        \n    #\n    # Post tests\n    #\n    \n    def testPost(self):    \n        for domain_name in ('tall','tall_ro'):\n            domain = domain_name + '.' 
+ config.get('domain') \n            rootUUID = helper.getRootUUID(domain)\n            g1UUID = helper.getUUID(domain, rootUUID, 'g1')\n            g11UUID = helper.getUUID(domain, g1UUID, 'g1.1')\n               \n            # rank 1 dataset\n            dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') \n            points = (19, 17, 13, 11, 7, 5, 3, 2)\n            req = self.endpoint + \"/datasets/\" + dset112UUID + \"/value\" \n            payload = {'points': points}\n            headers = {'host': domain}\n            \n            rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            data = rspJson['value'] \n            self.assertEqual(len(data), len(points))\n            self.assertEqual(points[0], data[0])\n            \n            # rank 2 dataset\n            dset111UUID = helper.getUUID(domain, g11UUID, 'dset1.1.1') \n            points = []\n            for i in range(10):\n                points.append((i,i))  # get diagonal\n            req = self.endpoint + \"/datasets/\" + dset111UUID + \"/value\" \n            payload = {'points': points}\n            headers = {'host': domain}\n            \n            rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n            self.assertEqual(rsp.status_code, 200)\n            rspJson = json.loads(rsp.text)\n            data = rspJson['value'] \n            self.assertEqual(len(data), len(points))\n            self.assertEqual(9, data[3]) \n    #\n    # Put tests\n    #   \n        \n    def testPut(self):\n        # create domain\n        domain = 'valueput.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        #create scalar dataset\n        payload = {'type': 'H5T_STD_I32LE'}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset0UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset0UUID))\n        \n        # link new dataset as 'dset0'\n        ok = helper.linkObject(domain, dset0UUID, 'dset0')\n        self.assertTrue(ok)\n        \n        # write to dset0\n        req = self.endpoint + \"/datasets/\" + dset0UUID + \"/value\" \n        data = 42\n        payload = { 'value': data }\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # read back the data\n        readData = helper.readDataset(domain, dset0UUID)\n        self.assertEqual(readData, data)  # verify we got back what we started with\n        \n        #create 1d/one element dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': 1}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset1UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset1UUID))\n         \n        # link new dataset as 'dset1_0'\n        ok = helper.linkObject(domain, dset1UUID, 'dset1_0')\n        self.assertTrue(ok)\n        \n        # write to dset1\n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\" \n        data = [42,]\n        payload = { 'value': data }\n      \n        rsp = requests.put(req, 
data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # read back the data\n        readData = helper.readDataset(domain, dset1UUID)\n        self.assertEqual(readData, data)  # verify we got back what we started with\n        \n        \n        #create 1d dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': 10}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset1UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset1UUID))\n         \n        # link new dataset as 'dset1'\n        ok = helper.linkObject(domain, dset1UUID, 'dset1')\n        self.assertTrue(ok)\n        \n        # write to dset1\n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\" \n        data = [2,3,5,7,11,13,17,19,23,29]\n        # payload = {'type': 'H5T_STD_I32LE', 'shape': 10, 'value': data }\n        payload = { 'value': data }\n      \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # read back the data\n        readData = helper.readDataset(domain, dset1UUID)\n        self.assertEqual(readData, data)  # verify we got back what we started with\n        \n        # verify attempting the wrong number of elements fails\n        data = [9, 99, 999, 999]\n        payload = { 'value': data }\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # Bad Request\n        \n        #create 2d dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': (10,10)}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = 
json.loads(rsp.text)\n        dset2UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset2UUID))\n         \n        # link new dataset as 'dset2'\n        ok = helper.linkObject(domain, dset2UUID, 'dset2')\n        self.assertTrue(ok)\n        \n        req = self.endpoint + \"/datasets/\" + dset2UUID + \"/value\" \n        data = []\n        for i in range(10):\n            row = []\n            for j in range(10):\n                row.append(i*10 + j)\n            data.append(row)\n        payload = { 'value': data }\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # read back the data\n        readData = helper.readDataset(domain, dset2UUID)\n        self.assertEqual(readData, data)  # verify we got back what we started with\n        \n    def testPutBinary(self):\n        # create domain\n        domain = 'valueput_binary.datasettest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        #create scalar dataset\n        payload = {'type': 'H5T_STD_I32LE'}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset0UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset0UUID))\n        \n        # link new dataset as 'dset0'\n        ok = helper.linkObject(domain, dset0UUID, 'dset0')\n        self.assertTrue(ok)\n        \n        # write to dset0\n        req = self.endpoint + \"/datasets/\" + dset0UUID + \"/value\" \n        byte_array = bytearray(4)\n        byte_array[0] = 42  # create 4-byte int, little endian\n        data = base64.b64encode(bytes(byte_array))\n        data = 
data.decode(\"ascii\")\n        payload = { 'value_base64': data }\n   \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # read back the data\n        readData = helper.readDataset(domain, dset0UUID)\n        self.assertEqual(readData, 42)  # verify we got back what we started with\n        \n        #create 1d dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': 10}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset1UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset1UUID))\n         \n        # link new dataset as 'dset1'\n        ok = helper.linkObject(domain, dset1UUID, 'dset1')\n        self.assertTrue(ok)\n        \n        # write to dset1\n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\" \n        primes = [2,3,5,7,11,13,17,19,23,29]\n        data = bytearray(4 * 10)\n        for i in range(10):\n            data[i*4] = primes[i]\n        data = base64.b64encode(bytes(data))\n        data = data.decode(\"ascii\")\n        \n        payload = { 'value_base64': data }\n        \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # read back the data\n        readData = helper.readDataset(domain, dset1UUID)\n        self.assertEqual(readData, primes)  # verify we got back what we started with\n        \n        #create 2d dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': (10,10)}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset2UUID = rspJson['id']\n        
self.assertTrue(helper.validateId(dset2UUID))\n         \n        # link new dataset as 'dset2'\n        ok = helper.linkObject(domain, dset2UUID, 'dset2')\n        self.assertTrue(ok)\n        \n        req = self.endpoint + \"/datasets/\" + dset2UUID + \"/value\" \n        data = bytearray(10*10*4)\n        for i in range(10):\n            for j in range(10):\n                data[i*10*4 + j*4] = i*j         \n        data = base64.b64encode(bytes(data))\n        data = data.decode(\"ascii\")\n             \n        payload = { 'value_base64': data }\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # read back the data\n        read_data = helper.readDataset(domain, dset2UUID)\n        self.assertEqual(len(read_data), 10)  # verify we got back what we started with\n        for i in range(10):\n            row = read_data[i]\n            self.assertEqual(len(row), 10)\n            for j in range(10):\n                self.assertEqual(row[j], i*j)\n        \n        \n    def testPutSelection(self):\n        # create domain\n        domain = 'valueputsel.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        #create 1d dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': 10}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset1UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset1UUID))\n         \n        # link new dataset as 'dset1'\n        ok = helper.linkObject(domain, dset1UUID, 'dset1')\n        self.assertTrue(ok)\n        \n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\" \n        data = [2,3,5,7,11,13,17,19,23,29]\n        data_part1 = data[0:5]\n        data_part2 = data[5:10]\n        # write part 1\n        payload = { 'start': 0, 'stop': 5, 'value': data_part1 }\n     \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # write part 2\n        payload = { 'start': 5, 'stop': 10, 'value': data_part2 }\n\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  \n        \n        # read back the data\n        readData = helper.readDataset(domain, dset1UUID)\n        self.assertEqual(readData, data)  # verify we got back what we started with\n        \n    def testPutSelectionValueMismatch(self):\n        # test that putting the wrong number of items in the value body key is handled correctly.\n        # create domain\n        domain = 'valueputselvaluemismatch.datasettest.' 
+ config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        #create 1d dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': 10}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset1UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset1UUID))\n         \n        # link new dataset as 'dset1'\n        ok = helper.linkObject(domain, dset1UUID, 'dset1')\n        self.assertTrue(ok)\n        \n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\" \n        data_9 = [2,3,5,7,11,13,17,19,23]\n        data_10 = [2,3,5,7,11,13,17,19,23,29]\n        data_11 = [2,3,5,7,11,13,17,19,23,29,31]\n        \n        # try writing 9 elements when the selection has 10 slots\n        payload = { 'start': 0, 'stop': 10, 'value': data_9 }\n     \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # should fail\n        \n        # try writing 11 elements when the selection has 10 slots\n        payload = { 'start': 0, 'stop': 10, 'value': data_11 }\n     \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # should fail\n        \n        # try writing 10 elements when the selection has 10 slots\n        payload = { 'start': 0, 'stop': 10, 'value': data_10 }\n     \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # just right!\n        \n    def testPutSelectionBinaryValueMismatch(self):\n        # test that putting the wrong number of items in the value body key is handled 
correctly.\n        # create domain\n        domain = 'valueputselbinaryvaluemismatch.datasettest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        #create 1d dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': 10}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset1UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset1UUID))\n         \n        # link new dataset as 'dset1'\n        ok = helper.linkObject(domain, dset1UUID, 'dset1')\n        self.assertTrue(ok)\n        \n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\" \n        primes = [2,3,5,7,11,13,17,19,23,29,31]\n        data_9 = bytearray(4 * 9)    # write 4*9 byte data \n        data_10 = bytearray(4 * 10)  # write 4*10 byte data \n        data_11 = bytearray(4 * 11)  # write 4*11 byte data \n        for i in range(9):\n            data_9[i*4] = primes[i]\n        for i in range(10):\n            data_10[i*4] = primes[i]\n        for i in range(11):\n            data_11[i*4] = primes[i]\n       \n        data_9 = base64.b64encode(bytes(data_9))\n        data_10 = base64.b64encode(bytes(data_10))\n        data_11 = base64.b64encode(bytes(data_11))\n        \n        data_9 = data_9.decode(\"ascii\")\n        data_10 = data_10.decode(\"ascii\")\n        data_11 = data_11.decode(\"ascii\")\n         \n        # try writing 9 elements when the selection has 10 slots\n        payload = { 'start': 0, 'stop': 10, 'value_base64': data_9 }\n     \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # should fail\n        \n        # 
try writing 11 elements when the selection has 10 slots\n        payload = { 'start': 0, 'stop': 10, 'value_base64': data_11 }\n     \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 400)  # should fail\n        \n        # try writing 10 elements when the selection has 10 slots\n        payload = { 'start': 0, 'stop': 10, 'value_base64': data_10 }\n     \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # just right!\n         \n    \n    def testPutSelectionBinary(self):\n        # create domain\n        domain = 'valueputsel_binary.datasettest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        #create 1d dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': 10}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset1UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset1UUID))\n         \n        # link new dataset as 'dset1'\n        ok = helper.linkObject(domain, dset1UUID, 'dset1')\n        self.assertTrue(ok)\n        \n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\" \n        primes = [2,3,5,7,11,13,17,19,23,29]\n        data_part1 = bytearray(4 * 5)  # write 4*10 byte data in two parts of 20 bytes\n        data_part2 = bytearray(4 * 5)  # 2nd part\n        for i in range(5):\n            data_part1[i*4] = primes[i]\n            data_part2[i*4] = primes[i+5]\n        data_part1 = base64.b64encode(bytes(data_part1))\n        data_part2 = base64.b64encode(bytes(data_part2))\n        data_part1 = 
data_part1.decode(\"ascii\")\n        data_part2 = data_part2.decode(\"ascii\")\n        # write part 1\n        payload = { 'start': 0, 'stop': 5, 'value_base64': data_part1 }\n     \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        # write part 2\n        payload = { 'start': 5, 'stop': 10, 'value_base64': data_part2 }\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  \n        \n        # read back the data\n        readData = helper.readDataset(domain, dset1UUID)\n        self.assertEqual(readData, primes)  # verify we got back what we started with\n        \n    def testPutPointSelection(self):\n        # create domain\n        domain = 'valueputpointsel.datasettest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        #create 1d dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': 100}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset1UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset1UUID))\n         \n        # link new dataset as 'dset1'\n        ok = helper.linkObject(domain, dset1UUID, 'dset1')\n        self.assertTrue(ok)\n        \n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\" \n        primes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97]\n        value = [1,] * len(primes)  # write 1's at indexes that are prime\n        # write 1's to all the prime indexes\n        payload = { 'points': primes, 'value': value }\n        headers = {'host': domain}\n        
rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n         \n        # read back the data\n        readData = helper.readDataset(domain, dset1UUID)\n        self.assertEqual(readData[37], 1)  # prime\n        self.assertEqual(readData[38], 0)  # not prime\n        \n    def testPutPointSelectionBinary(self):\n        # create domain\n        domain = 'valueputpointsel_binary.datasettest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        #create 1d dataset\n        payload = {'type': 'H5T_STD_I32LE', 'shape': 100}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        rspJson = json.loads(rsp.text)\n        dset1UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset1UUID))\n         \n        # link new dataset as 'dset1'\n        ok = helper.linkObject(domain, dset1UUID, 'dset1')\n        self.assertTrue(ok)\n        \n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\" \n        primes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97]\n        value = [1,] * len(primes)  # write 1's at indexes that are prime\n        data = bytearray(4 * len(primes))   \n        for i in range(len(primes)):\n            data[i*4] = 1\n        data = base64.b64encode(bytes(data))\n        data = data.decode(\"ascii\")\n        \n        # write 1's to all the prime indexes\n        payload = { 'points': primes, 'value_base64': data }\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n         \n        # read back the data\n        readData = helper.readDataset(domain, 
dset1UUID)\n        self.assertEqual(readData[37], 1)  # prime\n        self.assertEqual(readData[38], 0)  # not prime\n        \n        \n    def testPutCompound(self):\n        domain = 'valueputcompound.datasettest.' + config.get('domain')\n        req = self.endpoint + \"/\"\n        headers = {'host': domain}\n        rsp = requests.put(req, headers=headers)\n        self.assertEqual(rsp.status_code, 201) # creates domain\n        \n        root_uuid = helper.getRootUUID(domain)\n        headers = {'host': domain}\n        \n        fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, \n                    {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}) \n        datatype = {'class': 'H5T_COMPOUND', 'fields': fields }\n        \n        #\n        #create compound dataset\n        #\n        payload = {'type': datatype}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        \n        rspJson = json.loads(rsp.text)\n        dset0UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset0UUID))\n         \n        # link new dataset as 'dset0_compound'\n        ok = helper.linkObject(domain, dset0UUID, 'dset0_compound')\n        self.assertTrue(ok)\n        \n        # write entire array\n        value = (42, 0.42)\n        payload = {'value': value}\n        req = self.endpoint + \"/datasets/\" + dset0UUID + \"/value\"\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # write value\n        \n         \n        # read back the data\n        readData = helper.readDataset(domain, dset0UUID)\n        self.assertEqual(readData[0], 42)   \n        \n        #    \n        #create 1d dataset\n        #\n        num_elements = 10\n        payload = {'type': datatype, 'shape': num_elements}\n        req = self.endpoint + \"/datasets\"\n        
rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        \n        rspJson = json.loads(rsp.text)\n        dset1UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset1UUID))\n         \n        # link new dataset as 'dset1'\n        ok = helper.linkObject(domain, dset1UUID, 'dset_compound')\n        self.assertTrue(ok)\n        \n        # write entire array\n        value = [] \n        for i in range(num_elements):\n            item = (i*10, i*10+i/10.0) \n            value.append(item)\n        payload = {'value': value}\n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\"\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # write value\n        \n        # selection write\n        payload = { 'start': 0, 'stop': 1, 'value': (42, .42) }\n        req = self.endpoint + \"/datasets/\" + dset1UUID + \"/value\"\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # write value\n        \n        # read back the data\n        readData = helper.readDataset(domain, dset1UUID)\n        \n        self.assertEqual(readData[0][0], 42)   \n        self.assertEqual(readData[1][0], 10)   \n\n        #\n        # Create 2d dataset\n        #\n        dims = [2,2]\n        payload = {'type': datatype, 'shape': dims}\n        req = self.endpoint + \"/datasets\"\n        rsp = requests.post(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 201)  # create dataset\n        \n        rspJson = json.loads(rsp.text)\n        dset2UUID = rspJson['id']\n        self.assertTrue(helper.validateId(dset2UUID))\n         \n        # link new dataset as 'dset2d_compound'\n        ok = helper.linkObject(domain, dset2UUID, 'dset2d_compound')\n        self.assertTrue(ok)\n\n        # write entire 
array\n        value = [] \n        for i in range(dims[0]):\n            row = []\n            for j in range(dims[1]):\n                item = (i*10, i*10+j/2.0) \n                row.append(item)\n            value.append(row)\n        payload = {'value': value}\n         \n        req = self.endpoint + \"/datasets/\" + dset2UUID + \"/value\"\n        data = json.dumps(payload)\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # write value\n\n   \n    def testPutObjectReference(self):\n        domain = 'objref_dset_updated.' + config.get('domain')  \n        root_uuid = helper.getRootUUID(domain)\n        self.assertTrue(helper.validateId(root_uuid))\n        ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2') \n        g1_uuid = helper.getUUID(domain, root_uuid, 'G1') \n        req = helper.getEndpoint() + \"/datasets/\" + ds1_uuid  + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n\n        value = ('datasets/' + ds2_uuid, 'groups/' + g1_uuid)\n        payload = {'value': value}\n        req = self.endpoint + \"/datasets/\" + ds1_uuid + \"/value\"\n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # write value\n        \n    def testPutRegionReference(self):\n        domain = 'regionref_dset_updated.' 
+ config.get('domain')\n        root_uuid = helper.getRootUUID(domain)\n        ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') \n        ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2')\n        \n        req = helper.getEndpoint() + \"/datasets/\" + ds1_uuid  + \"/value\"\n        headers = {'host': domain}\n        rsp = requests.get(req, headers=headers)\n        self.assertEqual(rsp.status_code, 200)\n        rspJson = json.loads(rsp.text)\n        self.assertTrue('value' in rspJson)\n        value = rspJson['value']\n        self.assertEqual(len(value), 2)\n         \n        \n        updated_value = ( value[1], value[0] )  # switch elements\n        payload = {'value': updated_value}\n        \n        rsp = requests.put(req, data=json.dumps(payload), headers=headers)\n        self.assertEqual(rsp.status_code, 200)  # write value\n           \n             \nif __name__ == '__main__':\n    unittest.main()\n"
  },
  {
    "path": "test/test_files/notahdf5file.h5",
    "content": "This is not an HDF5 file!\n"
  },
  {
    "path": "test/testall.py",
    "content": "#!/usr/bin/env python\n\n##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\n\nfrom argparse import ArgumentParser\nimport os\nimport sys\n\n\nunit_tests = ('timeUtilTest', 'fileUtilTest')\ninteg_tests = ('roottest', 'grouptest', 'dirtest', 'linktest', 'datasettest', 'valuetest',\n    'attributetest', 'datatypetest', 'shapetest', 'datasettypetest', 'acltest')\n\n#todo - add spidertest back\ncwd = os.getcwd()\nno_server = False\n\nparser = ArgumentParser()\ntestKind = parser.add_mutually_exclusive_group()\ntestKind.add_argument('--unit', action='store_true', help='run only the unit tests')\ntestKind.add_argument('--integ', action='store_true', help='run only the integrity tests')\nparser.add_argument('--failslow', action='store_true', help='keep running if a test fails, instead of terminating early')\n\nargs = vars(parser.parse_args())\n\nif args['unit']:\n    integ_tests = ()\nelif args['integ']:\n    unit_tests = ()\n\ntest_dir = os.path.dirname(os.path.realpath(__file__))\nos.chdir(test_dir)\n\n# Run all h5serv tests\n# Run this script before running any integ tests\n\nexit_code = None\n\nos.chdir('unit')\nfor file_name in 
unit_tests:\n    print(file_name)\n    rc = os.system('python ' + file_name + '.py')\n    if rc != 0:\n        if args['failslow']:\n            exit_code = 'Failed'\n        else:\n            os.chdir(cwd)\n            sys.exit(\"Failed\")\n \n \nos.chdir('../integ')\n\nif integ_tests:\n    os.system(\"python ./setupdata.py -f\")  # initialize data files\n    \nfor file_name in integ_tests:\n    print(file_name)\n    rc = os.system('python ' + file_name + '.py')\n    if rc != 0:\n        if args['failslow']:\n            exit_code = 'Failed'\n        else:\n            os.chdir(cwd)\n            sys.exit(\"Failed\")\n\nlog_file = \"../../h5serv.log\"\nif exit_code:\n    if os.name != 'nt' and os.path.isfile(log_file):\n        # tail not available on windows\n        print(\"server log...\")\n        os.system(\"tail -n 100 \" + log_file)\n    os.chdir(cwd)\n    sys.exit(exit_code)\nelse:\n    os.chdir(cwd)\n    print(\"Done!\")\n"
  },
  {
    "path": "test/unit/config.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nfrom h5serv.config import *\n\ncfg = {\n    'testfiledir': '../../testfiles/',\n    'domain':  'unit.hdf.io',\n    'datapath': '../data/',\n    'uuidlen':  36,\n    'hdf5_ext': '.h5'\n}\nupdate(cfg)\n"
  },
  {
    "path": "test/unit/fileUtilTest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport unittest\nimport os\nfrom tornado.web import HTTPError\n\nfrom h5serv.fileUtil import getFilePath, getDomain, posixpath, join\n\nimport config\n\nclass FileUtilTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(FileUtilTest, self).__init__(*args, **kwargs)\n        # main\n    \n    def testPosixPath(self):\n        path1 = \"dir1\\\\dir2\"\n        pp = posixpath(path1)\n        if os.name == 'nt':\n            self.assertEqual(pp, \"dir1/dir2\")\n        else:\n            self.assertEqual(pp, path1)  # no conversion on unix\n            \n    def testJoin(self):\n        path1 = \"dir1\\\\dir2\"\n        path2 = \"myfile.h5\"\n        pp = join(path1, path2)\n        if os.name == 'nt':\n            self.assertEqual(pp, \"dir1/dir2/myfile.h5\")\n        else:\n            self.assertEqual(pp, \"dir1\\\\dir2/myfile.h5\")  # no conversion on unix\n           \n    def testDomaintoFilePath(self):\n        domain = 'tall.' 
+ config.get('domain')  \n        filePath = getFilePath(domain)\n        self.assertEqual(filePath, \"../data/tall.h5\")\n        # dot in front\n        domain = '.tall.' + config.get('domain')  \n        self.assertRaises(HTTPError, getFilePath, domain)\n        # two dots\n        domain = 'two..dots.' + config.get('domain')  \n        self.assertRaises(HTTPError, getFilePath, domain)\n        # no dot before domain\n        domain = 'nodot' + config.get('domain')  \n        self.assertRaises(HTTPError, getFilePath, domain)\n        \n    def testGetDomain(self):\n        \n        filePath = \"tall.h5\"\n        domain = getDomain(filePath)\n        self.assertEqual(domain, 'tall.' + config.get('domain'))\n        filePath = \"somevalue\"\n        domain = getDomain(filePath)\n        self.assertEqual(domain, 'somevalue.' + config.get('domain'))\n        filePath = \"subdir/tall.h5\"\n        domain = getDomain(filePath)\n        self.assertEqual(domain, 'tall.subdir.' + config.get('domain'))\n        \n        filePath = os.path.join(config.get('datapath'), 'subdir/tall.h5')\n        \n        domain = getDomain(filePath)\n        self.assertEqual(domain, 'tall.subdir.' + config.get('domain'))\n        \n        filePath = os.path.join(config.get('datapath'), 'subdir/tall.h5')\n        filePath = os.path.abspath(filePath)\n        domain = getDomain(filePath)\n        self.assertEqual(domain, 'tall.subdir.' + config.get('domain'))\n        \n        filePath = os.path.join(config.get('datapath'), 'home/test_user1/tall.h5')\n        domain = getDomain(filePath)\n        self.assertEqual(domain, 'tall.test_user1.home.' + config.get('domain'))\n        \n        filePath = '../data/home/test_user1/tall.h5'\n        domain = getDomain(filePath)\n        self.assertEqual(domain, 'tall.test_user1.home.' 
+ config.get('domain'))\n        \n        #domainpath = fileUtil.getDomain(grppath, base_domain=base_domain)\n        \n        filePath = \"../data\"\n        domain = getDomain(filePath)\n        self.assertEqual(domain, config.get('domain'))\n        \n        # verify backslashes are ok for windows...\n        if os.name == 'nt':\n            filePath = \"subdir\\\\subsubdir\\\\tall.h5\"\n            domain = getDomain(filePath)\n            self.assertEqual(domain, 'tall.subsubdir.subdir.' + config.get('domain'))          \n             \nif __name__ == '__main__':\n    #setup test files\n    \n    unittest.main()\n    \n"
  },
  {
    "path": "test/unit/timeUtilTest.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport unittest\nimport time\n\nfrom h5serv.timeUtil import unixTimeToUTC\n\nimport config\n\nclass TimeUtilTest(unittest.TestCase):\n    def __init__(self, *args, **kwargs):\n        super(TimeUtilTest, self).__init__(*args, **kwargs)\n        # main\n        \n    def testConvertUnixTimetoUTC(self):\n        # get test file\n        now = time.time()\n        utcTime = unixTimeToUTC(now)\n        print(utcTime)\n        self.assertEqual(len(utcTime), 20)\n        self.assertTrue(utcTime.startswith('20'))\n        self.assertTrue(utcTime.endswith('Z'))\n        \n            \n             \nif __name__ == '__main__':\n    #setup test files\n    \n    unittest.main()\n    \n"
  },
  {
    "path": "util/admin/add_user.py",
    "content": "import h5py\nimport numpy as np\nimport sys\nimport argparse\nimport os.path as op\nimport os\nimport time\nimport datetime\nimport hashlib\nimport config\n\n\ndef encrypt_pwd(passwd):\n    passwd = passwd.encode('utf-8')\n    encrypted = hashlib.sha224(passwd).hexdigest()\n    return encrypted\n    \ndef print_time(timestamp):\n    str_time = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')\n    return str_time\n    \ndef generate_temp_password(length=6):\n    if not isinstance(length, int) or length < 4:\n        raise ValueError(\"temp password must have positive length\")\n\n    chars = \"ABCDEFGHJKLMNPQRSTUVWXYZ23456789\"\n    return \"\".join([chars[ord(c) % len(chars)] for c in os.urandom(length)])\n    \ndef main():\n    if os.name == 'nt':\n        print(\"Sorry, this utility is not supported on Windows!\")\n        return -1\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-u', \"--user\", help='user id')\n    parser.add_argument('-p', \"--passwd\", help='user password') \n    args = parser.parse_args()\n     \n    filename = None\n    passwd = None\n    username = None\n    \n    filename = config.get('password_file')\n    if not filename:\n        print(\"no password file in config\")\n        return -1\n     \n    if not args.user:\n        print(\"no userid supplied\")\n        return -1\n    \n         \n    username = args.user\n    if username.find(':') != -1:\n        print(\"invalid username (':' is not allowed)\")\n        return -1\n    if username.find('/') != -1:\n        print(\"invalid username ('/' is not allowed)\")\n        return -1\n    \n    if args.passwd:\n        passwd = args.passwd\n        if len(passwd) < 4:\n            print(\"password must be at least 4 characters long\")\n            return -1\n    else:\n        passwd = generate_temp_password()\n        \n    # verify file exists and is writable\n    if not op.isfile(filename):\n        print(\"password file:\", 
filename, \" does not exist\")\n        return -1\n        \n    if not h5py.is_hdf5(filename):\n        print(\"invalid password file\")\n        return -1\n        \n    if not os.access(filename, os.W_OK):\n        print(\"password file is not writable\")\n        return -1\n    \n    f = h5py.File(filename, 'r+')\n    if 'user_type' not in f:\n        print(\"invalid password file\")\n        return -1\n        \n    user_type = f['user_type']\n       \n    \n    now = int(time.time())\n    \n    # add a new user\n    if username in f.attrs:\n        print(\"user already exists\")\n        return -1\n        \n    # create userid 1 greater than previous used\n    userid = len(f.attrs) + 1\n    data = np.empty((), dtype=user_type)\n    data['pwd'] = encrypt_pwd(passwd)\n    data['state'] = 'A'\n    data['userid'] = userid\n    data['ctime'] = now\n    data['mtime'] = now\n    f.attrs.create(username, data, dtype=user_type)   \n    f.close()\n    \n    datapath = config.get('datapath')\n    if not op.isdir(datapath):\n        print(\"data directory not found\")\n        return -1\n    \n    userpath = op.join(datapath, config.get('home_dir'))\n    if not op.isdir(userpath):\n        os.mkdir(userpath)\n    userdir = op.join(userpath, username)\n    if op.isdir(userdir):\n        print(\"user directory already exists\")\n        return -1\n    \n    # create user directory    \n    os.mkdir(userdir)\n    \n    # link to \"public\" directory\n    link_name = op.join(userdir, \"public\")\n    # create symlink to public directory\n    os.symlink(\"../../public\", link_name)\n    \n    print(passwd)\n    return \n     \n    \n\nmain()\n"
  },
  {
    "path": "util/admin/config.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport os\nimport sys\n\ncfg = {\n    'datapath': '../../data/',\n    'domain':  'hdfgroup.org',\n    'hdf5_ext': '.h5',\n    'toc_name': '.toc.h5',\n    'password_file': 'passwd.h5',\n    'home_dir': 'home'   \n}\n\n\ndef get(x):\n    # see if there is a command-line override\n    option = '--'+x+'='\n    for i in range(1, len(sys.argv)):\n        #print i, sys.argv[i]\n        if sys.argv[i].startswith(option):\n            # found an override\n            arg = sys.argv[i]\n            return arg[len(option):]  # return text after option string\n    # see if there are an environment variable override\n    if x.upper() in os.environ:\n        return os.environ[x.upper()]\n    # no command line override, just return the cfg value\n    return cfg[x]\n"
  },
  {
    "path": "util/admin/getacl.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport sys\nfrom os.path import isfile \nimport json\nimport numpy as np\nimport h5py\n\nfrom h5json import Hdf5db\n   \n\n#\n# Print usage and exit\n#\ndef printUsage():\n    print(\"usage: python get_acl.py [-h] -file <filename> [-path h5path] [userid1, userid2, ...]\")\n    print(\"  options -file: name of file\")\n    print(\"  options -path: h5path to object (default as /)\")\n    print(\"  options [userid]: list of userids (default as all)\")\n    print(\" ------------------------------------------------------------------------------\")\n    print(\"  Example - get all ACL's for root group of file 'tall.h5' \")\n    print(\"       python getacl.py -file ../../data/test/tall.h5\")\n    print(\"  Example - get acl for dataset '/g1/g1.1/dset1.1.1' of 'tall.h5', user 123\")\n    print(\"        python getacl.py -file ../../data/test/tall.h5 -path /g1/g1.1/dset1.1.1  123\")\n    sys.exit(); \n    \n\"\"\"\n  Get command line argument.\n  Exit with usage message if not available\n\"\"\"    \ndef getNextArg(argn):\n    if (argn+1) == len(sys.argv):\n        printUsage();\n        
sys.exit(-1)\n    return sys.argv[argn+1]\n               \ndef main():\n    h5path = None\n    filename = None\n    req_userids = []\n    if len(sys.argv) == 1 or sys.argv[1] == \"-h\":\n        printUsage();\n        sys.exit(0)\n    argn = 1 \n    while argn < len(sys.argv):\n        arg = sys.argv[argn]\n        if arg == '-file':  \n            filename = getNextArg(argn)\n            argn += 2\n        elif arg == '-path':\n            h5path = getNextArg(argn)\n            argn += 2\n        else:\n            # process userids\n            try:\n                userid = int(arg)\n                req_userids.append(userid)\n            except ValueError:\n                print(\"Invalid userid:\", userid)\n                sys.exit(1)\n            argn += 1 \n            \n            \n    if not isfile(filename):\n        print(filename, \"not found\")\n        sys.exit(1) \n    if not h5py.is_hdf5(filename):\n        print(filename, \"not an hdf5 file\")\n        sys.exit(1)\n        \n    if h5path is None:\n        h5path = '/'\n            \n    fields = ('userid', 'create', 'read', 'update', 'delete', 'readACL', 'updateACL')\n    with Hdf5db(filename) as db:\n        try:\n            obj_uuid = db.getUUIDByPath(h5path)\n        except KeyError:\n            print(\"no object found at path:\", h5path)\n            sys.exit(1)\n        acl_dset = db.getAclDataset(obj_uuid)\n        if acl_dset and acl_dset.shape[0] > 0:\n            acls = {}\n            items = acl_dset[...]\n            for item in items:\n                acls[item[0]] = item\n                \n            userids = list(acls.keys())\n            userids.sort()  # sort to print by userid\n         \n            print(\"%8s   %8s  %8s  %8s  %8s  %8s  %8s \" % fields)\n            for userid in userids:\n                if len(req_userids) > 0 and userid not in req_userids:\n                    continue\n                acl = acls[userid]\n                format_args = [userid]\n       
         for field in ('create', 'read', 'update', 'delete', 'readACL', 'updateACL'):\n                    format_args.append('Y' if acl[field] else 'N')\n                print(\"%8s %8s  %8s  %8s  %8s  %8s  %8s \" % tuple(format_args))\n        else:\n            print(\"no ACLs\")\n              \n    \n\nmain()\n\n    \n\t\n"
  },
  {
    "path": "util/admin/import_file.py",
    "content": "import h5py\nimport numpy as np\nimport sys\nimport argparse\nimport os.path as op\nimport os\nimport shutil\nfrom tornado.escape import url_escape\nfrom h5json import Hdf5db\nimport config\n \n\n\"\"\"\n Create directories as needed along the given path.\n\"\"\"\ndef makeDirs(filePath):\n    #print(\"makeDirs:\", filePath)\n    # Make any directories along path as needed\n    if len(filePath) == 0 or op.isdir(filePath):\n        return\n    dirname = op.dirname(filePath)\n\n    if len(dirname) >= len(filePath):\n        return\n    makeDirs(dirname)  # recursive call\n    os.mkdir(filePath)  # should succeed since parent directory is created\n    \n\"\"\"\n Get userid given username.\n If user_name is not found, return None\n\"\"\"\ndef getUserId(user_name, password_file):\n    \"\"\"\n      getUserInfo: return user data\n    \"\"\"\n      \n    userid = None\n\n    if not user_name:\n        return None\n\n    # verify file exists and is writable\n    if not op.isfile(password_file):\n        print(\"password file not found\")\n        raise None\n\n    with h5py.File(password_file, 'r') as f:\n        if user_name not in f.attrs:\n            return None\n\n        data = f.attrs[user_name]\n        #print(data)\n        return data['userid']\n\n\"\"\"\nget group uuid of hardlink, or None if no link\n\"\"\"\ndef getSubgroupId(db, group_uuid, link_name):\n    #print(\"link_name:\", link_name)    \n    subgroup_uuid = None\n    try:\n        item = db.getLinkItemByUuid(group_uuid, link_name)\n        if item['class'] != 'H5L_TYPE_HARD':\n            return None\n        if item['collection'] != 'groups':\n            return None\n        subgroup_uuid = item['id']\n    except IOError:\n        # link_name doesn't exist, return None\n        pass\n\n    return subgroup_uuid\n        \n\"\"\"\nUpdate toc with new filename\n\"\"\"\n\ndef addTocEntry(toc_file, domain, base_domain):\n    \"\"\"\n    Helper method - update TOC when a domain is created\n  
  \"\"\"\n         \n    if not domain.endswith(base_domain):\n        sys.exit(\"unexpected domain value: \" + domain)\n    # trim domain by base domain\n\n    try:\n        with Hdf5db(toc_file) as db:\n            group_uuid = db.getUUIDByPath('/')\n            names = domain.split('.')\n            base_names = base_domain.split('.')\n            indexes = list(range(len(names)))\n            indexes = indexes[::-1] # reverse\n            for i in indexes:\n                if i >= len(names) - len(base_names):\n                    continue # still in the base domain\n                linkName = names[i]\n                if not linkName:\n                    continue\n                if i == 0:\n                    db.createExternalLink(group_uuid, domain, '/', linkName)\n                else:\n                    subgroup_uuid = getSubgroupId(db, group_uuid, linkName)\n                    if subgroup_uuid is None:\n                        # create subgroup and link to parent group\n                        subgroup_uuid = db.createGroup()\n                        # link the new group\n                        db.linkObject(group_uuid, subgroup_uuid, linkName)\n                    group_uuid = subgroup_uuid\n\n    except IOError as e:\n        print(\"IOError: \" + str(e.errno) + \" \" + e.strerror)\n        sys.exit(-1)\n            \n\"\"\"\nmain method\n\"\"\"\ndef main():\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-s', \"--src\", help=\"source path for the file to be imported\")\n    parser.add_argument('-u', \"--user\", help=\"user name (optional)\")\n    parser.add_argument('-f', \"--folder\", help='folder path under user home dir (optional)')\n    parser.add_argument('-p', \"--passwd_file\", help='password file (optional)')\n     \n      \n    args = parser.parse_args()\n       \n    src_path = None\n    username = None\n    folder = None\n    password_file = None\n    \n    if args.src:\n        src_path = args.src\n    else:\n        
print(\"no source file provided\")\n        return -1\n    if not op.isfile(src_path):\n        print(\"no file found\")\n        return -1\n    if not h5py.is_hdf5(src_path):\n        print(\"file must be an HDF5 file\")\n        \n    if args.user:\n        username = args.user\n    else:\n        print(\"Importing into public\")\n                             \n    if args.passwd_file:\n        password_file = args.passwd_file\n    else:\n        password_file = config.get(\"password_file\")\n        \n    if args.folder:\n        folder = args.folder\n        if op.isabs(folder):\n            print(\"folder path must be relative\")\n            return -1\n        folder = op.normpath(folder)\n    \n            \n    print(\">source:\", src_path)\n    print(\">username:\", username)\n    print(\">password_file:\", password_file)\n    print(\">folder:\", folder)  \n    \n    hdf5_ext = config.get(\"hdf5_ext\")\n    \n    if username:\n        userid = getUserId(username, password_file)\n    \n        if not userid:\n            print(\"user not found\")\n            return -1\n    \n    tgt_dir = op.join(op.dirname(__file__), config.get(\"datapath\"))\n    tgt_dir = op.normpath(tgt_dir)\n    \n    if username:\n        tgt_dir = op.join(tgt_dir, config.get(\"home_dir\"))\n        tgt_dir = op.join(tgt_dir, username)\n    toc_file = op.join(tgt_dir, config.get(\"toc_name\"))\n    if not op.isfile(toc_file):\n        print(\"toc_file:\", toc_file, \"not found\")\n        return -1\n    if folder:\n        tgt_dir = op.join(tgt_dir, folder)\n     \n    if not op.isdir(tgt_dir):\n        print(\"directory:\", tgt_dir, \"not found, creating\")\n        makeDirs(tgt_dir)\n    \n    tgt_file = op.basename(src_path)\n    tgt_file = op.splitext(tgt_file)[0] # ignore the extension\n    tgt_file = url_escape(tgt_file)  # make the filename url compatible\n    tgt_file = tgt_file.replace('.', '_')  # replace dots with underscores\n       \n    tgt_path = op.join(tgt_dir, 
tgt_file)\n    tgt_path = op.normpath(tgt_path)\n        \n    if op.isfile(tgt_path + hdf5_ext):\n        print(\"file already exists\")\n        return -1\n    \n    # determine target domain\n    domain = tgt_file\n    if folder:\n        domain += '.' + folder\n    if username:\n        domain += '.' + username + '.' + config.get(\"home_dir\")\n    domain += \".\" + config.get(\"domain\") \n    \n    # determine the base so that the toc update can be done relative to the base.\n    if username:\n        base_domain = username + '.' + config.get(\"home_dir\") + '.' + config.get(\"domain\")\n    else:\n        base_domain = config.get(\"domain\")\n    \n     \n    print(\"domain:\", domain)\n    # add toc entry\n    addTocEntry(toc_file, domain, base_domain)    \n    # copy file\n    tgt_path += hdf5_ext\n    shutil.copyfile(src_path, tgt_path) \n    \n    return 0\n        \n\nmain()"
  },
  {
    "path": "util/admin/makepwd_file.py",
    "content": "import h5py\nimport numpy as np\n\nfile_name = 'passwd.h5'\nf = h5py.File(file_name, 'x')\nfields = []\nfields.append(('pwd', np.dtype('S56')))\nfields.append(('state', np.dtype('S1')))\nfields.append(('userid', np.int32))\nfields.append(('email', np.dtype('S80')))\nfields.append(('ctime', np.int32))\nfields.append(('mtime', np.int32))\ndt = np.dtype(fields)\nf['user_type'] = dt\n\nf.close()\nprint(file_name, \"created\")\n"
  },
  {
    "path": "util/admin/remove_db.py",
    "content": "import h5py\nimport sys\n \n            \n\"\"\"\nmain method\n\"\"\"\ndef main():\n    if len(sys.argv) < 2 or sys.argv[1] in (\"-h\", \"--help\"):\n        print(\"Delete db from h5serv file.\")\n        print(\"Warning: all object uuids and any user ACLs will be lost\")\n        print(\"Usage: python remove_db.py <filename>\")\n        sys.exit(1)\n    filename = sys.argv[1]\n    f = h5py.File(filename, 'a')\n    if \"__db__\" not in f:\n        print(\"No db group found\")\n    else:\n        del f[\"__db__\"]\n        print(\"db group removed\")\n    f.close()\n\n        \n\nmain()"
  },
  {
    "path": "util/admin/setacl.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport sys\nfrom os.path import isfile \nimport json\nimport numpy as np\nimport h5py\n\nfrom h5json import Hdf5db\n    \n\n#\n# Print usage and exit\n#\ndef printUsage():\n    print(\"usage: python set_acl.py -file <filename> [-path h5path] [+-}[crudep] [userid1, userid2, ...]\")\n    print(\"  options -v: verbose, print request and response codes from server\")\n    print(\"  options -file: name of file\")\n    print(\"  options -path: path to object (default as /)\")\n    print(\"  options userids: userid of acl to return\")\n    print(\" ------------------------------------------------------------------------------\")\n    print(\"  Example - set 'tall.h5' default access to read only\")\n    print(\"       python setacl.py -file ../../data/test/tall.h5 +r-udep\")\n    print(\"  Example - get acl for 'tall.h5' dataset /g1/g1.1/dset1.1.1 to full access for user 123\")\n    print(\"        python setacl.py -file ../../data/test/tall.h5 -path /g1/g1.1/dset1.1.1 +crudep 123\")\n    sys.exit(); \n\n\"\"\"\n  Get command line argument.\n  Exit with usage message if not available\n\"\"\"    
\ndef getNextArg(argn):\n    if (argn+1) == len(sys.argv):\n        printUsage();\n        sys.exit(-1)\n    return sys.argv[argn+1]\n    \n               \ndef main():\n    perm_abvr = {'c':'create', 'r': 'read', 'u': 'update', 'd': 'delete', 'e': 'readACL', 'p':'updateACL'} \n    h5path = None\n    filename = None\n    userids = []\n    add_list = []\n    remove_list = []\n    if len(sys.argv) == 1 or sys.argv[1] == \"-h\":\n        printUsage();\n        sys.exit(1)\n    argn = 1 \n    while argn < len(sys.argv):\n        arg = sys.argv[argn]\n        if arg == '-file':  \n            filename = getNextArg(argn)\n            argn += 2\n        elif arg == '-path':\n            h5path = getNextArg(argn)\n            argn += 2\n        elif arg[0] in ('+', '-'):\n            to_list = None\n            for ch in arg:\n                if ch == '+':\n                    to_list = add_list\n                elif ch == '-':\n                    to_list = remove_list\n                elif ch in perm_abvr.keys():\n                    to_list.append(perm_abvr[ch])\n                else:\n                    printUsage()\n                    sys.exit(1)\n            argn += 1\n        else:\n            # process userids\n            try:\n                userid = int(arg)\n                userids.append(userid)\n            except ValueError:\n                print(\"Invalid userid:\", userid)\n                sys.exit(1)\n            argn += 1            \n      \n    \n    conflicts = list(set(add_list) & set(remove_list))\n    \n    if len(conflicts) > 0:\n        print(\"permission: \", conflicts[0], \" set for both add and remove\")\n        sys.exit(1)\n     \n    if filename is None:\n        print(\"no filename specified\")\n        sys.exit(1)    \n  \n    if not isfile(filename):\n        print(filename, \"not found\")\n        sys.exit(1)\n    if not h5py.is_hdf5(filename):\n        print(filename, \"not an hdf5 file\")\n        sys.exit(1)\n    if h5path is 
None:\n        h5path = '/'\n    if len(userids) == 0:\n        userids.append(0)\n    \n    fields = ('userid', 'create', 'read', 'update', 'delete', 'readACL', 'updateACL')    \n    with Hdf5db(filename) as db:\n        try:\n            obj_uuid = db.getUUIDByPath(h5path)\n        except KeyError:\n            print(\"no object found at path:\", h5path)\n            sys.exit(1)\n        print(\"%8s   %8s  %8s  %8s  %8s  %8s  %8s \" % fields)\n        for userid in userids:\n            \n            acl = db.getAclByObjAndUser(obj_uuid, userid)\n            if acl is None and userid != 0:\n                acl = db.getAclByObjAndUser(obj_uuid, 0)\n            if acl is None:\n                acl = db.getDefaultAcl()\n            \n            acl['userid'] = userid\n            for field in add_list:\n                acl[field] = True \n            for field in remove_list:\n                acl[field] = False\n                \n            format_args = [userid]\n            for field in fields:\n                if field == 'userid':\n                    continue\n                format_args.append('Y' if acl[field] else 'N')\n            print(\"%8s %8s  %8s  %8s  %8s  %8s  %8s \" % tuple(format_args))\n            \n            db.setAcl(obj_uuid, acl)\n            \n    \n\nmain()\n\n    \n\t\n"
  },
  {
    "path": "util/admin/update_pwd.py",
    "content": "import h5py\nimport numpy as np\nimport sys\nimport argparse\nimport os.path as op\nimport os\nimport time\nimport datetime\nimport hashlib\nimport config\n \ndef encrypt_pwd(passwd):\n    passwd = passwd.encode('utf-8')\n    encrypted = hashlib.sha224(passwd).hexdigest()\n    return encrypted\n    \ndef print_time(timestamp):\n    str_time = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')\n    return str_time\n    \ndef main():\n    parser = argparse.ArgumentParser()\n    parser.add_argument('-r', \"--replace\", help=\"update existing user/password\", action=\"store_true\")\n    parser.add_argument('-a', \"--add\", help=\"add a new user/password\", action=\"store_true\")\n    parser.add_argument('-f', \"--file\", help='password file')\n    parser.add_argument('-u', \"--user\", help='user id')\n    parser.add_argument('-e', \"--email\", help='user email')\n    parser.add_argument('-p', \"--passwd\", help='user password') \n      \n    args = parser.parse_args()\n       \n    filename = None\n    passwd = None\n    username = None\n    email = None\n    \n    if args.file:\n        filename = args.file\n    else:\n        filename = config.get(\"password_file\")\n        \n    if args.user:\n        username = args.user\n        for ch in username:\n            if ord(ch) >= ord('a') and ord(ch) <= ord('z'):\n                continue # OK\n            if ord(ch) >= ord('0') and ord(ch) <= ord('9'):\n                continue # OK\n            if ord(ch) == ord('_'):\n                continue # OK\n            print(\"invalid username ('\", ch, \"' is not allowed)\")\n            return -1\n                \n        \n    if args.passwd:\n        passwd = args.passwd\n        if passwd.find(':') != -1:\n            print(\"invalid passwd (':' is not allowed)\")\n            return -1\n    if args.email:\n        email = args.email\n        if email.find('@') == -1:\n            print(\"invalid email address ('@' not found)\")\n 
           return -1\n            \n    print(\">filename:\", filename)\n    print(\">username:\", username)\n    print(\">password:\", passwd)\n    print(\">email:\", email)\n    \n    \n    if args.replace:\n        print(\"replace is on\")\n        \n        \n    # verify file exists and is writable\n    if not op.isfile(filename):\n        print(\"password file:\", filename, \" does not exist\")\n        return -1\n        \n    if not h5py.is_hdf5(filename):\n        print(\"invalid password file\")\n        return -1\n        \n    mode = 'r'\n    if args.replace or args.add:\n        mode = 'r+'\n    \n        if not os.access(filename, os.W_OK):\n            print(\"password file is not writable\")\n            return -1\n    \n    f = h5py.File(filename, mode)\n    if 'user_type' not in f:\n        print(\"invalid password file\")\n        return -1\n        \n    user_type = f['user_type']\n       \n    \n    now = int(time.time())\n    \n    if args.add:\n        # add a new user\n        if username in f.attrs:\n            print(\"user already exists\")\n            return -1\n        # create userid 1 greater than previous used\n        userid = len(f.attrs) + 1\n        data = np.empty((), dtype=user_type)\n        data['pwd'] = encrypt_pwd(passwd)\n        data['state'] = 'A'\n        data['userid'] = userid\n        data['email'] = email\n        data['ctime'] = now\n        data['mtime'] = now\n        f.attrs.create(username, data, dtype=user_type)   \n    elif args.replace:\n        if username not in f.attrs:\n            print(\"user not found\")\n            return -1\n        data = f.attrs[username]\n        if passwd:\n            data['pwd'] = encrypt_pwd(passwd)\n        if email:\n            data['email'] = email\n        data['mtime'] = now\n        f.attrs.create(username, data, dtype=user_type)\n    elif username and passwd:\n        if username not in f.attrs:\n            print(\"user not found\")\n            return -1\n        
data = f.attrs[username]\n        if data['pwd'] == encrypt_pwd(passwd):\n            print(\"password is valid\")\n            return 0\n        else:\n            print(\"password is not valid\")\n             \n    elif username:\n        if username not in f.attrs:\n            print(\"user not found\")\n            return -1\n        data = f.attrs[username]\n        print(\"username:\", username, \"userid:\", data['userid'], \"email:\", data['email'], \"state:\", data['state'], \"ctime:\", print_time(data['ctime']), \"mtime:\", print_time(data['mtime']))\n    else:\n        # print all users\n        sys.stdout.write(\"{:<25}{:<8}{:<8}{:<40}{:<20}{:<20}\\n\".format('username', 'userid', 'state', 'email', 'ctime', 'mtime'))\n        sys.stdout.write((\"-\" * 120)+'\\n')\n        for username in f.attrs.keys():\n            data = f.attrs[username]\n            \n            sys.stdout.write(\"{:<25}{:<8}{:<8}{:<40}{:<20}{:<20}\\n\".format(username,\n                 str(data['userid']), data['state'], data['email'], print_time(data['ctime']), print_time(data['mtime']))) \n             \n    f.close()\n    \n    return 0\n     \n    \n\nmain()"
  },
  {
    "path": "util/dumpobjdb.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport h5py\nimport sys\n\ndef dumpAttr(col):\n    for k in col.attrs:\n        attr = col.attrs[k]\n        \n        if attr.__class__.__name__ == 'Reference':\n            obj = col[attr]\n            print('\\t\\tattr[' + k + ']: ->' + obj.name)\n        else:\n            print('\\t\\tattr[' + k + ']: ' + str(attr))  # path\n        \ndef dumpCol(col):   \n    if len(col) == 0:\n        pass # return  # skip\n    npos = col.name.rfind('/') + 1\n    name = col.name[npos:]\n    print('\\t{' + name + '}')\n    dumpAttr(col)\n    for uuid in col:\n        g = col[uuid]\n        addr = h5py.h5o.get_info(g.id).addr\n        print('\\t\\t' + uuid + ': ' + g.__class__.__name__ + ' addr: ' + str(addr))\n    \ndef dumpFile(filePath):\n    print(\"db info for: \", filePath)\n    f = h5py.File(filePath, 'r')\n    dbGrp = f['/']\n    if '__db__'  in f:\n        dbGrp = f['__db__']\n    else:\n        if '{groups}' not in f:\n            print(\"no db data found!\")\n            return\n    print('__db__', 'Group')\n    dumpAttr(dbGrp)\n    dumpCol(dbGrp['{groups}'])\n    
dumpCol(dbGrp['{datasets}'])\n    dumpCol(dbGrp['{datatypes}'])\n    dumpCol(dbGrp['{addr}'])\n    \n    f.close()\n\ndef main():\n    if len(sys.argv) < 2:\n        print(\"usage: dumpobjdb <filename>\")\n        sys.exit(); \n        \n    dumpFile(sys.argv[1])\n     \n\nmain()\n\n    \n\t\n"
  },
  {
    "path": "util/dumptojson.sh",
    "content": "for f in testfiles/*.h5\ndo\n  echo \"$f\"\n  s=${f##*/}\n  b=${s%.h5}\n  python h5tojson.py $f  >json_dump/$b.json\ndone\n"
  },
  {
    "path": "util/exporth5.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\n\nimport six\n\nif six.PY3:\n    unicode = str\n\nimport sys\nimport requests\nimport json\nimport numpy as np\nimport h5py\n\nfrom h5json import Hdf5db\n  \n\n\"\"\"\nexporth5 - creates an HDF5 file based from h5serv domain  \n\"\"\"\n\nclass Dumph5:\n    def __init__(self):\n        self.group_uuids = []\n        self.dataset_uuids = []\n        self.datatype_uuids = []\n               \n    #\n    # Make request to service, convert json response to python dictionary \n    # and return.\n    #      \n    def makeRequest(self, uri):\n        endpoint = self.endpoint\n        if not endpoint:\n            endpoint = \"http://\" + self.domain\n        endpoint += ':'\n        endpoint += str(self.port)\n        req = endpoint + uri\n        if self.verbose:\n            print(\"REQ:\", req)\n        #print \"headers:\", self.domain\n        headers = {'host': self.domain}\n        rsp = requests.get(req, headers=headers)\n        if self.verbose:\n            print(\"RSP:\", rsp.status_code)\n            \n        if rsp.status_code != 200:\n            raise Exception(\"got bad 
httpstatus: \" + str(rsp.status_code) +\n                \" for request: \" + uri);\n        #print \"got response text:\", rsp.text\n        rsp_json = json.loads(rsp.text)\n        return rsp_json\n        \n     \n    #\n    # Create a hard, soft, or external link\n    #    \n    def createLink(self, link_obj, parent_uuid):\n        title = link_obj[\"title\"]\n        link_class = link_obj[\"class\"]\n        if link_class == 'H5L_TYPE_HARD':\n            child_uuid = link_obj[\"id\"]\n            self.db.linkObject(parent_uuid, child_uuid, title)\n        elif link_class == 'H5L_TYPE_SOFT':\n            h5path = link_obj[\"h5path\"]\n            self.db.createSoftLink(parent_uuid, h5path, title)\n        elif link_class == 'H5L_TYPE_EXTERNAL':\n            h5path = link_obj[\"h5path\"]\n            link_file = link_obj[\"h5domain\"]\n            self.db.createExternalLink(parent_uuid, link_file, h5path, title)\n        else:\n            print(\"Unable to create link with class:\", link_class)\n    \n    #\n    # Create HDF5 dataset object and write data values\n    #        \n    def createDataset(self, uuid):\n        # get json for the dataset\n        rsp_json = self.makeRequest(\"/datasets/\" + uuid)\n        \n        self.dataset_uuids.append(uuid)\n\n        datatype = rsp_json['type']\n        if type(datatype) in (str, unicode) and datatype.startswith(\"datatypes/\"):\n            #committed datatype, just pass in the UUID part\n            datatype = datatype[len(\"datatypes/\"):]\n        dims = None\n        max_shape=None\n        creation_props=None\n        if \"shape\" in rsp_json:\n            shape = rsp_json[\"shape\"]\n            if shape[\"class\"] == 'H5S_SIMPLE':\n                dims = shape[\"dims\"]\n                if type(dims) == int:\n                    # convert int to array\n                    dim1 = dims\n                    dims = [dim1]\n                if \"maxdims\" in shape:\n                    max_shape = 
shape[\"maxdims\"]\n                    if type(max_shape) == int:\n                        #convert to array\n                        dim1 = max_shape\n                        max_shape = [dim1]\n                    # convert 0's to None's\n                    for i in range(len(max_shape)):\n                        if max_shape[i] == 0:\n                            max_shape[i] = None\n        if 'creationProperties' in rsp_json:\n            creation_props = rsp_json['creationProperties']\n                                 \n        self.db.createDataset(datatype, dims, max_shape=max_shape, \n            creation_props=creation_props, obj_uuid=uuid)         \n        \n        # get the data values    \n        rsp_json = self.makeRequest(\"/datasets/\" + uuid + '/value')\n           \n        if \"value\" in rsp_json:\n            data = rsp_json[\"value\"]\n            #print json.dumps(data, sort_keys=True, indent=4)\n            self.db.setDatasetValuesByUuid(uuid, data) \n            \n    #\n    # Create all datasets in the domain\n    #\n    def createDatasets(self):\n        uri = \"/datasets\" \n        rsp_json = self.makeRequest(uri)\n        dataset_ids = rsp_json['datasets']\n        \n        for dataset_uuid in dataset_ids:\n            self.createDataset(dataset_uuid)\n            \n    def createAttribute(self, attr_name, col_name, uuid):\n    \n        attr_json = self.makeRequest(\"/\" + col_name + \"/\" + uuid + \"/attributes/\" + attr_name)\n        datatype = attr_json[\"type\"]\n        if type(datatype) in (str, unicode) and datatype.startswith(\"datatypes/\"):\n            #committed datatype, just pass in the UUID part\n            datatype = datatype[len(\"datatypes/\"):]\n            \n        attr_value = attr_json[\"value\"]\n        dims = None\n        if \"shape\" in attr_json:\n            shape = attr_json[\"shape\"]\n            if shape[\"class\"] == 'H5S_SIMPLE':\n                dims = shape[\"dims\"]\n                if 
type(dims) == int:\n                    # convert int to array\n                    dim1 = dims\n                    dims = [dim1]\n        self.db.createAttribute(col_name, uuid, attr_name, dims, datatype, attr_value)\n                    \n            \n    #\n    # create committed datatype HDF5 object\n    #   \n    def createDatatype(self, uuid):\n        rsp_json = self.makeRequest(\"/datatypes/\" + uuid)\n        datatype = rsp_json['type']\n        self.db.createCommittedType(datatype, obj_uuid=uuid)   \n        \n        \n    #\n    # create datatypes\n    #\n    def createDatatypes(self):   \n        rsp_json = self.makeRequest(\"/datatypes\")\n        datatype_ids = rsp_json['datatypes']\n        \n        for datatype_uuid in datatype_ids:\n            self.createDatatype(datatype_uuid)\n    \n            \n    #\n    # Create HDF5 group object  (links and attributes will be added later)\n    #        \n    def createGroup(self, uuid):\n        self.group_uuids.append(uuid)\n        if uuid != self.root_uuid:\n            self.db.createGroup(obj_uuid=uuid)\n            \n            \n    #\n    # Create all groups in the domain\n    #\n    def createGroups(self):\n        rsp_json = self.makeRequest(\"/groups\")\n        group_ids = rsp_json['groups']\n        group_ids.append(self.root_uuid)  # add root group uuid\n        \n        for group_uuid in group_ids:\n            self.createGroup(group_uuid)\n                 \n    # \n    # Create all the HDF5 objects defined in the JSON file\n    #       \n    def createObjects(self):\n        # create datatypes\n        self.createDatatypes()\n        \n        # create groups\n        self.createGroups()\n                \n        # create datasets\n        self.createDatasets()\n         \n       \n            \n    # \n    # Create all the attributes for HDF5 objects defined in the JSON file\n    # Note: this needs to be done after createObjects since an attribute\n    # may use a committed 
datatype\n    #       \n    def createAttributes(self):\n        # create datatype attributes\n        for datatype_uuid in self.datatype_uuids:\n            rsp_json = self.makeRequest(\"/datatypes/\" + datatype_uuid + \"/attributes\")\n            attributes = rsp_json[\"attributes\"]\n            for attribute_json in attributes:\n                self.createAttribute(attribute_json[\"name\"], \"datatypes\", datatype_uuid)\n                \n        # create group attributes\n        for group_uuid in self.group_uuids:\n            rsp_json = self.makeRequest(\"/groups/\" + group_uuid + \"/attributes\")\n            attributes = rsp_json[\"attributes\"]\n            for attribute_json in attributes:\n                self.createAttribute(attribute_json[\"name\"], \"groups\", group_uuid)\n                \n        # create dataset attributes\n        for dataset_uuid in self.dataset_uuids:\n            rsp_json = self.makeRequest(\"/datasets/\" + dataset_uuid + \"/attributes\")\n            attributes = rsp_json[\"attributes\"]\n            for attribute_json in attributes:\n                self.createAttribute(attribute_json[\"name\"], \"datasets\", dataset_uuid)\n            \n                    \n    #\n    # Link all the objects \n    # Note: this will \"de-anonymous-ize\" objects defined in the HDF5 file\n    #   Any non-linked objects will be deleted when the __db__ group is deleted\n    #               \n    def createLinks(self):\n        for group_uuid in self.group_uuids:\n            rsp_json = self.makeRequest(\"/groups/\" + group_uuid + \"/links\")\n            links = rsp_json[\"links\"]\n            for link in links:\n                self.createLink(link, group_uuid)\n                 \n        \n    def writeFile(self, db):\n    \n        self.db = db\n        \n        self.root_uuid = db.root_uuid\n        \n        print(\"file root_uuid:\", self.root_uuid)\n        \n        \n        self.createObjects()    # create datasets, groups, committed 
datatypes\n        self.createAttributes() # create attributes for objects\n        self.createLinks()      # link it all together\n\n#\n# Print usage and exit\n#\ndef printUsage():\n    print(\"usage: python exporth5.py [-v] [-endpoint=<server_ip>]  [-port=<port>] <domain> <filename>\")\n    print(\"  options -v: verbose, print request and response codes from server\")\n    print(\"  options -endpoint: specify IP endpoint of server\")\n    print(\"  options -port: port address of server [default 7253]\")\n    print(\" ------------------------------------------------------------------------------\")\n    print(\"  Example - get 'tall' collection from HDF Group server, save to tall.h5:\")\n    print(\"       python exporth5.py tall.data.hdfgroup.org tall.h5\")\n    print(\"  Example - get 'tall' collection from a local server instance \")\n    print(\"        (assuming the server is using port 5000):\")\n    print(\"        python exporth5.py -endpoint=127.0.0.1 -port=5000 tall.test.hdfgroup.org tall.h5\")\n    sys.exit(); \n               \ndef main():\n    nargs = len(sys.argv)\n        \n    dumper = Dumph5()\n    dumper.verbose = False \n    dumper.endpoint = None\n    dumper.port = 7253\n    dumper.noDsetData = False\n    dumper.noAttrData = False\n    \n    endpoint_option = \"-endpoint=\"\n    port_option = \"-port=\"\n    \n    option_count = 0\n    \n    for arg in sys.argv:\n        if arg.startswith(endpoint_option):\n            endpoint = arg[len(endpoint_option):]\n            if endpoint.startswith(\"http\"):\n                dumper.endpoint = endpoint\n            else:\n                dumper.endpoint = \"http://\" + endpoint\n            option_count += 1\n        elif arg.startswith(port_option):\n            port = arg[len(port_option):]\n            dumper.port = int(port)\n            option_count += 1\n        elif arg == \"-v\":\n            dumper.verbose = True\n            \n     \n    if nargs - option_count <= 2:\n        printUsage()\n  
      \n    domain = sys.argv[nargs-2]\n    filename = sys.argv[nargs-1]\n    \n    print(\"domain:\", domain)\n    print(\"filename:\", filename)\n    \n    dumper.domain = domain\n    \n    \n    domain_json = dumper.makeRequest(\"/\")\n    \n    if \"root\" not in domain_json:\n        raise Exception(\"no root key in domain response\")\n        \n    root_uuid = domain_json[\"root\"]\n    \n    # create the file, will raise IOError if there's a problem\n    Hdf5db.createHDF5File(filename)\n    \n    with Hdf5db(filename, root_uuid=root_uuid) as db:\n        dumper.writeFile(db) \n\n    # open with h5py and remove the _db_ group\n    # Note: this will delete any anonymous (un-linked) objects\n    f = h5py.File(filename, 'a') \n    del f[\"__db__\"]\n    f.close()\n    \n           \n    print(\"done!\")  \n    \n\nmain()\n\n    \n\t\n"
  },
  {
    "path": "util/exportjson.py",
    "content": "##############################################################################\n# Copyright by The HDF Group.                                                #\n# All rights reserved.                                                       #\n#                                                                            #\n# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and      #\n# Utilities.  The full HDF5 REST Server copyright notice, including          #\n# terms governing use, modification, and redistribution, is contained in     #\n# the file COPYING, which can be found at the root of the source code        #\n# distribution tree.  If you do not have access to this file, you may        #\n# request a copy from help@hdfgroup.org.                                     #\n##############################################################################\nimport requests\nimport sys\nimport json\n\n\n\"\"\"\nDumpJson - return json representation of all objects within the given domain\n\"\"\"\n\nclass DumpJson:\n    def __init__(self):\n        pass\n        \n    #\n    # Make request to service, convert json response to python dictionary \n    # and return.\n    #      \n    def makeRequest(self, uri):\n        endpoint = self.endpoint\n        if not endpoint:\n            endpoint = \"http://\" + self.domain\n        endpoint += ':'\n        endpoint += str(self.port)\n        req = endpoint + uri\n        if self.verbose:\n            print(\"REQ:\", req)\n        #print \"headers:\", self.domain\n        headers = {'host': self.domain}\n        rsp = requests.get(req, headers=headers)\n        if self.verbose:\n            print(\"RSP:\", rsp.status_code)\n            \n        if rsp.status_code != 200:\n            raise Exception(\"got bad httpstatus: \" + str(rsp.status_code) +\n                \" for request: \" + uri);\n        #print \"got response text:\", rsp.text\n        rsp_json = json.loads(rsp.text)\n        return 
rsp_json\n        \n        \n    def dumpAttribute(self, obj_uri):\n        rsp_json = self.makeRequest(obj_uri)\n        attr_json = {}\n        attr_json['name'] = rsp_json['name']\n        attr_json['type'] = rsp_json['type']\n        attr_json['shape'] = rsp_json['shape']\n        if 'value' in rsp_json and rsp_json['value']:\n            attr_json['value'] = rsp_json['value']\n        return attr_json\n        \n    def dumpAttributes(self, uri, jsonOut):\n        rsp_json = self.makeRequest(uri)\n        attributes_json = rsp_json['attributes']\n        \n        if len(attributes_json) > 0:\n            items = []\n            \n            for attr in attributes_json:\n                name = attr['name']\n                \n                if self.noAttrData:\n                    # just copy what we got from \"attributes\" request\n                    items.append(attr)\n                else:\n                    # fetch the attribute data\n                    uri_attr_request = uri + \"/\" + name\n                    item = self.dumpAttribute(uri_attr_request)\n                    items.append(item)\n            jsonOut['attributes'] = items;\n        \n    def dumpLinks(self, uri, jsonOut):\n        rsp_json = self.makeRequest(uri)\n        links_json = rsp_json['links']\n        \n        if len(links_json) > 0:\n            linkDict = []\n            jsonOut['links'] = links_json\n        \n    def dumpGroup(self, uri, jsonOut):\n        rsp_json = self.makeRequest(uri)\n        group_uuid = rsp_json['id'] \n        jsonOut[group_uuid] = {} \n        self.dumpLinks(uri + '/links', jsonOut[group_uuid]) \n        self.dumpAttributes(uri + '/attributes', jsonOut[group_uuid]) \n        \n    def dumpGroups(self):\n        uri = \"/groups\" \n        rsp_json = self.makeRequest(uri)\n        jsonOut = {}\n        \n        self.json['groups'] = jsonOut\n        \n        group_ids = rsp_json['groups']\n        group_ids.append(self.json['root'])  # add in 
root group\n        for group_uuid in group_ids:\n            uri = \"/groups/\" + group_uuid\n            self.dumpGroup(uri, jsonOut)\n        \n    def dumpDataset(self, uri, jsonOut):\n        rsp_json = self.makeRequest(uri)\n        dset_uuid = rsp_json['id'] \n        dset_json = {}\n        dset_json['shape'] = rsp_json['shape']\n        dset_json['type'] = rsp_json['type']\n        \n        # get the data values    \n        rsp_json = self.makeRequest(uri + '/value')\n        \n        if not self.noDsetData:\n            # get the dataset values\n            if 'value' in rsp_json:\n                data = rsp_json['value']\n                if data:\n                    dset_json['value'] = data\n        \n        jsonOut[dset_uuid] = dset_json \n         \n        self.dumpAttributes(uri + '/attributes', jsonOut[dset_uuid]) \n        \n    def dumpDatasets(self):\n        rsp_json = self.makeRequest(\"/datasets\")\n        jsonOut = {}\n        \n        self.json['datasets'] = jsonOut\n        \n        dataset_ids = rsp_json['datasets']\n        \n        for dset_uuid in dataset_ids:\n            uri = \"/datasets/\" + dset_uuid\n            self.dumpDataset(uri, jsonOut)\n        \n    def dumpDatatype(self, uri, jsonOut):\n        rsp_json = self.makeRequest(uri)\n        \n        dtype_uuid = rsp_json['id']\n        \n        type_json = {}\n        type_json['type'] = rsp_json['type']\n        \n        jsonOut[dtype_uuid] =  type_json\n         \n        self.dumpAttributes(uri + '/attributes', jsonOut[dtype_uuid]) \n        \n    def dumpDatatypes(self):\n        rsp_json = self.makeRequest(\"/datatypes\")\n        jsonOut = {}\n        \n        self.json['datatypes'] = jsonOut\n        \n        datatype_ids = rsp_json['datatypes']\n        \n        for dtype_uuid in datatype_ids:\n            uri = \"/datatypes/\" + dtype_uuid\n            self.dumpDatatype(uri, jsonOut)\n             \n    \n    def dumpDomain(self):\n        rsp_json = 
self.makeRequest(\"/\")\n        \n        self.json = {}\n        \n        # save the root uuid\n        self.json['root'] = rsp_json['root']\n        \n        self.dumpGroups()\n        \n        self.dumpDatasets()\n            \n        self.dumpDatatypes()\n            \n        print(json.dumps(self.json, sort_keys=True, indent=4))\n\n#\n# Print usage and exit\n#\ndef printUsage():\n    print(\"usage: python exportjson.py [-v] [-D|d] [-endpoint=<server_ip>]  [-port=<port>] <domain>\")\n    print(\"  options -v: verbose, print request and response codes from server\")\n    print(\"  options -D: suppress all data output\")\n    print(\"  options -d: suppress data output for datasets (but not attributes)\")\n    print(\"  options -endpoint: specify IP endpoint of server\")\n    print(\"  options -port: port address of server [default 7253]\")\n    print(\" ------------------------------------------------------------------------------\")\n    print(\"  Example - get 'tall' collection from HDF Group server:\")\n    print(\"       python exportjson.py tall.data.hdfgroup.org\")\n    print(\"  Example - get 'tall' collection from a local server instance \")\n    print(\"        (assuming the server is using port 5000):\")\n    print(\"        python exportjson.py -endpoint=127.0.0.1 -port=5000 tall.test.hdfgroup.org\")\n    sys.exit(); \n    \n#\n# main\n#\ndef main():\n    \n    nargs = len(sys.argv)\n        \n    dumper = DumpJson()\n    dumper.verbose = False \n    dumper.endpoint = None\n    dumper.port = 7253\n    dumper.noDsetData = False\n    dumper.noAttrData = False\n    \n    endpoint_option = \"-endpoint=\"\n    port_option = \"-port=\"\n    \n    option_count = 0\n    \n    for arg in sys.argv:\n        if arg.startswith(endpoint_option):\n            endpoint = arg[len(endpoint_option):]\n            if endpoint.startswith(\"http\"):\n                dumper.endpoint = endpoint\n            else:\n                dumper.endpoint = \"http://\" + 
endpoint\n            option_count += 1\n        elif arg.startswith(port_option):\n            port = arg[len(port_option):]\n            dumper.port = int(port)\n            option_count += 1\n        elif arg == \"-v\":\n            dumper.verbose = True\n        elif arg == \"-D\":\n            dumper.noDsetData = True\n            dumper.noAttrData = True\n        elif arg == \"-d\":\n            dumper.noDsetData = True\n            \n     \n    if nargs - option_count <= 1:\n        printUsage()\n        \n    dumper.domain = sys.argv[nargs-1]\n    dumper.dumpDomain()\n    \n\nmain()\n\n    \n\t\n"
  },
  {
    "path": "util/rebuildIndex.py",
    "content": "import sys\nimport h5py\nimport logging\nfrom h5json import Hdf5db\n\n\ndbname = \"__db__\"\nif len(sys.argv) < 2 or sys.argv[1] == \"-h\" or sys.argv[1] == \"--help\":\n    print(\"Usage: python rebuildIndex.py [filename]\")\n    print(\"Warning: this utility will delete any previous UUIDs!\");\n    sys.exit()\n\n# setup logger\nlog = logging.getLogger(\"rebuildIndex\")\nlog.setLevel(logging.INFO)\nhandler = logging.StreamHandler(sys.stdout)\n# create formatter\nformatter = logging.Formatter( \"%(levelname)s:%(filename)s:%(lineno)d::%(message)s\")\nhandler.setFormatter(formatter)\nlog.addHandler(handler)\nlog.propagate = False \n\nfilepath = sys.argv[1]\nlog.info(\"opening file: \" + filepath)\n# remove the old index\nf = h5py.File(filepath, 'a')\nif dbname in f:\n    log.info(\"deleting old db group\")\n    del f[dbname]\nf.close()\n\n# now open with hdf5db\n \nwith Hdf5db(filepath, app_logger=log) as db:\n    # the actual index rebuilding will happen in the init function\n    root_uuid = db.getUUIDByPath('/')\n    print(\"root_uuid:\", root_uuid)\n\nprint(\"done!\")\n\n"
  }
]