Repository: HDFGroup/h5serv Branch: develop Commit: 12c1c4786259 Files: 208 Total size: 851.0 KB Directory structure: gitextract_szc0bvdr/ ├── .gitignore ├── .gitmodules ├── .travis.yml ├── COPYING ├── Dockerfile ├── README.rst ├── data/ │ ├── public/ │ │ └── tall.h5 │ └── readme.txt ├── docs/ │ ├── AclOps/ │ │ ├── GET_ACL.rst │ │ ├── GET_ACLs.rst │ │ ├── PUT_ACL.rst │ │ └── index.rst │ ├── AdminTools.rst │ ├── AttrOps/ │ │ ├── DELETE_Attribute.rst │ │ ├── GET_Attribute.rst │ │ ├── GET_Attributes.rst │ │ ├── PUT_Attribute.rst │ │ └── index.rst │ ├── Authorization.rst │ ├── CommonErrorResponses.rst │ ├── CommonRequestHeaders.rst │ ├── CommonResponseHeaders.rst │ ├── DatasetOps/ │ │ ├── DELETE_Dataset.rst │ │ ├── GET_Dataset.rst │ │ ├── GET_DatasetShape.rst │ │ ├── GET_DatasetType.rst │ │ ├── GET_Datasets.rst │ │ ├── GET_Value.rst │ │ ├── POST_Dataset.rst │ │ ├── POST_Value.rst │ │ ├── PUT_DatasetShape.rst │ │ ├── PUT_Value.rst │ │ └── index.rst │ ├── DatatypeOps/ │ │ ├── DELETE_Datatype.rst │ │ ├── GET_Datatype.rst │ │ ├── GET_Datatypes.rst │ │ ├── POST_Datatype.rst │ │ └── index.rst │ ├── Diagram.rst │ ├── DomainOps/ │ │ ├── DELETE_Domain.rst │ │ ├── GET_Domain.rst │ │ ├── PUT_Domain.rst │ │ └── index.rst │ ├── FAQ/ │ │ └── index.rst │ ├── GroupOps/ │ │ ├── DELETE_Group.rst │ │ ├── DELETE_Link.rst │ │ ├── GET_Group.rst │ │ ├── GET_Groups.rst │ │ ├── GET_Link.rst │ │ ├── GET_Links.rst │ │ ├── POST_Group.rst │ │ ├── PUT_Link.rst │ │ └── index.rst │ ├── Hypermedia.rst │ ├── Installation/ │ │ ├── ServerSetup.rst │ │ └── index.rst │ ├── Introduction/ │ │ └── index.rst │ ├── License/ │ │ └── index.rst │ ├── Makefile │ ├── Reference.rst │ ├── Resources.rst │ ├── Tutorials/ │ │ ├── IPython_samples.rst │ │ └── index.rst │ ├── Types/ │ │ └── index.rst │ ├── UsingIteration.rst │ ├── Utilities.rst │ ├── WhatsNew/ │ │ └── index.rst │ ├── _static/ │ │ └── README │ ├── build.sh │ ├── conf.py │ ├── index.rst │ └── make.bat ├── entrypoint.sh ├── examples/ │ ├── h5pyd_ex1.ipynb │ ├── h5pyd_ex2.ipynb │ ├── nodejs/ │ │ └── gettoc.js │ ├── pi_compute.ipynb │ └── rest_ex1.ipynb ├── h5serv/ │ ├── __init__.py │ ├── __main__.py │ ├── app.py │ ├── authFile.py │ ├── authMongo.py │ ├── config.py │ ├── fileUtil.py │ ├── h5watchdog.py │ ├── httpErrorUtil.py │ ├── passwordUtil.py │ ├── timeUtil.py │ └── tocUtil.py ├── setup.py ├── test/ │ ├── aws/ │ │ ├── config.py │ │ └── roottest.py │ ├── integ/ │ │ ├── acltest.py │ │ ├── attributetest.py │ │ ├── config.py │ │ ├── datasettest.py │ │ ├── datasettypetest.py │ │ ├── datatypetest.py │ │ ├── dirtest.py │ │ ├── grouptest.py │ │ ├── helper.py │ │ ├── linktest.py │ │ ├── makeattr.py │ │ ├── makegroups.py │ │ ├── roottest.py │ │ ├── setupdata.py │ │ ├── shapetest.py │ │ ├── spidertest.py │ │ └── valuetest.py │ ├── test_files/ │ │ ├── array_attr.h5 │ │ ├── array_dset.h5 │ │ ├── arraytype.h5 │ │ ├── attr1k.h5 │ │ ├── bitfield_attr.h5 │ │ ├── bitfield_dset.h5 │ │ ├── bool_attr.h5 │ │ ├── bool_dset.h5 │ │ ├── committed_type.h5 │ │ ├── comp_complex.h5 │ │ ├── compound.h5 │ │ ├── compound_array.h5 │ │ ├── compound_array_attr.h5 │ │ ├── compound_array_dset.h5 │ │ ├── compound_attr.h5 │ │ ├── compound_committed.h5 │ │ ├── dim_scale.h5 │ │ ├── dim_scale_data.h5 │ │ ├── dset1k.h5 │ │ ├── dset_creationprop.h5 │ │ ├── dset_gzip.h5 │ │ ├── empty.h5 │ │ ├── enum_attr.h5 │ │ ├── enum_dset.h5 │ │ ├── ex_image2.h5 │ │ ├── ex_image3.h5 │ │ ├── fillvalue.h5 │ │ ├── fixed_string_attr.h5 │ │ ├── fixed_string_dset.h5 │ │ ├── group100.h5 │ │ ├── group1k.h5 │ │ ├── h5ex_d_alloc.h5 │ │ ├── 
h5ex_d_checksum.h5 │ │ ├── h5ex_d_chunk.h5 │ │ ├── h5ex_d_compact.h5 │ │ ├── h5ex_d_extern.h5 │ │ ├── h5ex_d_fillval.h5 │ │ ├── h5ex_d_gzip.h5 │ │ ├── h5ex_d_hyper.h5 │ │ ├── h5ex_d_nbit.h5 │ │ ├── h5ex_d_rdwr.h5 │ │ ├── h5ex_d_shuffle.h5 │ │ ├── h5ex_d_sofloat.h5 │ │ ├── h5ex_d_soint.h5 │ │ ├── h5ex_d_transform.h5 │ │ ├── h5ex_d_unlimadd.h5 │ │ ├── h5ex_d_unlimgzip.h5 │ │ ├── h5ex_d_unlimmod.h5 │ │ ├── namedtype.h5 │ │ ├── notahdf5file.h5 │ │ ├── null_objref_dset.h5 │ │ ├── null_space_attr.h5 │ │ ├── null_space_dset.h5 │ │ ├── objref_attr.h5 │ │ ├── objref_dset.h5 │ │ ├── opaque_attr.h5 │ │ ├── opaque_dset.h5 │ │ ├── regionref_attr.h5 │ │ ├── regionref_dset.h5 │ │ ├── resizable.h5 │ │ ├── sample.h5 │ │ ├── scalar.h5 │ │ ├── scalar_attr.h5 │ │ ├── tall.h5 │ │ ├── tall_with_udlink.h5 │ │ ├── tallrw.h5 │ │ ├── tgroup.h5 │ │ ├── tref.h5 │ │ ├── tstr.h5 │ │ ├── type1k.h5 │ │ ├── types_attr.h5 │ │ ├── types_dset.h5 │ │ ├── vlen_attr.h5 │ │ ├── vlen_dset.h5 │ │ ├── vlen_string_attr.h5 │ │ ├── vlen_string_dset.h5 │ │ ├── vlen_string_dset_utc.h5 │ │ ├── vlen_string_nullterm_attr.h5 │ │ ├── vlen_string_nullterm_dset.h5 │ │ ├── vlen_unicode_attr.h5 │ │ └── zerodim.h5 │ ├── testall.py │ └── unit/ │ ├── config.py │ ├── fileUtilTest.py │ └── timeUtilTest.py └── util/ ├── admin/ │ ├── add_user.py │ ├── config.py │ ├── getacl.py │ ├── import_file.py │ ├── makepwd_file.py │ ├── remove_db.py │ ├── setacl.py │ └── update_pwd.py ├── dumpobjdb.py ├── dumptojson.sh ├── exporth5.py ├── exportjson.py └── rebuildIndex.py ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitignore ================================================ .DS_Store *.pyc data/*.h5 data/*.db test/unit/*.h5 test/unit/*.db # jetbrains IDE stuff .idea *.iml ================================================ FILE: .gitmodules ================================================ [submodule "hdf5-json"] path = hdf5-json url = https://github.com/HDFGroup/hdf5-json.git [submodules "hdf5-json"] branch = stable ================================================ FILE: .travis.yml ================================================ language: python notifications: email: false python: - "2.7" - "3.4" - "3.5" - "3.6" - "3.7" - "3.8" install: - sudo apt-get update -qq - sudo apt-get install -qq libhdf5-serial-dev - pip uninstall numpy -y - pip install numpy>=1.10.4 - pip install h5py - pip install requests - pip install pytz - pip install tornado - pip install watchdog - git clone https://github.com/HDFGroup/hdf5-json.git - cd hdf5-json - python setup.py install - cd .. - pip install . script: - PYTHONPATH="test/unit" python test/testall.py --unit --failslow - python h5serv >h5serv.out & - sleep 5 - PYTHONPATH="test/integ" python test/testall.py --integ --failslow ================================================ FILE: COPYING ================================================ Copyright Notice and License Terms for h5serv Software Service, Libraries and Utilities ----------------------------------------------------------------------------- h5serv (HDF5 REST Server) Service, Libraries and Utilities Copyright 2014-2017 by The HDF Group. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted for any purpose (including commercial purposes) provided that the following conditions are met: 1. 
Redistributions of source code must retain the above copyright notice, this list of conditions, and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions, and the following disclaimer in the documentation and/or materials provided with the distribution. 3. In addition, redistributions of modified forms of the source or binary code must carry prominent notices stating that the original code was changed and the date of the change. 4. All publications or advertising materials mentioning features or use of this software are asked, but not required, to acknowledge that it was developed by The HDF Group and credit the contributors. 5. Neither the name of The HDF Group, nor the name of any Contributor may be used to endorse or promote products derived from this software without specific prior written permission from The HDF Group or the Contributor, respectively. DISCLAIMER: THIS SOFTWARE IS PROVIDED BY THE HDF GROUP AND THE CONTRIBUTORS "AS IS" WITH NO WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED. In no event shall The HDF Group or the Contributors be liable for any damages suffered by the users arising out of the use of this software, even if advised of the possibility of such damage. ================================================ FILE: Dockerfile ================================================ FROM python:3.6 MAINTAINER John Readey RUN cd /usr/local/src ; \ pip install --upgrade pip ; \ pip install h5py ; \ pip install tornado ; \ pip install requests ; \ pip install pytz ; \ pip install watchdog ; \ pip install pymongo WORKDIR /usr/local/src RUN git clone https://github.com/HDFGroup/hdf5-json.git ; \ cd hdf5-json ; \ python setup.py install ; \ cd .. ; \ mkdir h5serv WORKDIR /usr/local/src/h5serv COPY h5serv h5serv COPY util util COPY test test COPY data /data RUN cp /usr/local/src/hdf5-json/data/hdf5/tall.h5 /data ; \ ln -s /data EXPOSE 5000 COPY entrypoint.sh / ENTRYPOINT ["/entrypoint.sh"] ================================================ FILE: README.rst ================================================ h5serv - REST-based service for HDF5 data =========================================== .. image:: https://travis-ci.org/HDFGroup/h5serv.svg?branch=develop :target: https://travis-ci.org/HDFGroup/h5serv Introduction ------------ h5serv is a web service that implements a REST-based web service for HDF5 data stores as described in the paper: http://hdfgroup.org/pubs/papers/RESTful_HDF5.pdf. Notice ------ h5serv has been deprecated. Users looking for a RESTful way of accessing HDF data should use HSDS (https://github.com/HDFGroup/hsds) instead. Websites -------- * Main website: http://www.hdfgroup.org * Source code: https://github.com/HDFGroup/h5serv * Mailing list: hdf-forum@lists.hdfgroup.org * Documentation: http://h5serv.readthedocs.org Quick Install ------------- Install Python (2.7 or later) and the following packages: * NumPy 1.10.4 or later * h5py 2.5 or later * tornado 4.0.2 or later * watchdog 0.8.3 or later * requests 2.3 or later (for client tests) Clone the hdf5-json project: ``git clone https://github.com/HDFGroup/hdf5-json.git`` . Next, cd to the hdf5-json folder and run: ``python setup.py install``. Clone this project: ``git clone https://github.com/HDFGroup/h5serv.git``. Running the Server ------------------ Start the server: ``cd h5serv; python h5serv``. By default the server will listen on port 5000. The port and and several other defaults can be modified with command line options. 
For example to use port 8888 run: ``python h5serv --port=8888``. See test cases for examples of interacting with the server. Run: ``python testall.py`` from the test directory to run through the entire test suite. Also, the interface (at least as far as read requests) can be explored in a browser. Go to: http://127.0.0.1:5000/. A JSON browser plugin will be helpful for formatting responses from the server to be more human readable. See h5serv/docs/Installation.rst for step by step install instructions. Running with Docker ------------------- To run h5serv as a docker container you just need to install Docker (no Python, h5py, etc. needed). * Install docker: https://docs.docker.com/installation/#installation. * Run the h5serv image: ``docker run -p 5000:5000 -d -v :/data hdfgroup/h5serv`` where is the folder path that contains any HDF5 files you want to made available through the h5serv REST API. Since requests to the server can modify (or delete!) content, you probably want to create a new folder and copy files to it. * Go to http://192.168.99.100:5000/ in your browser to verify the server is up and running (replace 192.168.99.100 with the IP address of the system or VM that is running the container). Writing Client Applications ---------------------------- As a REST service, clients be developed using almost any programming language. The test programs under: h5serv/test/integ illustrate some of the methods for peforming different operations using Python. The related project: https://github.com/HDFGroup/h5pyd provides a (mostly) h5py-compatible interface to the server for Python clients. Uninstalling ------------ h5serv does not modify anything in the system outside the directory where it was installed, so just remove the install directory and all contents to uninstall. Reporting bugs (and general feedback) ------------------------------------- Create new issues at http://github.com/HDFGroup/h5serv/issues for any problems you find. For general questions/feedback, please use the list (hdf-forum@lists.hdfgroup.org). ================================================ FILE: data/readme.txt ================================================ This is the default location for HDF5 data files to be visible in h5serv. ================================================ FILE: docs/AclOps/GET_ACL.rst ================================================ ********************************************** GET ACL ********************************************** Description =========== Returns access information for the given user for the object with the UUID provided in the URI. Requests ======== Syntax ------ To get a user's default access for a domain: .. code-block:: http GET /acls/ HTTP/1.1 Host: DOMAIN Authorization: To get a user's access information for a group: .. code-block:: http GET /groups//acls/ HTTP/1.1 Host: DOMAIN Authorization: To get a user's access information for a dataset: .. code-block:: http GET /datasets//acls/ HTTP/1.1 Host: DOMAIN Authorization: To get a user's access information for a committed datatype: .. code-block:: http GET /datatypes//acls/ HTTP/1.1 Host: DOMAIN Authorization: where: * is the UUID of the requested dataset/group/committed datatype * is the userid for the requested user. Use the special userid "default" to get the default access permisions for the object Request Parameters ------------------ This implementation of the operation does not use request parameters. 
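As an informal illustration (not part of the official sample requests), a GET ACL call of this form could be made from a Python client with the ``requests`` package; the endpoint, domain, UUID, and credentials below are placeholders:

.. code-block:: python

    import requests

    endpoint = "http://127.0.0.1:5000"                    # placeholder server address
    domain = "tall.test.hdfgroup.org"                     # placeholder domain
    group_uuid = "052dcbbd-9d33-11e4-86ce-3c15c2da029e"   # placeholder group UUID

    # Request the ACL entry for one user of a group; HTTP Basic auth carries the credentials.
    rsp = requests.get(
        endpoint + "/groups/" + group_uuid + "/acls/test_user1",
        headers={"Host": domain},
        auth=("test_user1", "test"),                      # placeholder username/password
    )
    print(rsp.status_code)    # 200 on success
    print(rsp.json()["acl"])  # e.g. {"userName": "test_user1", "read": True, ...}
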
Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: acl ^^^ A JSON object that describe a users acces permisions. Subkeys of acl are: userName: the userid of the requested user create: A boolean flag that indicated if the user is authorized to create new resources delete: A boolean flag that indicated if the user is authorized to delete resources read: A boolean flag that indicated if the user is authorized to read (GET) resources update: A boolean flag that indicated if the user is authorized to update resources readACL: A boolean flag that indicated if the user is authorized to read the object's ACL updateACL: A boolean flag that indicated if the user is authorized to update the object's ACL hrefs ^^^^^ An array of hypertext links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET /groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/acls/test_user1 HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 16 Jan 2015 20:06:08 GMT Content-Length: 660 Etag: "2c410d1c469786f25ed0075571a8e7a3f313cec1" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "acl": { "create": false, "delete": false, "read": true, "readACL": false, "update": false, "updateACL": false, "userName": "test_user1" }, "hrefs": [ { "href": "http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e/acls/test_user1", "rel": "self" }, { "href": "http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e", "rel": "root" }, { "href": "http://tall_acl.test.hdfgroup.org/", "rel": "home" }, { "href": "http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e", "rel": "owner" } ] Related Resources ================= * :doc:`PUT_ACL` * :doc:`GET_ACLs` ================================================ FILE: docs/AclOps/GET_ACLs.rst ================================================ ********************************************** GET ACLs ********************************************** Description =========== Returns access information for all users defined in the ACL (Access Control List) for the object with the UUID provided in the URI. Requests ======== Syntax ------ To get the ACL for a domain: .. code-block:: http GET /acls HTTP/1.1 Host: DOMAIN Authorization: To get the ACL for a group: .. code-block:: http GET /groups//acls HTTP/1.1 Host: DOMAIN Authorization: To get the ACL for a dataset: .. code-block:: http GET /datasets//acls HTTP/1.1 Host: DOMAIN Authorization: To get the ACL for a committed datatype: .. 
code-block:: http GET /datatypes//acls HTTP/1.1 Host: DOMAIN Authorization: where: * is the UUID of the requested dataset/group/committed datatype Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: acls ^^^^ A JSON list that contains one element for each user specified in the ACL. The elements will be JSON object that describe the users acces permisions. Subkeys of the element are are: userName: the userid of the user ('default' for the default access) create: A boolean flag that indicated if the user is authorized to create new resources delete: A boolean flag that indicated if the user is authorized to delete resources read: A boolean flag that indicated if the user is authorized to read (GET) resources update: A boolean flag that indicated if the user is authorized to update resources readACL: A boolean flag that indicated if the user is authorized to read the object's ACL updateACL: A boolean flag that indicated if the user is authorized to update the object's ACL hrefs ^^^^^ An array of hypertext links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET /groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/acls HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 16 Jan 2015 20:06:08 GMT Content-Length: 660 Etag: "2c410d1c469786f25ed0075571a8e7a3f313cec1" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "acls": [ { "create": true, "delete": true, "read": true, "readACL": true, "update": true, "updateACL": true, "userName": "test_user2" }, { "create": false, "delete": false, "read": true, "readACL": false, "update": false, "updateACL": false, "userName": "test_user1" }, { "create": false, "delete": false, "read": false, "readACL": false, "update": false, "updateACL": false, "userName": "default" } ], "hrefs": [ { "href": "http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e/acls", "rel": "self" }, { "href": "http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e", "rel": "root" }, { "href": "http://tall_acl.test.hdfgroup.org/", "rel": "home" }, { "href": "http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e", "rel": "owner" } ] Related Resources ================= * :doc:`PUT_ACL` * :doc:`GET_ACL` ================================================ FILE: docs/AclOps/PUT_ACL.rst ================================================ ********************************************** PUT ACL ********************************************** Description =========== Update the access information for the given user for the object with the UUID provided in the URI. 
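As a rough sketch of what such an update could look like from a Python client (the ``requests`` package is used here for illustration; the endpoint, domain, UUID, user names, and credentials are placeholders, and the permission keys follow the request body described under Request Elements below):

.. code-block:: python

    import json
    import requests

    endpoint = "http://127.0.0.1:5000"                    # placeholder server address
    domain = "tall.test.hdfgroup.org"                     # placeholder domain
    group_uuid = "052dcbbd-9d33-11e4-86ce-3c15c2da029e"   # placeholder group UUID

    # Grant read-only access to test_user1; all other permissions are withheld.
    perm = {"read": True, "create": False, "update": False,
            "delete": False, "readACL": False, "updateACL": False}

    # The requesting user must have updateACL permission on the object.
    rsp = requests.put(
        endpoint + "/groups/" + group_uuid + "/acls/test_user1",
        headers={"Host": domain},
        auth=("admin", "admin"),                          # placeholder credentials
        data=json.dumps(perm),
    )
    print(rsp.status_code)  # 201 on success
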
Requests ======== Syntax ------ To update a user's access information for a domain: .. code-block:: http PUT /acls/ HTTP/1.1 Host: DOMAIN Authorization: To update a user's access information for a group: .. code-block:: http PUT /groups//acls/ HTTP/1.1 Host: DOMAIN Authorization: To update a user's access information for a dataset: .. code-block:: http PUT /datasets//acls/ HTTP/1.1 Host: DOMAIN Authorization: To update a user's access information for a committed datatype: .. code-block:: http PUT /datatypes//acls/ HTTP/1.1 Host: DOMAIN Authorization: where: * is the UUID of the requested dataset/group/committed datatype * is the userid for the requested user. Use the special userid "default" to update the default access permissions for the object Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Request Elements ---------------- The request body must include a JSON object that has the following keys and boolean values: { 'read': , 'create': , 'update': , 'delete': , 'readACL': , 'updateACL': } Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: hrefs ^^^^^ An array of hypertext links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http PUT /groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/acls/test_user1 HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 { 'read': True, 'create': False, 'update': False, 'delete': False, 'readACL': False, 'updateACL': False } Sample Response --------------- .. code-block:: http HTTP/1.1 201 Created Date: Fri, 16 Jan 2015 20:06:08 GMT Content-Length: 660 Etag: "2c410d1c469786f25ed0075571a8e7a3f313cec1" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "hrefs": [ { "href": "http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e/acls/test_user1", "rel": "self" }, { "href": "http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e", "rel": "root" }, { "href": "http://tall_acl.test.hdfgroup.org/", "rel": "home" }, { "href": "http://tall_acl.test.hdfgroup.org/groups/eb8f6959-8775-11e5-96b6-3c15c2da029e", "rel": "owner" } ] } Related Resources ================= * :doc:`GET_ACL` * :doc:`GET_ACLs` ================================================ FILE: docs/AclOps/index.rst ================================================ #################### Access Control List #################### Access Control Lists (ACLs) are key-value stores that can be used to manage what operations can be performed by which user on group, dataset, or committed type objects. Operations on other objects (e.g. links, dataspace, or attributes) use the ACL of the object they belong to.
Each ACL consists of one or more items in the form: (username, read, create, update, delete, readACL, updateACL) where username is a string, and read, create, update, delete, readACL, updateACL are booleans. These flags have the following semantics when the given username is provided in the http Authorization header: * read: The given user is authorized for read access to the resource (generally all GET requests) * create: The given user is authorized to create new resources (generally POST or PUT requests) * update: The given user is authorized to modify a resource (e.g. :doc:`../DatasetOps/PUT_Value`) * delete: The given user is authorized to delete a resource (e.g. Delete a Group) * readACL: The given user is authorized to read the ACLs of a resource * updateACL: The given user is authorized to modify the ACLs of a resource A special username 'default' is used to denote the access permissions for all other users who are not listed in the ACL (including un-authenticated requests that don't provide a username). Example ------- Suppose a given dataset has the following ACL: ======== ==== ====== ====== ====== ======= ========= username read create update delete readACL updateACL ======== ==== ====== ====== ====== ======= ========= default true false false false false false joe true false true false false false ann true true true true true true ======== ==== ====== ====== ====== ======= ========= This ACL would enable anyone to read (perform GET requests). User 'joe' would be able to read and update (modify values in the dataset), while user 'ann' would have full control to do any operation on the dataset (including modifying permissions for herself or other users). The following unauthenticated (no HTTP Authorization header) requests on the dataset would be granted or denied as follows: * GET /datasets/ - granted (returns HTTP Status 200 - OK) * POST /datasets//value - granted (returns HTTP Status 200 - OK) * PUT /datasets//shape - denied (returns HTTP Status 401 - Unauthorized) * PUT /datasets//attributes/ - denied (returns HTTP Status 401 - Unauthorized) * DELETE /datasets/ - denied (returns HTTP Status 401 - Unauthorized) Next the same set of requests is sent with 'joe' as the user in the HTTP Authorization header: * GET /datasets/ - granted (returns HTTP Status 200 - OK) * POST /datasets//value - granted (returns HTTP Status 200 - OK) * PUT /datasets//shape - granted (returns HTTP Status 200 - OK) * PUT /datasets//attributes/ - denied (returns HTTP Status 403 - Forbidden) * DELETE /datasets/ - denied (returns HTTP Status 403 - Forbidden) Finally the same set of requests is sent with 'ann' as the user: * GET /datasets/ - granted (returns HTTP Status 200 - OK) * POST /datasets//value - granted (returns HTTP Status 200 - OK) * PUT /datasets//shape - granted (returns HTTP Status 200 - OK) * PUT /datasets//attributes/ - granted (returns HTTP Status 201 - Created) * DELETE /datasets/ - granted (returns HTTP Status 200 - OK) Note: HTTP Status 401 basically says: "you can't have access until you tell me who you are", while HTTP Status 403 says: "I know who you are, but you don't have permission to access this resource." Root ACL Inheritance -------------------- In many cases it is desirable to have a default ACL that applies to each resource in the domain. This can be accomplished by defining an ACL for the root group. This will control the access rights for any resource unless an ACL entry is present on that resource for the requesting user.
The default ACL can be read or updated by forming a request with a uri that includes the root group id, i.e.: "/groups//acls", or by using the uri path for the domain, i.e. "/acls". For a given user then, the permissions for a resource are found in the following way: #. If the user is present in the resources ACL, those permissions are used #. If no user is present in the resources ACL, but is present in the root group, those permissions are used #. Otherwise, if a 'default' user is present in the resource ACL, those permissions are used #. If a 'default' user is not present in the resource ACL, but is present in the root ACL, those permissions are used #. If no 'default' user is present in the root ACL, the permissions defined in the 'default_acl' config is used List of Operations ------------------ .. toctree:: :maxdepth: 1 GET_ACL GET_ACLs PUT_ACL ================================================ FILE: docs/AdminTools.rst ================================================ ################### Admin Tools ################### The scripts described here are intended to be run on the server by "privileged" users. These are all located in the ``util\admin`` directory. makepwd_file.py --------------- This script creates an initial password file "passwd.h5". The password file will be used to manage http basic authentication. After creation, move the file into the location referenced by the 'password_file' configuration value. Usage: ``python makepwd_file.py`` Use the update_pwd.py utility to create user accounts. update_pwd.py ------------- This script can be used to add users and passwords to the password file, list information about one or more users, or to update a user's information (e.g. change the password). Usage: ``python update_pwd.py [-h] [-r] [-a] [-f FILE] [-u USER] [-p PASSWD]`` Options: * ``-h``: print usage information * ``-r``: update a user's entry * ``-a``: add a user (requires -u and -p options) * ``-f``: password file to be used * ``-u``: print/update information for specified user (otherwise show all users) * ``-p``: password to be set for the given users Example - list all users ``python update_pwd.py -f passwd.h5`` Example - list user 'bob': ``python update_pwd.py -f passwd.h5 -u bob`` Example - add a user 'ann': ``python update_pwd.py -f passwd.h5 -a -u ann -p mysecret`` Example - changes password for user 'paul': ``python update_pwd.py -f passwd.h5 -r -u paul -p mysecret2`` Note, there is no way to display the passwords for any user. If a password is lost, that users password must be reset. getacl.py ----------- This script displays ACL's of a given file or object within a file. usage: ``python getacl.py [-h] [-file ] [-path ] [userid_1, userid_2, ... userid_n]`` Options: * ``-h``: print usage information * ``-file``: (required) data file to be used * ``-path``: h5path to object. If not present, ACLs of the root group will be displayed * ````: list of user ids to fetch ACLs for. If not present, ACLs for all users will be printed Example - get all ACLs of tall.h5 root group ``python getacl.py -file ../../data/tall.h5`` Example - get ACLs for userid 123 of root group in tall.h5 ``python getacl.py -file ../../data/tall.h5 123`` Example - get ACLs for userid 123 of the dataset identified by path '/g1/g1.1/dset1.1.1' ``python getacl.py -file ../../data/tall.h5 -path /g1/g1.1/dset1.1.1`` setacl.py ----------- This script creates or modifies ACL's of a given file or object within a file. usage: ``python setacl.py [-h] [-file ] [-path ] [+-][crudep] [userid_1, userid_2, ... 
userid_n]`` Options: * ``-h``: print usage information * ``-file``: (required) data file to be used * ``-path``: h5path to object. If not present, ACLs of the root group will be modified * ``[+-][crudep]``: add (+) or remove (-) permissions for Create (c), Read (r), Update (u), Delete (d), rEadAcl (e), and Putacl (p) * ````: list of user ids to set ACLs for. If not present, ACLs for the default user will be set. Example - set default permission of tall.h5 to read only ``python setacl.py -file ../../data/tall.h5 +r-cudep`` Example - give userid 123 full control of tall.h5: ``python setacl.py -file ../../data/tall.h5 +crudep 123`` Example - give userid 123 read/update access to the dataset at path '/g1/g1.1/dset1.1.1' ``python setacl.py -file ../../data/tall.h5 -path /g1/g1.1/dset1.1.1 +ru-cdep 123`` ================================================ FILE: docs/AttrOps/DELETE_Attribute.rst ================================================ ********************************************** DELETE Attribute ********************************************** Description =========== The implementation of the DELETE operation deletes the attribute named in the URI from its parent group, dataset, or committed datatype. Requests ======== Syntax ------ .. code-block:: http DELETE /groups// HTTP/1.1 Host: DOMAIN Authorization: * ** is the UUID of the dataset/group/committed datatype * ** is the url-encoded name of the requested attribute Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http DELETE /groups/36ae688a-ac0e-11e4-a44b-3c15c2da029e/attributes/attr1 HTTP/1.1 Content-Length: 0 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: tall_updated.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Wed, 04 Feb 2015 01:36:17 GMT Content-Length: 420 Content-Type: application/json Server: TornadoServer/3.2.2 ..
code-block:: json { "hrefs": [ {"href": "http://tall_updated.test.hdfgroup.org/groups/36ae688a-ac0e-11e4-a44b-3c15c2da029e/attributes", "rel": "self"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/36ae688a-ac0e-11e4-a44b-3c15c2da029e", "rel": "owner"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/36ae688a-ac0e-11e4-a44b-3c15c2da029e", "rel": "root"}, {"href": "http://tall_updated.test.hdfgroup.org/", "rel": "home"} ] } Related Resources ================= * :doc:`GET_Attributes` * :doc:`GET_Attribute` * :doc:`../DatasetOps/GET_Dataset` * :doc:`../DatatypeOps/GET_Datatype` * :doc:`../GroupOps/GET_Group` * :doc:`PUT_Attribute` ================================================ FILE: docs/AttrOps/GET_Attribute.rst ================================================ ********************************************** GET Attribute ********************************************** Description =========== Gets the specified attribute of a dataset, group, or committed datatype. Requests ======== Syntax ------ To get an attribute of a group: .. code-block:: http GET /groups//attributes/ HTTP/1.1 Host: DOMAIN Authorization: To get an attribute of a dataset: .. code-block:: http GET /datasets//attributes/ HTTP/1.1 Host: DOMAIN Authorization: To get an attribute of a datatype: .. code-block:: http GET /datatypes//attributes/ HTTP/1.1 Host: DOMAIN Authorization: where: * ** is the UUID of the dataset/group/committed datatype * ** is the url-encoded name of the requested attribute Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: type ^^^^ A JSON object representing the type of the attribute. See :doc:`../Types/index` for details of the type representation. shape ^^^^^ A JSON object that represents the dataspace of the attribute. Subkeys of shape are: class: A string with one of the following values: * H5S_NULL: A null dataspace, which has no elements * H5S_SCALAR: A dataspace with a single element (although possibly of a complext datatype) * H5S_SIMPLE: A dataspace that consists of a regular array of elements dims: An integer array whose length is equal to the number of dimensions (rank) of the dataspace. The value of each element gives the the current size of each dimension. Dims is not returned for H5S_NULL or H5S_SCALAR dataspaces. value ^^^^^ A json array (or string or number for scalar datasets) giving the values of the requested attribute. hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- Get an attribute named "attr1" from a group with UUID: "45a882e1-...". .. code-block:: http GET /groups/1a956e54-abf6-11e4-b878-3c15c2da029e/attributes/attr1 HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. 
code-block:: http HTTP/1.1 200 OK Date: Tue, 03 Feb 2015 22:44:04 GMT Content-Length: 648 Etag: "55b2e2ce2d3a2449a49cfd76c4dae635ec43a150" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "name": "attr1", "type": { "class": "H5T_INTEGER", "base": "H5T_STD_I8LE" }, "shape": { "class": "H5S_SIMPLE", "dims": [10] }, "value": [97, 98, 99, 100, 101, 102, 103, 104, 105, 0], "created": "2015-02-03T22:40:09Z", "lastModified": "2015-02-03T22:40:09Z", "hrefs": [ {"href": "http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e/attributes/attr1", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e", "rel": "owner"}, {"href": "http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"} ] } Related Resources ================= * :doc:`DELETE_Attribute` * :doc:`GET_Attributes` * :doc:`../DatasetOps/GET_Dataset` * :doc:`../DatatypeOps/GET_Datatype` * :doc:`../GroupOps/GET_Group` * :doc:`PUT_Attribute` ================================================ FILE: docs/AttrOps/GET_Attributes.rst ================================================ ********************************************** GET Attributes ********************************************** Description =========== Gets all the attributes of a dataset, group, or committed datatype. For each attribute the request returns the attributes name, type, and shape. To get the attribute data use :doc:`GET_Attribute`. Requests ======== Syntax ------ To get the attributes of a group: .. code-block:: http GET /groups//attributes HTTP/1.1 Host: DOMAIN Authorization: To get the attributes of a dataset: .. code-block:: http GET /datasets//attributes HTTP/1.1 Host: DOMAIN Authorization: To get the attributes of a datatype: .. code-block:: http GET /datatypes//attributes HTTP/1.1 Host: DOMAIN Authorization: where: * ** is the UUID of the dataset/group/committed datatype Request Parameters ------------------ This implementation of the operation uses the following request parameters (both optional): Limit ^^^^^ If provided, a positive integer value specifying the maximum number of attributes to return. Marker ^^^^^^ If provided, a string value indicating that only attributes that occur after the marker value will be returned. *Note:* the marker expression should be url-encoded. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: attributes ^^^^^^^^^^ An array of JSON objects with an element for each returned attribute. Each element will have keys: name, type, shape, created, and lastModified. See :doc:`GET_Attribute` for a description of these keys. hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- Get attributes of a group with UUID: "45a882e1-...". .. 
code-block:: http GET /groups/1a956e54-abf6-11e4-b878-3c15c2da029e/attributes HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Wed, 04 Feb 2015 00:49:28 GMT Content-Length: 807 Etag: "7cbeefcf8d9997a8865bdea3bf2d541a14e9bf71" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "attributes": [ { "name": "attr1", "type": { "base": "H5T_STD_I8LE", "class": "H5T_INTEGER" }, "shape": { "dims": [10], "class": "H5S_SIMPLE" }, "created": "2015-02-03T22:40:09Z", "lastModified": "2015-02-03T22:40:09Z", }, "name": "attr2", "type": { "base": "H5T_STD_I32BE", "class": "H5T_INTEGER" }, "shape": { "dims": [2, 2], "class": "H5S_SIMPLE" }, "created": "2015-02-03T22:40:09Z", "lastModified": "2015-02-03T22:40:09Z", } ], "hrefs": [ {"href": "http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e/attributes", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e", "rel": "owner"}, {"href": "http://tall.test.hdfgroup.org/groups/1a956e54-abf6-11e4-b878-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"} ] } Sample Request - get Batch --------------------------- Get 5 the five attributes that occur after attribute "a0004" from a of a group with UUID: "45a882e1-...". .. code-block:: http GET /groups/4cecd4dc-ac0a-11e4-af59-3c15c2da029e/attributes?Marker=a0004&Limit=5 HTTP/1.1 host: attr1k.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response - get Batch --------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Wed, 04 Feb 2015 01:08:16 GMT Content-Length: 1767 Etag: "9483f4356e08d12b719aa64ece09e659b05adaf2" Content-Type: application/json Server: TornadoServer/3.2.2 .. 
code-block:: json { "attributes": [ { "name": "a0005", "type": {"cset": "H5T_CSET_ASCII", "order": "H5T_ORDER_NONE", "class": "H5T_STRING", "strpad": "H5T_STR_NULLTERM", "strsize": "H5T_VARIABLE"}, "shape": {"class": "H5S_SCALAR"}, "created": "2015-02-03T22:40:09Z", "lastModified": "2015-02-03T22:40:09Z" }, { "name": "a0006", "type": {"cset": "H5T_CSET_ASCII", "order": "H5T_ORDER_NONE", "class": "H5T_STRING", "strpad": "H5T_STR_NULLTERM", "strsize": "H5T_VARIABLE"}, "shape": {"class": "H5S_SCALAR"}, "created": "2015-02-03T22:40:09Z", "lastModified": "2015-02-03T22:40:09Z" }, { "name": "a0007", "type": {"cset": "H5T_CSET_ASCII", "order": "H5T_ORDER_NONE", "class": "H5T_STRING", "strpad": "H5T_STR_NULLTERM", "strsize": "H5T_VARIABLE"}, "shape": {"class": "H5S_SCALAR"}, "created": "2015-02-03T22:40:09Z", "lastModified": "2015-02-03T22:40:09Z" }, { "name": "a0008", "type": {"cset": "H5T_CSET_ASCII", "order": "H5T_ORDER_NONE", "class": "H5T_STRING", "strpad": "H5T_STR_NULLTERM", "strsize": "H5T_VARIABLE"}, "shape": {"class": "H5S_SCALAR"}, "created": "2015-02-03T22:40:09Z", "lastModified": "2015-02-03T22:40:09Z" }, { "name": "a0009", "type": {"cset": "H5T_CSET_ASCII", "order": "H5T_ORDER_NONE", "class": "H5T_STRING", "strpad": "H5T_STR_NULLTERM", "strsize": "H5T_VARIABLE"}, "shape": {"class": "H5S_SCALAR"}, "created": "2015-02-03T22:40:09Z", "lastModified": "2015-02-03T22:40:09Z" } ], "hrefs": [ {"href": "http://attr1k.test.hdfgroup.org/groups/4cecd4dc-ac0a-11e4-af59-3c15c2da029e/attributes", "rel": "self"}, {"href": "http://attr1k.test.hdfgroup.org/groups/4cecd4dc-ac0a-11e4-af59-3c15c2da029e", "rel": "owner"}, {"href": "http://attr1k.test.hdfgroup.org/groups/4cecd4dc-ac0a-11e4-af59-3c15c2da029e", "rel": "root"}, {"href": "http://attr1k.test.hdfgroup.org/", "rel": "home"} ] } Related Resources ================= * :doc:`DELETE_Attribute` * :doc:`GET_Attributes` * :doc:`../DatasetOps/GET_Dataset` * :doc:`../DatatypeOps/GET_Datatype` * :doc:`../GroupOps/GET_Group` * :doc:`PUT_Attribute` ================================================ FILE: docs/AttrOps/PUT_Attribute.rst ================================================ ********************************************** PUT Attribute ********************************************** Description =========== Creates a new attribute in a group, dataset, or committed datatype. *Note*: The new attribute will replace any existing attribute with the same name. Requests ======== Syntax ------ To create a group attribute: .. code-block:: http PUT /groups//attributes/ HTTP/1.1 Host: DOMAIN Authorization: To create a dataset attribute: .. code-block:: http PUT /datasets//attributes/ HTTP/1.1 Host: DOMAIN Authorization: To create a committed datatype attribute: .. code-block:: http PUT /datatypes//attributes/ HTTP/1.1 Host: DOMAIN Authorization: * ** is the UUID of the dataset/group/committed datatype * ** is the url-encoded name of the requested attribute Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Request Elements ---------------- The request body must include a JSON object with "type" key. Optionally a "shape" key can be provide to make a non-scalar attribute. type ^^^^ Specify's the desired type of the attribute. Either a string that is one of the predefined type values, a uuid of a committed type, or a JSON object describing the type. 
See :doc:`../Types/index` for details of the type specification. shape ^^^^^^ Either a string with the value ``H5S_NULL`` or an integer array describing the dimensions of the attribute. If shape is not provided, a scalar attribute will be created. If a shape value of ``H5S_NULL`` is specified a null space attribute will be created. (Null space attributes can not contain any data values.) value ^^^^^ A JSON array (or number or string for scalar attributes with primitive types) that specifies the initial values for the attribute. The elements of the array must be compatible with the type of the attribute. Not valid to provide if the shape is ``H5S_NULL``. Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request - scalar attribute ---------------------------------- Create an integer scalar attribute in the group with UUID of "be319519-" named "attr4". The value of the attribute will be 42. .. code-block:: http PUT /groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr4 HTTP/1.1 Content-Length: 38 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: tall_updated.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "type": "H5T_STD_I32LE", "value": 42 } Sample Response - scalar attribute ----------------------------------- .. code-block:: http HTTP/1.1 201 Created Date: Thu, 05 Feb 2015 06:25:30 GMT Content-Length: 359 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json {"hrefs": [ {"href": "http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr4", "rel": "self"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e", "rel": "owner"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e", "rel": "root"} ] } Sample Request - string attribute ---------------------------------- Create a two-element, fixed width string attribute in the group with UUID of "be319519-" named "attr6". The attributes values will be "Hello, ..." and "Goodbye!". .. code-block:: http PUT /groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr6 HTTP/1.1 Content-Length: 162 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: tall_updated.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "shape": [2], "type": { "class": "H5T_STRING", "cset": "H5T_CSET_ASCII", "strpad": "H5T_STR_NULLPAD", "strsize": 40 }, "value": ["Hello, I'm a fixed-width string!", "Goodbye!"] } Sample Response - string attribute ----------------------------------- .. code-block:: http HTTP/1.1 201 Created Date: Thu, 05 Feb 2015 06:42:14 GMT Content-Length: 359 Content-Type: application/json Server: TornadoServer/3.2.2 .. 
code-block:: json { "hrefs": [ {"href": "http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr6", "rel": "self"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e", "rel": "owner"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e", "rel": "root"} ] } Sample Request - compound type ---------------------------------- Create a two-element, attribute of group with UUID of "be319519-" named "attr_compound". The attribute has a compound type with an integer and a floating point element. .. code-block:: http PUT /groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr_compound HTTP/1.1 Content-Length: 187 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: tall_updated.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "shape": 2, "type": { "class": "H5T_COMPOUND", "fields": [ {"type": "H5T_STD_I32LE", "name": "temp"}, {"type": "H5T_IEEE_F32LE", "name": "pressure"} ] }, "value": [[55, 32.34], [59, 29.34]] } Sample Response - compound type ----------------------------------- .. code-block:: http HTTP/1.1 201 Created Date: Thu, 05 Feb 2015 06:49:19 GMT Content-Length: 367 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "hrefs": [ {"href": "http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e/attributes/attr_compound", "rel": "self"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e", "rel": "owner"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/be319519-acff-11e4-bf8e-3c15c2da029e", "rel": "root"} ] } Related Resources ================= * :doc:`DELETE_Attribute` * :doc:`GET_Attribute` * :doc:`GET_Attributes` * :doc:`../DatasetOps/GET_Dataset` * :doc:`../DatatypeOps/GET_Datatype` * :doc:`../GroupOps/GET_Group` ================================================ FILE: docs/AttrOps/index.rst ================================================ ######################## Attributes ######################## Like datasets (see :doc:`../DatasetOps/index`), attributes are objects that contain a homogeneous collection of elements and have associatted type information. Attributes are typically small metadata objects that describe some aspect of the object (dataset, group, or committed datatype) that contains the attribute. Creating Attributes -------------------- Use :doc:`PUT_Attribute` to create an attribute. If there is an existing attribute with the same name, it will be overwritten by this request. You can use :doc:`GET_Attribute` to inquire if the attribute already exists or not. When creating an attribute, the attribute name, type, and shape (for non-scalar attributes) is included in the request. Reading and Writing Data ------------------------- Unlike datasets, attribute's data can not be read or written partially. Data can only be written as part of the PUT requests. Reading the data of an attribute is done by :doc:`GET_Attribute`. Listing attributes ------------------ Use :doc:`GET_Attributes` to get information about all the attributes of a group, dataset, or committed datatype. Deleting Attributes ------------------- Use :doc:`DELETE_Attribute` to delete an attribute. List of Operations ------------------ .. 
toctree:: :maxdepth: 1 DELETE_Attribute GET_Attribute GET_Attributes PUT_Attribute ================================================ FILE: docs/Authorization.rst ================================================ ********************************* Authorization and Authentication ********************************* Request Authentication ----------------------- h5serv supports HTTP Basic authentication to authenticate users by comparing an encrypted username and password against a value stored within a password file. (See :doc:`AdminTools` to create a password file and add user accounts.) If neither the requested object (Group, Dataset, or Committed Datatype) nor the object's root group has an Access Control List (ACL), authorization is not required and no authentication string needs to be supplied. See :doc:`../AclOps`) for information on how to use ACL's. If the requested object (or object's root group), does have an ACL, authorization may be required (if the object is not publically readable), and if so the requestor will need to provide an Authorization header in the request. If authoriazation is required, but not provided, the server will return an HTTP Status of 401 - Unauthorized. If authorization is required (i.e. a 401 response is received), the client should provide an authorization header in the http request which conveys the userid and password. The authorization string is constructed as follows: 1. Username and password are combined into a string "username:password". Note that username cannot contain the ":" character 2. The resulting string is then encoded using the RFC2045-MIME variant of Base64, except not limited to 76 char/line 3. The authorization method and a space i.e. "Basic " is then put before the encoded string For example, if the user agent uses 'Aladdin' as the username and 'open sesame' as the password then the field is formed as follows: ``Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==``. When passwords are being sent over an open network, SSL connections should be used to avoid "man in the middle attacks". The Base64 encoding is easily reversible and if using plain http there is no assurance that the password will not be compromised. If the authorization string is validated, the server will verify the request is authorized as per the object's ACL list. If not authorized a http status 403 - Forbidden will be returned. User ids and passwords ---------------------- User ids and passwords are maintained in an HDF5 file referenced in the server config: 'password_file'. The admin tool (See :doc:`AdminTools`) script: update_pwd.py can be used to create new users and update passwords. ================================================ FILE: docs/CommonErrorResponses.rst ================================================ *************************** Common Error Responses *************************** For each request, h5serv returns a standard HTTP status code as described below. In general 2xx codes indicate success, 3xx codes some form of redirection, 4xx codes client error, and 5xx codes for server errors. In addition to the numeric code, h5serv will return an informational message as part of the response providing further information on the nature of the error. * ``200 OK`` - The request was completed successfully * ``201 Created`` - The request was fulfilled and a new resource (e.g. group, dataset, attribute was created) * ``400 Bad Request`` - The request was not structured correctly (e.g. a required key was missing). 
* ``401 Unauthorized`` - User authentication is required; supply an Authorization header with a valid username and password * ``403 Forbidden`` - The requesting user does not have access to the requested resource * ``404 Not Found`` - The requested resource was not found (e.g. ``GET /groups/`` where was not a valid identifier for a group in the domain). * ``409 Conflict`` - This error is used with PUT requests where the resources cannot be created because there is an existing resource with the same name (e.g. PUT / where the requested domain is already present). * ``410 Gone`` - The resource requested has been recently deleted. * ``500 Internal Error`` - An unexpected error that indicates some problem occurred on the server. * ``501 Not Implemented`` - The request depends on a feature that is not yet implemented. ================================================ FILE: docs/CommonRequestHeaders.rst ================================================ *********************** Common Request Headers *********************** The following describes common HTTP request headers as used in h5serv: * Request line: The first line of the request; the format is an HTTP verb (GET, PUT, DELETE, or POST) followed by the path to the resource (e.g. /group/). Some operations take one or more query parameters (see relevant documentation) * Accept: Specifies the media type that is acceptable for the response. Valid values are "application/json" and "*/*". In addition, GET Value (see :doc:`DatasetOps/GET_Value`) supports the value "application/octet-stream" * Authorization: A string that provides the requester's credentials for the request. See :doc:`Authorization` * Host: the domain (i.e. related collection of groups, datasets, and attributes) that the request should apply to Note: the host header can also be provided as a query parameter. Example: https://data.hdfgroup.org:7258/?host=tall.test.data.hdfgroup.org ================================================ FILE: docs/CommonResponseHeaders.rst ================================================ *************************** Common Response Headers *************************** The following describes some of the common response lines returned by h5serv. * Status Line: the first line of the response will always be: "``HTTP/1.1``" followed by a status code (e.g. 200) followed by a reason message (e.g. "``OK``"). For errors, an additional error message may be included on this line. * Content-Length: the response size in bytes. * Etag: a hash code that indicates the state of the requested resource. If the client sees the same Etag value for the same request, it can assume the resource has not changed since the last request. * Content-Type: the mime type of the response. Currently always "``application/json``". ================================================ FILE: docs/DatasetOps/DELETE_Dataset.rst ================================================ ********************************************** DELETE Dataset ********************************************** Description =========== The implementation of the DELETE operation deletes the dataset named in the URI. All attributes and links of the dataset will also be deleted. In addition, any links from other groups to the deleted dataset will be removed. Requests ======== Syntax ------ .. code-block:: http DELETE /datasets/ HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the requested dataset to be deleted. Request Parameters ------------------ This implementation of the operation does not use request parameters.
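For illustration only, a dataset delete could be issued from a Python client along these lines (a minimal sketch with the ``requests`` package; the endpoint, domain, UUID, and credentials are placeholders, and the requesting user needs delete permission on the dataset):

.. code-block:: python

    import requests

    endpoint = "http://127.0.0.1:5000"                    # placeholder server address
    domain = "tall_dset112_deleted.test.hdfgroup.org"     # placeholder domain
    dset_uuid = "289bb654-a2c6-11e4-97d8-3c15c2da029e"    # placeholder dataset UUID

    # Delete the dataset; its attributes and any links pointing to it are removed as well.
    rsp = requests.delete(
        endpoint + "/datasets/" + dset_uuid,
        headers={"Host": domain},
        auth=("admin", "admin"),                          # placeholder credentials
    )
    print(rsp.status_code)  # 200 on success
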
Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http DELETE /datasets/289bb654-a2c6-11e4-97d8-3c15c2da029e HTTP/1.1 Content-Length: 0 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: tall_dset112_deleted.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 23 Jan 2015 06:07:49 GMT Content-Length: 287 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "hrefs": [ {"href": "http://tall_dset112_deleted.test.hdfgroup.org/datasets", "rel": "self"}, {"href": "http://tall_dset112_deleted.test.hdfgroup.org/groups/289b4873-a2c6-11e4-adfb-3c15c2da029e", "rel": "root"}, {"href": "http://tall_dset112_deleted.test.hdfgroup.org/", "rel": "home"} ] } Related Resources ================= * :doc:`GET_Datasets` * :doc:`GET_Dataset` * :doc:`POST_Dataset` ================================================ FILE: docs/DatasetOps/GET_Dataset.rst ================================================ ********************************************** GET Dataset ********************************************** Description =========== Returns information about the dataset with the UUID given in the URI. Requests ======== Syntax ------ .. code-block:: http GET /datasets/ HTTP/1.1 Host: DOMAIN Authorization: **** is the UUID of the requested dataset. Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: id ^^ The UUID of the dataset object. type ^^^^ A JSON object representing the type of the dataset. See :doc:`../Types/index` for details of the type representation. shape ^^^^^ A JSON object representing the shape of the dataset. See :doc:`GET_DatasetShape` for details of the shape representation. creationProperties ^^^^^^^^^^^^^^^^^^ A JSON object that describes chunk layout, filters, fill value, and other aspects of the dataset. See: http://hdf5-json.readthedocs.org/en/latest/bnf/dataset.html#grammar-token-dcpl for a complete description of fields that can be used. attributeCount ^^^^^^^^^^^^^^ The number of attributes belonging to the dataset. created ^^^^^^^ A timestamp giving the time the dataset was created in UTC (ISO-8601 format). lastModified ^^^^^^^^^^^^ A timestamp giving the most recent time the dataset has been modified (i.e.
attributes or links updated) in UTC (ISO-8601 format). hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET /datasets/c8d83759-a2c6-11e4-8713-3c15c2da029e HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 23 Jan 2015 06:15:33 GMT Content-Length: 755 Etag: "ecbd7e52654b0a8f4ccbebac06175ce5df5f8c79" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "id": "c8d83759-a2c6-11e4-8713-3c15c2da029e", "shape": { "dims": [10], "class": "H5S_SIMPLE" }, "type": { "base": "H5T_IEEE_F32BE", "class": "H5T_FLOAT" }, "creationProperties": { "allocTime": "H5D_ALLOC_TIME_LATE", "fillTime": "H5D_FILL_TIME_IFSET", "layout": { "class": "H5D_CONTIGUOUS" } }, "attributeCount": 0, "created": "2015-01-23T06:12:18Z", "lastModified": "2015-01-23T06:12:18Z", "hrefs": [ {"href": "http://tall.test.hdfgroup.org/datasets/c8d83759-a2c6-11e4-8713-3c15c2da029e", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/c8d7842b-a2c6-11e4-b4f1-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/datasets/c8d83759-a2c6-11e4-8713-3c15c2da029e/attributes", "rel": "attributes"}, {"href": "http://tall.test.hdfgroup.org/datasets/c8d83759-a2c6-11e4-8713-3c15c2da029e/value", "rel": "data"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"} ] } Related Resources ================= * :doc:`DELETE_Dataset` * :doc:`../AttrOps/GET_Attributes` * :doc:`GET_DatasetShape` * :doc:`GET_DatasetType` * :doc:`GET_Datasets` * :doc:`GET_Value` * :doc:`POST_Value` * :doc:`PUT_Value` ================================================ FILE: docs/DatasetOps/GET_DatasetShape.rst ================================================ ********************************************** GET Shape ********************************************** Description =========== Gets the shape of a dataset. Requests ======== Syntax ------ .. code-block:: http GET /datasets//shape HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the dataset whose shape is requested. Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: shape ^^^^^ A JSON object with the following keys: class: A string with one of the following values: * H5S_NULL: A null dataspace, which has no elements * H5S_SCALAR: A dataspace with a single element (although possibly of a complex datatype) * H5S_SIMPLE: A dataspace that consists of a regular array of elements dims: An integer array whose length is equal to the number of dimensions (rank) of the dataspace. The value of each element gives the current size of each dimension. Dims is not returned for H5S_NULL or H5S_SCALAR dataspaces.
maxdims: An integer array whose length is equal to the number of dimensions of the dataspace. The value of each element gives the maximum size of each dimension. A value of 0 indicates that the dimension has *unlimited* extent. maxdims is not returned for H5S_SIMPLE dataspaces which are not extensible or for H5S_NULL or H5S_SCALAR dataspaces. fillvalue: A value compatible with the dataset's type, which gives the *fill* value for the dataset (the value to which elements will be initialized when the dataspace is extended). fillvalue is only returned for extensible dataspaces. created ^^^^^^^ A timestamp giving the time the datashape (same as the dataset) was created in UTC (ISO-8601 format). lastModified ^^^^^^^^^^^^ A timestamp giving the most recent time the dataspace has been modified (i.e. a dimension has been extended) in UTC (ISO-8601 format). hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET /datasets/3b57b6d4-a6a8-11e4-96b5-3c15c2da029e/shape HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Wed, 28 Jan 2015 04:43:41 GMT Content-Length: 445 Etag: "76ed777f151c70d0560d1414bffe1515a3df86b0" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "shape": { "class": "H5S_SIMPLE", "dims": [10] }, "created": "2015-01-28T04:40:23Z", "lastModified": "2015-01-28T04:40:23Z", "hrefs": [ {"href": "http://tall.test.hdfgroup.org/datasets/3b57b6d4-a6a8-11e4-96b5-3c15c2da029e", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/datasets/3b57b6d4-a6a8-11e4-96b5-3c15c2da029e", "rel": "owner"}, {"href": "http://tall.test.hdfgroup.org/groups/3b56ee54-a6a8-11e4-b2ae-3c15c2da029e", "rel": "root"} ] } Sample Request - Resizable -------------------------- .. code-block:: http GET /datasets/a64010e8-a6aa-11e4-98c8-3c15c2da029e/shape HTTP/1.1 host: resizable.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response - Resizable ---------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Wed, 28 Jan 2015 05:00:59 GMT Content-Length: 500 Etag: "1082800980d6809a8008b22e225f1adde8afc73f" Content-Type: application/json Server: TornadoServer/3.2.2 ..
code-block:: json { "shape": { "class": "H5S_SIMPLE", "dims": [10, 10], "maxdims": [10, 0], }, "created": "2015-01-28T04:40:23Z", "lastModified": "2015-01-28T04:40:23Z", "hrefs": [ {"href": "http://resizable.test.hdfgroup.org/datasets/a64010e8-a6aa-11e4-98c8-3c15c2da029e", "rel": "self"}, {"href": "http://resizable.test.hdfgroup.org/datasets/a64010e8-a6aa-11e4-98c8-3c15c2da029e", "rel": "owner"}, {"href": "http://resizable.test.hdfgroup.org/groups/a63f5dcf-a6aa-11e4-ab68-3c15c2da029e", "rel": "root"} ] } Related Resources ================= * :doc:`GET_Dataset` * :doc:`GET_DatasetType` * :doc:`PUT_DatasetShape` ================================================ FILE: docs/DatasetOps/GET_DatasetType.rst ================================================ ********************************************** GET Type ********************************************** Description =========== Gets Type Information for a dataset. Requests ======== Syntax ------ .. code-block:: http GET /datasets//type HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the dataset the type information is requested for. Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: type ^^^^ A JSON object representing the type definition for the dataset. See :doc:`../Types/index` for information on how different types are represented. hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request - Predefined Type -------------------------------- .. code-block:: http GET /datasets/ba06ce68-a6b5-11e4-8ed3-3c15c2da029e/type HTTP/1.1 host: scalar.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response - Predefined Type --------------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Wed, 28 Jan 2015 06:20:16 GMT Content-Length: 519 Etag: "802b160bf786596a9cb9f6d5cd6faa4fe1127e8c" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "type": { "class": "H5T_INTEGER", "order": "H5T_ORDER_LE", "base_size": 4, "base": "H5T_STD_I32LE", "size": 4 }, "hrefs": [ {"href": "http://scalar.test.hdfgroup.org/datasets/ba06ce68-a6b5-11e4-8ed3-3c15c2da029e/type", "rel": "self"}, {"href": "http://scalar.test.hdfgroup.org/datasets/ba06ce68-a6b5-11e4-8ed3-3c15c2da029e", "rel": "owner"}, {"href": "http://scalar.test.hdfgroup.org/groups/ba06992e-a6b5-11e4-9ba5-3c15c2da029e", "rel": "root"} ] } Sample Request - Compound Type -------------------------------- .. code-block:: http GET /datasets/b9edddd7-a6b5-11e4-9afd-3c15c2da029e/type HTTP/1.1 host: compound.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response - Compound Type -------------------------------- .. 
code-block:: http HTTP/1.1 200 OK Date: Wed, 28 Jan 2015 06:20:16 GMT Content-Length: 1199 Etag: "1f97eac24aa18d3c462a2f2797c4782a1f2a0aa2" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "type": { "class": "H5T_COMPOUND", "fields": [ { "type": { "order": "H5T_ORDER_LE", "base_size": 8, "class": "H5T_INTEGER", "base": "H5T_STD_I64LE", "size": 8}, "name": "date" }, { "type": { "strpad": "H5T_STR_NULLPAD", "base_size": 6, "order": "H5T_ORDER_NONE", "cset": "H5T_CSET_ASCII", "strsize": 6, "class": "H5T_STRING", "size": 6}, "name": "time" }, { "type": { "order": "H5T_ORDER_LE", "base_size": 8, "class": "H5T_INTEGER", "base": "H5T_STD_I64LE", "size": 8}, "name": "temp" }, { "type": { "order": "H5T_ORDER_LE", "base_size": 8, "class": "H5T_FLOAT", "base": "H5T_IEEE_F64LE", "size": 8}, "name": "pressure" }, { "type": { "strpad": "H5T_STR_NULLPAD", "base_size": 6, "order": "H5T_ORDER_NONE", "cset": "H5T_CSET_ASCII", "strsize": 6, "class": "H5T_STRING", "size": 6}, "name": "wind"} ] }, "hrefs": [ {"href": "http://compound.test.hdfgroup.org/datasets/b9edddd7-a6b5-11e4-9afd-3c15c2da029e/type", "rel": "self"}, {"href": "http://compound.test.hdfgroup.org/datasets/b9edddd7-a6b5-11e4-9afd-3c15c2da029e", "rel": "owner"}, {"href": "http://compound.test.hdfgroup.org/groups/b9eda805-a6b5-11e4-aa52-3c15c2da029e", "rel": "root"} ] } Related Resources ================= * :doc:`GET_Dataset` * :doc:`GET_DatasetShape` * :doc:`POST_Dataset` ================================================ FILE: docs/DatasetOps/GET_Datasets.rst ================================================ ********************************************** GET Datasets ********************************************** Description =========== Returns UUIDs for all the datasets in a domain. Requests ======== Syntax ------ .. code-block:: http GET /datasets HTTP/1.1 Host: DOMAIN Authorization: Request Parameters ------------------ This implementation of the operation uses the following request parameters (both optional): Limit ^^^^^ If provided, a positive integer value specifying the maximum number of UUID's to return. Marker ^^^^^^ If provided, a string value indicating that only UUID's that occur after the marker value will be returned. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: datasets ^^^^^^^^ An array of UUID's, one for each dataset in the domain. hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET /datasets HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 23 Jan 2015 06:33:36 GMT Content-Length: 413 Etag: "977e96c7bc63a6e05d10d56565df2ab8d30e404d" Content-Type: application/json Server: TornadoServer/3.2.2 .. 
code-block:: json { "datasets": [ "c8d7dd14-a2c6-11e4-a68c-3c15c2da029e", "c8d7f159-a2c6-11e4-99af-3c15c2da029e", "c8d83759-a2c6-11e4-8713-3c15c2da029e", "c8d84a8a-a2c6-11e4-b457-3c15c2da029e" ], "hrefs": [ {"href": "http://tall.test.hdfgroup.org/datasets", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/c8d7842b-a2c6-11e4-b4f1-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"} ] } Sample Request with Marker and Limit ------------------------------------ This example uses the "Marker" request parameter to return only UUIDs after the given Marker value. The "Limit" request parameter is used to limit the number of UUIDs in the response to 5. .. code-block:: http GET /datasets?Marker=817db263-a2cc-11e4-87f2-3c15c2da029e&Limit=5 HTTP/1.1 host: dset1k.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response with Marker and Limit ------------------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 23 Jan 2015 06:53:52 GMT Content-Length: 459 Etag: "cb708d4839cc1e165fe6bb30718e49589ef140f4" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "datasets": [ "817dcfb8-a2cc-11e4-9197-3c15c2da029e", "817de9ee-a2cc-11e4-8378-3c15c2da029e", "817e028a-a2cc-11e4-8ce3-3c15c2da029e", "817e1b61-a2cc-11e4-ba39-3c15c2da029e", "817e341c-a2cc-11e4-a16f-3c15c2da029e" ], "hrefs": [ {"href": "http://dset1k.test.hdfgroup.org/datasets", "rel": "self"}, {"href": "http://dset1k.test.hdfgroup.org/groups/81760a80-a2cc-11e4-bb55-3c15c2da029e", "rel": "root"}, {"href": "http://dset1k.test.hdfgroup.org/", "rel": "home"} ] } Related Resources ================= * :doc:`DELETE_Dataset` * :doc:`GET_Dataset` * :doc:`POST_Dataset` ================================================ FILE: docs/DatasetOps/GET_Value.rst ================================================ ********************************************** GET Value ********************************************** Description =========== Gets data values of a dataset. Requests ======== Syntax ------ .. code-block:: http GET /datasets//value HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the requested dataset. Request Parameters ------------------ select ^^^^^^ Optionally the request can provide a select value to indicate a hyperslab selection for the values to be returned - i.e. a rectangular (in 1, 2, or more dimensions) region of the dataset. Format is the following as a url-encoded value: [dim1_start:dim1_end:dim1_step, dim2_start:dim2_end:dim2_step, ... , dimn_start:dimn_stop:dimn_step] The number of tuples "start:stop:step" should equal the number of dimensions of the dataset. For each tuple: * start must be greater than equal to zero and less than the dimension extent * stop must be greater than or equal to start and less than or equal to the dimension extent * step is optional and if provided must be greater than 0. If not provided, the step value for that dimension is assumed to be 1. query ^^^^^ Optionally the request can provide a query value to select items from a dataset based on a condition expression. E.g. The condition: "(temp > 32.0) & (dir == 'N')" would return elements of the dataset where the 'temp' field was greater than 32.0 and the 'dir' field was equal to 'N'. Note: the query value needs to be url-encoded. Note: the query parameter can be used in conjunction with the select parameter to restrict the return set to the provided selection. 
Note: the query parameter can be used in conjunction with the Limit parameter to limit the number of matches returned. Note: Currently the query parameter can only be used with compound type datasets that are one-dimensional. Limit ^^^^^ If provided, a positive integer value specifying the maximum number of elements to return. Only has an effect if used in conjunction with the query parameter. Request Headers --------------- This implementation of the operation supports the common headers in addition to the "Accept" header value of "application/octet-stream". Use this Accept value if a binary response is desired. Binary data will be more efficient for large data requests. If a binary response can be returned, the "Content-Type" response header will be "application/octet-stream". Otherwise the response header will be "application/json". Note: Binary responses are only supported for datasets that have a fixed-length type (i.e. either a fixed-length primitive type or a compound type that in turn consists of fixed-length types). In particular, variable length strings and variable length data types will always be returned as JSON. Note: if a binary response is returned, it will consist of the equivalent binary data of the "data" item in the JSON response. No data representing "hrefs" is returned. For other request headers, see :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: value ^^^^^ A JSON array (or an integer or string for scalar datasets) giving the values of the requested dataset region. index ^^^^^ A list of indexes for each element that met the query condition (only provided when the query request parameter is used). hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET /datasets/548f2f21-a83c-11e4-8baf-3c15c2da029e/value HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 30 Jan 2015 04:56:20 GMT Content-Length: 776 Etag: "788efb3caaba7fd2ae5d1edb40b474ba94c877a8" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "value": [ [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 2, 4, 6, 8, 10, 12, 14, 16, 18], [0, 3, 6, 9, 12, 15, 18, 21, 24, 27], [0, 4, 8, 12, 16, 20, 24, 28, 32, 36], [0, 5, 10, 15, 20, 25, 30, 35, 40, 45], [0, 6, 12, 18, 24, 30, 36, 42, 48, 54], [0, 7, 14, 21, 28, 35, 42, 49, 56, 63], [0, 8, 16, 24, 32, 40, 48, 56, 64, 72], [0, 9, 18, 27, 36, 45, 54, 63, 72, 81] ], "hrefs": [ {"href": "http://tall.test.hdfgroup.org/datasets/548f2f21-a83c-11e4-8baf-3c15c2da029e/value", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/548ed535-a83c-11e4-b58b-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/datasets/548f2f21-a83c-11e4-8baf-3c15c2da029e", "rel": "owner"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"} ] } Sample Request - Selection -------------------------- ..
code-block:: http GET /datasets/a299db70-ab57-11e4-9c00-3c15c2da029e/value?select=[1:9,1:9:2] HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response - Selection --------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Tue, 03 Feb 2015 04:01:41 GMT Content-Length: 529 Etag: "b370a3d34bdd7ebf57a496bc7f0da7bc5a1aafb9" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "value": [ [1, 3, 5, 7], [2, 6, 10, 14], [3, 9, 15, 21], [4, 12, 20, 28], [5, 15, 25, 35], [6, 18, 30, 42], [7, 21, 35, 49], [8, 24, 40, 56] ], "hrefs": [ {"href": "http://tall.test.hdfgroup.org/datasets/a299db70-ab57-11e4-9c00-3c15c2da029e/value", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/a29982cf-ab57-11e4-b976-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/datasets/a299db70-ab57-11e4-9c00-3c15c2da029e", "rel": "owner"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"} ] } Sample Request - Query -------------------------- Get elements from the dataset where the 'date' field is equal to 21 and the 'temp' field is greater than or equal to 72. .. code-block:: http GET /datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e/value?query=(date%20==%2021)%20%26%20(temp%20%3E=%2072) HTTP/1.1 host: compound.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response - Query ------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Thu, 11 Jun 2015 21:05:06 GMT Content-Length: 805 Etag: "927b5ed89616896d3dce7df8bdddac058321076a" Content-Type: application/json Server: TornadoServer/4.1 .. code-block:: json { "index": [68, 69, 70, 71], "value": [ [21, "17:53", 74, 29.87, "S 9"], [21, "16:53", 75, 29.87, "SW 10"], [21, "15:53", 79, 29.87, "S 12"], [21, "14:53", 78, 29.87, "SW 9"] ], "hrefs": [ {"href": "http://compound.test.hdfgroup.org/datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e/value", "rel": "self"}, {"href": "http://compound.test.hdfgroup.org/groups/b2c7f935-0e2e-11e5-96ae-3c15c2da029e", "rel": "root"}, {"href": "http://compound.test.hdfgroup.org/datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e", "rel": "owner"}, {"href": "http://compound.test.hdfgroup.org/", "rel": "home"} ] } Sample Request - Query Batch ----------------------------- Get elements where the 'date' field is equal to 23 and the index is between 24 and 72. Limit the number of results to 5. .. code-block:: http GET /datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e/value?query=date%20==%2023&Limit=5&select=[24:72] HTTP/1.1 host: compound.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response - Query Batch ----------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Thu, 11 Jun 2015 21:15:28 GMT Content-Length: 610 Etag: "927b5ed89616896d3dce7df8bdddac058321076a" Content-Type: application/json Server: TornadoServer/4.1 ..
code-block:: json { "index": [24, 25, 26, 27, 28], "value": [ [23, "13:53", 65, 29.83, "W 5"], [23, "12:53", 66, 29.84, "W 5"], [23, "11:53", 64, 29.84, "E 6"], [23, "10:53", 61, 29.86, "SE 5"], [23, "9:53", 62, 29.86, "S 6"] ], "hrefs": [ {"href": "http://compound.test.hdfgroup.org/datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e/value", "rel": "self"}, {"href": "http://compound.test.hdfgroup.org/groups/b2c7f935-0e2e-11e5-96ae-3c15c2da029e", "rel": "root"}, {"href": "http://compound.test.hdfgroup.org/datasets/b2c82938-0e2e-11e5-9092-3c15c2da029e", "rel": "owner"}, {"href": "http://compound.test.hdfgroup.org/", "rel": "home"} ] Related Resources ================= * :doc:`GET_Dataset` * :doc:`POST_Value` * :doc:`PUT_Value` ================================================ FILE: docs/DatasetOps/POST_Dataset.rst ================================================ ********************************************** POST Dataset ********************************************** Description =========== Creates a new Dataset. Requests ======== Syntax ------ .. code-block:: http POST /datasets HTTP/1.1 Host: DOMAIN Authorization: Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Request Elements ---------------- The request body must include a JSON object with a "type" key. Optionally "shape", "maxdims", and "link" keys can be provided. type ^^^^ Either a string that is one of the predefined type values, a uuid of a committed type, or a JSON object describing the type. See :doc:`../Types/index` for details of the type specification. shape ^^^^^^ Either a string with the value ``H5S_NULL`` or an integer array describing the initial dimensions of the dataset. If shape is not provided, a scalar dataset will be created. If the shape value of ``H5S_NULL`` is specified a dataset with a null dataspace will be created. A null dataset has attributes and a type, but will not be able to store any values. maxdims ^^^^^^^ An integer array describing the maximum extent of each dimension (or 0 for unlimited dimensions). If maxdims is not provided that resulting dataset will be non-extensible. Not valid to include if ``H5S_NULL`` is specified for the shape. creationProperties ^^^^^^^^^^^^^^^^^^ A JSON object that can specify chunk layout, filters, fill value, and other aspects of the dataset. See: http://hdf5-json.readthedocs.org/en/latest/bnf/dataset.html#grammar-token-dcpl for a complete description of fields that can be used. If creationProperties is not provided, default values will be used link["id"] ^^^^^^^^^^ The UUID of the group the new group should be linked to. If the UUID is not valid, the request will fail and a new group will not be created. link["name"] ^^^^^^^^^^^^ The name of the new link. Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: id ^^ The UUID of the newly created dataset. attributeCount ^^^^^^^^^^^^^^ The number of attributes belonging to the dataset. created ^^^^^^^ A timestamp giving the time the dataset was created in UTC (ISO-8601 format). 
lastModified ^^^^^^^^^^^^ A timestamp giving the most recent time the dataset has been modified (i.e. attributes or links updated) in UTC (ISO-8601 format). hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- Create a one-dimensional dataset with 10 floating point elements. .. code-block:: http POST /datasets HTTP/1.1 Content-Length: 39 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: newdset.datasettest.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "shape": 10, "type": "H5T_IEEE_F32LE" } Sample Response --------------- .. code-block:: http HTTP/1.1 201 Created Date: Thu, 29 Jan 2015 06:14:02 GMT Content-Length: 651 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "id": "0568d8c5-a77e-11e4-9f7a-3c15c2da029e", "attributeCount": 0, "created": "2015-01-29T06:14:02Z", "lastModified": "2015-01-29T06:14:02Z", "hrefs": [ {"href": "http://newdset.datasettest.test.hdfgroup.org/datasets/0568d8c5-a77e-11e4-9f7a-3c15c2da029e", "rel": "self"}, {"href": "http://newdset.datasettest.test.hdfgroup.org/groups/055fe7de-a77e-11e4-bbe9-3c15c2da029e", "rel": "root"}, {"href": "http://newdset.datasettest.test.hdfgroup.org/datasets/0568d8c5-a77e-11e4-9f7a-3c15c2da029e/attributes", "rel": "attributes"}, {"href": "http://newdset.datasettest.test.hdfgroup.org/datasets/0568d8c5-a77e-11e4-9f7a-3c15c2da029e/value", "rel": "value"} ] } Sample Request with Link ------------------------ Create a dataset with 10 variable length string elements. Create link in group: "5e441dcf-..." with name: "linked_dset". .. code-block:: http POST /datasets HTTP/1.1 Content-Length: 235 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: newdsetwithlink.datasettest.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "type": { "class": "H5T_STRING", "strsize": "H5T_VARIABLE", "cset": "H5T_CSET_ASCII", "order": "H5T_ORDER_NONE", "strpad": "H5T_STR_NULLTERM" }, "shape": 10, "link": { "id": "5e441dcf-a782-11e4-bd6b-3c15c2da029e", "name": "linked_dset" } } Sample Response with Link ------------------------- .. code-block:: http HTTP/1.1 201 Created Date: Thu, 29 Jan 2015 06:45:09 GMT Content-Length: 683 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "id": "5e579297-a782-11e4-93f9-3c15c2da029e", "attributeCount": 0, "created": "2015-01-29T06:45:09Z", "lastModified": "2015-01-29T06:45:09Z", "hrefs": [ {"href": "http://newdsetwithlink.datasettest.test.hdfgroup.org/datasets/5e579297-a782-11e4-93f9-3c15c2da029e", "rel": "self"}, {"href": "http://newdsetwithlink.datasettest.test.hdfgroup.org/groups/5e441dcf-a782-11e4-bd6b-3c15c2da029e", "rel": "root"}, {"href": "http://newdsetwithlink.datasettest.test.hdfgroup.org/datasets/5e579297-a782-11e4-93f9-3c15c2da029e/attributes", "rel": "attributes"}, {"href": "http://newdsetwithlink.datasettest.test.hdfgroup.org/datasets/5e579297-a782-11e4-93f9-3c15c2da029e/value", "rel": "value"} ] } Sample Request - Resizable Dataset ---------------------------------- Create a one-dimensional dataset with 10 elements, but extendable to an unlimited dimension. .. 
code-block:: http POST /datasets HTTP/1.1 Content-Length: 54 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: resizabledset.datasettest.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "type": "H5T_IEEE_F32LE", "shape": 10, "maxdims": 0 } Sample Response - Resizable Dataset ----------------------------------- .. code-block:: http HTTP/1.1 201 Created Date: Thu, 29 Jan 2015 08:28:19 GMT Content-Length: 675 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "id": "c79933ab-a790-11e4-b36d-3c15c2da029e", "attributeCount": 0, "created": "2015-01-29T08:28:19Z", "lastModified": "2015-01-29T08:28:19Z", "hrefs": [ {"href": "http://resizabledset.datasettest.test.hdfgroup.org/datasets/c79933ab-a790-11e4-b36d-3c15c2da029e", "rel": "self"}, {"href": "http://resizabledset.datasettest.test.hdfgroup.org/groups/c7759c11-a790-11e4-ae03-3c15c2da029e", "rel": "root"}, {"href": "http://resizabledset.datasettest.test.hdfgroup.org/datasets/c79933ab-a790-11e4-b36d-3c15c2da029e/attributes", "rel": "attributes"}, {"href": "http://resizabledset.datasettest.test.hdfgroup.org/datasets/c79933ab-a790-11e4-b36d-3c15c2da029e/value", "rel": "value"} ] } Sample Request - Committed Type ---------------------------------- Create a two-dimensional dataset which uses a committed type with uuid: .. code-block:: http POST /datasets HTTP/1.1 Content-Length: 67 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: committedtype.datasettest.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "type": "accd0b1e-a792-11e4-bada-3c15c2da029e", "shape": [10, 10] } Sample Response - Committed Type ----------------------------------- .. code-block:: http HTTP/1.1 201 Created Date: Thu, 29 Jan 2015 08:41:53 GMT Content-Length: 675 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "id": "ace8cdca-a792-11e4-ad88-3c15c2da029e", "attributeCount": 0, "created": "2015-01-29T08:41:53Z", "lastModified": "2015-01-29T08:41:53Z", "hrefs": [ {"href": "http://committedtype.datasettest.test.hdfgroup.org/datasets/ace8cdca-a792-11e4-ad88-3c15c2da029e", "rel": "self"}, {"href": "http://committedtype.datasettest.test.hdfgroup.org/groups/acc4d37d-a792-11e4-b326-3c15c2da029e", "rel": "root"}, {"href": "http://committedtype.datasettest.test.hdfgroup.org/datasets/ace8cdca-a792-11e4-ad88-3c15c2da029e/attributes", "rel": "attributes"}, {"href": "http://committedtype.datasettest.test.hdfgroup.org/datasets/ace8cdca-a792-11e4-ad88-3c15c2da029e/value", "rel": "value"} ] } Sample Request - SZIP Compression with chunking ----------------------------------------------- .. code-block:: http POST /datasets HTTP/1.1 Content-Length: 67 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: szip.datasettest.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "creationProperties": { "filters": [ { "bitsPerPixel": 8, "coding": "H5_SZIP_EC_OPTION_MASK", "id": 4, "pixelsPerBlock": 32, "pixelsPerScanline": 100 } ], "layout": { "class": "H5D_CHUNKED", "dims": [ 100, 100 ] } }, "shape": [ 1000, 1000 ], "type": "H5T_IEEE_F32LE" } Sample Response - SZIP Compression with chunking ------------------------------------------------ .. code-block:: http HTTP/1.1 201 Created Date: Thu, 18 Jun 2015 08:41:53 GMT Content-Length: 975 Content-Type: application/json Server: TornadoServer/3.2.2 .. 
code-block:: json { "id": "ad283c05-158c-11e5-bd67-3c15c2da029e", "attributeCount": 0, "created": "2015-06-18T07:36:04Z", "lastModified": "2015-06-18T07:36:04Z", "hrefs": [ { "href": "http://newdset_szip.datasettest.test.hdfgroup.org/datasets/ad283c05-158c-11e5-bd67-3c15c2da029e", "rel": "self" }, { "href": "http://newdset_szip.datasettest.test.hdfgroup.org/groups/ad2746d4-158c-11e5-a083-3c15c2da029e", "rel": "root" }, { "href": "http://newdset_szip.datasettest.test.hdfgroup.org/datasets/ad283c05-158c-11e5-bd67-3c15c2da029e/attributes", "rel": "attributes" }, { "href": "http://newdset_szip.datasettest.test.hdfgroup.org/datasets/ad283c05-158c-11e5-bd67-3c15c2da029e/value", "rel": "value" } ] } Related Resources ================= * :doc:`GET_Dataset` * :doc:`GET_Datasets` * :doc:`GET_Value` * :doc:`POST_Value` * :doc:`PUT_Value` ================================================ FILE: docs/DatasetOps/POST_Value.rst ================================================ ********************************************** POST Value ********************************************** Description =========== Gets values of a data for a given point selection (provided in the body of the request). Requests ======== Syntax ------ .. code-block:: http POST /datasets//value HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the requested dataset t Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Request Body ------------ The request body should be a JSON object with the following key: points ^^^^^^ An array of points defining the selection. Each point can either be an integer (if the dataset has just one dimension), or an array where the length of the array is equal to the number of dimensions of the dataset. Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: value ^^^^^ An array of values where the length of the array is equal to the number of points in the request. Each value will be a string, integer, or JSON object consist with the dataset type (e.g. an compound type). Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http POST /datasets/4e83ad1c-ab6e-11e4-babb-3c15c2da029e/value HTTP/1.1 Content-Length: 92 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: tall.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "points": [19, 17, 13, 11, 7, 5, 3, 2] } Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Tue, 03 Feb 2015 06:31:38 GMT Content-Length: 47 Content-Type: application/json Server: TornadoServer/3.2.2 .. 
code-block:: json { "value": [0, 1, 4, 9, 16, 25, 36, 49, 64, 81] } Related Resources ================= * :doc:`GET_Dataset` * :doc:`GET_Value` * :doc:`PUT_Value` ================================================ FILE: docs/DatasetOps/PUT_DatasetShape.rst ================================================ ********************************************** PUT Shape ********************************************** Description =========== Modifies the dimensions of a dataset. Dimensions can only be changed if the dataset was initially created with that dimension as *extensible* - i.e. the maxdims value for that dimension is larger than the initial dimension size (or maxdims set to 0). *Note:* Dimensions can only be made larger, they can not be reduced. Requests ======== Syntax ------ .. code-block:: http PUT /datasets//shape HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the dataset whose shape will be modified. Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Request Elements ---------------- The request body must include a JSON object with a "shape" key as described below: shape ^^^^^ An integer array giving the new dimensions of the dataset. Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http PUT /datasets/b9b6acc0-a839-11e4-aa86-3c15c2da029e/shape HTTP/1.1 Content-Length: 19 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: resized.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "shape": [10, 25] } Sample Response --------------- .. code-block:: http HTTP/1.1 201 Created Date: Fri, 30 Jan 2015 04:47:47 GMT Content-Length: 331 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "hrefs": [ {"href": "http://resized.test.hdfgroup.org/datasets/22e1b235-a83b-11e4-97f4-3c15c2da029e", "rel": "self"}, {"href": "http://resized.test.hdfgroup.org/datasets/22e1b235-a83b-11e4-97f4-3c15c2da029e", "rel": "owner"}, {"href": "http://resized.test.hdfgroup.org/groups/22dfff8f-a83b-11e4-883d-3c15c2da029e", "rel": "root"} ] } Related Resources ================= * :doc:`GET_Dataset` * :doc:`GET_DatasetShape` * :doc:`GET_Value` * :doc:`POST_Value` * :doc:`PUT_Value` ================================================ FILE: docs/DatasetOps/PUT_Value.rst ================================================ ********************************************** PUT Value ********************************************** Description =========== Update the values in a dataset. Requests ======== Syntax ------ .. code-block:: http PUT /datasets//value HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the requested dataset. Request Parameters ------------------ This implementation of the operation does not use request parameters. 
Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Request Body ------------ The request body should be a JSON object with the following keys: start: ^^^^^^ An optional key that gives the starting coordinate of the selection to be updated. The start value can either be an integer (for 1 dimensional arrays) or an array of integers where the length of the array is equal to the number of dimensions of the dataset. Each value must be greater than or equal to zero and less than the extent of the corresponding dimension. If start is not provided, the selection starts at 0 for each dimension. stop: ^^^^^ An optional key that gives the ending coordinate of the selection to be updated. The stop value can either be an integer (for 1 dimensional arrays) or an array of integers where the length of the array is equal to the number of dimensions of the dataset. Each value must be greater than or equal to start (or zero if start is not provided) and less than the extent of the corresponding dimension. step: ^^^^^ An optional key that gives the step value (i.e. the increment of the coordinate for each supplied value). The step value can either be an integer (for 1 dimensional arrays) or an array of integers where the length of the array is equal to the number of dimensions of the dataset. Each value must be greater than zero. points: ^^^^^^^ An optional key that contains a list of array elements to be updated. Each element of the list should be an integer if the dataset is of rank 1, or an n-element list (where n is the dataset rank) if the dataset rank is greater than 1. If points is provided (indicating a point selection update), then start, stop, and step (used for hyperslab selection) should not be provided. value: ^^^^^^ A JSON array containing the data values to be written. value_base64: ^^^^^^^^^^^^^ Use this key instead of "value" to use base64-encoded binary data rather than JSON ASCII text. This will be more efficient for large data transfers than using a JSON array. Note: "value_base64" is only supported for fixed length datatypes. Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- No response elements are returned. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- This example writes a 10x10 integer dataset with the values 0-99 inclusive. .. code-block:: http PUT /datasets/817e2280-ab5d-11e4-afe6-3c15c2da029e/value HTTP/1.1 Content-Length: 465 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: valueput.datasettest.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate ..
code-block:: json { "value": [ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [10, 11, 12, 13, 14, 15, 16, 17, 18, 19], [20, 21, 22, 23, 24, 25, 26, 27, 28, 29], [30, 31, 32, 33, 34, 35, 36, 37, 38, 39], [40, 41, 42, 43, 44, 45, 46, 47, 48, 49], [50, 51, 52, 53, 54, 55, 56, 57, 58, 59], [60, 61, 62, 63, 64, 65, 66, 67, 68, 69], [70, 71, 72, 73, 74, 75, 76, 77, 78, 79], [80, 81, 82, 83, 84, 85, 86, 87, 88, 89], [90, 91, 92, 93, 94, 95, 96, 97, 98, 99] ] } Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Tue, 03 Feb 2015 04:31:22 GMT Content-Length: 0 Content-Type: text/html; charset=UTF-8 Server: TornadoServer/3.2.2 Sample Request - Selection -------------------------- This example writes a portion of the dataset by using the start and stop keys in the request. .. code-block:: http PUT /datasets/b2d0af00-ab65-11e4-a874-3c15c2da029e/value HTTP/1.1 Content-Length: 92 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: valueputsel.datasettest.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "start": 5, "stop": 10, "value": [13, 17, 19, 23, 29] } Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Tue, 03 Feb 2015 05:30:01 GMT Content-Length: 0 Content-Type: text/html; charset=UTF-8 Server: TornadoServer/3.2.2 Related Resources ================= * :doc:`GET_Dataset` * :doc:`GET_Value` * :doc:`POST_Value` ================================================ FILE: docs/DatasetOps/index.rst ================================================ ###################### Datasets ###################### Datasets are objects that a composed of a homogenous collection of data elements. Each dataset has a *type* that specifies the structure of the individual elements (float, string, compound, etc.), and a *shape* that specifies the layout of the data elements (scalar, one-dimensional, multi-dimensional). In addition meta-data can be attached to a dataset in the form of attributes. See: :doc:`../AttrOps/index`. Creating Datasets ----------------- Use the :doc:`POST_Dataset` operation to create new datasets. As part of the POST request, JSON descriptions for the type and shape of the dataset are included with the request. Optionally, creation properties can be used to specify the chunk layout (how the data elements are stored in the server) and compression filter (e.g. GZIP, LZF, SZIP). Getting information about a dataset ----------------------------------- Use the :doc:`GET_Dataset` operation to retrieve information about a datasets type, shape, creation properties, and number of attributes. To list all the datasets within a domain use :doc:`GET_Datasets`. To list the datasets linked to a particular group use :doc:`../GroupOps/GET_Links` and look at links with a "collection" key of "datsets". Writing data to a dataset ------------------------- To write data into the dataset, use the :doc:`PUT_Value` operation. The request can either provide values for the entire dataset, or values for a hyperslab (rectangular sub-region) selection. In addition, if it desired to update a specific list of data elements, a point selection (series of element coordinates) can be passed to the :doc:`PUT_Value` operation. Reading data from a dataset --------------------------- To read either the entire dataset, or a specified selection, use the :doc:`GET_Value` operation. Without any request parameters, the GET operation retuns all data values. 
To read a specific hyperslab, use the select parameter to give the start and end indexes of the hyperslab (the selection can also include a step value to include a regular subset of the hyperslab). Finally, for one-dimensional datasets with compound types, a *query* parameter can be used to select elements meeting a specified condition. To read a specific list of elements (by index values), use the :doc:`POST_Value` operation (POST is used in this case rather than GET since the point selection values may be too large to include in the URI.) Resizable datasets ------------------ If one or more of the dimensions of a dataset may need to be extended after creation, provide a *maxdims* key to the shape during creation (see :doc:`POST_Dataset`). If the value of the maxdims dimension is 0, that dimension is *unlimited* and may be extended as much as desired. If an upper limit is known, use that value in maxdims which will allow that dimension to be extended up to the given value. To resize the dataset, use the :doc:`PUT_DatasetShape` operation with the desired shape value(s) for the new dimensions. Note: dimensions can only be increased, not decreased. Deleting datasets ----------------- The :doc:`DELETE_Dataset` operation will remove the dataset, its attributes, and any links to the object. List of Operations ------------------- .. toctree:: :maxdepth: 1 DELETE_Dataset GET_Dataset GET_Datasets GET_DatasetShape GET_DatasetType GET_Value POST_Dataset POST_Value PUT_DatasetShape PUT_Value ================================================ FILE: docs/DatatypeOps/DELETE_Datatype.rst ================================================ ********************************************** DELETE Datatype ********************************************** Description =========== The implementation of the DELETE operation deletes the committed datatype named in the URI. All attributes of the datatype will also be deleted. Requests ======== Syntax ------ .. code-block:: http DELETE /datatypes/ HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the datatype to be deleted. Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http DELETE /datatypes/93b6a335-ac44-11e4-8d71-3c15c2da029e HTTP/1.1 Content-Length: 0 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: namedtype_deleted.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Wed, 04 Feb 2015 08:05:26 GMT Content-Length: 363 Content-Type: application/json Server: TornadoServer/3.2.2 ..
code-block:: json { "hrefs": [ {"href": "http://namedtype_deleted.test.hdfgroup.org/datatypes", "rel": "self"}, {"href": "http://namedtype_deleted.test.hdfgroup.org/", "rel": "home"}, {"href": "http://namedtype_deleted.test.hdfgroup.org/groups/93b51245-ac44-11e4-8a21-3c15c2da029e", "rel": "root"} ] } Related Resources ================= * :doc:`../AttrOps/GET_Attributes` * :doc:`GET_Datatype` * :doc:`GET_Datatypes` * :doc:`POST_Datatype` * :doc:`../DatasetOps/POST_Dataset` * :doc:`../AttrOps/PUT_Attribute` ================================================ FILE: docs/DatatypeOps/GET_Datatype.rst ================================================ ********************************************** GET Datatype ********************************************** Description =========== Returns information about the committed datatype with the UUID given in the URI. Requests ======== Syntax ------ .. code-block:: http GET /datatypes/ HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the requested datatype. Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: id ^^ The UUID of the datatype object. type ^^^^ A JSON object representing the type of the datatype object. attributeCount ^^^^^^^^^^^^^^ The number of attributes belonging to the datatype. created ^^^^^^^ A timestamp giving the time the dataset was created in UTC (ISO-8601 format). lastModified ^^^^^^^^^^^^ A timestamp giving the most recent time the dataset has been modified (i.e. attributes updated) in UTC (ISO-8601 format). hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Get the committed datatype with UUID: "f545543d-...". Sample Request -------------- .. code-block:: http GET /datatypes/f545543d-a1b4-11e4-8fa4-3c15c2da029e HTTP/1.1 host: namedtype.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Wed, 21 Jan 2015 21:36:49 GMT Content-Length: 619 Etag: "c53bc5b2d3c3b5059b71ef92ca7d144a2df54456" Content-Type: application/json Server: TornadoServer/3.2.2 .. 
code-block:: json { "id": "f545543d-a1b4-11e4-8fa4-3c15c2da029e", "type": { "base": "H5T_IEEE_F32LE", "class": "H5T_FLOAT" }, "created": "2015-01-21T21:32:01Z", "lastModified": "2015-01-21T21:32:01Z", "attributeCount": 1, "hrefs": [ {"href": "http://namedtype.test.hdfgroup.org/datatypes/f545543d-a1b4-11e4-8fa4-3c15c2da029e", "rel": "self"}, {"href": "http://namedtype.test.hdfgroup.org/groups/f545103d-a1b4-11e4-b4a1-3c15c2da029e", "rel": "root"}, {"href": "http://namedtype.test.hdfgroup.org/datatypes/f545543d-a1b4-11e4-8fa4-3c15c2da029e/attributes", "rel": "attributes"}, {"href": "http://namedtype.test.hdfgroup.org/", "rel": "home"} ] } Related Resources ================= * :doc:`DELETE_Datatype` * :doc:`GET_Datatypes` * :doc:`POST_Datatype` * :doc:`../DatasetOps/POST_Dataset` * :doc:`../AttrOps/PUT_Attribute` ================================================ FILE: docs/DatatypeOps/GET_Datatypes.rst ================================================ ********************************************** GET Datatypes ********************************************** Description =========== Gets all the committed datatypes in a domain. Requests ======== Syntax ------ .. code-block:: http GET /datatypes HTTP/1.1 Host: DOMAIN Authorization: Request Parameters ------------------ This implementation of the operation uses the following request parameters (both optional): Limit ^^^^^ If provided, a positive integer value specifying the maximum number of UUID's to return. Marker ^^^^^^ If provided, a string value indicating that only UUID's that occur after the marker value will be returned. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET /datatypes HTTP/1.1 host: namedtype.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Wed, 21 Jan 2015 22:42:30 GMT Content-Length: 350 Etag: "e01f56869a9a919b1496c463f3569a2a7c319f11" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "datatypes": [ "f54542e6-a1b4-11e4-90bf-3c15c2da029e", "f545543d-a1b4-11e4-8fa4-3c15c2da029e" ], "hrefs": [ {"href": "http://namedtype.test.hdfgroup.org/datatypes", "rel": "self"}, {"href": "http://namedtype.test.hdfgroup.org/groups/f545103d-a1b4-11e4-b4a1-3c15c2da029e", "rel": "root"}, {"href": "http://namedtype.test.hdfgroup.org/", "rel": "home"} ] } Sample Request with Marker and Limit ------------------------------------ This example uses the "Marker" request parameter to return only UUIDs after the given Marker value. Also the "Limit" request parameter is used to limit the number of UUIDs in the response to 5. .. 
code-block:: http GET /datatypes?Marker=d779cd5e-a1e6-11e4-8fc5-3c15c2da029e&Limit=5 HTTP/1.1 host: type1k.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response with Marker and Limit ------------------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Thu, 22 Jan 2015 03:32:13 GMT Content-Length: 461 Etag: "a2e2d5a3ae63cd504d02b51d99f27b30d17b75b5" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "datatypes": [ "d779ddd9-a1e6-11e4-89e5-3c15c2da029e", "d779ef11-a1e6-11e4-8837-3c15c2da029e", "d77a008a-a1e6-11e4-8840-3c15c2da029e", "d77a121e-a1e6-11e4-b2b0-3c15c2da029e", "d77a2523-a1e6-11e4-aa6d-3c15c2da029e" ], "hrefs": [ {"href": "http://type1k.test.hdfgroup.org/datatypes", "rel": "self"}, {"href": "http://type1k.test.hdfgroup.org/groups/d7742c14-a1e6-11e4-b2a8-3c15c2da029e", "rel": "root"}, {"href": "http://type1k.test.hdfgroup.org/", "rel": "home"} ] } Related Resources ================= * :doc:`DELETE_Datatype` * :doc:`GET_Datatype` * :doc:`POST_Datatype` * :doc:`../DatasetOps/POST_Dataset` * :doc:`../AttrOps/PUT_Attribute`
================================================ FILE: docs/DatatypeOps/POST_Datatype.rst ================================================ ********************************************** POST Datatype ********************************************** Description =========== Creates a new committed datatype. Requests ======== Syntax ------ .. code-block:: http POST /datatypes HTTP/1.1 Host: DOMAIN Authorization: Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Request Elements ---------------- The request body must be a JSON object with a 'type' key as described below. Optionally, the request body can include a 'link' key that describes how the new committed datatype will be linked. type ^^^^ The value of the type key can either be one of the predefined type strings (see predefined types), or a JSON representation of a type (see :doc:`../Types/index`). link ^^^^ If present, the link value must include the following subkeys: link['id'] ^^^^^^^^^^ The UUID of the group the new datatype should be linked from. If the UUID is not valid, the request will fail and a new datatype will not be created. link['name'] ^^^^^^^^^^^^ The name of the new link. Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: id ^^ The UUID of the newly created datatype object. attributeCount ^^^^^^^^^^^^^^ The number of attributes belonging to the datatype. created ^^^^^^^ A timestamp giving the time the datatype was created in UTC (ISO-8601 format). lastModified ^^^^^^^^^^^^ A timestamp giving the most recent time the datatype has been modified (i.e. attributes updated) in UTC (ISO-8601 format). hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`.
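As an informal illustration of the request body described above (in addition to the raw HTTP examples in the next section), a committed datatype could be created from Python with the ``requests`` package. The endpoint and domain name here are placeholders for a local h5serv installation; the optional ``link`` key is shown commented out.

.. code-block:: python

    import json
    import requests

    # Placeholders for a local h5serv installation; adjust as needed.
    endpoint = "http://127.0.0.1:5000"
    domain = "newdtype.datatypetest.test.hdfgroup.org"

    # Commit a 32-bit little-endian float type. Uncomment the "link" key to
    # also link the new datatype from an existing group.
    body = {
        "type": "H5T_IEEE_F32LE",
        # "link": {"id": "<group-uuid>", "name": "dtype1"},
    }
    rsp = requests.post(endpoint + "/datatypes", params={"host": domain},
                        data=json.dumps(body))
    rsp.raise_for_status()            # expect 201 Created
    print("new datatype uuid:", rsp.json()["id"])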
Examples ======== Sample Request -------------- Create a new committed datatype using the "H5T_IEEE_F32LE" (32-bit float) predefined type. .. code-block:: http POST /datatypes HTTP/1.1 Content-Length: 26 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: newdtype.datatypetest.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "type": "H5T_IEEE_F32LE" } Sample Response --------------- .. code-block:: http HTTP/1.1 201 Created Date: Thu, 22 Jan 2015 19:06:17 GMT Content-Length: 533 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "id": "be08d40c-a269-11e4-84db-3c15c2da029e", "attributeCount": 0, "created": "2015-01-22T19:06:17Z", "lastModified": "2015-01-22T19:06:17Z", "hrefs": [ {"href": "http://newdtype.datatypetest.test.hdfgroup.org/datatypes/be08d40c-a269-11e4-84db-3c15c2da029e", "rel": "self"}, {"href": "http://newdtype.datatypetest.test.hdfgroup.org/groups/be00807d-a269-11e4-8d9c-3c15c2da029e", "rel": "root"}, {"href": "http://newdtype.datatypetest.test.hdfgroup.org/datatypes/be08d40c-a269-11e4-84db-3c15c2da029e/attributes", "rel": "attributes"} ] } Sample Request with Link ------------------------ Create a new committed datatype and link to root as "linked_dtype". .. code-block:: http POST /datatypes HTTP/1.1 Content-Length: 106 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: newlinkedtype.datatypetest.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "type": "H5T_IEEE_F64LE", "link": { "id": "76b0bbf8-a26c-11e4-8d4c-3c15c2da029e", "name": "linked_dtype" } } Sample Response with Link ------------------------- .. code-block:: http HTTP/1.1 201 Created Date: Thu, 22 Jan 2015 19:25:46 GMT Content-Length: 548 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "id": "76c3c33a-a26c-11e4-998c-3c15c2da029e", "attributeCount": 0, "created": "2015-01-22T19:25:46Z", "lastModified": "2015-01-22T19:25:46Z", "hrefs": [ {"href": "http://newlinkedtype.datatypetest.test.hdfgroup.org/datatypes/76c3c33a-a26c-11e4-998c-3c15c2da029e", "rel": "self"}, {"href": "http://newlinkedtype.datatypetest.test.hdfgroup.org/groups/76b0bbf8-a26c-11e4-8d4c-3c15c2da029e", "rel": "root"}, {"href": "http://newlinkedtype.datatypetest.test.hdfgroup.org/datatypes/76c3c33a-a26c-11e4-998c-3c15c2da029e/attributes", "rel": "attributes"} ] } Related Resources ================= * :doc:`DELETE_Datatype` * :doc:`GET_Datatype` * :doc:`GET_Datatypes` * :doc:`../DatasetOps/POST_Dataset` * :doc:`../AttrOps/PUT_Attribute`
================================================ FILE: docs/DatatypeOps/index.rst ================================================ ####################### Committed Datatypes ####################### Committed datatypes (also known as "named types") are objects that describe types. These types can be used in the creation of datasets and attributes. Committed datatypes can be linked to from a Group and can contain attributes, just like a dataset or group object. Creating committed datatypes ---------------------------- Use :doc:`POST_Datatype` to create a new datatype. A complete description of the type must be sent with the POST request. Getting information about a committed datatype ----------------------------------------------- Use the :doc:`GET_Datatype` operation to retrieve information about a committed datatype. To list all the committed datatypes within a domain, use :doc:`GET_Datatypes`.
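For example, a rough Python sketch that lists the committed datatype UUIDs in a domain (the endpoint and domain name are placeholders for a local installation):

.. code-block:: python

    import requests

    # Placeholders; adjust for your h5serv installation.
    endpoint = "http://127.0.0.1:5000"
    domain = "namedtype.test.hdfgroup.org"

    rsp = requests.get(endpoint + "/datatypes", params={"host": domain})
    rsp.raise_for_status()
    for uuid in rsp.json()["datatypes"]:
        print(uuid)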
To list the committed types linked to a particular group use :doc:`../GroupOps/GET_Links` and examine link object with a "collection" key of "datatypes". Deleting committed datatypes ---------------------------- Use :doc:`DELETE_Datatype` to delete a datatype. Links from any group to the datatype will be deleted. List of Operations ------------------ .. toctree:: :maxdepth: 1 DELETE_Datatype GET_Datatype GET_Datatypes POST_Datatype ================================================ FILE: docs/Diagram.rst ================================================ *************************** Diagram of REST operations *************************** .. image:: RESTful_HDF5.png :width: 100 % :alt: alternate text :align: right ================================================ FILE: docs/DomainOps/DELETE_Domain.rst ================================================ ********************************************** DELETE Domain ********************************************** Description =========== The DELETE operation deletes the given domain and all its resources (groups, datasets, attributes, etc.). Requests ======== Syntax ------ .. code-block:: http DELETE / HTTP/1.1 Host: DOMAIN Authorization: Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- This implementation of the operation does not return any response elements. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http DELETE / HTTP/1.1 Content-Length: 0 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: deleteme.test.hdfgroup.org Accept: */* Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 16 Jan 2015 03:47:33 GMT Content-Length: 0 Content-Type: text/html; charset=UTF-8 Server: TornadoServer/3.2.2 Related Resources ================= * :doc:`GET_Domain` * :doc:`PUT_Domain` ================================================ FILE: docs/DomainOps/GET_Domain.rst ================================================ ********************************************** GET Domain ********************************************** Description =========== This operation retrieves information about the requested domain. *Note:* If the HDF Dynamic DNS Server (see https://github.com/HDFGroup/dynamic-dns) is running, the operations can specify the domain as part of the URI. Example: http://tall.data.hdfgroup.org:7253/ returns data about the domain "tall" hosted on data.hdfgroup.org. The DNS server will determine the proper IP that maps to this domain. If the DNS Server is not setup, specify the desired domain in the Host line of the http header. Alternatively, the domain can be specified as a 'Host' query parameter. Example: http://127.0.0.1:7253?host=tall.data.hdfgroup.org. If no Host value is supplied, the default Table of Contents (TOC) domain is returned. Requests ======== Syntax ------ .. 
code-block:: http GET / HTTP/1.1 Host: DOMAIN Authorization: Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: root ^^^^ The UUID of the root group of this domain. created ^^^^^^^ A timestamp giving the time the domain was created in UTC (ISO-8601 format). lastModified ^^^^^^^^^^^^ A timestamp giving the most recent time that any content in the domain has been modified in UTC (ISO-8601 format). hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return any special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET / HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 16 Jan 2015 03:51:58 GMT Content-Length: 508 Etag: "e45bef255ffc0530c33857b88b15f551f371de38" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "root": "052dcbbd-9d33-11e4-86ce-3c15c2da029e", "created": "2015-01-16T03:51:58Z", "lastModified": "2015-01-16T03:51:58Z", "hrefs": [ {"href": "http://tall.test.hdfgroup.org/", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/datasets", "rel": "database"}, {"href": "http://tall.test.hdfgroup.org/groups", "rel": "groupbase"}, {"href": "http://tall.test.hdfgroup.org/datatypes", "rel": "typebase"}, {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e", "rel": "root"} ] } Related Resources ================= * :doc:`DELETE_Domain` * :doc:`../GroupOps/GET_Group` * :doc:`PUT_Domain` ================================================ FILE: docs/DomainOps/PUT_Domain.rst ================================================ ********************************************** PUT Domain ********************************************** Description =========== This operation creates a new domain. *Note*: Initially the only object contained in the domain is the root group. Use other PUT and POST operations to create new objects in the domain. *Note*: The operation will fail if the domain already exists (a 409 code will be returned). Requests ======== Syntax ------ .. code-block:: http PUT / HTTP/1.1 Host: DOMAIN Authorization: Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: root ^^^^ The UUID of the root group of this domain. 
created ^^^^^^^ A timestamp giving the time the domain was created in UTC (ISO-8601 format). lastModified ^^^^^^^^^^^^ A timestamp giving the most recent time that any content in the domain has been modified in UTC (ISO-8601 format). hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return any special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. An http status code of 409 (Conflict) will be returned if the domain already exists. Examples ======== Sample Request -------------- .. code-block:: http PUT / HTTP/1.1 Content-Length: 0 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: newfile.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate Sample Response --------------- .. code-block:: http HTTP/1.1 201 Created Date: Fri, 16 Jan 2015 04:11:52 GMT Content-Length: 523 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "root": "cd31cfdc-9d35-11e4-aa58-3c15c2da029e", "created": "2015-01-16T04:11:52Z", "lastModified": "2015-01-16T04:11:52Z", "hrefs": [ {"href": "http://newfile.test.hdfgroup.org/", "rel": "self"}, {"href": "http://newfile.test.hdfgroup.org/datasets", "rel": "database"}, {"href": "http://newfile.test.hdfgroup.org/groups", "rel": "groupbase"}, {"href": "http://newfile.test.hdfgroup.org/datatypes", "rel": "typebase"}, {"href": "http://newfile.test.hdfgroup.org/groups/cd31cfdc-9d35-11e4-aa58-3c15c2da029e", "rel": "root"} ] } Related Resources ================= * :doc:`DELETE_Domain` * :doc:`../GroupOps/GET_Group` * :doc:`GET_Domain`
================================================ FILE: docs/DomainOps/index.rst ================================================ ####################### Domains ####################### In h5serv, domains are containers for a related collection of resources, similar to a file in the traditional HDF5 library. In the h5serv implementation of the HDF5 REST API, domains *are* files, but in general the HDF REST API supports alternative implementations (e.g. data that is stored in a database). Most operations of the service act on a domain resource that is provided in the *Host* http header or (alternatively) the Host query parameter. Mapping of file paths to domain names ------------------------------------- To convert a file path to a domain name: #. Remove the extension #. Determine the path relative to the data directory #. Replace '/' with '.' #. Reverse the path #. Add the domain suffix (using the domain config value) As an example, consider a server installation where the data directory is '/data', an HDF5 file is located at ``/data/myfolder/an_hdf_file.h5``, and ``hdfgroup.org`` is the base domain. The above sequence of steps would look like the following: #. /data/myfolder/an_hdf_file #. myfolder/an_hdf_file #. myfolder.an_hdf_file #. an_hdf_file.myfolder #. an_hdf_file.myfolder.hdfgroup.org The final expression is what should be used in the Host field for any request that accesses that file. For path names that include non-alphanumeric characters, replace any such characters with the string '%XX' where XX is the hexadecimal value of the character. For example: ``this.file.has.dots.h5`` becomes: ``this%2Efile%2Ehas%2Edots`` Creating Domains ---------------- Use :doc:`PUT_Domain` to create a domain. The domain name must follow DNS conventions (e.g. two consecutive "dots" are not allowed).
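As a minimal sketch (assuming a local server on the default port 5000 and using the ``host`` query parameter rather than DNS; the domain name below is a made-up placeholder), a new domain could be created from Python as follows:

.. code-block:: python

    import requests

    endpoint = "http://127.0.0.1:5000"          # placeholder local server
    new_domain = "newfile.test.hdfgroup.org"    # placeholder domain name

    rsp = requests.put(endpoint + "/", params={"host": new_domain})
    if rsp.status_code == 409:
        print("domain already exists")
    else:
        rsp.raise_for_status()                  # expect 201 Created
        print("root group uuid:", rsp.json()["root"])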
After creation, the domain will contain just one resource, the root group. Use :doc:`GET_Domain` to get information about a domain, including the UUID of the domain's root group. Getting Information about Domains --------------------------------- Use :doc:`GET_Domain` to retrieve information about a specific domain (specified in the Host header). If the Host value is not supplied, the service returns information on the auto-generated Table of Contents (TOC) domain, which lists the domains that are available. Deleting Domains ---------------- Use :doc:`DELETE_Domain` to delete a domain. All resources within the domain will be deleted! The TOC domain cannot be deleted. List of Operations ------------------ .. toctree:: :maxdepth: 1 DELETE_Domain GET_Domain PUT_Domain
================================================ FILE: docs/FAQ/index.rst ================================================ ################### FAQ ################### What datatypes are supported? ----------------------------- ========================= ============================================ Type Precisions ========================= ============================================ Integer 1, 2, 4 or 8 byte, BE/LE, signed/unsigned Float 4, 8 byte, BE/LE Compound Arbitrary names and offsets Strings (fixed-length) Any length Strings (variable-length) Any length, ASCII Opaque Any length Array Any supported type Enumeration Any integer type References Region and object ========================= ============================================ Unsupported types: ========================= ============================================ Type Status ========================= ============================================ HDF5 VLEN (non-string) Coming soon! HDF5 "time" type Opaque Bitfields ========================= ============================================ Why does h5serv use those long ids? ------------------------------------ h5serv uses the UUID standard (http://en.wikipedia.org/wiki/Universally_unique_identifier) to identify objects (datasets, groups, and committed datatypes) uniquely. The benefit of using UUIDs is that objects can be uniquely identified without requiring any central coordination. How can I get a dataset (or group) via a pathname? -------------------------------------------------- You will need to iterate through the path to get the UUID of each subgroup. E.g. suppose the path of interest is "/g1/g1.1" in the domain: "tall.data.hdfgroup.org". Perform these actions to get the UUID of the group at /g1/g1.1. #. ``GET /`` // returns the UUID of the root group #. ``GET /groups//links/g1`` // returns the UUID of the group at "/g1" #. ``GET /groups//links/g1.1`` // returns the UUID of the group at "/g1/g1.1" How do I guard against an attribute (dataset/group/file) being deleted by a request? ----------------------------------------------------------------------------------------- Future releases of h5serv will support authorization and permissions to protect content that shouldn't be altered. For now, the choices are: #. Don't expose the h5serv endpoint on a non-trusted network #. Make the files read-only #. Make periodic backups of all data files #. Don't share the domain name with non-trusted sources. Since h5serv doesn't provide an operation to list all domains on the server, creating a non-trivial domain name (e.g. "mydata_18494") will be relatively secure. How can I display my data in a nice Web UI? ------------------------------------------- There are many Javascript libraries (e.g.
http://d3js.org) that can take the data returned by h5serv to create compelling graphics. I have a C or Fortran application that uses HDF5, how can I take advantage of h5serv? ------------------------------------------------------------------------------------- We are planning to create an HDF5 library plugin that will transparently invoke the REST API. For now, you can use C-libraries such as libcurl to invoke h5serv requests. Is there documentation on the JSON format generated by h5tojson.py? ------------------------------------------------------------------- Yes. There is a grammar published here: http://hdf5-json.readthedocs.org/en/latest/index.html. How do I submit a bug report? ------------------------------ If you have a Github account, create an issue here: https://github.com/HDFGroup/h5serv/issues. Alternatively, you can send an email to the HDF Group help desk: help@hdfgroup.org.
================================================ FILE: docs/GroupOps/DELETE_Group.rst ================================================ ********************************************** DELETE Group ********************************************** Description =========== The implementation of the DELETE operation deletes the group with the UUID given in the URI. All attributes and links of the group will also be deleted. In addition, any links from other groups **TO** the deleted group will be removed. *Note:* Groups, datatypes, and datasets that are referenced by the group's links will **not** be deleted. Use the appropriate DELETE operation to remove those objects. Requests ======== Syntax ------ .. code-block:: http DELETE /groups/ HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the group to be deleted. Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http DELETE /groups/45a882e1-9d01-11e4-8acf-3c15c2da029e HTTP/1.1 Host: testGroupDelete.test.hdfgroup.org Authorization: authorization_string Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Thu, 15 Jan 2015 21:55:51 GMT Content-Length: 270 Content-Type: application/json Server: TornadoServer/3.2.2 ..
code-block:: json { "hrefs": [ {"href": "http://testGroupDelete.test.hdfgroup.org/groups", "rel": "self"}, {"href": "http://testGroupDelete.test.hdfgroup.org/groups/45a06719-9d01-11e4-9b1c-3c15c2da029e", "rel": "root"}, {"href": "http://testGroupDelete.test.hdfgroup.org/", "rel": "home"} ] } Related Resources ================= * :doc:`POST_Group` * :doc:`GET_Group` ================================================ FILE: docs/GroupOps/DELETE_Link.rst ================================================ ********************************************** DELETE Link ********************************************** Description =========== The implementation of the DELETE operation deletes the link named in the URI. Groups, datatypes, and datasets that are referenced by the link will **not** be deleted. To delete groups, datatypes or datasets, use the appropriate DELETE operation for those objects. Requests ======== Syntax ------ .. code-block:: http DELETE /groups//links/ HTTP/1.1 Host: DOMAIN Authorization: * ** is the UUID of the group the link is a member of. * ** is the URL-encoded name of the link. Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- An attempt to delete the root group will return 403 - Forbidden. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http DELETE /groups/25dd052b-a06d-11e4-a29e-3c15c2da029e/links/deleteme HTTP/1.1 Content-Length: 0 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: tall_updated.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Tue, 20 Jan 2015 06:25:37 GMT Content-Length: 299 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "hrefs": [ {"href": "http://tall_updated.test.hdfgroup.org/groups/25dd052b-a06d-11e4-a29e-3c15c2da029e", "rel": "root"}, {"href": "http://tall_updated.test.hdfgroup.org/", "rel": "home"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/25dd052b-a06d-11e4-a29e-3c15c2da029e", "rel": "owner"} ] } Related Resources ================= * :doc:`../DatasetOps/DELETE_Dataset` * :doc:`../DatatypeOps/DELETE_Datatype` * :doc:`DELETE_Group` * :doc:`GET_Link` * :doc:`GET_Groups` * :doc:`POST_Group` ================================================ FILE: docs/GroupOps/GET_Group.rst ================================================ ********************************************** GET Group ********************************************** Description =========== Returns information about the group with the UUID given in the URI. Requests ======== Syntax ------ .. code-block:: http GET /groups/ HTTP/1.1 Host: DOMAIN Authorization: ** is the UUID of the requested group. 
Request Parameters ------------------ include_links ^^^^^^^^^^^^^ If this request parameter is provided, the links of the group are included in the response. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: id ^^ The UUID of the requested group attributeCount ^^^^^^^^^^^^^^ The number of attributes belonging to the group. linkCount ^^^^^^^^^ The number of links belonging to the group. created ^^^^^^^ A timestamp giving the time the group was created in UTC (ISO-8601 format). lastModified ^^^^^^^^^^^^ A timestamp giving the most recent time the group has been modified (i.e. attributes or links updated) in UTC (ISO-8601 format). hrefs ^^^^^ An array of hypertext links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET /groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 16 Jan 2015 20:06:08 GMT Content-Length: 660 Etag: "2c410d1c469786f25ed0075571a8e7a3f313cec1" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "id": "052dcbbd-9d33-11e4-86ce-3c15c2da029e", "attributeCount": 2, "linkCount": 2, "created": "2015-01-16T03:47:22Z", "lastModified": "2015-01-16T03:47:22Z", "hrefs": [ {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/links", "rel": "links"}, {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"}, {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/attributes", "rel": "attributes"} ] } Related Resources ================= * :doc:`DELETE_Group` * :doc:`GET_Links` * :doc:`GET_Groups` * :doc:`POST_Group` * :doc:`../AttrOps/GET_Attribute` ================================================ FILE: docs/GroupOps/GET_Groups.rst ================================================ ********************************************** GET Groups ********************************************** Description =========== Returns UUIDs for all the groups in a domain (other than the root group). Requests ======== Syntax ------ .. code-block:: http GET /groups HTTP/1.1 Host: DOMAIN Authorization: Request Parameters ------------------ This implementation of the operation uses the following request parameters (both optional): Limit ^^^^^ If provided, a positive integer value specifying the maximum number of UUID's to return. Marker ^^^^^^ If provided, a string value indicating that only UUID's that occur after the marker value will be returned. 
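To illustrate how these two parameters work together, here is a rough Python sketch (endpoint and domain are placeholders for a local installation) that pages through all the group UUIDs in a domain, passing the last UUID of each batch as the next Marker:

.. code-block:: python

    import requests

    endpoint = "http://127.0.0.1:5000"        # placeholder local server
    domain = "group1k.test.hdfgroup.org"      # placeholder domain

    uuids = []
    marker = None
    while True:
        params = {"host": domain, "Limit": 100}
        if marker:
            params["Marker"] = marker
        rsp = requests.get(endpoint + "/groups", params=params)
        rsp.raise_for_status()
        batch = rsp.json()["groups"]
        if not batch:
            break                             # no more UUIDs after the marker
        uuids.extend(batch)
        marker = batch[-1]                    # resume after the last UUID returned
    print(len(uuids), "groups found")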
Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: groups ^^^^^^ An array of UUIDs - one for each group (excluding the root group) in the domain. If the "Marker" and/or "Limit" request parameters are used, a subset of the UUIDs may be returned. hrefs ^^^^^ An array of hypertext links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET /groups HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 16 Jan 2015 21:53:48 GMT Content-Length: 449 Etag: "83575a7865761b6d4eaf5d285ab1de062c49250b" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "groups": [ "052e001e-9d33-11e4-9a3d-3c15c2da029e", "052e13bd-9d33-11e4-91a6-3c15c2da029e", "052e5ae8-9d33-11e4-888d-3c15c2da029e", "052e700a-9d33-11e4-9fe4-3c15c2da029e", "052e89c7-9d33-11e4-b9bc-3c15c2da029e" ], "hrefs": [ {"href": "http://tall.test.hdfgroup.org/groups", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"} ] } Sample Request with Marker and Limit ------------------------------------ This example uses the "Marker" request parameter to return only UUIDs after the given Marker value. The "Limit" request parameter is used to limit the number of UUIDs in the response to 5. .. code-block:: http GET /groups?Marker=cba6e3fd-9dbd-11e4-bf4a-3c15c2da029e&Limit=5 HTTP/1.1 host: group1k.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response with Marker and Limit ------------------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 16 Jan 2015 22:02:46 GMT Content-Length: 458 Etag: "49221af3436fdaca7e26c74b491ccf8698555f08" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "groups": [ "cba6fc19-9dbd-11e4-846e-3c15c2da029e", "cba71842-9dbd-11e4-abd0-3c15c2da029e", "cba73442-9dbd-11e4-a6e9-3c15c2da029e", "cba74fc5-9dbd-11e4-bc15-3c15c2da029e", "cba77c2e-9dbd-11e4-9c71-3c15c2da029e" ], "hrefs": [ {"href": "http://group1k.test.hdfgroup.org/groups", "rel": "self"}, {"href": "http://group1k.test.hdfgroup.org/groups/cb9ebf11-9dbd-11e4-9e83-3c15c2da029e", "rel": "root"}, {"href": "http://group1k.test.hdfgroup.org/", "rel": "home"} ] } Related Resources ================= * :doc:`DELETE_Group` * :doc:`GET_Links` * :doc:`GET_Group` * :doc:`POST_Group`
================================================ FILE: docs/GroupOps/GET_Link.rst ================================================ ********************************************** GET Link ********************************************** Description =========== Returns information about a Link.
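Because the response for a hard link includes the UUID of its target, repeated GET Link requests can be used to resolve an HDF5 path to an object UUID, as in the FAQ example. A rough Python sketch (endpoint and domain are placeholders; it assumes each path component is a hard link):

.. code-block:: python

    import requests

    endpoint = "http://127.0.0.1:5000"      # placeholder local server
    domain = "tall.test.hdfgroup.org"       # placeholder domain

    rsp = requests.get(endpoint + "/", params={"host": domain})
    rsp.raise_for_status()
    uuid = rsp.json()["root"]               # start from the root group

    for name in ("g1", "g1.1"):
        url = endpoint + "/groups/" + uuid + "/links/" + name
        rsp = requests.get(url, params={"host": domain})
        rsp.raise_for_status()
        uuid = rsp.json()["link"]["id"]     # UUID of the hard link's target
    print("uuid of /g1/g1.1:", uuid)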
Requests ======== Syntax ------ .. code-block:: http GET /groups//links/ HTTP/1.1 Host: DOMAIN Authorization: * ** is the UUID of the group the link is a member of. * ** is the URL-encoded name of the link. Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: link["title"] ^^^^^^^^^^^^^ The name of the link. link["collection"] ^^^^^^^^^^^^^^^^^^ For hard links, the domain collection for which the object the link points to is a member of. The value will be one of: "groups", "datasets", "datatypes". For symbol links, this element is not present. link["class"] ^^^^^^^^^^^^^ Indicates the type of link. One of the following values will be returned: * H5L_TYPE_HARD: A direct link to a group, dataset, or committed datatype object in the domain * H5L_TYPE_SOFT: A symbolic link that gives a path to an object within the domain (object may or may not be present). * H5L_TYPE_EXTERNAL: A symbolic link to an object that is external to the domain * H5L_TYPE_UDLINK: A user-defined link (this implementation only provides title and class for user-defined links) link["h5path"] ^^^^^^^^^^^^^^ For symbolic links ("H5L_TYPE_SOFT" or "H5L_TYPE_EXTERNAL"), the path to the resource the link references. link["h5domain"] ^^^^^^^^^^^^^^^^ For external links, the path of the external domain containing the object that is linked. *Note:* The domain may or may not exist. Use GET / with the domain to verify. link["id"] ^^^^^^^^^^^^ For hard links, the uuid of the object the link points to. For symbolic links this element is not present created ^^^^^^^ A timestamp giving the time the link was created in UTC (ISO-8601 format). lastModified ^^^^^^^^^^^^ A timestamp giving the most recent time the group has been modified in UTC (ISO-8601 format). hrefs ^^^^^ An array of hypertext links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request - Hard Link -------------------------- .. code-block:: http GET /groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/links/g1 HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response - Hard Link --------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 16 Jan 2015 22:42:05 GMT Content-Length: 688 Etag: "70c5c4f2f7cac9f7f155fe026f4c492f65e3fb8e" Content-Type: application/json Server: TornadoServer/3.2.2 .. 
code-block:: json { "link": { "title": "g1", "collection": "groups", "class": "H5L_TYPE_HARD", "id": "052e001e-9d33-11e4-9a3d-3c15c2da029e" }, "created": "2015-01-16T03:47:22Z", "lastModified": "2015-01-16T03:47:22Z", "hrefs": [ {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e/links/g1", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"}, {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e", "rel": "owner"}, {"href": "http://tall.test.hdfgroup.org/groups/052e001e-9d33-11e4-9a3d-3c15c2da029e", "rel": "target"} ] } Sample Request - Soft Link -------------------------- .. code-block:: http GET /groups/052e700a-9d33-11e4-9fe4-3c15c2da029e/links/slink HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Related Resources Sample Response - Soft Link --------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Fri, 16 Jan 2015 23:29:27 GMT Content-Length: 620 Etag: "7bd777729ac5af261c85c7e3b87ef0045739bf77" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "link": { "title": "slink", "class": "H5L_TYPE_SOFT", "h5path": "somevalue" }, "created": "2015-01-16T03:47:22Z", "lastModified": "2015-01-16T03:47:22Z", "hrefs": [ {"href": "http://tall.test.hdfgroup.org/groups/052e700a-9d33-11e4-9fe4-3c15c2da029e/links/slink", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"}, {"href": "http://tall.test.hdfgroup.org/groups/052e700a-9d33-11e4-9fe4-3c15c2da029e", "rel": "owner"}, {"href": "http://tall.test.hdfgroup.org/#h5path(somevalue)", "rel": "target"} ] } Sample Request - External Link ------------------------------ .. code-block:: http GET /groups/052e5ae8-9d33-11e4-888d-3c15c2da029e/links/extlink HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response - External Link ------------------------------- .. code-block:: http HTTP/1.1 200 OK Date: Tue, 20 Jan 2015 05:47:55 GMT Content-Length: 644 Etag: "1b7a228acdb19f7259ed8a1b3ba4bc442b405ef9" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "link": { "title": "extlink", "class": "H5L_TYPE_EXTERNAL", "h5path": "somepath", "h5domain": "somefile" }, "created": "2015-01-16T03:47:22Z", "lastModified": "2015-01-16T03:47:22Z", "hrefs": [ {"href": "http://tall.test.hdfgroup.org/groups/052e5ae8-9d33-11e4-888d-3c15c2da029e/links/extlink", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/052dcbbd-9d33-11e4-86ce-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"}, {"href": "http://tall.test.hdfgroup.org/groups/052e5ae8-9d33-11e4-888d-3c15c2da029e", "rel": "owner"}, {"href": "http://somefile.hdfgroup.org#h5path(somepath)", "rel": "target"} ] } Sample Request - User Defined Link ---------------------------------- .. code-block:: http GET /groups/0262c3a6-a069-11e4-8905-3c15c2da029e/links/udlink HTTP/1.1 host: tall_with_udlink.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response - User Defined Link ----------------------------------- .. 
code-block:: http HTTP/1.1 200 OK Date: Tue, 20 Jan 2015 05:56:00 GMT Content-Length: 576 Etag: "2ab310eba3bb4282f84d643fcc30e591da485576" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "link": { "class": "H5L_TYPE_USER_DEFINED", "title": "udlink" }, "created": "2015-01-16T03:47:22Z", "lastModified": "2015-01-16T03:47:22Z", "hrefs": [ {"href": "http://tall_with_udlink.test.hdfgroup.org/groups/0262c3a6-a069-11e4-8905-3c15c2da029e/links/udlink", "rel": "self"}, {"href": "http://tall_with_udlink.test.hdfgroup.org/groups/0260b214-a069-11e4-a840-3c15c2da029e", "rel": "root"}, {"href": "http://tall_with_udlink.test.hdfgroup.org/", "rel": "home"}, {"href": "http://tall_with_udlink.test.hdfgroup.org/groups/0262c3a6-a069-11e4-8905-3c15c2da029e", "rel": "owner"} ] } ================= * :doc:`DELETE_Link` * :doc:`GET_Links` * :doc:`PUT_Link` ================================================ FILE: docs/GroupOps/GET_Links.rst ================================================ ********************************************** GET Links ********************************************** Description =========== Returns all the links for a given group. Requests ======== Syntax ------ .. code-block:: http GET /groups//links HTTP/1.1 Host: DOMAIN Authorization: * ** is the UUID of the group the links to be returned are a member of. Request Parameters ------------------ This implementation of the operation uses the following request parameters (both optional): Limit ^^^^^ If provided, a positive integer value specifying the maximum number of links to return. Marker ^^^^^^ If provided, a string value indicating that only links that occur after the marker value will be returned. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: links ^^^^^ An array of JSON objects giving information about each link returned. See :doc:`GET_Link` for a description of the link response elements. hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- .. code-block:: http GET /groups/0ad37be1-a06f-11e4-8651-3c15c2da029e/links HTTP/1.1 host: tall.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response --------------- .. code-block:: http HTTP/1.1 200 OK Date: Tue, 20 Jan 2015 06:55:19 GMT Content-Length: 607 Etag: "49edcce6a8f724108d41d52c98002d6255286ff8" Content-Type: application/json Server: TornadoServer/3.2.2 .. 
code-block:: json { "links": [ { "title": "g1.2.1", "class": "H5L_TYPE_HARD", "collection": "groups", "id": "0ad38d45-a06f-11e4-a909-3c15c2da029e" }, { "title": "extlink", "class": "H5L_TYPE_EXTERNAL", "h5path": "somepath", "file": "somefile" } ], "hrefs": [ {"href": "http://tall.test.hdfgroup.org/groups/0ad37be1-a06f-11e4-8651-3c15c2da029e/links", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/0ad2e151-a06f-11e4-bc68-3c15c2da029e", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"}, {"href": "http://tall.test.hdfgroup.org/groups/0ad37be1-a06f-11e4-8651-3c15c2da029e", "rel": "owner"} ] } Sample Request Batch -------------------- .. code-block:: http GET /groups/76bddb1e-a06e-11e4-86d6-3c15c2da029e/links?Marker=g0089&Limit=5 HTTP/1.1 host: group1k.test.hdfgroup.org Accept-Encoding: gzip, deflate Accept: */* User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 Sample Response Batch --------------------- .. code-block:: http HTTP/1.1 200 OK Date: Tue, 20 Jan 2015 07:30:03 GMT Content-Length: 996 Etag: "221affdeae54076d3493ce8ce0ed80ddb89c6e27" Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "links": [ {"title": "g0090", "id": "76c53485-a06e-11e4-96f3-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"}, {"title": "g0091", "id": "76c54d40-a06e-11e4-a342-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"}, {"title": "g0092", "id": "76c564f5-a06e-11e4-bccd-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"}, {"title": "g0093", "id": "76c57d19-a06e-11e4-a9a8-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"}, {"title": "g0094", "id": "76c5941c-a06e-11e4-b641-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"} ], "hrefs": [ {"href": "http://group1k.test.hdfgroup.org/groups/76bddb1e-a06e-11e4-86d6-3c15c2da029e/links", "rel": "self"}, {"href": "http://group1k.test.hdfgroup.org/groups/76bddb1e-a06e-11e4-86d6-3c15c2da029e", "rel": "root"}, {"href": "http://group1k.test.hdfgroup.org/", "rel": "home"}, {"href": "http://group1k.test.hdfgroup.org/groups/76bddb1e-a06e-11e4-86d6-3c15c2da029e", "rel": "owner"} ] } Related Resources ================= * :doc:`DELETE_Link` * :doc:`GET_Link` * :doc:`GET_Group` * :doc:`PUT_Link` ================================================ FILE: docs/GroupOps/POST_Group.rst ================================================ ********************************************** POST Group ********************************************** Description =========== Creates a new Group. *Note:* By default he new Group will not be linked from any other group in the domain. A link element can be included in the request body to have an existing group link to the new group. Alternatively, use the *PUT link* operation to link the new group. Requests ======== Syntax ------ .. code-block:: http POST /groups HTTP/1.1 Host: DOMAIN Authorization: Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Request Elements ---------------- Optionally the request body can be a JSON object that has a link key with sub-keys: id ^^ The UUID of the group the new group should be linked to. If the UUID is not valid, the request will fail and a new group will not be created. name ^^^^ The name of the new link. 
Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: id ^^ The UUID of the newly created group attributeCount ^^^^^^^^^^^^^^ The number of attributes belonging to the group. linkCount ^^^^^^^^^ The number of links belonging to the group. created ^^^^^^^ A timestamp giving the time the group was created in UTC (ISO-8601 format). lastModified ^^^^^^^^^^^^ A timestamp giving the most recent time the group has been modified (i.e. attributes or links updated) in UTC (ISO-8601 format). hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request -------------- Create a new, un-linked Group. .. code-block:: http POST /groups HTTP/1.1 Content-Length: 0 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: testGroupPost.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate Sample Response --------------- .. code-block:: http HTTP/1.1 201 Created Content-Length: 705 Content-Location: http://testGroupPost.test.hdfgroup.org/groups/777978c5-a078-11e4-8755-3c15c2da029e Server: TornadoServer/3.2.2 Location: http://testGroupPost.test.hdfgroup.org/groups/777978c5-a078-11e4-8755-3c15c2da029e Date: Tue, 20 Jan 2015 07:46:38 GMT Content-Type: application/json .. code-block:: json { "id": "777978c5-a078-11e4-8755-3c15c2da029e", "created": "2015-01-20T07:46:38Z", "lastModified": "2015-01-20T07:46:38Z", "attributeCount": 0, "linkCount": 0, "hrefs": [ {"href": "http://testGroupPost.test.hdfgroup.org/groups/777978c5-a078-11e4-8755-3c15c2da029e", "rel": "self"}, {"href": "http://testGroupPost.test.hdfgroup.org/groups/777978c5-a078-11e4-8755-3c15c2da029e/links", "rel": "links"}, {"href": "http://testGroupPost.test.hdfgroup.org/groups/777109b3-a078-11e4-8512-3c15c2da029e", "rel": "root"}, {"href": "http://testGroupPost.test.hdfgroup.org/", "rel": "home"}, {"href": "http://testGroupPost.test.hdfgroup.org/groups/777978c5-a078-11e4-8755-3c15c2da029e/attributes", "rel": "attributes"} ] } Sample Request with Link ------------------------ Create a new Group, link to root (which has uuid of "36b921f3-...") as "linked_group". .. code-block:: http POST /groups HTTP/1.1 Content-Length: 79 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: testGroupPostWithLink.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json { "link": { "id": "36b921f3-a07a-11e4-88da-3c15c2da029e", "name": "linked_group" } } Sample Response with Link ------------------------- .. code-block:: http HTTP/1.1 201 Created Content-Length: 745 Content-Location: http://testGroupPostWithLink.test.hdfgroup.org/groups/36cbe08a-a07a-11e4-8301-3c15c2da029e Server: TornadoServer/3.2.2 Location: http://testGroupPostWithLink.test.hdfgroup.org/groups/36cbe08a-a07a-11e4-8301-3c15c2da029e Date: Tue, 20 Jan 2015 07:59:09 GMT Content-Type: application/json .. 
code-block:: json { "id": "36cbe08a-a07a-11e4-8301-3c15c2da029e", "attributeCount": 0, "linkCount": 0, "created": "2015-01-20T07:59:09Z", "lastModified": "2015-01-20T07:59:09Z", "hrefs": [ {"href": "http://testGroupPostWithLink.test.hdfgroup.org/groups/36cbe08a-a07a-11e4-8301-3c15c2da029e", "rel": "self"}, {"href": "http://testGroupPostWithLink.test.hdfgroup.org/groups/36cbe08a-a07a-11e4-8301-3c15c2da029e/links", "rel": "links"}, {"href": "http://testGroupPostWithLink.test.hdfgroup.org/groups/36b921f3-a07a-11e4-88da-3c15c2da029e", "rel": "root"}, {"href": "http://testGroupPostWithLink.test.hdfgroup.org/", "rel": "home"}, {"href": "http://testGroupPostWithLink.test.hdfgroup.org/groups/36cbe08a-a07a-11e4-8301-3c15c2da029e/attributes", "rel": "attributes"} ] } Related Resources ================= * :doc:`DELETE_Group` * :doc:`GET_Links` * :doc:`PUT_Link` * :doc:`GET_Group` * :doc:`GET_Groups` ================================================ FILE: docs/GroupOps/PUT_Link.rst ================================================ ********************************************** PUT Link ********************************************** Description =========== Creates a new link in a given group. Either hard, soft, or external links can be created based on the request elements. See examples below. *Note:* any existing link with the same name will be replaced with the new link. Requests ======== Syntax ------ .. code-block:: http PUT /groups//links/ HTTP/1.1 Host: DOMAIN Authorization: * ** is the UUID of the group that the link will be created in. * ** is the URL-encoded name of the link. Request Parameters ------------------ This implementation of the operation does not use request parameters. Request Headers --------------- This implementation of the operation uses only the request headers that are common to most requests. See :doc:`../CommonRequestHeaders` Request Elements ---------------- The request body must include a JSON object that has the following key: id ^^ The UUID of the group the new group should be linked to. If the UUID is not valid, the request will fail and a new group will not be created. If this key is present, the h5path and h5domain keys will be ignored h5path ^^^^^^ A string describing a path to an external resource. If this key is present an soft or external link will be created. h5domain ^^^^^^^^ A string giving the external domain where the resource is present. If this key is present, the h5path key must be provided as well. Responses ========= Response Headers ---------------- This implementation of the operation uses only response headers that are common to most responses. See :doc:`../CommonResponseHeaders`. Response Elements ----------------- On success, a JSON response will be returned with the following elements: hrefs ^^^^^ An array of links to related resources. See :doc:`../Hypermedia`. Special Errors -------------- The implementation of the operation does not return special errors. For general information on standard error codes, see :doc:`../CommonErrorResponses`. Examples ======== Sample Request - Create Hard Link --------------------------------- In group "e0309a0a-...", create a hard link named "g3" that points to the object with uuid "e032ad9c-...". .. code-block:: http PUT /groups/e0309a0a-a198-11e4-b127-3c15c2da029e/links/g3 HTTP/1.1 Content-Length: 46 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: tall_updated.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. 
code-block:: json {"id": "e032ad9c-a198-11e4-8d53-3c15c2da029e"} Sample Response - Create Hard Link ---------------------------------- .. code-block:: http HTTP/1.1 201 Created Date: Wed, 21 Jan 2015 18:11:09 GMT Content-Length: 418 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "hrefs": [ {"href": "http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e/links/g3", "rel": "self"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e", "rel": "root"}, {"href": "http://tall_updated.test.hdfgroup.org/", "rel": "home"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e", "rel": "owner"} ] } Sample Request - Create Soft Link --------------------------------- In group "e0309a0a-...", create a soft link named "softlink" that contains the path "/somewhere". .. code-block:: http PUT /groups/e0309a0a-a198-11e4-b127-3c15c2da029e/links/softlink HTTP/1.1 Content-Length: 24 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: tall_updated.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json {"h5path": "/somewhere"} Sample Response - Create Soft Link ---------------------------------- .. code-block:: http HTTP/1.1 201 Created Date: Wed, 21 Jan 2015 18:35:26 GMT Content-Length: 424 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "hrefs": [ {"href": "http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e/links/softlink", "rel": "self"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e", "rel": "root"}, {"href": "http://tall_updated.test.hdfgroup.org/", "rel": "home"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/e0309a0a-a198-11e4-b127-3c15c2da029e", "rel": "owner"} ] } Sample Request - Create External Link ------------------------------------- In group "d2f8bd6b-...", create an external link named "extlink" that references the object at path: "/somewhere" in domain: "external_target.test.hdfgroup.org". .. code-block:: http PUT /groups/d2f8bd6b-a1b1-11e4-ae1c-3c15c2da029e/links/extlink HTTP/1.1 Content-Length: 69 User-Agent: python-requests/2.3.0 CPython/2.7.8 Darwin/14.0.0 host: tall_updated.test.hdfgroup.org Accept: */* Accept-Encoding: gzip, deflate .. code-block:: json {"h5domain": "external_target.test.hdfgroup.org", "h5path": "/dset1"} Sample Response - Create External Link -------------------------------------- .. code-block:: http HTTP/1.1 201 Created Date: Wed, 21 Jan 2015 21:09:45 GMT Content-Length: 423 Content-Type: application/json Server: TornadoServer/3.2.2 .. code-block:: json { "hrefs": [ {"href": "http://tall_updated.test.hdfgroup.org/groups/d2f8bd6b-a1b1-11e4-ae1c-3c15c2da029e/links/extlink", "rel": "self"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/d2f8bd6b-a1b1-11e4-ae1c-3c15c2da029e", "rel": "root"}, {"href": "http://tall_updated.test.hdfgroup.org/", "rel": "home"}, {"href": "http://tall_updated.test.hdfgroup.org/groups/d2f8bd6b-a1b1-11e4-ae1c-3c15c2da029e", "rel": "owner"} ] } Related Resources ================= * :doc:`DELETE_Link` * :doc:`GET_Link` * :doc:`GET_Links` * :doc:`GET_Group` ================================================ FILE: docs/GroupOps/index.rst ================================================ #################### Groups #################### Groups are objects that can be used to organize objects within a domain. 
Groups contain *links* which can reference other objects (datasets, groups, or committed datatypes). There are four different types of links that can be used: * hard: A direct link to a group, dataset, or committed datatype object in the domain. * soft: A symbolic link that gives a path to an object within the domain (the object may or may not be present). * external: A symbolic link to an object that is external to the domain. * user-defined: A user-defined link (this implementation only provides the title and class for user-defined links). Groups can also have attributes, which can be used to store metadata about the group. Creating Groups --------------- Use :doc:`POST_Group` to create new groups. Initially the new group will have no links and no attributes. Getting information about Groups -------------------------------- Use :doc:`GET_Group` to get information about a group: attribute count, link count, creation and modification times. To retrieve the UUIDs of all the groups in a domain, use :doc:`GET_Groups`. To retrieve the links of a group use :doc:`GET_Links`. Use :doc:`GET_Link` to get information about a specific link. To get a group's attributes, use :doc:`../AttrOps/GET_Attributes`. Updating Links --------------- To create a hard, soft, or external link, use :doc:`PUT_Link`. To delete a link use :doc:`DELETE_Link`. *Note*: deleting a link doesn't delete the object that it refers to. Deleting Groups --------------- Use :doc:`DELETE_Group` to remove a group. All attributes and links of the group will be deleted. *Note:* deleting a group will not delete any objects (datasets or other groups) that the group's links point to. These objects may become *anonymous*, i.e. they are not referenced by any link, but they can still be accessed via a ``GET`` request with the object's UUID. List of Operations ------------------ .. toctree:: :maxdepth: 1 DELETE_Group DELETE_Link GET_Group GET_Groups GET_Link GET_Links POST_Group PUT_Link ================================================ FILE: docs/Hypermedia.rst ================================================ ************************* Hypermedia ************************* h5serv supports the REST convention of **HATEOAS**, or *Hypermedia as the Engine of Application State*. The idea (see http://en.wikipedia.org/wiki/HATEOAS for a full explanation) is that each response includes links to resources related to the requested resource. For example, consider the request for a dataset: ``GET /datasets/<id>``. The response will be a JSON representation of the dataset describing its type, shape, and other aspects. Resources related to the dataset would include: * the dataset's attributes * the dataset's value * the dataset collection of the domain * the root group of the domain the dataset is in * the domain resource So the ``GET /datasets/<id>`` response includes a key ``hrefs`` that contains a JSON array. Each array element has a key ``href`` giving the related resource and a key ``rel`` that denotes the type of relation. Example: .. code-block:: json { "hrefs": [ {"href": "http://tall.test.hdfgroup.org/datasets/<id>", "rel": "self"}, {"href": "http://tall.test.hdfgroup.org/groups/<root_id>", "rel": "root"}, {"href": "http://tall.test.hdfgroup.org/datasets/<id>/attributes", "rel": "attributes"}, {"href": "http://tall.test.hdfgroup.org/datasets/<id>/value", "rel": "data"}, {"href": "http://tall.test.hdfgroup.org/", "rel": "home"} ] } This enables clients to "explore" the server's resources without detailed knowledge of the API.
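To make this concrete, below is a minimal client sketch (not part of the h5serv distribution) that discovers resources by walking the ``hrefs`` arrays rather than hard-coding URL paths. It assumes the ``requests`` package, a local server on the default port, and a file served as ``tall.test.hdfgroup.org``; because the ``href`` values carry the domain name as the URL host, the sketch keeps only the path and passes the domain back via the ``host`` header.

.. code-block:: python

    # Sketch: follow hypermedia links instead of hard-coding URLs.
    # Assumes a local h5serv instance on port 5000 serving a file that maps
    # to the domain "tall.test.hdfgroup.org".
    import requests
    from urllib.parse import urlsplit

    endpoint = "http://127.0.0.1:5000"

    def follow(href):
        # hrefs use the domain name as the URL host; against a local server
        # we request the path on the endpoint and send the domain as "host".
        parts = urlsplit(href)
        rsp = requests.get(endpoint + parts.path,
                           headers={"host": parts.netloc})
        rsp.raise_for_status()
        return rsp.json()

    domain = requests.get(endpoint + "/",
                          headers={"host": "tall.test.hdfgroup.org"}).json()
    root = follow(next(h["href"] for h in domain["hrefs"]
                       if h["rel"] == "root"))
    links = follow(next(h["href"] for h in root["hrefs"]
                        if h["rel"] == "links"))
    for link in links["links"]:
        print(link["title"], link["class"])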
This is the list of relations used in h5serv: * attributes - the attributes of the resource * data - the resources data (used for datasets) * database - the collection of all datasets in the domain * groupbase - the collection of all groups in the domain * home - the domain the resource is a member of * owner - the containing object of this resource (e.g. the group an attribute is a member of) * root - the root group of the domain the resource is a member of * self - this resource * typebase - the collection of all committed types in the domain ================================================ FILE: docs/Installation/ServerSetup.rst ================================================ ################### Installing h5serv ################### You should find h5serv quite easy to setup. The server (based on Python Tornado) is self-contained, so you will not need to setup Apache or other web server software to utilize h5serv. Prerequisites ------------- A computer running a 64-bit version of Windows, Mac OS X, or Linux. You will also need the following Python packages: * Python 2.7 or later * NumPy 1.10.4 or later * h5py 2.5 or later * tornado 4.0.2 or later * watchdog 0.8.3 or later * requests 2.3 or later (for client tests) If you are not familiar with installing Python packages, the easiest route is to use a package manager such as Anaconda (as described below). If you have a git client installed on your system, you can directly download the h5serv source from GitHub: ``git clone --recursive https://github.com/HDFGroup/h5serv.git``. Otherwise, you can download a zip file of the source from GitHub (as described below). Installing on Windows --------------------- Anaconda from Continuum Analytics can be used to easily manage the package dependencies needed for HDF Server. In a browser go to: http://continuum.io/downloads and click the "Windows 64-bit Python 2.7 Graphical Installer" button. Install Anaconda using the default options. Once Anaconda is installed select "Anaconda Command Prompt" from the start menu. In the command window that appears, create a new anaconda environment using the following command: ``conda create -n h5serv python=2.7 h5py tornado requests pytz`` Answer 'y' to the prompt, and the packages will be fetched. In the same command window, run: ``activate h5serv`` Install the watchdog package (this is currently not available through Anaconda): ``pip install watchdog`` Download the hdf5-json project: ``git clone https://github.com/HDFGroup/hdf5-json.git`` . Alternatively, in a browser go to: https://github.com/HDFGroup/hdf5-json and click the "Download ZIP" button (right side of page). Download the zip file and extract to the destination directory of your choice. Next, cd to the hdf5-json folder and run: ``python setup.py install``. Download the h5serv project: ``git clone https://github.com/HDFGroup/h5serv.git`` . Alternatively, download the source zip as described above. Next, in the command window, cd to the folder you extracted the source files to. Run: ``python h5serv`` You should see the output: "Starting event loop on port: 5000". You may then see a security alert: "Windows Firewall has blocked some features of this program". Click "Allow access" with the default option (Private network access). At this point the server is running, waiting on any requests being sent to port 5000. Go to the "verification" section below to try out the service. 
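Before moving on (or jumping ahead to the "Verification" section), a one-request sanity check can confirm the server is answering. The sketch below assumes the ``requests`` package, the default port and base domain, and the bundled ``data/public/tall.h5`` sample file, which should be served under a host name like ``tall.public.hdfgroup.org`` (adjust the host to match a file in your own datapath).

.. code-block:: python

    # Quick sanity check against a freshly started server.
    # Assumes the default port (5000), the default base domain (hdfgroup.org),
    # and the bundled data/public/tall.h5 sample file.
    import requests

    rsp = requests.get("http://127.0.0.1:5000/",
                       headers={"host": "tall.public.hdfgroup.org"})
    print(rsp.status_code)     # expect 200
    print(rsp.json()["root"])  # UUID of the domain's root group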
Installing on Linux/Mac OS X ----------------------------- Anaconda from Continuum Analytics can be used to easily manage the package dependencies needed for HDF Server. In a browser go to: http://continuum.io/downloads and click the "Mac OS X 64-bit Python 2.7 Graphical Installer" button for Mac OS X or: "Linux 64-bit Python 2.7". Install Anaconda using the default options. Once Anaconda is installed, open a new shell and run the following on the command line: ``conda create -n h5serv python=2.7 h5py tornado requests pytz`` Answer 'y' to the prompt, and the packages will be fetched. In the same shell, run: ``source activate h5serv`` Install the watchdog package (this is currently not available through Anaconda): ``pip install watchdog`` Download the hdf5-json project: ``git clone https://github.com/HDFGroup/hdf5-json.git`` . Alternatively, in a browser go to: https://github.com/HDFGroup/hdf5-json and click the "Download ZIP" button (right side of page). Download the zip file and extract to the destination directory of your choice. Next, cd to the hdf5-json folder and run: ``python setup.py install``. Download the h5serv project: ``git clone https://github.com/HDFGroup/h5serv.git`` . Alternatively, download the source zip as described above. Next, in the command window, cd to the folder you extracted the source files to. Run: ``python h5serv`` You should see the output: "Starting event loop on port: 5000". At this point the server is running, waiting on any requests being sent to port 5000. Go to the "verification" section below to try out the service. Verification ------------- To verify that h5serv was installed correctly, you can run the test suite included with the installation. Open a new shell (on Windows, run "Anaconda Command Prompt" from the start menu). In this shell, run the following commands: * source activate h5serv (just: activate h5serv on Windows) * cd <h5serv installation directory> * cd test * python testall.py All tests should report OK. Server Configuration -------------------- The file ``h5serv/config.py`` provides several configuration options that can be used to customize h5serv. Each of the options can be changed by: * Changing the value in the config.py file and re-starting the service. * Passing a command line option to ``h5serv`` on startup. E.g. ``python h5serv --port=7253`` * Setting an environment variable with the option name in upper case. E.g. ``export PORT=5000; python h5serv`` The config options are: port ^^^^ The port that h5serv will listen on. Change this if 5000 conflicts with another service. Default: 5000 debug ^^^^^ If ``True`` the server will report debug info (e.g. a stack trace) to the requester on error. If ``False``, just the status code and message will be reported. Default: ``True`` datapath ^^^^^^^^ A path indicating the directory where HDF5 files will be stored. *Note*: Any HDF5 file content that you put in this directory will be exposed via the server's REST API (unless the domain's ACL is configured to prevent public access, see: :doc:`../AclOps`). Default: ``../data/`` public_dir ^^^^^^^^^^ A list of directories under datapath which will be visible to any authenticated user's request. Default: ``['public', 'test']`` domain ^^^^^^ The base DNS path for domain access (see the comment under the hdf5_ext config option). Default: ``hdfgroup.org`` hdf5_ext ^^^^^^^^ The extension to assume for HDF5 files. The REST requests don't assume an extension, so a request such as: ..
code-block:: http GET / HOST: tall.data.hdfgroup.org Translates to: "Get the file tall.h5 in the directory given by datapath". Default: ``.h5`` toc_name ^^^^^^^^ Name of the auto-generated HDF5 file that provides a "Table Of Contents" list of all HDF5 files in the datapath directory and sub-directories. Default: ``.toc.h5`` home_dir ^^^^^^^^ A directory under datapath that will be the parent directory of user home directories. For example, if ``datapath`` is ``../data`` and ``home_dir`` is ``home``, an authenticated ``GET /`` request for userid ``knuth`` would return a list of files in the directory: ``../data/home/knuth``. Default: ``home`` ssl_port ^^^^^^^^ The SSL port the server will listen on for HTTPS requests. Default: 6050 ssl_cert ^^^^^^^^ Location of the SSL cert. default: ssl_key ^^^^^^^ The SSL key. default: ssl_cert_pwd ^^^^^^^^^^^^ The SSL cert password. default: password_uri ^^^^^^^^^^^^ Resource path to be used for user authentication. Currently two methods are supported: HDF5 Password file: An HDF5 file that contains userids and (encrypted) passwords. See: :doc:`../AdminTools`. In this case the password_uri config is a path to the password file. MongoDB: A MongoDB database that contains a "users" collection of userids and passwords. In this case the password_uri would be of the form: ``mongodb://<host>:<port>`` where ``<host>`` is the IP address of the host running the mongo database and ``<port>`` is the port of the mongo database (typically 27017). default: ``../util/admin/passwd.h5`` mongo_dbname ^^^^^^^^^^^^ Mongo database name used for MongoDB-based authentication as described above. default: ``hdfdevtest`` static_url ^^^^^^^^^^ URI path that will be used to map any static HTML content to be displayed by the server. default: ``/views/(.*)`` static_path ^^^^^^^^^^^ File path for files (i.e. regular HTML files) to be hosted statically. default: ``../static`` cors_domain ^^^^^^^^^^^ Domains to allow for CORS (cross-origin resource sharing). Use ``*`` to allow any domain, None to disallow. default: ``*`` log_file ^^^^^^^^ File path for server log files. Set to None to have log output go to standard out. log_level ^^^^^^^^^ Verbosity level for logging. One of: ``ERROR, WARNING, INFO, DEBUG, NOTSET``. default: ``INFO`` background_timeout ^^^^^^^^^^^^^^^^^^ Time interval in milliseconds to check for updates in the datapath folder (e.g. a file that is added through some external process). Set to 0 to disable background processing. default: 1000 Data files ---------- Copy any HDF5 files you would like exposed by the service to the datapath directory (h5serv/data). If you do not wish to have the files updatable by the service, make the files read-only. On the first request to the service, a Table of Contents (TOC) file will be generated which will contain links to all HDF5 files in the data folder (and sub-folders). *Note:* Do not modify files once they have been placed in the datapath directory. h5serv inventories new files on first access, but won't see some changes (e.g. a new group being created) made to the file outside the REST API. *Note:* HDF5 files that are newly created in (or copied into) the datapath directory will be "noticed" by the service and added to the TOC. ================================================ FILE: docs/Installation/index.rst ================================================ ################### Installation ################### ..
toctree:: :maxdepth: 2 ServerSetup ================================================ FILE: docs/Introduction/index.rst ================================================ ################### Introduction ################### h5serv is a web service that can be used to send and receive HDF5 data. h5serv uses a REST interface to support CRUD (create, read, update, delete) operations on the full spectrum of HDF5 objects including: groups, links, datasets, attributes, and committed data types. As a REST-based service a variety of clients can be developed in JavaScript, Python, C, and other common languages. ================================================ FILE: docs/License/index.rst ================================================ ####################### License and Legal Info ####################### Copyright Notice and License Terms for h5serv Software Service, Libraries and Utilities --------------------------------------------------------------------------------------- h5serv (HDF5 REST Server) Service, Libraries and Utilities Copyright (c) |copyright| All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted for any purpose (including commercial purposes) provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions, and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions, and the following disclaimer in the documentation and/or materials provided with the distribution. 3. In addition, redistributions of modified forms of the source or binary code must carry prominent notices stating that the original code was changed and the date of the change. 4. All publications or advertising materials mentioning features or use of this software are asked, but not required, to acknowledge that it was developed by The HDF Group and credit the contributors. 5. Neither the name of The HDF Group, nor the name of any Contributor may be used to endorse or promote products derived from this software without specific prior written permission from The HDF Group or the Contributor, respectively. DISCLAIMER: THIS SOFTWARE IS PROVIDED BY THE HDF GROUP AND THE CONTRIBUTORS "AS IS" WITH NO WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED. In no event shall The HDF Group or the Contributors be liable for any damages suffered by the users arising out of the use of this software, even if advised of the possibility of such damage. ================================================ FILE: docs/Makefile ================================================ # Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
# the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/h5serv.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/h5serv.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/h5serv" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/h5serv" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." 
latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." ================================================ FILE: docs/Reference.rst ================================================ ################### Reference ################### .. 
toctree:: :maxdepth: 2 Authorization CommonRequestHeaders CommonResponseHeaders CommonErrorResponses Diagram Hypermedia Resources UsingIteration ================================================ FILE: docs/Resources.rst ================================================ ************** Resource List ************** List of Resources ================= +----------------+------+------+------+--------+-----------------------------------------+ | Resource | GET | PUT | POST | DELETE | Description | +================+======+======+======+========+=========================================+ | Domain | Y | Y | N | Y | A related collection of HDF objects | +----------------+------+------+------+--------+-----------------------------------------+ | Group | Y | N | N | Y | Represents an HDF Group | +----------------+------+------+------+--------+-----------------------------------------+ | Links | Y | N | N | N | Collection of links within a group | +----------------+------+------+------+--------+-----------------------------------------+ | Link | Y | Y | N | Y | Represents an HDF link | +----------------+------+------+------+--------+-----------------------------------------+ | Dataset | Y | N | N | Y | Represents an HDF Dataset | +----------------+------+------+------+--------+-----------------------------------------+ | Attributes | Y | N | N | N | Collection of Attributes | +----------------+------+------+------+--------+-----------------------------------------+ | Attribute | Y | Y | N | Y | Represents an HDF Attribute | +----------------+------+------+------+--------+-----------------------------------------+ | Dataspace | Y | Y | N | N | Shape of a dataset | +----------------+------+------+------+--------+-----------------------------------------+ | Type | Y | N | N | N | Type of a dataset | +----------------+------+------+------+--------+-----------------------------------------+ | Value | Y | Y | Y | N | Data values of a datset | +----------------+------+------+------+--------+-----------------------------------------+ | Datatype | Y | N | N | Y | Committed datatype | +----------------+------+------+------+--------+-----------------------------------------+ | Groups | Y | N | Y | N | Collection of groups within a domain | +----------------+------+------+------+--------+-----------------------------------------+ | Datasets | Y | N | Y | N | Collection of datasets within a domain | +----------------+------+------+------+--------+-----------------------------------------+ | Datatypes | Y | N | Y | N | Collection of datatypes within a domain | +----------------+------+------+------+--------+-----------------------------------------+ ================================================ FILE: docs/Tutorials/IPython_samples.rst ================================================ ################### Samples ################### TBD: Some walkthroughs here. ================================================ FILE: docs/Tutorials/index.rst ================================================ ################### Tutorials ################### .. toctree:: :maxdepth: 2 IPython_samples ================================================ FILE: docs/Types/index.rst ================================================ #################### Types #################### The h5serv REST API supports the rich type capabilities provided by HDF. Types are are described in JSON and these JSON descriptions are used in operations involving datasets, attributes, and committed types. 
There is not a separate request for creating types; rather, the description of the type is included with the request to create the dataset, attribute, or committed type. Once a type is created it is immutable and will exist until the containing object is deleted. Type information is returned as a JSON object in dataset, attribute, or committed type GET requests (under the type key). Predefined Types ================ Predefined types are base integer and floating point types that are identified via one of the following strings: * ``H5T_STD_U8{LE|BE}``: a one byte unsigned integer * ``H5T_STD_I8{LE|BE}``: a one byte signed integer * ``H5T_STD_U16{LE|BE}``: a two byte unsigned integer * ``H5T_STD_I16{LE|BE}``: a two byte signed integer * ``H5T_STD_U32{LE|BE}``: a four byte unsigned integer * ``H5T_STD_I32{LE|BE}``: a four byte signed integer * ``H5T_STD_U64{LE|BE}``: an eight byte unsigned integer * ``H5T_STD_I64{LE|BE}``: an eight byte signed integer * ``H5T_IEEE_F32{LE|BE}``: a four byte floating-point value * ``H5T_IEEE_F64{LE|BE}``: an eight byte floating-point value Predefined types ending in "LE" are little-endian formatted and types ending in "BE" are big-endian. E.g. ``H5T_STD_I64LE`` would be an eight byte, signed, little-endian integer. *Note:* little vs. big endian are used to specify the byte ordering in the server storage system and are not reflected in the JSON representation of the values. Example ------- JSON representation of an attribute with an ``H5T_STD_I8LE`` (signed, one byte) type: .. code-block:: json { "name": "attr1", "shape": { "class": "H5S_SIMPLE", "dims": [27] }, "type": { "class": "H5T_INTEGER", "base": "H5T_STD_I8LE" }, "value": [49, 115, 116, 32, 97, 116, 116, 114, 105, 98, 117, 116, 101, 32, 111, 102, 32, 100, 115, 101, 116, 49, 46, 49, 46, 49, 0] } String Types - Fixed Length ============================ Fixed length strings have a specified length (supplied when the object is created) that is used for each data element. Any values that are assigned that exceed that length will be truncated. To specify a fixed length string, create a JSON object with class, charSet, strPad, and length keys (see definitions of these keys below). *Note:* Currently only the ASCII character set is supported. *Note:* Fixed width unicode strings are not currently supported. *Note:* String padding other than "H5T_STR_NULLPAD" will get stored as "H5T_STR_NULLPAD". Example ------- JSON representation of a dataset using a fixed width string of 40 characters: .. code-block:: json { "id": "1e8a359c-ac46-11e4-9f3e-3c15c2da029e", "shape": { "class": "H5S_SCALAR" }, "type": { "class": "H5T_STRING", "charSet": "H5T_CSET_ASCII", "strPad": "H5T_STR_NULLPAD", "length": 40 }, "value": "Hello, World!" } String Types - Variable Length ============================== Variable length strings allow each element of an array to only use as much storage as needed. This is convenient when the maximum string length is not known beforehand, or when there is a great deal of variability in the lengths of strings. *Note:* Typically there is a slight performance penalty in accessing variable length string elements of an array in the server. To specify a variable length string, create a JSON object with class, charSet, strPad, and length keys (see definitions of these keys below) where the value of "length" is: ``H5T_VARIABLE``. *Note:* Currently only the ASCII character set is supported. *Note:* Variable width unicode strings are not currently supported.
*Note:* String Padding other than "H5T_STR_NULLTERM" will get stored as "H5T_STR_NULLTERM" Example ------- JSON representation of a attribute using a variable length string: .. code-block:: json { "name": "A1", "shape": { "class": "H5S_SIMPLE", "dims": [4] }, "type": { "class": "H5T_STRING", "charSet": "H5T_CSET_ASCII", "strPad": "H5T_STR_NULLTERM", "length": "H5T_VARIABLE" }, "value": [ "Hypermedia", "as the", "engine", "of state." ] } Compound Types ============== For some types of data it makes sense to store sets of related items together rather than in separate datasets or attributes. For these use cases a compound datatype can be defined. A compound datatype has class: ``H5T_COMPOUND`` and a field key which contains an array of sub-types. Each of these sub-types can be a primitive type, a string, or another compound type. Each sub-type has a name that can be used to refer to the element. *Note:* The field names are not shown in the representation of an dataset or attribute's values. Example ------- JSON representation of a scalar attribute with a compound type that consists of two floating point elements: .. code-block:: json { "name": "mycomplex", "shape": { "class": "H5S_SCALAR" }, "type": { "class": "H5T_COMPOUND", "fields": [ { "name": "real_part", "type": { "base": "H5T_IEEE_F64LE", "class": "H5T_FLOAT" } }, { "name": "imaginary_part", "type": { "base": "H5T_IEEE_F64LE", "class": "H5T_FLOAT" } } ] }, "value": [ 1.2345, -2.468 ] } Enumerated Types ================= Enumerated types enable the integer values of a dataset or attribute to be mapped to a set of strings. This allows the semantic meaning of a given set of values to be described along with the data. To specify an enumerated type, use the class ``H5T_ENUM``, provide a base type (must be some form of integer), and a "mapping" key that list strings with their associated numeric values. Example ------- .. code-block:: json { "id": "1e8a359c-ac46-11e4-9f3e-3c15c2da029e", "shape": { "class": "H5S_SIMPLE", "dims": [ 7 ] }, "type": { "class": "H5T_ENUM", "base": { "class": "H5T_INTEGER", "base": "H5T_STD_I16BE" }, "mapping": { "GAS": 2, "LIQUID": 1, "PLASMA": 3, "SOLID": 0 } }, "value": [ 0, 2, 3, 2, 0, 1, 1 ] } Array Types =========== Array types are used when it is desired for each element of a attribute or dataset to itself be a (typically small) array. To specify an array type, use the class ``H5T_ARRAY`` and provide the dimensions of the array with the type. Use the "base" key to specify the type of the elements of the array type. Example ------- A dataset with 3 elements, each of which is a 2x2 array of integers. .. code-block:: json { "id": "9348ad51-7bf7-11e4-a66f-3c15c2da029e", "shape": { "class": "H5S_SIMPLE", "dims": [ 3 ] }, "type": { "class": "H5T_ARRAY", "base": { "class": "H5T_INTEGER", "base": "H5T_STD_I16BE" }, "dims": [ 2, 2 ] }, "value": [ [ [1, 2], [3, 4] ], [ [2, 1], [4, 3] ], [ [1, 1], [4, 4] ] ] } Opaque Types ============= TBD Example ------- TBD Object Reference Types ====================== An object reference type enables you to define an array where each element of the array is a reference to another dataset, group or committed datatype. To specify a object reference type, use ``H5T_REFERENCE`` as the type class, and ``H5T_STD_REF_OBJ`` as the base type. The elements of the array consist of strings that have the prefix: "groups/", "datasets/", or "datatypes/" followed by the UUID of the referenced object. 
Example ------- A JSON representation of an attribute that consist of a 3 element array of object references. The first element points to a group, the second element is null, and the third element points to a group. .. code-block:: json { "name": "objref_attr", "shape": { "class": "H5S_SIMPLE", "dims": [ 3 ] }, "type": { "class": "H5T_REFERENCE", "base": "H5T_STD_REF_OBJ" }, "value": [ "groups/a09a9b99-7bf7-11e4-aa4b-3c15c2da029e", "", "datasets/a09a8efa-7bf7-11e4-9fb6-3c15c2da029e" ] } Region Reference Types ====================== A region reference types allows the creation of attributes or datasets where each array element references a section (point selection or hyperslab) of another dataset. To specify a region reference type, use ``H5T_REFERENCE`` as the type class, and ``H5T_STD_REF_DSETREG`` as the base type. *Note:* When writing values to the dataset, each element of the dataset must be a JSON object with keys: "id", "select_type", and "selection" (as in the example below). Example ------- A JSON representation of a region reference dataset with two elements. The first element is a point selection element that references 4 elements in the dataset with UUID of "68ee967a-...". The second element is a hyperslab selection that references 4 hyper-slabs in the same dataset as the first element. Each element is a pair of points that gives the boundary of the selection. .. code-block:: json { "id": "68ee8647-7bed-11e4-9397-3c15c2da029e", "shape": { "class": "H5S_SIMPLE", "dims": [2] }, "type": { "class": "H5T_REFERENCE", "base": "H5T_STD_REF_DSETREG" }, "value": [ { "id": "68ee967a-7bed-11e4-819c-3c15c2da029e", "select_type": "H5S_SEL_POINTS", "selection": [ [0, 1], [2, 11], [1, 0], [2, 4] ] }, { "id": "68ee967a-7bed-11e4-819c-3c15c2da029e", "select_type": "H5S_SEL_HYPERSLABS", "selection": [ [ [0, 0], [0, 2] ], [ [0, 11], [0, 13] ], [ [2, 0], [2, 2] ], [ [2, 11], [2, 13] ] ] } ] } Type Keys ========= Information on the JSON keys used in type specifications. class ----- The type class. One of: * ``H5T_INTEGER``: an integer type * ``H5T_FLOAT``: a floating point type * ``H5T_STRING``: a string type * ``H5T_OPAQUE``: an opaque type * ``H5T_COMPOUND``: a compound type * ``H5T_ARRAY``: an array type * ``H5T_ENUM``: an enum type * ``H5T_REFERENCE``: a reference type base ---- A string that gives the base predefined type used (or reference type for the reference class). order ----- The byte ordering. One of: * ``H5T_NONE``: Ordering is not relevant (e.g. for string types) * ``H5T_ORDER_LE``: Little endian ordering (e.g. native ordering for x86 computers) * ``H5T_ORDER_BE``: Big endian ordering charSet ------- Character set for strings. Currently only ``H5T_CSET_ASCII`` is supported. strPad ------- Defines how fixed length strings are padded. One of: * ``H5T_STR_NULLPAD``: String is padded with nulls * ``H5T_STR_NULLTERM``: String is null terminated * ``H5T_STR_SPACEPAD``: String is padded with spaces length -------- Defines the string length. Either a positive integer or the string: ``H5T_VARIABLE``. name ---- The field name for compound types. mapping ------- The enum name for enum types. select_type ----------- The selection type for reference types. One of: * ``H5S_SEL_POINTS``: selection is a series of points * ``H5S_SEL_HYPERSLABS``: selection is a series of hyper-slabs. 
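Since types are always supplied as part of another request, the sketch below shows how the keys above fit into a dataset-creation call. It is only an illustration (using the ``requests`` package against a local server): the endpoint, domain name, and field names are placeholders, and the target domain must already exist and be writable by the requester.

.. code-block:: python

    # Sketch: embed a compound type description in a POST /datasets request.
    # The endpoint, host, and field names below are illustrative only.
    import json
    import requests

    endpoint = "http://127.0.0.1:5000"
    headers = {"host": "mydata.hdfgroup.org"}  # an existing, writable domain

    payload = {
        "type": {
            "class": "H5T_COMPOUND",
            "fields": [
                {"name": "temp",
                 "type": {"class": "H5T_FLOAT", "base": "H5T_IEEE_F32LE"}},
                {"name": "location",
                 "type": {"class": "H5T_STRING",
                          "charSet": "H5T_CSET_ASCII",
                          "strPad": "H5T_STR_NULLPAD",
                          "length": 16}},
            ],
        },
        "shape": [10],
    }
    rsp = requests.post(endpoint + "/datasets", headers=headers,
                        data=json.dumps(payload))
    print(rsp.status_code, rsp.json().get("id"))  # expect 201 and the new UUID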
Related Resources ================= * :doc:`../DatasetOps/GET_Dataset` * :doc:`../DatasetOps/GET_DatasetType` * :doc:`../DatasetOps/POST_Dataset` * :doc:`../AttrOps/GET_Attribute` * :doc:`../AttrOps/PUT_Attribute` * :doc:`../DatatypeOps/GET_Datatype` * :doc:`../DatatypeOps/POST_Datatype` ================================================ FILE: docs/UsingIteration.rst ================================================ *************** Using Iteration *************** There are some operations that may return an arbitrarily large list of results. For example: ``GET /groups/<id>/attributes`` returns all the attributes of the group object with the given id. It's possible (if not common in practice) that the group may contain hundreds or more attributes. If you want to retrieve the list of attributes in batches (say you are developing a user interface that has a "get next page" style button), you can use iteration. This is accomplished by adding a query parameter to the request that limits the number of items returned and a marker parameter that identifies where the iteration should start. Let's flesh out our example by supposing the group with UUID ``<id>`` has 1000 attributes named "a0000", "a0001", and so on. If we'd like to retrieve just the first 100 attributes, we can add a limit value to the request like so: ``GET /groups/<id>/attributes?Limit=100`` Now the response will return attributes "a0000", "a0001", through "a0099". To get the next hundred, use the URL-encoded name of the last attribute received as the marker value for the next request: ``GET /groups/<id>/attributes?Limit=100&Marker="a0099"`` This request will return attributes "a0100", "a0101", through "a0199". Repeat this pattern until fewer than the limit number of items are returned. This indicates that you've completed the iteration through all elements of the group. Iteration is also supported for links in a group, and for the groups, datasets, and datatypes collections. Related Resources ================= * :doc:`AttrOps/GET_Attributes` * :doc:`GroupOps/GET_Groups` * :doc:`GroupOps/GET_Links` * :doc:`DatasetOps/GET_Datasets` * :doc:`DatatypeOps/GET_Datatypes` ================================================ FILE: docs/Utilities.rst ================================================ ################### Utilities ################### The h5serv distribution includes the following utility scripts. These are all located in the ``util`` directory. dumpobjdb.py ------------ This script prints all the UUIDs in an h5serv data file. Usage: ``python dumpobjdb.py <hdf5_file>`` where *hdf5_file* is a file from the h5serv data directory. Output is a list of all UUIDs and the path to each associated object. exportjson.py ------------- This script makes a series of REST requests to the desired h5serv endpoint and constructs a JSON file representing the domain's contents.
Usage: ``python exportjson.py [-v] [-D|-d] [-endpoint=<server_ip>] [-port=<port>] <domain>`` Options: * ``-v``: verbose, print request and response codes from server * ``-D``: suppress all data output * ``-d``: suppress data output for datasets (but not attributes) * ``-endpoint``: specify IP endpoint of server * ``-port``: port address of server [default 7253] Example - get 'tall' collection from HDF Group server: ``python exportjson.py tall.data.hdfgroup.org`` Example - get 'tall' collection from a local server instance (assuming the server is using port 5000): ``python exportjson.py -endpoint=127.0.0.1 -port=5000 tall.test.hdfgroup.org`` exporth5.py ----------- This script makes a series of REST requests to the desired h5serv endpoint and constructs an HDF5 file representing the domain's contents. Usage: ``python exporth5.py [-v] [-endpoint=<server_ip>] [-port=<port>] <domain> <filename>`` Options: * ``-v``: verbose, print request and response codes from server * ``-endpoint``: specify IP endpoint of server * ``-port``: port address of server [default 7253] Example - get 'tall' collection from HDF Group server, save to tall.h5: ``python exporth5.py tall.data.hdfgroup.org tall.h5`` Example - get 'tall' collection from a local server instance (assuming the server is using port 5000): ``python exporth5.py -endpoint=127.0.0.1 -port=5000 tall.test.hdfgroup.org tall.h5`` The following two utilities are located in the hdf5-json submodule: hdf5-json/util. jsontoh5.py ----------- Converts a JSON representation of an HDF5 file to an HDF5 file. Usage: ``jsontoh5.py [-h] <json_file> <h5_file>`` where <json_file> is the input .json file and <h5_file> is the output file (will be created by the script). Options: * ``-h``: prints help message h5tojson.py ----------- This script converts the given HDF5 file to a JSON representation of the file. Usage: ``python h5tojson.py [-h] [-D|-d] <hdf5_file>`` Output is a file with the HDF5 file's base name and the extension ``.json``. Options: * ``-h``: prints help message * ``-D``: suppress all data output * ``-d``: suppress data output for datasets (but not attributes) ================================================ FILE: docs/WhatsNew/index.rst ================================================ ################### What's New ################### h5serv 1.1 ----------- Significant features: * Support was added for http over SSL (https) * Support was added for authentication and simple user management * Access Control List (ACL) - Enables access to HDF objects to be controlled for specific users h5serv 1.0 ---------- This is the first release of h5serv. Significant features: * An implementation of the REST API as outlined in the RESTful HDF5 paper: http://www.hdfgroup.org/pubs/papers/RESTful_HDF5.pdf * A simple DNS Server that maps DNS domains to HDF5 collections (see: https://github.com/HDFGroup/dynamic-dns) * Utilities to convert native HDF5 files to HDF5-JSON and HDF5-JSON to HDF5 * UUID and timestamp extensions for HDF5 datasets, groups, and committed data types ================================================ FILE: docs/_static/README ================================================ static files ================================================ FILE: docs/build.sh ================================================ sphinx-build -b html . _build ================================================ FILE: docs/conf.py ================================================ # -*- coding: utf-8 -*- # # h5serv documentation build configuration file, created by # sphinx-quickstart on Mon Nov 3 22:14:28 2014. # # This file is execfile()d with the current directory set to its # containing dir.
# # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import six import sys import os from datetime import datetime #sys.path.append(os.path.abspath("sphinx_ext")) # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. #extensions = ['JSONLexer'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'h5serv' if six.PY3: copyright = str(datetime.now().year) + ', The HDF Group' else: copyright = unicode(datetime.now().year) + u', The HDF Group' rst_epilog = '.. |copyright| replace:: %s' % copyright # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.1' # The full version, including alpha/beta/rc tags. release = '0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. 
If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'h5servdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'h5serv.tex', u'h5serv Documentation', u'HDFGroup', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. 
List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'h5serv', u'h5serv Documentation', [u'HDFGroup'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'h5serv', u'h5serv Documentation', u'HDFGroup', 'h5serv', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False ================================================ FILE: docs/index.rst ================================================ .. h5serv documentation master file, created by sphinx-quickstart on Fri Oct 24 14:51:58 2014. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. h5serv Developer Documentation ================================== This is the developer documentation for h5serv, a WebService for HDF5 content. Contents: .. toctree:: :maxdepth: 2 Introduction/index Installation/index DomainOps/index GroupOps/index DatasetOps/index DatatypeOps/index AttrOps/index Types/index AclOps/index Reference Utilities AdminTools WhatsNew/index Tutorials/index FAQ/index License/index Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` ================================================ FILE: docs/make.bat ================================================ @ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\h5serv.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\h5serv.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 
goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end ================================================ FILE: entrypoint.sh ================================================ #!/bin/bash # entrypoint for Docker container cd /usr/local/src/h5serv python h5serv --datapath=/data --log_file= ================================================ FILE: examples/h5pyd_ex1.ipynb ================================================ { "cells": [ { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "%matplotlib inline\n", "import h5pyd\n", "import numpy as np" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "# get handle to domain object\n", "f = h5pyd.File(\"craterlake.test.hdfgroup.org\", \"r\", endpoint=\"http://127.0.0.1:5000\")\n", "# this is the root group uuid\n", "f.id.uuid" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "dset = f['/Data']\n", "dset.id.uuid\n", "print \"name:\", dset.name\n", "print \"shape:\", dset.shape\n", "print \"type:\", dset.dtype" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "#plot the data\n", "ndarr = dset[...]\n", "import matplotlib.pyplot as plt\n", "plt.imshow(ndarr)\n", "plt.set_cmap('spectral')\n", "plt.colorbar()\n", "plt.show()\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "# zoom in on wizard island\n", "ndarr = dset[140:180,90:130]\n", "plt.imshow(ndarr)\n", "plt.set_cmap('spectral')\n", "plt.colorbar()\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "g = h5pyd.File(\"nanex.test.hdfgroup.org\", \"r\", endpoint=\"http://127.0.0.1:5000\")\n", "dset = g['/Nanex/OKey']\n", "print \"shape:\", dset.shape\n", "print \"type:\", dset.dtype" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "aapl = dset.read_where('RootSymbol == \"AAPL\"') #get all rows where the symbol is AAPL" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "aapl.shape" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": false }, "outputs": [], "source": [ "aapl[0:5] # first 5 rows" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, 
"outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 2", "language": "python", "name": "python2" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 2 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython2", "version": "2.7.9" } }, "nbformat": 4, "nbformat_minor": 0 } ================================================ FILE: examples/h5pyd_ex2.ipynb ================================================ { "cells": [ { "cell_type": "code", "execution_count": 18, "metadata": { "collapsed": false }, "outputs": [], "source": [ "%matplotlib inline\n", "import h5pyd\n", "import numpy as np" ] }, { "cell_type": "code", "execution_count": 19, "metadata": { "collapsed": false }, "outputs": [ { "data": { "text/plain": [ "u'39400ce6-263e-11e5-bd87-3c15c2da029e'" ] }, "execution_count": 19, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# get handle to domain object\n", "f = h5pyd.File(\"Land_and_Ocean_LatLong1.test.hdfgroup.org\", \"r\", endpoint=\"http://127.0.0.1:5000\")\n", "# this is the root group uuid\n", "f.id.uuid" ] }, { "cell_type": "code", "execution_count": 20, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "datasets: [u'longitude', u'latitude', u'land_mask', u'temperature', u'month_number', u'climatology', u'time']\n" ] } ], "source": [ "# print name of all the top-level items\n", "print \"datasets:\", f.keys()\n" ] }, { "cell_type": "code", "execution_count": 21, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "temperatures shape: [1980, 180, 360]\n" ] } ], "source": [ "# get the 'tempaerature' dataset (doesn't retrieve data)\n", "temp = f['/temperature']\n", "print \"temperatures shape:\", temp.shape" ] }, { "cell_type": "code", "execution_count": 22, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "(1, 180, 360)\n", "(180, 360)\n" ] } ], "source": [ "# retreive one slice from the dataset\n", "one_slice = temp[16,:,:]\n", "print one_slice.shape\n", "one_slice = np.squeeze(one_slice) # convert to 2d\n", "print one_slice.shape" ] }, { "cell_type": "code", "execution_count": 23, "metadata": { "collapsed": false }, "outputs": [ { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAVwAAADtCAYAAADz981IAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXm4JVdV9//ZNZzhzj3cHtPpEDInZGIKYQqDgiGRSRBE\nRBEcf87gCwgSFBAUeFVUkJ8iEEVFQFCmIIZAAkhCZtJJOkPP6eF2953PWFX7/WPtVbXP6dvJpfvS\njZ1az1PPmepU7dq193d/13etvctYaymttNJKK+2Hb8HxLkBppZVW2qPFSsAtrbTSSjtGVgJuaaWV\nVtoxshJwSyuttNKOkZWAW1pppZV2jKwE3NJKK620Y2TR8S5AaaWVVtoP04wxP1Duq7XW/LDKUgJu\naaWVdsLb9772tUXt94TnPveHWo4ScEsrrbQT36amjncJgBJwSyuttEeDlYBbWmmllXaMbOvW410C\noATc0kor7dFgJcMtrbTSSjtGVgJuaaWVVtoxsqMEXGNMCHwP2GmtvfJIj1MCbmmllXbi29Ez3N8C\nNgHDR3OQEnBLK620E9+OAnCNMScBlwPvAn73aIpRAm5ppZV24tvRMdz/C7wRGDnaYpSAW1pppZ34\ntnPnEf3NGHMFsM9ae6sx5rKjLUYJuKWVVtoJb4dbpet7bnsYuxT4SWPM5UANGDHGfMJa+3NHUg5T\nPtOstNJKO5HNGGNvXeS+F3H4xWuMMc8E3lBmKZRWWmmlPYwt4fJfR8VQS4ZbWmmlndBmjLF3LHLf\n8ymXZyyttNJKOyr7oSHoD2gl4JZWWmknvJWAW1pppZV2jCw83gVwVgJuaaWVdsJbyXBLK6200o6R\n/ag8LbcE3NJKK+2Et5LhllZaaaUdIysBt7TSSivtGFkpKZRWWmmlHSMrsxRKK6200o6Rxce7AM5K\nwC2ttNJOeCsZbmmllVbaMbIScEsrrbTSjpGVQbPSSiuttGNkJcMtrbTSSjtGVgJuaaWVVtoxsjJL\nobTSSivtGFnJcEsrrbTSjpGVQbPSSiuttGNkpr7IHZt9/zOmBnwDqAIV4PPW2jcfaTlKwC2ttNJO\nfBta5H59gGutbRljnmWtbRhjIuAGY8zTrLU3HEkxSsAtrbTSTnxbLOBOHPqVtbbh3lYQOfjgkRaj\nBNzSSivtxLfFAu4CZowJgFuAxwIfstZuOtJjlYBbWmmlnfi2ZpH73XnoV9baDLjQGDMKXGOMucxa\ne92RFKME3NJKK+3Et8Mw3Osm4Lr9izuEtXbaGPNF4AnAdUdSDGOtPZL/lVZaaaX9rzBjjLWvXuS+\nV4O1Nn9AhDFmJZBYa6eMMXXgGuAd1tr/PpKylAy3tNJKO/HtyDXctcDHnY4bAFcfKdhCCbillVba\no8GOEHCttXcCFy9VMUrALa200k58O4oshaW0Yw64xphSNC6ttNIWbb6mesT2aAVcZ++w1l51nM69\noBljrjrWZTK/aT7PKk7Nv1jPeUAx8fvfgRcf7s+AdduRWOZe/f8vZsL554AXLXCcRzrPIx07oPdZ\n1gvtbx5mn08DL1lkWRZr6Q+4v9oEW+gwz1dYxfPZxwx77Z/a5x7h0ZbUjkc7fyR7uDItGUEbW5Kj\nHLWVksIxMrPenJN/eCd3AXARDw9EFtfpTfHZt9R9YRY4zsNxAv3Nep8Pt79/TuO96qbLMCk49QPn\nw4GoDhha/qzv+8OVbaHjQO+SUH6dhAvs65d5ofP6/8t4+IHN/78BVvEYAMaBjawCzjNXe8DxTT7K\nl3k/ALPst9N238McvbSlsB8RpPsRKcaJaWbILOO5/CYWeCdXAb2d2vLwrCvT38PDdPikF+B8YPJB\nQtmw/h70vfab7Xvv/1eP7Z/PPMymFvT9xy9D/zX0X6sPnj6z97cYWV7EP5+WIfXKrHXafw6/rvrZ\n/0IDiJbJr8N+Nh8ik0H7j/FjvJbn8loA7uV683JTRL2v4W/stF1ggmlpR2U/Ikh3vIpx3XE678PZ\ndUtxEDNm1vIGvgrAW6mwmjPyH31wPZyrrZ8tcDbSabNk4ZP5/+0Bb72t7n/KRH0QfKQFQn132ge8\n8xBg6wdO/dw/gGT0ApAPsP5/A29/Lav+J/T2U9af0guaFwN1CqbslyOgd9DoH4j861zI/EFH98/o\nBWf/XGqPd2Wy3m/957iYp3MxT8/P/zx+1nzYtAB4Bz9pd9sthynVkdp1S3y8pbDrfuhn+BEB3GM+\n8cEYY5dEBP8RMDNkljPKGoYZ5//0NZqFmFJmwPbRJhN4ndShkdHWkYHNgA6HIoM91PUO+nbpd7Hh\n8Kz2cLYQSPczUt36QU2BL/X28QHUIh0hdceseL/H3jX5QJ16x9Stv7xaL7pvBhjjyRdWzut7Gk2g\nHUESQDeFNC3KEbpy+gNX0PfeHzx034WYvm86aOggo+fR4yZuy4A38BPsZXtPmfXt1iOf2/+jbkuB\nF8YYaz+zyH1fukRBusMdvwTcxZupmjov4Pfzxn4qT+JxXL6g5ujrfymQRhQ9sVvsYAblvXGIYyoQ\njFD4wqn8L1wuQGwN2A4kW4F5tzXA2IX108VquwvJEtDLLn1Q6ZcA+pluArTdpSb0gpP+P5JLZggY\nBQaAGgXYZn3HShBgbFKAaeLO0QWSCthBsENy4LQDyQFIW+7+BDKARVlxbr0dXQPdlRBtADMMYQyR\nlRNn89DZD3YXVDMpp97OOoeycB0g/IGiv958puwzcq2f1PuPMnSf3afedx9zcpV/DgP2w/Yd/C+3\nJQPczy9y3xeWgHtczfypKZazCAhZx9k92qGys4iCgXUoOkzOSAIwAw5MHbUzsQNckF7iLcZpU7Bt\nCMdh4CVgViA9OIHkPmh+BbJdQAuMJx0sRk+Fw0sZCzHloG+/he6e6qJaJwm9oGhjGSz0mHQEXAeA\nEQR0Awrw7AANoBUC64BVBRtMJiDrgG05jyGGYCVEZ0J0OpiaG5y6kE1BNgl2FmwTkt2QboNwTs6X\nAGYNRKeAGZUTaN1nE5DNuvO0IWxC3crgMIKAbcW75q5XRxUKtuoDsl9fum+/PNK/D/R6BXjvtX47\nbvP338r3e+7XAlKL/Sn7uAXO/CNjSwa4X1zkvi8oAfeHamaDlz0A8Dr+hjN5Zk8Dz7xXtbTv+/yK\njJMEQichDEC4SliTfh+MQDAk721TDmI77rdRAYxwLQQnAcPQWQVpDaI5iPcCE9DdDOkOSPdCNg3Z\nDNh5CFzPN1kBtAtF6/tZav8+Dwe6/aaA4bOvLtCNwVTBjEG4DIJxiDbKSZLtYK+H0aYAbyuAuTp0\nVgrgHXzofvvG9PT+U5lrzXYyNrDLOHfbDULBeojPknoL10IwLPVoXeGMu7CsWdw06yh4NgfpHrBz\nxYWkB8DugGoboq7IEAbRr6sI2KosESCsvELxXABfRu9SDMq+POHfh4XkEu2aKYfeSx3cVHZI+35L\nvePpd/1SkM/K1VPx27h/DtXi1VPp0OKVPL7//hxi0xy0
6U8+hDd8UFE5N1pjhZuplN7m2UWmpktNbNnzexpM3sy7tshjrl7ycxmmdn2\nGcTxGzNbaWbzK/aNGIeZXRRz94KZHZNhTFeY2SsxX0+b2bEZx7S3mc02swVm9pyZnRv355arUWLK\nLVdmtp2ZPWFm88xsoZlNj/vzzNNIMWWcp8GUW4O5eyYboU93MTAFaAfmAftn9frDYnkZ2GHYvuuA\nC+LjC4FrMojjCOBgYP5YcRBm7s2LuZsSc9mWUUyXA9+tcmxWMe0GTI2PJwEvAvvnmatRYso7V13x\n6wRgDvCJJnhPVYspszwBDn9LueH1/vtXblm2cNPOQsvK8H6a44Fb4+NbgRMbHYC7Pw68nTKOE4AZ\n7j7g7ksJb8RDM4oJ3pmvLGNa4e7z4uP1wPOESTa55WqUmCDfXCUjTjsIjZy3yf89VS0myDRPzdHC\nzbLgVpuFtucIxzaaA4+Y2VwzOyvu29XdV8bHK4Fd8wltxDj2IOQskXX+vmNmz5jZzRUfSTOPKQ45\nPJiwWEhT5KoipjlxV265MrM2M5tHyMdsd19AznkaISbINE/jr+A203CIw939YOBY4BwzO6Lyhx4+\nh+Qeb4o4sorxl8C+wFTgNeAnoxzbsJjMbBJwL3Ceu6/b4kVzylWMaWaMaT0558rdS+4+lbBy1SfN\n7KhhP888T1Vi6iXzPI2/gvsqsHfF93uz5W+yzLj7a/Hr68B9hI8sK81sNwAz2x1YlUdso8QxPH97\nxX0N5+6rPAJuYugjXmYxmVk7odje7u73x9255qoipt8lMTVDrmIca4A/A4fQJO+pipg+mn2eNqXc\nGivLgluehWZmHYRZaA9k+PoAmFmXmU2Oj7uBY4D5MZbT42GnA/dXf4aGGymOB4CTzazDzPYFeoAn\nswgo/idNfIGQr8xiMjMDbgYWuvsNFT/KLVcjxZRnrsxsp+SjuZl1AkcDT5NvnqrGlPwCiBqepwLF\nVFujZTYO15tnFtquwH3h/wsTgN+7+ywzmwvcbWZfA5YCX2l0IGY2AzgS2MnMlgOXAddUi8PdF5rZ\n3cBCwmefs2ProNExXQ70mtlUwke7l4FvZhkTcDhwKvCsmT0d911EvrmqFtOPCGtA55Wr3YFbzayN\n0Ji63d0fjfHllaeRYrotyzx1URr7IGDd2Ie8K5ppJiItzcx8N2akOnYFp+CaaSYisvW6878GDqjg\nisg4kLZLodFUcEWk5amFKyKSkZ0zGIGQhgquiLQ8tXBFRDKigisikpFGXDQzs6sICwM58CZwhrsv\nH+2cTNfDFRHJQzeeaqvRde5+UFwn4n7CJKFRqYUrIi2vEV0KwxZQmgS8MdY5Krgi0vImNGgcrpld\nDZwG9AGHjXW8uhREpOWVUv4ZLt6aaH6V7TgAd7/Y3fcBbgF+OlYcauGKSMurVkwBXuYlXmbRiOe5\n+9EpX+IO4MGxDlLBFZGW5yP04U6hhyn0lL+fzV9SP6eZ9bh7Uq1PICyFOSoVXBFpeSMV3Hdpupl9\nCCgCS4BvjXWCCq6ItLyRuhTeDXf/cq3nqOCKSMtrUAu3Ziq4ItLyilq8RkQkG2rhiohkRAVXRCQj\njbhotjVUcEWk5fWzKe8QABVcERkHNrIh7xAAFVwRGQf66Ms7BEAFV0TGgbdYlXcIgAquiIwDauGK\niGREfbgiIhlRwRURyYi6FEREMqIWrohIRpql4Jp7c8wxFhFpBDOrqci5uzUqFt1EUkQkI2rhiohk\nRC1cEZGMqOCKiGREBVdEJCMquCIiGVHBFRHJyP8BY+OV+cGFOWwAAAAASUVORK5CYII=\n", "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "#plot the data\n", "import matplotlib.pyplot as plt\n", "plt.imshow(one_slice, origin='lower')\n", "plt.set_cmap('spectral')\n", "plt.colorbar()\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": 24, "metadata": { "collapsed": false }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "shape: [525000]\n", "type: [('Key', ' 0: # setup the permision to grant this user exclusive write access # and public read for everyone self.setDefaultAcl() filePath = fileUtil.getFilePath(domain, auth) # convert any "%2E" substrings with "." (since dot isn't allowed for domain name) filePath = self.nameDecode(filePath) if checkExists: while True: if fileUtil.isFile(filePath): break # Unfortunately the host query parameter substitues '/' for "%2E", # so check to see if any slashes should really be dots. 
# clients should prefer using the host header if this is an issue self.log.info("filePath: " + filePath + " not found") host_query = self.get_query_argument("host", default=None) if host_query is None: # If using host header, we don't need to guess about the %2E substitution break if domain.find('.') > -1: domain = domain.replace('.', '%2E', 1) try: filePath = fileUtil.getFilePath(domain, auth) except HTTPError: self.log.info("invalid domain, ignoring") break filePath = self.nameDecode(filePath) else: break self.log.info("verifyFile: " + filePath) fileUtil.verifyFile(filePath) # throws exception if not found return filePath def convertExternalPath(self, path_name): """ convert external path returned by h5db to h5domain convention Note: The external path might be a unix posix path or a HDF Server domain name. Apply some heuristics to make a best guess at which it is. """ server_domain = config.get("domain") dns_suffixes = [".org", ".edu", ".com", ".gov", ".net", ".mil", server_domain] hdf5_extension = config.get("hdf5_ext") parent_domain = self.domain n = parent_domain.find('.') if n > 0: parent_domain = self.domain[n:] h5domain = None if path_name.find('/') == -1: if path_name.find('.') == -1: # no slash or dot, tack on the dns path relative to the source domain h5domain = path_name + parent_domain else: # has a dot, no slashes if path_name.endswith(hdf5_extension): # strip off extension and prepend to front of domain h5domain = path_name[:-len(hdf5_extension)] + parent_domain else: for dns_suffix in dns_suffixes: if path_name.endswith(dns_suffix): # looks like absoutle DNS path, return that h5domain = path_name break if h5domain is None: # if we get to here, assume it a relative DNS path if path_name.endswith(hdf5_extension): h5domain = path_name[:-len(hdf5_extension)] + parent_domain else: h5domain = path_name + parent_domain else: # assume relative or absolute Unix file path if path_name.startswith('/'): h5domain = fileUtil.getDomain(path_name) else: # relative posix file path parts = path_name.split('/') basename = parts[-1] if basename.endswith(hdf5_extension): basename = basename[:-len(hdf5_extension)] parts[-1] = basename h5domain = parent_domain[1:] # don't include first dot for part in parts: if part: h5domain = part + '.' 
+ h5domain h5domain = self.nameEncode(h5domain) return h5domain def isWritable(self, filePath): """Helper method - raise 403 error if given file path is not writable """ fileUtil.verifyFile(filePath, writable=True) def isTocFilePath(self, filePath): """Helper method - return True if this is a TOC file apth """ if tocUtil.isTocFilePath(filePath): return True else: return False def nameDecode(self, name): """ Helper function - convert url-encoded name to orignal format """ name = name.replace('%2E', '.') return name def nameEncode(self, name): """ Helper function - convert name to url-friendly format Replaces all non-alphanumeric characters with '%' """ valid_chars = ['-', '.', '_', '~', ':', '/', '?', '#', '[', ']', '@', '!', '$', '&', "'", '(', ')', '*', '+', ',', ';', '='] out = [] for ch in name: if ch.isalnum(): out.append(ch) elif ch == ' ': out.append('+') elif ch == '%': # pass through encoded chars ('%xx' where xx are hexidecimal values) out.append(ch) elif ch in valid_chars: # other valid url chars out.append(ch) else: hex = format(ord(ch), '02X') out.append('%' + hex) return ''.join(out) def getRequestId(self): """ Helper method - return request uuid from request URI URI' are of the form: /groups//xxx /datasets//xxx /datatypes//xxx extract the and return it. Throw 500 error is the URI is not in the above form """ uri = self.request.path if uri.startswith('/groups/'): uri = uri[len('/groups/'):] # get stuff after /groups/ elif uri.startswith('/datasets/'): uri = uri[len('/datasets/'):] # get stuff after /datasets/ elif uri.startswith('/datatypes/'): uri = uri[len('/datatypes/'):] # get stuff after /datatypes/ else: #msg = "unexpected uri: " + uri #self.log.error(msg) #raise HTTPError(500, reason=msg) return None npos = uri.find('/') if npos < 0: uuid = uri elif npos == 0: msg = "Bad Request: uri is invalid" self.log.info(msg) raise HTTPError(400, reason=msg) else: uuid = uri[:npos] self.log.info('got uuid: [' + uuid + ']') return uuid """ Get requested content type. Returns either "binary" if the accept header is octet stream, otherwise json. Currently does not support q fields. 
""" def getAcceptType(self): content_type = self.request.headers.get('Accept') if content_type: self.log.info("CONTENT_TYPE:" + content_type) if content_type == "application/octet-stream": return "binary" else: return "json" class LinkCollectionHandler(BaseHandler): def get(self): self.baseHandler() # Get optional query parameters limit = self.get_query_argument("Limit", 0) if type(limit) is not int: try: limit = int(limit) except ValueError: msg = "Bad Request: Expected int type for limit" self.log.info(msg) raise HTTPError(400, reason=msg) marker = self.get_query_argument("Marker", None) response = {} items = None rootUUID = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') current_user_acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(current_user_acl, 'read') # throws exception is unauthorized items = db.getLinkItems(self.reqUuid, marker=marker, limit=limit) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put together the response links = [] hrefs = [] hostQuery = '' if self.get_query_argument("host", default=None): hostQuery = "?host=" + self.get_query_argument("host") hrefs.append({ 'rel': 'self', 'href': self.getHref('groups/' + self.reqUuid + '/links') }) for item in items: link_item = {} link_item['class'] = item['class'] link_item['title'] = item['title'] link_item['href'] = item['href'] = self.href + '/groups/' + self.reqUuid + '/links/' + self.nameEncode(item['title']) + hostQuery if item['class'] == 'H5L_TYPE_HARD': link_item['id'] = item['id'] link_item['collection'] = item['collection'] link_item['target'] = self.href + '/' + item['collection'] + '/' + item['id'] + hostQuery elif item['class'] == 'H5L_TYPE_SOFT': link_item['h5path'] = item['h5path'] elif item['class'] == 'H5L_TYPE_EXTERNAL': link_item['h5path'] = item['h5path'] link_item['h5domain'] = self.convertExternalPath(item['file']) if link_item['h5domain'].endswith(config.get('domain')): link_item['target'] = self.getExternalHref(link_item['h5domain'], link_item['h5path']) links.append(link_item) response['links'] = links hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID) }) home_dir = config.get("home_dir") hrefs.append({'rel': home_dir, 'href': self.getHref('') }) hrefs.append({ 'rel': 'owner', 'href': self.getHref('groups/' + self.reqUuid) }) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) class LinkHandler(BaseHandler): def getName(self, uri): # helper method # uri should be in the form: /group//links/ # this method returns name npos = uri.find('/links/') if npos < 0: # shouldn't be possible to get here msg = "Internal Server Error: Unexpected uri" self.log.error(msg) raise HTTPError(500, reason=msg) if npos+len('/links/') >= len(uri): # no name specified msg = "Bad Request: no name specified" self.log.info(msg) raise HTTPError(400, reason=msg) linkName = uri[npos+len('/links/'):] if linkName.find('/') >= 0: # can't have '/' in link name msg = "Bad Request: invalid linkname, '/' not allowed" self.log.info(msg) raise HTTPError(400, reason=msg) npos = linkName.rfind('?') if npos >= 0: # trim off the query params linkName = linkName[:npos] linkName = url_unescape(linkName) return linkName def get(self): self.baseHandler() linkName = self.getName(self.request.path) self.log.info("linkName:["+linkName+"]") response = {} rootUUID = None try: 
with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized item = db.getLinkItemByUuid(self.reqUuid, linkName) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) response['lastModified'] = unixTimeToUTC(item['mtime']) response['created'] = unixTimeToUTC(item['ctime']) for key in ('mtime', 'ctime', 'href'): if key in item: del item[key] # replace 'file' key by 'h5domain' if present if 'file' in item: h5domain = item['file'] del item['file'] item['h5domain'] = self.convertExternalPath(h5domain) response['link'] = item hrefs = [] hrefs.append({ 'rel': 'self', 'href': self.getHref('groups/' + self.reqUuid + '/links/' + url_escape(linkName)) }) hrefs.append({ 'rel': 'root', 'href': self.getHref( 'groups/' + rootUUID) }) hrefs.append({ 'rel': 'home', 'href': self.getHref('') }) hrefs.append({ 'rel': 'owner', 'href': self.getHref('groups/' + self.reqUuid) }) target = None if item['class'] == 'H5L_TYPE_HARD': target = self.getHref(item['collection'] + '/' + item['id']) elif item['class'] == 'H5L_TYPE_SOFT': target = self.getHref('/#h5path(' + item['h5path'] + ')') elif item['class'] == 'H5L_TYPE_EXTERNAL': if item['h5domain'].endswith(config.get('domain')): target = self.getExternalHref(h5domain, item['h5path']) if target: hrefs.append({'rel': 'target', 'href': target}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def put(self): self.baseHandler() # put - create a new link # patterns are: # PUT /groups//links/ {id: } # PUT /groups//links/ {h5path: } # PUT /groups//links/ {h5path: , h5domain: } linkName = self.getName(self.request.path) body = None try: body = json_decode(self.request.body) except ValueError as e: msg = "JSON Parser Error: " + e.message log.info(msg) raise HTTPError(400, reason=msg) childUuid = None h5path = None h5domain = None filename = None # fake filename if "id" in body: childUuid = body["id"] if childUuid is None or len(childUuid) == 0: msg = "Bad Request: id not specified" self.log.info(msg) raise HTTPError(400, reason=msg) elif "h5path" in body: # todo h5path = body["h5path"] if h5path is None or len(h5path) == 0: raise HTTPError(400) # if h5domain is present, this will be an external link if "h5domain" in body: h5domain = body["h5domain"] else: msg = "Bad request: missing required body keys" self.log.info(msg) raise HTTPError(400, reasoln=msg) if self.isTocFilePath(self.filePath): msg = "Forbidden: links can not be directly created in TOC domain" self.log.info(msg) raise HTTPError(403, reason=msg) response = {} rootUUID = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'create') # throws exception is unauthorized try: existingItem = db.getLinkItemByUuid(self.reqUuid, linkName) if existingItem: # link alread exist msg = "Unable to create link (Name already exists)" self.log.info(msg) raise HTTPError(409, reason=msg) except IOError as e: # link not found, so we can add one with this name pass if childUuid: db.linkObject(self.reqUuid, childUuid, linkName) elif h5domain: db.createExternalLink(self.reqUuid, h5domain, h5path, linkName) elif h5path: db.createSoftLink(self.reqUuid, h5path, linkName) except IOError as e: self.log.info("IOError: " + str(e.errno) + 
" " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) hrefs = [] hrefs.append({ 'rel': 'self', 'href': self.getHref('groups/' + self.reqUuid + '/links/' + url_escape(linkName)) }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID) }) hrefs.append({ 'rel': 'home', 'href': self.getHref('') }) hrefs.append({ 'rel': 'owner', 'href': self.getHref('groups/' + self.reqUuid) }) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) self.set_status(201) def delete(self): self.baseHandler() linkName = self.getName(self.request.path) response = {} rootUUID = None self.isWritable(self.filePath) if self.isTocFilePath(self.filePath): msg = "Forbidden: links can not be directly modified in TOC domain" self.log.info(msg) raise HTTPError(403, reason=msg) try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'delete') # throws exception is unauthorized db.unlinkItem(self.reqUuid, linkName) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) hrefs = [] hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID) }) hrefs.append({'rel': 'home', 'href': self.getHref('')}) hrefs.append({ 'rel': 'owner', 'href': self.getHref('groups/' + self.reqUuid)}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) class AclHandler(BaseHandler): def getRequestCollectionName(self): # request is in the form /(datasets|groups|datatypes)//acls(/), # or /acls(/) for domain acl # return datasets | groups | datatypes uri = self.request.path npos = uri.find('/') if npos < 0: self.log.info("bad uri") raise HTTPError(400) if uri.startswith('/acls/'): # domain request - return group collection return 'groups' uri = uri[(npos+1):] npos = uri.find('/') # second '/' if npos < 0: # uri is "/acls" return "groups" col_name = uri[:npos] self.log.info('got collection name: [' + col_name + ']') if col_name not in ('datasets', 'groups', 'datatypes'): msg = "Internal Server Error: collection name unexpected" self.log.error(msg) raise HTTPError(500, reason=msg) # shouldn't get routed here in this case return col_name def getName(self): uri = self.request.path if uri.endswith('/acls'): return None # default domain acl # helper method # uri should be in the form: /group//acl/ # this method returns name npos = uri.find('/acls/') if npos < 0: # shouldn't be possible to get here msg = "Internal Server Error: Unexpected uri" self.log.error(msg) raise HTTPError(500, reason=msg) if npos+len('/acls/') >= len(uri): # no name specified msg = "Bad Request: no name specified" self.log.info(msg) raise HTTPError(400, reason=msg) userName = uri[npos+len('/acls/'):] if userName.find('/') >= 0: # can't have '/' in link name msg = "Bad Request: invalid linkname, '/' not allowed" self.log.info(msg) raise HTTPError(400, reason=msg) npos = userName.rfind('?') if npos >= 0: # trim off the query params userName = userName[:npos] return userName def convertUserIdToUserName(self, acl_in): """ convertUserIdToUserName - replace userids with username """ acl_out = None if type(acl_in) in (list, tuple): # convert list to list acl_out = [] for item in acl_in: acl_out.append(self.convertUserIdToUserName(item)) else: acl_out = {} for key in acl_in.keys(): if key == 'userid': # 
convert userid to username userid = acl_in['userid'] user_name = '???' if userid == 0: user_name = 'default' else: user_name = auth.getUserName(userid) if user_name is None: self.log.warning("user not found for userid: " + str(userid)) acl_out['userName'] = user_name else: value = acl_in[key] acl_out[key] = True if value else False return acl_out def get(self): self.baseHandler() req_uuid = None if not self.request.path.startswith("/acls"): # get UUID for object unless this is a get on domain acl req_uuid = self.getRequestId() rootUUID = None filePath = self.getFilePath(self.domain) userName = self.getName() col_name = self.getRequestCollectionName() req_userid = None if userName: if userName == 'default': req_userid = 0 else: req_userid = auth.getUserId(userName) if req_userid is None: # username not found msg = "username does not exist" self.log.info(msg) raise HTTPError(404, reason=msg) request = {} acl = None current_user_acl = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') if req_uuid: obj_uuid = req_uuid else: obj_uuid = rootUUID current_user_acl = db.getAcl(obj_uuid, self.userid) self.verifyAcl(current_user_acl, 'readACL') # throws exception is unauthorized if req_userid is None: acl = db.getAcls(obj_uuid) else: acl = db.getAcl(obj_uuid, req_userid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) response = {} acl = self.convertUserIdToUserName(acl) if userName is None: userName = '' # for string concat in the hrefs response['acls'] = acl else: response['acl'] = acl hrefs = [] if current_user_acl: if userName: hrefs.append({ 'rel': 'self', 'href': self.getHref(col_name + '/' + obj_uuid + '/acls/' + url_escape(userName)) }) else: hrefs.append({ 'rel': 'self', 'href': self.getHref(col_name + '/' + obj_uuid + '/acls') }) else: hrefs.append({ 'rel': 'self', 'href': self.getHref(col_name + '/' + obj_uuid + '/acls') }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID) }) hrefs.append({'rel': 'home', 'href': self.getHref('')}) hrefs.append({ 'rel': 'owner', 'href': self.getHref(col_name + '/' + obj_uuid) }) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def put(self): self.baseHandler() # put - create/update an acl # patterns are: # PUT /group//acls/ {'read': True, 'write': False } # PUT /acls/ {'read'... 
} req_uuid = None if not self.request.path.startswith("/acls/"): req_uuid = self.getRequestId() col_name = self.getRequestCollectionName() userName = url_unescape(self.getName()) if userName is None or len(userName) == 0: msg = "Bad Request: username not provided" self.log.info(msg) raise HTTPError(400, reason=msg) req_userid = None # this is the userid of the acl we'll be updating # self.userid is the userid of the requestor if userName == 'default': req_userid = 0 else: req_userid = auth.getUserId(userName) if req_userid is None: msg = "Bad Request: username not found" self.log.info(msg) raise HTTPError(400, reason=msg) body = None try: body = json_decode(self.request.body) except ValueError as e: msg = "JSON Parser Error: " + e.message self.log.info(msg) raise HTTPError(400, reason=msg) acl = {} acl['userid'] = req_userid for key in ('create', 'read', 'update', 'delete', 'readACL', 'updateACL'): if key in body: acl[key] = 1 if body[key] else 0 if len(acl) == 1: msg = "Bad Request: no acl permissions found in request body" self.log.info(msg) raise HTTPError(400, reason=msg) response = {} rootUUID = None obj_uuid = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') if req_uuid is None: obj_uuid = rootUUID else: obj_uuid = req_uuid current_user_acl = db.getAcl(obj_uuid, self.userid) self.verifyAcl(current_user_acl, 'updateACL') # throws exception is unauthorized db.setAcl(obj_uuid, acl) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) hrefs = [] hrefs.append({ 'rel': 'self', 'href': self.getHref(col_name + '/' + obj_uuid + '/acls/' + url_escape(userName)) }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({'rel': 'home', 'href': self.getHref('') }) hrefs.append({ 'rel': 'owner', 'href': self.getHref(col_name + '/' + obj_uuid) }) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) self.set_status(201) class TypeHandler(BaseHandler): def get(self): self.baseHandler() if not self.reqUuid: msg = "Bad Request: id is not specified" self.log.info(msg) raise HTTPError(400, reason=msg) response = {} hrefs = [] rootUUID = None item = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized item = db.getCommittedTypeItemByUuid(self.reqUuid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put together the response hrefs.append({ 'rel': 'self', 'href': self.getHref('datatypes/' + self.reqUuid) }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({ 'rel': 'attributes', 'href': self.getHref('datatypes/' + self.reqUuid + '/attributes') }) hrefs.append({'rel': 'home', 'href': self.getHref('')}) response['id'] = self.reqUuid typeItem = item['type'] response['type'] = h5json.getTypeResponse(typeItem) response['created'] = unixTimeToUTC(item['ctime']) response['lastModified'] = unixTimeToUTC(item['mtime']) response['attributeCount'] = item['attributeCount'] response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def delete(self): self.baseHandler() self.isWritable(self.filePath) 
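# --- Illustrative client-side sketch of the ACL update pattern accepted by
# AclHandler.put above. A minimal sketch only: the server URL, domain name,
# and credentials are assumptions, and the body keys mirror the flags the
# handler looks for (create/read/update/delete/readACL/updateACL).
import requests

endpoint = "http://127.0.0.1:5000"              # assumed local h5serv instance
headers = {"host": "tall.test.hdfgroup.org"}    # assumed domain, passed as host header
acl_body = {"read": True, "update": False}      # grant read, deny update

# update the domain-level ACL for the special 'default' user (userid 0)
rsp = requests.put(endpoint + "/acls/default",
                   headers=headers, json=acl_body,
                   auth=("test_user1", "test"))  # assumed account with updateACL permission
print(rsp.status_code)                           # handler sets 201 on success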
response = {} hrefs = [] rootUUID = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'delete') # throws exception is unauthorized db.deleteObjectByUuid('datatype', self.reqUuid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put together the response hrefs.append({'rel': 'self', 'href': self.getHref('datatypes')}) hrefs.append({'rel': 'home', 'href': self.getHref('')}) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) class DatatypeHandler(BaseHandler): def get(self): self.baseHandler() response = {} hrefs = [] rootUUID = None item = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized item = db.getDatasetTypeItemByUuid(self.reqUuid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put together the response hrefs.append({ 'rel': 'self', 'href': self.getHref('datasets/' + self.reqUuid + '/type') }) hrefs.append({ 'rel': 'owner', 'href': self.getHref('datasets/' + self.reqUuid)}) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) response['type'] = item['type'] response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) class ShapeHandler(BaseHandler): def get(self): self.baseHandler() response = {} hrefs = [] rootUUID = None item = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized item = db.getDatasetItemByUuid(self.reqUuid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put together the response hrefs.append({ 'rel': 'self', 'href': self.getHref('datasets/' + self.reqUuid)}) hrefs.append({ 'rel': 'owner', 'href': self.getHref('datasets/' + self.reqUuid)}) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) shape = item['shape'] response['shape'] = shape response['created'] = unixTimeToUTC(item['ctime']) response['lastModified'] = unixTimeToUTC(item['mtime']) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def put(self): self.baseHandler() self.isWritable(self.filePath) response = {} hrefs = [] rootUUID = None body = None try: body = json_decode(self.request.body) except ValueError as e: msg = "JSON Parser Error: " + e.message self.log.info(msg) raise HTTPError(400, reason=msg) if "shape" not in body: msg = "Bad Request: Shape not specified" self.log.info(msg) raise HTTPError(400, reason=msg) # missing shape shape = body["shape"] if type(shape) == int: dim1 = shape shape = [dim1] elif type(shape) == list or type(shape) == tuple: pass # can use as is else: msg = "Bad Request: invalid shape argument" self.log.info(msg) raise HTTPError(400, reason=msg) # validate shape for extent 
in shape: if type(extent) != int: msg = "Bad Request: invalid shape type (expecting int)" self.log.info(msg) raise HTTPError(400, reason=msg) if extent < 0: msg = "Bad Request: invalid shape (negative extent)" self.log.info(msg) raise HTTPError(400, reason=msg) try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'update') # throws exception is unauthorized db.resizeDataset(self.reqUuid, shape) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) self.log.info("resize OK") # put together the response hrefs.append({ 'rel': 'self', 'href': self.getHref('datasets/' + self.reqUuid)}) hrefs.append({ 'rel': 'owner', 'href': self.getHref('datasets/' + self.reqUuid)}) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) response['hrefs'] = hrefs self.set_status(201) # resource created self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) class DatasetHandler(BaseHandler): def getDatasetNumElements(self, shape_item): if shape_item['class'] == 'H5S_SCALAR': return 1 elif shape_item['class'] != 'H5S_SIMPLE': return 0 dims = shape_item['dims'] rank = len(dims) if rank == 0: return 1 count = 1 for i in range(rank): count *= dims[i] return count def getPreviewQuery(self, shape_item): """Helper method - return query options for a "reasonable" size data preview selection. Return None if the dataset is small enough that a preview is not needed. """ select = "select=[" dims = shape_item['dims'] rank = len(dims) ncols = dims[rank-1] if rank > 1: nrows = dims[rank-2] else: nrows = 1 # use some rough heuristics to define the selection # aim to return no more than 100 elements if ncols > 100: ncols = 100 if nrows > 100: nrows = 100 if nrows*ncols > 100: if nrows > ncols: nrows = 100 // ncols else: ncols = 100 // nrows for i in range(rank): if i == rank-1: select += "0:" + str(ncols) elif i == rank-2: select += "0:" + str(nrows) + "," else: select += "0:1," select += "]" return select def get(self): self.baseHandler() response = {} hrefs = [] rootUUID = None item = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized item = db.getDatasetItemByUuid(self.reqUuid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put together the response count = self.getDatasetNumElements(item['shape']) if count <= 100: # small number of values, provide link to entire dataset hrefs.append({ 'rel': 'data', 'href': self.getHref('datasets/' + self.reqUuid + '/value') }) else: # large number of values, create preview link previewQuery = self.getPreviewQuery(item['shape']) hrefs.append({ 'rel': 'preview', 'href': self.getHref('datasets/' + self.reqUuid + '/value', query=previewQuery) }) hrefs.append({ 'rel': 'self', 'href': self.getHref('datasets/' + self.reqUuid)}) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({ 'rel': 'attributes', 'href': self.getHref('datasets/' + self.reqUuid + '/attributes') }) hrefs.append({'rel': 'home', 'href': self.getHref('')}) response['id'] = self.reqUuid typeItem = item['type'] response['type'] = 
h5json.getTypeResponse(typeItem) response['shape'] = item['shape'] if 'creationProperties' in item: response['creationProperties'] = item['creationProperties'] response['created'] = unixTimeToUTC(item['ctime']) response['lastModified'] = unixTimeToUTC(item['mtime']) response['attributeCount'] = item['attributeCount'] response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') json_rsp = json_encode(response) self.write(json_rsp) def delete(self): self.baseHandler() self.isWritable(self.filePath) response = {} hrefs = [] rootUUID = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'delete') # throws exception is unauthorized db.deleteObjectByUuid('dataset', self.reqUuid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # write the response href = self.request.protocol + '://' + self.request.host + '/' hostQuery = '' if self.get_query_argument("host", default=None): hostQuery = "?host=" + self.get_query_argument("host") hrefs.append({'rel': 'self', 'href': href + 'datasets' + hostQuery}) hrefs.append({ 'rel': 'root', 'href': href + 'groups/' + rootUUID + hostQuery}) hrefs.append({'rel': 'home', 'href': href + hostQuery}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) class ValueHandler(BaseHandler): def getSliceQueryParam(self, dim, extent): """ Helper method - return slice for dim based on query params Query arg should be in the form: [, , ... , ] brackets are optional for one dimensional arrays. Each dimension, valid formats are: single integer: n start and end: n:m start, end, and stride: n:m:s """ # Get optional query parameters for given dim self.log.info("getSliceQueryParam: " + str(dim) + ", " + str(extent)) query = self.get_query_argument("select", default='ALL') if query == 'ALL': # just return a slice for the entire dimension self.log.info("getSliceQueryParam: return default") return slice(0, extent) self.log.info("select query value: [" + query + "]") if not query.startswith('['): msg = "Bad Request: selection query missing start bracket" self.log.info(msg) raise HTTPError(400, reason=msg) if not query.endswith(']'): msg = "Bad Request: selection query missing end bracket" self.log.info(msg) raise HTTPError(400, reason=msg) # now strip out brackets query = query[1:-1] query_array = query.split(',') if dim > len(query_array): msg = "Not enough dimensions supplied to query argument" self.log.info(msg) raise HTTPError(400, reason=msg) dim_query = query_array[dim].strip() start = 0 stop = extent step = 1 if dim_query.find(':') < 0: # just a number - return start = stop for this value try: start = int(dim_query) except ValueError: msg = "Bad Request: invalid selection parameter (can't convert to int) for dimension: " + str(dim) self.log.info(msg) raise HTTPError(400, reason=msg) stop = start elif dim_query == ':': # select everything pass else: fields = dim_query.split(":") if len(fields) > 3: msg = "Bad Request: Too many ':' seperators for dimension: " + str(dim) self.log.info(msg) raise HTTPError(400, reason=msg) try: if fields[0]: start = int(fields[0]) if fields[1]: stop = int(fields[1]) if len(fields) > 2 and fields[2]: step = int(fields[2]) except ValueError: msg = "Bad Request: invalid selection parameter (can't convert to int) for dimension: " + str(dim) self.log.info(msg) 
raise HTTPError(400, reason=msg) if start < 0 or start > extent: msg = "Bad Request: Invalid selection start parameter for dimension: " + str(dim) self.log.info(msg) raise HTTPError(400, reason=msg) if stop > extent: msg = "Bad Request: Invalid selection stop parameter for dimension: " + str(dim) self.log.info(msg) raise HTTPError(400, reason=msg) if step <= 0: msg = "Bad Request: invalid selection step parameter for dimension: " + str(dim) self.log.info(msg) raise HTTPError(400, reason=msg) s = slice(start, stop, step) self.log.info( "dim query[" + str(dim) + "] returning: start: " + str(start) + " stop: " + str(stop) + " step: " + str(step)) return s def getHyperslabSelection(self, dsetshape, start, stop, step): """ Get slices given lists of start, stop, step values """ rank = len(dsetshape) if start: if type(start) is not list: start = [start] if len(start) != rank: msg = "Bad Request: start array length not equal to dataset rank" self.log.info(msg) raise HTTPError(400, reason=msg) for dim in range(rank): if start[dim] < 0 or start[dim] >= dsetshape[dim]: msg = "Bad Request: start index invalid for dim: " + str(dim) self.log.info(msg) raise HTTPError(400, reason=msg) else: start = [] for dim in range(rank): start.append(0) if stop: if type(stop) is not list: stop = [stop] if len(stop) != rank: msg = "Bad Request: stop array length not equal to dataset rank" self.log.info(msg) raise HTTPError(400, reason=msg) for dim in range(rank): if stop[dim] <= start[dim] or stop[dim] > dsetshape[dim]: msg = "Bad Request: stop index invalid for dim: " + str(dim) self.log.info(msg) raise HTTPError(400, reason=msg) else: stop = [] for dim in range(rank): stop.append(dsetshape[dim]) if step: if type(step) is not list: step = [step] if len(step) != rank: msg = "Bad Request: step array length not equal to dataset rank" self.log.info(msg) raise HTTPError(400, reason=msg) for dim in range(rank): if step[dim] <= 0 or step[dim] > dsetshape[dim]: msg = "Bad Request: step index invalid for dim: " + str(dim) self.log.info(msg) raise HTTPError(400, reason=msg) else: step = [] for dim in range(rank): step.append(1) slices = [] for dim in range(rank): try: s = slice(int(start[dim]), int(stop[dim]), int(step[dim])) except ValueError: msg = "Bad Request: invalid start/stop/step value" self.log.info(msg) raise HTTPError(400, reason=msg) slices.append(s) return tuple(slices) def get(self): self.baseHandler() request_content_type = self.getAcceptType() response_content_type = "json" self.log.info("contenttype:" + request_content_type) response = {} hrefs = [] rootUUID = None item = None item_shape = None rank = None item_type = None values = None indexes = None slices = [] query_selection = self.get_query_argument("query", default=None) limit = self.get_query_argument("Limit", default=None) if limit: try: limit = int(limit) # convert to int except ValueError as e: msg = "invalid Limit: " + e.message log.info(msg) raise HTTPError(400, msg) if query_selection: self.log.info("query: " + query_selection) try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized item = db.getDatasetItemByUuid(self.reqUuid) item_type = item['type'] if item_type['class'] == 'H5T_OPAQUE': # TODO - support for returning OPAQUE data... 
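# --- Illustrative sketch of the "select" query syntax parsed by
# getSliceQueryParam above: one start:stop:step range per dimension inside
# brackets. The endpoint, domain, dataset UUID, and 2-D shape are assumptions.
import requests

endpoint = "http://127.0.0.1:5000"              # assumed local h5serv instance
headers = {"host": "tall.test.hdfgroup.org"}    # assumed domain, passed as host header
dset_uuid = "<dataset-uuid>"                    # placeholder id of a 2-D dataset

# rows 0-3 and every 10th column of the first 100 columns
params = {"select": "[0:4,0:100:10]"}
rsp = requests.get(endpoint + "/datasets/" + dset_uuid + "/value",
                   headers=headers, params=params)
values = rsp.json()["value"]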
msg = "Not Implemented: GET OPAQUE data not supported" self.log.info(msg) raise HTTPError(501, reason=msg) # Not implemented elif item_type['class'] != 'H5T_COMPOUND' and query_selection: msg = "Bad Request: query selection is only supported for compound types" self.log.info(msg) raise HTTPError(400, reason=msg) item_shape = item['shape'] if item_shape['class'] == 'H5S_NULL': pass # don't return a value elif item_shape['class'] == 'H5S_SCALAR': if query_selection: msg = "Bad Request: query selection not valid with scalar dataset" self.log.info(msg) raise HTTPError(400, reason=msg) values = db.getDatasetValuesByUuid(self.reqUuid, Ellipsis) elif item_shape['class'] == 'H5S_SIMPLE': dims = item_shape['dims'] rank = len(dims) if query_selection and rank != 1: msg = "Bad Request: query selection is only supported for " msg += "one dimensional datasets" self.log.info(msg) raise HTTPError(400, reason=msg) nelements = 1 for dim in range(rank): dim_slice = self.getSliceQueryParam(dim, dims[dim]) self.log.info("dim_size[{}]: {}".format(dim, dim_slice)) nelements *= (dim_slice.stop - dim_slice.start) slices.append(dim_slice) if query_selection: start = slices[0].start stop = slices[0].stop step = slices[0].step (indexes, values) = db.doDatasetQueryByUuid(self.reqUuid, query_selection, start=start, stop=stop, step=step, limit=limit) else: if request_content_type == "binary": self.log.info("nelements:" + str(nelements)) itemSize = h5json.getItemSize(item_type) self.log.info("itemSize: " + str(itemSize)) if itemSize != "H5T_VARIABLE" and nelements > 1: response_content_type = "binary" self.log.info("response_content_type: " + response_content_type) values = db.getDatasetValuesByUuid( self.reqUuid, tuple(slices), format=response_content_type) else: msg = "Internal Server Error: unexpected shape class: " + shape['class'] self.log.error(msg) raise HTTPError(500, reason=msg) rootUUID = db.getUUIDByPath('/') except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put together the response if response_content_type == "binary": # binary transfer, just write the bytes and return self.log.info("writing binary stream") self.set_header('Content-Type', 'application/octet-stream') self.write(values) return if request_content_type == "binary": #unable to return binary data self.log.info("requested binary response, but returning JSON instead") selfQuery = [] if self.get_query_argument("select", default=''): selfQuery.append('select=' + self.get_query_argument("select")) if self.get_query_argument("query", default=''): selfQuery.append('query=' + self.get_query_argument( "select", default='')) if values is not None: response['value'] = values else: response['value'] = None if indexes is not None: response['index'] = indexes hrefs.append({ 'rel': 'self', 'href': self.getHref('datasets/' + self.reqUuid + '/value', query=selfQuery) }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({ 'rel': 'owner', 'href': self.getHref('datasets/' + self.reqUuid)}) hrefs.append({ 'rel': 'home', 'href': self.getHref('')}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def post(self): self.baseHandler() body = None try: body = json_decode(self.request.body) except ValueError as e: msg = "JSON Parser Error: " + e.message self.log.info(msg) raise HTTPError(400, reason=msg) self.log.info("type body: 
{}".format(type(body))) if "points" not in body: msg = "Bad Request: value post request without points in body" self.log.info(msg) raise HTTPError(400, reason=msg) #self.log.info("points type: {}".format(type(points))) self.log.info("body type: {}".format(type(body))) self.log.info("body keys: {}".format(list(body.keys()))) points = body['points'] if type(points) != list: msg = "Bad Request: expecting list of points, got: {}".format(type(points)) self.log.info(msg) raise HTTPError(400, reason=msg) response = {} hrefs = [] rootUUID = None item = None values = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized item = db.getDatasetItemByUuid(self.reqUuid) shape = item['shape'] if shape['class'] == 'H5S_SCALAR': msg = "Bad Request: point selection is not supported on scalar datasets" self.log.info(msg) raise HTTPError(400, reason=msg) if shape['class'] == 'H5S_NULL': msg = "Bad Request: point selection is not supported on Null Space datasets" self.log.info(msg) raise HTTPError(400, reason=msg) rank = len(shape['dims']) for point in points: if rank == 1 and type(point) != int: msg = "Bad Request: elements of points should be int type for datasets of rank 1" self.log.info(msg) raise HTTPError(400, reason=msg) elif rank > 1 and type(point) != list: msg = "Bad Request: elements of points should be list type for datasets of rank >1" self.log.info(msg) raise HTTPError(400, reason=msg) if len(point) != rank: msg = "Bad Request: one or more points have a missing coordinate value" self.log.info(msg) raise HTTPError(400, reason=msg) values = db.getDatasetPointSelectionByUuid(self.reqUuid, points) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put together the response response['value'] = values hrefs.append({ 'rel': 'self', 'href': self.getHref('datasets/' + self.reqUuid + '/value') }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({ 'rel': 'owner', 'href': self.getHref('datasets/' + self.reqUuid)}) hrefs.append({'rel': 'home', 'href': self.getHref('')}) self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def put(self): self.baseHandler() points = None start = None stop = None step = None body = None format = "json" data = None try: body = json_decode(self.request.body) except ValueError as e: try: msg = "JSON Parser Error: " + e.message except AttributeError: msg = "JSON Parser Error" log.info(msg) raise HTTPError(400, reason=msg) if "value" in body: data = body["value"] format = "json" elif "value_base64" in body: base64_data = body["value_base64"] base64_data = base64_data.encode("ascii") data = base64.b64decode(base64_data) format = "binary" else: msg = "Bad Request: Value not specified" self.log.info(msg) raise HTTPError(400, reason=msg) # missing data if "points" in body: points = body['points'] if type(points) != list: msg = "Bad Request: expecting list of points" self.log.info(msg) raise HTTPError(400, reason=msg) if 'start' in body or 'stop' in body or 'step' in body: msg = "Bad Request: can use hyperslab selection and points selection in one request" self.log.info(msg) raise HTTPError(400, reason=msg) if len(points) > len(data): msg = "Bad Request: more points provided than values" self.log.info(msg) raise HTTPError(400, reason=msg) 
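# --- Illustrative sketch of the point-selection POST handled by
# ValueHandler.post above: the body carries a "points" list (ints for rank-1
# datasets, coordinate lists for higher ranks). Endpoint, domain, and UUID
# are placeholders.
import requests

endpoint = "http://127.0.0.1:5000"              # assumed local h5serv instance
headers = {"host": "tall.test.hdfgroup.org"}    # assumed domain, passed as host header
dset_uuid = "<dataset-uuid>"                    # placeholder id of a rank-2 dataset

body = {"points": [[0, 0], [1, 2], [3, 5]]}     # three (row, col) coordinates
rsp = requests.post(endpoint + "/datasets/" + dset_uuid + "/value",
                    headers=headers, json=body)
print(rsp.json()["value"])                      # values at the selected points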
else: # hyperslab selection if 'start' in body: start = body['start'] if 'stop' in body: stop = body['stop'] if 'step' in body: step = body['step'] try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'update') # throws exception is unauthorized item = db.getDatasetItemByUuid(self.reqUuid) item_type = item['type'] dims = None if 'shape' not in item: msg = "Unexpected error, shape information not found" self.log.info(msg) raise HTTPError(500, reason=msg) datashape = item['shape'] if datashape['class'] == 'H5S_NULL': msg = "Bad Request: PUT value can't be used with Null Space datasets" self.log.info(msg) raise HTTPError(400, reason=msg) # missing data if format == "binary": item_size = h5json.getItemSize(item_type) if item_size == "H5T_VARIABLE": msg = "binary data cannot be used with variable length types" self.log.info(msg) raise HTTPError(400, reason=msg) # need to use json if datashape['class'] == 'H5S_SIMPLE': dims = datashape['dims'] elif datashape['class'] == 'H5S_SCALAR': if start is not None or stop is not None or step is not None: msg = "Bad Request: start/stop/step option can't be used with Scalar Space datasets" self.log.info(msg) raise HTTPError(400, reason=msg) # missing data elif points: msg = "Bad Request: Point selection can't be used with scalar datasets" self.log.info(msg) raise HTTPError(400, reason=msg) # missing data if points is not None: # write point selection db.setDatasetValuesByPointSelection(self.reqUuid, data, points, format=format) else: slices = None if dims is not None: slices = self.getHyperslabSelection( dims, start, stop, step) # todo - check that the types are compatible db.setDatasetValuesByUuid(self.reqUuid, data, slices, format=format) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) self.log.info("value put succeeded") class AttributeHandler(BaseHandler): # convert embedded list (list of lists) to tuples def convertToTuple(self, data): if type(data) == list or type(data) == tuple: sublist = [] for e in data: sublist.append(self.convertToTuple(e)) return tuple(sublist) else: return data def getRequestName(self): # request is in the form /(datasets|groups|datatypes)//attributes(/), # return # return None if the uri doesn't end with ".../" uri = self.request.path name = None npos = uri.rfind('/attributes') if npos <= 0: msg = "Bad Request: URI is invalid" self.log.info(msg) raise HTTPError(400, reason=msg) uri = uri[npos+len('/attributes'):] if uri[0:1] == '/': uri = uri[1:] if len(uri) > 0: # strip off any query param npos = uri.rfind('?') if npos > 0: uri = uri[:npos] name = url_unescape(uri) # todo: handle possible query string? 
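# --- Illustrative sketch of the hyperslab write accepted by ValueHandler.put
# above: "value" (or "value_base64") carries the data, and optional
# start/stop/step select the target region. Endpoint, domain, UUID, dataset
# shape/type, and credentials are assumptions.
import requests

endpoint = "http://127.0.0.1:5000"              # assumed local h5serv instance
headers = {"host": "tall.test.hdfgroup.org"}    # assumed writable domain
dset_uuid = "<dataset-uuid>"                    # placeholder id of a rank-1 int dataset

# write four values into elements 0..3
body = {"start": 0, "stop": 4, "value": [10, 20, 30, 40]}
rsp = requests.put(endpoint + "/datasets/" + dset_uuid + "/value",
                   headers=headers, json=body,
                   auth=("test_user1", "test")) # assumed account with update access
rsp.raise_for_status()                          # raises if the write was rejected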
self.log.info('got name: [' + name + ']') return name def getRequestCollectionName(self): # request is in the form /(datasets|groups|datatypes)//attributes(/), # return datasets | groups | datatypes uri = self.request.path npos = uri.find('/') if npos < 0: log.info("bad uri") raise HTTPError(400) uri = uri[(npos+1):] npos = uri.find('/') # second '/' col_name = uri[:npos] self.log.info('got collection name: [' + col_name + ']') if col_name not in ('datasets', 'groups', 'datatypes'): msg = "Internal Server Error: collection name unexpected" self.log.error(msg) raise HTTPError(500, reason=msg) # shouldn't get routed here in this case return col_name def get(self): self.baseHandler() col_name = self.getRequestCollectionName() attr_name = self.getRequestName() response = {} hrefs = [] rootUUID = None items = [] # Get optional query parameters limit = self.get_query_argument("Limit", 0) if type(limit) is not int: try: limit = int(limit) except ValueError: log.info("expected int type for limit") raise HTTPError(400) marker = self.get_query_argument("Marker", None) try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized if attr_name is not None: item = db.getAttributeItem(col_name, self.reqUuid, attr_name) items.append(item) else: # get all attributes (but without data) items = db.getAttributeItems(col_name, self.reqUuid, marker, limit) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put together the response owner_uri = col_name + '/' + self.reqUuid self_uri = owner_uri + '/attributes' if attr_name is not None: self_uri += '/' + url_escape(attr_name) hostQuery = '' if self.get_query_argument("host", default=None): hostQuery = "?host=" + self.get_query_argument("host") responseItems = [] for item in items: responseItem = {} responseItem['name'] = item['name'] typeItem = item['type'] responseItem['type'] = h5json.getTypeResponse(typeItem) responseItem['shape'] = item['shape'] responseItem['created'] = unixTimeToUTC(item['ctime']) responseItem['lastModified'] = unixTimeToUTC(item['mtime']) if not attr_name or typeItem['class'] == 'H5T_OPAQUE': pass # TODO - send data for H5T_OPAQUE's elif 'value' in item: responseItem['value'] = item['value'] else: responseItem['value'] = None if attr_name is None: # add an href to the attribute responseItem['href'] = self.getHref(self_uri + '/' + url_escape(item['name'])) responseItems.append(responseItem) hrefs.append({'rel': 'self', 'href': self.getHref(self_uri)}) hrefs.append({'rel': 'owner', 'href': self.getHref(owner_uri)}) hrefs.append({'rel': 'root', 'href': self.getHref('/groups/' + rootUUID)}) hrefs.append({'rel': 'home', 'href': self.getHref('')}) if attr_name is None: # specific attribute response response['attributes'] = responseItems else: if len(responseItems) == 0: # should have raised exception earlier log.error("attribute not found: " + attr_name) raise HTTPError(404) responseItem = responseItems[0] for k in responseItem: response[k] = responseItem[k] response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def put(self): self.baseHandler() col_name = self.getRequestCollectionName() attr_name = self.getRequestName() if attr_name is None: msg = "Bad Request: attribute name not supplied" log.info(msg) raise 
HTTPError(400, reason=msg) body = None try: body = json_decode(self.request.body) except ValueError as e: msg = "JSON Parser Error" try: msg += ": " + e.message except AttributeError: pass # no message property self.log.info(msg) raise HTTPError(400, reason=msg) if "type" not in body: self.log.info("Type not supplied") raise HTTPError(400) # missing type dims = () # default as empty tuple (will create a scalar attribute) if "shape" in body: shape = body["shape"] if type(shape) == int: dims = [shape] elif type(shape) == list or type(shape) == tuple: dims = shape # can use as is elif type(shape) in (str, unicode) and shape == 'H5S_NULL': dims = None else: msg = "Bad Request: shape is invalid!" self.log.info(msg) raise HTTPError(400, reason=msg) datatype = body["type"] # validate shape if dims: for extent in dims: if type(extent) != int: msg = "Bad Request: invalid shape type" self.log.info(msg) raise HTTPError(400, reason=msg) if extent < 0: msg = "Bad Request: invalid shape (negative extent)" self.log.info(msg) raise HTTPError(400, reason=msg) # convert list values to tuples (otherwise h5py is not happy) data = None if dims is not None: if "value" not in body: msg = "Bad Request: value not specified" self.log.info(msg) raise HTTPError(400, reason=msg) # missing value value = body["value"] data = self.convertToTuple(value) try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'create') # throws exception is unauthorized attribute_exist = True try: db.getAttributeItem(col_name, self.reqUuid, attr_name) except IOError: attribute_exist = False if attribute_exist: self.log.info("attribute {} already exist".format(attr_name)) raise HTTPError(409, "Attribute already exist") db.createAttribute( col_name, self.reqUuid, attr_name, dims, datatype, data) rootUUID = db.getUUIDByPath('/') except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) response = {} # got everything we need, put together the response root_href = self.getHref('groups/' + rootUUID) owner_href = self.getHref(col_name + '/' + self.reqUuid) self_href = owner_href + '/attributes' if attr_name is not None: self_href = self.getHref(col_name + '/' + self.reqUuid + '/' + attr_name) else: self_href = self.getHref(col_name + '/' + self.reqUuid) hrefs = [] hrefs.append({'rel': 'self', 'href': self_href}) hrefs.append({'rel': 'owner', 'href': owner_href}) hrefs.append({'rel': 'root', 'href': root_href}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) self.set_status(201) # resource created def delete(self): self.baseHandler() col_name = self.getRequestCollectionName() attr_name = self.getRequestName() if attr_name is None: msg = "Bad Request: attribute name not specified" self.log.info(msg) raise HTTPError(400, reason=msg) filePath = self.getFilePath(self.domain) self.isWritable(self.filePath) response = {} hrefs = [] rootUUID = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'delete') # throws exception is unauthorized db.deleteAttribute(col_name, self.reqUuid, attr_name) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put 
together the response root_href = self.getHref('groups/' + rootUUID) owner_href = self.getHref(col_name + '/' + self.reqUuid) self_href = self.getHref(col_name + '/' + self.reqUuid + '/attributes') home_href = self.getHref('') hrefs.append({'rel': 'self', 'href': self_href}) hrefs.append({'rel': 'owner', 'href': owner_href}) hrefs.append({'rel': 'root', 'href': root_href}) hrefs.append({'rel': 'home', 'href': home_href}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) self.log.info("Attribute delete succeeded") class GroupHandler(BaseHandler): def get(self): self.baseHandler() response = {} hrefs = [] links = [] rootUUID = None item = None include_links = self.get_query_argument("include_links", 0) try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized item = db.getGroupItemByUuid(self.reqUuid) if include_links: # TBD: add marker & limit options for pagination links = db.getLinkItems(self.reqUuid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # got everything we need, put together the response hrefs.append({ 'rel': 'self', 'href': self.getHref('groups/' + self.reqUuid) }) hrefs.append({ 'rel': 'links', 'href': self.getHref('groups/' + self.reqUuid + '/links') }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID) }) hrefs.append({ 'rel': 'home', 'href': self.getHref('') }) hrefs.append({ 'rel': 'attributes', 'href': self.getHref('groups/' + self.reqUuid + '/attributes') }) response['id'] = self.reqUuid response['created'] = unixTimeToUTC(item['ctime']) response['lastModified'] = unixTimeToUTC(item['mtime']) response['attributeCount'] = item['attributeCount'] response['linkCount'] = item['linkCount'] response['hrefs'] = hrefs if links: hostQuery = '' if self.get_query_argument("host", default=None): hostQuery = "?host=" + self.get_query_argument("host") response["links"] = [] for item in links: link_item = {} link_item['class'] = item['class'] link_item['title'] = item['title'] link_item['href'] = item['href'] = self.href + '/groups/' + self.reqUuid + '/links/' + self.nameEncode(item['title']) + hostQuery if item['class'] == 'H5L_TYPE_HARD': link_item['id'] = item['id'] link_item['collection'] = item['collection'] link_item['target'] = self.href + '/' + item['collection'] + '/' + item['id'] + hostQuery elif item['class'] == 'H5L_TYPE_SOFT': link_item['h5path'] = item['h5path'] elif item['class'] == 'H5L_TYPE_EXTERNAL': link_item['h5path'] = item['h5path'] link_item['h5domain'] = self.convertExternalPath(item['file']) if link_item['h5domain'].endswith(config.get('domain')): link_item['target'] = self.getExternalHref(link_item['h5domain'], link_item['h5path']) response["links"].append(link_item) self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def delete(self): self.baseHandler() self.isWritable(self.filePath) try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(self.reqUuid, self.userid) self.verifyAcl(acl, 'delete') # throws exception is unauthorized db.deleteObjectByUuid('group', self.reqUuid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) response 
= {} hrefs = [] # write the response hrefs.append({'rel': 'self', 'href': self.getHref('groups')}) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({'rel': 'home', 'href': self.getHref('')}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) class GroupCollectionHandler(BaseHandler): def get(self): self.baseHandler() rootUUID = None # Get optional query parameters limit = self.get_query_argument("Limit", 0) if type(limit) is not int: try: limit = int(limit) except ValueError: log.info("expected int type for limit") raise HTTPError(400) marker = self.get_query_argument("Marker", None) response = {} items = None hrefs = [] try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(rootUUID, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized items = db.getCollection("groups", marker, limit) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # write the response response['groups'] = items hrefs.append({ 'rel': 'self', 'href': self.getHref('groups')}) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({ 'rel': 'home', 'href': self.getHref('')}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def post(self): self.baseHandler() if self.request.path != '/groups': msg = "Method Not Allowed: bad group post request: " + self.request.path self.log.info(msg) raise HTTPError(405, reason=msg) # Method not allowed parent_group_uuid = None link_name = None body = {} if self.request.body: try: body = json_decode(self.request.body) except ValueError as e: msg = "JSON Parser Error: " + e.message self.log.info(msg) raise HTTPError(400, reason=msg) if "link" in body: link_options = body["link"] if "id" not in link_options or "name" not in link_options: msg = "Bad Request: missing link parameter" self.log.info(msg) raise HTTPError(400, reason=msg) parent_group_uuid = link_options["id"] link_name = link_options["name"] self.log.info( "add link to: " + parent_group_uuid + " with name: " + link_name) self.isWritable(self.filePath) try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') current_user_acl = db.getAcl(rootUUID, self.userid) self.verifyAcl(current_user_acl, 'create') # throws exception is unauthorized if parent_group_uuid: # verify no link already exists before creating a new group link_exists = False try: item = db.getLinkItemByUuid(parent_group_uuid, link_name) if item: link_exists = True except IOError: pass # ok, link not found if link_exists: self.log.info("Link already exists") raise HTTPError(409, "Link already exists") grpUUID = db.createGroup() item = db.getGroupItemByUuid(grpUUID) # if link info is provided, link the new group if parent_group_uuid: # link the new dataset db.linkObject(parent_group_uuid, grpUUID, link_name) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) href = self.request.protocol + '://' + self.domain self.set_header('Location', href + '/groups/' + grpUUID) self.set_header('Content-Location', href + '/groups/' + grpUUID) # got everything we need, put together the response response = {} hrefs = [] hrefs.append({ 'rel': 'self', 'href': 
self.getHref('groups/' + grpUUID)}) hrefs.append({ 'rel': 'links', 'href': self.getHref('groups/' + grpUUID + '/links') }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({ 'rel': 'home', 'href': self.getHref('')}) hrefs.append({ 'rel': 'attributes', 'href': self.getHref('groups/' + grpUUID + '/attributes') }) response['id'] = grpUUID response['created'] = unixTimeToUTC(item['ctime']) response['lastModified'] = unixTimeToUTC(item['mtime']) response['attributeCount'] = item['attributeCount'] response['linkCount'] = item['linkCount'] response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) self.set_status(201) # resource created class DatasetCollectionHandler(BaseHandler): def get(self): self.baseHandler() # Get optional query parameters limit = self.get_query_argument("Limit", 0) if type(limit) is not int: try: limit = int(limit) except ValueError: msg = "Bad Request: expected int type for limit" self.log.info(msg) raise HTTPError(400, reason=msg) marker = self.get_query_argument("Marker", None) response = {} hrefs = [] rootUUID = None items = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(rootUUID, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized items = db.getCollection("datasets", marker, limit) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # write the response response['datasets'] = items hrefs.append({'rel': 'self', 'href': self.getHref('datasets')}) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({'rel': 'home', 'href': self.getHref('')}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def post(self): self.baseHandler() if self.request.path != '/datasets': msg = "Method not Allowed: invalid datasets post request" log.info(msg) raise HTTPError(405, reason=msg) # Method not allowed self.isWritable(self.filePath) dims = None group_uuid = None link_name = None body = {} if self.request.body: try: body = json_decode(self.request.body) except ValueError as e: msg = "JSON Parser Error: " + e.message self.log.info(msg) raise HTTPError(400, reason=msg) if "type" not in body: msg = "Bad Request: Type not specified" self.log.info(msg) raise HTTPError(400, reason=msg) # missing type if "shape" in body: shape = body["shape"] if type(shape) == int: dims = [shape] elif type(shape) == list or type(shape) == tuple: dims = shape # can use as is elif type(shape) in (str, unicode) and shape == 'H5S_NULL': dims = None else: msg = "Bad Request: shape is invalid" self.log.info(msg) raise HTTPError(400, reason=msg) else: dims = () # empty tuple if "link" in body: link_options = body["link"] if "id" not in link_options or "name" not in link_options: msg = "Bad Request: No 'name' or 'id' not specified" self.log.info(msg) raise HTTPError(400, reason=msg) group_uuid = link_options["id"] link_name = link_options["name"] self.log.info("add link to: " + group_uuid + " with name: " + link_name) datatype = body["type"] maxdims = None if "maxdims" in body: maxdims = body["maxdims"] if type(maxdims) == int: dim1 = maxdims maxdims = [dim1] elif type(maxdims) == list or type(maxdims) == tuple: pass # can use as is else: msg = "Bad Request: maxdims is invalid" log.info(msg) raise HTTPError(400, reason=msg) # validate 
shape if dims: for extent in dims: if type(extent) != int: msg = "Bad Request: Invalid shape type" self.log.info(msg) raise HTTPError(400, reason=msg) if extent < 0: msg = "Bad Request: shape dimension is negative" self.log.info("msg") raise HTTPError(400, reason=msg) if maxdims: if dims is None: # can't use maxdims with null_space dataset msg = "Bad Request: maxdims not valid for H5S_NULL dataspace" self.log.info(msg) raise HTTPError(400, reason=msg) if len(maxdims) != len(dims): msg = "Bad Request: maxdims array length must equal shape array length" self.log.info(msg) raise HTTPError(400, reason=msg) for i in range(len(dims)): maxextent = maxdims[i] if maxextent != 0 and maxextent < dims[i]: msg = "Bad Request: maxdims extent can't be smaller than shape extent" self.log.info(msg) raise HTTPError(400, reason=msg) if maxextent == 0: maxdims[i] = None # this indicates unlimited creationProps = None if "creationProperties" in body: creationProps = body["creationProperties"] item = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(rootUUID, self.userid) self.verifyAcl(acl, 'create') # throws exception is unauthorized # verify the link perm as well if group_uuid and group_uuid != rootUUID: acl = db.getAcl(group_uuid, self.userid) self.verifyAcl(acl, 'create') # throws exception is unauthorized # verify the link name doesn't already exists if group_uuid: # verify no link already exists before creating a new group link_exists = False try: item = db.getLinkItemByUuid(group_uuid, link_name) if item: link_exists = True except IOError: pass # ok, link not found if link_exists: self.log.info("Link already exists") raise HTTPError(409, "Link already exists") item = db.createDataset(datatype, dims, maxdims, creation_props=creationProps) if group_uuid: # link the new dataset db.linkObject(group_uuid, item['id'], link_name) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) response = {} # got everything we need, put together the response hrefs = [] hrefs.append({ 'rel': 'self', 'href': self.getHref('datasets/' + item['id']) }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID) }) hrefs.append({ 'rel': 'attributes', 'href': self.getHref('datasets/' + item['id'] + '/attributes') }) hrefs.append({ 'rel': 'value', 'href': self.getHref('datasets/' + item['id'] + '/value')}) response['id'] = item['id'] response['attributeCount'] = item['attributeCount'] response['hrefs'] = hrefs response['created'] = unixTimeToUTC(item['ctime']) response['lastModified'] = unixTimeToUTC(item['mtime']) self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) self.set_status(201) # resource created class TypeCollectionHandler(BaseHandler): def get(self): self.baseHandler() # Get optional query parameters limit = self.get_query_argument("Limit", 0) if type(limit) is not int: try: limit = int(limit) except ValueError: msg = "Bad Request: expected int type for Limit" log.info(msg) raise HTTPError(400, reason=msg) marker = self.get_query_argument("Marker", None) response = {} hrefs = [] rootUUID = None items = None try: with Hdf5db(self.filePath) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(rootUUID, self.userid) self.verifyAcl(acl, 'read') # throws exception is unauthorized items = db.getCollection("datatypes", marker, limit) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + 
e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) # write the response response['datatypes'] = items hrefs.append({ 'rel': 'self', 'href': self.getHref('datatypes') }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({'rel': 'home', 'href': self.getHref('')}) response['hrefs'] = hrefs self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def post(self): self.baseHandler() if self.request.path != '/datatypes': msg = "Method not Allowed: invalid URI" log.info(msg) raise HTTPError(405, reason=msg) # Method not allowed self.isWritable(self.filePath) body = None try: body = json_decode(self.request.body) except ValueError as e: msg = "JSON Parser Error: " + e.message self.log.info(msg) raise HTTPError(400, reason=msg) parent_group_uuid = None link_name = None if "type" not in body: msg = "Type not specified" self.log.info(msg) raise HTTPError(400, reason=msg) # missing type if "link" in body: link_options = body["link"] if "id" not in link_options or "name" not in link_options: msg = "Bad Request: missing link parameter" self.log.info(msg) raise HTTPError(400, reason=msg) parent_group_uuid = link_options["id"] link_name = link_options["name"] self.log.info( "add link to: " + parent_group_uuid + " with name: " + link_name) datatype = body["type"] item = None rootUUID = None try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(rootUUID, self.userid) self.verifyAcl(acl, 'create') # throws exception is unauthorized if parent_group_uuid: # verify no link already exists before creating a new group link_exists = False try: item = db.getLinkItemByUuid(parent_group_uuid, link_name) if item: link_exists = True except IOError: pass # ok, link not found if link_exists: self.log.info("Link already exists") raise HTTPError(409, "Link already exists") item = db.createCommittedType(datatype) # if link info is provided, link the new group if parent_group_uuid: # link the new dataset db.linkObject(parent_group_uuid, item['id'], link_name) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) response = {} # got everything we need, put together the response hrefs = [] hrefs.append({ 'rel': 'self', 'href': self.getHref('datatypes/' + item['id']) }) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) hrefs.append({ 'rel': 'attributes', 'href': self.getHref('datatypes/' + item['id'] + '/attributes') }) response['id'] = item['id'] response['attributeCount'] = 0 response['hrefs'] = hrefs response['created'] = unixTimeToUTC(item['ctime']) response['lastModified'] = unixTimeToUTC(item['mtime']) self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) self.set_status(201) # resource created class RootHandler(BaseHandler): def getRootResponse(self, filePath): acl = None # used by GET / and PUT / try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(rootUUID, self.userid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) self.verifyAcl(acl, 'read') # throws exception is unauthorized # generate response hrefs = [] hrefs.append({ 'rel': 'self', 'href': self.getHref('')}) hrefs.append({ 'rel': 'database', 'href': self.getHref('datasets')}) 
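        # Example of the assembled hrefs list (hypothetical URLs, assuming the
        # default 'hdfgroup.org' domain from config.py); the remaining rel
        # entries are appended just below:
        #   [{"rel": "self",      "href": "http://hdfgroup.org/"},
        #    {"rel": "database",  "href": "http://hdfgroup.org/datasets"},
        #    {"rel": "groupbase", "href": "http://hdfgroup.org/groups"},
        #    {"rel": "typebase",  "href": "http://hdfgroup.org/datatypes"},
        #    {"rel": "root",      "href": "http://hdfgroup.org/groups/<root-uuid>"}]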
hrefs.append({'rel': 'groupbase', 'href': self.getHref('groups')}) hrefs.append({ 'rel': 'typebase', 'href': self.getHref('datatypes')}) hrefs.append({ 'rel': 'root', 'href': self.getHref('groups/' + rootUUID)}) response = {} response['created'] = unixTimeToUTC(op.getctime(filePath)) response['lastModified'] = unixTimeToUTC(op.getmtime(filePath)) response['root'] = rootUUID response['hrefs'] = hrefs return response def get(self): self.baseHandler() """ self.log.info("header keys...") for k in self.request.headers.keys(): self.log.info("header[" + k + "]: " + self.request.headers[k]) self.log.info('remote_ip: ' + self.request.remote_ip) """ try: response = self.getRootResponse(self.filePath) except HTTPError as e: if e.status_code == 401: # no user provied, just return 401 response return raise e # re-throw the exception root_uuid = response['root'] self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def put(self): self.baseHandler(checkExists=False) new_domain_policy = config.get("new_domain_policy") if new_domain_policy: # should be one of ANON, AUTH, NEVER if new_domain_policy.upper() == "NEVER": msg = "Forbidden: new domains not allowed" self.log.info(msg) raise HTTPError(403, reason=msg) elif new_domain_policy.upper() == "AUTH" and self.userid <= 0: msg = "Unauthorized" self.log.info(msg) raise HTTPError(401, reason=msg) self.log.info("filePath: " + self.filePath) if self.filePath is not None and fileUtil.isFile(self.filePath): # the file already exists msg = "Conflict: resource exists: " + self.filePath self.log.info(msg) raise HTTPError(409, reason=msg) # Conflict - is this the correct code? if self.filePath is not None and self.isTocFilePath(self.filePath): msg = "Forbidden: invalid resource" self.log.info(msg) raise HTTPError(403, reason=msg) # Forbidden - TOC file if self.filePath is None: msg = "domain not valid" self.log.info(msg) raise HTTPError(400, reason=msg) self.log.info("FilePath: " + self.filePath) # create directories as needed fileUtil.makeDirs(op.dirname(self.filePath)) self.log.info("creating file: [" + self.filePath + "]") try: Hdf5db.createHDF5File(self.filePath) except IOError as e: self.log.info( "IOError creating new HDF5 file: " + str(e.errno) + " " + e.strerror) raise HTTPError( 500, "Unexpected error: unable to create collection") response = self.getRootResponse(self.filePath) try: tocUtil.addTocEntry(self.domain, self.filePath, userid=self.userid) except IOError as e: self.log.info("IOError: " + str(e.errno) + " " + e.strerror) status = errNoToHttpStatus(e.errno) raise HTTPError(status, reason=e.strerror) self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) self.set_status(201) # resource created def delete(self): self.baseHandler() self.isWritable(self.filePath) if not op.isfile(self.filePath): # file not there msg = "Not found: resource does not exist" self.log.info(msg) raise HTTPError(404, reason=msg) # Not found # don't use os.access since it will always return OK if uid is root if not os.stat(self.filePath).st_mode & 0o200: # file is read-only msg = "Forbidden: Resource is read-only" self.log.info(msg) raise HTTPError(403, reason=msg) # Forbidden if self.isTocFilePath(self.filePath): msg = "Forbidden: Resource is read-only" self.log.info(msg) raise HTTPError(403, reason=msg) # Forbidden - TOC file try: with Hdf5db(self.filePath, app_logger=self.log) as db: rootUUID = db.getUUIDByPath('/') acl = db.getAcl(rootUUID, self.userid) self.verifyAcl(acl, 'delete') # throws exception is 
unauthorized
        except IOError as e:
            self.log.info("IOError: " + str(e.errno) + " " + e.strerror)
            status = errNoToHttpStatus(e.errno)
            raise HTTPError(status, reason=e.strerror)
        try:
            tocUtil.removeTocEntry(self.domain, self.filePath, userid=self.userid)
        except IOError as ioe:
            # This exception may happen if the file has been imported directly
            # after toc creation
            self.log.warn("IOError removing toc entry")
        try:
            os.remove(self.filePath)
        except IOError as ioe:
            self.log.info(
                "IOError deleting HDF5 file: " + str(ioe.errno) + " " + ioe.strerror)
            raise HTTPError(
                500, "Unexpected error: unable to delete collection")


class InfoHandler(RequestHandler):
    def get(self):
        log = logging.getLogger("h5serv")
        log.info('InfoHandler.get ' + self.request.host)
        log.info('remote_ip: ' + self.request.remote_ip)
        greeting = "Welcome to h5serv!"
        about = "h5serv is a webservice for HDF5 data"
        doc_href = "http://h5serv.readthedocs.org"
        h5serv_version = "0.2"
        response = Hdf5db.getVersionInfo()
        response['name'] = "h5serv"
        response['greeting'] = greeting
        response['about'] = about
        response['documentation'] = doc_href
        response['h5serv_version'] = h5serv_version
        accept_type = ''
        if 'accept' in self.request.headers:
            accept = self.request.headers['accept']
            # just extract the first type and not worry about q values for now...
            accept_values = accept.split(',')
            accept_types = accept_values[0].split(';')
            accept_type = accept_types[0]
        # print 'accept_type:', accept_type
        if accept_type == 'text/html':
            self.set_header('Content-Type', 'text/html')
            htmlText = "<html><body><h1>" + response['greeting'] + "</h1>"
            htmlText += "<h2>" + response['about'] + "</h2>"
            htmlText += "<h2>Documentation: <a href=" + doc_href + ">h5serv documentation</a></h2>"
            htmlText += "<h2>server version: " + response['h5serv_version'] + "</h2>"
            htmlText += "<h2>h5py version: " + response['h5py_version'] + "</h2>"
            htmlText += "<h2>hdf5 version: " + response['hdf5_version'] + "</h2>"
            htmlText += "</body></html>
" htmlText += "" self.write(htmlText) else: self.set_header('Content-Type', 'application/json') self.write(json_encode(response)) def sig_handler(sig, frame): log = logging.getLogger("h5serv") log.warning('Caught signal: %s', sig) IOLoop.instance().add_callback(shutdown) def shutdown(): log = logging.getLogger("h5serv") MAX_WAIT_SECONDS_BEFORE_SHUTDOWN = 2 log.info('Stopping http server') log.info( 'Will shutdown in %s seconds ...', MAX_WAIT_SECONDS_BEFORE_SHUTDOWN) io_loop = tornado.ioloop.IOLoop.instance() deadline = time.time() + MAX_WAIT_SECONDS_BEFORE_SHUTDOWN def stop_loop(): now = time.time() if now < deadline: io_loop.add_timeout(now + 1, stop_loop) else: io_loop.stop() log.info('Shutdown') stop_loop() log.info("closing db") def make_app(): static_url = config.get('static_url') static_path = config.get('static_path') settings = {} config_debug = config.get('debug') if type(config_debug) is str: if config_debug[0] in ('T', 't'): settings["debug"] = True else: settings["debug"] = False else: settings["debug"] = config_debug favicon_path = "favicon.ico" print("favicon_path:", favicon_path) print('Static content in the path:' + static_path + " will be displayed via the url: " + static_url) print('isdebug:', settings['debug']) app = Application([ url(r"/datasets/.*/type", DatatypeHandler), url(r"/datasets/.*/shape", ShapeHandler), url(r"/datasets/.*/attributes/.*", AttributeHandler), url(r"/datasets/.*/acls/.*", AclHandler), url(r"/datasets/.*/acls", AclHandler), url(r"/groups/.*/attributes/.*", AttributeHandler), url(r"/groups/.*/acls/.*", AclHandler), url(r"/groups/.*/acls", AclHandler), url(r"/datatypes/.*/attributes/.*", AttributeHandler), url(r"/datasets/.*/attributes", AttributeHandler), url(r"/groups/.*/attributes", AttributeHandler), url(r"/datatypes/.*/attributes", AttributeHandler), url(r"/datatypes/.*/acls/.*", AclHandler), url(r"/datatypes/.*/acls", AclHandler), url(r"/datatypes/.*", TypeHandler), url(r"/datatypes/", TypeHandler), url(r"/datatypes\?.*", TypeCollectionHandler), url(r"/datatypes", TypeCollectionHandler), url(r"/datasets/.*/value", ValueHandler), url(r"/datasets/.*/value\?.*", ValueHandler), url(r"/datasets/.*", DatasetHandler), url(r"/datasets/", DatasetHandler), url(r"/datasets\?.*", DatasetCollectionHandler), url(r"/datasets", DatasetCollectionHandler), url(r"/groups/.*/links/.*", LinkHandler), url(r"/groups/.*/links\?.*", LinkCollectionHandler), url(r"/groups/.*/links", LinkCollectionHandler), url(r"/groups/", GroupHandler), url(r"/groups/.*", GroupHandler), url(r"/groups\?.*", GroupCollectionHandler), url(r"/groups", GroupCollectionHandler), url(r"/info", InfoHandler), url(static_url, tornado.web.StaticFileHandler, {'path': static_path}), url(r"/(favicon\.ico)", tornado.web.StaticFileHandler, {'path': favicon_path}), url(r"/acls/.*", AclHandler), url(r"/acls", AclHandler), url(r"/", RootHandler), url(r".*", DefaultHandler) ], **settings) return app # # update TOC when files are added via some out of process method # (e.g. 
scp to the server) # def updateToc(filepath): log = logging.getLogger("h5serv") log.info("updateToc(%s)", filepath) if os.name == 'nt': filepath = filepath.replace('\\', '/') # match HDF5 convention hdf5_ext = config.get('hdf5_ext') if not filepath.endswith(hdf5_ext): log.info("ignoring non-HDF5 file added to data directory") return if filepath.endswith(config.get('toc_name')): log.info("ignore toc file creation") return base_domain = fileUtil.getDomain(filepath) log.info("base domain: " + base_domain) try: if fileUtil.isFile(filepath): tocUtil.addTocEntry(base_domain, filepath) else: tocUtil.removeTocEntry(base_domain, filepath) except IOError as e: log.info("periodic callback: unable to update toc") # # Background processing callback # def periodicCallback(): # callback for background processing log = logging.getLogger("h5serv") #log.info("periodicCallback") # check event queue while not event_queue.empty(): item = event_queue.get() log.info("process_queue, got: %s", item) # just add file events for now updateToc(item) def main(): # create logger log = logging.getLogger("h5serv") log_file = config.get("log_file") log_level = config.get("log_level") # add file handler if given in config if log_file: print("Using logfile: ", log_file) # set daily rotating log handler = logging.handlers.TimedRotatingFileHandler( log_file, when="midnight", interval=1, backupCount=0, utc=True) # add formatter to handler # create formatter formatter = logging.Formatter( "%(asctime)s:%(levelname)s:%(filename)s:%(lineno)d::%(message)s") handler.setFormatter(formatter) # add handler to logger log.addHandler(handler) else: print("No logfile") # add default logger (to stdout) handler = logging.StreamHandler(sys.stdout) # create formatter formatter = logging.Formatter( "%(levelname)s:%(filename)s:%(lineno)d::%(message)s") handler.setFormatter(formatter) log.addHandler(handler) log.propagate = False # otherwise, we'll get repeated lines password_uri = "none" x = "password_uri" if x.upper() in os.environ: password_uri = os.environ[x.upper()] password_uri = config.get("password_uri") print("password_uri config:", password_uri) # log levels: ERROR, WARNING, INFO, DEBUG, or NOTSET if not log_level or log_level == "NOTSET": log.setLevel(logging.NOTSET) if log_level == "ERROR": print("Setting log level to: ERROR") log.setLevel(logging.ERROR) elif log_level == "WARNING": print("Setting log level to: WARNING") log.setLevel(logging.WARNING) elif log_level == "INFO": print("Setting log level to: INFO") log.setLevel(logging.INFO) elif log_level == "DEBUG": print("Setting log level to: DEBUG") log.setLevel(logging.DEBUG) else: print("No logging!") log.setLevel(logging.NOTSET) log.info("log test") app = make_app() domain = config.get("domain") print("domain:", domain) ssl_cert = config.get('ssl_cert') if ssl_cert: print("ssl_cert:", ssl_cert) ssl_key = config.get('ssl_key') if ssl_key: print("ssl_key:", ssl_key) ssl_port = config.get('ssl_port') if ssl_port: print("ssl_port:", ssl_port) # # Setup listener for changes in the file system # data_path = config.get('datapath') global event_queue event_queue = Queue() # implemented in h5watchdog.py background_timeout = int(config.get("background_timeout")) if background_timeout: print("Setting watchdog on: ", data_path) h5observe(data_path, event_queue) tornado.ioloop.PeriodicCallback(periodicCallback, 1000).start() # # Insantiate auth class # global auth auth = getAuthClient() if ssl_cert and op.isfile(ssl_cert) and ssl_key and op.isfile(ssl_key) and ssl_port: ssl_cert_pwd = 
config.get('ssl_cert_pwd') ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ssl_ctx.load_cert_chain(ssl_cert, keyfile=ssl_key, password=ssl_cert_pwd) ssl_server = tornado.httpserver.HTTPServer(app, ssl_options=ssl_ctx) ssl_server.listen(ssl_port) msg = "Running SSL on port: " + str(ssl_port) + " (SSL)" else: server = tornado.httpserver.HTTPServer(app, xheaders=True) port = int(config.get('port')) server.listen(port) msg = "Starting event loop on port: " + str(port) signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) log.info("INITIALIZING...") log.info(msg) print(msg) IOLoop.current().start() ================================================ FILE: h5serv/authFile.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import six if six.PY3: unicode = str import os.path as op import time import logging import h5py from tornado.web import HTTPError from h5serv.passwordUtil import encrypt_pwd, to_string cache_expire_time = 10.0 # ten seconds class AuthClient(object): def __init__(self, filepath): self.log = logging.getLogger("h5serv") self.log.info("AuthFile class init(" + filepath + ")") self.filepath = filepath self.username_cache = {} self.userid_cache = {} """ Password util helper functions """ def getUserInfo(self, user_name): """ getUserInfo: return user data """ userid = None if not user_name: return None self.log.info("Auth.getUserInfo: [" + to_string(user_name) + "]") if user_name in self.username_cache: item = self.username_cache[user_name] if item['timestamp'] - time.time() > cache_expire_time: self.log.info("Auth-cache expired") # delete the entry and re-fetch below del self.username_cache[user_name] else: self.log.info("Auth-got cache value") data = item['data'] return data # verify file exists and is writable if not op.isfile(self.filepath): self.log.error("password file is missing") raise HTTPError(500, message="bad configuration") if not h5py.is_hdf5(self.filepath): self.log.error("password file is invalid") raise HTTPError(500, message="bad configuration") with h5py.File(self.filepath, 'r') as f: if user_name not in f.attrs: return None data = f.attrs[user_name] # add to cache self.log.info("Auth - added to cache") item = {} timestamp = time.time() item['timestamp'] = timestamp item['data'] = data self.username_cache[user_name] = item item = {} item['timestamp'] = timestamp item['username'] = user_name userid = data['userid'] self.userid_cache[userid] = item return data def getUserId(self, user_name): """ getUserId: get id for given user name """ self.log.info("Auth.getUserId: [" + user_name + "]") data = self.getUserInfo(user_name) userid = None if data is not None: userid = data['userid'] return userid def getUserName(self, userid): """ getUserName: return user name for given user id #todo: may need to be optimized to support large number of users """ self.log.info("Auth.getUserName: [" + str(userid) + "]") if userid in 
self.userid_cache: item = self.userid_cache[userid] if item['timestamp'] - time.time() > cache_expire_time: # delete the entry and re-fetch below self.log.info("Auth-cache expired") del self.userid_cache[userid] else: self.log.info("Auth-got cache value") username = item['username'] return to_string(username) # verify file exists and is writable if not op.isfile(self.filepath): self.log.error("password file is missing") raise HTTPError(500, message="bad configuration") if not h5py.is_hdf5(self.filepath): self.log.error("password file is invalid") raise HTTPError(500, message="bad configuration") user_name = None with h5py.File(self.filepath, 'r') as f: for attr_name in f.attrs: attr = f.attrs[attr_name] if attr['userid'] == userid: user_name = to_string(attr_name) self.log.info("Auth-add to cachecache") item = {} item['timestamp'] = time.time() item['username'] = user_name self.userid_cache[userid] = item return user_name def validateUserPassword(self, user_name, password): """ validateUserPassword: verify user and password. throws exception if not valid """ if not user_name: self.log.info('validateUserPassword - null user') raise HTTPError(401, message="provide user name and password") if not password: self.log.info('isPasswordValid - null password') raise HTTPError(401, message="provide password") data = self.getUserInfo(user_name) if data is None: self.log.info("user not found") raise HTTPError(401, message="provide user and password") userid = None if data['pwd'] == encrypt_pwd(password): self.log.info("user password validated") userid = data['userid'] else: self.log.info("user password is not valid") raise HTTPError(401, message="invalid user name/password") return userid ================================================ FILE: h5serv/authMongo.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## import six if six.PY3: unicode = str import os.path as op import time import hashlib import logging from pymongo import MongoClient from tornado.web import HTTPError import h5serv.config as config from h5serv.passwordUtil import encrypt_pwd, to_string, to_bytes cache_expire_time = 10.0 # ten seconds class AuthClient(object): def __init__(self, mongouri): self.log = logging.getLogger("h5serv") self.log.info("AuthMongo class init(" + mongouri + ")") self.client = MongoClient(mongouri) db_name = config.get('mongo_dbname') self.db = self.client[db_name] self.username_cache = {} self.userid_cache = {} """ Password util helper functions """ def getUserInfo(self, user_name): """ getUserInfo: return user data """ userid = None if not user_name: return None self.log.info("Auth.getUserInfo: [" + to_string(user_name) + "]") if user_name in self.username_cache: item = self.username_cache[user_name] if item['timestamp'] - time.time() > cache_expire_time: self.log.info("Auth-cache expired") # delete the entry and re-fetch below del self.username_cache[user_name] else: self.log.info("Auth-got cache value") data = item['data'] return data # mongodb lookup self.log.info("mongo query") users = self.db["users"] data = users.find_one({"username": to_string(user_name)}) if data is None: return None # add to cache self.log.info("Auth - added to cache") item = {} timestamp = time.time() item['timestamp'] = timestamp item['data'] = data self.username_cache[user_name] = item item = {} item['timestamp'] = timestamp item['username'] = user_name userid = data['userid'] self.userid_cache[userid] = item return data def getUserId(self, user_name): """ getUserId: get id for given user name """ self.log.info("Auth.getUserId: [" + user_name + "]") data = self.getUserInfo(user_name) userid = None if data is not None: userid = data['userid'] return userid def getUserName(self, userid): """ getUserName: return user name for given user id #todo: may need to be optimized to support large number of users """ self.log.info("Auth.getUserName: [" + str(userid) + "]") if userid in self.userid_cache: item = self.userid_cache[userid] if item['timestamp'] - time.time() > cache_expire_time: # delete the entry and re-fetch below self.log.info("Auth-cache expired") del self.userid_cache[userid] else: self.log.info("Auth-got cache value") username = item['username'] return to_string(username) # mongodb lookup users = self.db["users"] data = users.find_one({"userid": userid}) if data is None: return None user_name = data["username"] self.log.info("Auth-add to cachecache") item = {} item['timestamp'] = time.time() item['username'] = user_name self.userid_cache[userid] = item return user_name def validateUserPassword(self, user_name, password): """ validateUserPassword: verify user and password. 
throws exception if not valid """ if not user_name: self.log.info('validateUserPassword - null user') raise HTTPError(401, message="provide user name and password") if not password: self.log.info('isPasswordValid - null password') raise HTTPError(401, message="provide password") data = self.getUserInfo(user_name) if data is None: self.log.info("user not found") raise HTTPError(401, message="provide user and password") userid = None saved_password = to_bytes(data['password']) if saved_password == encrypt_pwd(password): self.log.info("user password validated") userid = data['userid'] else: self.log.info("user password is not valid") raise HTTPError(401, message="invalid user name/password") return userid ================================================ FILE: h5serv/config.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import os import sys __all__ = ['get', 'update'] _cfgDefault = { 'port': 5000, 'debug': True, 'datapath': 'data', 'public_dir': ['public', 'test'], 'domain': 'hdfgroup.org', 'hdf5_ext': '.h5', 'toc_name': '.toc.h5', 'home_dir': 'home', 'ssl_port': 6050, 'ssl_cert': '', # certs/data.hdfgroup.org.crt', # add relative path to cert for SSL 'ssl_key': '', # certs/data.hdfgroup.org.key', # add relative path to cert key for SSL 'ssl_cert_pwd': '', 'password_uri': 'util/admin/passwd.h5', #'password_uri': 'mongodb://mongo:27017', 'mongo_dbname': 'hdfdevtest', 'static_url': r'/views/(.*)', 'static_path': 'static', 'cors_domain': '*', # set to None to disallow CORS (cross-origin resource sharing) 'log_file': 'h5serv.log', 'log_level': 'INFO', # ERROR, WARNING, INFO, DEBUG, or NOTSET, 'background_timeout': 1000, # (ms) set to 0 to disable background processing 'new_domain_policy': 'ANON', # Ability to create domains (files) on serv: ANON - anonymous users ok, AUTH - only authenticated, NEVER - never allow 'allow_noauth': True # Allow anonymous requests (i.e. without auth header) } def get(x): # see if there is a command-line override option = '--'+x+'=' val = None for i in range(1, len(sys.argv)): #print i, sys.argv[i] if sys.argv[i].startswith(option): # found an override arg = sys.argv[i] val = arg[len(option):] # return text after option string # see if there are an environment variable override if val is None and x.upper() in os.environ: val = os.environ[x.upper()] # if no command line or env override, just get the cfg value if val is None and x in _cfgDefault: val = _cfgDefault[x] if isinstance(val, str): # convert True/False strings to booleans if val.upper() in ("T", "TRUE"): val = True elif val.upper() in ("F", "FALSE"): val = False return val def update(d): _cfgDefault.update(d) ================================================ FILE: h5serv/fileUtil.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. 
# # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## """File util helper functions (primarily from mapping files to domains and vice-versa). """ import os import os.path as op import logging from tornado.web import HTTPError from h5py import is_hdf5 import h5serv.config as config from h5serv.passwordUtil import getAuthClient def getFileModCreateTimes(filePath): (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(filePath) return (mtime, ctime) def isIPAddress(s): """Return True if the string looks like an IP address: n.n.n.n where n is between 0 and 255 """ parts = s.split('.') if len(parts) == 1: # treat as IP address for names like "localhost" or other one-word names # that may get mapped to IP address via /etc/hosts entries return True if len(parts) != 4: return False for part in parts: try: n = int(part) if n < 0 or n > 255: return False except ValueError: return False return True # Convert windows style path names to posxipaths # # todo: any edge cases this doesn't handle? def posixpath(filepath): if os.name == 'nt': pp = filepath.replace('\\', '/') else: pp = filepath return pp # Join to pathnames and convert to posix style # # todo: any edge cases this doesn't handle? def join(path, paths): pp = op.join(path, paths) if os.name == 'nt': pp = posixpath(pp) return pp def getFilePath(host_value, auth=None): # logging.info('getFilePath[' + host_value + ']') # strip off port specifier (if present) npos = host_value.rfind(':') if npos > 0: host = host_value[:npos] else: host = host_value topdomain = config.get('domain') # check to see if this is an ip address if isIPAddress(host): host = topdomain # use topdomain if host.lower() == topdomain: # if host is the same as topdomain, return toc path # filePath = getTocFilePath() filePath = config.get('datapath') filePath = join(filePath, config.get('toc_name') ) return filePath print("host:", host, "topdomain:", topdomain) if len(host) <= len(topdomain) or host[-len(topdomain):].lower() != topdomain: msg = "top-level domain is not valid" print(msg) raise HTTPError(403, message=msg) if host[-(len(topdomain) + 1)] != '.': # there needs to be a dot separator raise HTTPError(400, message='domain name is not valid') host = host[:-(len(topdomain)+1)] # strip off top domain part if len(host) == 0 or host[0] == '.' or host[-1] == '.': # needs a least one character (which can't be '.', or have '.' 
as first or last char) raise HTTPError(400, message='domain name is not valid') dns_path = host.split('.') dns_path.reverse() # flip to filesystem ordering filePath = config.get('datapath') num_parts = 0 for field in dns_path: if len(field) == 0: raise HTTPError(400) # Bad syntax filePath = join(filePath, field) num_parts += 1 # check to see if this is the user's home domain if num_parts == 2 and dns_path[0] == config.get('home_dir'): if auth is None: auth = getAuthClient user_info = auth.getUserInfo(dns_path[1]) if user_info is None: raise HTTPError(404) # not found makeDirs(filePath) # add user directory if it doesn't exist filePath = join(filePath, config.get('toc_name') ) else: filePath += config.get('hdf5_ext') # add extension #print('getFilePath[' + host + '] -> "' + filePath + '"') return filePath # # Return filepath to TOC file - either the public toc file or the per # user TOC file (if the dns path includes the "home" directory). # For the later, method will throw 404 if the user is not registered. # def getTocFilePathForDomain(host_value, auth=None): """ Return toc file path for given domain value. Will return path "../data/.toc.h5" for public domains or "../data/home//.toc.h5" for user domains. """ # logging.info('getFilePath[' + host_value + ']') # strip off port specifier (if present) npos = host_value.rfind(':') if npos > 0: host = host_value[:npos] else: host = host_value topdomain = config.get('domain') # check to see if this is an ip address if isIPAddress(host): host = topdomain # use topdomain if host.lower() == topdomain: # if host is the same as topdomain, return toc path # filePath = getTocFilePath() filePath = config.get('datapath') filePath = join(filePath, config.get('toc_name') ) return filePath if len(host) <= len(topdomain) or host[-len(topdomain):].lower() != topdomain: host = topdomain # use topdomain else: if host[-(len(topdomain) + 1)] != '.': # there needs to be a dot separator raise HTTPError(400, message='domain name is not valid') host = host[:-(len(topdomain)+1)] # strip off top domain part if len(host) == 0 or host[0] == '.' or host[-1] == '.': # needs a least one character (which can't be '.', or have '.' as first or last char) raise HTTPError(400, message='domain name is not valid') dns_path = host.split('.') dns_path.reverse() # flip to filesystem ordering filePath = config.get('datapath') if dns_path[0] == config.get('home_dir'): filePath = join(filePath, config.get('home_dir')) filePath = join(filePath, dns_path[1]) if auth is None: auth = getAuthClient() user_info = auth.getUserInfo(dns_path[1]) if user_info is None: raise HTTPError(404) # not found makeDirs(filePath) # add user directory if it doesn't exist filePath = join(filePath, config.get('toc_name')) #print("return user toc filepath") else: # not home dir, just return top-level toc filePath = join(filePath, config.get('toc_name')) #print("return default toc filepath") return filePath # # If the filePath passed references the user's home directory, return a path relative # to the base location of the user's toc file. 
Otherwise returns the path relative to # the base data directory # def getUserFilePath(file_path): data_path = config.get('datapath') file_path = file_path[len(data_path):] # strip off base data path if len(file_path) > 1 and file_path[0] == '/': file_path = file_path[1:] # don't include first slash if preseent- messes up the split path_names = file_path.split('/') if path_names[0] == config.get('home_dir') and len(path_names) > 1: # return a path relative to user's base dir file_path = '/' path_names = path_names[2:] # skip home, userid for path_name in path_names: file_path = op.join(file_path, path_name) return file_path def getDomain(file_path, base_domain=None): # Get domain given a file path data_path = op.normpath(config.get('datapath')) # base path for data directory data_path = posixpath(data_path) file_path = posixpath(file_path) hdf5_ext = config.get("hdf5_ext") if op.isabs(file_path): # compare with absolute path if we're given an absolute path data_path = posixpath(op.abspath(data_path)) if file_path == data_path: return config.get('domain') if file_path.endswith(hdf5_ext): domain = op.basename(file_path)[:-(len(hdf5_ext))] else: domain = op.basename(file_path) # replace dots with %2E in basename domain = domain.replace('.', '%2E') dirname = op.dirname(file_path) while len(dirname) > 1 and dirname != data_path: domain += '.' domain += op.basename(dirname) if len(op.dirname(dirname)) >= len(dirname): break dirname = op.dirname(dirname) domain += '.' if base_domain: domain += base_domain else: domain += config.get('domain') return domain def verifyFile(filePath, writable=False): """ verify given file exists and is an HDF5 file """ log = logging.getLogger("h5serv") log.info("verifyFile('" + filePath + "', " + str(writable) + ")") if not op.isfile(filePath): log.info("not a file") raise HTTPError(404) # not found if not is_hdf5(filePath): log.info('this is not a hdf5 file!') raise HTTPError(404) if writable and not os.access(filePath, os.W_OK): log.warning('attempting update of read-only file') raise HTTPError(403) def isFile(filePath): """ verify given file exists and is an HDF5 file """ if not op.isfile(filePath): return False if not is_hdf5(filePath): # logging.warning('this is not a hdf5 file!') return False return True def makeDirs(filePath): # Make any directories along path as needed if len(filePath) == 0 or op.isdir(filePath): return dirname = op.dirname(filePath) if len(dirname) >= len(filePath): return makeDirs(dirname) # recursive call os.mkdir(filePath) # should succeed since parent directory is created ================================================ FILE: h5serv/h5watchdog.py ================================================ import sys import time import os.path as op import logging from watchdog.observers import Observer from watchdog.events import FileSystemEventHandler class H5EventHandler(FileSystemEventHandler): """Put create events inteo queue.""" def __init__(self, event_queue): self.log = logging.getLogger("h5serv") self.event_queue = event_queue def on_moved(self, event): super(H5EventHandler, self).on_moved(event) what = 'directory' if event.is_directory else 'file' self.log.info("H5EventHandler -- Moved %s: from %s to %s", what, event.src_path, event.dest_path) def on_created(self, event): super(H5EventHandler, self).on_created(event) what = 'directory' if event.is_directory else 'file' self.log.info("H5EventHandler -- Created %s: %s", what, event.src_path) # ignore directories if not op.isdir(event.src_path): self.event_queue.put(event.src_path) def 
on_deleted(self, event): super(H5EventHandler, self).on_deleted(event) what = 'directory' if event.is_directory else 'file' self.log.info("H5EventHandler -- Deleted %s: %s", what, event.src_path) if not op.isdir(event.src_path): self.event_queue.put(event.src_path) def on_modified(self, event): super(H5EventHandler, self).on_modified(event) what = 'directory' if event.is_directory else 'file' self.log.info("H5EventHandler -- Modified %s: %s", what, event.src_path) # # Watch file system at location data_path and add any file create events to the event_queue # Call at application startup # def h5observe(data_path, event_queue): event_handler = H5EventHandler(event_queue) observer = Observer() observer.schedule(event_handler, data_path, recursive=True) observer.start() ================================================ FILE: h5serv/httpErrorUtil.py ================================================ import errno def errNoToHttpStatus(error_code): """Convert IOError error numbers to HTTP equivalent status codes.""" httpStatus = 500 if error_code == errno.EINVAL: # formerly EBADMSG httpStatus = 400 # bad request elif error_code == errno.EACCES: httpStatus = 401 # unauthorized elif error_code == errno.EPERM: httpStatus = 403 # forbidden elif error_code == errno.ENXIO: httpStatus = 404 # Not Found elif error_code == errno.EEXIST: httpStatus = 409 # conflict elif error_code == errno.ENOENT: # formerly EIDRM httpStatus = 410 # Gone elif error_code == errno.EIO: httpStatus = 500 # Internal Error elif error_code == errno.ENOSYS: httpStatus = 501 # Not implemented return httpStatus ================================================ FILE: h5serv/passwordUtil.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## import six if six.PY3: unicode = str import hashlib import logging import h5serv.config as config """ Password util helper functions """ def to_string(data): if six.PY3: if type(data) is bytes: return data.decode('utf-8') else: return data else: return data def to_bytes(data): if six.PY3: if type(data) is unicode: return data.encode('utf-8') else: return data else: return data def encrypt_pwd(passwd): """ One way password encryptyion """ encrypted = hashlib.sha224(passwd).hexdigest() return to_bytes(encrypted) def getAuthClient(): log = logging.getLogger("h5serv") log.info("getAuthClient") password_uri = config.get("password_uri") log.info("password_uri:" + password_uri) auth = None if password_uri.startswith("mongo"): # use mongodb user db from h5serv.authMongo import AuthClient auth = AuthClient(password_uri) else: # use HDF5 file-based user db from h5serv.authFile import AuthClient auth = AuthClient(password_uri) return auth ================================================ FILE: h5serv/timeUtil.py ================================================ from datetime import datetime import pytz def unixTimeToUTC(timestamp): """Convert unix timestamp (seconds since Jan 1, 1970, to ISO-8601 compatible UTC time string. """ utc = pytz.utc dtTime = datetime.fromtimestamp(int(timestamp), utc) iso_str = dtTime.isoformat() # isoformat returns a string like this: # '2014-10-30T04:25:21+00:00' # strip off the '+00:00' and replace # with 'Z' (both are ISO-8601 compatible) npos = iso_str.rfind('+') iso_z = iso_str[:npos] + 'Z' return iso_z ================================================ FILE: h5serv/tocUtil.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## import os import os.path as op import re from tornado.web import HTTPError import logging import h5py import h5serv.config as config import h5serv.fileUtil as fileUtil from h5json import Hdf5db """ TOC (Table of contents) util helper functions Creates a directory listing in the form of an HDF5 file """ def getTocFilePath(user=None): datapath = config.get('datapath') if user is None: #print("get default toc") toc_file_path = fileUtil.join(datapath, config.get('toc_name')) else: #print("get user toc") toc_file_path = fileUtil.join(datapath, config.get('home_dir')) toc_file_path = fileUtil.join(toc_file_path, config.get('toc_name')) return toc_file_path def isTocFilePath(filePath): datapath = config.get('datapath') toc_file_path = fileUtil.join(datapath, config.get('toc_name')) if filePath == toc_file_path: isTocFilePath = True else: isTocFilePath = False return isTocFilePath """ helper - get group uuid of hardlink, or None if no link """ def getSubgroupId(db, group_uuid, link_name): #print("link_name:", link_name) subgroup_uuid = None try: item = db.getLinkItemByUuid(group_uuid, link_name) if item['class'] != 'H5L_TYPE_HARD': return None if item['collection'] != 'groups': return None subgroup_uuid = item['id'] except IOError: # link_name doesn't exist, return None pass return subgroup_uuid """ Update toc with new filename """ def addTocEntry(domain, filePath, userid=None): """ Helper method - update TOC when a domain is created If userid is provide, the acl will be checked to ensure userid has permissions to modify the object. """ log = logging.getLogger("h5serv") hdf5_ext = config.get('hdf5_ext') dataPath = config.get('datapath') log.info("addTocEntry - domain: " + domain + " filePath: " + filePath) if not filePath.startswith(dataPath): log.error("unexpected filepath: " + filePath) raise HTTPError(500) filePath = fileUtil.getUserFilePath(filePath) tocFile = fileUtil.getTocFilePathForDomain(domain) log.info("tocFile: " + tocFile) acl = None try: with Hdf5db(tocFile, app_logger=log) as db: group_uuid = db.getUUIDByPath('/') pathNames = filePath.split('/') for linkName in pathNames: if not linkName: continue if linkName.endswith(hdf5_ext): linkName = linkName[:-(len(hdf5_ext))] print("linkName:", linkName) if userid is not None: acl = db.getAcl(group_uuid, userid) if not acl['create']: self.log.info("unauthorized access to group:" + group_uuid) raise IOError(errno.EACCES) # unauthorized log.info("createExternalLink -- uuid %s, domain: %s, linkName: %s", group_uuid, domain, linkName) db.createExternalLink(group_uuid, domain, '/', linkName) else: subgroup_uuid = getSubgroupId(db, group_uuid, linkName) if subgroup_uuid is None: if userid is not None: acl = db.getAcl(group_uuid, userid) if not acl['create']: self.log.info("unauthorized access to group:" + group_uuid) raise IOError(errno.EACCES) # unauthorized # create subgroup and link to parent group subgroup_uuid = db.createGroup() # link the new group log.info("linkObject -- uuid: %s, subgroup_uuid: %s, linkName: %s", group_uuid, subgroup_uuid, linkName) db.linkObject(group_uuid, subgroup_uuid, linkName) group_uuid = subgroup_uuid except IOError as e: log.info("IOError: " + str(e.errno) + " " + e.strerror) raise e """ Helper method - update TOC when a domain is deleted """ def removeTocEntry(domain, filePath, userid=None): log = logging.getLogger("h5serv") hdf5_ext = config.get('hdf5_ext') dataPath = config.get('datapath') if not filePath.startswith(dataPath): 
        log.error("unexpected filepath: " + filePath)
        raise HTTPError(500)
    filePath = fileUtil.getUserFilePath(filePath)
    tocFile = fileUtil.getTocFilePathForDomain(domain)
    log.info("removeTocEntry - domain: " + domain + " filePath: " + filePath +
             " tocfile: " + tocFile)
    pathNames = filePath.split('/')
    log.info("pathNames: " + str(pathNames))

    try:
        with Hdf5db(tocFile, app_logger=log) as db:
            group_uuid = db.getUUIDByPath('/')
            log.info("group_uuid:" + group_uuid)
            for linkName in pathNames:
                if not linkName:
                    continue
                log.info("linkName:" + linkName)
                if linkName.endswith(hdf5_ext):
                    linkName = linkName[:-(len(hdf5_ext))]
                    log.info("unlink " + group_uuid + ", " + linkName)
                    db.unlinkItem(group_uuid, linkName)
                else:
                    subgroup_uuid = getSubgroupId(db, group_uuid, linkName)
                    if subgroup_uuid is None:
                        msg = "Didn't find expected subgroup: " + group_uuid
                        log.error(msg)
                        raise HTTPError(500, reason=msg)
                    group_uuid = subgroup_uuid
    except IOError as e:
        log.info("IOError: " + str(e.errno) + " " + e.strerror)
        raise e


"""
  Create and populate the TOC file if not present
"""
def createTocFile(datapath):
    log = logging.getLogger("h5serv")
    log.info("createTocFile(" + datapath + ")")
    data_dir = fileUtil.posixpath(op.normpath(config.get('datapath')))
    home_dir = fileUtil.join(data_dir, config.get("home_dir"))
    log.info("home dir: " + home_dir)
    if datapath.startswith(home_dir):
        log.info("user toc")
        user_toc = True
    else:
        log.info("system toc")
        user_toc = False

    if datapath.endswith(config.get('toc_name')):
        toc_dir = fileUtil.posixpath(op.normpath(op.dirname(datapath)))
        toc_file = datapath
    else:
        toc_dir = fileUtil.posixpath(op.normpath(datapath))
        toc_file = fileUtil.join(toc_dir, config.get("toc_name"))
    log.info("toc_dir:[" + toc_dir + "]")
    log.info("data_dir:[" + data_dir + "]")
    log.info("home_dir:[" + home_dir + "]")
    log.info("check toc with path: " + toc_file)
    if op.exists(toc_file):
        msg = "toc file already exists"
        log.warning(msg)
        raise IOError(msg)

    base_domain = fileUtil.getDomain(toc_dir)
    log.info("base domain: " + base_domain)
    #if os.name == 'nt':
    #    toc_dir = toc_dir.replace('\\', '/')  # use unix style to map to HDF5 convention
    hdf5_ext = config.get('hdf5_ext')
    f = h5py.File(toc_file, 'w')
    for root, subdirs, files in os.walk(toc_dir):
        root = fileUtil.posixpath(root)
        log.info("toc walk: " + root)
        if toc_dir == data_dir:
            log.info(fileUtil.join(toc_dir, home_dir))
            if root.startswith(home_dir):
                log.info("skipping home dir")
                continue
        grppath = root[len(toc_dir):]
        if not grppath:
            grppath = '/'
        if grppath[-1] == '.':
            grppath = grppath[:-1]
        log.info("grppath: " + grppath)
        if os.name == 'nt':
            grppath = grppath.replace('\\', '/')  # match HDF5 convention
        grp = None
        if grppath == '/':
            grp = f['/']  # use root group
        domainpath = fileUtil.getDomain(grppath, base_domain=base_domain)
        log.info("grppath: " + grppath)
        log.info("base_domain: " + base_domain)
        log.info("domainpath: " + domainpath)
        for filename in os.listdir(root):
            log.info("walk, file: " + filename)
            if filename[0] == '.':
                log.info("skip hidden")
                continue  # skip 'hidden' files
            filepath = fileUtil.join(root, filename)
            log.info("walk, filepath: " + filepath)
            link_target = '/'
            if op.islink(filepath):
                log.info("symlink: " + filepath)
                # todo - quick hack for now to set a symlink to a sub-folder of the data dir
                # todo - revamp to use os.readlink and do the proper thing with the link value
                filedomain = config.get('domain')
                link_target += filename
                log.info("setting symbolic link domainpath to: " + filedomain +
                         " target: /" + filename)
            else:
                ext_len = len(hdf5_ext)
                if len(filename) < ext_len or
filename[-ext_len:] != hdf5_ext: log.info("skip non-hdf5 extension") continue if not h5py.is_hdf5(filepath): log.info("skip non-hdf5 file") continue filename = filename[:-ext_len] # replace any dots with '%2E' to disambiguate from domain seperators filename_encoded = filename.replace('.', '%2E') log.info("filename (noext): " + filename) if domainpath[0] == '.': filedomain = filename_encoded + domainpath else: filedomain = filename_encoded + '.' + domainpath # create the grp at grppath if it doesn't exist if not grp: log.info("tocfile - create_group: " + grppath) grp = f.create_group(grppath) # verify that we can convert the domain back to a file path log.info("filedomain: " + filedomain) try: fileUtil.getFilePath(filedomain) # ok - add the external link log.info("tocFile - ExternalLink: " + domainpath) grp[filename] = h5py.ExternalLink(filedomain, link_target) except HTTPError: log.info("file path: [" + filepath + "] is not valid dns name, ignoring") ================================================ FILE: setup.py ================================================ """A setuptools based setup module for h5serv. See: https://packaging.python.org/en/latest/distributing.html https://github.com/pypa/sampleproject """ # create universal wheel: python setup.py bdist_wheel --universal # after install the wheel, run server with: #$ python h5serv --log_file=/h5serv.log --datapath= # where log_dir is the full path to the desired directory for log file output, # and datapath is full path to the desired data directory. # # Always prefer setuptools over distutils from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='h5serv', # Versions should comply with PEP440. For a discussion on single-sourcing # the version across setup.py and the project code, see # https://packaging.python.org/en/latest/single_source_version.html version='1.2.0', description='HDF REST Server', long_description=long_description, # The project's main homepage. url='https://github.com/HDFGroup/h5serv', # Author details author='John Readey', author_email='jreadey@hdfgroup.org', # Choose your license license='BSD', # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ # How mature is this project? Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 5 - Production/Stable', # Indicate who your project is intended for 'Intended Audience :: Developers', 'Topic :: Software Development :: Build Tools', # Pick your license as you wish (should match "license" above) 'License :: OSI Approved :: BSD License', # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', ], # What does your project relate to? keywords='json hdf5 numpy array data', # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). packages=('h5serv',), # Alternatively, if you want to distribute just a my_module.py, uncomment # this: # py_modules=["my_module"], # List run-time dependencies here. These will be installed by pip when # your project is installed. 
For an analysis of "install_requires" vs pip's # requirements files see: # https://packaging.python.org/en/latest/requirements.html install_requires=['numpy>=1.10.4', 'h5py>=2.5', 'h5json>=1.1', 'watchdog>=0.8.3', 'tornado>=4.2.1', 'requests>=2.10.0', 'pyzmq>=14.7.0', 'pytz'], # List additional groups of dependencies here (e.g. development # dependencies). You can install these using the following syntax, # for example: # $ pip install -e .[dev,test] extras_require={ 'dev': ['check-manifest'], 'test': ['coverage'], }, # If there are data files included in your packages that need to be # installed, specify them here. If using Python 2.6 or less, then these # have to be included in MANIFEST.in as well. package_data={ 'h5serv': ['data/*',] }, # Although 'package_data' is the preferred approach, in some case you may # need to place data files outside of your packages. See: # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa # In this case, 'data_file' will be installed into '/my_data' #data_files=[('my_data', ['data/data_file'])], # To provide executable scripts, use entry points in preference to the # "scripts" keyword. Entry points provide cross-platform support and allow # pip to create the appropriate form of executable for the target platform. entry_points={ 'console_scripts': [ 'h5serv = h5serv.app:main' ] }, #scripts=['server/app.py', 'util/admin/import_file.py'], ) ================================================ FILE: test/aws/config.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## from h5serv.config import * cfg = { 'server': 'data.hdfgroup.org', 'port': 7258, # HTTPS port 'domain': 'test.data.hdfgroup.org', 'hdf5_ext': '.h5' } update(cfg) ================================================ FILE: test/aws/roottest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## import requests import config import unittest import json import base64 class RootTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(RootTest, self).__init__(*args, **kwargs) self.endpoint = 'https://' + config.get('server') + ':' + str(config.get('port')) #self.endpoint = "https://data.hdfgroup.org:7258" def testGetInfo(self): req = self.endpoint + "/info" rsp = requests.get(req, verify=False) self.failUnlessEqual(rsp.status_code, 200) self.failUnlessEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) self.assertTrue('h5serv_version' in rspJson) def testGetDomain(self): domain = 'tall.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers, verify=False) self.failUnlessEqual(rsp.status_code, 200) self.failUnlessEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/acltest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import requests import config import helper import unittest import json import base64 no_perm = { 'read': False, 'create': False, 'update': False, 'delete': False, 'readACL': False, 'updateACL': False } readonly_perm = { 'read': True, 'create': False, 'update': False, 'delete': False, 'readACL': False, 'updateACL': False } allaccess_perm = { 'read': True, 'create': True, 'update': True, 'delete': True, 'readACL': True, 'updateACL': True } class AclTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(AclTest, self).__init__(*args, **kwargs) self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) self.domain = None self.user1 = {'username':'test_user1', 'password':'test'} self.user2 = {'username':'test_user2', 'password':'test'} def getHeaders(self, user=None): headers = {'host': self.domain} if user is not None: # if user is supplied, add the auth header headers['Authorization'] = helper.getAuthString(user['username'], user['password']) return headers def getUUIDByPath(self, path): username = self.user1['username'] password = self.user1['password'] obj_uuid = helper.getUUIDByPath(self.domain, path, user=username, password=password) return obj_uuid def setupAcls(self): rootUUID = self.getUUIDByPath('/') self.assertTrue(helper.validateId(rootUUID)) headers = self.getHeaders() # set allaccess acl for test_user2 payload = allaccess_perm req = self.endpoint + "/acls/test_user2" rsp = requests.put(req, data=json.dumps(payload), headers=headers) if rsp.status_code == 401: # acl is already setup by another test, return return self.assertEqual(rsp.status_code, 201) # set read-only acl for test_user1 payload = readonly_perm req = self.endpoint + "/acls/test_user1" rsp = requests.put(req, 
data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # set default acl for domain payload = no_perm req = self.endpoint + "/acls/default" rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # try - again - should report authorizationis required now rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 401) def testGetDomainDefaultAcls(self): self.domain = 'tall.' + config.get('domain') req = self.endpoint + "/acls" headers = self.getHeaders() rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) self.assertTrue('acls' in rspJson) def testGetDomainAcls(self): self.domain = 'tall_acl.' + config.get('domain') self.setupAcls() self.assertEqual(self.domain, 'tall_acl.' + config.get('domain') ) headers = self.getHeaders() # read domain acls req = self.endpoint + "/acls" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 401) # needs Authorization # try with test_user1 headers = self.getHeaders(self.user1) req = self.endpoint + "/acls" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 403) # unAuthorization - test_user1 only has read access # try with test_user2 headers = self.getHeaders(self.user2) req = self.endpoint + "/acls" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('acls' in rspJson) acls = rspJson['acls'] self.assertEqual(len(acls), 3) # get acl for a particular user headers = self.getHeaders(self.user2) req = self.endpoint + "/acls/" + self.user1['username'] rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('acl' in rspJson) acl = rspJson['acl'] self.assertEqual(len(acl.keys()), 7) def testPutDomain(self): self.domain = 'new_domain.test_user1.' + config.get('home_domain') headers = self.getHeaders() # put domain in user home folder req = self.endpoint + "/" rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # todo - above should fail with 401 - need authorization def testAttributes(self): self.domain = 'tall_acl.' 
+ config.get('domain') self.setupAcls() rootUUID = self.getUUIDByPath('/') self.assertTrue(helper.validateId(rootUUID)) # create attribute headers = self.getHeaders() payload = {'type': 'H5T_STD_I32LE', 'value': 42} req = self.endpoint + "/groups/" + rootUUID + "/attributes/a1" rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 401) # auth needed headers = self.getHeaders(user=self.user1) rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 403) # not authorized headers = self.getHeaders(user=self.user2) rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # OK # read group attribute headers = self.getHeaders() req = self.endpoint + "/groups/" + rootUUID + "/attributes/a1" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 401) # un-authorized headers = self.getHeaders(user=self.user1) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # OK rspJson = json.loads(rsp.text) self.assertEqual(rspJson['value'], 42) # delete attribute headers = self.getHeaders() req = self.endpoint + "/groups/" + rootUUID + "/attributes/" + 'a1' rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 401) # auth needed headers = self.getHeaders(user=self.user1) rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 403) # not authorized headers = self.getHeaders(user=self.user2) rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) def testDataset(self): self.domain = 'tall_acl.' + config.get('domain') self.setupAcls() rootUUID = self.getUUIDByPath('/') self.assertTrue(helper.validateId(rootUUID)) # create dataset headers = self.getHeaders() payload = {'type': 'H5T_STD_I32LE' } req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 401) # auth needed headers = self.getHeaders(user=self.user1) rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 403) # not authorized headers = self.getHeaders(user=self.user2) rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # OK rspJson = json.loads(rsp.text) dataset_uuid = rspJson['id'] # read dataset headers = self.getHeaders() req = self.endpoint + "/datasets/" + dataset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 401) # un-authorized headers = self.getHeaders(user=self.user1) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # OK # read dataset acls req += "/acls" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 403) # test_user1 doesn't have readACL permission headers = self.getHeaders(user=self.user2) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("acls" in rspJson) acls = rspJson["acls"] self.assertEqual(len(acls), 0) # empty list of acls # delete dataset headers = self.getHeaders() req = self.endpoint + "/datasets/" + dataset_uuid rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 401) # auth needed headers = self.getHeaders(user=self.user1) rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 403) # not authorized headers = self.getHeaders(user=self.user2) rsp = requests.delete(req, 
headers=headers) self.assertEqual(rsp.status_code, 200) # OK def testValue(self): self.domain = 'tall_acl.' + config.get('domain') self.setupAcls() dset_uuid = self.getUUIDByPath('/g1/g1.1/dset1.1.1') self.assertTrue(helper.validateId(dset_uuid)) # read value headers = self.getHeaders() req = self.endpoint + "/datasets/" + dset_uuid + "/value" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 401) # auth needed headers = self.getHeaders(user=self.user1) req = self.endpoint + "/datasets/" + dset_uuid + "/value" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # OK # point selection points = [] for i in range(10): points.append((i,i)) # get diagonal req = self.endpoint + "/datasets/" + dset_uuid + "/value" payload = {'points': points} headers = self.getHeaders() rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 401) # auth needed # write values data = [] for i in range(10): row = [] for j in range(10): row.append(j*10 + i) data.append(row) req = self.endpoint + "/datasets/" + dset_uuid + "/value" payload = { 'value': data } headers = self.getHeaders() rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 401) # auth needed headers = self.getHeaders(user=self.user1) rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 403) # not authorized headers = self.getHeaders(user=self.user2) rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # OK def testDatatypes(self): self.domain = 'tall_acl.' + config.get('domain') self.setupAcls() payload = {'type': 'H5T_IEEE_F32LE'} req = self.endpoint + "/datatypes" # test create headers = self.getHeaders() rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 401) # auth needed headers = self.getHeaders(user=self.user1) rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 403) # not authorized headers = self.getHeaders(user=self.user2) rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # created rspJson = json.loads(rsp.text) type_uuid = rspJson['id'] self.assertTrue(helper.validateId(type_uuid)) # test read req = self.endpoint + "/datatypes/" + type_uuid headers = self.getHeaders() rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 401) # auth needed headers = self.getHeaders(user=self.user1) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # OK # read dataset acls req += "/acls" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 403) # test_user1 doesn't have readACL permission headers = self.getHeaders(user=self.user2) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("acls" in rspJson) acls = rspJson["acls"] self.assertEqual(len(acls), 0) # empty list of acls # test delete headers = self.getHeaders() req = self.endpoint + "/datatypes/" + type_uuid rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 401) # auth needed headers = self.getHeaders(user=self.user1) rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 403) # un authorized headers = self.getHeaders(user=self.user1) rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 
403) # OK def testGroups(self): self.domain = 'tall_acl.' + config.get('domain') self.setupAcls() g1_uuid = self.getUUIDByPath('/g1') self.assertTrue(helper.validateId(g1_uuid)) # read group g1 headers = self.getHeaders() req = self.endpoint + "/groups/" + g1_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 401) # needs Authorization headers = self.getHeaders(user=self.user1) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # read group acls req += "/acls" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 403) # test_user1 doesn't have readACL permission headers = self.getHeaders(user=self.user2) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("acls" in rspJson) acls = rspJson["acls"] self.assertEqual(len(acls), 0) # empty list of acls # read links headers = self.getHeaders() req = self.endpoint + "/groups/" + g1_uuid + '/links' rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 401) # needs Authorization headers = self.getHeaders(user=self.user1) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # OK # read link headers = self.getHeaders() req = self.endpoint + "/groups/" + g1_uuid + '/links/g1.1' rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 401) # needs Authorization headers = self.getHeaders(user=self.user1) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # OK # create group headers = self.getHeaders() req = self.endpoint + "/groups" rsp = requests.post(req, headers=headers) self.assertEqual(rsp.status_code, 401) # needs Authorization headers = self.getHeaders(user=self.user1) rsp = requests.post(req, headers=headers) self.assertEqual(rsp.status_code, 403) # un-authorized headers = self.getHeaders(user=self.user2) rsp = requests.post(req, headers=headers) self.assertEqual(rsp.status_code, 201) # Created rspJson = json.loads(rsp.text) grp_uuid = rspJson['id'] # add link headers = self.getHeaders() payload = { "id": grp_uuid } req = self.endpoint + "/groups/" + g1_uuid + '/links/new_group' rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 401) # needs Authorization headers = self.getHeaders(user=self.user1) rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 403) # un-authorized headers = self.getHeaders(user=self.user2) rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # created # delete link headers = self.getHeaders() req = self.endpoint + "/groups/" + g1_uuid + '/links/new_group' rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 401) # needs Authorization headers = self.getHeaders(user=self.user1) rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 403) # un-authorized headers = self.getHeaders(user=self.user2) rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # OK # delete group headers = self.getHeaders() req = self.endpoint + "/groups/" + grp_uuid rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 401) # needs Authorization headers = self.getHeaders(user=self.user1) rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 403) # un-authorized headers = self.getHeaders(user=self.user2) rsp = requests.delete(req, 
headers=headers) self.assertEqual(rsp.status_code, 200) # OK def testRoot(self): self.domain = 'tall_acl_delete.' + config.get('domain') self.setupAcls() # read domain resource headers = self.getHeaders() req = self.endpoint + "/" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 401) # needs Authorization\ headers = self.getHeaders(user=self.user1) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # delete domain! headers = self.getHeaders() req = self.endpoint + "/" rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 401) # needs Authorization # try malformed auth string headers['Authorization'] = "Basic " + "xxx123" rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 400) # bad auth header # try invalid password headers['Authorization'] = helper.getAuthString("test_user1", "notmypassword") rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 401) # need valid auth header headers = self.getHeaders(user=self.user1) rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 403) # not authorized headers = self.getHeaders(user=self.user2) rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # OK if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/attributetest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import requests import config import helper import unittest import json class AttributeTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(AttributeTest, self).__init__(*args, **kwargs) self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) def testGetGroupAttr(self): for domain_name in ('tall', 'tall_ro'): domain = domain_name + '.' + config.get('domain') rootUUID = helper.getRootUUID(domain) req = helper.getEndpoint() + "/groups/" + rootUUID + "/attributes/attr1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rspJson['name'], 'attr1') self.assertTrue('type' in rspJson) type = rspJson['type'] self.assertEqual(type['class'], 'H5T_INTEGER') self.assertEqual(type['base'], 'H5T_STD_I8LE') self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 10) self.assertTrue('maxdims' not in shape) data = rspJson['value'] self.assertEqual(len(data), 10) # data should be the array [97, 98, 99, ..., 105, 0] expected = list(range(97, 107)) expected[9] = 0 self.assertEqual(data, expected) self.assertEqual(len(rspJson['hrefs']), 4) def testGetDatasetAttr(self): for domain_name in ('tall', 'tall_ro'): domain = domain_name + '.' 
+ config.get('domain') rootUUID = helper.getRootUUID(domain) # get dataset uuid at path: 'g1/g1.1/dset1.1.1' req = helper.getEndpoint() + "/groups/" + rootUUID + "/links/g1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('link' in rspJson) link = rspJson['link'] g1UUID = link['id'] req = helper.getEndpoint() + "/groups/" + g1UUID + "/links/g1.1" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('link' in rspJson) link = rspJson['link'] g11UUID = link['id'] req = helper.getEndpoint() + "/groups/" + g11UUID + "/links/dset1.1.1" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('link' in rspJson) link = rspJson['link'] dset111UUID = link['id'] req = helper.getEndpoint() + "/datasets/" + dset111UUID + "/attributes/attr1" rsp = requests.get(req, headers=headers) rspJson = json.loads(rsp.text) self.assertEqual(rspJson['name'], 'attr1') self.assertTrue('type' in rspJson) type = rspJson['type'] self.assertEqual(type['class'], 'H5T_INTEGER') self.assertEqual(type['base'], 'H5T_STD_I8LE') self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 27) self.assertTrue('maxdims' not in shape) data = rspJson['value'] self.assertEqual(len(data), 27) # first value is 49 self.assertEqual(data[0], 49) self.assertEqual(len(rspJson['hrefs']), 4) def testGetAll(self): for domain_name in ('tall', 'tall_ro'): domain = domain_name + '.' + config.get('domain') rootUUID = helper.getRootUUID(domain) req = helper.getEndpoint() + "/groups/" + rootUUID + "/attributes" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 4) attrsJson = rspJson['attributes'] self.assertEqual(len(attrsJson), 2) self.assertEqual(attrsJson[0]['name'], 'attr1') self.assertEqual(attrsJson[1]['name'], 'attr2') self.assertFalse('value' in attrsJson[0]) def testGetBatch(self): domain = 'attr1k.' + config.get('domain') rootUUID = helper.getRootUUID(domain) req = helper.getEndpoint() + "/groups/" + rootUUID + "/attributes" headers = {'host': domain} params = {'Limit': 50 } names = set() # get attributes in 20 batches of 50 links each lastName = None for batchno in range(20): if lastName: params['Marker'] = lastName rsp = requests.get(req, headers=headers, params=params) self.assertEqual(rsp.status_code, 200) if rsp.status_code != 200: break rspJson = json.loads(rsp.text) attrs = rspJson['attributes'] self.assertEqual(len(attrs) <= 50, True) for attr in attrs: lastName = attr['name'] names.add(lastName) if len(attrs) == 0: break self.assertEqual(len(names), 1000) # should get 1000 unique attributes def testGetNullSpace(self): domain = "null_space_attr." 
+ config.get('domain') rootUUID = helper.getRootUUID(domain) req = helper.getEndpoint() + "/groups/" + rootUUID + "/attributes/attr1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rspJson['name'], 'attr1') self.assertTrue('type' in rspJson) type = rspJson['type'] self.assertEqual(type['class'], 'H5T_FLOAT') self.assertEqual(type['base'], 'H5T_IEEE_F64LE') self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_NULL') self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(value, None) self.assertEqual(len(rspJson['hrefs']), 4) def testGetCompound(self): for domain_name in ('compound_attr', ): domain = domain_name + '.' + config.get('domain') rootUUID = helper.getRootUUID(domain) req = helper.getEndpoint() + "/groups/" + rootUUID + "/attributes/weather" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rspJson['name'], 'weather') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 1) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_COMPOUND') self.assertEqual(len(typeItem['fields']), 4) fields = typeItem['fields'] field0 = fields[0] self.assertEqual(field0['name'], 'time') field0Type = field0['type'] self.assertEqual(field0Type['class'], 'H5T_INTEGER') self.assertEqual(field0Type['base'], 'H5T_STD_I64LE') field1 = fields[1] self.assertEqual(field1['name'], 'temp') field1Type = field1['type'] self.assertEqual(field1Type['class'], 'H5T_INTEGER') self.assertEqual(field1Type['base'], 'H5T_STD_I64LE') field2 = fields[2] self.assertEqual(field2['name'], 'pressure') field2Type = field2['type'] self.assertEqual(field2Type['class'], 'H5T_FLOAT') self.assertEqual(field2Type['base'], 'H5T_IEEE_F64LE') field3 = fields[3] self.assertEqual(field3['name'], 'wind') field3Type = field3['type'] self.assertEqual(field3Type['class'], 'H5T_STRING') self.assertEqual(field3Type['charSet'], 'H5T_CSET_ASCII') self.assertEqual(field3Type['length'], 6) self.assertEqual(field3Type['strPad'], 'H5T_STR_NULLPAD') def testGetCompoundArray(self): for domain_name in ('compound_array_attr', ): domain = domain_name + '.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/attributes/A1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rspJson['name'], 'A1') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SCALAR') typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_COMPOUND') self.assertEqual(len(typeItem['fields']), 2) fields = typeItem['fields'] field0 = fields[0] self.assertEqual(field0['name'], 'temp') field0Type = field0['type'] self.assertEqual(field0Type['class'], 'H5T_FLOAT') self.assertEqual(field0Type['base'], 'H5T_IEEE_F64LE') field1 = fields[1] self.assertEqual(field1['name'], '2x2') field1Type = field1['type'] self.assertEqual(field1Type['class'], 'H5T_ARRAY') self.assertEqual(field1Type['dims'], [2, 2]) baseType = field1Type['base'] self.assertEqual(baseType['class'], 'H5T_FLOAT') self.assertEqual(baseType['base'], 'H5T_IEEE_F32LE') def testGetCommitted(self): domain = 'committed_type.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) req = helper.getEndpoint() + "/groups/" + root_uuid + "/attributes/attr1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SCALAR') self.assertTrue('dims' not in shape) typeItem = rspJson['type'] # returns '/datatypes/' npos = typeItem.rfind('/') type_uuid = typeItem[(npos+1):] self.assertTrue(helper.validateId(type_uuid)) def testGetArray(self): domain = 'array_attr.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/attributes/A1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 4) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_ARRAY') self.assertTrue('dims' in typeItem) typeShape = typeItem['dims'] self.assertEqual(len(typeShape), 2) self.assertEqual(typeShape[0], 3) self.assertEqual(typeShape[1], 5) typeBase = typeItem['base'] self.assertEqual(typeBase['class'], 'H5T_INTEGER') self.assertEqual(typeBase['base'], 'H5T_STD_I64LE') self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 4) elem = value[0] # elem should be a 3x5 array self.assertEqual(len(elem), 3) self.assertEqual(elem[2], [0, -2, -4, -6, -8]) def testGetBool(self): domain = 'bool_attr.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) req = helper.getEndpoint() + "/groups/" + root_uuid + "/attributes/attr1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 4) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_ENUM') typeBase = typeItem['base'] self.assertEqual(typeBase['class'], 'H5T_INTEGER') self.assertEqual(typeBase['base'], 'H5T_STD_I8LE') self.assertTrue('mapping' in typeItem) mapping = typeItem['mapping'] self.assertEqual(len(mapping), 2) self.assertEqual(mapping['FALSE'], 0) self.assertEqual(mapping['TRUE'], 1) def testGetVLenString(self): domain = 'vlen_string_attr.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/attributes/A1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 4) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_STRING') self.assertEqual(typeItem['charSet'], 'H5T_CSET_ASCII') self.assertEqual(typeItem['length'], 'H5T_VARIABLE') self.assertEqual(typeItem['strPad'], 'H5T_STR_NULLTERM') self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 4) self.assertEqual(value[0], "Parting") self.assertEqual(value[1], "is such") self.assertEqual(value[2], "sweet") self.assertEqual(value[3], "sorrow.") def testGetFixedString(self): domain = 'fixed_string_attr.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/attributes/A1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 4) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_STRING') self.assertEqual(typeItem['charSet'], 'H5T_CSET_ASCII') self.assertEqual(typeItem['length'], 7) self.assertEqual(typeItem['strPad'], 'H5T_STR_NULLPAD') self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 4) self.assertEqual(value[0], "Parting") self.assertEqual(value[1], "is such") self.assertEqual(value[2], "sweet") self.assertEqual(value[3], "sorrow.") def testGetEnum(self): domain = 'enum_attr.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/attributes/A1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][0], 4) self.assertEqual(shape['dims'][1], 7) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_ENUM') baseType = typeItem['base'] self.assertEqual(baseType['class'], 'H5T_INTEGER') self.assertEqual(baseType['base'], 'H5T_STD_I16BE') self.assertTrue('mapping' in typeItem) mapping = typeItem['mapping'] self.assertEqual(len(mapping), 4) self.assertEqual(mapping['SOLID'], 0) self.assertEqual(mapping['LIQUID'], 1) self.assertEqual(mapping['GAS'], 2) self.assertEqual(mapping['PLASMA'], 3) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 4) self.assertEqual(value[1][2], mapping['GAS']) def testGetVlen(self): domain = 'vlen_attr.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/attributes/A1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 2) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_VLEN') baseType = typeItem['base'] self.assertEqual(baseType['class'], 'H5T_INTEGER') self.assertEqual(baseType['base'], 'H5T_STD_I32LE') #verify data returned value = rspJson['value'] self.assertEqual(len(value), 2) self.assertEqual(len(value[1]), 12) self.assertEqual(value[1][11], 144) def testGetOpaque(self): domain = 'opaque_attr.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/attributes/A1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 4) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_OPAQUE') self.assertEqual(typeItem['size'], 7) self.assertTrue('value' not in rspJson) # opaque data is not supported yet def testGetObjectReference(self): domain = 'objref_attr.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2') g1_uuid = helper.getUUID(domain, root_uuid, 'G1') req = helper.getEndpoint() + "/datasets/" + ds1_uuid + "/attributes/A1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 2) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_REFERENCE') self.assertEqual(typeItem['base'], 'H5T_STD_REF_OBJ') self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 2) self.assertEqual(value[0], 'groups/' + g1_uuid) self.assertEqual(value[1], 'datasets/' + ds2_uuid) def testGetRegionReference(self): domain = 'regionref_attr.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2') req = helper.getEndpoint() + "/datasets/" + ds1_uuid + "/attributes/A1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 2) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_REFERENCE') self.assertEqual(typeItem['base'], 'H5T_STD_REF_DSETREG') self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 2) value = rspJson['value'] self.assertEqual(len(value), 2) ref0 = value[0] self.assertEqual(ref0['select_type'], 'H5S_SEL_POINTS') self.assertEqual(ref0['id'], ds2_uuid) points = ref0['selection'] self.assertEqual(len(points), 4) self.assertEqual(points[0], [0, 1]) self.assertEqual(points[1], [2,11]) self.assertEqual(points[2], [1, 0]) self.assertEqual(points[3], [2, 4]) ref1 = value[1] self.assertEqual(ref1['select_type'], 'H5S_SEL_HYPERSLABS') self.assertEqual(ref1['id'], ds2_uuid) hyperslabs = ref1['selection'] self.assertEqual(len(hyperslabs), 4) self.assertEqual(hyperslabs[0][0], [0, 0]) self.assertEqual(hyperslabs[0][1], [1, 3]) self.assertEqual(hyperslabs[1][0], [0, 11]) self.assertEqual(hyperslabs[1][1], [1, 14]) self.assertEqual(hyperslabs[2][0], [2, 0]) self.assertEqual(hyperslabs[2][1], [3, 3]) self.assertEqual(hyperslabs[3][0], [2, 11]) self.assertEqual(hyperslabs[3][1], [3, 14]) def testGetScalar(self): domain = 'scalar.' + config.get('domain') root_uuid = helper.getRootUUID(domain) req = helper.getEndpoint() + "/groups/" + root_uuid + "/attributes/attr1" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SCALAR') self.assertTrue('dims' not in shape) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_INTEGER') self.assertEqual(typeItem['base'], 'H5T_STD_I64LE') data = rspJson['value'] self.assertEqual(type(data), int) self.assertEqual(data, 42) def testGetScalarString(self): domain = 'scalar.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) # now try reading a scalar string req = helper.getEndpoint() + "/groups/" + root_uuid + "/attributes/attr2" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SCALAR') self.assertTrue('dims' not in shape) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_STRING') self.assertEqual(typeItem['charSet'], 'H5T_CSET_ASCII') self.assertEqual(typeItem['length'], 'H5T_VARIABLE') self.assertEqual(typeItem['strPad'], 'H5T_STR_NULLTERM') data = rspJson['value'] self.assertEqual(data, "hello") def testGetDimensionScale(self): domain = 'dim_scale.' + config.get('domain') root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'temperatures') scale_x_uuid = helper.getUUID(domain, root_uuid, 'scale_x') scale_y_uuid = helper.getUUID(domain, root_uuid, 'scale_y') scale_z_uuid = helper.getUUID(domain, root_uuid, 'scale_z') # now try reading the dimension list attribute req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/attributes/DIMENSION_LIST" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertTrue('dims' in shape) dims = shape['dims'] self.assertEqual(len(dims), 1) self.assertEqual(dims[0], 3) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_VLEN') baseType = typeItem['base'] self.assertEqual(baseType['class'], 'H5T_REFERENCE') self.assertEqual(baseType['base'], 'H5T_STD_REF_OBJ') data = rspJson['value'] self.assertEqual(len(data), 3) self.assertEqual(data[0], ['datasets/' + scale_x_uuid]) self.assertEqual(data[1], ['datasets/' + scale_y_uuid]) self.assertEqual(data[2], ['datasets/' + scale_z_uuid]) # read the x dimenscale and verify it refernces the temperature dataset req = helper.getEndpoint() + "/datasets/" + scale_x_uuid + "/attributes/REFERENCE_LIST" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_COMPOUND') fields = typeItem['fields'] self.assertEqual(len(fields), 2) refType = fields[0]["type"] self.assertEqual(refType["class"], 'H5T_REFERENCE') intType = fields[1]["type"] self.assertEqual(intType["class"], 'H5T_INTEGER') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertTrue('dims' in shape) dims = shape['dims'] self.assertEqual(len(dims), 1) self.assertEqual(dims[0], 1) data = rspJson['value'] elem = data[0] self.assertEqual(len(elem), 2) # two fields of a compound type self.assertEqual(elem[0], 'datasets/' + dset_uuid) # reference primary dataset self.assertEqual(elem[1], 0) # first dimension def testPut(self): domain = 'tall_updated.' 
+ config.get('domain') attr_name = 'attr3' rootUUID = helper.getRootUUID(domain) headers = {'host': domain} payload = {'type': 'H5T_IEEE_F32LE', 'shape': (1,), 'value': (3.12,)} req = self.endpoint + "/groups/" + rootUUID + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) # do a get and verify the space is simple rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # get attribute rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') dims = shape['dims'] self.assertEqual(len(dims), 1) self.assertEqual(dims[0], 1) # try creating the attribute again, should return 409 req = self.endpoint + "/groups/" + rootUUID + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 409) # conflict def testPutScalar(self): domain = 'tall_updated.' + config.get('domain') attr_name = 'attr4' rootUUID = helper.getRootUUID(domain) headers = {'host': domain} payload = {'type': 'H5T_STD_I32LE', 'value': 42} req = self.endpoint + "/groups/" + rootUUID + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) # do a get and verify the space is scalar rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # get attribute rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SCALAR') def testPutList(self): domain = 'tall_updated.' + config.get('domain') attr_name = 'attr5' rootUUID = helper.getRootUUID(domain) headers = {'host': domain} data = list(range(10)) payload = {'type': 'H5T_STD_I32LE', 'shape': (10,), 'value': data} req = self.endpoint + "/groups/" + rootUUID + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) # do a get and verify the space has 10 elements rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # get attribute rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') dims = shape['dims'] self.assertEqual(len(dims), 1) self.assertEqual(dims[0], 10) def testPutFixedString(self): domain = 'tall_updated.' + config.get('domain') attr_name = 'attr6' rootUUID = helper.getRootUUID(domain) headers = {'host': domain} data = "Hello, I'm a fixed-width string!" str_type = { 'charSet': 'H5T_CSET_ASCII', 'class': 'H5T_STRING', 'strPad': 'H5T_STR_NULLPAD', 'length': 40} payload = {'type': str_type, 'shape': (1,), 'value': data} req = self.endpoint + "/groups/" + rootUUID + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) def testPutVariableString(self): domain = 'tall_updated.' 
+ config.get('domain') attr_name = 'attr7' rootUUID = helper.getRootUUID(domain) headers = {'host': domain} data = ["Hypermedia", "as", "the", "engine", "of", "state."] str_type = { 'charSet': 'H5T_CSET_ASCII', 'class': 'H5T_STRING', 'strPad': 'H5T_STR_NULLPAD', 'length': 'H5T_VARIABLE'} payload = {'type': str_type, 'shape': (6,), 'value': data} req = self.endpoint + "/groups/" + rootUUID + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) def testPutNullSpace(self): domain = 'tall_updated.' + config.get('domain') attr_name = 'attr8' rootUUID = helper.getRootUUID(domain) headers = {'host': domain} payload = {'type': 'H5T_STD_I32LE', 'shape': 'H5S_NULL'} req = self.endpoint + "/groups/" + rootUUID + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) # do a get and verify the space is scalar rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # get attribute rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_NULL') def testPutObjReference(self): domain = 'tall_updated.' + config.get('domain') attr_name = 'attr9' root_uuid = helper.getRootUUID(domain) g2_uuid = helper.getUUID(domain, root_uuid, 'g2') d22_uuid = helper.getUUID(domain, g2_uuid, 'dset2.2') headers = {'host': domain} datatype = {'class': 'H5T_REFERENCE', 'base': 'H5T_STD_REF_OBJ' } value = ('groups/' + g2_uuid, '', 'datasets/' + d22_uuid) payload = {'type': datatype, 'shape': 3, 'value': value} req = self.endpoint + "/groups/" + root_uuid + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) def testPutRegionReference(self): domain = 'tall_updated.' + config.get('domain') attr_name = 'attr10' root_uuid = helper.getRootUUID(domain) g1_uuid = helper.getUUID(domain, root_uuid, 'g1') g11_uuid = helper.getUUID(domain, g1_uuid, 'g1.1') d111_uuid = helper.getUUID(domain, g11_uuid, 'dset1.1.1') headers = {'host': domain} datatype = {'class': 'H5T_REFERENCE', 'base': 'H5T_STD_REF_DSETREG' } region_ref = { } region_ref['id'] = d111_uuid region_ref['select_type'] = 'H5S_SEL_HYPERSLABS' region_ref['selection'] = (((0,0),(1,1)),((2,2),(4,4)), ((5,5),(10,10))) point_ref = { } point_ref['id'] = d111_uuid point_ref['select_type'] = 'H5S_SEL_POINTS' point_ref['selection'] = ((0,0),(1,1),(2,2),(3,3),(4,4),(5,5),(6,6),(7,7),(8,8),(9,9)) all_ref = {} all_ref['id'] = d111_uuid all_ref['select_type'] = 'H5S_SEL_ALL' none_ref = {} none_ref['id'] = d111_uuid none_ref['select_type'] = 'H5S_SEL_NONE' value = ( region_ref , point_ref, all_ref, none_ref ) payload = {'type': datatype, 'shape': 4, 'value': value} req = self.endpoint + "/groups/" + root_uuid + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) def testPutCompound(self): domain = 'tall_updated.' 
+ config.get('domain') attr_name = 'attr_compound' root_uuid = helper.getRootUUID(domain) headers = {'host': domain} fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}) datatype = {'class': 'H5T_COMPOUND', 'fields': fields } value = ((55, 32.34), (59, 29.34)) payload = {'type': datatype, 'shape': 2, 'value': value} req = self.endpoint + "/groups/" + root_uuid + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) """ tbd - fix issue passing attribute data def testPutCompoundArray(self): domain = 'tall_updated.' + config.get('domain') attr_name = 'attr_compound_array' root_uuid = helper.getRootUUID(domain) headers = {'host': domain} fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, {'name': '2x2', 'type': { 'class': 'H5T_ARRAY', 'dims': [2,2], 'base': 'H5T_IEEE_F32LE'} }) datatype = {'class': 'H5T_COMPOUND', 'fields': fields } value = ((3.14, ((55.0, 32.34), (59.0, 29.34))), (6.28, ((110.0, 64.68), (118.0, 58.68)))) payload = {'type': datatype, 'shape': 2 'value': 0} print "payload:", json.dumps(payload) req = self.endpoint + "/groups/" + root_uuid + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) """ def testPutCommittedType(self): domain = 'tall_updated.' + config.get('domain') attr_name = 'attr_committed' root_uuid = helper.getRootUUID(domain) headers = {'host': domain} # create the datatype payload = {'type': 'H5T_IEEE_F32LE'} req = self.endpoint + "/datatypes" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create datatype rspJson = json.loads(rsp.text) dtype_uuid = rspJson['id'] self.assertTrue(helper.validateId(dtype_uuid)) # link new datatype as 'dtype1' root_uuid = helper.getRootUUID(domain) name = 'dtype1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {'id': dtype_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create the attribute using the type created above value = [] for i in range(10): value.append(i*0.5) payload = {'type': dtype_uuid, 'shape': 10, 'value': value} req = self.endpoint + "/groups/" + root_uuid + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute rspJson = json.loads(rsp.text) self.assertEqual(len(rspJson['hrefs']), 3) def testPutDimensionScale(self): domain = 'dim_scale_updated.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) headers = {'host': domain} dset_uuid = helper.getUUID(domain, root_uuid, 'temperatures') scale_x_uuid = helper.getUUID(domain, root_uuid, 'scale_x') scale_y_uuid = helper.getUUID(domain, root_uuid, 'scale_y') scale_z_uuid = helper.getUUID(domain, root_uuid, 'scale_z') # attach a dimension_list attribute to temperatures dataset reftype = {'class': 'H5T_REFERENCE', 'base': 'H5T_STD_REF_OBJ' } attr_name = "DIMENSION_LIST" vlen_type = {'class': 'H5T_VLEN', 'base': reftype } value = [] for item_uuid in (scale_x_uuid, scale_y_uuid, scale_z_uuid): obj_ref = 'datasets/' + item_uuid vlen_item = (obj_ref,) value.append(vlen_item) payload = {'type': vlen_type, 'shape': 3, 'value': value} req = self.endpoint + "/datasets/" + dset_uuid + "/attributes/" + attr_name rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create attribute def testPutInvalid(self): domain = 'tall_updated.' + config.get('domain') attr_name = 'attr_invalid' rootUUID = helper.getRootUUID(domain) headers = {'host': domain} # attempt to pass in a string directly (which is not valid JSON) payload = "{'type': 'H5T_IEEE_F32LE', 'shape': (0,), 'value': 3.12}" req = self.endpoint + "/groups/" + rootUUID + "/attributes/" + attr_name rsp = requests.put(req, data=payload, headers=headers) self.assertEqual(rsp.status_code, 400) # Bad Request def testDelete(self): domain = 'tall_updated.' + config.get('domain') attr_name = 'attr1' rootUUID = helper.getRootUUID(domain) headers = {'host': domain} req = self.endpoint + "/groups/" + rootUUID + "/attributes/" + attr_name rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # delete attribute def testGetInvalidName(self): domain = 'tall.' + config.get('domain') rootUUID = helper.getRootUUID(domain) req = helper.getEndpoint() + "/groups/" + rootUUID + "/attributes/no_attr_here" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 404) if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/config.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## from h5serv.config import * cfg = { 'server': '127.0.0.1', 'home_domain': 'home.hdfgroup.org', 'port': 5000, 'domain': 'test.hdfgroup.org', 'hdf5_ext': '.h5', 'home_dir': 'home' } update(cfg) ================================================ FILE: test/integ/datasettest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. 
The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import requests import config import helper import unittest import json class DatasetTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(DatasetTest, self).__init__(*args, **kwargs) self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) def testGet(self): domain = 'tall.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) g2_uuid = helper.getUUID(domain, root_uuid, 'g2') dset21_uuid = helper.getUUID(domain, g2_uuid, 'dset2.1') req = helper.getEndpoint() + "/datasets/" + dset21_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('type' in rspJson) type_json = rspJson['type'] self.assertEqual(type_json['class'], 'H5T_FLOAT') self.assertEqual(type_json['base'], 'H5T_IEEE_F32BE') self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 10) self.assertTrue('maxdims' not in shape) def testGetResizable(self): domain = 'resizable.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) resizable_1d_uuid = helper.getUUID(domain, root_uuid, 'resizable_1d') req = helper.getEndpoint() + "/datasets/" + resizable_1d_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) type_json = rspJson['type'] self.assertEqual(type_json['class'], 'H5T_INTEGER') self.assertEqual(type_json['base'], 'H5T_STD_I64LE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 10) self.assertEqual(shape['maxdims'][0], 20) resizable_2d_uuid = helper.getUUID(domain, root_uuid, 'resizable_2d') req = helper.getEndpoint() + "/datasets/" + resizable_2d_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) type_json = rspJson['type'] self.assertEqual(type_json['class'], 'H5T_INTEGER') self.assertEqual(type_json['base'], 'H5T_STD_I64LE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][1], 10) self.assertEqual(shape['maxdims'][1], 20) unlimited_1d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_1d') req = helper.getEndpoint() + "/datasets/" + unlimited_1d_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) type_json = rspJson['type'] self.assertEqual(type_json['class'], 'H5T_INTEGER') self.assertEqual(type_json['base'], 'H5T_STD_I64LE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 10) self.assertEqual(shape['maxdims'][0], 0) unlimited_2d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_2d') req = 
helper.getEndpoint() + "/datasets/" + unlimited_2d_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) type_json = rspJson['type'] self.assertEqual(type_json['class'], 'H5T_INTEGER') self.assertEqual(type_json['base'], 'H5T_STD_I64LE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][1], 10) self.assertEqual(shape['maxdims'][1], 0) def testGetScalar(self): domain = 'scalar.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '0d') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) type_json = rspJson['type'] self.assertEqual(type_json['class'], 'H5T_INTEGER') self.assertEqual(type_json['base'], 'H5T_STD_I32LE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SCALAR') self.assertTrue('dims' not in shape) self.assertTrue('maxdims' not in shape) def testGetScalarString(self): domain = 'scalar.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '0ds') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) type_json = rspJson['type'] self.assertEqual(type_json['class'], 'H5T_STRING') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SCALAR') self.assertTrue('dims' not in shape) self.assertTrue('maxdims' not in shape) def testGetSimpleOneElement(self): domain = 'scalar.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '1d') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) type_json = rspJson['type'] self.assertEqual(type_json['class'], 'H5T_INTEGER') self.assertEqual(type_json['base'], 'H5T_STD_I32LE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertTrue('dims' in shape) self.assertEqual(shape['dims'][0], 1) def testGetSimpleOneElementString(self): domain = 'scalar.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '1ds') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) type = rspJson['type'] self.assertEqual(type['class'], 'H5T_STRING') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertTrue('dims' in shape) self.assertEqual(shape['dims'][0], 1) def testGetNullSpace(self): domain = 'null_space_dset.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) type = rspJson['type'] self.assertEqual(type['class'], 'H5T_INTEGER') self.assertEqual(type['base'], 'H5T_STD_I32LE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_NULL') self.assertTrue('dims' not in shape) self.assertTrue('maxdims' not in shape) def testGetCompound(self): domain = 'compound.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 72) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_COMPOUND') self.assertTrue('fields' in typeItem) fields = typeItem['fields'] self.assertEqual(len(fields), 5) timeField = fields[1] self.assertEqual(timeField['name'], 'time') self.assertTrue('type' in timeField) timeFieldType = timeField['type'] self.assertEqual(timeFieldType['class'], 'H5T_STRING') self.assertEqual(timeFieldType['charSet'], 'H5T_CSET_ASCII') self.assertEqual(timeFieldType['length'], 6) self.assertEqual(timeFieldType['strPad'], 'H5T_STR_NULLPAD') tempField = fields[2] self.assertEqual(tempField['name'], 'temp') tempFieldType = tempField['type'] self.assertEqual(tempFieldType['class'], 'H5T_INTEGER') self.assertEqual(tempFieldType['base'], 'H5T_STD_I64LE') def testGetCompoundArray(self): for domain_name in ('compound_array_dset', ): domain = domain_name + '.' + config.get('domain') root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 10) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_COMPOUND') self.assertEqual(len(typeItem['fields']), 2) fields = typeItem['fields'] field0 = fields[0] self.assertEqual(field0['name'], 'temp') field0Type = field0['type'] self.assertEqual(field0Type['class'], 'H5T_FLOAT') self.assertEqual(field0Type['base'], 'H5T_IEEE_F64LE') field1 = fields[1] self.assertEqual(field1['name'], '2x2') field1Type = field1['type'] self.assertEqual(field1Type['class'], 'H5T_ARRAY') self.assertEqual(field1Type['dims'], [2, 2]) baseType = field1Type['base'] self.assertEqual(baseType['class'], 'H5T_FLOAT') self.assertEqual(baseType['base'], 'H5T_IEEE_F32LE') def testGetCompoundCommitted(self): domain = 'compound_committed.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 72) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_COMPOUND') self.assertTrue('fields' in typeItem) fields = typeItem['fields'] self.assertEqual(len(fields), 3) timeField = fields[1] self.assertEqual(timeField['name'], 'time') self.assertTrue('type' in timeField) timeFieldType = timeField['type'] self.assertEqual(timeFieldType['class'], 'H5T_STRING') self.assertEqual(timeFieldType['charSet'], 'H5T_CSET_ASCII') self.assertEqual(timeFieldType['length'], 6) self.assertEqual(timeFieldType['strPad'], 'H5T_STR_NULLPAD') tempField = fields[2] self.assertEqual(tempField['name'], 'temp') tempFieldType = tempField['type'] self.assertEqual(tempFieldType['class'], 'H5T_INTEGER') self.assertEqual(tempFieldType['base'], 'H5T_STD_I32LE') def testGetCompoundArrayFields(self): # compound where the fields are array type (renamed to avoid shadowing testGetCompoundArray above) domain = 'tstr.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'comp1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][0], 3) self.assertEqual(shape['dims'][1], 6) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_COMPOUND') self.assertTrue('fields' in typeItem) fields = typeItem['fields'] self.assertEqual(len(fields), 2) intField = fields[0] self.assertEqual(intField['name'], 'int_array') self.assertTrue('type' in intField) intFieldType = intField['type'] self.assertEqual(intFieldType['class'], 'H5T_ARRAY') intFieldTypeDims = intFieldType['dims'] self.assertEqual(len(intFieldTypeDims), 2) self.assertEqual(intFieldTypeDims[0], 8) self.assertEqual(intFieldTypeDims[1], 10) self.assertTrue('base' in intFieldType) intFieldTypeBase = intFieldType['base'] self.assertEqual(intFieldTypeBase['class'], 'H5T_INTEGER') self.assertEqual(intFieldTypeBase['base'], 'H5T_STD_I32BE') strField = fields[1] self.assertEqual(strField['name'], 'string') self.assertTrue('type' in strField) strFieldType = strField['type'] self.assertEqual(strFieldType['class'], 'H5T_ARRAY') strFieldTypeDims = strFieldType['dims'] self.assertEqual(len(strFieldTypeDims), 2) self.assertEqual(strFieldTypeDims[0], 3) self.assertEqual(strFieldTypeDims[1], 4) self.assertTrue('base' in strFieldType) strFieldTypeBase = strFieldType['base'] self.assertEqual(strFieldTypeBase['class'], 'H5T_STRING') self.assertEqual(strFieldTypeBase['charSet'], 'H5T_CSET_ASCII') self.assertEqual(strFieldTypeBase['length'], 32) # todo - fix, cf https://github.com/HDFGroup/h5serv/issues/20 #self.assertEqual(strFieldTypeBase['strPad'], 'H5T_STR_SPACEPAD') def testGetCommitted(self): domain = 'committed_type.'
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 4) typeItem = rspJson['type'] # returns '/datatypes/' npos = typeItem.rfind('/') type_uuid = typeItem[(npos+1):] self.assertTrue(helper.validateId(type_uuid)) def testGetArray(self): domain = 'array_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 4) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_ARRAY') self.assertTrue('dims' in typeItem) typeShape = typeItem['dims'] self.assertEqual(len(typeShape), 2) self.assertEqual(typeShape[0], 3) self.assertEqual(typeShape[1], 5) typeItemBase = typeItem['base'] self.assertEqual(typeItemBase['class'], 'H5T_INTEGER') self.assertEqual(typeItemBase['base'], 'H5T_STD_I64LE') def testGetFixedString(self): domain = 'fixed_string_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 4) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_STRING') self.assertEqual(typeItem['charSet'], 'H5T_CSET_ASCII') self.assertEqual(typeItem['length'], 7) self.assertEqual(typeItem['strPad'], 'H5T_STR_NULLPAD') def testGetEnum(self): domain = 'enum_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][0], 4) self.assertEqual(shape['dims'][1], 7) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_ENUM') typeBase = typeItem['base'] self.assertEqual(typeBase['class'], 'H5T_INTEGER') self.assertEqual(typeBase['base'], 'H5T_STD_I16BE') self.assertTrue('mapping' in typeItem) mapping = typeItem['mapping'] self.assertEqual(len(mapping), 4) self.assertEqual(mapping['SOLID'], 0) self.assertEqual(mapping['LIQUID'], 1) self.assertEqual(mapping['GAS'], 2) self.assertEqual(mapping['PLASMA'], 3) def testGetBool(self): domain = 'bool_dset.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 4) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_ENUM') typeBase = typeItem['base'] self.assertEqual(typeBase['class'], 'H5T_INTEGER') self.assertEqual(typeBase['base'], 'H5T_STD_I8LE') self.assertTrue('mapping' in typeItem) mapping = typeItem['mapping'] self.assertEqual(len(mapping), 2) self.assertEqual(mapping['FALSE'], 0) self.assertEqual(mapping['TRUE'], 1) def testGetVlen(self): domain = 'vlen_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 2) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_VLEN') typeBase = typeItem['base'] self.assertEqual(typeBase['class'], 'H5T_INTEGER') self.assertEqual(typeBase['base'], 'H5T_STD_I32LE') def testGetOpaque(self): domain = 'opaque_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 4) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_OPAQUE') self.assertEqual(typeItem['size'], 7) def testGetObjReference(self): domain = 'objref_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 2) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_REFERENCE') self.assertEqual(typeItem['base'], 'H5T_STD_REF_OBJ') def testGetNullObjReference(self): domain = 'null_objref_dset.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 1) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_REFERENCE') self.assertEqual(typeItem['base'], 'H5T_STD_REF_OBJ') def testGetRegionReference(self): domain = 'regionref_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 2) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_REFERENCE') self.assertEqual(typeItem['base'], 'H5T_STD_REF_DSETREG') def testGetFillValueProp(self): domain = 'fillvalue.' + config.get('domain') root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('creationProperties' in rspJson) creationProps = rspJson['creationProperties'] self.assertTrue('fillValue' in creationProps) self.assertEqual(creationProps['fillValue'], 42) def testGetCreationProps(self): domain = 'dset_gzip.' 
+ config.get('domain') headers = {'host': domain} root_uuid = helper.getRootUUID(domain) # dset1 dset_uuid = helper.getUUID(domain, root_uuid, 'dset1') req = helper.getEndpoint() + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('creationProperties' in rspJson) creationProps = rspJson['creationProperties'] self.assertTrue('fillTime' in creationProps) self.assertEqual(creationProps['fillTime'], 'H5D_FILL_TIME_ALLOC') self.assertTrue('layout' in creationProps) layout = creationProps['layout'] self.assertEqual(layout['class'], 'H5D_CHUNKED') self.assertEqual(layout['dims'], [100, 100]) self.assertTrue('allocTime' in creationProps) self.assertEqual(creationProps['allocTime'], 'H5D_ALLOC_TIME_INCR') self.assertTrue('filters' in creationProps) filters = creationProps['filters'] self.assertEqual(len(filters), 1) deflate_filter = filters[0] self.assertTrue('id' in deflate_filter) self.assertEqual(deflate_filter['id'], 1) self.assertTrue('class' in deflate_filter) self.assertEqual(deflate_filter['class'], 'H5Z_FILTER_DEFLATE') self.assertTrue('level' in deflate_filter) self.assertEqual(deflate_filter['level'], 9) self.assertTrue('name' in deflate_filter) self.assertEqual(deflate_filter['name'], 'deflate') # dset2 dset_uuid = helper.getUUID(domain, root_uuid, 'dset2') req = helper.getEndpoint() + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('creationProperties' in rspJson) creationProps = rspJson['creationProperties'] self.assertTrue('fillTime' in creationProps) self.assertEqual(creationProps['fillTime'], 'H5D_FILL_TIME_ALLOC') self.assertTrue('layout' in creationProps) layout = creationProps['layout'] self.assertEqual(layout['class'], 'H5D_CHUNKED') self.assertEqual(layout['dims'], [100, 100]) self.assertTrue('allocTime' in creationProps) self.assertEqual(creationProps['allocTime'], 'H5D_ALLOC_TIME_INCR') self.assertTrue('filters' in creationProps) filters = creationProps['filters'] self.assertEqual(len(filters), 2) shuffle_filter = filters[0] self.assertTrue('id' in shuffle_filter) self.assertEqual(shuffle_filter['id'], 2) self.assertTrue('class' in shuffle_filter) self.assertEqual(shuffle_filter['class'], 'H5Z_FILTER_SHUFFLE') self.assertTrue('name' in shuffle_filter) self.assertEqual(shuffle_filter['name'], 'shuffle') deflate_filter = filters[1] self.assertTrue('id' in deflate_filter) self.assertEqual(deflate_filter['id'], 1) self.assertTrue('class' in deflate_filter) self.assertEqual(deflate_filter['class'], 'H5Z_FILTER_DEFLATE') self.assertTrue('level' in deflate_filter) self.assertEqual(deflate_filter['level'], 9) self.assertTrue('name' in deflate_filter) self.assertEqual(deflate_filter['name'], 'deflate') # dset3 dset_uuid = helper.getUUID(domain, root_uuid, 'dset3') req = helper.getEndpoint() + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('creationProperties' in rspJson) creationProps = rspJson['creationProperties'] self.assertTrue('fillTime' in creationProps) self.assertEqual(creationProps['fillTime'], 'H5D_FILL_TIME_ALLOC') self.assertTrue('layout' in creationProps) layout = creationProps['layout'] self.assertEqual(layout['class'], 'H5D_CHUNKED') self.assertEqual(layout['dims'], [100, 100]) self.assertTrue('allocTime' in creationProps) 
self.assertEqual(creationProps['allocTime'], 'H5D_ALLOC_TIME_INCR') self.assertTrue('filters' in creationProps) filters = creationProps['filters'] self.assertEqual(len(filters), 3) fletcher_filter = filters[0] self.assertTrue('id' in fletcher_filter) self.assertEqual(fletcher_filter['id'], 3) self.assertTrue('class' in fletcher_filter) self.assertEqual(fletcher_filter['class'], 'H5Z_FILTER_FLETCHER32') self.assertTrue('name' in fletcher_filter) self.assertEqual(fletcher_filter['name'], 'fletcher32') shuffle_filter = filters[1] self.assertTrue('id' in shuffle_filter) self.assertEqual(shuffle_filter['id'], 2) self.assertTrue('class' in shuffle_filter) self.assertEqual(shuffle_filter['class'], 'H5Z_FILTER_SHUFFLE') self.assertTrue('name' in shuffle_filter) self.assertEqual(shuffle_filter['name'], 'shuffle') deflate_filter = filters[2] self.assertTrue('id' in deflate_filter) self.assertEqual(deflate_filter['id'], 1) self.assertTrue('class' in deflate_filter) self.assertEqual(deflate_filter['class'], 'H5Z_FILTER_DEFLATE') self.assertTrue('level' in deflate_filter) self.assertEqual(deflate_filter['level'], 9) self.assertTrue('name' in deflate_filter) self.assertEqual(deflate_filter['name'], 'deflate') def testGetFilters(self): # # map of filter properties we expect to get # filter_props = {"h5ex_d_checksum": [{'id': 3},], "h5ex_d_gzip": [{'id': 1, 'level': 9},], "h5ex_d_nbit": [{'id': 5},], "h5ex_d_shuffle": [{'id': 2}, {'id': 1, 'level': 9}], "h5ex_d_sofloat": [{'id': 6},], "h5ex_d_soint": [{'id': 6, 'scaleType': 'H5Z_SO_INT'},], "h5ex_d_unlimgzip": [{'id': 1, 'level': 9},] } for domain_val in filter_props.keys(): domain = domain_val + '.' + config.get('domain') #print "domain", domain_val headers = {'host': domain} root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('creationProperties' in rspJson) creationProps = rspJson['creationProperties'] self.assertTrue('filters' in creationProps) filters = creationProps['filters'] num_filters = len(filters) ref_vals = filter_props[domain_val] # check we got the expected number of filters self.assertEqual(num_filters, len(ref_vals)) for i in range(num_filters): #print "filter:", i filter_prop = filters[i] #print "filter_prop", filter_prop ref_val = ref_vals[i] # check filter property values are correct for k in ref_val.keys(): #print "checking key:", k self.assertTrue(k in filter_prop) self.assertEqual(filter_prop[k], ref_val[k]) def testPost(self): domain = 'newdset.datasettest.'
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain payload = {'type': 'H5T_IEEE_F32LE', 'shape': 10} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' root_uuid = helper.getRootUUID(domain) name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # verify we can read the dataset back req = self.endpoint + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') # verify type class is float rsp_type = rspJson['type'] self.assertEqual(rsp_type['class'], 'H5T_FLOAT') def testPostScalar(self): domain = 'newscalar.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain str_type = { 'charSet': 'H5T_CSET_ASCII', 'class': 'H5T_STRING', 'strPad': 'H5T_STR_NULLPAD', 'length': 40} payload = {'type': str_type} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' root_uuid = helper.getRootUUID(domain) name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # verify the dataspace is scalar req = self.endpoint + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SCALAR') # verify type class is string rsp_type = rspJson['type'] self.assertEqual(rsp_type['class'], 'H5T_STRING') def testPostNullSpace(self): domain = 'newnullspace.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain payload = {'type': 'H5T_IEEE_F32LE', 'shape': 'H5S_NULL'} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' root_uuid = helper.getRootUUID(domain) name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # verify the dataset has a null dataspace req = self.endpoint + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_NULL') # verify type class is float type_json = rspJson['type'] self.assertEqual(type_json['class'], 'H5T_FLOAT') def testPostZeroDim(self): domain = 'new0d.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain payload = {'type': 'H5T_STD_I32LE', 'shape': (1,)} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' root_uuid = helper.getRootUUID(domain) name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # verify the dataspace is one dimensional/one-element req = self.endpoint + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 1) def testPostTypes(self): domain = 'datatypes.datasettest.'
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain root_uuid = helper.getRootUUID(domain) # todo - add 8-bit types to list: # 'H5T_STD_I8', 'H5T_STD_U8' # See https://github.com/HDFGroup/h5serv/issues/51 datatypes = ( 'H5T_STD_I16', 'H5T_STD_U16', 'H5T_STD_I32', 'H5T_STD_U32', 'H5T_STD_I64', 'H5T_STD_U64', 'H5T_IEEE_F32', 'H5T_IEEE_F64' ) endianess = ('LE', 'BE') for datatype in datatypes: for endian in endianess: payload = {'type': datatype+endian, 'shape': 10} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset using the type name name = datatype + endian req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # Do a GET on the datasets we just created req = helper.getEndpoint() + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) # verify the type self.assertTrue('type' in rspJson) type_json = rspJson['type'] self.assertTrue(type_json['class'] in ('H5T_FLOAT', 'H5T_INTEGER')) self.assertEqual(type_json['base'], datatype+endian) def testPostCompoundType(self): domain = 'compound.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain root_uuid = helper.getRootUUID(domain) fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}) datatype = {'class': 'H5T_COMPOUND', 'fields': fields } payload = {'type': datatype, 'shape': 10} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link the new dataset name = "dset" req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) def testPostCompoundArrayVLenStringType(self): domain = 'compound_array_vlen_string.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain root_uuid = helper.getRootUUID(domain) fields = [ {"type": {"class": "H5T_INTEGER", "base": "H5T_STD_U64BE"}, "name": "VALUE1"}, {"type": {"class": "H5T_FLOAT", "base": "H5T_IEEE_F64BE"}, "name": "VALUE2"}, {"type": {"class": "H5T_ARRAY", "dims": [8], "base": {"class": "H5T_STRING", "charSet": "H5T_CSET_ASCII", "strPad": "H5T_STR_NULLTERM", "length": "H5T_VARIABLE"}}, "name": "VALUE3"}] datatype = {'class': 'H5T_COMPOUND', 'fields': fields } payload = {'type': datatype, 'shape': 5} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link the new dataset name = "dset" req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) def testPostCompoundFillValue(self): domain = 'compound_fillvalue.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain root_uuid = helper.getRootUUID(domain) fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}) datatype = {'class': 'H5T_COMPOUND', 'fields': fields } payload = {'type': datatype, 'shape': 10} payload['creationProperties'] = {'fillValue': [42, 3.12] } req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link the new dataset name = "dset" req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) def testPostCompoundArray(self): domain = 'compound_array.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain root_uuid = helper.getRootUUID(domain) fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, {'name': '2x2', 'type': { 'class': 'H5T_ARRAY', 'dims': [2,2], 'base': 'H5T_IEEE_F32LE'} }) datatype = {'class': 'H5T_COMPOUND', 'fields': fields } payload = {'type': datatype, 'shape': 2 } req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link the new dataset name = "dset" req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) def testPostCommittedType(self): domain = 'committedtype.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain # create the datatype payload = {'type': 'H5T_IEEE_F32LE'} req = self.endpoint + "/datatypes" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create datatype rspJson = json.loads(rsp.text) dtype_uuid = rspJson['id'] self.assertTrue(helper.validateId(dtype_uuid)) # link new datatype as 'dtype1' root_uuid = helper.getRootUUID(domain) name = 'dtype1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {'id': dtype_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create the dataset payload = {'type': dtype_uuid, 'shape': [10, 10]} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # Verify the dataset type req = self.endpoint + "/datasets/" + dset_uuid + "/type" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("type" in rspJson) rsp_type = rspJson["type"] self.assertEqual(rsp_type["base"], 'H5T_IEEE_F32LE') self.assertEqual(rsp_type["class"], 'H5T_FLOAT') def testPostObjReference(self): domain = 'objref.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain datatype = {'class': 'H5T_REFERENCE', 'base': 'H5T_STD_REF_OBJ' } payload = {'type': datatype, 'shape': (1,)} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' root_uuid = helper.getRootUUID(domain) name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) def testPostArray(self): domain = 'newarraydset.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain datatype = {'class': 'H5T_ARRAY', 'base': 'H5T_STD_I64LE', 'dims': (3, 5) } payload = {'type': datatype, 'shape': 10} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' root_uuid = helper.getRootUUID(domain) name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) def testPostResizable(self): domain = 'resizabledset.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain payload = {'type': 'H5T_IEEE_F32LE', 'shape': 10, 'maxdims': 20} payload['creationProperties'] = {'fillValue': 3.12 } req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'resizable' root_uuid = helper.getRootUUID(domain) name = 'resizable' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # verify type and shape req = helper.getEndpoint() + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) type_json = rspJson['type'] self.assertEqual(type_json['class'], 'H5T_FLOAT') self.assertEqual(type_json['base'], 'H5T_IEEE_F32LE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 10) self.assertTrue('maxdims' in shape) self.assertEqual(shape['maxdims'][0], 20) # create a datataset with unlimited dimension payload = {'type': 'H5T_IEEE_F32LE', 'shape': 10, 'maxdims': 0} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'resizable' root_uuid = helper.getRootUUID(domain) name = 'unlimited' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) def testPostInvalidType(self): domain = 'tall.' + config.get('domain') root_uuid = helper.getRootUUID(domain) payload = {'type': 'badtype', 'shape': 10} headers = {'host': domain} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) def testPostInvalidShape(self): domain = 'tall.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) payload = {'type': 'H5T_STD_I32LE', 'shape': -5} headers = {'host': domain} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) def testPostNoBody(self): domain = 'tall.' + config.get('domain') root_uuid = helper.getRootUUID(domain) headers = {'host': domain} req = self.endpoint + "/datasets" rsp = requests.post(req, headers=headers) self.assertEqual(rsp.status_code, 400) def testPostWithLink(self): domain = 'newdsetwithlink.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain root_uuid = helper.getRootUUID(domain) type_vstr = {"charSet": "H5T_CSET_ASCII", "class": "H5T_STRING", "strPad": "H5T_STR_NULLTERM", "length": "H5T_VARIABLE" } payload = {'type': type_vstr, 'shape': 10, 'link': {'id': root_uuid, 'name': 'linked_dset'} } req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) def testPostCreationProps(self): domain = 'newdset_creationprops.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain creation_props = { 'allocTime': 'H5D_ALLOC_TIME_INCR', 'fillTime': 'H5D_FILL_TIME_NEVER', 'layout': {'class': 'H5D_CHUNKED', 'dims': [10, 10] }} payload = {'type': 'H5T_IEEE_F32LE', 'shape': (100, 100), 'creationProperties': creation_props } req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' root_uuid = helper.getRootUUID(domain) name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # read back the dataset and verify the creation props are returned req = self.endpoint + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('creationProperties' in rspJson) creationProps = rspJson['creationProperties'] self.assertTrue('allocTime' in creationProps) self.assertEqual(creationProps['allocTime'], 'H5D_ALLOC_TIME_INCR') self.assertTrue('fillTime' in creationProps) self.assertEqual(creationProps['fillTime'], 'H5D_FILL_TIME_NEVER') self.assertTrue('layout' in creationProps) layout = creationProps['layout'] self.assertTrue('class' in layout) self.assertEqual(layout['class'], 'H5D_CHUNKED') self.assertTrue('dims' in layout) self.assertEqual(layout['dims'], [10, 10]) self.assertEqual(len(creationProps.keys()), 3) # just return what we set def testInvalidCreationProps(self): domain = 'newdset_badcreationprops.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain creation_props = { 'layout': {'class': 'H5D_CHUNKED', 'dims': [200, 200] }} payload = {'type': 'H5T_IEEE_F32LE', 'shape': (100, 100), 'creationProperties': creation_props } req = self.endpoint + "/datasets" # should fail because the chunk dimension is larger than the dataset dimensions rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) # bad request def testPostDeflateFilter(self): domain = 'newdset_gzip.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain filters = [ { 'id': 1, 'level': 9 }, ] # deflate filter (gzip) creation_props = { 'layout': {'class': 'H5D_CHUNKED', 'dims': [100, 100] }, 'filters': filters } payload = {'type': 'H5T_IEEE_F32LE', 'shape': (1000, 1000), 'creationProperties': creation_props } req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' root_uuid = helper.getRootUUID(domain) name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # read back the dataset and verify the creation props are returned req = self.endpoint + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('creationProperties' in rspJson) creationProps = rspJson['creationProperties'] self.assertTrue('filters' in creationProps) filters = creationProps['filters'] self.assertEqual(len(filters), 1) filter_prop = filters[0] self.assertTrue('id' in filter_prop) self.assertEqual(filter_prop['id'], 1) self.assertTrue('class' in filter_prop) self.assertEqual(filter_prop['class'], 'H5Z_FILTER_DEFLATE') self.assertTrue('level' in filter_prop) self.assertEqual(filter_prop['level'], 9) self.assertTrue('layout' in creationProps) # should see chunks returned, even though it was specified in creation layout = creationProps['layout'] self.assertTrue('class' in layout) self.assertEqual(layout['class'], 'H5D_CHUNKED') self.assertTrue('dims' in layout) def testPostLZFFilter(self): domain = 'newdset_lzf.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain filters = [ { 'id': 32000}, ] # LZF filter creation_props = { 'filters': filters } payload = {'type': 'H5T_IEEE_F32LE', 'shape': (1000, 1000), 'creationProperties': creation_props } req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' root_uuid = helper.getRootUUID(domain) name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # read back the dataset and verify the creation props are returned req = self.endpoint + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('creationProperties' in rspJson) creationProps = rspJson['creationProperties'] self.assertTrue('filters' in creationProps) filters = creationProps['filters'] self.assertEqual(len(filters), 1) filter_prop = filters[0] self.assertTrue('id' in filter_prop) self.assertEqual(filter_prop['id'], 32000) self.assertTrue('class' in filter_prop) self.assertEqual(filter_prop['class'], 'H5Z_FILTER_LZF') self.assertTrue('level' not in filter_prop) self.assertTrue('layout' in creationProps) # should see chunks returned, even though it was specified in creation layout = creationProps['layout'] self.assertTrue('class' in layout) self.assertEqual(layout['class'], 'H5D_CHUNKED') self.assertTrue('dims' in layout) def testPostSZIPFilter(self): domain = 'newdset_szip.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain filters = [ { 'id': 4, 'bitsPerPixel': 8, 'coding': 'H5_SZIP_EC_OPTION_MASK', 'pixelsPerBlock': 32, 'pixelsPerScanline': 100}, ] # SZIP filter creation_props = { 'layout': {'class': 'H5D_CHUNKED', 'dims': (100, 100) }, 'filters': filters } payload = {'type': 'H5T_IEEE_F32LE', 'shape': (1000, 1000), 'creationProperties': creation_props } req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link new dataset as 'dset1' root_uuid = helper.getRootUUID(domain) name = 'dset1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # read back the dataset and verify the creation props are returned req = self.endpoint + "/datasets/" + dset_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('creationProperties' in rspJson) creationProps = rspJson['creationProperties'] self.assertTrue('filters' in creationProps) filters = creationProps['filters'] self.assertEqual(len(filters), 1) filter_prop = filters[0] self.assertTrue('id' in filter_prop) self.assertEqual(filter_prop['id'], 4) self.assertTrue('class' in filter_prop) self.assertEqual(filter_prop['class'], 'H5Z_FILTER_SZIP') self.assertTrue('level' not in filter_prop) self.assertTrue('bitsPerPixel' in filter_prop) self.assertEqual(filter_prop['bitsPerPixel'], 8) self.assertTrue('coding' in filter_prop) self.assertEqual(filter_prop['coding'], 'H5_SZIP_EC_OPTION_MASK') self.assertTrue('layout' in creationProps) # should see chunks returned, even though it was specified in creation layout = creationProps['layout'] self.assertTrue('class' in layout) self.assertEqual(layout['class'], 'H5D_CHUNKED') self.assertTrue('dims' in layout) def testDelete(self): domain = 'tall_dset112_deleted.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) g1_uuid = helper.getUUID(domain, root_uuid, 'g1') self.assertTrue(helper.validateId(g1_uuid)) g11_uuid = helper.getUUID(domain, g1_uuid, 'g1.1') self.assertTrue(helper.validateId(g11_uuid)) d112_uuid = helper.getUUID(domain, g11_uuid, 'dset1.1.2') self.assertTrue(helper.validateId(d112_uuid)) req = self.endpoint + "/datasets/" + d112_uuid headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # verify that a GET on the dataset fails req = helper.getEndpoint() + "/datasets/" + d112_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 410) def testDeleteRootChild(self): # test delete with a dset that is child of root domain = 'scalar_1d_deleted.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '1d') self.assertTrue(helper.validateId(dset_uuid)) req = self.endpoint + "/datasets/" + dset_uuid headers = {'host': domain} # verify that a GET on the dataset succeeds req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # now delete the dataset rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # verify that a GET on the dataset fails req = helper.getEndpoint() + "/datasets/" + dset_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 410) def testDeleteAnonymous(self): # test delete works with anonymous dataset domain = 'tall_dset22_deleted.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) g2_uuid = helper.getUUID(domain, root_uuid, 'g2') self.assertTrue(helper.validateId(g2_uuid)) d22_uuid = helper.getUUID(domain, g2_uuid, 'dset2.2') self.assertTrue(helper.validateId(d22_uuid)) # delete g2, that will make dataset anonymous req = self.endpoint + "/groups/" + g2_uuid headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # verify that a GET on the dataset succeeds still req = helper.getEndpoint() + "/datasets/" + d22_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # delete dataset... req = self.endpoint + "/datasets/" + d22_uuid headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # verify that a GET on the dataset fails req = helper.getEndpoint() + "/datasets/" + d22_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 410) def testDeleteBadUUID(self): domain = 'tall_dset112_deleted.' + config.get('domain') req = self.endpoint + "/datasets/dff53814-2906-11e4-9f76-3c15c2da029e" headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 404) def testGetCollection(self): for domain_name in ('tall', 'tall_ro'): domain = domain_name + '.' + config.get('domain') req = self.endpoint + "/datasets" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) datasetIds = rspJson["datasets"] self.assertEqual(len(datasetIds), 4) for uuid in datasetIds: self.assertTrue(helper.validateId(uuid)) def testGetCollectionBatch(self): domain = 'dset1k.' 
+ config.get('domain') req = self.endpoint + "/datasets" headers = {'host': domain} params = {'Limit': 50 } uuids = set() # get ids in 20 batches of 50 links each last_uuid = None for batchno in range(20): if last_uuid: params['Marker'] = last_uuid rsp = requests.get(req, headers=headers, params=params) self.assertEqual(rsp.status_code, 200) if rsp.status_code != 200: break rspJson = json.loads(rsp.text) dsetIds = rspJson['datasets'] self.assertEqual(len(dsetIds) <= 50, True) for dsetId in dsetIds: uuids.add(dsetId) last_uuid = dsetId if len(dsetIds) == 0: break self.assertEqual(len(uuids), 1000) # should get 1000 unique uuid's if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/datasettypetest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import requests import config import helper import unittest import json class DatasetTypeTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(DatasetTypeTest, self).__init__(*args, **kwargs) self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) def testGet(self): domain = 'tall.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) g2_uuid = helper.getUUID(domain, root_uuid, 'g2') dset21_uuid = helper.getUUID(domain, g2_uuid, 'dset2.1') req = helper.getEndpoint() + "/datasets/" + dset21_uuid + '/type' headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) typeItem = rspJson['type'] self.assertEqual(typeItem['base'], 'H5T_IEEE_F32BE') self.assertEqual(typeItem['class'], 'H5T_FLOAT') def testGetScalar(self): domain = 'scalar.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '0d') req = helper.getEndpoint() + "/datasets/" + dset_uuid + '/type' headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) typeItem = rspJson['type'] self.assertEqual(typeItem['base'], 'H5T_STD_I32LE') self.assertEqual(typeItem['class'], 'H5T_INTEGER') def testGetCompound(self): domain = 'compound.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid + '/type' headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_COMPOUND') self.assertTrue('fields' in typeItem) fields = typeItem['fields'] self.assertEqual(len(fields), 5) timeField = fields[1] self.assertEqual(timeField['name'], 'time') self.assertTrue('type' in timeField) timeFieldType = timeField['type'] self.assertEqual(timeFieldType['class'], 'H5T_STRING') self.assertEqual(timeFieldType['charSet'], 'H5T_CSET_ASCII') self.assertEqual(timeFieldType['length'], 6) self.assertEqual(timeFieldType['strPad'], 'H5T_STR_NULLPAD') if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/datatypetest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import requests import config import helper import unittest import json class DatatypeTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(DatatypeTest, self).__init__(*args, **kwargs) self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) def testGet(self): domain = 'namedtype.' + config.get('domain') root_uuid = helper.getRootUUID(domain) dtype_uuid = helper.getUUID(domain, root_uuid, 'dtype_simple') self.assertTrue(helper.validateId(dtype_uuid)) req = helper.getEndpoint() + "/datatypes/" + dtype_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rspJson['id'], dtype_uuid) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_FLOAT') self.assertEqual(typeItem['base'], 'H5T_IEEE_F32LE') self.assertEqual(rspJson['attributeCount'], 1) def testGetCompound(self): domain = 'namedtype.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) dtype_uuid = helper.getUUID(domain, root_uuid, 'dtype_compound') req = helper.getEndpoint() + "/datatypes/" + dtype_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) typeItem = rspJson['type'] self.assertEqual(typeItem['class'], 'H5T_COMPOUND') self.assertTrue('fields' in typeItem) fields = typeItem['fields'] self.assertEqual(len(fields), 2) tempField = fields[0] self.assertEqual(tempField['name'], 'temp') tempFieldType = tempField['type'] self.assertEqual(tempFieldType['class'], 'H5T_INTEGER') self.assertEqual(tempFieldType['base'], 'H5T_STD_I32LE') pressureField = fields[1] self.assertEqual(pressureField['name'], 'pressure') pressureFieldType = pressureField['type'] self.assertEqual(pressureFieldType['class'], 'H5T_FLOAT') self.assertEqual(pressureFieldType['base'], 'H5T_IEEE_F32LE') def testPost(self): domain = 'newdtype.datatypetest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain payload = {'type': 'H5T_IEEE_F32LE'} req = self.endpoint + "/datatypes" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create datatype rspJson = json.loads(rsp.text) dtype_uuid = rspJson['id'] self.assertTrue(helper.validateId(dtype_uuid)) # link new dataset as 'dtype1' root_uuid = helper.getRootUUID(domain) name = 'dtype1' req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {'id': dtype_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) def testPostWithLink(self): # test PUT_root domain = 'newlinkedtype.datatypetest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) root_uuid = helper.getRootUUID(domain) payload = { 'type': 'H5T_IEEE_F64LE', 'link': {'id': root_uuid, 'name': 'linked_dtype'} } req = self.endpoint + "/datatypes" headers = {'host': domain} # create a new group rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) rspJson = json.loads(rsp.text) self.assertEqual(rspJson["attributeCount"], 0) self.assertTrue(helper.validateId(rspJson["id"]) ) def testPostTypes(self): domain = 'datatypes.datatypetest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain root_uuid = helper.getRootUUID(domain) # list of types supported datatypes = ( 'H5T_STD_I8LE', 'H5T_STD_U8LE', 'H5T_STD_I16LE', 'H5T_STD_U16LE', 'H5T_STD_I32LE', 'H5T_STD_U32LE', 'H5T_STD_I64LE', 'H5T_STD_U64LE', 'H5T_IEEE_F32LE', 'H5T_IEEE_F64LE' ) #todo: check on 'vlen_bytes', 'vlen_unicode' for datatype in datatypes: payload = {'type': datatype} req = self.endpoint + "/datatypes" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create datatypes rspJson = json.loads(rsp.text) dtype_uuid = rspJson['id'] self.assertTrue(helper.validateId(dtype_uuid)) # link new datatype using the type name name = datatype req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dtype_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) def testPostCompoundType(self): domain = 'compound.datatypetest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain root_uuid = helper.getRootUUID(domain) fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}) datatype = {'class': 'H5T_COMPOUND', 'fields': fields } payload = {'type': datatype} req = self.endpoint + "/datatypes" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create datatype rspJson = json.loads(rsp.text) dtype_uuid = rspJson['id'] self.assertTrue(helper.validateId(dtype_uuid)) # link the new datatype name = "dtype_compound" req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dtype_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) """ This test fails due to h5py issue #540: https://github.com/h5py/h5py/issues/540 Commenting out for now. def testPostVLenStringType(self): domain = 'vlenstr.datatypetest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain root_uuid = helper.getRootUUID(domain) data_type = { 'charSet': 'H5T_CSET_ASCII', 'class': 'H5T_STRING', 'strPad': 'H5T_STR_NULLPAD', 'length': 'H5T_VARIABLE'} payload = {'type': data_type} req = self.endpoint + "/datatypes" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create datatype rspJson = json.loads(rsp.text) dtype_uuid = rspJson['id'] self.assertTrue(helper.validateId(dtype_uuid)) # link the new datatype name = "dtype_vlenstr" req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dtype_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) """ def testPostInvalidType(self): domain = 'tall.' + config.get('domain') root_uuid = helper.getRootUUID(domain) payload = {'type': 'badtype'} headers = {'host': domain} req = self.endpoint + "/datatypes" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) def testDelete(self): domain = 'namedtype_deleted.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) dtype_uuid = helper.getUUID(domain, root_uuid, 'dtype_simple') self.assertTrue(helper.validateId(dtype_uuid)) req = helper.getEndpoint() + "/datatypes/" + dtype_uuid headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # verify that it's gone req = helper.getEndpoint() + "/datatypes/" + dtype_uuid headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 410) def testGetCollection(self): domain = 'namedtype.' + config.get('domain') req = self.endpoint + "/datatypes" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) datatypeIds = rspJson["datatypes"] self.assertEqual(len(datatypeIds), 2) for uuid in datatypeIds: self.assertTrue(helper.validateId(uuid)) def testGetCollectionBatch(self): domain = 'type1k.' + config.get('domain') req = self.endpoint + "/datatypes" headers = {'host': domain} params = {'Limit': 50 } uuids = set() # get ids in 20 batches of 50 links each last_uuid = None for batchno in range(20): if last_uuid: params['Marker'] = last_uuid rsp = requests.get(req, headers=headers, params=params) self.assertEqual(rsp.status_code, 200) if rsp.status_code != 200: break rspJson = json.loads(rsp.text) typeIds = rspJson["datatypes"] self.assertEqual(len(typeIds) <= 50, True) for typeId in typeIds: uuids.add(typeId) last_uuid = typeId if len(typeIds) == 0: break self.assertEqual(len(uuids), 1000) # should get 1000 unique uuid's if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/dirtest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
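#
# The DirTest cases below exercise h5serv's table-of-contents (TOC) domain:
# a GET on "/" with no host header returns the TOC root group, and the
# directories and HDF5 files on disk appear under it as external links.
# A minimal standalone sketch of that browse pattern follows; the endpoint
# address is an assumption, adjust it to match your server configuration.

import json
import requests

ENDPOINT = "http://127.0.0.1:5000"  # assumed local h5serv address


def list_toc_top_level(endpoint=ENDPOINT):
    # GET on "/" with no host header returns the TOC domain
    rsp = requests.get(endpoint + "/")
    rsp.raise_for_status()
    root_uuid = json.loads(rsp.text)["root"]
    # the TOC root's links point at the directories and files served by h5serv
    rsp = requests.get(endpoint + "/groups/" + root_uuid + "/links")
    rsp.raise_for_status()
    # return (title, link class) pairs, e.g. external links for *.h5 files
    return [(link["title"], link["class"])
            for link in json.loads(rsp.text)["links"]]
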
# ############################################################################## import requests import config import helper import unittest import json import os import time from shutil import copyfile from tornado.escape import url_escape class DirTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(DirTest, self).__init__(*args, **kwargs) self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) self.user1 = {'username':'test_user1', 'password':'test'} def testGetToc(self): domain = config.get('domain') if domain.startswith('test.'): domain = domain[5:] req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) self.assertTrue('root' in rspJson) root_uuid = rspJson['root'] req = self.endpoint + "/groups/" + root_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # get top-level links req = self.endpoint + "/groups/" + root_uuid + "/links" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("links" in rspJson) links = rspJson["links"] home_dir = config.get("home_dir") for item in links: if item['title'] == home_dir: self.assertTrue(False) # should not see home dir from root toc # get group uuid that maps to "test" sub-directory req = self.endpoint + "/groups/" + root_uuid + "/links/test" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("link" in rspJson) link = rspJson['link'] group_uuid = link['id'] # verify we see "tall" under links name = "tall" req = self.endpoint + "/groups/" + group_uuid + "/links/" + name rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("link" in rspJson) link = rspJson['link'] self.assertEqual(link['class'], 'H5L_TYPE_EXTERNAL') self.assertEqual(link['title'], name) self.assertEqual(link['h5path'], '/') self.assertEqual(link['h5domain'], name + '.test.' + domain) # verify that "filename with space" shows up properly url encoded name = "filename with space" name_escaped = url_escape(name) req = self.endpoint + "/groups/" + group_uuid + "/links/" + name rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("link" in rspJson) link = rspJson['link'] self.assertEqual(link['class'], 'H5L_TYPE_EXTERNAL') self.assertEqual(link['title'], name) self.assertEqual(link['h5path'], '/') self.assertEqual(link['h5domain'], name_escaped + '.test.' 
+ domain) # get all the links in the test group req = self.endpoint + "/groups/" + group_uuid + "/links" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("links" in rspJson) links = rspJson["links"] tall_link = None # normal link file_space_link = None # link that contains a space file_dot_link = None # link that contains a dot for link in links: self.assertTrue("title" in link) self.assertTrue("class" in link) if link['title'] == "tall": tall_link = link elif link['title'] == "filename with space": file_space_link = link elif link['title'] == "tall.dots.need.to.be.encoded": file_dot_link = link self.assertTrue(tall_link is not None) name = "tall" link = tall_link self.assertEqual(link['class'], 'H5L_TYPE_EXTERNAL') self.assertEqual(link['title'], name) self.assertEqual(link['h5path'], '/') self.assertEqual(link['h5domain'], name + '.test.' + domain) href = "groups/" + group_uuid + "/links/" + name self.assertTrue(link['href'].endswith(href)) self.assertTrue(file_space_link is not None) name = "filename with space" link = file_space_link self.assertEqual(link['class'], 'H5L_TYPE_EXTERNAL') self.assertEqual(link['title'], name) self.assertEqual(link['h5path'], '/') self.assertEqual(link['h5domain'], url_escape(name) + '.test.' + domain) href = "groups/" + group_uuid + "/links/" + url_escape(name) self.assertTrue(link['href'].endswith(href)) self.assertTrue(file_dot_link is not None) name = "tall.dots.need.to.be.encoded" name_encoded = name.replace('.', '%2E') link = file_dot_link self.assertEqual(link['class'], 'H5L_TYPE_EXTERNAL') self.assertEqual(link['title'], name) self.assertEqual(link['h5path'], '/') self.assertEqual(link['h5domain'], name_encoded + '.test.' + domain) href = "groups/" + group_uuid + "/links/" + name self.assertTrue(link['href'].endswith(href)) def testGetUserToc(self): domain = config.get('domain') if domain.startswith('test.'): domain = domain[5:] # backup over the test part home_dir = config.get("home_dir") user_domain = self.user1['username'] + '.' + home_dir + '.' + domain req = self.endpoint + "/" headers = {'host': user_domain} # this should get the users .toc file rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) self.assertTrue('root' in rspJson) root_uuid = rspJson['root'] req = self.endpoint + "/groups/" + root_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) if os.name == 'nt': return # symbolic links used below are not supported on Windows # get link to 'public' folder req = self.endpoint + "/groups/" + root_uuid + "/links/public" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("link" in rspJson) link_json = rspJson["link"] self.assertEqual(link_json["class"], "H5L_TYPE_EXTERNAL") self.assertEqual(link_json["title"], "public") self.assertEqual(link_json["h5domain"], domain) self.assertEqual(link_json["h5path"], "/public") # get link to 'tall' file req = self.endpoint + "/groups/" + root_uuid + "/links/tall" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("link" in rspJson) link_json = rspJson["link"] self.assertEqual(link_json["class"], "H5L_TYPE_EXTERNAL") self.assertEqual(link_json["title"], "tall") self.assertEqual(link_json["h5domain"], "tall." 
+ user_domain) def testPutUserDomain(self): domain = config.get('domain') home_dir = config.get("home_dir") if domain.startswith('test.'): domain = domain[5:] # backup over the test part user_domain = self.user1['username'] + '.' + home_dir + '.' + domain # this should get the users .toc file headers = {'host': user_domain } req = self.endpoint + '/' rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('root' in rspJson) toc_root_uuid = rspJson['root'] req = self.endpoint + "/groups/" + toc_root_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # verify that "myfile" doesn't exist yet user_file = "myfile." + user_domain req = self.endpoint + "/" headers = {'host': user_file} #verify that the domain doesn't exist yet rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 404) # do a put on "myfile" rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # now the domain should exist rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # go back to users toc and get "/myfile" link headers = {'host': user_domain } req = self.endpoint + "/groups/" + toc_root_uuid + "/links/myfile" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) link = rspJson['link'] self.assertTrue('class' in link) self.assertEqual(link['class'], "H5L_TYPE_EXTERNAL") self.assertTrue('h5path' in link) self.assertEqual(link['h5path'], "/") self.assertTrue('h5domain' in link) self.assertEqual(link['h5domain'], "myfile." + user_domain) def testDeleteUserDomain(self): domain = config.get('domain') home_dir = config.get("home_dir") if domain.startswith('test.'): domain = domain[5:] # backup over the test part user_domain = self.user1['username'] + '.' + home_dir + '.' + domain # this should get the users .toc file headers = {'host': user_domain } req = self.endpoint + '/' rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('root' in rspJson) toc_root_uuid = rspJson['root'] req = self.endpoint + "/groups/" + toc_root_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # "tall_deleteme" should be a link req = req + "/link/tall_deleteme" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # And we should be able to query directly user_file = "tall_deleteme." + user_domain req = self.endpoint + "/" headers = {'host': user_file} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) # Delete "tall_deleteme" user_file = "tall_deleteme." 
+ user_domain req = self.endpoint + "/" headers = {'host': user_file} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # link in user TOC should be removed req = self.endpoint + "/groups/" + toc_root_uuid + "/link/tall_deleteme" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 404) def testNoHostHeader(self): req = self.endpoint + "/" rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) self.assertTrue('root' in rspJson) def testPutDomain(self): domain_name = "dirtest_putdomain" # get toc root uuid req = self.endpoint + "/" rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('root' in rspJson) toc_root_uuid = rspJson['root'] # get toc 'test' group uuid req = self.endpoint + "/groups/" + toc_root_uuid rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) req = self.endpoint + "/groups/" + toc_root_uuid + "/links/test" rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("link" in rspJson) link = rspJson['link'] test_group_uuid = link['id'] # verify that the domain name is not present req = self.endpoint + "/groups/" + test_group_uuid + "/links/" + domain_name rsp = requests.get(req) self.assertTrue(rsp.status_code in (404, 410)) # create a new domain domain = domain_name + '.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) rspJson = json.loads(rsp.text) # external link should exist now req = self.endpoint + "/groups/" + test_group_uuid + "/links/" + domain_name rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) # delete the domain req = self.endpoint + "/" headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # external link should be gone req = self.endpoint + "/groups/" + test_group_uuid + "/links/" + domain_name rsp = requests.get(req) self.assertEqual(rsp.status_code, 410) def testWatchdog(self): domain_name = "dirtest_watchdogadd" # get toc root uuid req = self.endpoint + "/" rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('root' in rspJson) toc_root_uuid = rspJson['root'] # get toc 'test' group uuid req = self.endpoint + "/groups/" + toc_root_uuid rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) req = self.endpoint + "/groups/" + toc_root_uuid + "/links/test" rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("link" in rspJson) link = rspJson['link'] test_group_uuid = link['id'] # verify that the domain name is not present req = self.endpoint + "/groups/" + test_group_uuid + "/links/" + domain_name rsp = requests.get(req) self.assertTrue(rsp.status_code in (404, 410)) # copy file to target domain src_file = "../test_files/tall.h5" des_file = "../../data/test/" + domain_name + ".h5" copyfile(src_file, des_file) # sleep to give the watchdog time to update the toc time.sleep(2) # external link should exist now req = self.endpoint + "/groups/" + test_group_uuid + "/links/" + domain_name rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) # delete the file os.remove(des_file) # sleep to give the watchdog time to update the toc time.sleep(2) # external link should be gone req = 
self.endpoint + "/groups/" + test_group_uuid + "/links/" + domain_name
        rsp = requests.get(req)
        self.assertEqual(rsp.status_code, 410)

    def testDeleteToc(self):
        # test DELETE toc
        req = self.endpoint + "/"
        rsp = requests.delete(req)
        self.assertEqual(rsp.status_code, 403)

    def testPutToc(self):
        # test PUT toc
        req = self.endpoint + "/"
        rsp = requests.put(req)
        # status code should be Forbidden or Conflict, depending on whether
        # the TOC file already exists or not
        self.assertTrue(rsp.status_code in (403, 409))

    def testDeleteRoot(self):
        req = self.endpoint + "/"
        rsp = requests.get(req)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertTrue('root' in rspJson)
        root_uuid = rspJson['root']
        req = self.endpoint + "/groups/" + root_uuid
        rsp = requests.delete(req)
        self.assertEqual(rsp.status_code, 403)

    def testPutLink(self):
        req = self.endpoint + "/"
        rsp = requests.get(req)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertTrue('root' in rspJson)
        root_uuid = rspJson['root']
        name = 'dirtest.testPutLink'
        req = helper.getEndpoint() + "/groups/" + root_uuid + "/links/" + name
        payload = {"h5path": "somewhere"}
        # verify softlink does not exist
        rsp = requests.get(req, data=json.dumps(payload))
        self.assertEqual(rsp.status_code, 404)
        # make request
        rsp = requests.put(req, data=json.dumps(payload))
        self.assertEqual(rsp.status_code, 403)

    def testDeleteLink(self):
        req = self.endpoint + "/"
        rsp = requests.get(req)
        self.assertEqual(rsp.status_code, 200)
        rspJson = json.loads(rsp.text)
        self.assertTrue('root' in rspJson)
        root_uuid = rspJson['root']
        req = self.endpoint + "/groups/" + root_uuid + "/links/test"
        rsp = requests.get(req)
        self.assertEqual(rsp.status_code, 200)
        rsp = requests.delete(req)  # try to delete the link
        self.assertEqual(rsp.status_code, 403)


if __name__ == '__main__':
    unittest.main()

================================================
FILE: test/integ/grouptest.py
================================================
##############################################################################
# Copyright by The HDF Group.                                                #
# All rights reserved.                                                       #
#                                                                            #
# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and     #
# Utilities. The full HDF5 REST Server copyright notice, including          #
# terms governing use, modification, and redistribution, is contained in    #
# the file COPYING, which can be found at the root of the source code       #
# distribution tree. If you do not have access to this file, you may        #
# request a copy from help@hdfgroup.org.                                    #
##############################################################################
import requests
import config
import helper
import unittest
import json


class GroupTest(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(GroupTest, self).__init__(*args, **kwargs)
        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))

    def testGet(self):
        for domain_name in ('tall', 'tall_ro'):
            domain = domain_name + '.'
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) rootUUID = rspJson["root"] self.assertTrue(helper.validateId(rootUUID)) req = self.endpoint + "/groups/" + rootUUID rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rsp.status_code, 200) self.assertEqual(rspJson["linkCount"], 2) self.assertEqual(rspJson["attributeCount"], 2) self.assertFalse("links" in rspJson) def testGetInvalidUUID(self): for domain_name in ('tall', 'tall_ro'): domain = domain_name + '.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) import uuid bad_uuid = str(uuid.uuid1()) req = self.endpoint + "/groups/" + bad_uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 404) def testGetWithHostQuery(self): for domain_name in ('tall',): domain = domain_name + '.' + config.get('domain') req = self.endpoint + "/?host=" + domain rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) rootUUID = rspJson["root"] self.assertTrue(helper.validateId(rootUUID)) req = self.endpoint + "/groups/" + rootUUID + "?host=" + domain rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rspJson["linkCount"], 2) self.assertEqual(rspJson["attributeCount"], 2) self.assertEqual(rsp.status_code, 200) def testGetWithLinks(self): for domain_name in ('tall',): domain = domain_name + '.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) rootUUID = rspJson["root"] self.assertTrue(helper.validateId(rootUUID)) req = self.endpoint + "/groups/" + rootUUID params = {'include_links': True } rsp = requests.get(req, params=params, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rsp.status_code, 200) self.assertEqual(rspJson["linkCount"], 2) self.assertEqual(rspJson["attributeCount"], 2) self.assertTrue("links" in rspJson) links = rspJson["links"] self.assertEqual(len(links), 2) for link in links: self.assertTrue("collection" in link) self.assertTrue(link["collection"], 'groups') self.assertTrue("class" in link) self.assertEqual(link["class"], 'H5L_TYPE_HARD') self.assertTrue("id" in link) self.assertTrue("title" in link) self.assertTrue("href" in link) def testPost(self): # test PUT_root domain = 'testGroupPost.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) req = self.endpoint + "/groups" headers = {'host': domain} # create a new group rsp = requests.post(req, headers=headers) self.assertEqual(rsp.status_code, 201) rspJson = json.loads(rsp.text) self.assertEqual(rspJson["linkCount"], 0) self.assertEqual(rspJson["attributeCount"], 0) self.assertTrue(helper.validateId(rspJson["id"]) ) def testPostWithLink(self): # test PUT_root domain = 'testGroupPostWithLink.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) root_uuid = helper.getRootUUID(domain) payload = { 'link': {'id': root_uuid, 'name': 'linked_dset'} } req = self.endpoint + "/groups" headers = {'host': domain} # create a new group rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) rspJson = json.loads(rsp.text) self.assertEqual(rspJson["linkCount"], 0) self.assertEqual(rspJson["attributeCount"], 0) self.assertTrue(helper.validateId(rspJson["id"]) ) # try repeat post with same link - should return 409 rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 409) def testBadPost(self): domain = 'tall.' + config.get('domain') req = self.endpoint + "/groups/dff53814-2906-11e4-9f76-3c15c2da029e" headers = {'host': domain} rsp = requests.post(req, headers=headers) # post is not allowed to provide uri, so should fail self.assertEqual(rsp.status_code, 405) def testDelete(self): domain = 'tall_g2_deleted.' + config.get('domain') rootUUID = helper.getRootUUID(domain) helper.validateId(rootUUID) g2UUID = helper.getUUID(domain, rootUUID, 'g2') self.assertTrue(helper.validateId(g2UUID)) req = self.endpoint + "/groups/" + g2UUID headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("hrefs" in rspJson) # do a GET, should return 410 (GONE) req = self.endpoint + "/groups/" + g2UUID rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 410) def testDeleteAnonymous(self): # Test deleting anonymous (not linked) group domain = 'testGroupDelete.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) req = self.endpoint + "/groups" headers = {'host': domain} # create a new group rsp = requests.post(req, headers=headers) self.assertEqual(rsp.status_code, 201) rspJson = json.loads(rsp.text) uuid = rspJson["id"] self.assertTrue(helper.validateId(uuid)) req = self.endpoint + "/groups/" + uuid headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # do a GET, should return 410 (GONE) req = self.endpoint + "/groups/" + uuid rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 410) def testDeleteBadUUID(self): domain = 'tall_g2_deleted.' + config.get('domain') req = self.endpoint + "/groups/dff53814-2906-11e4-9f76-3c15c2da029e" headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 404) def testDeleteRoot(self): domain = 'tall.' + config.get('domain') headers = {'host': domain} req = self.endpoint + "/" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) rootUUID = rspJson["root"] req = self.endpoint + "/groups/" + rootUUID rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 403) def testGetCollection(self): for domain_name in ('tall', 'tall_ro'): domain = domain_name + '.' 
+ config.get('domain') req = self.endpoint + "/groups" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) groupIds = rspJson["groups"] self.assertEqual(len(groupIds), 5) for uuid in groupIds: self.assertTrue(helper.validateId(uuid)) def testGetCollectionBatch(self): domain = 'group1k.' + config.get('domain') req = self.endpoint + "/groups" headers = {'host': domain} params = {'Limit': 50 } uuids = set() # get ids in 20 batches of 50 links each last_uuid = None for batchno in range(20): if last_uuid: params['Marker'] = last_uuid rsp = requests.get(req, headers=headers, params=params) self.assertEqual(rsp.status_code, 200) if rsp.status_code != 200: break rspJson = json.loads(rsp.text) groupIds = rspJson['groups'] self.assertEqual(len(groupIds) <= 50, True) for groupId in groupIds: uuids.add(groupId) last_uuid = groupId if len(groupIds) == 0: break self.assertEqual(len(uuids), 1000) # should get 1000 unique uuid's if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/helper.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import six if six.PY3: unicode = str import requests import config import unittest import json import base64 """ Helper function - get endpoint we'll send http requests to """ def getEndpoint(): endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) return endpoint """ Helper function - return true if the parameter looks like a UUID """ def validateId(id): if type(id) != str and type(id) != unicode: # should be a string return False if len(id) != 36: # id's returned by uuid.uuid1() are always 36 chars long return False return True """ Helper function - get auth string """ def getAuthString(user, password): auth_string = user + ':' + password auth_string = auth_string.encode('utf-8') auth_string = base64.b64encode(auth_string) auth_string = b"Basic " + auth_string return auth_string """ Helper function - get root uuid """ def getRootUUID(domain, user=None, password=None): req = getEndpoint() + "/" headers = {'host': domain} if user is not None: # if user is supplied, add the auth header headers['Authorization'] = getAuthString(user, password) rsp = requests.get(req, headers=headers) rootUUID = None if rsp.status_code == 200: rspJson = json.loads(rsp.text) rootUUID = rspJson["root"] return rootUUID """ Helper function - get uuid given parent group uuid and link name """ def getUUID(domain, parentUuid, name): if type(name) != str or len(name) == 0: return None req = getEndpoint() + "/groups/" + parentUuid + "/links/" + name headers = {'host': domain} rsp = requests.get(req, headers=headers) tgtUuid = None if rsp.status_code == 200: rspJson = json.loads(rsp.text) target = rspJson['link'] if target['class'] != 'H5L_TYPE_HARD': # soft/external links return None tgtUuid = target['id'] return tgtUuid """ 
Helper function - get uuid for a given path """ def getUUIDByPath(domain, path, user=None, password=None): if path[0] != '/': raise KeyError("only abs paths") # only abs paths parent_uuid = getRootUUID(domain, user=user, password=password) if path == '/': return parent_uuid headers = {'host': domain} if user is not None: # if user is supplied, add the auth header headers['Authorization'] = getAuthString(user, password) # make a fake tgt_json to represent 'link' to root group tgt_json = {'collection': "groups", 'class': "H5L_TYPE_HARD", 'id': parent_uuid } tgt_uuid = None names = path.split('/') for name in names: if not name: continue if parent_uuid is None: raise KeyError("not found") req = getEndpoint() + "/groups/" + parent_uuid + "/links/" + name rsp = requests.get(req, headers=headers) if rsp.status_code != 200: raise KeyError("not found") rsp_json = json.loads(rsp.text) tgt_json = rsp_json['link'] if tgt_json['class'] == 'H5L_TYPE_HARD': #print "hard link, collection:", link_json['collection'] if tgt_json['collection'] == 'groups': parent_uuid = tgt_json['id'] else: parent_uuid = None tgt_uuid = tgt_json['id'] else: raise KeyError("non-hard link") return tgt_uuid """ Helper function - create an anonymous group """ def createGroup(domain): # test PUT_root req = getEndpoint() + "/groups" headers = {'host': domain} # create a new group rsp = requests.post(req, headers=headers) if rsp.status_code != 201: return None rspJson = json.loads(rsp.text) id = rspJson["id"] return id """ Helper function - link given object/name """ def linkObject(domain, objUuid, name, parentUuid=None): if parentUuid == None: # use root as parent if not specified parentUuid = getRootUUID(domain) req = getEndpoint() + "/groups/" + parentUuid + "/links/" + name payload = {"id": objUuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) if rsp.status_code == 201: return True else: return False """ Helper function - return data from dataset """ def readDataset(domain, dsetUuid): req = getEndpoint() + "/datasets/" + dsetUuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) if rsp.status_code != 200: return None rspJson = json.loads(rsp.text) data = rspJson['value'] return data """ Helper function - convert name to url-friendly format Replaces all non-alphanumeric characters with '%' """ def nameEncode(name): out = [] for ch in name: if ch.isalnum(): out.append(ch) elif ch == ' ': out.append('+') else: hex = format(ord(ch), '02X') out.append('%' + hex) return ''.join(out) ================================================ FILE: test/integ/linktest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
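##############################################################################

# The LinkTest cases below drive the link operations of the REST API:
# GET /groups/{uuid}/links/{name} returns the link JSON (hard, soft,
# external, or user-defined), and a PUT on the same URI creates a link.
# A body of {"id": <uuid>} creates a hard link; {"h5path": <path>} creates
# a soft link. A minimal sketch of the two PUT forms follows; the endpoint
# and the example domain are assumptions, not values taken from this repo.

import json
import requests

ENDPOINT = "http://127.0.0.1:5000"                    # assumed h5serv address
HEADERS = {"host": "tall_updated.test.hdfgroup.org"}  # example request domain


def put_hard_link(group_uuid, name, target_uuid):
    # hard link: the payload carries the UUID of an existing object
    req = ENDPOINT + "/groups/" + group_uuid + "/links/" + name
    rsp = requests.put(req, data=json.dumps({"id": target_uuid}), headers=HEADERS)
    return rsp.status_code  # expect 201 on create, 409 if the link already exists


def put_soft_link(group_uuid, name, h5path):
    # soft link: the payload carries an h5path string instead of an object id
    req = ENDPOINT + "/groups/" + group_uuid + "/links/" + name
    rsp = requests.put(req, data=json.dumps({"h5path": h5path}), headers=HEADERS)
    return rsp.status_code
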
import requests
import config
import unittest
import helper
import json
import logging


class LinkTest(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(LinkTest, self).__init__(*args, **kwargs)
        self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port'))

    def testGetHard(self):
        logging.info("LinkTest.testGetHard")
        for domain_name in ('tall', 'tall_ro'):
            g1_uuid = None
            domain = domain_name + '.' + config.get('domain')
            root_uuid = helper.getRootUUID(domain)
            req = self.endpoint + "/groups/" + root_uuid + "/links/g1"
            headers = {'host': domain}
            rsp = requests.get(req, headers=headers)
            self.assertEqual(rsp.status_code, 200)
            rspJson = json.loads(rsp.text)
            self.assertTrue("created" in rspJson)
            self.assertTrue("lastModified" in rspJson)
            self.assertTrue('link' in rspJson)
            target = rspJson['link']
            self.assertTrue(helper.validateId(target['id']))
            self.assertEqual(target['class'], 'H5L_TYPE_HARD')
            self.assertEqual(target['title'], 'g1')
            self.assertEqual(target['collection'], 'groups')

    def testGetMissing(self):
        logging.info("LinkTest.testGetMissing")
        for domain_name in ('tall', 'tall_ro'):
            g1_uuid = None
            domain = domain_name + '.' + config.get('domain')
            root_uuid = helper.getRootUUID(domain)
            req = self.endpoint + "/groups/" + root_uuid + "/links/not_a_link"
            headers = {'host': domain}
            rsp = requests.get(req, headers=headers)
            self.assertEqual(rsp.status_code, 404)

    def testGetSoft(self):
        logging.info("LinkTest.testGetSoft")
        for domain_name in ('tall', 'tall_ro'):
            g1_uuid = None
            domain = domain_name + '.' + config.get('domain')
            root_uuid = helper.getRootUUID(domain)
            g1_uuid = helper.getUUID(domain, root_uuid, 'g1')
            g12_uuid = helper.getUUID(domain, g1_uuid, 'g1.2')
            g121_uuid = helper.getUUID(domain, g12_uuid, 'g1.2.1')
            req = self.endpoint + "/groups/" + g121_uuid + "/links/slink"
            headers = {'host': domain}
            rsp = requests.get(req, headers=headers)
            self.assertEqual(rsp.status_code, 200)
            rspJson = json.loads(rsp.text)
            self.assertTrue("created" in rspJson)
            self.assertTrue("lastModified" in rspJson)
            target = rspJson['link']
            self.assertEqual(target['h5path'], 'somevalue')
            self.assertEqual(target['class'], 'H5L_TYPE_SOFT')
            self.assertEqual(target['title'], 'slink')
            self.assertTrue('collection' not in target)

    def testGetExternal(self):
        logging.info("LinkTest.testGetExternal")
        for domain_name in ('tall', 'tall_ro'):
            g1_uuid = None
            domain = domain_name + '.' + config.get('domain')
            root_uuid = helper.getRootUUID(domain)
            g1_uuid = helper.getUUID(domain, root_uuid, 'g1')
            g12_uuid = helper.getUUID(domain, g1_uuid, 'g1.2')
            req = self.endpoint + "/groups/" + g12_uuid + "/links/extlink"
            headers = {'host': domain}
            rsp = requests.get(req, headers=headers)
            self.assertEqual(rsp.status_code, 200)
            rspJson = json.loads(rsp.text)
            self.assertTrue("created" in rspJson)
            self.assertTrue("lastModified" in rspJson)
            target = rspJson['link']
            # self.assertEqual(target, "http://somefile/#h5path(somepath)")
            expected_h5domain = 'somefile' + '.' + config.get('domain')
            self.assertEqual(target['class'], 'H5L_TYPE_EXTERNAL')
            self.assertEqual(target['h5domain'], expected_h5domain)
            self.assertEqual(target['h5path'], 'somepath')
            self.assertEqual(target['title'], 'extlink')
            self.assertTrue('collection' not in target)

    def testGetExternalLinkDomain(self):
        logging.info("LinkTest.testExternalLinkDomain")
        domain = "link_example."
+ config.get('domain') root_uuid = helper.getRootUUID(domain) headers = {'host': domain} # test file has seven external links in the root group that should all # map to the same external file in either the same directory or a # a subdirectory "subdir" expected_curdir = "tall." + config.get('domain') expected_subdir = "tall.subdir." + config.get('domain') expected_h5path = "g1/g1.1" for i in range(7): external_link_name = "external_link" + str(i+1) req = self.endpoint + "/groups/" + root_uuid + "/links/" + external_link_name rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("created" in rspJson) self.assertTrue("lastModified" in rspJson) self.assertTrue("link" in rspJson) target = rspJson['link'] self.assertTrue("h5path" in target) self.assertEqual(target["h5path"], expected_h5path) self.assertTrue("h5domain" in target) h5domain = target["h5domain"] if i < 4: # these links map to a file in the same directory self.assertEqual(h5domain, expected_curdir) else: # these map to a file in "subdir" self.assertEqual(h5domain, expected_subdir) # get all the links in one request and very the external filename req = self.endpoint + "/groups/" + root_uuid + "/links" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("links" in rspJson) links = rspJson["links"] external_link_count = 0 for link in links: if link["class"] != 'H5L_TYPE_EXTERNAL': continue self.assertTrue("title" in link) title = link["title"] if not title.startswith("external_link"): continue external_link_count += 1 link_no = int(title[-1]) self.assertTrue("h5path" in link) self.assertEqual(link["h5path"], expected_h5path) self.assertTrue("h5domain" in link) if link_no < 5: self.assertEqual(link["h5domain"], expected_curdir) else: self.assertEqual(link["h5domain"], expected_subdir) def testGetUDLink(self): logging.info("LinkTest.testGetUDLink") domain_name = 'tall_with_udlink' domain = domain_name + '.' + config.get('domain') root_uuid = helper.getRootUUID(domain) g2_uuid = helper.getUUID(domain, root_uuid, 'g2') req = self.endpoint + "/groups/" + g2_uuid + "/links/udlink" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("created" in rspJson) self.assertTrue("lastModified" in rspJson) target = rspJson['link'] self.assertEqual(target['class'], 'H5L_TYPE_USER_DEFINED') self.assertEqual(target['title'], 'udlink') def testGetLinks(self): logging.info("LinkTest.testGetLinks") for domain_name in ('tall', 'tall_ro'): g1_uuid = None domain = domain_name + '.' + config.get('domain') root_uuid = helper.getRootUUID(domain) g1_uuid = helper.getUUID(domain, root_uuid, 'g1') g12_uuid = helper.getUUID(domain, g1_uuid, 'g1.2') req = self.endpoint + "/groups/" + g12_uuid + "/links" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("links" in rspJson) links = rspJson["links"] self.assertEqual(len(links), 2) for link in links: self.assertTrue("title" in link) self.assertTrue("class" in link) def testGetBatch(self): logging.info("LinkTest.testGetBatch") domain = 'group1k.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) req = helper.getEndpoint() + "/groups/" + root_uuid + "/links" headers = {'host': domain} params = {'Limit': 50 } names = set() # get links in 20 batches of 50 links each lastName = None for batchno in range(20): if lastName: params['Marker'] = lastName rsp = requests.get(req, headers=headers, params=params) self.assertEqual(rsp.status_code, 200) if rsp.status_code != 200: break rspJson = json.loads(rsp.text) links = rspJson['links'] self.assertEqual(len(links) <= 50, True) for link in links: lastName = link['title'] names.add(lastName) if len(links) == 0: break self.assertEqual(len(names), 1000) # should get 1000 unique links #Fix - This needs to be made more efficient - when deleting links, the code now # searches all objects to see if the linked target needs to be made anonymous or not. # idea: keep back pointers for all links? # Tracked as Issue #12 in Github """ def testMoveLinks(self): logging.info("LinkTest.testMoveLinks") domain = 'group1k_updated.' + config.get('domain') root_uuid = helper.getRootUUID(domain) # create a new subgroup to move others to targetGroupId = helper.createGroup(domain) req = helper.getEndpoint() + "/groups/" + root_uuid + "/links" headers = {'host': domain} params = {'Limit': 100 } names = set() # get links in batches of 100 links each count = 0 while True: print 'count:', count rsp = requests.get(req, headers=headers, params=params) self.assertEqual(rsp.status_code, 200) if rsp.status_code != 200: break rspJson = json.loads(rsp.text) links = rspJson['links'] if len(links) == 0: break count += len(links) for link in links: # delete link del_req = helper.getEndpoint() + "/groups/" + root_uuid + "/links/" + link['title'] rsp = requests.delete(del_req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(count, 1000) # should get 1000 unique links """ def testGetBadParam(self): logging.info("LinkTest.testGetBatchBadParam") domain = 'tall.' + config.get('domain') root_uuid = helper.getRootUUID(domain) req = helper.getEndpoint() + "/groups/" + root_uuid + "/links" headers = {'host': domain} params = {'Limit': 'abc' } rsp = requests.get(req, headers=headers, params=params) self.assertEqual(rsp.status_code, 400) def testPut(self): logging.info("LinkTest.testPut") domain = 'tall_updated.' + config.get('domain') grpId = helper.createGroup(domain) rootId = helper.getRootUUID(domain) name = 'g3' req = helper.getEndpoint() + "/groups/" + rootId + "/links/" + name payload = {"id": grpId} headers = {'host': domain} rsp = requests.get(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 404) # link doesn't exist rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) rsp = requests.get(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # it's there now! # make a request second time (verify idempotent) rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 409) # status - conflict, already exists # now try with a different payload grpId2 = helper.createGroup(domain) payload["id"] = grpId2 rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 409) def testPutNameWithSpaces(self): logging.info("LinkTest.testPutNameWithSpaces") domain = 'tall_updated.' 
+ config.get('domain') grpId = helper.createGroup(domain) rootId = helper.getRootUUID(domain) name = 'name with spaces' req = helper.getEndpoint() + "/groups/" + rootId + "/links/" + name payload = {"id": grpId} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # verify we can read the link back rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue("link" in rspJson) link = rspJson["link"] self.assertTrue("title" in link) self.assertEqual(link["title"], name) self.assertTrue("class" in link) self.assertEqual(link["class"], "H5L_TYPE_HARD") def testPutBadReqId(self): logging.info("LinkTest.testPutBadReqId") domain = 'tall_updated.' + config.get('domain') grpId = helper.createGroup(domain) badReqId = 'b2771194-347f-11e4-bb67-3c15c2da029e' # doesn't exist in tall.h5 name = 'g3' req = helper.getEndpoint() + "/groups/" + badReqId + "/links/" + name payload = {"id": grpId} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 404) def testPutBadLinkId(self): logging.info("LinkTest.testPutBadLinkId") domain = 'tall_updated.' + config.get('domain') grpId = helper.createGroup(domain) rootId = helper.getRootUUID(domain) badLinkId = 'b2771194-347f-11e4-bb67-3c15c2da029e' # doesn't exist in tall.h5 name = 'badid' req = helper.getEndpoint() + "/groups/" + rootId + "/links/" + name payload = {"id": badLinkId} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 404) def testPutNoName(self): logging.info("LinkTest.testPutNoName") domain = 'tall_updated.' + config.get('domain') grpId = helper.createGroup(domain) rootId = helper.getRootUUID(domain) req = helper.getEndpoint() + "/groups/" + rootId + "/links/" payload = {"id": grpId} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) def testPutBadName(self): logging.info("LinkTest.testPutBadName") domain = 'tall_updated.' + config.get('domain') grpId = helper.createGroup(domain) rootId = helper.getRootUUID(domain) name = 'bad/name' # forward slash not allowed req = helper.getEndpoint() + "/groups/" + rootId + "/links/" + name payload = {"id": grpId} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) def testPutSoftLink(self): logging.info("LinkTest.testPutSoftLink") domain = 'tall_updated.' + config.get('domain') grpId = helper.createGroup(domain) rootId = helper.getRootUUID(domain) name = 'softlink' req = helper.getEndpoint() + "/groups/" + rootId + "/links/" + name payload = {"h5path": "somewhere"} headers = {'host': domain} # verify softlink does not exist rsp = requests.get(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 404) # make request rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # verify link is created rsp = requests.get(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # verify idempotent rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 409) def testPutExternalLink(self): logging.info("LinkTest.testPutExternalLink") domain = 'tall_updated.' + config.get('domain') target_domain = 'external_target.' 
+ config.get('domain') target_path = '/dset1' grpId = helper.createGroup(domain) rootId = helper.getRootUUID(domain) name = 'extlink' req = helper.getEndpoint() + "/groups/" + rootId + "/links/" + name payload = {"h5path": target_path, "h5domain": target_domain} headers = {'host': domain} # verify extlink does not exist rsp = requests.get(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 404) # make request rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # verify link is created rsp = requests.get(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # verify that it is an external link rspJson = json.loads(rsp.text) target = rspJson['link'] self.assertEqual(target['class'], 'H5L_TYPE_EXTERNAL') self.assertEqual(target['h5domain'], target_domain) self.assertEqual(target['h5path'], target_path) def testPutExternalMissingPath(self): logging.info("LinkTest.testPutExternalMissingPath") fakeId = "14bfeeb8-68b1-11e4-a69a-3c15c2da029e" domain = 'tall_updated.' + config.get('domain') external_domain = 'external_target.' + config.get('domain') grpId = helper.createGroup(domain) rootId = helper.getRootUUID(domain) name = 'extlinkid' req = helper.getEndpoint() + "/groups/" + rootId + "/links/" + name payload = {"h5domain": external_domain} headers = {'host': domain} # verify extlink does not exist rsp = requests.get(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 404) # make request rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) def testDelete(self): logging.info("LinkTest.testDelete") domain = 'tall_updated.' + config.get('domain') grpId = helper.createGroup(domain) rootId = helper.getRootUUID(domain) name = 'deleteme' req = helper.getEndpoint() + "/groups/" + rootId + "/links/" + name payload = {"id": grpId} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # now remove the link rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) # get should fail rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 410) # Group should still be accessible via uuid req = self.endpoint + "/groups/" + grpId rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/makeattr.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## import h5py f = h5py.File("attr1k.h5", "w") for i in range(1000): name = 'a{:04d}'.format(i) f.attrs[name] = "this is attribute: " + str(i) f.close() ================================================ FILE: test/integ/makegroups.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import h5py f = h5py.File("group1k.h5", "w") for i in range(1000): name = 'g{:04d}'.format(i) f.create_group(name) f.close() ================================================ FILE: test/integ/roottest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import requests import config import helper import unittest import json import base64 class RootTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(RootTest, self).__init__(*args, **kwargs) self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) def testGetInfo(self): req = self.endpoint + "/info" rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) self.assertTrue('h5serv_version' in rspJson) def testGetDomain(self): domain = 'tall.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) root_uuid = rspJson["root"] helper.validateId(root_uuid) # try again with query arg req = self.endpoint + "/?host=" + domain rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) helper.validateId(rspJson["root"]) self.assertEqual(root_uuid, rspJson["root"]) def testGetReadOnly(self): domain = 'tall_ro.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) helper.validateId(rspJson["root"]) def testGetToc(self): domain = config.get('domain') if domain.startswith('test.'): domain = domain[5:] req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) self.assertTrue('root' in rspJson) def testGetNotFound(self): domain = 'doesnotexist.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 404) def testWrongTopLevelDomain(self): domain = "www.baddomain.org" req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 403) # 403 == Forbidden def testInvalidDomain(self): # can't be just a bare top-level domain domain = config.get('domain') # get top-level domain. e.g.: 'test.hdf.io' -> 'hdf.io' npos = domain.find('.') topdomain = domain[npos+1:] domain = 'two.dots..are.bad.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 400) # 400 == bad syntax domain = 'missingenddot' + topdomain req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 400) # 400 == bad syntax # just a dot is no good domain = '.' + topdomain req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 400) # 400 == bad syntax domain = '.dot.in.front.is.bad.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 400) # 400 == bad syntax domain = 'tall.dots.need.to.be.encoded.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 404) # 404 == not found were expected def testDomainWithSpaces(self): domain = 'filename with space.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) def testGetSubdomain(self): domain = 'zerodim.subdir.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) def testPutSubdomain(self): domain = 'newfile.newsubdir.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) rspJson = json.loads(rsp.text) def testPutSubSubdomain(self): domain = 'newfile.newsubsubdir.newsubdirparent.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) rspJson = json.loads(rsp.text) href = (rspJson["hrefs"][0])[u"href"] self.assertEqual(href, "http://" + domain + "/") def testDelete(self): #test DELETE_root domain = 'deleteme.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) def testDeleteReadonly(self): #test DELETE_root domain = 'readonly.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 403) def testDeleteNotFound(self): domain = 'doesnotexist.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 404) def testDeleteSubSubdomain(self): domain = 'deleteme.subdir.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.delete(req, headers=headers) self.assertEqual(rsp.status_code, 200) def testPut(self): # test PUT_root domain = 'newfile.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) rspJson = json.loads(rsp.text) for k in ("root", "hrefs", "created", "lastModified"): self.assertTrue(k in rspJson) # verify that putting the same domain again fails with a 409 error rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 409) def testGetDomainWithDot(self): domain = helper.nameEncode('tall.dots.need.to.be.encoded') + '.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) helper.validateId(rspJson["root"]) # try using host as query argument req = self.endpoint + "/?host=" + domain rsp = requests.get(req) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) helper.validateId(rspJson["root"]) def testPutNameWithDot(self): # test PUT_root domain = helper.nameEncode('new.file') + '.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) rspJson = json.loads(rsp.text) if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/setupdata.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## import sys import os import stat from shutil import copyfile import h5py import numpy as np SRC = "../test_files" DES = "../../data/test" # files to be copied into test directory testfiles = { 'tall.h5': ('.', 'tall_updated.h5', 'tall_ro.h5', 'tall_g2_deleted.h5', 'tall_dset112_deleted.h5', 'tall_dset22_deleted.h5', 'tall_acl.h5', 'tall_acl_delete.h5', 'tall.dots.need.to.be.encoded.h5', 'subdir/tall.h5'), 'tall_with_udlink.h5': ('.',), 'scalar.h5': ('.', 'scalar_1d_deleted.h5',), 'namedtype.h5': ('.', 'namedtype_deleted.h5'), 'resizable.h5': ('.', 'resized.h5'), 'notahdf5file.h5': ('.',), 'zerodim.h5': ('filename with space.h5', 'deleteme.h5', 'readonly.h5', 'subdir', 'subdir/deleteme.h5', 'subdir/subdir/deleteme.h5'), 'group1k.h5': ('.', 'group1k_updated.h5'), 'attr1k.h5': ('.',), 'type1k.h5': ('.',), 'dset1k.h5': ('.',), 'fillvalue.h5': ('.'), 'null_space_dset.h5': ('.'), 'compound.h5': ('.',), 'compound_attr.h5': ('.',), 'compound_array_attr.h5': ('.',), 'compound_array_dset.h5': ('.',), 'compound_committed.h5': ('.',), 'arraytype.h5': ('.',), 'array_attr.h5': ('.',), 'array_dset.h5': ('.',), 'bitfield_attr.h5': ('.',), 'bitfield_dset.h5': ('.',), 'dim_scale.h5': ('.',), 'dim_scale_data.h5': ('.', 'dim_scale_updated.h5'), 'dset_gzip.h5': ('.',), 'enum_attr.h5': ('.',), 'enum_dset.h5': ('.',), 'fixed_string_attr.h5': ('.',), 'fixed_string_dset.h5': ('.',), 'h5ex_d_alloc.h5': ('.',), 'h5ex_d_checksum.h5': ('.',), 'h5ex_d_chunk.h5': ('.',), 'h5ex_d_compact.h5': ('.',), 'h5ex_d_extern.h5': ('.',), 'h5ex_d_fillval.h5': ('.',), 'h5ex_d_gzip.h5': ('.',), 'h5ex_d_hyper.h5': ('.',), 'h5ex_d_nbit.h5': ('.',), 'h5ex_d_rdwr.h5': ('.',), 'h5ex_d_shuffle.h5': ('.',), 'h5ex_d_sofloat.h5': ('.',), 'h5ex_d_soint.h5': ('.',), 'h5ex_d_transform.h5': ('.',), 'h5ex_d_unlimadd.h5': ('.',), 'h5ex_d_unlimgzip.h5': ('.',), 'h5ex_d_hyper.h5': ('.',), 'link_example.h5': ('.',), 'objref_attr.h5': ('.',), 'objref_dset.h5': ('.', 'objref_dset_updated.h5'), 'null_objref_dset.h5': ('.',), 'regionref_attr.h5': ('.',), 'regionref_dset.h5': ('.', 'regionref_dset_updated.h5'), 'vlen_attr.h5': ('.',), 'vlen_dset.h5': ('.',), 'vlen_string_attr.h5': ('.',), 'vlen_string_dset.h5': ('.',), 'opaque_attr.h5': ('.',), 'opaque_dset.h5': ('.',), 'committed_type.h5': ('.',), 'tstr.h5': ('.',), 'null_space_attr.h5': ('.',), 'bool_dset.h5': ('.',), 'bool_attr.h5': ('.',) } # files that will get set as read-only read_only_files = ( 'tall_ro.h5', 'readonly.h5') """ Create test accounts - add test_user1 and test_user2 if they don't exist already """ def addTestAccount(user_id): password_file = "passwd.h5" cwd = os.getcwd() src_dir = os.path.abspath(SRC) os.chdir('../../util/admin') if not os.path.isfile(password_file): os.system('python makepwd_file.py') add_user_script = 'python update_pwd.py' add_user_script += ' -f ' + password_file os.system(add_user_script + ' -a -u ' + user_id + ' -p test') home_dir = "../../data/home" if not os.path.isdir(home_dir): os.mkdir(home_dir) os.chdir(home_dir) # clean out any old files if os.path.isdir(user_id): removeFilesFromDir(user_id) else: # create user home directory os.mkdir(user_id) os.chdir(user_id) print("cwd:", os.getcwd()) # link to "public" directory # create symlink to public directory public_dir = "../../public" if os.name != 'nt': if not os.path.isdir(public_dir): print("create public dir") os.mkdir(public_dir) if not os.path.islink('public'): print("create symlink") os.symlink(public_dir, "public") 
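    # the copyfile calls below seed the new account's home directory with a
    # writable copy of tall.h5 (plus a second copy that delete tests can
    # remove).  Tests that act as this account can authenticate with HTTP
    # Basic credentials; a minimal illustrative sketch (not executed by this
    # script), assuming the password set above ('test'):
    #
    #   import base64, requests
    #   auth = base64.b64encode(b"test_user1:test").decode("ascii")
    #   headers = {"host": domain, "Authorization": "Basic " + auth}
    #   rsp = requests.get(endpoint + "/", headers=headers)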
copyfile(src_dir + '/tall.h5', 'tall.h5') copyfile(src_dir + '/tall.h5', 'tall_deleteme.h5') os.chdir(cwd) def addTestAccounts(): for test_user in ('test_user1', 'test_user2'): addTestAccount(test_user) """ Make a testfile with 1000 sub-groups """ def makeGroup1k(): file_path = SRC + "/group1k.h5" if os.path.exists(file_path): return # don't waste time re-creating print('makeGroup1k') f = h5py.File(file_path, "w") for i in range(1000): name = 'g{:04d}'.format(i) f.create_group(name) f.close() """ Make a testfile with 1000 attributes """ def makeAttr1k(): file_path = SRC + "/attr1k.h5" if os.path.exists(file_path): return # don't waste time re-creating print('makeAttr1k()') f = h5py.File(file_path, "w") for i in range(1000): name = 'a{:04d}'.format(i) f.attrs[name] = "this is attribute: " + str(i) f.close() """ Make a testfile with 1000 types """ def makeType1k(): file_path = SRC + "/type1k.h5" if os.path.exists(file_path): return # don't waste time re-creating f = h5py.File(file_path, "w") for i in range(1000): name = 'S{:04d}'.format(i+1) f[name] = np.dtype(name) #create fixed length string f.close() """ Make a testfile with 1000 datasets """ def makeDataset1k(): file_path = SRC + "/dset1k.h5" if os.path.exists(file_path): return # don't waste time re-creating f = h5py.File(file_path, "w") for i in range(1000): name = 'd{:04d}'.format(i+1) dim = i+1 f.create_dataset(name, (dim,), dtype=np.int32) f.close() """ Make a testfile with external links """ def makeExternalLinks(): file_path = SRC + "/link_example.h5" if os.path.exists(file_path): return # don't waste time re-creating tgt_link_path = os.path.abspath(DES) # for absolute paths in link tgt_link_path += "/tall.h5" f = h5py.File(file_path, 'w') f.create_group('g1') f.create_group('g1/g1.1') f['soft_link'] = h5py.SoftLink('g1') f['external_link1'] = h5py.ExternalLink('tall.h5', 'g1/g1.1') f['external_link2'] = h5py.ExternalLink('tall', 'g1/g1.1') f['external_link3'] = h5py.ExternalLink('tall.test.hdfgroup.org', 'g1/g1.1') f['external_link4'] = h5py.ExternalLink(tgt_link_path, 'g1/g1.1') f['external_link5'] = h5py.ExternalLink('tall.subdir.test.hdfgroup.org', 'g1/g1.1') f['external_link6'] = h5py.ExternalLink('tall.subdir', 'g1/g1.1') f['external_link7'] = h5py.ExternalLink('subdir/tall.h5', 'g1/g1.1') f.close() """ Remove files from given directory """ def removeFilesFromDir(dir_name): print('remove files', dir_name) if not os.path.isdir(dir_name): print("expected", dir_name, "to be a directory") sys.exit() for file_name in os.listdir(dir_name): file_path = os.path.join(dir_name, file_name) try: if os.path.isdir(file_path): if os.path.islink(file_path): os.unlink(file_path) # just remove the link else: removeFilesFromDir(file_path) os.rmdir(file_path) else: if os.path.isfile(file_path): # check for read-only if (os.stat(file_path).st_mode & stat.S_IWUSR) == 0: # make read-write os.chmod(file_path, 0O666) os.unlink(file_path) except Exception as e: print(e) """ main """ # verify we are in the right place and the correct argument has been passed if len(sys.argv) > 1 and sys.argv[1] == '-h': print("this script will remove all files from ../../data/test and repopulate using files from ../../testdata") sys.exit(); if not os.path.exists(SRC): print("run this from the integ test directory!") sys.exit() if not os.path.exists(DES): # create the data/test directory if it doesn't exist os.mkdir(DES) # create test accounts addTestAccounts() # create group1k.h5 (if not created before) makeGroup1k() # create attr1k.h5 (if not created before) 
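# group1k.h5 gives the link paging test a thousand links to walk through with
# the Limit and Marker query parameters; attr1k.h5, type1k.h5 and dset1k.h5
# play the same role for the other listing tests.  Roughly, paging looks like
# this (illustrative sketch only, not executed by this script):
#
#   params = {'Limit': 50}
#   while True:
#       rsp = requests.get(req, headers=headers, params=params)
#       links = rsp.json()['links']
#       if not links:
#           break
#       params['Marker'] = links[-1]['title']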
makeAttr1k() # create type1k.h5 (if not created before) makeType1k() # create dset1k.h5 (if not created before) makeDataset1k() # create link_example.h5 (if not created before)) makeExternalLinks() removeFilesFromDir(DES) test_dirs = ('.', 'subdir', 'subdir/subdir') for dir_name in test_dirs: tgt_dir = DES if dir_name != '.': tgt_dir += '/' + dir_name if not os.path.exists(tgt_dir): os.mkdir(tgt_dir) for file_name in testfiles: for tgt in testfiles[file_name]: src = SRC + '/' + file_name des = DES + '/' if tgt == '.': # copy to DES des += file_name else: des += tgt if os.path.isdir(des): # copy to directory des += '/' des += file_name print('copyfile("'+file_name+'", "'+des+'")') copyfile(src, des) for file_name in read_only_files: file_path = DES + '/' + file_name print('chmod', file_path) os.chmod(file_path, 0O444) ================================================ FILE: test/integ/shapetest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import requests import config import helper import unittest import json class ShapeTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(ShapeTest, self).__init__(*args, **kwargs) self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) def testGet(self): domain = 'tall.' + config.get('domain') root_uuid = helper.getRootUUID(domain) g2_uuid = helper.getUUID(domain, root_uuid, 'g2') dset21_uuid = helper.getUUID(domain, g2_uuid, 'dset2.1') req = helper.getEndpoint() + "/datasets/" + dset21_uuid + "/shape" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 10) self.assertTrue('maxdims' not in shape) # not re-sizeable def testGetResizable(self): domain = 'resizable.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) resizable_1d_uuid = helper.getUUID(domain, root_uuid, 'resizable_1d') req = helper.getEndpoint() + "/datasets/" + resizable_1d_uuid + "/shape" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 10) self.assertEqual(shape['maxdims'][0], 20) resizable_2d_uuid = helper.getUUID(domain, root_uuid, 'resizable_2d') req = helper.getEndpoint() + "/datasets/" + resizable_2d_uuid + "/shape" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][1], 10) self.assertTrue('maxdims' in shape) # is re-sizeable! self.assertEqual(shape['maxdims'][1], 20) unlimited_1d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_1d') req = helper.getEndpoint() + "/datasets/" + unlimited_1d_uuid + "/shape" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 10) self.assertTrue('maxdims' in shape) # is re-sizeable self.assertEqual(shape['maxdims'][0], 0) unlimited_2d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_2d') req = helper.getEndpoint() + "/datasets/" + unlimited_2d_uuid + "/shape" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][1], 10) self.assertTrue('maxdims' in shape) # is re-sizeable self.assertEqual(shape['maxdims'][1], 0) def testPutResizable(self): domain = 'resized.' 
+ config.get('domain') headers = {'host': domain} root_uuid = helper.getRootUUID(domain) resizable_1d_uuid = helper.getUUID(domain, root_uuid, 'resizable_1d') req = helper.getEndpoint() + "/datasets/" + resizable_1d_uuid + "/shape" # get the existing shape rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 10) self.assertEqual(shape['maxdims'][0], 20) # modify shape by setting extent to maxdims payload = { 'shape': 20 } headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # get the shape again rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 20) self.assertTrue('maxdims' not in shape) # two-dimensional - verify existing shape resizable_2d_uuid = helper.getUUID(domain, root_uuid, 'resizable_2d') req = helper.getEndpoint() + "/datasets/" + resizable_2d_uuid + "/shape" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][0], 10) self.assertEqual(shape['dims'][1], 10) self.assertTrue('maxdims' in shape) # is re-sizeable self.assertEqual(shape['maxdims'][0], 10) self.assertEqual(shape['maxdims'][1], 20) # modify shape by setting extent to maxdims payload = { 'shape': [10, 20] } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # verify the changed shape rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][0], 10) self.assertEqual(shape['dims'][1], 20) self.assertTrue('maxdims' not in shape) unlimited_1d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_1d') req = helper.getEndpoint() + "/datasets/" + unlimited_1d_uuid + "/shape" payload = { 'shape': 25 } headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 25) self.assertTrue('maxdims' in shape) # is re-sizeable self.assertEqual(shape['maxdims'][0], 0) unlimited_2d_uuid = helper.getUUID(domain, root_uuid, 'unlimited_2d') req = helper.getEndpoint() + "/datasets/" + unlimited_2d_uuid + "/shape" payload = { 'shape': [10, 25] } headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('shape' in rspJson) shape = rspJson['shape'] self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][0], 10) self.assertEqual(shape['maxdims'][0], 10) self.assertEqual(shape['dims'][1], 25) 
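        # an unlimited (H5S_UNLIMITED) dimension is reported as a maxdims
        # entry of 0, so the resized second axis is still expected to be 0 here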
self.assertTrue('maxdims' in shape) # is re-sizeable self.assertEqual(shape['maxdims'][1], 0) def testPutInvalidShape(self): domain = 'resized.' + config.get('domain') headers = {'host': domain} root_uuid = helper.getRootUUID(domain) resizable_1d_uuid = helper.getUUID(domain, root_uuid, 'resizable_1d') req = helper.getEndpoint() + "/datasets/" + resizable_1d_uuid + "/shape" payload = { 'shape': [20, 10] } # wrong rank headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) payload = { 'shape': 8 } # try to shrink headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) resizable_2d_uuid = helper.getUUID(domain, root_uuid, 'resizable_2d') req = helper.getEndpoint() + "/datasets/" + resizable_2d_uuid + "/shape" payload = { 'shape': [12, 20] } # try to extend non-extendable dimension headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/spidertest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import requests import config import helper import unittest import json class SpiderTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(SpiderTest, self).__init__(*args, **kwargs) self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) self.verifiedhrefs = set() self.unverifiedhrefs = set() self.headers = {} def validateHrefs(self, href): self.verifiedhrefs.add(href) # convert to local endpoint domain = config.get('domain') npos = href.find(domain) if npos > 0: req = self.endpoint + href[(npos+len(domain)):] else: req = href rsp = requests.get(req, headers=self.headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['content-type'], 'application/json') rspJson = json.loads(rsp.text) self.assertTrue("hrefs" in rspJson) hrefs = rspJson["hrefs"] self.assertTrue(len(hrefs) > 0) links = {} for link in hrefs: self.assertTrue('href' in link) self.assertTrue('rel' in link) rel = link['rel'] url = link['href'] self.assertTrue(rel not in links) links[rel] = url if url in self.verifiedhrefs: continue self.unverifiedhrefs.add(url) self.assertTrue('self' in links) self.assertTrue('root' in links) while len(self.unverifiedhrefs) > 0: link = self.unverifiedhrefs.pop() self.validateHrefs(link) def testHateoas(self): domains = ('tall', 'tall_ro', 'group1k') for name in domains: domain = name + '.' 
+ config.get('domain') self.verifiedhrefs.clear() self.unverifiedhrefs.clear() req = self.endpoint + "/" self.headers = {'host': domain} self.validateHrefs(self.endpoint + "/") if __name__ == '__main__': unittest.main() ================================================ FILE: test/integ/valuetest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import six import requests import config import helper import unittest import json import base64 class ValueTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(ValueTest, self).__init__(*args, **kwargs) self.endpoint = 'http://' + config.get('server') + ':' + str(config.get('port')) """ Test 32-bit memory word at given offset from value against expected. Expected must be less than 256. """ def compareWord32(self, value, offset, expected): if six.PY3: self.assertEqual(value[offset+0], 0) self.assertEqual(value[offset+1], 0) self.assertEqual(value[offset+2], 0) self.assertEqual(value[offset+3], expected) else: self.assertEqual(ord(value[offset+0]), 0) self.assertEqual(ord(value[offset+1]), 0) self.assertEqual(ord(value[offset+2]), 0) self.assertEqual(ord(value[offset+3]), expected) def testGet(self): for domain_name in ('tall', 'tall_ro'): domain = domain_name + '.' 
+ config.get('domain') rootUUID = helper.getRootUUID(domain) g1UUID = helper.getUUID(domain, rootUUID, 'g1') g11UUID = helper.getUUID(domain, g1UUID, 'g1.1') # rank 1 dataset dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') req = helper.getEndpoint() + "/datasets/" + dset112UUID headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rspJson['id'], dset112UUID) typeItem = rspJson['type'] self.assertEqual(typeItem['base'], 'H5T_STD_I32BE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 20) req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(len(data), 20) for i in range(20): self.assertEqual(data[i], i) # rank 2 dataset dset111UUID = helper.getUUID(domain, g11UUID, 'dset1.1.1') req = helper.getEndpoint() + "/datasets/" + dset111UUID headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rspJson['id'], dset111UUID) typeItem = rspJson['type'] self.assertEqual(typeItem['base'], 'H5T_STD_I32BE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][0], 10) self.assertEqual(shape['dims'][1], 10) req = helper.getEndpoint() + "/datasets/" + dset111UUID + "/value" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(len(data), 10) for i in range(10): arr = data[i] self.assertEqual(len(arr), 10) for j in range(10): self.assertEqual(arr[j], i*j) def testGetBinary(self): for domain_name in ('tall', 'tall_ro'): domain = domain_name + '.' 
+ config.get('domain') rootUUID = helper.getRootUUID(domain) g1UUID = helper.getUUID(domain, rootUUID, 'g1') g11UUID = helper.getUUID(domain, g1UUID, 'g1.1') # rank 1 dataset dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') req = helper.getEndpoint() + "/datasets/" + dset112UUID headers = {'host': domain} headers_binary = {'host': domain, 'accept': "application/octet-stream"} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rspJson['id'], dset112UUID) typeItem = rspJson['type'] self.assertEqual(typeItem['base'], 'H5T_STD_I32BE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 1) self.assertEqual(shape['dims'][0], 20) req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" rsp = requests.get(req, headers=headers_binary) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/octet-stream") data = rsp.content self.assertEqual(len(data), 80) for i in range(20): self.compareWord32(data, i*4, i) # rank 2 dataset dset111UUID = helper.getUUID(domain, g11UUID, 'dset1.1.1') req = helper.getEndpoint() + "/datasets/" + dset111UUID headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertEqual(rspJson['id'], dset111UUID) typeItem = rspJson['type'] self.assertEqual(typeItem['base'], 'H5T_STD_I32BE') shape = rspJson['shape'] self.assertEqual(shape['class'], 'H5S_SIMPLE') self.assertEqual(len(shape['dims']), 2) self.assertEqual(shape['dims'][0], 10) self.assertEqual(shape['dims'][1], 10) req = helper.getEndpoint() + "/datasets/" + dset111UUID + "/value" rsp = requests.get(req, headers=headers_binary) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/octet-stream") data = rsp.content self.assertEqual(len(data), 400) row_offset = 0 for i in range(10): col_offset = 0 for j in range(10): # 4 byte integers, little indian self.compareWord32(data, row_offset+col_offset, i*j) col_offset += 4 row_offset += col_offset def testGetSelection(self): for domain_name in ('tall', 'tall_ro'): domain = domain_name + '.' 
+ config.get('domain') headers = {'host': domain} rootUUID = helper.getRootUUID(domain) g1UUID = helper.getUUID(domain, rootUUID, 'g1') g11UUID = helper.getUUID(domain, g1UUID, 'g1.1') # rank 1 dataset dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') # dataset has shape (20,) and type 'int32' # get values starting at index 2 req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=[2:]" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] # should be [2, 3, 4, ..., 19] self.assertEqual(len(data), 18) self.assertEqual(data, list(range(2, 20))) # get values starting at index 2 with stop of 10 req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=[2:10]" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) data = rspJson['value'] # should be [2, 3, 4, ..., 9] self.assertEqual(len(data), 8) self.assertEqual(data, list(range(2, 10))) # get values starting at index 2 with stop of 10, and stride of 2 req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=[2:10:2]" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] # should be [2, 4, 6, 8] self.assertEqual(len(data), 4) self.assertEqual(data, list(range(2, 9, 2))) # rank 2 dataset dset111UUID = helper.getUUID(domain, g11UUID, 'dset1.1.1') # dataset has shape (10,10) and type 'int32' # get rows 2, 3, 4, and 5 req = helper.getEndpoint() + "/datasets/" + dset111UUID + "/value" + \ "?select=[:,2:6]" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(len(data), 10) for i in range(10): arr = data[i] self.assertEqual(len(arr), 4) for j in range(4): self.assertEqual(arr[j], i*(j+2)) # get 2d subregion with stride req = helper.getEndpoint() + "/datasets/" + dset111UUID + "/value" + \ "?select=[1:9,1:9:2]" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(len(data), 8) for i in range(8): arr = data[i] self.assertEqual(len(arr), 4) for j in range(4): self.assertEqual(arr[j], (i+1)*(j*2+1)) def testGetSelectionBinary(self): for domain_name in ('tall', ): domain = domain_name + '.' 
+ config.get('domain') headers = {'host': domain} headers_binary = {'host': domain, 'accept': "application/octet-stream"} rootUUID = helper.getRootUUID(domain) g1UUID = helper.getUUID(domain, rootUUID, 'g1') g11UUID = helper.getUUID(domain, g1UUID, 'g1.1') # rank 1 dataset dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') # dataset has shape (20,) and type 'int32' # get values starting at index 2 req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=[2:]" rsp = requests.get(req, headers=headers_binary) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/octet-stream") # content should be [2, 3, 4, ..., 19] data = rsp.content self.assertEqual(len(data), 18*4) # 18 elements with 4 bytes per element for i in range(18): self.compareWord32(data, i*4, i+2) # get values starting at index 2 with stop of 10 req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=[2:10]" rsp = requests.get(req, headers=headers_binary) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/octet-stream") data = rsp.content # should be [2, 3, 4, ..., 9] self.assertEqual(len(data), 8*4) for i in range(8): self.compareWord32(data, i*4, i+2) # get values starting at index 2 with stop of 10, and stride of 2 req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=[2:10:2]" rsp = requests.get(req, headers=headers_binary) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/octet-stream") data = rsp.content # should be [2, 4, 6, 8] self.assertEqual(len(data), 4*4) for i in range(4): offset = i*4 self.compareWord32(data, offset, (i*2)+2) # rank 2 dataset dset111UUID = helper.getUUID(domain, g11UUID, 'dset1.1.1') # dataset has shape (10,10) and type 'int32' # get rows 2, 3, 4, and 5 req = helper.getEndpoint() + "/datasets/" + dset111UUID + "/value" + \ "?select=[:,2:6]" rsp = requests.get(req, headers=headers_binary) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/octet-stream") data = rsp.content self.assertEqual(len(data), 4*10*4) row_offset = 0 for i in range(10): col_offset = 0 for j in range(4): # 4 byte integers, little indian self.compareWord32(data, row_offset+col_offset, i*(j+2)) col_offset += 4 row_offset += col_offset # get 2d subregion with stride req = helper.getEndpoint() + "/datasets/" + dset111UUID + "/value" + \ "?select=[1:9,1:9:2]" rsp = requests.get(req, headers=headers_binary) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/octet-stream") data = rsp.content self.assertEqual(len(data), 8*4*4) row_offset = 0 for i in range(8): col_offset = 0 for j in range(4): # 4 byte integers, little indian self.compareWord32(data, row_offset+col_offset, (i+1)*(j*2+1)) col_offset += 4 row_offset += col_offset def testGetSelectionBadQuery(self): domain = 'tall.' 
+ config.get('domain') headers = {'host': domain} rootUUID = helper.getRootUUID(domain) g1UUID = helper.getUUID(domain, rootUUID, 'g1') g11UUID = helper.getUUID(domain, g1UUID, 'g1.1') # rank 1 dataset dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') # don't use bracket req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=abc" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 400) # not a number req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=[a:b:c]" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 400) # start is negative req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=[-1:3]" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 400) # stop past extent req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=[1:25]" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 400) # pass in 0 step req = helper.getEndpoint() + "/datasets/" + dset112UUID + "/value" + \ "?select=[1:2:0]" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 400) def testGetScalar(self): domain = 'scalar.' + config.get('domain') headers = {'host': domain} root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '0d') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(data, 42) def testGetNullSpace(self): domain = 'null_space_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('value' in rspJson) data = rspJson['value'] self.assertEqual(data, None) def testGetScalarString(self): domain = 'scalar.' + config.get('domain') headers = {'host': domain} root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '0ds') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(data, "hello") def testGetScalarStringBinary(self): domain = 'scalar.' + config.get('domain') headers = {'host': domain} headers_binary = {'host': domain, 'accept': "application/octet-stream"} root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '0ds') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" rsp = requests.get(req, headers=headers_binary) self.assertEqual(rsp.status_code, 200) # requested binary, but got json (because it's a variable length string) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(data, "hello") def testGetSimpleOneElement(self): domain = 'scalar.' 
+ config.get('domain') headers = {'host': domain} root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '1d') req = helper.getEndpoint() + "/datasets/" + dset_uuid req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(data, [42,]) def testGetSimpleOneElementString(self): domain = 'scalar.' + config.get('domain') headers = {'host': domain} root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, '1ds') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(data, ["hello",]) def testGetCompound(self): domain = 'compound.' + config.get('domain') root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(len(data), 72) first = data[0] self.assertEqual(len(first), 5) self.assertEqual(first[0], 24) self.assertEqual(first[1], "13:53") # get first element via selection query # get values starting at index 2 req += "?select=[0:1]" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(len(data), 1) first = data[0] self.assertEqual(len(first), 5) self.assertEqual(first[0], 24) self.assertEqual(first[1], "13:53") def testGetCompoundBinary(self): domain = 'compound.' + config.get('domain') root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} headers_binary = {'host': domain, 'accept': "application/octet-stream"} rsp = requests.get(req, headers=headers_binary) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/octet-stream") data = rsp.content self.assertEqual(len(data) // 36, 72 ) # get first element via selection query # get values starting at index 2 req += "?select=[0:1]" rsp = requests.get(req, headers=headers_binary) self.assertEqual(rsp.status_code, 200) # just one element, so expect json response self.assertEqual(rsp.headers['Content-Type'], "application/json") rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(len(data), 1) first = data[0] self.assertEqual(len(first), 5) self.assertEqual(first[0], 24) self.assertEqual(first[1], "13:53") def testGetCommitted(self): domain = 'committed_type.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(len(data), 4) def testGetArray(self): domain = 'array_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 4) # four dataset elements (each an array) self.assertEqual(len(value[0]), 3) # 3x5 array shape self.assertEqual(len((value[0])[0]), 5) # 3x5 array shape self.assertEqual(value[0][2][4], -8) # pull out a value from the array def testGetVLenString(self): domain = 'vlen_string_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) rspJson = json.loads(rsp.text) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 4) self.assertEqual(value[0], "Parting") self.assertEqual(value[1], "is such") self.assertEqual(value[2], "sweet") self.assertEqual(value[3], "sorrow.") def testGetFixedString(self): domain = 'fixed_string_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 4) self.assertEqual(value[0], "Parting") self.assertEqual(value[1], "is such") self.assertEqual(value[2], "sweet") self.assertEqual(value[3], "sorrow.") def testGetFixedStringBinary(self): domain = 'fixed_string_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain, 'accept': "application/octet-stream"} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) self.assertEqual(rsp.headers['Content-Type'], "application/octet-stream") data = rsp.content self.assertEqual(data, b"Partingis suchsweet\x00\x00sorrow.") def testGetEnum(self): domain = 'enum_dset.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 4) self.assertEqual(value[1][2], 2) def testGetVlen(self): domain = 'vlen_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 2) self.assertEqual(len(value[1]), 12) self.assertEqual(value[1][11], 144) def testGetOpaque(self): domain = 'opaque_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) # get for Opaque data is not supported yet. Check that the call returns 501 self.assertEqual(rsp.status_code, 501) def testGetObjectReference(self): domain = 'objref_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2') g1_uuid = helper.getUUID(domain, root_uuid, 'G1') req = helper.getEndpoint() + "/datasets/" + ds1_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 2) self.assertEqual(value[0], 'groups/' + g1_uuid) self.assertEqual(value[1], 'datasets/' + ds2_uuid) def testGetNullObjReference(self): domain = 'null_objref_dset.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) dset_uuid = helper.getUUID(domain, root_uuid, 'DS1') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 1) self.assertEqual(value[0], "null") def testGetRegionReference(self): domain = 'regionref_dset.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2') req = helper.getEndpoint() + "/datasets/" + ds1_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 2) ref0 = value[0] self.assertEqual(ref0['select_type'], 'H5S_SEL_POINTS') self.assertEqual(ref0['id'], ds2_uuid) points = ref0['selection'] self.assertEqual(len(points), 4) self.assertEqual(points[0], [0, 1]) self.assertEqual(points[1], [2,11]) self.assertEqual(points[2], [1, 0]) self.assertEqual(points[3], [2, 4]) ref1 = value[1] self.assertEqual(ref1['select_type'], 'H5S_SEL_HYPERSLABS') self.assertEqual(ref1['id'], ds2_uuid) hyperslabs = ref1['selection'] self.assertEqual(len(hyperslabs), 4) self.assertEqual(hyperslabs[0][0], [0, 0]) self.assertEqual(hyperslabs[0][1], [1, 3]) self.assertEqual(hyperslabs[1][0], [0, 11]) self.assertEqual(hyperslabs[1][1], [1, 14]) self.assertEqual(hyperslabs[2][0], [2, 0]) self.assertEqual(hyperslabs[2][1], [3, 3]) self.assertEqual(hyperslabs[3][0], [2, 11]) self.assertEqual(hyperslabs[3][1], [3, 14]) def testGetFillValue(self): domain = 'fillvalue.' + config.get('domain') root_uuid = helper.getRootUUID(domain) # create a new dataset payload = {'type': 'H5T_STD_I32LE', 'shape': 10} payload['creationProperties'] = {'fillValue': 42 } req = self.endpoint + "/datasets" headers = {'host': domain} rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset_uuid = rspJson['id'] self.assertTrue(helper.validateId(dset_uuid)) # link the new dataset name = "dset_new" req = self.endpoint + "/groups/" + root_uuid + "/links/" + name payload = {"id": dset_uuid} headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # retrieve the values req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(data, [42,]*10) # # Query tests # def testQuery(self): domain = 'compound.' 
+ config.get('domain') root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" req += "?query=date == 23" # values where date field = 23 headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('hrefs' in rspJson) self.assertTrue('index' in rspJson) index = rspJson['index'] self.assertEqual(len(index), 24) self.assertEqual(index[0], 14) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 24) item = value[0] self.assertEqual(len(item), 5) self.assertEqual(item[0], 23) """ def testsnp(self): limit = 20 domain = 'snp500.demo.hdfgroup.org' root_uuid = helper.getRootUUID(domain) self.assertTrue(root_uuid is not None) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" req += "?query=symbol == 'AAPL'&Limit=" + str(limit) # values where date field = 23 print req headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) print rspJson """ def testQueries(self): # use '%26' rather than '&' since otherwise it will be # interpreted as an http query param separator queries = { "date == 23": 24, "wind == b'W 5'": 3, "temp > 61": 53, "(date >=22) %26 (date <= 24)": 62, "(date == 21) %26 (temp > 70)": 4, "(wind == b'E 7') | (wind == b'S 7')": 7 } #queries = { "(date == 21) %26 (temp >= 72)": 4 } domain = 'compound.' + config.get('domain') headers = {'host': domain} root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" for key in queries.keys(): query = req + "?query=" + key rsp = requests.get(query, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('hrefs' in rspJson) self.assertTrue('index' in rspJson) index = rspJson['index'] self.assertEqual(len(index), queries[key]) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), queries[key]) def testQuerySelection(self): domain = 'compound.' + config.get('domain') root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" req += "?query=date == 23" # values where date field = 23 req += "&select=[10:20]" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('hrefs' in rspJson) self.assertTrue('index' in rspJson) index = rspJson['index'] self.assertEqual(len(index), 6) self.assertEqual(index[0], 14) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 6) item = value[0] self.assertEqual(len(item), 5) self.assertEqual(item[0], 23) def testQueryBatch(self): domain = 'compound.' 
+ config.get('domain') headers = {'host': domain} root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" start = 0 stop = 72 count = 0 count = req_count=0 limit = 10 req += "?query=date == 23" # values where date field = 23 req += "&Limit=" + str(limit) # return no more than 10 results at a time for i in range(50): sreq = req+"&select=[" + str(start) + ":" + str(stop) + "]" rsp = requests.get(sreq, headers=headers) self.assertEqual(rsp.status_code, 200) req_count += 1 rspJson = json.loads(rsp.text) self.assertTrue('hrefs' in rspJson) self.assertTrue('index' in rspJson) index = rspJson['index'] self.assertTrue(len(index) <= limit) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), len(index)) count += len(index) if len(index) < limit: break # no more results start = index[-1] + 1 # start at next index self.assertEqual(count, 24) self.assertEqual(req_count, 3) def testBadQuery(self): domain = 'compound.' + config.get('domain') root_uuid = helper.getRootUUID(domain) dset_uuid = helper.getUUID(domain, root_uuid, 'dset') req = helper.getEndpoint() + "/datasets/" + dset_uuid + "/value" req += "?query=foobar" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 400) # # Post tests # def testPost(self): for domain_name in ('tall','tall_ro'): domain = domain_name + '.' + config.get('domain') rootUUID = helper.getRootUUID(domain) g1UUID = helper.getUUID(domain, rootUUID, 'g1') g11UUID = helper.getUUID(domain, g1UUID, 'g1.1') # rank 1 dataset dset112UUID = helper.getUUID(domain, g11UUID, 'dset1.1.2') points = (19, 17, 13, 11, 7, 5, 3, 2) req = self.endpoint + "/datasets/" + dset112UUID + "/value" payload = {'points': points} headers = {'host': domain} rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(len(data), len(points)) self.assertEqual(points[0], data[0]) # rank 2 dataset dset111UUID = helper.getUUID(domain, g11UUID, 'dset1.1.1') points = [] for i in range(10): points.append((i,i)) # get diagonal req = self.endpoint + "/datasets/" + dset111UUID + "/value" payload = {'points': points} headers = {'host': domain} rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) data = rspJson['value'] self.assertEqual(len(data), len(points)) self.assertEqual(9, data[3]) # # Put tests # def testPut(self): # create domain domain = 'valueput.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain #create scalar dataset payload = {'type': 'H5T_STD_I32LE'} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset0UUID = rspJson['id'] self.assertTrue(helper.validateId(dset0UUID)) # link new dataset as 'dset0' ok = helper.linkObject(domain, dset0UUID, 'dset0') self.assertTrue(ok) # write to dset0 req = self.endpoint + "/datasets/" + dset0UUID + "/value" data = 42 payload = { 'value': data } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data readData = helper.readDataset(domain, dset0UUID) self.assertEqual(readData, data) # verify we got back what we started with #create 1d/one element dataset payload = {'type': 'H5T_STD_I32LE', 'shape': 1} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset1UUID = rspJson['id'] self.assertTrue(helper.validateId(dset1UUID)) # link new dataset as 'dset1_0' ok = helper.linkObject(domain, dset1UUID, 'dset1_0') self.assertTrue(ok) # write to dset1 req = self.endpoint + "/datasets/" + dset1UUID + "/value" data = [42,] payload = { 'value': data } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data readData = helper.readDataset(domain, dset1UUID) self.assertEqual(readData, data) # verify we got back what we started with #create 1d dataset payload = {'type': 'H5T_STD_I32LE', 'shape': 10} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset1UUID = rspJson['id'] self.assertTrue(helper.validateId(dset1UUID)) # link new dataset as 'dset1' ok = helper.linkObject(domain, dset1UUID, 'dset1') self.assertTrue(ok) # write to dset1 req = self.endpoint + "/datasets/" + dset1UUID + "/value" data = [2,3,5,7,11,13,17,19,23,29] # payload = {'type': 'H5T_STD_I32LE', 'shape': 10, 'value': data } payload = { 'value': data } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data readData = helper.readDataset(domain, dset1UUID) self.assertEqual(readData, data) # verify we got back what we started with # verify attempting the wrong number of elements fails data = [9, 99, 999, 999] payload = { 'value': data } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) # Bad Request #create 2d dataset payload = {'type': 'H5T_STD_I32LE', 'shape': (10,10)} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset2UUID = rspJson['id'] self.assertTrue(helper.validateId(dset2UUID)) # link new dataset as 'dset2' ok = helper.linkObject(domain, dset2UUID, 'dset2') self.assertTrue(ok) req = self.endpoint + "/datasets/" + dset2UUID + "/value" data = [] for i in range(10): row = [] for j in range(10): row.append(i*10 + j) data.append(row) payload = { 'value': data } rsp = requests.put(req, data=json.dumps(payload), 
headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data readData = helper.readDataset(domain, dset2UUID) self.assertEqual(readData, data) # verify we got back what we started with def testPutBinary(self): # create domain domain = 'valueput_binary.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain #create scalar dataset payload = {'type': 'H5T_STD_I32LE'} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset0UUID = rspJson['id'] self.assertTrue(helper.validateId(dset0UUID)) # link new dataset as 'dset0' ok = helper.linkObject(domain, dset0UUID, 'dset0') self.assertTrue(ok) # write to dset0 req = self.endpoint + "/datasets/" + dset0UUID + "/value" byte_array = bytearray(4) byte_array[0] = 42 # create 4-byte int, little endian data = base64.b64encode(bytes(byte_array)) data = data.decode("ascii") payload = { 'value_base64': data } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data readData = helper.readDataset(domain, dset0UUID) self.assertEqual(readData, 42) # verify we got back what we started with #create 1d dataset payload = {'type': 'H5T_STD_I32LE', 'shape': 10} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset1UUID = rspJson['id'] self.assertTrue(helper.validateId(dset1UUID)) # link new dataset as 'dset1' ok = helper.linkObject(domain, dset1UUID, 'dset1') self.assertTrue(ok) # write to dset1 req = self.endpoint + "/datasets/" + dset1UUID + "/value" primes = [2,3,5,7,11,13,17,19,23,29] data = bytearray(4 * 10) for i in range(10): data[i*4] = primes[i] data = base64.b64encode(bytes(data)) data = data.decode("ascii") payload = { 'value_base64': data } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data readData = helper.readDataset(domain, dset1UUID) self.assertEqual(readData, primes) # verify we got back what we started with #create 2d dataset payload = {'type': 'H5T_STD_I32LE', 'shape': (10,10)} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset2UUID = rspJson['id'] self.assertTrue(helper.validateId(dset2UUID)) # link new dataset as 'dset2' ok = helper.linkObject(domain, dset2UUID, 'dset2') self.assertTrue(ok) req = self.endpoint + "/datasets/" + dset2UUID + "/value" data = bytearray(10*10*4) for i in range(10): for j in range(10): data[i*10*4 + j*4] = i*j data = base64.b64encode(bytes(data)) data = data.decode("ascii") payload = { 'value_base64': data } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data read_data = helper.readDataset(domain, dset2UUID) self.assertEqual(len(read_data), 10) # verify we got back what we started with for i in range(10): row = read_data[i] self.assertEqual(len(row), 10) for j in range(10): self.assertEqual(row[j], i*j) def testPutSelection(self): # create domain domain = 'valueputsel.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain #create 1d dataset payload = {'type': 'H5T_STD_I32LE', 'shape': 10} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset1UUID = rspJson['id'] self.assertTrue(helper.validateId(dset1UUID)) # link new dataset as 'dset1' ok = helper.linkObject(domain, dset1UUID, 'dset1') self.assertTrue(ok) req = self.endpoint + "/datasets/" + dset1UUID + "/value" data = [2,3,5,7,11,13,17,19,23,29] data_part1 = data[0:5] data_part2 = data[5:10] # write part 1 payload = { 'start': 0, 'stop': 5, 'value': data_part1 } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # write part 2 payload = { 'start': 5, 'stop': 10, 'value': data_part2 } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data readData = helper.readDataset(domain, dset1UUID) self.assertEqual(readData, data) # verify we got back what we started with def testPutSelectionValueMismatch(self): # test that putting the wrong number of items in the value body key is handled correctly. # create domain domain = 'valueputselvaluemismatch.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain #create 1d dataset payload = {'type': 'H5T_STD_I32LE', 'shape': 10} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset1UUID = rspJson['id'] self.assertTrue(helper.validateId(dset1UUID)) # link new dataset as 'dset1' ok = helper.linkObject(domain, dset1UUID, 'dset1') self.assertTrue(ok) req = self.endpoint + "/datasets/" + dset1UUID + "/value" data_9 = [2,3,5,7,11,13,17,19,23] data_10 = [2,3,5,7,11,13,17,19,23,29] data_11 = [2,3,5,7,11,13,17,19,23,29,31] # try writing 9 elements when the selection has 10 slots payload = { 'start': 0, 'stop': 10, 'value': data_9 } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) # should fail # try writing 11 elements when the selection has 10 slots payload = { 'start': 0, 'stop': 10, 'value': data_11 } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) # should fail # try writing 10 elements when the selection has 10 slots payload = { 'start': 0, 'stop': 10, 'value': data_10 } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # just right! def testPutSelectionBinaryValueMismatch(self): # test that putting the wrong number of items in the value body key is handled correctly. # create domain domain = 'valueputselbinaryvaluemismatch.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain #create 1d dataset payload = {'type': 'H5T_STD_I32LE', 'shape': 10} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset1UUID = rspJson['id'] self.assertTrue(helper.validateId(dset1UUID)) # link new dataset as 'dset1' ok = helper.linkObject(domain, dset1UUID, 'dset1') self.assertTrue(ok) req = self.endpoint + "/datasets/" + dset1UUID + "/value" primes = [2,3,5,7,11,13,17,19,23,29,31] data_9 = bytearray(4 * 9) # write 4*9 byte data data_10 = bytearray(4 * 10) # write 4*10 byte data data_11 = bytearray(4 * 11) # write 4*11 byte data for i in range(9): data_9[i*4] = primes[i] for i in range(10): data_10[i*4] = primes[i] for i in range(11): data_11[i*4] = primes[i] data_9 = base64.b64encode(bytes(data_9)) data_10 = base64.b64encode(bytes(data_10)) data_11 = base64.b64encode(bytes(data_11)) data_9 = data_9.decode("ascii") data_10 = data_10.decode("ascii") data_11 = data_11.decode("ascii") # try writing 9 elements when the selection has 10 slots payload = { 'start': 0, 'stop': 10, 'value_base64': data_9 } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) # should fail # try writing 11 elements when the selection has 10 slots payload = { 'start': 0, 'stop': 10, 'value_base64': data_11 } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 400) # should fail # try writing 10 elements when the selection has 10 slots payload = { 'start': 0, 'stop': 10, 'value_base64': data_10 } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # just right! def testPutSelectionBinary(self): # create domain domain = 'valueputsel_binary.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain #create 1d dataset payload = {'type': 'H5T_STD_I32LE', 'shape': 10} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset1UUID = rspJson['id'] self.assertTrue(helper.validateId(dset1UUID)) # link new dataset as 'dset1' ok = helper.linkObject(domain, dset1UUID, 'dset1') self.assertTrue(ok) req = self.endpoint + "/datasets/" + dset1UUID + "/value" primes = [2,3,5,7,11,13,17,19,23,29] data_part1 = bytearray(4 * 5) # write 4*10 byte data in two parts of 20 bytes data_part2 = bytearray(4 * 5) # 2nd part for i in range(5): data_part1[i*4] = primes[i] data_part2[i*4] = primes[i+5] data_part1 = base64.b64encode(bytes(data_part1)) data_part2 = base64.b64encode(bytes(data_part2)) data_part1 = data_part1.decode("ascii") data_part2 = data_part2.decode("ascii") # write part 1 payload = { 'start': 0, 'stop': 5, 'value_base64': data_part1 } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # write part 2 payload = { 'start': 5, 'stop': 10, 'value_base64': data_part2 } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data readData = helper.readDataset(domain, dset1UUID) self.assertEqual(readData, primes) # verify we got back what we started with def testPutPointSelection(self): # create domain domain = 'valueputpointsel.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain #create 1d dataset payload = {'type': 'H5T_STD_I32LE', 'shape': 100} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset1UUID = rspJson['id'] self.assertTrue(helper.validateId(dset1UUID)) # link new dataset as 'dset1' ok = helper.linkObject(domain, dset1UUID, 'dset1') self.assertTrue(ok) req = self.endpoint + "/datasets/" + dset1UUID + "/value" primes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97] value = [1,] * len(primes) # write 1's at indexes that are prime # write 1's to all the prime indexes payload = { 'points': primes, 'value': value } headers = {'host': domain} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data readData = helper.readDataset(domain, dset1UUID) self.assertEqual(readData[37], 1) # prime self.assertEqual(readData[38], 0) # not prime def testPutPointSelectionBinary(self): # create domain domain = 'valueputpointsel_binary.datasettest.' 
+ config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain #create 1d dataset payload = {'type': 'H5T_STD_I32LE', 'shape': 100} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset1UUID = rspJson['id'] self.assertTrue(helper.validateId(dset1UUID)) # link new dataset as 'dset1' ok = helper.linkObject(domain, dset1UUID, 'dset1') self.assertTrue(ok) req = self.endpoint + "/datasets/" + dset1UUID + "/value" primes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97] value = [1,] * len(primes) # write 1's at indexes that are prime data = bytearray(4 * len(primes)) for i in range(len(primes)): data[i*4] = 1 data = base64.b64encode(bytes(data)) data = data.decode("ascii") # write 1's to all the prime indexes payload = { 'points': primes, 'value_base64': data } rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # read back the data readData = helper.readDataset(domain, dset1UUID) self.assertEqual(readData[37], 1) # prime self.assertEqual(readData[38], 0) # not prime def testPutCompound(self): domain = 'valueputcompound.datasettest.' + config.get('domain') req = self.endpoint + "/" headers = {'host': domain} rsp = requests.put(req, headers=headers) self.assertEqual(rsp.status_code, 201) # creates domain root_uuid = helper.getRootUUID(domain) headers = {'host': domain} fields = ({'name': 'temp', 'type': 'H5T_STD_I32LE'}, {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}) datatype = {'class': 'H5T_COMPOUND', 'fields': fields } # #create compound dataset # payload = {'type': datatype} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset0UUID = rspJson['id'] self.assertTrue(helper.validateId(dset0UUID)) # link new dataset as 'dset0_compound' ok = helper.linkObject(domain, dset0UUID, 'dset0_compound') self.assertTrue(ok) # write entire array value = (42, 0.42) payload = {'value': value} req = self.endpoint + "/datasets/" + dset0UUID + "/value" rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # write value # read back the data readData = helper.readDataset(domain, dset0UUID) self.assertEqual(readData[0], 42) # #create 1d dataset # num_elements = 10 payload = {'type': datatype, 'shape': num_elements} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset1UUID = rspJson['id'] self.assertTrue(helper.validateId(dset1UUID)) # link new dataset as 'dset1' ok = helper.linkObject(domain, dset1UUID, 'dset_compound') self.assertTrue(ok) # write entire array value = [] for i in range(num_elements): item = (i*10, i*10+i/10.0) value.append(item) payload = {'value': value} req = self.endpoint + "/datasets/" + dset1UUID + "/value" rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # write value # selection write payload = { 'start': 0, 'stop': 1, 'value': (42, .42) } req = self.endpoint + "/datasets/" + dset1UUID + "/value" rsp = requests.put(req, data=json.dumps(payload), headers=headers) 
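# The selection write above uses slice semantics: start=0, stop=1 updates only index 0 of the 10-element compound dataset, leaving the other nine elements from the preceding full-array write untouched.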
self.assertEqual(rsp.status_code, 200) # write value # read back the data readData = helper.readDataset(domain, dset1UUID) self.assertEqual(readData[0][0], 42) self.assertEqual(readData[1][0], 10) # # Create 2d dataset # dims = [2,2] payload = {'type': datatype, 'shape': dims} req = self.endpoint + "/datasets" rsp = requests.post(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 201) # create dataset rspJson = json.loads(rsp.text) dset2UUID = rspJson['id'] self.assertTrue(helper.validateId(dset2UUID)) # link new dataset as 'dset2d_compound' ok = helper.linkObject(domain, dset2UUID, 'dset2d_compound') self.assertTrue(ok) # write entire array value = [] for i in range(dims[0]): row = [] for j in range(dims[1]): item = (i*10, i*10+j/2.0) row.append(item) value.append(row) payload = {'value': value} req = self.endpoint + "/datasets/" + dset2UUID + "/value" data = json.dumps(payload) rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # write value def testPutObjectReference(self): domain = 'objref_dset_updated.' + config.get('domain') root_uuid = helper.getRootUUID(domain) self.assertTrue(helper.validateId(root_uuid)) ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2') g1_uuid = helper.getUUID(domain, root_uuid, 'G1') req = helper.getEndpoint() + "/datasets/" + ds1_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) value = ('datasets/' + ds2_uuid, 'groups/' + g1_uuid) payload = {'value': value} req = self.endpoint + "/datasets/" + ds1_uuid + "/value" rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # write value def testPutRegionReference(self): domain = 'regionref_dset_updated.' + config.get('domain') root_uuid = helper.getRootUUID(domain) ds1_uuid = helper.getUUID(domain, root_uuid, 'DS1') ds2_uuid = helper.getUUID(domain, root_uuid, 'DS2') req = helper.getEndpoint() + "/datasets/" + ds1_uuid + "/value" headers = {'host': domain} rsp = requests.get(req, headers=headers) self.assertEqual(rsp.status_code, 200) rspJson = json.loads(rsp.text) self.assertTrue('value' in rspJson) value = rspJson['value'] self.assertEqual(len(value), 2) updated_value = ( value[1], value[0] ) # switch elements payload = {'value': updated_value} rsp = requests.put(req, data=json.dumps(payload), headers=headers) self.assertEqual(rsp.status_code, 200) # write value if __name__ == '__main__': unittest.main() ================================================ FILE: test/test_files/notahdf5file.h5 ================================================ This is not an HDF5 file! ================================================ FILE: test/testall.py ================================================ #!/usr/local/env python ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including s # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## from argparse import ArgumentParser import os import sys unit_tests = ('timeUtilTest', 'fileUtilTest') integ_tests = ('roottest', 'grouptest', 'dirtest', 'linktest', 'datasettest', 'valuetest', 'attributetest', 'datatypetest', 'shapetest', 'datasettypetest', 'acltest') #todo - add spidertest back cwd = os.getcwd() no_server = False parser = ArgumentParser() testKind = parser.add_mutually_exclusive_group() testKind.add_argument('--unit', action='store_true', help='run only the unit tests') testKind.add_argument('--integ', action='store_true', help='run only the integrity tests') parser.add_argument('--failslow', action='store_true', help='keep running if a test fails, instead of terminating early') args = vars(parser.parse_args()) if args['unit']: integ_tests = () elif args['integ']: unit_tests = () test_dir = os.path.dirname(os.path.realpath(__file__)) os.chdir(test_dir) # Run all h5serv tests # Run this script before running any integ tests exit_code = None os.chdir('unit') for file_name in unit_tests: print(file_name) rc = os.system('python ' + file_name + '.py') if rc != 0: if args['failslow']: exit_code = 'Failed' else: os.chdir(cwd) sys.exit("Failed") os.chdir('../integ') if integ_tests: os.system("python ./setupdata.py -f") # initialize data files for file_name in integ_tests: print(file_name) rc = os.system('python ' + file_name + '.py') if rc != 0: if args['failslow']: exit_code = 'Failed' else: os.chdir(cwd) sys.exit("Failed") log_file = "../../h5serv.log" if exit_code: if os.name != 'nt' and os.path.isfile(log_file): # tail not available on windows print("server log...") os.system("tail -n 100 " + log_file) os.chdir(cwd) sys.exit(exit_code) else: os.chdir(cwd) print("Done!") ================================================ FILE: test/unit/config.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## from h5serv.config import * cfg = { 'testfiledir': '../../testfiles/', 'domain': 'unit.hdf.io', 'datapath': '../data/', 'uuidlen': 36, 'hdf5_ext': '.h5' } update(cfg) ================================================ FILE: test/unit/fileUtilTest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## import unittest import os from tornado.web import HTTPError from h5serv.fileUtil import getFilePath, getDomain, posixpath, join import config class FileUtilTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(FileUtilTest, self).__init__(*args, **kwargs) # main def testPosixPath(self): path1 = "dir1\\dir2" pp = posixpath(path1) if os.name == 'nt': self.assertEqual(pp, "dir1/dir2") else: self.assertEqual(pp, path1) # no conversion on unix def testJoin(self): path1 = "dir1\\dir2" path2 = "myfile.h5" pp = join(path1, path2) if os.name == 'nt': self.assertEqual(pp, "dir1/dir2/myfile.h5") else: self.assertEqual(pp, "dir1\\dir2/myfile.h5") # no conversion on unix def testDomaintoFilePath(self): domain = 'tall.' + config.get('domain') filePath = getFilePath(domain) self.assertEqual(filePath, "../data/tall.h5") # dot in front domain = '.tall.' + config.get('domain') self.assertRaises(HTTPError, getFilePath, domain) # two dots domain = 'two..dots.' + config.get('domain') self.assertRaises(HTTPError, getFilePath, domain) # no dot before domain domain = 'nodot' + config.get('domain') self.assertRaises(HTTPError, getFilePath, domain) def testGetDomain(self): filePath = "tall.h5" domain = getDomain(filePath) self.assertEqual(domain, 'tall.' + config.get('domain')) filePath = "somevalue" domain = getDomain(filePath) self.assertEqual(domain, 'somevalue.' + config.get('domain')) filePath = "subdir/tall.h5" domain = getDomain(filePath) self.assertEqual(domain, 'tall.subdir.' + config.get('domain')) filePath = os.path.join(config.get('datapath'), 'subdir/tall.h5') domain = getDomain(filePath) self.assertEqual(domain, 'tall.subdir.' + config.get('domain')) filePath = os.path.join(config.get('datapath'), 'subdir/tall.h5') filePath = os.path.abspath(filePath) domain = getDomain(filePath) self.assertEqual(domain, 'tall.subdir.' + config.get('domain')) filePath = os.path.join(config.get('datapath'), 'home/test_user1/tall.h5') domain = getDomain(filePath) self.assertEqual(domain, 'tall.test_user1.home.' + config.get('domain')) filePath = '../data/home/test_user1/tall.h5' domain = getDomain(filePath) self.assertEqual(domain, 'tall.test_user1.home.' + config.get('domain')) #domainpath = fileUtil.getDomain(grppath, base_domain=base_domain) filePath = "../data" domain = getDomain(filePath) self.assertEqual(domain, config.get('domain')) # verify backslashes are ok for windows... if os.name == 'nt': filePath = "subdir\\subsubdir\\tall.h5" domain = getDomain(filePath) self.assertEqual(domain, 'tall.subsubdir.subdir.' + config.get('domain')) if __name__ == '__main__': #setup test files unittest.main() ================================================ FILE: test/unit/timeUtilTest.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## import unittest import time from h5serv.timeUtil import unixTimeToUTC import config class TimeUtilTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(TimeUtilTest, self).__init__(*args, **kwargs) # main def testConvertUnixTimetoUTC(self): # get test file now = time.time() utcTime = unixTimeToUTC(now) print(utcTime) self.assertEqual(len(utcTime), 20) self.assertTrue(utcTime.startswith('20')) self.assertTrue(utcTime.endswith('Z')) if __name__ == '__main__': #setup test files unittest.main() ================================================ FILE: util/admin/add_user.py ================================================ import h5py import numpy as np import sys import argparse import os.path as op import os import time import datetime import hashlib import config def encrypt_pwd(passwd): passwd = passwd.encode('utf-8') encrypted = hashlib.sha224(passwd).hexdigest() return encrypted def print_time(timestamp): str_time = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S') return str_time def generate_temp_password(length=6): if not isinstance(length, int) or length < 4: raise ValueError("temp password must have positive length") chars = "ABCDEFGHJKLMNPQRSTUVWXYZ23456789" return "".join([chars[ord(c) % len(chars)] for c in os.urandom(length)]) def main(): if os.name == 'nt': print("Sorry, this utility is not supported on Windows!") return -1 parser = argparse.ArgumentParser() parser.add_argument('-u', "--user", help='user id') parser.add_argument('-p', "--passwd", help='user password') args = parser.parse_args() filename = None passwd = None username = None filename = config.get('password_file') if not filename: print("no password file in config") return -1 if not args.user: print("no userid supplied") return -1 username = args.user if username.find(':') != -1: print("invalid username (':' is not allowed)") return -1 if username.find('/') != -1: print("invalid username ('/' is not allowed)") return -1 if args.passwd: passwd = args.passwd if len(passwd) < 4: print("password must be at least 4 characters long") return -1 else: passwd = generate_temp_password() # verify file exists and is writable if not op.isfile(filename): print("password file:", filename, " does not exist") return -1 if not h5py.is_hdf5(filename): print("invalid password file") return -1 if not os.access(filename, os.W_OK): print("password file is not writable") return -1 f = h5py.File(filename, 'r+') if 'user_type' not in f: print("invalid password file") return -1 user_type = f['user_type'] now = int(time.time()) # add a new user if username in f.attrs: print("user already exists") return -1 # create userid 1 greater than previous used userid = len(f.attrs) + 1 data = np.empty((), dtype=user_type) data['pwd'] = encrypt_pwd(passwd) data['state'] = 'A' data['userid'] = userid data['ctime'] = now data['mtime'] = now f.attrs.create(username, data, dtype=user_type) f.close() datapath = config.get('datapath') if not op.isdir(datapath): print("data directory not found") return -1 userpath = op.join(datapath, config.get('home_dir')) if not op.isdir(userpath): os.mkdir(userpath) userdir = op.join(userpath, username) if op.isdir(userdir): print("user directory already exists") return -1 # create user directory os.mkdir(userdir) # link to "public" directory link_name = op.join(userdir, "public") # create symlink to public directory os.symlink("../../public", link_name) print(passwd) return main() 
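A minimal sketch of how a client might present the credentials created above to a running h5serv instance, assuming HTTP Basic authentication is enabled; the endpoint, domain, user name, and password below are placeholders, not values from the repository:

import requests

endpoint = "http://127.0.0.1:5000"               # assumed h5serv endpoint; adjust to your config
headers = {"host": "tall.public.hdfgroup.org"}   # hypothetical domain served by this instance
# requests sends HTTP Basic credentials via the auth tuple
rsp = requests.get(endpoint + "/", headers=headers, auth=("newuser", "temp_password"))
print(rsp.status_code)  # expect 200 when the user exists and the password matches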
================================================ FILE: util/admin/config.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import os import sys cfg = { 'datapath': '../../data/', 'domain': 'hdfgroup.org', 'hdf5_ext': '.h5', 'toc_name': '.toc.h5', 'password_file': 'passwd.h5', 'home_dir': 'home' } def get(x): # see if there is a command-line override option = '--'+x+'=' for i in range(1, len(sys.argv)): #print i, sys.argv[i] if sys.argv[i].startswith(option): # found an override arg = sys.argv[i] return arg[len(option):] # return text after option string # see if there are an environment variable override if x.upper() in os.environ: return os.environ[x.upper()] # no command line override, just return the cfg value return cfg[x] ================================================ FILE: util/admin/getacl.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import sys from os.path import isfile import json import numpy as np import h5py from h5json import Hdf5db # # Print usage and exit # def printUsage(): print("usage: python get_acl.py [-h] -file [-path h5path] [userid1, userid2, ...]") print(" options -file: name of file") print(" options -path: h5path to object (default as /)") print(" options [userid]: list of userids (default as all)") print(" ------------------------------------------------------------------------------") print(" Example - get all ACL's for root group of file 'tall.h5' ") print(" python getacl.py -file ../../data/test/tall.h5") print(" Example - get acl for dataset '/g1/g1.1/dset1.1.1' of 'tall.h5', user 123") print(" python getacl.py -file ../../data/test/tall.h5 -path /g1/g1.1/dset1.1.1 123") sys.exit(); """ Get command line argument. 
Exit with usage message if not available """ def getNextArg(argn): if (argn+1) == len(sys.argv): printUsage(); sys.exit(-1) return sys.argv[argn+1] def main(): h5path = None filename = None req_userids = [] if len(sys.argv) == 1 or sys.argv[1] == "-h": printUsage(); sys.exit(0) argn = 1 while argn < len(sys.argv): arg = sys.argv[argn] if arg == '-file': filename = getNextArg(argn) argn += 2 elif arg == '-path': h5path = getNextArg(argn) argn += 2 else: # process userids try: userid = int(arg) req_userids.append(userid) except ValueError: print("Invalid userid:", arg) sys.exit(1) argn += 1 if filename is None: print("no filename specified") sys.exit(1) if not isfile(filename): print(filename, "not found") sys.exit(1) if not h5py.is_hdf5(filename): print(filename, "not an hdf5 file") sys.exit(1) if h5path is None: h5path = '/' fields = ('userid', 'create', 'read', 'update', 'delete', 'readACL', 'updateACL') with Hdf5db(filename) as db: try: obj_uuid = db.getUUIDByPath(h5path) except KeyError: print("no object found at path:", h5path) sys.exit(1) acl_dset = db.getAclDataset(obj_uuid) if acl_dset and acl_dset.shape[0] > 0: acls = {} items = acl_dset[...] for item in items: acls[item[0]] = item userids = list(acls.keys()) userids.sort() # sort to print by userid print("%8s %8s %8s %8s %8s %8s %8s " % fields) for userid in userids: if len(req_userids) > 0 and userid not in req_userids: continue acl = acls[userid] format_args = [userid] for field in ('create', 'read', 'update', 'delete', 'readACL', 'updateACL'): format_args.append('Y' if acl[field] else 'N') print("%8s %8s %8s %8s %8s %8s %8s " % tuple(format_args)) else: print("no ACLs") main() ================================================ FILE: util/admin/import_file.py ================================================ import h5py import numpy as np import sys import argparse import os.path as op import os import shutil from tornado.escape import url_escape from h5json import Hdf5db import config """ Create directories as needed along the given path. """ def makeDirs(filePath): #print("makeDirs:", filePath) # Make any directories along path as needed if len(filePath) == 0 or op.isdir(filePath): return dirname = op.dirname(filePath) if len(dirname) >= len(filePath): return makeDirs(dirname) # recursive call os.mkdir(filePath) # should succeed since parent directory is created """ Get userid given username. 
If user_name is not found, return None """ def getUserId(user_name, password_file): """ getUserInfo: return user data """ userid = None if not user_name: return None # verify file exists and is writable if not op.isfile(password_file): print("password file not found") raise None with h5py.File(password_file, 'r') as f: if user_name not in f.attrs: return None data = f.attrs[user_name] #print(data) return data['userid'] """ get group uuid of hardlink, or None if no link """ def getSubgroupId(db, group_uuid, link_name): #print("link_name:", link_name) subgroup_uuid = None try: item = db.getLinkItemByUuid(group_uuid, link_name) if item['class'] != 'H5L_TYPE_HARD': return None if item['collection'] != 'groups': return None subgroup_uuid = item['id'] except IOError: # link_name doesn't exist, return None pass return subgroup_uuid """ Update toc with new filename """ def addTocEntry(toc_file, domain, base_domain): """ Helper method - update TOC when a domain is created """ if not domain.endswith(base_domain): sys.exit("unexpected domain value: " + domain) # trim domain by base domain try: with Hdf5db(toc_file) as db: group_uuid = db.getUUIDByPath('/') names = domain.split('.') base_names = base_domain.split('.') indexes = list(range(len(names))) indexes = indexes[::-1] # reverse for i in indexes: if i >= len(names) - len(base_names): continue # still in the base domain linkName = names[i] if not linkName: continue if i == 0: db.createExternalLink(group_uuid, domain, '/', linkName) else: subgroup_uuid = getSubgroupId(db, group_uuid, linkName) if subgroup_uuid is None: # create subgroup and link to parent group subgroup_uuid = db.createGroup() # link the new group db.linkObject(group_uuid, subgroup_uuid, linkName) group_uuid = subgroup_uuid except IOError as e: print("IOError: " + str(e.errno) + " " + e.strerror) sys.exit(-1) """ main method """ def main(): parser = argparse.ArgumentParser() parser.add_argument('-s', "--src", help="source path for the file to be imported") parser.add_argument('-u', "--user", help="user name (optional)") parser.add_argument('-f', "--folder", help='folder path under user home dir (optional)') parser.add_argument('-p', "--passwd_file", help='password file (optional)') args = parser.parse_args() src_path = None username = None folder = None password_file = None if args.src: src_path = args.src else: print("no source file provided") return -1 if not op.isfile(src_path): print("no file found") return -1 if not h5py.is_hdf5(src_path): print("file must be an HDF5 file") if args.user: username = args.user else: print("Importing into public") if args.passwd_file: password_file = args.passwd_file else: password_file = config.get("password_file") if args.folder: folder = args.folder if op.isabs(folder): print("folder path must be relative") return -1 folder = op.normpath(folder) print(">source:", src_path) print(">username:", username) print(">password_file:", password_file) print(">folder:", folder) hdf5_ext = config.get("hdf5_ext") if username: userid = getUserId(username, password_file) if not userid: print("user not found") return -1 tgt_dir = op.join(op.dirname(__file__), config.get("datapath")) tgt_dir = op.normpath(tgt_dir) if username: tgt_dir = op.join(tgt_dir, config.get("home_dir")) tgt_dir = op.join(tgt_dir, username) toc_file = op.join(tgt_dir, config.get("toc_name")) if not op.isfile(toc_file): print("toc_file:", toc_file, "not found") return -1 if folder: tgt_dir = op.join(tgt_dir, folder) if not op.isdir(tgt_dir): print("directory:", tgt_dir, "not found, 
creating") makeDirs(tgt_dir) tgt_file = op.basename(src_path) tgt_file = op.splitext(tgt_file)[0] # ignore the extension tgt_file = url_escape(tgt_file) # make the filename url compatible tgt_file = tgt_file.replace('.', '_') # replace dots with underscores tgt_path = op.join(tgt_dir, tgt_file) tgt_path = op.normpath(tgt_path) if op.isfile(tgt_path + hdf5_ext): print("file already exists") return -1 # determine target domain domain = tgt_file if folder: domain += '.' + folder if username: domain += '.' + username + '.' + config.get("home_dir") domain += "." + config.get("domain") # determine the base so that the toc update can be done relative to the base. if username: base_domain = username + '.' + config.get("home_dir") + '.' + config.get("domain") else: base_domain = config.get("domain") print("domain:", domain) # add toc entry addTocEntry(toc_file, domain, base_domain) # copy file tgt_path += hdf5_ext shutil.copyfile(src_path, tgt_path) return 0 main() ================================================ FILE: util/admin/makepwd_file.py ================================================ import h5py import numpy as np file_name = 'passwd.h5' f = h5py.File(file_name, 'x') fields = [] fields.append(('pwd', np.dtype('S56'))) fields.append(('state', np.dtype('S1'))) fields.append(('userid', np.int32)) fields.append(('email', np.dtype('S80'))) fields.append(('ctime', np.int32)) fields.append(('mtime', np.int32)) dt = np.dtype(fields) f['user_type'] = dt f.close() print(file_name, "created") ================================================ FILE: util/admin/remove_db.py ================================================ import h5py import sys """ main method """ def main(): if len(sys.argv) < 2 or sys.argv[1] in ("-h", "--help"): print("Delete db from h5serv file.") print("Warning: all object uuids and any user ACLs will be lost") print("Usage: python remove_db.py ") sys.exit(1) filename = sys.argv[1] f = h5py.File(filename, 'a') if "__db__" not in f: print("No db group found") else: del f["__db__"] print("db group removed") f.close() main() ================================================ FILE: util/admin/setacl.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## import sys from os.path import isfile import json import numpy as np import h5py from h5json import Hdf5db # # Print usage and exit # def printUsage(): print("usage: python set_acl.py -file [-path h5path] [+-}[crudep] [userid1, userid2, ...]") print(" options -v: verbose, print request and response codes from server") print(" options -file: name of file") print(" options -path: path to object (default as /)") print(" options userids: userid of acl to return") print(" ------------------------------------------------------------------------------") print(" Example - set 'tall.h5' default access to read only") print(" python setacl.py -file ../../data/test/tall.h5 +r-udep") print(" Example - get acl for 'tall.h5' dataset /g1/g1.1/dset1.1.1 to full access for user 123") print(" python setacl.py -file ../../data/test/tall.h5 -path /g1/g1.1/dset1.1.1 +crudep 123") sys.exit(); """ Get command line argument. Exit with usage message if not available """ def getNextArg(argn): if (argn+1) == len(sys.argv): printUsage(); sys.exit(-1) return sys.argv[argn+1] def main(): perm_abvr = {'c':'create', 'r': 'read', 'u': 'update', 'd': 'delete', 'e': 'readACL', 'p':'updateACL'} h5path = None filename = None userids = [] add_list = [] remove_list = [] if len(sys.argv) == 1 or sys.argv[1] == "-h": printUsage(); sys.exit(1) argn = 1 while argn < len(sys.argv): arg = sys.argv[argn] if arg == '-file': filename = getNextArg(argn) argn += 2 elif arg == '-path': h5path = getNextArg(argn) argn += 2 elif arg[0] in ('+', '-'): to_list = None for ch in arg: if ch == '+': to_list = add_list elif ch == '-': to_list = remove_list elif ch in perm_abvr.keys(): to_list.append(perm_abvr[ch]) else: printUsage() sys.exit(1) argn += 1 else: # process userids try: userid = int(arg) userids.append(userid) except ValueError: print("Invalid userid:", userid) sys.exit(1) argn += 1 conflicts = list(set(add_list) & set(remove_list)) if len(conflicts) > 0: print("permission: ", conflicts[0], " set for both add and remove") sys.exit(1) if filename is None: print("no filename specified") sys.exit(1) if not isfile(filename): print(filename, "not found") sys.exit(1) if not h5py.is_hdf5(filename): print(filename, "not an hdf5 file") sys.exit(1) if h5path is None: h5path = '/' if len(userids) == 0: userids.append(0) fields = ('userid', 'create', 'read', 'update', 'delete', 'readACL', 'updateACL') with Hdf5db(filename) as db: try: obj_uuid = db.getUUIDByPath(h5path) except KeyError: print("no object found at path:", h5path) sys.exit(1) print("%8s %8s %8s %8s %8s %8s %8s " % fields) for userid in userids: acl = db.getAclByObjAndUser(obj_uuid, userid) if acl is None and userid != 0: acl = db.getAclByObjAndUser(obj_uuid, 0) if acl is None: acl = db.getDefaultAcl() acl['userid'] = userid for field in add_list: acl[field] = True for field in remove_list: acl[field] = False format_args = [userid] for field in fields: if field == 'userid': continue format_args.append('Y' if acl[field] else 'N') print("%8s %8s %8s %8s %8s %8s %8s " % tuple(format_args)) db.setAcl(obj_uuid, acl) main() ================================================ FILE: util/admin/update_pwd.py ================================================ import h5py import numpy as np import sys import argparse import os.path as op import os import time import datetime import hashlib import config def encrypt_pwd(passwd): passwd = passwd.encode('utf-8') encrypted = hashlib.sha224(passwd).hexdigest() return encrypted 
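# Layout note: the password file stores one HDF5 attribute per user, typed by the 'user_type' compound datatype created in makepwd_file.py (fields: pwd, state, userid, email, ctime, mtime). Only the SHA-224 hex digest of the password is kept, so verification in main() below compares encrypt_pwd(candidate) against the stored 'pwd' field rather than recovering a plaintext.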
def print_time(timestamp): str_time = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S') return str_time def main(): parser = argparse.ArgumentParser() parser.add_argument('-r', "--replace", help="update existing user/password", action="store_true") parser.add_argument('-a', "--add", help="add a new user/password", action="store_true") parser.add_argument('-f', "--file", help='password file') parser.add_argument('-u', "--user", help='user id') parser.add_argument('-e', "--email", help='user email') parser.add_argument('-p', "--passwd", help='user password') args = parser.parse_args() filename = None passwd = None username = None email = None if args.file: filename = args.file else: filename = config.get("password_file") if args.user: username = args.user for ch in username: if ord(ch) >= ord('a') and ord(ch) <= ord('z'): continue # OK if ord(ch) >= ord('0') and ord(ch) <= ord('9'): continue # OK if ord(ch) == ord('_'): continue # OK print("invalid username ('", ch, "' is not allowed)") return -1 if args.passwd: passwd = args.passwd if passwd.find(':') != -1: print("invalid passwd (':' is not allowed)") return -1 if args.email: email = args.email if email.find('@') == -1: print("invalid email address ('@' not found)") return -1 print(">filename:", filename) print(">username:", username) print(">password:", passwd) print(">email:", email) if args.replace: print("replace is on") # verify file exists and is writable if not op.isfile(filename): print("password file:", filename, " does not exist") return -1 if not h5py.is_hdf5(filename): print("invalid password file") return -1 mode = 'r' if args.replace or args.add: mode = 'r+' if not os.access(filename, os.W_OK): print("password file is not writable") return -1 f = h5py.File(filename, mode) if 'user_type' not in f: print("invalid password file") return -1 user_type = f['user_type'] now = int(time.time()) if args.add: # add a new user if username in f.attrs: print("user already exists") return -1 # create userid 1 greater than previous used userid = len(f.attrs) + 1 data = np.empty((), dtype=user_type) data['pwd'] = encrypt_pwd(passwd) data['state'] = 'A' data['userid'] = userid data['email'] = email data['ctime'] = now data['mtime'] = now f.attrs.create(username, data, dtype=user_type) elif args.replace: if username not in f.attrs: print("user not found") return -1 data = f.attrs[username] if passwd: data['pwd'] = encrypt_pwd(passwd) if email: data['email'] = email data['mtime'] = now f.attrs.create(username, data, dtype=user_type) elif username and passwd: if username not in f.attrs: print("user not found") return -1 data = f.attrs[username] if data['pwd'] == encrypt_pwd(passwd): print("password is valid") return 0 else: print("password is not valid") elif username: if username not in f.attrs: print("user not found") return -1 data = f.attrs[username] print("username:", username, "userid:", data['userid'], "email:", data['email'], "state:", data['state'], "ctime:", print_time(data['ctime']), "mtime:", print_time(data['mtime'])) else: # print all users sys.stdout.write("{:<25}{:<8}{:<8}{:<40}{:<20}{:<20}\n".format('username', 'userid', 'state', 'email', 'ctime', 'mtime')) sys.stdout.write(("-" * 120)+'\n') for username in f.attrs.keys(): data = f.attrs[username] sys.stdout.write("{:<25}{:<8}{:<8}{:<40}{:<20}{:<20}\n".format(username, str(data['userid']), data['state'], data['email'], print_time(data['ctime']), print_time(data['mtime']))) f.close() return 0 main() ================================================ FILE: 
util/dumpobjdb.py
================================================
##############################################################################
# Copyright by The HDF Group.                                                #
# All rights reserved.                                                       #
#                                                                            #
# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and     #
# Utilities.  The full HDF5 REST Server copyright notice, including         #
# terms governing use, modification, and redistribution, is contained in    #
# the file COPYING, which can be found at the root of the source code       #
# distribution tree.  If you do not have access to this file, you may       #
# request a copy from help@hdfgroup.org.                                    #
##############################################################################
import h5py
import sys


def dumpAttr(col):
    for k in col.attrs:
        attr = col.attrs[k]
        if attr.__class__.__name__ == 'Reference':
            obj = col[attr]
            print('\t\tattr[' + k + ']: ->' + obj.name)
        else:
            print('\t\tattr[' + k + ']: ' + str(attr))


# path
def dumpCol(col):
    if len(col) == 0:
        pass  # return # skip
    npos = col.name.rfind('/') + 1
    name = col.name[npos:]
    print('\t{' + name + '}')
    dumpAttr(col)
    for uuid in col:
        g = col[uuid]
        addr = h5py.h5o.get_info(g.id).addr
        print('\t\t' + uuid + ': ' + g.__class__.__name__ + ' addr: ' + str(addr))


def dumpFile(filePath):
    print("db info for: ", filePath)
    f = h5py.File(filePath, 'r')
    dbGrp = f['/']
    if '__db__' in f:
        dbGrp = f['__db__']
    else:
        if '{groups}' not in f:
            print("no db data found!")
            return
    print('__db__', 'Group')
    dumpAttr(dbGrp)
    dumpCol(dbGrp['{groups}'])
    dumpCol(dbGrp['{datasets}'])
    dumpCol(dbGrp['{datatypes}'])
    dumpCol(dbGrp['{addr}'])
    f.close()


def main():
    if len(sys.argv) < 2:
        print("usage: dumpobjdb <filepath>")
        sys.exit()
    dumpFile(sys.argv[1])

main()


================================================
FILE: util/dumptojson.sh
================================================
for f in testfiles/*.h5
do
    echo "$f"
    s=${f##*/}
    b=${s%.h5}
    python h5tojson.py $f >json_dump/$b.json
done


================================================
FILE: util/exporth5.py
================================================
##############################################################################
# Copyright by The HDF Group.                                                #
# All rights reserved.                                                       #
#                                                                            #
# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and     #
# Utilities.  The full HDF5 REST Server copyright notice, including         #
# terms governing use, modification, and redistribution, is contained in    #
# the file COPYING, which can be found at the root of the source code       #
# distribution tree.  If you do not have access to this file, you may       #
# request a copy from help@hdfgroup.org.                                    #
##############################################################################
import six
if six.PY3:
    unicode = str

import sys
import requests
import json
import numpy as np
import h5py
from h5json import Hdf5db

"""
exporth5 - creates an HDF5 file based on an h5serv domain
"""

class Dumph5:
    def __init__(self):
        self.group_uuids = []
        self.dataset_uuids = []
        self.datatype_uuids = []

    #
    # Make request to service, convert json response to python dictionary
    # and return.
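    #
    # Illustrative note (added commentary, not in the original source): the
    # requests below address a domain by passing it in the "host" header, and
    # the JSON bodies consumed by this class look roughly like:
    #   GET /                      -> {"root": "<root-uuid>", ...}
    #   GET /datasets/<uuid>       -> {"type": ..., "shape": {"class": "H5S_SIMPLE",
    #                                  "dims": [...], "maxdims": [...]},
    #                                  "creationProperties": {...}, ...}
    #   GET /datasets/<uuid>/value -> {"value": [...], ...}
    # Field values shown are placeholders; exact contents vary by object.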
# def makeRequest(self, uri): endpoint = self.endpoint if not endpoint: endpoint = "http://" + self.domain endpoint += ':' endpoint += str(self.port) req = endpoint + uri if self.verbose: print("REQ:", req) #print "headers:", self.domain headers = {'host': self.domain} rsp = requests.get(req, headers=headers) if self.verbose: print("RSP:", rsp.status_code) if rsp.status_code != 200: raise Exception("got bad httpstatus: " + str(rsp.status_code) + " for request: " + uri); #print "got response text:", rsp.text rsp_json = json.loads(rsp.text) return rsp_json # # Create a hard, soft, or external link # def createLink(self, link_obj, parent_uuid): title = link_obj["title"] link_class = link_obj["class"] if link_class == 'H5L_TYPE_HARD': child_uuid = link_obj["id"] self.db.linkObject(parent_uuid, child_uuid, title) elif link_class == 'H5L_TYPE_SOFT': h5path = link_obj["h5path"] self.db.createSoftLink(parent_uuid, h5path, title) elif link_class == 'H5L_TYPE_EXTERNAL': h5path = link_obj["h5path"] link_file = link_obj["h5domain"] self.db.createExternalLink(parent_uuid, link_file, h5path, title) else: print("Unable to create link with class:", link_class) # # Create HDF5 dataset object and write data values # def createDataset(self, uuid): # get json for the dataset rsp_json = self.makeRequest("/datasets/" + uuid) self.dataset_uuids.append(uuid) datatype = rsp_json['type'] if type(datatype) in (str, unicode) and datatype.startswith("datatypes/"): #committed datatype, just pass in the UUID part datatype = datatype[len("datatypes/"):] dims = None max_shape=None creation_props=None if "shape" in rsp_json: shape = rsp_json["shape"] if shape["class"] == 'H5S_SIMPLE': dims = shape["dims"] if type(dims) == int: # convert int to array dim1 = shape dims = [dim1] if "maxdims" in shape: max_shape = shape["maxdims"] if type(max_shape) == int: #convert to array dim1 = max_shape max_shape = [dim1] # convert 0's to None's for i in range(len(max_shape)): if max_shape[i] == 0: max_shape[i] = None if 'creationProperties' in rsp_json: creation_props = rsp_json['creationProperties'] self.db.createDataset(datatype, dims, max_shape=max_shape, creation_props=creation_props, obj_uuid=uuid) # get the data values rsp_json = self.makeRequest("/datasets/" + uuid + '/value') if "value" in rsp_json: data = rsp_json["value"] #print json.dumps(data, sort_keys=True, indent=4) self.db.setDatasetValuesByUuid(uuid, data) # # Create all datasets in the domain # def createDatasets(self): uri = "/datasets" rsp_json = self.makeRequest(uri) dataset_ids = rsp_json['datasets'] for dataset_uuid in dataset_ids: self.createDataset(dataset_uuid) def createAttribute(self, attr_name, col_name, uuid): attr_json = self.makeRequest("/" + col_name + "/" + uuid + "/attributes/" + attr_name) datatype = attr_json["type"] if type(datatype) in (str, unicode) and datatype.startswith("datatypes/"): #committed datatype, just pass in the UUID part datatype = datatype[len("datatypes/"):] attr_value = attr_json["value"] dims = None if "shape" in attr_json: shape = attr_json["shape"] if shape["class"] == 'H5S_SIMPLE': dims = shape["dims"] if type(dims) == int: # convert int to array dim1 = shape dims = [dim1] self.db.createAttribute(col_name, uuid, attr_name, dims, datatype, attr_value) # # create committed datatype HDF5 object # def createDatatype(self, uuid): rsp_json = self.makeRequest("/datatypes/" + uuid) datatype = rsp_json['type'] self.db.createCommittedType(datatype, obj_uuid=uuid) # # create datatypes # def createDatatypes(self): rsp_json = 
self.makeRequest("/datatypes") datatype_ids = rsp_json['datatypes'] for datatype_uuid in datatype_ids: self.createDatatype(datatype_uuid) # # Create HDF5 group object (links and attributes will be added later) # def createGroup(self, uuid): self.group_uuids.append(uuid) if uuid != self.root_uuid: self.db.createGroup(obj_uuid=uuid) # # Create all groups in the domain # def createGroups(self): rsp_json = self.makeRequest("/groups") group_ids = rsp_json['groups'] group_ids.append(self.root_uuid) # add root group uuid for group_uuid in group_ids: self.createGroup(group_uuid) # # Create all the HDF5 objects defined in the JSON file # def createObjects(self): # create datatypes self.createDatatypes() # create groups self.createGroups() # create datasets self.createDatasets() # # Create all the attributes for HDF5 objects defined in the JSON file # Note: this needs to be done after createObjects since an attribute # may use a committed datatype # def createAttributes(self): # create datatype attributes for datatype_uuid in self.datatype_uuids: rsp_json = self.makeRequest("/datatypes/" + datatype_uuid + "/attributes") attributes = rsp_json["attributes"] for attribute_json in attributes: self.createAttribute(attribute_json["name"], "datatypes", uuid) # create group attributes for group_uuid in self.group_uuids: rsp_json = self.makeRequest("/groups/" + group_uuid + "/attributes") attributes = rsp_json["attributes"] for attribute_json in attributes: self.createAttribute(attribute_json["name"], "groups", group_uuid) # create dataset attributes for dataset_uuid in self.dataset_uuids: rsp_json = self.makeRequest("/datasets/" + dataset_uuid + "/attributes") attributes = rsp_json["attributes"] for attribute_json in attributes: self.createAttribute(attribute_json["name"], "datasets", dataset_uuid) # # Link all the objects # Note: this will "de-anonymous-ize" objects defined in the HDF5 file # Any non-linked objects will be deleted when the __db__ group is deleted # def createLinks(self): for group_uuid in self.group_uuids: rsp_json = self.makeRequest("/groups/" + group_uuid + "/links") links = rsp_json["links"] for link in links: self.createLink(link, group_uuid) def writeFile(self, db): self.db = db self.root_uuid = db.root_uuid print("file root_uuid:", self.root_uuid) self.createObjects() # create datasets, groups, committed datatypes self.createAttributes() # create attributes for objects self.createLinks() # link it all together # # Print usage and exit # def printUsage(): print("usage: python exporth5.py [-v] [-endpoint=] [-port=] ") print(" options -v: verbose, print request and response codes from server") print(" options -endpoint: specify IP endpoint of server") print(" options -port: port address of server [default 7253]") print(" ------------------------------------------------------------------------------") print(" Example - get 'tall' collection from HDF Group server, save to tall.h5:") print(" python exporth5.py tall.data.hdfgroup.org tall.h5") print(" Example - get 'tall' collection from a local server instance ") print(" (assuming the server is using port 5000):") print(" python exporth5.py -endpoint=127.0.0.1 -port=5000 tall.test.hdfgroup.org tall.h5") sys.exit(); def main(): nargs = len(sys.argv) dumper = Dumph5() dumper.verbose = False dumper.endpoint = None dumper.port = 7253 dumper.noDsetData = False dumper.noAttrData = False endpoint_option = "-endpoint=" port_option = "-port=" option_count = 0 for arg in sys.argv: if arg.startswith(endpoint_option): endpoint = 
arg[len(endpoint_option):] if endpoint.startswith("http"): dumper.endpoint = endpoint else: dumper.endpoint = "http://" + endpoint option_count += 1 elif arg.startswith(port_option): port = arg[len(port_option):] dumper.port = int(port) option_count += 1 elif arg == "-v": dumper.verbose = True if nargs - option_count <= 2: printUsage() domain = sys.argv[nargs-2] filename = sys.argv[nargs-1] print("domain:", domain) print("filename:", filename) dumper.domain = domain domain_json = dumper.makeRequest("/") if "root" not in domain_json: raise Exception("no root key in domain response") root_uuid = domain_json["root"] # create the file, will raise IOError if there's a problem Hdf5db.createHDF5File(filename) with Hdf5db(filename, root_uuid=root_uuid) as db: dumper.writeFile(db) # open with h5py and remove the _db_ group # Note: this will delete any anonymous (un-linked) objects f = h5py.File(filename, 'a') del f["__db__"] f.close() print("done!") main() ================================================ FILE: util/exportjson.py ================================================ ############################################################################## # Copyright by The HDF Group. # # All rights reserved. # # # # This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # # Utilities. The full HDF5 REST Server copyright notice, including # # terms governing use, modification, and redistribution, is contained in # # the file COPYING, which can be found at the root of the source code # # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## import requests import sys import json """ DumpJson - return json representation of all objects within the given domain """ class DumpJson: def __init__(self): pass # # Make request to service, convert json response to python dictionary # and return. 
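    #
    # Illustrative note (added commentary, not part of the original source):
    # dumpDomain() below assembles a single JSON document whose top-level
    # layout looks roughly like:
    #   {
    #     "root": "<root-uuid>",
    #     "groups":    { "<uuid>": {"links": [...], "attributes": [...]}, ... },
    #     "datasets":  { "<uuid>": {"type": ..., "shape": ..., "value": ...,
    #                               "attributes": [...]}, ... },
    #     "datatypes": { "<uuid>": {"type": ..., "attributes": [...]}, ... }
    #   }
    # and is printed with json.dumps(..., sort_keys=True, indent=4).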
# def makeRequest(self, uri): endpoint = self.endpoint if not endpoint: endpoint = "http://" + self.domain endpoint += ':' endpoint += str(self.port) req = endpoint + uri if self.verbose: print("REQ:", req) #print "headers:", self.domain headers = {'host': self.domain} rsp = requests.get(req, headers=headers) if self.verbose: print("RSP:", rsp.status_code) if rsp.status_code != 200: raise Exception("got bad httpstatus: " + str(rsp.status_code) + " for request: " + uri); #print "got response text:", rsp.text rsp_json = json.loads(rsp.text) return rsp_json def dumpAttribute(self, obj_uri): rsp_json = self.makeRequest(obj_uri) attr_json = {} attr_json['name'] = rsp_json['name'] attr_json['type'] = rsp_json['type'] attr_json['shape'] = rsp_json['shape'] if 'value' in rsp_json and rsp_json['value']: attr_json['value'] = rsp_json['value'] return attr_json def dumpAttributes(self, uri, jsonOut): rsp_json = self.makeRequest(uri) attributes_json = rsp_json['attributes'] if len(attributes_json) > 0: items = [] for attr in attributes_json: name = attr['name'] if self.noAttrData: # just copy what we got from "attributes" request items.append(attr) else: # fetch the attribute data uri_attr_request = uri + "/" + name item = self.dumpAttribute(uri_attr_request) items.append(item) jsonOut['attributes'] = items; def dumpLinks(self, uri, jsonOut): rsp_json = self.makeRequest(uri) links_json = rsp_json['links'] if len(links_json) > 0: linkDict = [] jsonOut['links'] = links_json def dumpGroup(self, uri, jsonOut): rsp_json = self.makeRequest(uri) group_uuid = rsp_json['id'] jsonOut[group_uuid] = {} self.dumpLinks(uri + '/links', jsonOut[group_uuid]) self.dumpAttributes(uri + '/attributes', jsonOut[group_uuid]) def dumpGroups(self): uri = "/groups" rsp_json = self.makeRequest(uri) jsonOut = {} self.json['groups'] = jsonOut group_ids = rsp_json['groups'] group_ids.append(self.json['root']) # add in root group for group_uuid in group_ids: uri = "/groups/" + group_uuid self.dumpGroup(uri, jsonOut) def dumpDataset(self, uri, jsonOut): rsp_json = self.makeRequest(uri) dset_uuid = rsp_json['id'] dset_json = {} dset_json['shape'] = rsp_json['shape'] dset_json['type'] = rsp_json['type'] # get the data values rsp_json = self.makeRequest(uri + '/value') if not self.noDsetData: # get the dataset values if 'value' in rsp_json: data = rsp_json['value'] if data: dset_json['value'] = data jsonOut[dset_uuid] = dset_json self.dumpAttributes(uri + '/attributes', jsonOut[dset_uuid]) def dumpDatasets(self): rsp_json = self.makeRequest("/datasets") jsonOut = {} self.json['datasets'] = jsonOut dataset_ids = rsp_json['datasets'] for dset_uuid in dataset_ids: uri = "/datasets/" + dset_uuid self.dumpDataset(uri, jsonOut) def dumpDatatype(self, uri, jsonOut): rsp_json = self.makeRequest(uri) dtype_uuid = rsp_json['id'] type_json = {} type_json['type'] = rsp_json['type'] jsonOut[dtype_uuid] = type_json self.dumpAttributes(uri + '/attributes', jsonOut[dtype_uuid]) def dumpDatatypes(self): rsp_json = self.makeRequest("/datatypes") jsonOut = {} self.json['datatypes'] = jsonOut datatype_ids = rsp_json['datatypes'] for dtype_uuid in datatype_ids: uri = "/datatypes/" + dtype_uuid self.dumpDatatype(uri, jsonOut) def dumpDomain(self): rsp_json = self.makeRequest("/") self.json = {} # save the root uuid self.json['root'] = rsp_json['root'] self.dumpGroups() self.dumpDatasets() self.dumpDatatypes() print(json.dumps(self.json, sort_keys=True, indent=4)) # # Print usage and exit # def printUsage(): print("usage: python exportjson.py [-v] [-D|d] 
[-endpoint=<server_ip>] [-port=<port>] <domain>")
    print(" options -v: verbose, print request and response codes from server")
    print(" options -D: suppress all data output")
    print(" options -d: suppress data output for datasets (but not attributes)")
    print(" options -endpoint: specify IP endpoint of server")
    print(" options -port: port address of server [default 7253]")
    print(" ------------------------------------------------------------------------------")
    print(" Example - get 'tall' collection from HDF Group server:")
    print("   python exportjson.py tall.data.hdfgroup.org")
    print(" Example - get 'tall' collection from a local server instance ")
    print(" (assuming the server is using port 5000):")
    print("   python exportjson.py -endpoint=127.0.0.1 -port=5000 tall.test.hdfgroup.org")
    sys.exit()

#
# main
#
def main():
    nargs = len(sys.argv)

    dumper = DumpJson()
    dumper.verbose = False
    dumper.endpoint = None
    dumper.port = 7253
    dumper.noDsetData = False
    dumper.noAttrData = False

    endpoint_option = "-endpoint="
    port_option = "-port="
    option_count = 0
    for arg in sys.argv:
        if arg.startswith(endpoint_option):
            endpoint = arg[len(endpoint_option):]
            if endpoint.startswith("http"):
                dumper.endpoint = endpoint
            else:
                dumper.endpoint = "http://" + endpoint
            option_count += 1
        elif arg.startswith(port_option):
            port = arg[len(port_option):]
            dumper.port = int(port)
            option_count += 1
        elif arg == "-v":
            dumper.verbose = True
        elif arg == "-D":
            dumper.noDsetData = True
            dumper.noAttrData = True
        elif arg == "-d":
            dumper.noDsetData = True

    if nargs - option_count <= 1:
        printUsage()

    dumper.domain = sys.argv[nargs-1]
    dumper.dumpDomain()

main()


================================================
FILE: util/rebuildIndex.py
================================================
import sys
import h5py
import logging
from h5json import Hdf5db

dbname = "__db__"

if len(sys.argv) < 2 or sys.argv[1] == "-h" or sys.argv[1] == "--help":
    print("Usage: python rebuildIndex.py <filename>")
    print("Warning: this utility will delete any previous UUIDs!")
    sys.exit()

# setup logger
log = logging.getLogger("rebuildIndex")
log.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
# create formatter
formatter = logging.Formatter(
    "%(levelname)s:%(filename)s:%(lineno)d::%(message)s")
handler.setFormatter(formatter)
log.addHandler(handler)
log.propagate = False

filepath = sys.argv[1]
log.info("opening file: " + filepath)

# remove the old index
f = h5py.File(filepath, 'a')
if dbname in f:
    log.info("deleting old db group")
    del f[dbname]
f.close()

# now open with hdf5db
with Hdf5db(filepath, app_logger=log) as db:
    # the actual index rebuilding will happen in the init function
    root_uuid = db.getUUIDByPath('/')
    print("root_uuid:", root_uuid)

print("done!")
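
# ---------------------------------------------------------------------------
# Illustrative sketch (added commentary, not part of rebuildIndex.py): once
# the index has been rebuilt, the per-file object database can be spot-checked
# with plain h5py. The collection group names below are the ones dumpobjdb.py
# reads; "somefile.h5" is a hypothetical filename.
# ---------------------------------------------------------------------------
import h5py

def verify_index(filepath):
    # open read-only and confirm the __db__ group and its collections exist
    with h5py.File(filepath, 'r') as f:
        if "__db__" not in f:
            print("no __db__ group found - index is missing")
            return False
        db_grp = f["__db__"]
        for name in ("{groups}", "{datasets}", "{datatypes}", "{addr}"):
            count = len(db_grp[name]) if name in db_grp else 0
            print(name, "entries:", count)
        return True

# verify_index("somefile.h5")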