commit e9af73dfe8de800650cd2b54c168c5440434c4a6 Author: Christian Klopp Date: Tue Nov 7 14:41:38 2017 +0100 initial commit diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..3e121ee --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,76 @@ +Eclipse Public License -v 1.0 +============================= + +THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE (“AGREEMENT”). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +### 1. Definitions + +“Contribution” means: +* **a)** in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and +* **b)** in the case of each subsequent Contributor: + * **i)** changes to the Program, and + * **ii)** additions to the Program; +where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: **(i)** are separate modules of software distributed in conjunction with the Program under their own license agreement, and **(ii)** are not derivative works of the Program. + +“Contributor” means any person or entity that distributes the Program. + +“Licensed Patents ” mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program. + +“Program” means the Contributions distributed in accordance with this Agreement. + +“Recipient” means anyone who receives the Program under this Agreement, including all Contributors. + +### 2. Grant of Rights + +**a)** Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form. + +**b)** Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder. + +**c)** Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. 
For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program. + +**d)** Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement. + +### 3. Requirements + +A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that: +* **a)** it complies with the terms and conditions of this Agreement; and +* **b)** its license agreement: + * **i)** effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose; + * **ii)** effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits; + * **iii)** states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and + * **iv)** states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange. + +When the Program is made available in source code form: +* **a)** it must be made available under this Agreement; and +* **b)** a copy of this Agreement must be included with each copy of the Program. + +Contributors may not remove or alter any copyright notices contained within the Program. + +Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution. + +### 4. Commercial Distribution + +Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor (“Commercial Contributor”) hereby agrees to defend and indemnify every other Contributor (“Indemnified Contributor”) against any losses, damages and costs (collectively “Losses”) arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: **a)** promptly notify the Commercial Contributor in writing of such claim, and **b)** allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense. + +For example, a Contributor might include the Program in a commercial product offering, Product X. 
That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages. + +### 5. No Warranty + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations. + +### 6. Disclaimer of Liability + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +### 7. General + +If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed. + +All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive. + +Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. 
The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved. + +This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. + diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..8bb307b --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1 @@ +include utils.py diff --git a/README.md b/README.md new file mode 100644 index 0000000..31a947e --- /dev/null +++ b/README.md @@ -0,0 +1,47 @@ +

+ +# OpenMTC +

+ +The OpenMTC SDK aims to provide developers with a convenient yet flexible tool to write oneM2M compliant applications. This includes network applications (NAs), gateway application (GAs), device applications (DAs), as well as interworking proxy entities (IPEs). + +# Table of Content + +- [Quick Start](doc/openmtc-get-started.md) +- [Introduction](doc/introduction.md) +- [Deployment](doc/deployment-guide.md) +- [The MQTT Client](doc/onem2m-client-mqtt.md) +- [Authentication Guide](doc/authentication.md) +- [Installation of the OpenMTC SDK](doc/install-sdk.md) +- [Overview REST API](doc/overview-rest-api.md) +- [Write your first OpenMTC applications](doc/training/training-index.md) +- [SDK - Using the Application Framework](doc/sdk-framework.md) +- [SDK - The low-level CSE Client](doc/sdk-client.md) +- [SDK - The Data Model](doc/sdk-datamodel.md) +- Examples + - [IoT Data Visualization](doc/example-apps/IoT-data-visualization.py) + - [Data Aggregation](doc/example-apps/data-aggregation.py) + - [Simple Decision](doc/example-apps/simple-decision.py) + - [Simple Decision 2](doc/example-apps/simple-decision-2.py) +- Scripts + - [Create App Structure Script](doc/create-app-structure.md) + - [Create binary docker images Script](doc/create-binary-docker.md) +- [Code Repository Structure](doc/repository-structure.md) +- [Developer FAQ](doc/developer-faq.md) + + +# Python + +The OpenMTC SDK is written in and for the Python programming language. Users should therefore have at least a certain knowledge of Python and its paradigms. For this matter, the following material is recommended: + +- [The Python Homepage](http://www.python.org) +- [Expert Python Programming by Tarek Ziadé](http://www.e-reading.by/bookreader.php/138816/Ziade_-_Expert_Python_programming.pdf) +- [Code Like a Pythonista: Idiomatic Python by David Goodger](http://python.net/~goodger/projects/pycon/2007/idiomatic/handout.html) + +# Feedback + +Please create issues for any problems and direct any comments or feedback you are having to support@openmtc.org + +Please let us know what you think. We are also very interested in any use case you are *not* able to implement with the SDK or if you find it difficult to do so. + diff --git a/apps/OrionContextBroker/MANIFEST.in b/apps/OrionContextBroker/MANIFEST.in new file mode 100644 index 0000000..8bb307b --- /dev/null +++ b/apps/OrionContextBroker/MANIFEST.in @@ -0,0 +1 @@ +include utils.py diff --git a/apps/OrionContextBroker/README.md b/apps/OrionContextBroker/README.md new file mode 100644 index 0000000..5de2078 --- /dev/null +++ b/apps/OrionContextBroker/README.md @@ -0,0 +1,103 @@ +# Introduction + +OrionContextBroker is an OpenMTC AE to forward OpenMTC data (via Subscription) to an instance of the Orion Context Broker. +All ContentInstances are expected to use the SenML format. It is possible to connect the AE either to an OpenMTC Gateway or an OpenMTC Backend. + +# Getting started + +Within the openmtc root directory the app can be started via + +``` +./apps/orion-context-broker -v +``` + +## Configuration + +It is possible to configure the AE either via config.json or CLI paramters. 
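+For example, the endpoint and the Orion CB host can be set directly on the command line (a sketch using the default values; the argument names are the ones registered in `__main__.py` later in this commit):
+
+```
+./apps/orion-context-broker --ep http://localhost:8000 --orion_host http://localhost:1026 --interval 10 --labels ""
+```
+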
All possible paramters can be shown via: + +``` +./apps/orion-context-broker -h +``` + +The most important parameters are: + +* ep (the OpenMTC host) +* labels (the labels that should be forwarded to the OrionCB, one label has to match (OR), empty ([""]) means every label) +* interval (for periodic discovery) +* orion_host (hostname:port of the Orion CB) + +# How the data is stored at the Orion CB + +The Orion CB uses the model of *entities* having *attributes*. The AE matches all Container having the label "openmtc:device" to entities. Attributes are matched to the SenML Key "n" of ContentInstances. The types of values are determined by the AE to match typical Orion CB types (e.g. Int, String, Float...). + +## Example + +### Create Data in OpenMTC + +Create an App with OpenMTC: + +``` +curl -X POST localhost:18000/onem2m/ -H "Content-Type: application/vnd.onem2m-res+json" -d '{"m2m:ae": {"rn": "EXAMPLE_APP_NAME", "api": "placeholder", "rr": "TRUE"}}' +``` + +Create an Device with OpenMTC: + +``` +curl -X POST localhost:18000/onem2m/EXAMPLE_APP_NAME/ -H "Content-Type: application/vnd.onem2m-res+json" -d '{"m2m:cnt": {"rn": "EXAMPLE_DEVICE_NAME", "lbl":["openmtc:device"]}}' +``` + +Create an Measurment (Sensor data container) with OpenMTC: + +``` +curl -X POST localhost:18000/onem2m/EXAMPLE_APP_NAME/EXAMPLE_DEVICE_NAME/ -H "Content-Type: application/vnd.onem2m-res+json" -d '{"m2m:cnt": {"rn": "EXAMPLE_MEASUREMENT_NAME", "lbl":["openmtc:sensor_data"]}}' +``` + +Upload SenML Data to OpenMTC: + +```json +{ + "n": "temperature", + "bn": "openmtc:zigbee:temp", + "v": 24, + "u": "Cel", + "t": "2017-04-13 12:45:12.787239" +} +``` +base64: eyJuIjogInRlbXBlcmF0dXJlIiwgImJuIjogIm9wZW5tdGM6emlnYmVlOnRlbXAiLCAidiI6IDI0LCAidSI6ICJDZWwiLCAidCI6ICIyMDE3LTA0LTEzIDEyOjQ1OjEyLjc4NzIzOSJ9Cg== + +``` +curl -X POST localhost:18000/onem2m/EXAMPLE_APP_NAME/EXAMPLE_DEVICE_NAME/EXAMPLE_MEASUREMENT_NAME/ -H "Content-Type: application/vnd.onem2m-res+json" -d '{"m2m:cin": {"con": "eyJuIjogInRlbXBlcmF0dXJlIiwgImJuIjogIm9wZW5tdGM6emlnYmVlOnRlbXAiLCAidiI6IDI0LCAidSI6ICJDZWwiLCAidCI6ICIyMDE3LTA0LTEzIDEyOjQ1OjEyLjc4NzIzOSJ9Cg==", "cnf": "application/json:1"}}' +``` + +### Query Data Orion CB + +``` +curl localhost:1026/v2/entities/ | jq '.' 
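+# a single entity can also be fetched by id (a sketch, reusing the
+# EXAMPLE_DEVICE_NAME entity created above):
+curl localhost:1026/v2/entities/EXAMPLE_DEVICE_NAME | jq '.'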
+``` + +```json +[ + { + "id": "EXAMPLE_DEVICE_NAME", + "type": "openmtc", + "temperature": { + "type": "Int", + "value": 24, + "metadata": { + "bn": { + "type": "String", + "value": "openmtc:zigbee:temp" + }, + "timestamp": { + "type": "String", + "value": "2017-04-13 12:45:12.787239" + }, + "unit": { + "type": "String", + "value": "Cel" + } + } + } + } +] +``` diff --git a/apps/OrionContextBroker/bin/openmtc-orion-context-broker b/apps/OrionContextBroker/bin/openmtc-orion-context-broker new file mode 100755 index 0000000..1908c2d --- /dev/null +++ b/apps/OrionContextBroker/bin/openmtc-orion-context-broker @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +exec python -m orioncontextbroker $@ diff --git a/apps/OrionContextBroker/config.json b/apps/OrionContextBroker/config.json new file mode 100644 index 0000000..49fe4a0 --- /dev/null +++ b/apps/OrionContextBroker/config.json @@ -0,0 +1,22 @@ +{ + "name": "OrionContextBroker", + "ep": "http://localhost:8000", + "cse_base": "onem2m", + "poas": [ + "http://auto:25396" + ], + "originator_pre": "//openmtc.org/mn-cse-1", + "ssl_certs": { + "cert_file": null, + "key_file": null, + "ca_certs": null + }, + "logging": { + "level": "ERROR", + "file": null + }, + "labels": [""], + "interval": 10, + "orion_host": "http://localhost:1026", + "orion_api": "v2" +} diff --git a/apps/OrionContextBroker/docker/configure-orioncontextbroker-and-start b/apps/OrionContextBroker/docker/configure-orioncontextbroker-and-start new file mode 100755 index 0000000..5a2e7cf --- /dev/null +++ b/apps/OrionContextBroker/docker/configure-orioncontextbroker-and-start @@ -0,0 +1,61 @@ +#!/usr/bin/env bash + +CONFIG_FILE="/etc/openmtc/orioncontextbroker/config.json" + +NAME=${NAME-"OrionContextBroker"} +EP=${EP-"http://localhost:8000"} +CSE_BASE=${CSE_BASE-"onem2m"} +POAS=${POAS-'["http://auto:25396"]'} +ORIGINATOR_PRE=${ORIGINATOR_PRE-"//openmtc.org/mn-cse-1"} +SSL_CRT=${SSL_CRT-"/etc/openmtc/certs/orioncontextbroker.cert.pem"} +SSL_KEY=${SSL_KEY-"/etc/openmtc/certs/orioncontextbroker.key.pem"} +SSL_CA=${SSL_CA-"/etc/openmtc/certs/ca-chain.cert.pem"} +ORION_HOST=${ORION_HOST-"http://localhost:1026"} +ORION_API=${ORION_API-"v2"} + +# defaults logging +LOGGING_FILE=${LOGGING_FILE-"/var/log/openmtc/orioncontextbroker.log"} +LOGGING_LEVEL=${LOGGING_LEVEL-"ERROR"} + +# ensure correct level +case ${LOGGING_LEVEL} in + FATAL|ERROR|WARN|INFO|DEBUG) + ;; + *) + LOGGING_LEVEL="ERROR" + ;; +esac + +# local ip +LOCAL_IP=$(ip r get 8.8.8.8 | awk 'NR==1 {print $NF}') + +# set hostname +HOST_NAME=${EXTERNAL_IP-${LOCAL_IP}} + +# Configuration of the service. +CONFIG_TEMP=${CONFIG_FILE}".tmp" +echo -n "Configuring M2M orioncontextbroker..." +JQ_STRING='.' 
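+# build up a single jq filter from the environment variables (or their
+# defaults above); it is applied to config.json further below, so the
+# container can be configured entirely via ENV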
+ +# basics +JQ_STRING=${JQ_STRING}' | + .name = "'${NAME}'" | + .ep = "'${EP}'" | + .cse_base = "'${CSE_BASE}'" | + .poas = '${POAS}' | + .originator_pre = "'${ORIGINATOR_PRE}'" | + .orion_host = "'${ORION_HOST}'" | + .orion_api = "'${ORION_API}'" | + .ssl_certs.cert_file = "'${SSL_CRT}'" | + .ssl_certs.key_file = "'${SSL_KEY}'" | + .ssl_certs.ca_certs = "'${SSL_CA}'" | + .logging.file |= "'${LOGGING_FILE}'" | + .logging.level |= "'${LOGGING_LEVEL}'" +' + +cat ${CONFIG_FILE} | jq -M "${JQ_STRING}"> ${CONFIG_TEMP} +mv ${CONFIG_TEMP} ${CONFIG_FILE} + +echo "done" + +exec python -m orioncontextbroker $@ diff --git a/apps/OrionContextBroker/docker/orioncontextbroker-amd64 b/apps/OrionContextBroker/docker/orioncontextbroker-amd64 new file mode 100644 index 0000000..f12c354 --- /dev/null +++ b/apps/OrionContextBroker/docker/orioncontextbroker-amd64 @@ -0,0 +1,30 @@ +############################################################ +# Dockerfile to run openmtc orioncontextbroker binary +############################################################ + +# Set the base image to use openmtc/sdk +FROM openmtc/sdk-amd64:latest + +ENV MOD_NAME=orioncontextbroker + +# Set the file maintainer +MAINTAINER rst + +# install openmtc dependencies +COPY tmp/$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-orioncontextbroker +COPY tmp/openmtc-$MOD_NAME.tar.gz /tmp/openmtc-$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-$MOD_NAME.tar.gz -C / \ + --owner root --group root --no-same-owner --no-overwrite-dir \ + --transform 's/json\.dist/json/' --show-transformed + +RUN mkdir -p /var/log/openmtc + +# add change config +COPY configure-$MOD_NAME-and-start /usr/local/bin/configure-and-start + +# entry point +ENTRYPOINT ["/usr/local/bin/configure-and-start"] +CMD [""] diff --git a/apps/OrionContextBroker/docker/orioncontextbroker-arm b/apps/OrionContextBroker/docker/orioncontextbroker-arm new file mode 100644 index 0000000..115706a --- /dev/null +++ b/apps/OrionContextBroker/docker/orioncontextbroker-arm @@ -0,0 +1,30 @@ +############################################################ +# Dockerfile to run openmtc orioncontextbroker binary +############################################################ + +# Set the base image to use openmtc/sdk +FROM openmtc/sdk-arm:latest + +ENV MOD_NAME=orioncontextbroker + +# Set the file maintainer +MAINTAINER rst + +# install openmtc dependencies +COPY tmp/$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-orioncontextbroker +COPY tmp/openmtc-$MOD_NAME.tar.gz /tmp/openmtc-$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-$MOD_NAME.tar.gz -C / \ + --owner root --group root --no-same-owner --no-overwrite-dir \ + --transform 's/json\.dist/json/' --show-transformed + +RUN mkdir -p /var/log/openmtc + +# add change config +COPY configure-$MOD_NAME-and-start /usr/local/bin/configure-and-start + +# entry point +ENTRYPOINT ["/usr/local/bin/configure-and-start"] +CMD [""] diff --git a/apps/OrionContextBroker/etc/conf/config.json.dist b/apps/OrionContextBroker/etc/conf/config.json.dist new file mode 100644 index 0000000..7162cb0 --- /dev/null +++ b/apps/OrionContextBroker/etc/conf/config.json.dist @@ -0,0 +1,22 @@ +{ + "name": "OrionContextBroker", + "ep": "http://localhost:8000", + "cse_base": "onem2m", + "poas": [ + "http://auto:25396" + ], + "originator_pre": "//openmtc.org/mn-cse-1", + "ssl_certs": { + "cert_file": 
"/etc/openmtc/certs/orioncontextbroker.cert.pem", + "key_file": "/etc/openmtc/certs/orioncontextbroker.key.pem", + "ca_certs": "/etc/openmtc/certs/ca-chain.cert.pem" + }, + "logging": { + "level": "INFO", + "file": "/var/log/openmtc/orioncontextbroker.log" + }, + "labels": [], + "interval": 1, + "orion_host": "http://localhost:1026", + "orion_api": "v2" +} diff --git a/apps/OrionContextBroker/etc/systemd/system/openmtc-orioncontextbroker.service b/apps/OrionContextBroker/etc/systemd/system/openmtc-orioncontextbroker.service new file mode 100644 index 0000000..02f9b12 --- /dev/null +++ b/apps/OrionContextBroker/etc/systemd/system/openmtc-orioncontextbroker.service @@ -0,0 +1,10 @@ +[Unit] +Description=OpenMTC OrionContextBroker +After=network.target +Wants=ntp.service + +[Service] +ExecStart=/usr/local/bin/orion-context-broker + +[Install] +WantedBy=multi-user.target diff --git a/apps/OrionContextBroker/setup-orioncontextbroker.py b/apps/OrionContextBroker/setup-orioncontextbroker.py new file mode 100755 index 0000000..eb11b17 --- /dev/null +++ b/apps/OrionContextBroker/setup-orioncontextbroker.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python + +from setuptools import setup +from distutils.core import setup +from glob import glob +import sys + +from utils import get_packages, get_pkg_files, OpenMTCSdist, move_config_files + +# name and dir +NAME = "orioncontextbroker" +BASE_DIR = "." + +# import pkg +sys.path.append(BASE_DIR + "/src") +pkg = __import__(NAME) + +# setup name and version +SETUP_NAME = "openmtc-" + NAME +SETUP_VERSION = pkg.__version__ +SETUP_DESCRIPTION = pkg.__description__ + +# meta +SETUP_AUTHOR = pkg.__author_name__ +SETUP_AUTHOR_EMAIL = pkg.__author_mail__ +SETUP_URL = "http://www.openmtc.org" +SETUP_LICENSE = "Fraunhofer FOKUS proprietary" + +# requirements +SETUP_REQUIRES = pkg.__requires__ +SETUP_INSTALL_REQUIRES = pkg.__requires__ + +# packages +PACKAGES = [NAME] +PACKAGE_DIR = {"": BASE_DIR + "/src"} +all_packages = [] +for package in PACKAGES: + all_packages.extend(get_packages(package, PACKAGE_DIR)) + +# scripts +SETUP_SCRIPTS = glob(BASE_DIR + "/bin/*") + +# package data +PACKAGE_DATA = {NAME: get_pkg_files(BASE_DIR, NAME)} + +# data files +CONFIG_FILES = ("config.json",) +CONFIG_DIR = "/etc/openmtc/" + NAME +CONFIG_DIST_FILES = (BASE_DIR + "/etc/conf/config.json.dist",) +DATA_FILES = [(CONFIG_DIR, CONFIG_DIST_FILES)] + +# cmd class +CMD_CLASS = {'sdist': OpenMTCSdist} + +if __name__ == "__main__": + if 'bdist_wheel' in sys.argv: + raise RuntimeError("This setup.py does not support wheels") + + ############################################################################ + # setup + setup(name=SETUP_NAME, + version=SETUP_VERSION, + description=SETUP_DESCRIPTION, + author=SETUP_AUTHOR, + author_email=SETUP_AUTHOR_EMAIL, + url=SETUP_URL, + license=SETUP_LICENSE, + requires=SETUP_REQUIRES, + install_requires=SETUP_INSTALL_REQUIRES, + package_dir=PACKAGE_DIR, + packages=all_packages, + scripts=SETUP_SCRIPTS, + package_data=PACKAGE_DATA, + data_files=DATA_FILES, + cmdclass=CMD_CLASS + ) + + ############################################################################ + # install + if "install" in sys.argv: + # only do this during install + move_config_files(CONFIG_DIR, CONFIG_FILES) diff --git a/apps/OrionContextBroker/src/orioncontextbroker/__init__.py b/apps/OrionContextBroker/src/orioncontextbroker/__init__.py new file mode 100644 index 0000000..5e91faf --- /dev/null +++ b/apps/OrionContextBroker/src/orioncontextbroker/__init__.py @@ -0,0 +1,10 @@ +""" +This App will 
forward all incoming sensor traffic to the Fiware Orion Context +Broker +""" + +__version__ = "0.1" +__description__ = "OrionContextBroker" +__author_name__ = "Christian Klopp" +__author_mail__ = "christian.klopp@fokus.fraunhofer.de" +__requires__ = [] diff --git a/apps/OrionContextBroker/src/orioncontextbroker/__init__.pyc b/apps/OrionContextBroker/src/orioncontextbroker/__init__.pyc new file mode 100644 index 0000000..262b6c3 Binary files /dev/null and b/apps/OrionContextBroker/src/orioncontextbroker/__init__.pyc differ diff --git a/apps/OrionContextBroker/src/orioncontextbroker/__main__.py b/apps/OrionContextBroker/src/orioncontextbroker/__main__.py new file mode 100644 index 0000000..97cac93 --- /dev/null +++ b/apps/OrionContextBroker/src/orioncontextbroker/__main__.py @@ -0,0 +1,55 @@ +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser + +from openmtc_app.util import prepare_app, get_value +from openmtc_app.runner import AppRunner as Runner +from .orion_context_broker import OrionContextBroker + +# defaults +default_name = "OrionContextBroker" +default_ep = "http://localhost:8000" +default_labels = [""] +default_interval = 10 # interval(s) to check status updates +default_orion_host = "http://localhost:1026" +default_orion_api = "v2" + +# args parser +parser = ArgumentParser( + description="Stores OpenMTC Date in an\ + instance of the Orion Context Broker", + prog="OrionContextBroker", + formatter_class=ArgumentDefaultsHelpFormatter) +parser.add_argument("-n", "--name", help="Name used for the AE.") +parser.add_argument("-s", "--ep", help="URL of the local Endpoint.") +parser.add_argument("--orion_host", help="URL of Orion CB") +parser.add_argument("--orion_api", help="Orion CB Api version (possible\ + values: \"v2\")") +parser.add_argument('--labels', type=str, help='just subscribe to those\ + labels', nargs='+') +parser.add_argument('--interval', type=int, help='update interval (s)') + +# args, config and logging +args, config = prepare_app(parser, __loader__, __name__, "config.json") + +# variables +nm = get_value("name", (unicode, str), default_name, args, config) +cb = config.get("cse_base", "onem2m") +ep = get_value("ep", (unicode, str), default_ep, args, config) +poas = config.get("poas", ["http://auto:25396"]) +originator_pre = config.get("originator_pre", "//openmtc.org/mn-cse-1") +ssl_certs = config.get("ssl_certs", {}) +interval = get_value("interval", (int), default_ep, args, config) +lbl = get_value("labels", (list), default_labels, args, config) +orion_host = get_value("orion_host", (unicode, str), + default_orion_host, args, config) +orion_api = get_value("orion_api", (unicode, str), + default_orion_api, args, config) + +# start +app = OrionContextBroker( + labels=lbl, interval=interval, orion_host=orion_host, orion_api=orion_api, + name=nm, cse_base=cb, poas=poas, + originator_pre=originator_pre, **ssl_certs +) +Runner(app).run(ep) + +print ("Exiting....") diff --git a/apps/OrionContextBroker/src/orioncontextbroker/orion_api.py b/apps/OrionContextBroker/src/orioncontextbroker/orion_api.py new file mode 100644 index 0000000..1552ccf --- /dev/null +++ b/apps/OrionContextBroker/src/orioncontextbroker/orion_api.py @@ -0,0 +1,137 @@ +""" +Copyright (c) 2017 Fraunhofer FOKUS +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import json +from urlparse import urljoin +import requests +from copy import deepcopy as deepcopy + +from futile.logging import get_logger +logger = get_logger(__name__) + + +# TODO: check if this is sufficient +def _isSenML(senml_dict): + is_senml = True + try: + is_senml = is_senml and ("bn") in senml_dict.keys() + is_senml = is_senml and ("n") in senml_dict.keys() + is_senml = is_senml and ("u") in senml_dict.keys() + is_senml = is_senml and ("v") in senml_dict.keys() + is_senml = is_senml and ("t") in senml_dict.keys() + except BaseException: + return False + return is_senml + + +def _get_type(element): + if isinstance(element, int): + return u"Int" + elif isinstance(element, float): + return u"Float" + elif isinstance(element, bool): + return u"Boolean" + elif isinstance(element, (str, unicode)): + return u"String" + else: + logger.error("Type of \"element\" unknown") + return "Unknown" + + +class OrionAPI: + + def __init__(self, orion_host=None, api_version="v2"): + self.host = orion_host + self.version = api_version + + def create_entity(self, entitiy_name, entity_type="openmtc", + fiware_service=""): + + payload_json = {"id": entitiy_name, "type": entity_type} + + if self.version == "v2": + logger.debug("Send Payload to Orion CB: %s", str(payload_json)) + response = self._request( + self.host + "/v2/entities", + method="post", + json=payload_json, + raw=True, + headers={ + "Content-type": "application/json", + "fiware-service": fiware_service} + ) + logger.debug("Send Payload to Orion CB: %s", str(payload_json)) + else: + logger.error("API version \"%s\" not supported!", self.version) + + def update_attributes(self, entity_id, data_senml, fiware_service=""): + data_senml = data_senml[0] + if not _isSenML(data_senml): + logger.error("Data \"%s\" is not valid SenML", data_senml) + return + + if data_senml["v"] == "type" or data_senml["v"] == "id": + logger.warn( + "SenML[v]=%s contains reserved name. 
Adding underscore", data_senml["v"]) + data_senml["v"] = data_senml["v"] + "_" + + payload_json = { + data_senml["n"]: { + "value": data_senml["v"], + "type": _get_type(data_senml["v"]), + "metadata": { + "timestamp": {"value": data_senml["t"], "type": "String"}, + "bn": {"value": data_senml["bn"], "type": "String"}, + "unit": {"value": data_senml["u"], "type": "String"} + } + } + } + + if self.version == "v2": + response = self._request( + self.host + "/v2/entities/" + entity_id + "/attrs", + method="post", + json=payload_json, + raw=True, + headers={ + "Content-type": "application/json", + "fiware-service": fiware_service} + ) + logger.debug("Send Payload to Orion CB: %s", str(payload_json)) + else: + logger.error("API version \"%s\" not supported!", self.version) + + def _request( + self, + url, + method='get', + json=None, + params=None, + headers=None, + raw=False): + + joined_url = urljoin(self.host, url) + try: + req = requests.request(method, joined_url, json=json, + params=params, headers=headers) + logger.debug("Status Code: %s", req.status_code) + logger.debug("Content: %s", req.content) + if raw: + return {"status": req.status_code, "content": req.content} + else: + return {"status": req.status_code, "content": req.json()} + except requests.ConnectionError as e: + print "Connection Error: " + str(e) + return {"status": -1, "content": None} diff --git a/apps/OrionContextBroker/src/orioncontextbroker/orion_api.pyc b/apps/OrionContextBroker/src/orioncontextbroker/orion_api.pyc new file mode 100644 index 0000000..b79a1aa Binary files /dev/null and b/apps/OrionContextBroker/src/orioncontextbroker/orion_api.pyc differ diff --git a/apps/OrionContextBroker/src/orioncontextbroker/orion_context_broker.py b/apps/OrionContextBroker/src/orioncontextbroker/orion_context_broker.py new file mode 100644 index 0000000..b9a6bc4 --- /dev/null +++ b/apps/OrionContextBroker/src/orioncontextbroker/orion_context_broker.py @@ -0,0 +1,159 @@ +""" +Copyright (c) 2017 Fraunhofer FOKUS +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse + +from futile.logging import get_logger +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import CSETypeIDE, RemoteCSE + +from orion_api import OrionAPI + +logger = get_logger(__name__) + + +class OrionContextBroker(XAE): + + def __init__(self, labels=[""], interval=10, + orion_host="http://localhost:1026", orion_api="v2", + *args, **kw): + super(OrionContextBroker, self).__init__(*args, **kw) + self.labels = labels + self.remove_registration = True + self.interval = interval + self.orion_api = OrionAPI(orion_host=orion_host, api_version=orion_api) + + def _on_register(self): + # init variables + self._known_remote_cses = {} + self._discovered_devices = {} + self._discovered_sensors = {} + + # connected to backend or gateway? 
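+        # (the cseType attribute of the CSE base tells us which: MN_CSE or
+        # AEN_CSE means this AE talks to a gateway, anything else is treated
+        # as the backend, where registered remote CSEs are discovered as well)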
+ cse_base = self.get_resource(self.cse_base) + logger.debug("CSE_BASE: %s", cse_base) + + if (cse_base.cseType == CSETypeIDE.MN_CSE or + cse_base.cseType == CSETypeIDE.AEN_CSE): + logger.debug("CSE_BASE identified as gateway") + # discover gateway + self._discover_cse(cse_base.CSE_ID + '/' + self.cse_base) + else: + logger.debug("CSE_BASE identified as backend") + # discover backend + self._discover_cse(cse_base.CSE_ID + '/' + self.cse_base) + # discover remote gateways + self._get_remote_cses(cse_base) + + # get remote CSEs + + def _get_remote_cses(self, cse_base): + + def get_cse_base(): + handle_cse_base(self.get_resource(self.cse_base)) + + def handle_cse_base(cb): + for resource in cb.childResource: + if (isinstance(resource, RemoteCSE) and + resource.path not in self._known_remote_cses): + remote_cse = self.get_resource(resource.id) + self._known_remote_cses[resource.path] = remote_cse + remote_cse_base = (remote_cse.CSE_ID + '/' + + remote_cse.CSEBase) + self._discover_cse(remote_cse_base) + + handle_cse_base(cse_base) + self.run_forever(self.interval, get_cse_base) + + # discover CSE + def _discover_cse(self, cse_base): + + def err_cb(errror_response): + try: + del self._known_remote_cses[remote_cse_id] + except KeyError: + pass + # discover devices + self.periodic_discover(cse_base, {'labels': ['openmtc:device']}, + self.interval, + self._discover_devices) + self.periodic_discover(cse_base, {'labels': ['openmtc:sensor_data', + 'openmtc:actuator_data']}, + self.interval, + self._discover_sensors, err_cb) + + def _discover_devices(self, discovery): + for device_path in discovery: + self._discovered_devices[device_path] = 0 + logger.debug("Discovered devices: %s", self._discovered_devices) + + def _handle_sensor_data(self, container, data): + logger.debug("Got Sensor \"%s\" data: %s", container, data) + # XXX if a label contains 3x '/' assume that we need smart orchestra + # naming + try: + entity_name = next(lbl for lbl in self.get_resource( + container).labels if lbl.count('/') == 3) + tenant_name = entity_name.split('/')[0] + entity_name = '-'.join(entity_name.split('/')[1:3]) + except Exception as e: + entity_name = container.split('/')[-2] + tenant_name = "" + + self.orion_api.create_entity(entity_name, fiware_service=tenant_name) + self.orion_api.update_attributes( + entity_name, + data, + fiware_service=tenant_name) + + def _handle_new_sensor(self, sensor_path): + + # check labels of openmtc:device + device_labels = self.get_resource( + "/".join(sensor_path.split('/')[0:-1])).labels + # if label configured + if not ((len(self.labels) == 0) or + (len(self.labels) == 1 and self.labels[0] == "")): + # if no matching label + if len(set(self.labels) & set(device_labels)) == 0: + # no matching label no subscription + logger.debug("no matching label for %s", sensor_path) + return + + logger.debug("Subscription added for %s", sensor_path) + self.add_container_subscription(sensor_path, self._handle_sensor_data) + + def _discover_sensors(self, discovery): + for sensor_path in discovery: + try: + dev_path = [x for x in self._discovered_devices.keys() + if sensor_path.startswith(x)][0] + except IndexError as e: # todo(rst): ignore, but should not happen + logger.debug("%s", e) + logger.debug("%s", sensor_path) + continue + self._discovered_sensors[sensor_path] = { + 'ID': sensor_path, + 'dev_name': dev_path.split('/')[-1], + 'cse_id': sensor_path.split('/')[1], + 'data': None, + 'type': 'sensor', + 'n': None, + 'u': None + } + self._handle_new_sensor(sensor_path) diff --git 
a/apps/OrionContextBroker/src/orioncontextbroker/orion_context_broker.pyc b/apps/OrionContextBroker/src/orioncontextbroker/orion_context_broker.pyc new file mode 100644 index 0000000..05f1e86 Binary files /dev/null and b/apps/OrionContextBroker/src/orioncontextbroker/orion_context_broker.pyc differ diff --git a/apps/OrionContextBroker/utils.py b/apps/OrionContextBroker/utils.py new file mode 100644 index 0000000..d8a733f --- /dev/null +++ b/apps/OrionContextBroker/utils.py @@ -0,0 +1,148 @@ +import distutils.command.sdist +import distutils.command.build_py +import os +import subprocess +import sys + + +def echo(msg, *args): + if args: + msg = msg % args + sys.stdout.write(msg + "\n") + + +def get_packages(package, package_dir, excluded_list=None, included_list=None): + included_list = included_list or [] + excluded_list = excluded_list or [] + + try: + root = package_dir[package] + except KeyError: + root = package_dir.get("", ".") + "/" + package + + if not os.path.exists(root): + sys.stderr.write( + "Directory for package %s does not exist: %s\n" % (package, root)) + sys.exit(1) + + def on_error(error): + sys.stderr.write( + "Error while collecting packages for %s: %s\n" % (package, error)) + sys.exit(1) + + packages = [package] + + r_prefix = len(root) + 1 + for path, dirs, files in os.walk(root, onerror=on_error): + is_module = "__init__.py" in files and path != root + excluded = any(map(lambda x: x in path, excluded_list)) + included = any(map(lambda x: x in path, included_list)) + if is_module and (not excluded or included): + packages.append(package + "." + path[r_prefix:].replace("/", ".")) + + return packages + + +def get_pkg_files(base_dir, name): + package_files = [] + pkg_dir = os.path.join(base_dir, 'src', name) + pkg_data_dir = os.path.join(pkg_dir, 'static') + for (path, directories, filenames) in os.walk(pkg_data_dir): + for filename in filenames: + package_files.append(os.path.join(os.path.relpath(path, pkg_dir), + filename)) + return package_files + + +def enable_init_files(init_dir, init_dist_files): + for f in init_dist_files: + os.chmod(os.path.join(init_dir, os.path.basename(f)), 0755) + + +def move_config_files(config_dir, config_files): + for f in config_files: + target_file = os.path.join(config_dir, f) + if not os.path.exists(target_file): + echo("Installing config file %s", target_file) + os.rename(target_file + ".dist", target_file) + # os.chmod(target_file, 0644) + else: + echo("Not overwriting config file %s", target_file) + + +def create_openmtc_user(db_dir=None, log_dir=None): + try: + from pwd import getpwnam + except ImportError: + print "Could not import the 'pwd' module. 
Skipping user management" + else: + # assuming DB_DIR was created by setup already + try: + pw = getpwnam('openmtc') + except KeyError as e: + try: + # add system user openmtc:openmtc + # useradd --system -UM openmtc + useradd = "useradd --system -UM openmtc" + retcode = subprocess.call(useradd, shell=True) + if retcode: + raise Exception("Failed to add user 'openmtc'") + pw = getpwnam('openmtc') + except Exception as e: + sys.stderr.write("Error creating user: %s\n" % (e, )) + sys.exit(1) + uid = pw.pw_uid + gid = pw.pw_gid + + # set path permissions + if db_dir: + os.chown(db_dir, uid, gid) + if log_dir: + os.chown(log_dir, uid, gid) + + +class OpenMTCSdist(distutils.command.sdist.sdist): + def make_release_tree(self, base_dir, files): + distutils.command.sdist.sdist.make_release_tree(self, base_dir, files) + + script_name = os.path.basename(sys.argv[0]) + + if script_name != "setup.py": + os.rename(base_dir + "/" + script_name, base_dir + "/setup.py") + self.filelist.files.remove(script_name) + self.filelist.files.append("setup.py") + + +class OpenMTCSdistBinary(OpenMTCSdist, object): + def make_release_tree(self, base_dir, files): + super(OpenMTCSdistBinary, self).make_release_tree(base_dir, files) + + script_name = os.path.basename(sys.argv[0]) + + build_py = self.get_finalized_command('build_py') + build_py.compile = 1 + build_py.optimize = 2 + build_py.retain_init_py = 1 + build_py.build_lib = base_dir + build_py.byte_compile( + [base_dir + "/" + f for f in self.filelist.files if + f != script_name and f.endswith(".py")]) + + +class OpenMTCBuildPy(distutils.command.build_py.build_py): + retain_init_py = 0 + + def byte_compile(self, files): + distutils.command.build_py.build_py.byte_compile(self, files) + + +class OpenMTCBuildPyBinary(OpenMTCBuildPy, object): + retain_init_py = 0 + + def byte_compile(self, files): + super(OpenMTCBuildPyBinary, self).byte_compile(files) + + for f in files: + if (f.endswith('.py') and (os.path.basename(f) != "__init__.py" or + not self.retain_init_py)): + os.unlink(f) diff --git a/apps/orion-context-broker b/apps/orion-context-broker new file mode 100755 index 0000000..e575968 --- /dev/null +++ b/apps/orion-context-broker @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +cd $(dirname ${0}) + +. ./prep-env.sh + +cd OrionContextBroker + +PYTHONPATH=${PYTHONPATH}:src exec python -m orioncontextbroker $@ diff --git a/apps/prep-env.sh b/apps/prep-env.sh new file mode 100755 index 0000000..89704b1 --- /dev/null +++ b/apps/prep-env.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env sh + +cd ../common +. 
../common/prep-env.sh +cd ../apps + +_SRC_PATH="../openmtc-app/src" +_READLINK_PATH="$(readlink ${_SRC_PATH})" +PYTHONPATH=${PYTHONPATH}:$(pwd)/${_READLINK_PATH:-${_SRC_PATH}} + +echo PYTHONPATH: ${PYTHONPATH} + +export PYTHONPATH diff --git a/build/lib/aplus/__init__.py b/build/lib/aplus/__init__.py new file mode 100644 index 0000000..01a2ba8 --- /dev/null +++ b/build/lib/aplus/__init__.py @@ -0,0 +1,456 @@ +import sys +from logging import DEBUG +from threading import Thread +from traceback import print_stack + +from futile.logging import LoggerMixin +from openmtc.exc import OpenMTCError + +if sys.subversion[0] != "CPython": + from inspect import ismethod, getargspec + +# TODO: kca: can't pass in values for then/error currently + + +def log_error(error): + if isinstance(error, OpenMTCError): + return False + return True + + +class Promise(LoggerMixin): + """ + This is a class that attempts to comply with the + Promises/A+ specification and test suite: + + http://promises-aplus.github.io/promises-spec/ + """ + + __slots__ = ("_state", "value", "reason", + "_callbacks", "_errbacks", "name") + + # These are the potential states of a promise + PENDING = -1 + REJECTED = 0 + FULFILLED = 1 + + def __init__(self, name=None): + """ + Initialize the Promise into a pending state. + """ + self._state = self.PENDING + self.value = None + self.reason = None + self._callbacks = [] + self._errbacks = [] + self.name = name + + def _fulfill(self, value): + """ + Fulfill the promise with a given value. + """ + + assert self._state == self.PENDING, "Promise state is not pending" + + self._state = self.FULFILLED + self.value = value + for callback in self._callbacks: + try: + callback(value) + except Exception: + # Ignore errors in callbacks + self.logger.exception("Error in callback %s", callback) + # We will never call these callbacks again, so allow + # them to be garbage collected. This is important since + # they probably include closures which are binding variables + # that might otherwise be garbage collected. + self._callbacks = [] + self._errbacks = [] + + def fulfill(self, value): + self._fulfill(value) + return self + + def _reject(self, reason, bubbling=False): + """ + Reject this promise for a given reason. + """ + + assert self._state == self.PENDING, "Promise state is not pending" + + if not bubbling and log_error(reason): + exc_info = sys.exc_info() + self.logger.debug("Promise (%s) rejected: %s", self.name, reason, + exc_info=exc_info[0] and exc_info or None) + self.logger.debug(self._errbacks) + if self.logger.isEnabledFor(DEBUG): + print_stack() + else: + pass + + self._state = self.REJECTED + self.reason = reason + for errback in self._errbacks: + try: + errback(reason) + except Exception: + self.logger.exception("Error in errback %s", errback) + # Ignore errors in callbacks + + # We will never call these errbacks again, so allow + # them to be garbage collected. This is important since + # they probably include closures which are binding variables + # that might otherwise be garbage collected. 
+ self._errbacks = [] + self._callbacks = [] + + def reject(self, reason): + self._reject(reason) + return self + + def isPending(self): + """Indicate whether the Promise is still pending.""" + return self._state == self.PENDING + + def isFulfilled(self): + """Indicate whether the Promise has been fulfilled.""" + return self._state == self.FULFILLED + + def isRejected(self): + """Indicate whether the Promise has been rejected.""" + return self._state == self.REJECTED + + def get(self, timeout=None): + """Get the value of the promise, waiting if necessary.""" + self.wait(timeout) + if self._state == self.FULFILLED: + return self.value + raise self.reason + + def wait(self, timeout=None): + """ + An implementation of the wait method which doesn't involve + polling but instead utilizes a "real" synchronization + scheme. + """ + import threading + + if self._state != self.PENDING: + return + + e = threading.Event() + self.addCallback(lambda v: e.set()) + self.addErrback(lambda r: e.set()) + e.wait(timeout) + + def addCallback(self, f): + """ + Add a callback for when this promise is fulfilled. Note that + if you intend to use the value of the promise somehow in + the callback, it is more convenient to use the 'then' method. + """ + self._callbacks.append(f) + + def addErrback(self, f): + """ + Add a callback for when this promise is rejected. Note that + if you intend to use the rejection reason of the promise + somehow in the callback, it is more convenient to use + the 'then' method. + """ + self._errbacks.append(f) + + if sys.subversion[0] != "CPython": + def _invoke(self, func, value): + try: + if value is None: + args, _, _, _ = getargspec(func) + arglen = len(args) + if not arglen or (arglen == 1 and ismethod(func)): + return func() + + return func(value) + except Exception as e: + if log_error(e): + self.logger.exception("Error in handler %s", func) + else: + self.logger.debug("Error in handler %s: %s", func, e) + raise + else: + def _invoke(self, func, value): + try: + if value is None: + try: + target = func.im_func + except AttributeError: + argcount = func.func_code.co_argcount + else: + argcount = target.func_code.co_argcount - 1 + + if argcount == 0: + return func() + + return func(value) + except Exception as e: + if log_error(e): + self.logger.exception("Error in handler %s", func) + else: + self.logger.debug("Error in handler %s: %s", func, repr(e)) + raise + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + if self.isPending(): + if exc_value is not None: + if log_error(exc_value): + self.logger.exception("Promise automatically rejected") + self._reject(exc_value, bubbling=True) + return True + else: + self.fulfill(None) + + def then(self, success=None, failure=None, name=None): + """ + This method takes two optional arguments. The first argument + is used if the "self promise" is fulfilled and the other is + used if the "self promise" is rejected. In either case, this + method returns another promise that effectively represents + the result of either the first of the second argument (in the + case that the "self promise" is fulfilled or rejected, + respectively). + + Each argument can be either: + * None - Meaning no action is taken + * A function - which will be called with either the value + of the "self promise" or the reason for rejection of + the "self promise". The function may return: + * A value - which will be used to fulfill the promise + returned by this method. 
+ * A promise - which, when fulfilled or rejected, will + cascade its value or reason to the promise returned + by this method. + * A value - which will be assigned as either the value + or the reason for the promise returned by this method + when the "self promise" is either fulfilled or rejected, + respectively. + """ + + if name is None: + try: + name = success.__name__ + except AttributeError: + name = str(success) + + ret = Promise(name=name) + + state = self._state + if state == self.PENDING: + """ + If this is still pending, then add callbacks to the + existing promise that call either the success or + rejected functions supplied and then fulfill the + promise being returned by this method + """ + + def callAndFulfill(v): + """ + A callback to be invoked if the "self promise" + is fulfilled. + """ + try: + # From 3.2.1, don't call non-functions values + if callable(success): + newvalue = self._invoke(success, v) + if _isPromise(newvalue): + newvalue.then(ret._fulfill, + ret._reject) + else: + ret._fulfill(newvalue) + else: + # From 3.2.6.4 + ret._fulfill(v) + except Exception as e: + ret._reject(e) + + def callAndReject(r): + """ + A callback to be invoked if the "self promise" + is rejected. + """ + try: + if callable(failure): + newvalue = failure(r) + if _isPromise(newvalue): + newvalue.then(ret._fulfill, + ret._reject) + else: + ret._fulfill(newvalue) + else: + # From 3.2.6.5 + ret._reject(r) + except Exception as e: + ret._reject(e) + + self._callbacks.append(callAndFulfill) + self._errbacks.append(callAndReject) + + elif state == self.FULFILLED: + # If this promise was already fulfilled, then + # we need to use the first argument to this method + # to determine the value to use in fulfilling the + # promise that we return from this method. + try: + if callable(success): + newvalue = self._invoke(success, self.value) + if _isPromise(newvalue): + newvalue.then(ret._fulfill, + lambda r: ret._reject(r, bubbling=True)) + else: + ret._fulfill(newvalue) + else: + # From 3.2.6.4 + ret._fulfill(self.value) + except Exception as e: + ret._reject(e) + else: + # If this promise was already rejected, then + # we need to use the second argument to this method + # to determine the value to use in fulfilling the + # promise that we return from this method. + try: + if callable(failure): + newvalue = self._invoke(failure, self.reason) + if _isPromise(newvalue): + newvalue.then(ret._fulfill, + ret._reject) + else: + ret._fulfill(newvalue) + else: + # From 3.2.6.5 + ret._reject(self.reason, bubbling=True) + except Exception as e: + ret._reject(e) + + return ret + + +def _isPromise(obj): + """ + A utility function to determine if the specified + object is a promise using "duck typing". + """ + if isinstance(obj, Promise): + return True + + try: + return callable(obj.fulfill) and callable(obj.reject) and\ + callable(obj.then) + except AttributeError: + return False + + +def listPromise(*args): + """ + A special function that takes a bunch of promises + and turns them into a promise for a vector of values. + In other words, this turns an list of promises for values + into a promise for a list of values. 
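+
+    A minimal usage sketch (p1 and p2 stand for pending Promise instances
+    created elsewhere, handle_values for a hypothetical callback):
+
+        combined = listPromise(p1, p2)
+        combined.then(handle_values)  # handle_values receives [p1.value, p2.value]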
+ """ + ret = Promise() + + def handleSuccess(v, ret): + for arg in args: + if not arg.isFulfilled(): + return + + value = map(lambda p: p.value, args) + ret._fulfill(value) + + for arg in args: + arg.addCallback(lambda v: handleSuccess(v, ret)) + arg.addErrback(lambda r: ret.reject(r)) + + # Check to see if all the promises are already fulfilled + handleSuccess(None, ret) + + return ret + + +def dictPromise(m): + """ + A special function that takes a dictionary of promises + and turns them into a promise for a dictionary of values. + In other words, this turns an dictionary of promises for values + into a promise for a dictionary of values. + """ + ret = Promise() + + def handleSuccess(v, ret): + for p in m.values(): + if not p.isFulfilled(): + return + + value = {} + for k in m: + value[k] = m[k].value + ret.fulfill(value) + + for p in m.values(): + p.addCallback(lambda v: handleSuccess(v, ret)) + p.addErrback(lambda r: ret.reject(r)) + + # Check to see if all the promises are already fulfilled + handleSuccess(None, ret) + + return ret + + +class BackgroundThread(Thread): + def __init__(self, promise, func): + self.promise = promise + self.func = func + Thread.__init__(self) + + def run(self): + try: + val = self.func() + self.promise.fulfill(val) + except Exception as e: + self.promise.reject(e) + + +def background(f): + p = Promise() + t = BackgroundThread(p, f) + t.start() + return p + + +def spawn(f): + from gevent import spawn + + p = Promise() + + def process(): + try: + val = f() + p.fulfill(val) + except Exception as e: + p.reject(e) + + spawn(process) + return p + + +def FulfilledPromise(result): + p = Promise() + p.fulfill(result) + return p + + +def RejectedPromise(error): + p = Promise() + p.reject(error) + return p diff --git a/build/lib/futile/StringIO/__init__.py b/build/lib/futile/StringIO/__init__.py new file mode 100644 index 0000000..40a6c72 --- /dev/null +++ b/build/lib/futile/StringIO/__init__.py @@ -0,0 +1,4 @@ +try: + from StringIO import StringIO +except ImportError: + from io import StringIO diff --git a/build/lib/futile/__init__.py b/build/lib/futile/__init__.py new file mode 100644 index 0000000..fc280c1 --- /dev/null +++ b/build/lib/futile/__init__.py @@ -0,0 +1,82 @@ +from futile.basictypes import basestring, BASE_STR +from futile.logging import LoggerMixin + +Base = LoggerMixin + + +class NOT_SET(object): + __slots__ = () + + def __bool__(self): + return False + __nonzero__ = __bool__ + + def __str__(self): + return "" + +NOT_SET = NOT_SET() +DEFAULT_ENCODING = "utf-8" +DEFAULT_CHUNK_SIZE = 128 * 1024 +THREADSAFE = True + + +def noop(*args, **kw): + pass + + +def not_implemented(*args, **kw): + raise NotImplementedError() + + +def tostr(o): + if isinstance(o, basestring): + return o + return BASE_STR(o) + + +if basestring == str: + uc = tostr + encstr = not_implemented +else: + def uc(s): + if isinstance(s, unicode): + return s + if isinstance(s, basestring): + return s.decode(DEFAULT_ENCODING) + return unicode(s) + + def encstr(s): + if isinstance(s, str): + return s + if not isinstance(s, unicode): + s = unicode(s) + return s.encode(DEFAULT_ENCODING) + + +def identity(x): + return x + +_isc = issubclass + + +def issubclass(o, classes): + "A safer version of __builtin__.issubclass that does not raise TypeError when called with a non-type object" + + return isinstance(o, type) and _isc(o, classes) + +try: + callable +except NameError: + def callable(x): + return hasattr(x, "__call__") + + +class ObjectProxy(object): + __slots__ = ("_o") + + def 
__init__(self, proxyobject, *args, **kw): + super(ObjectProxy, self).__init__(*args, **kw) + self._o = proxyobject + + def __getattr__(self, k): + return getattr(self._o, k) diff --git a/build/lib/futile/abchelper.py b/build/lib/futile/abchelper.py new file mode 100644 index 0000000..8b66cbb --- /dev/null +++ b/build/lib/futile/abchelper.py @@ -0,0 +1,12 @@ +''' +Created on 13.11.2012 + +@author: kca +''' + +try: + from abc import ABCMeta, abstractmethod, abstractproperty +except ImportError: + from futile import identity + ABCMeta = type + abstractmethod = abstractproperty = identity diff --git a/build/lib/futile/basictypes.py b/build/lib/futile/basictypes.py new file mode 100644 index 0000000..8230e95 --- /dev/null +++ b/build/lib/futile/basictypes.py @@ -0,0 +1,20 @@ +''' +Created on 11.05.2013 + +@author: kca +''' + +try: + from types import ClassType +except ImportError: + ClassType = type + +try: + basestring = basestring +except NameError: + basestring = str + +try: + BASE_STR = unicode +except NameError: + BASE_STR = str diff --git a/build/lib/futile/caching/__init__.py b/build/lib/futile/caching/__init__.py new file mode 100644 index 0000000..0c79571 --- /dev/null +++ b/build/lib/futile/caching/__init__.py @@ -0,0 +1,63 @@ +''' +Created on 17.07.2011 + +@author: kca +''' + +from ..collections import OrderedDict +import futile + +class LRUCache(OrderedDict): + max_items = 100 + + def __init__(self, max_items = None, threadsafe = None, *args, **kw): + super(LRUCache, self).__init__(*args, **kw) + if max_items is not None: + if max_items <= 0: + raise ValueError(max_items) + self.max_items = max_items + + if threadsafe is None: + threadsafe = futile.THREADSAFE + + if threadsafe: + from threading import RLock + self.__lock = RLock() + else: + self.__lock = None + self.__getitem__ = self._getitem + self.__setitem__ = self._setitem + + def __getitem__(self, k): + if self.__lock is None: + return self._getitem(k) + with self.__lock: + return self._getitem(k) + + def get(self, k, default = None): + try: + return self[k] + except KeyError: + return default + + def _getitem(self, k): + v = super(LRUCache, self).__getitem__(k) + del self[k] + super(LRUCache, self).__setitem__(k, v) + return v + + def __iter__(self): + for k in tuple(super(LRUCache, self).__iter__()): + yield k + + def __setitem__(self, k, v): + if self.__lock is None: + return self._setitem(k, v) + with self.__lock: + self._setitem(k, v) + + def _setitem(self, k, v): + super(LRUCache, self).__setitem__(k, v) + if len(self) > self.max_items: + self.popitem(False) + \ No newline at end of file diff --git a/build/lib/futile/collections/OrderedSet.py b/build/lib/futile/collections/OrderedSet.py new file mode 100644 index 0000000..5de43a7 --- /dev/null +++ b/build/lib/futile/collections/OrderedSet.py @@ -0,0 +1,110 @@ +# Copyright (C) 2009 Raymond Hettinger + +# *** MIT License *** +# Permission is hereby granted, free of charge, to any person obtaining a copy of +# this software and associated documentation files (the "Software"), to deal in +# the Software without restriction, including without limitation the rights to +# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +# of the Software, and to permit persons to whom the Software is furnished to do +# so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+ +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +## {{{ http://code.activestate.com/recipes/576694/ (r7) + +# kca: fixed exception at interpreter shutdown +# kca: added list methods + +import collections + +KEY, PREV, NEXT = range(3) + +class OrderedSet(collections.MutableSet): + + def __init__(self, iterable=None): + self.end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.map = {} # key --> [key, prev, next] + if iterable is not None: + self |= iterable + + def __len__(self): + return len(self.map) + + def __contains__(self, key): + return key in self.map + + def add(self, key): + if key not in self.map: + end = self.end + curr = end[PREV] + curr[NEXT] = end[PREV] = self.map[key] = [key, curr, end] + append = add + + def discard(self, key): + _KEY, PREV, NEXT = 0, 1, 2 + if key in self.map: + key, prev, next = self.map.pop(key) + prev[NEXT] = next + next[PREV] = prev + + def __iter__(self): + end = self.end + curr = end[NEXT] + while curr is not end: + yield curr[KEY] + curr = curr[NEXT] + + def __reversed__(self): + KEY, PREV, NEXT = 0, 1, 2 + end = self.end + curr = end[PREV] + while curr is not end: + yield curr[KEY] + curr = curr[PREV] + + def pop(self, last=True): + # changed default to last=False - by default, treat as queue. + if not self: + raise KeyError('set is empty') + key = next(reversed(self)) if last else next(iter(self)) + self.discard(key) + return key + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + if isinstance(other, OrderedSet): + return len(self) == len(other) and list(self) == list(other) + return set(self) == set(other) + + def __del__(self): + self.clear() # remove circular references + + def __getitem__(self, index): + return list(self)[index] + + +if __name__ == '__main__': + print(OrderedSet('abracadaba')) + print(OrderedSet('simsalabim')) +## end of http://code.activestate.com/recipes/576694/ }}} + +## kca: + print OrderedSet('simsalabim')[1] + + # Test case for exception at shutdown (yes, really...) 
+ x = OrderedSet('simsalabim') + diff --git a/build/lib/futile/collections/__init__.py b/build/lib/futile/collections/__init__.py new file mode 100644 index 0000000..e5569b4 --- /dev/null +++ b/build/lib/futile/collections/__init__.py @@ -0,0 +1,44 @@ +''' +Created on 17.07.2011 + +@author: kca +''' + +import futile +from futile.basictypes import basestring + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict + +from abc import ABCMeta +from collections import Iterable, Sequence + + +def is_iterable(o): + return isinstance(o, Iterable) and not isinstance(o, basestring) + + +def get_iterable(o): + if o is None: + return () + return ((not isinstance(o, Iterable) or isinstance(o, basestring)) + and (o,) or o) + + +def get_list(o): + if o is None: + return [] + return ((not isinstance(o, Iterable) or isinstance(o, basestring)) + and [o] or list(o)) + + +def yield_buffer(buffer, chunk_size=None): + chunk_size = chunk_size or futile.DEFAULT_CHUNK_SIZE + + while True: + chunk = buffer.read(chunk_size) + if not chunk: + return + yield chunk diff --git a/build/lib/futile/collections/ordereddict.py b/build/lib/futile/collections/ordereddict.py new file mode 100644 index 0000000..55b4bad --- /dev/null +++ b/build/lib/futile/collections/ordereddict.py @@ -0,0 +1,127 @@ +# Copyright (c) 2009 Raymond Hettinger +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation files +# (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, +# publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
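+# This is Raymond Hettinger's pure-Python OrderedDict recipe: key order is kept in
+# a doubly linked list of [key, prev, next] nodes plus a key -> node map, giving
+# insertion-order iteration and O(1) deletion. UserDict.DictMixin exists only on
+# Python 2, so this module is strictly a Python 2 fallback.
+#
+# Illustrative behaviour (assumed, matching the recipe):
+#   d = OrderedDict()
+#   d['b'] = 1
+#   d['a'] = 2
+#   list(d)                # -> ['b', 'a']
+#   d.popitem(last=False)  # -> ('b', 1), i.e. the oldest entry first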
+ +from UserDict import DictMixin + + +class OrderedDict(dict, DictMixin): + def __init__(self, *args, **kwds): + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__end + except AttributeError: + self.clear() + self.update(*args, **kwds) + + def clear(self): + self.__end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.__map = {} # key --> [key, prev, next] + dict.clear(self) + + def __setitem__(self, key, value): + if key not in self: + end = self.__end + curr = end[1] + curr[2] = end[1] = self.__map[key] = [key, curr, end] + dict.__setitem__(self, key, value) + + def __delitem__(self, key): + dict.__delitem__(self, key) + key, prev, next = self.__map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.__end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.__end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def popitem(self, last=True): + if not self: + raise KeyError('dictionary is empty') + if last: + key = reversed(self).next() + else: + key = iter(self).next() + value = self.pop(key) + return key, value + + def __reduce__(self): + items = [[k, self[k]] for k in self] + tmp = self.__map, self.__end + del self.__map, self.__end + inst_dict = vars(self).copy() + self.__map, self.__end = tmp + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def keys(self): + return list(self) + + setdefault = DictMixin.setdefault + update = DictMixin.update + pop = DictMixin.pop + values = DictMixin.values + items = DictMixin.items + iterkeys = DictMixin.iterkeys + itervalues = DictMixin.itervalues + iteritems = DictMixin.iteritems + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + + def copy(self): + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + if isinstance(other, OrderedDict): + if len(self) != len(other): + return False + for p, q in zip(self.items(), other.items()): + if p != q: + return False + return True + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other diff --git a/build/lib/futile/collections/sortedlist.py b/build/lib/futile/collections/sortedlist.py new file mode 100644 index 0000000..3a08355 --- /dev/null +++ b/build/lib/futile/collections/sortedlist.py @@ -0,0 +1,38 @@ +try: + from blist import sortedlist +except ImportError: + from futile.logging import get_logger + from heapq import heappush, heappop, heapify + + get_logger(__name__).warning("blist.sortedlist is not available. 
Using a fallback implementation") + + class sortedlist(object): + def __init__(self, iterable=(), *args, **kw): + super(sortedlist, self).__init__(*args, **kw) + + l = self._list = list(iterable) + + if iterable is not None: + heapify(l) + + def add(self, v): + heappush(self._list, v) + + def pop(self, index=-1): + if index != 0: + raise NotImplementedError() + + return heappop(self._list) + + def remove(self, object): + self._list.remove(object) + heapify(self._list) + + def __getitem__(self, index): + if index != 0: + raise NotImplementedError() + + return self._list[index] + + def __len__(self): + return len(self._list) diff --git a/build/lib/futile/contextlib.py b/build/lib/futile/contextlib.py new file mode 100644 index 0000000..2e16135 --- /dev/null +++ b/build/lib/futile/contextlib.py @@ -0,0 +1,21 @@ +''' +Created on 14.07.2011 + +@author: kca +''' +from futile import ObjectProxy + +class closing(ObjectProxy): + def __enter__(self): + return self._o + + def __exit__(self, exc_type, exc_val, exc_tb): + self._o.close() + + +class exiting(ObjectProxy): + def __enter__(self): + return self._o + + def __exit__(self, exc_type, exc_val, exc_tb): + self._o.__exit__(exc_type, exc_val, exc_tb) \ No newline at end of file diff --git a/build/lib/futile/etree.py b/build/lib/futile/etree.py new file mode 100644 index 0000000..da9677b --- /dev/null +++ b/build/lib/futile/etree.py @@ -0,0 +1,44 @@ +''' +Created on 25.07.2011 + +@author: kca +''' + +import sys +from .logging import get_logger + +try: + from lxml import etree as impl + from lxml.etree import tostring as _ts + + get_logger(__name__).debug("Using lxml etree implementation1.") + + def tostring(element, encoding="utf-8", pretty_print=False): + return _ts(element, encoding=encoding, pretty_print=pretty_print) +except ImportError: + logger = get_logger(__name__) + logger.warning( + "lxml library not found, trying builtin ElementTree implementations. Pretty printing will be disabled.") + try: + from xml.etree import cElementTree as impl + + try: + impl.ParseError = impl.XMLParserError + except AttributeError: + pass + logger.debug("Using native xml.etree.cElementTree") + except ImportError: + from xml.etree import ElementTree as impl + + logger.debug("Using python xml.etree.ElementTree") + + _ts = impl.tostring + + def tostring(element, encoding="utf-8", pretty_print=False): + return _ts(element, encoding=encoding) + + impl.tostring = tostring + impl.XMLSyntaxError = impl.ParseError + +sys.modules[__name__ + ".impl"] = sys.modules[__name__ + ".ElementTree"] = ElementTree = impl + diff --git a/build/lib/futile/exc.py b/build/lib/futile/exc.py new file mode 100644 index 0000000..94b89ab --- /dev/null +++ b/build/lib/futile/exc.py @@ -0,0 +1,22 @@ +''' +Created on 14.07.2011 + +@author: kca +''' + +from . 
import issubclass + +def errorstr(e): + try: + message = e.message + except AttributeError: + message = str(e) + else: + if not message: + message = str(e) + return message + +def raise_error(e): + if isinstance(e, Exception) or (isinstance(e, type) and issubclass(e, Exception)): + raise e + raise Exception(e) diff --git a/build/lib/futile/logging/__init__.py b/build/lib/futile/logging/__init__.py new file mode 100644 index 0000000..3e8a34e --- /dev/null +++ b/build/lib/futile/logging/__init__.py @@ -0,0 +1,230 @@ +""" +Created on 15.07.2011 + +@author: kca +""" +import logging +import logging.handlers +from futile.basictypes import ClassType, basestring +from futile.threading import current_thread +from logging import Filter +from futile.collections import get_iterable + +# statics +_handlers = [] +_formatter = logging.Formatter('%(asctime)s %(levelname)s - %(name)s: %(message)s') +_level = logging.NOTSET + +# log level constants for convenience +from logging import CRITICAL, FATAL, ERROR, WARNING, INFO, DEBUG, NOTSET + +CRITICAL = CRITICAL +FATAL = FATAL +ERROR = ERROR +WARNING = WARNING +INFO = INFO +DEBUG = DEBUG +NOTSET = NOTSET + + +def get_default_level(): + return _level + + +def set_default_level(l): + global _level + _level = l + logging.basicConfig(level=l) + + +# try: +# from colorlog import ColoredFormatter +# formatter = ColoredFormatter( +# "%(blue)s%(asctime)s %(log_color)s%(levelname) - 8s%(reset)s%(name)s: %(message)s", +# datefmt=None, +# reset=True, +# log_colors={ +# 'DEBUG': 'cyan', +# 'INFO': 'green', +# 'WARNING': 'yellow', +# 'ERROR': 'red', +# 'CRITICAL': 'red', +# } +# ) +# import logging +# hand = logging.StreamHandler() +# hand.setFormatter(formatter) +# futile.logging.add_handler( hand) +# except ImportError: +# pass +def get_default_formatter(): + return _formatter + + +def set_default_formatter(frmt): + global _formatter + if frmt and isinstance(frmt, logging.Formatter): + _formatter = frmt + else: + raise TypeError("Not a logging Formatter: %s" % (frmt, )) + + +def add_handler(h): + if not isinstance(h, logging.Handler): + raise TypeError(h) + + _handlers.append(h) + + +def add_log_file(path, level=None, formatter=None): + """ Adds a log file to all future loggers. + Files will be rotated depending on max_bytes and backups parameters. + + @param path: path to logfile + @param level: minimum log level + @param formatter: a logging.Formatter for this log file + """ + handler = logging.handlers.WatchedFileHandler(path) + handler.setFormatter(formatter or _formatter) + # TODO(rst): probably try/except is necessary + handler.setLevel(level or _level) + add_handler(handler) + + +def get_logger(logger_name=None, level=None): + level = level if level is not None else _level + # logging.basicConfig(level=level) + if logger_name: + if not isinstance(logger_name, basestring): + if not isinstance(logger_name, (type, ClassType)): + l_class = logger_name.__class__ + else: + l_class = logger_name + logger_name = l_class.__module__ + "." + l_class.__name__ + else: + logger_name = __name__ + + try: + logger = logging.getLogger(logger_name) + except Exception as e: + print ("Failed to get logger '%s': %s" % (logger_name, e)) + raise + + try: + logger.setLevel(level) # raises TypeError: not a valid string or int + except TypeError: + logger.setLevel(NOTSET) # TODO(rst): set another level if wrong level? 
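+    # Attach every handler registered globally via add_handler()/add_log_file(),
+    # so all loggers created through this helper share the same handler list.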
+ for h in _handlers: + logger.addHandler(h) + return logger + + +class LoggerMixin(object): + + log_file = None + log_level = None + + def __init__(self): + self.__logger = None + + @classmethod + def _get_logger(cls, logger_name=None): + logger = get_logger(logger_name, cls.log_level) + if cls.log_file: + formatter = get_default_formatter() + handler = logging.handlers.WatchedFileHandler(cls.log_file) + handler.setFormatter(formatter) + logger.addHandler(handler) + + return logger + + def get_logger(self): + try: + if self.__logger is not None: + return self.__logger + except AttributeError: + pass + self.__logger = l = self.get_class_logger() + return l + + def set_logger(self, logger): + self.__logger = logger + logger = property(get_logger, set_logger) + + @classmethod + def get_class_logger(cls): + try: + return cls.__dict__["__logger__"] + except KeyError: + l = cls.__logger__ = cls._get_logger(cls.__name__) + return l + + def __getstate__(self): + l = getattr(self, "_LoggerMixin__logger", None) + self.__logger = None + try: + sgs = super(LoggerMixin, self).__getstate__ + except AttributeError: + state = self.__dict__.copy() + else: + state = sgs() + self.__logger = l + return state + + +class ThreadFilter(Filter): + def __init__(self, thread=None, name=''): + Filter.__init__(self, name=name) + self.thread = thread or current_thread() + + def filter(self, record): + return current_thread() == self.thread + + +class ErrorLogger(LoggerMixin): + def __init__(self, name="operation", logger=None, + level=get_default_level(), *args, **kw): + super(ErrorLogger, self).__init__(*args, **kw) + if logger is not None: + self.logger = logger + self.name = name + self.log_level = level + assert level is not None + + def __enter__(self): + self.logger.debug("Entering %s", self.name) + return self + + def __exit__(self, type, value, traceback): + if type is not None: + self.logger.exception("Error in %s", self.name) + else: + self.logger.log(self.log_level, "%s finished", self.name) + + +def log_errors(f): + def _f(*args, **kw): + with ErrorLogger(f.__name__): + result = f(*args, **kw) + get_logger(f).debug("%s returning: %s", f.__name__, result) + return result + _f.__name__ = f.__name__ + return _f + + +def sanitize_dict(d, keys=("password",), replacement="*", inplace=False): + keys = get_iterable(keys) + if not inplace: + d = dict(d) + + if replacement is None: + for k in keys: + d.pop(k, None) + else: + for k in keys: + v = d[k] + if isinstance(v, basestring): + d[k] = replacement * len(v) + else: + d[k] = replacement + return d diff --git a/build/lib/futile/logging/handlers.py b/build/lib/futile/logging/handlers.py new file mode 100644 index 0000000..e8fee2c --- /dev/null +++ b/build/lib/futile/logging/handlers.py @@ -0,0 +1,14 @@ +''' +Created on 30.08.2011 + +@author: kca +''' + +from logging.handlers import BufferingHandler as _BufferingHandler + +class BufferingHandler(_BufferingHandler): + def __init__(self, capacity = None): + _BufferingHandler.__init__(self, capacity = capacity) + + def shouldFlush(self, record): + return self.capacity and super(BufferingHandler, self).shouldFlush(record) or False diff --git a/build/lib/futile/logging/logbook.py b/build/lib/futile/logging/logbook.py new file mode 100644 index 0000000..601e6bd --- /dev/null +++ b/build/lib/futile/logging/logbook.py @@ -0,0 +1,9 @@ +''' +Created on 30.08.2011 + +@author: kca +''' + +from collections import namedtuple + +Logbook = namedtuple("Logbook", ("name", "component", "entries")) diff --git 
a/build/lib/futile/logging/logtap.py b/build/lib/futile/logging/logtap.py new file mode 100644 index 0000000..9afbc57 --- /dev/null +++ b/build/lib/futile/logging/logtap.py @@ -0,0 +1,60 @@ +''' +Created on 29.08.2011 + +@author: kca +''' + +import logging +from . import ThreadFilter +from ..collections import get_list +from futile import NOT_SET +from logging import LogRecord, DEBUG +from futile.logging import ErrorLogger + +class LogTap(ErrorLogger): + def __init__(self, handler, logger = None, name = None, level = DEBUG, *args, **kw): + super(LogTap, self).__init__(name = name, logger = logger, level = level, *args, **kw) + handler = get_list(handler) + self.handlers = handler + self.target_logger = logger or logging.root + + def attach(self): + map(self.target_logger.addHandler, self.handlers) + + def detach(self): + for handler in self.handlers: + self.target_logger.removeHandler(handler) + handler.close() + + def emit(self, record): + for handler in self.handlers: + handler.emit(record) + + def __enter__(self): + self.attach() + return super(LogTap, self).__enter__() + + def __exit__(self, type, value, traceback): + super(LogTap, self).__exit__(type, value, traceback) + self.detach() + +class BufferingLogTap(LogTap): + log = None + + def __init__(self, handler = None, name = None, logger = None, level = DEBUG, capacity = None, memhandler = None, *args, **kw): + if not memhandler: + from handlers import BufferingHandler + memhandler = BufferingHandler(capacity) + memhandler.addFilter(ThreadFilter()) + self.memhandler = memhandler + handler = [ memhandler ] + get_list(handler) + super(BufferingLogTap, self).__init__(handler = handler, logger = logger, name = name, level = level, *args, **kw) + + def detach(self): + self.log = map(lambda r: isinstance(r, LogRecord) and self.memhandler.format(r) or r, self.memhandler.buffer) + super(BufferingLogTap, self).detach() + + def emit(self, record, level = NOT_SET): + if isinstance(record, LogRecord): + return super(BufferingLogTap, self).emit(record) + self.memhandler.buffer.append(record) diff --git a/build/lib/futile/multiprocess/RWLock.py b/build/lib/futile/multiprocess/RWLock.py new file mode 100644 index 0000000..de8ba85 --- /dev/null +++ b/build/lib/futile/multiprocess/RWLock.py @@ -0,0 +1,82 @@ +''' +Created on 30.04.2011 + +@author: kca +''' + +import os +from fcntl import lockf, LOCK_EX, LOCK_SH, LOCK_UN +from contextlib import contextmanager +from futile.signal import timeout + +class RWLock(object): + def __init__(self, path = None, threadsafe = True, *args, **kw): + if not path: + raise NotImplementedError() + + if not os.path.exists(path): + open(path, "a").close() + + self.__path = path + + if threadsafe: + import threading + self.__local = threading.local() + else: + class Local(object): + pass + self.__local = Local + + self.__local.f = None + + + @contextmanager + def read_transaction(self, timeout = None): + self.read_acquire(timeout = timeout) + try: + yield + finally: + self.read_release() + pass + pass + + @contextmanager + def write_transaction(self, timeout = None): + self.write_acquire(timeout = timeout) + try: + yield + finally: + self.write_release() + + def __acquire(self, fmode, lmode, to): + assert getattr(self.__local, "f", None) is None + f = open(self.__path, fmode) + try: + if timeout: + with timeout(to): + lockf(f, lmode) + else: + lockf(f, lmode) + except: + f.close() + raise + self.__local.f = f + return f + + def read_acquire(self, timeout = None): + return self.__acquire("r", LOCK_SH, timeout) + + def 
read_release(self): + with self.__local.f as f: + self.__local.f = None + lockf(f, LOCK_UN) + + write_release = read_release + + def write_acquire(self, timeout = None): + return self.__acquire("a", LOCK_EX, timeout) + + __enter__ = write_acquire + + def __exit__(self, *args): + self.write_release() diff --git a/build/lib/futile/multiprocess/__init__.py b/build/lib/futile/multiprocess/__init__.py new file mode 100644 index 0000000..b4a4a4e --- /dev/null +++ b/build/lib/futile/multiprocess/__init__.py @@ -0,0 +1,3 @@ +from RWLock import RWLock + +Lock = RWLock \ No newline at end of file diff --git a/build/lib/futile/net/PortTester.py b/build/lib/futile/net/PortTester.py new file mode 100644 index 0000000..56290cc --- /dev/null +++ b/build/lib/futile/net/PortTester.py @@ -0,0 +1,83 @@ +''' +Created on 15.07.2011 + +@author: kca +''' + +from asyncore import dispatcher, loop +from socket import AF_INET, SOCK_STREAM, error +from sockethelper import socket +from futile.exc import errorstr +from collections import namedtuple +import sys +from time import time + +class TestResult(namedtuple("TestResultTuple", ("result", "message"))): + def __new__(cls, result, message = ""): + return super(TestResult, cls).__new__(cls, result, message) + + def __bool__(self): + return self.result + __nonzero__ = __bool__ + + def __str__(self): + if self.message: + return "%s - %s" % (self.result, self.message) + return str(self.result) + + def __eq__(self, o): + try: + return self.result == o.result + except AttributeError: + return False + + def __ne__(self, o): + return not (self == o) + +def test_port(host, port, family = AF_INET, type = SOCK_STREAM): + try: + with socket(family, type) as s: + s.connect((host, port)) + except error, e: + return TestResult(False, "%s (%d)" % (e.strerror, e.errno)) + except Exception, e: + return TestResult(False, errorstr(e)) + return TestResult(True) + +class PortTester(dispatcher): + result = TestResult(False, "Test did not run") + + def __init__(self, host, port, family = AF_INET, type = SOCK_STREAM, map = None): + dispatcher.__init__(self, map = map) + self.create_socket(family, type) + self.connect((host, port)) + self.host = host + self.port = port + + def handle_connect(self): + self.result = TestResult(True) + self.close() + + def handle_error(self): + self.result = TestResult(False, errorstr(sys.exc_value)) + self.close() + +def run_test(map, timeout = 0.0): + if timeout and timeout > 0.0: + timeout = float(timeout) + start = time() + while True: + loop(map = map, timeout = timeout, count = 1) + if map: + now = time() + timeout -= now - start + if timeout <= 0.0: + for r in map.itervalues(): + r.result = TestResult(False, "Timeout") + break + start = now + else: + break + else: + loop(map = map) + \ No newline at end of file diff --git a/build/lib/futile/net/__init__.py b/build/lib/futile/net/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/build/lib/futile/net/exc.py b/build/lib/futile/net/exc.py new file mode 100644 index 0000000..1a04adf --- /dev/null +++ b/build/lib/futile/net/exc.py @@ -0,0 +1,4 @@ + + +class NetworkError(Exception): + pass diff --git a/build/lib/futile/net/http/__init__.py b/build/lib/futile/net/http/__init__.py new file mode 100644 index 0000000..19ba75d --- /dev/null +++ b/build/lib/futile/net/http/__init__.py @@ -0,0 +1,96 @@ +''' +Created on 17.07.2011 + +@author: kca +''' +try: + from httplib import HTTPConnection as _HTTPConnection, HTTPSConnection as _HTTPSConnection +except ImportError: + from http.client import 
HTTPConnection as _HTTPConnection, HTTPSConnection as _HTTPSConnection + +from futile.contextlib import closing +from futile import NOT_SET +import socket +from . import exc as _exc +import sys +import types + +try: + from urllib import quote, quote_plus, unquote, unquote_plus +except ImportError: + from urllib.parse import quote, quote_plus, unquote, unquote_plus + +class HTTPResponseWrapper(object): + def __init__(self, connection, response, *args, **kw): + super(HTTPResponseWrapper, self).__init__(*args, **kw) + + self.__response = response + self.__connection = connection + + #def __del__(self): + # self.close() + + def __getattr__(self, k): + return getattr(self.__response, k) + + def __enter__(self): + return self.__response + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def close(self): + try: + self.__response.close() + except: + pass + finally: + self.__connection.close() + +class HTTPConnection(_HTTPConnection): + response_wrapper = closing + + def __init__(self, host, port=None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, response_wrapper = NOT_SET): + _HTTPConnection.__init__(self, host, port, strict, timeout, source_address) + if response_wrapper is not NOT_SET: + self.response_wrapper = response_wrapper + + def getresponse(self, buffering = False): + r = _HTTPConnection.getresponse(self, buffering) + if self.response_wrapper: + r = self.response_wrapper(r) + return r + +class HTTPSConnection(_HTTPSConnection): + response_wrapper = closing + + def __init__(self, host, port=None, key_file = None, cert_file = None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, response_wrapper = NOT_SET): + _HTTPSConnection.__init__(self, host, port, key_file = key_file, cert_file = cert_file, strict = strict, timeout = timeout, source_address = source_address) + if response_wrapper is not NOT_SET: + self.response_wrapper = response_wrapper + + def getresponse(self, buffering = False): + r = _HTTPSConnection.getresponse(self, buffering) + if self.response_wrapper: + r = self.response_wrapper(r) + return r + + +class exc(types.ModuleType): + def __getattr__(self, k): + try: + v = getattr(_exc, k) + except AttributeError: + if not k.startswith("HTTPError"): + raise + v = _exc.get_error_class(k[9:]) + setattr(self, k, v) + return v + + +name = __name__ + ".exc" +exc = exc(name) +sys.modules[name] = exc +del name + + \ No newline at end of file diff --git a/build/lib/futile/net/http/client/ConnectionPoolManager.py b/build/lib/futile/net/http/client/ConnectionPoolManager.py new file mode 100644 index 0000000..a33e262 --- /dev/null +++ b/build/lib/futile/net/http/client/ConnectionPoolManager.py @@ -0,0 +1,55 @@ +''' +Created on 19.03.2013 + +@author: kca +''' + +from logging import DEBUG, WARNING +import futile.logging +import urllib3.connectionpool +from urllib3.connectionpool import HTTPConnectionPool, HTTPSConnectionPool +from futile.logging import LoggerMixin +from futile import ObjectProxy + +if not futile.logging.get_logger().isEnabledFor(DEBUG): + urllib3.connectionpool.log.setLevel(WARNING) + +class Urllib3ResponseWrapper(ObjectProxy): + def getheader(self, header, default=None): + return self._o.getheader(header.lower(), default) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def close(self): + self._o.release_conn() + + def isclosed(self): + return False + +class ConnectionPoolManager(LoggerMixin): + def __init__(self, host, port, certfile = None, 
keyfile = None, cacertfile=None, force_ssl = False, *args, **kw): + super(ConnectionPoolManager, self).__init__(*args, **kw) + + self.logger.debug("Creating ConnectionPoolManager for %s:%s", host, port) + + if certfile or keyfile or force_ssl: + #https://docs.python.org/2/library/ssl.html#ssl.SSLContext + from ssl import SSLContext, PROTOCOL_SSLv23 + ssl_context=SSLContext(PROTOCOL_SSLv23) + ssl_context.load_cert_chain(certfile = certfile, keyfile = keyfile) + ssl_context.load_verify_locations(cafile=cacertfile) + #https://docs.python.org/2/library/httplib.html + self.__pool = HTTPSConnectionPool(host, port, maxsize = 16, context = ssl_context) + else: + self.__pool = HTTPConnectionPool(host, port, maxsize = 16) + + def request(self, method, path, body, headers, timeout): + return Urllib3ResponseWrapper(self.__pool.urlopen(method, path, body, + headers, timeout = timeout, pool_timeout = 30, preload_content = False, assert_same_host = False)) + + + \ No newline at end of file diff --git a/build/lib/futile/net/http/client/RestClient.py b/build/lib/futile/net/http/client/RestClient.py new file mode 100644 index 0000000..3a253a7 --- /dev/null +++ b/build/lib/futile/net/http/client/RestClient.py @@ -0,0 +1,353 @@ +''' +Created on 21.05.2011 + +@author: kca +''' + +from base64 import b64encode +from cStringIO import StringIO +from datetime import datetime +from logging import DEBUG +from socket import getservbyname +from time import time +from urllib import quote_plus +from urllib2 import quote +from urlparse import urlparse + +from futile import ObjectProxy +from futile.logging import LoggerMixin +from futile.net.http.exc import NetworkError, HTTPError + + +def compose_qs(values): + return "&".join([ "%s=%s" % (quote(k), quote(v)) for k, v in dict(values).iteritems() ]) + +class LoggingResponseWrapper(LoggerMixin, ObjectProxy): + def __init__(self, response, *args, **kw): + super(LoggingResponseWrapper, self).__init__(proxyobject = response, *args, **kw) + self.__buffer = StringIO() + self.__finalized = False + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def __enter__(self): + return self + + def read(self, n = None): + s = self._o.read(n) + self.__buffer.write(s) + return s + + def readline(self): + s = self._o.readline() + self.__buffer.write(s) + return s + + def readlines(self, sizehint = None): + lines = self._o.readlines(sizehint) + self.__buffer.write(''.join(lines)) + return lines + + def close(self): + if self.__finalized: + self.logger.debug("%s is already finalized" % (self, )) + return + + self.__finalized = True + try: + if not self._o.isclosed(): + self.__buffer.write(self._o.read()) + self.logger.debug("Read data:\n %s", self.__buffer.getvalue()) + except: + self.logger.exception("Finalizing response failed") + finally: + self._o.close() + + self.__buffer.close() + + +class CachingHttplibResponseWrapper(ObjectProxy, LoggerMixin): + def __init__(self, response, path, tag, last_modified, cache, *args, **kw): + super(CachingHttplibResponseWrapper, self).__init__(proxyobject = response, *args, **kw) + self.__cache = cache + self.__buffer = StringIO() + self.__path = path + self.__tag = tag + self.__last_modified = last_modified + self.__finalized = False + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def __enter__(self): + return self + + def read(self, n = None): + s = self._o.read(n) + self.__buffer.write(s) + return s + + def readline(self): + s = self._o.readline() + self.__buffer.write(s) + return s + + def readlines(self, sizehint 
= None): + lines = self._o.readlines(sizehint) + self.__buffer.write(''.join(lines)) + return lines + + def close(self): + if self.__finalized: + self.logger.debug("%s is already finalized" % (self, )) + return + + self.__finalized = True + try: + if not self._o.isclosed(): + self.__buffer.write(self._o.read()) + val = self.__buffer.getvalue() + self.logger.debug("Putting to cache: %s -> %s, %s\n %s", self.__path, self.__tag, self.__last_modified, val) + self.__cache[self.__path] = (self.__tag, self.__last_modified, val) + except: + self.logger.exception("Finalizing response failed") + finally: + self._o.close() + + self.__buffer.close() + + def __getattr__(self, name): + return getattr(self._o, name) + + +class closing(ObjectProxy): + def __getattr__(self, k): + return getattr(self._o, k) + + def __enter__(self): + return self._o + + def __exit__(self, exc_type, exc_val, exc_tb): + self._o.close() + + def close(self): + self._o.close() + + +class RestClient(LoggerMixin): + ERROR_RESPONSE_MAX = 320 + + get_timeout = timeout = 120.0 + + def __init__(self, uri, username=None, password=None, certfile=None, + keyfile=None, cacertfile=None, content_type="text/plain", + headers=None, + cache=True, timeout=None, get_timeout=None, + component_name = "server", connection_manager = None, + *args, **kw): + super(RestClient, self).__init__(*args, **kw) + + self.logger.debug("Creating RestClient for %s", uri) + + self.timeout = timeout or self.timeout + self.get_timeout = get_timeout or timeout or self.get_timeout + + if cache: + if cache is True: + from futile.caching import LRUCache + cache = LRUCache() + self.__cache = cache + + if "://" not in uri: + uri = "http://" + uri + + self.__content_type = content_type + self.component_name = component_name + + info = urlparse(uri) + + self.logger.debug("Restclient certfile is %s"%certfile) + if info.scheme == "https": + if bool(certfile) ^ bool(keyfile): + raise ValueError("Must give both certfile and keyfile if any") + if certfile: + from os.path import exists + if not exists(certfile): + raise ValueError("Certificate file not found: %s" % (certfile, )) + if not exists(keyfile): + raise ValueError("Key file not found: %s" % (keyfile, )) + elif info.scheme != "http": + raise ValueError(info.scheme) + else: + # In case of http, we do not want any certificates + keyfile = certfile = None + + port = info.port and int(info.port) or getservbyname(info.scheme) + + self.__base = info.path or "" + #if not self.__base.endswith("/"): + # self.__base += "/" + + if not username: + username = info.username + + if not headers: + headers = {} + + headers.setdefault("Accept", "*/*") + headers["Accept-Encoding"] = "identity" + + if username: + password = password or info.password or "" + headers["Authorization"] = "Basic " + b64encode("%s:%s" % (username, password)) + + self.__headers = headers + + if not connection_manager: + #from SimpleConnectionManager import SimpleConnectionManager as connection_manager + from ConnectionPoolManager import ConnectionPoolManager as connection_manager + + self.__connection_manager = connection_manager(host=info.hostname, + port=port, + certfile = certfile, keyfile = keyfile, cacertfile = cacertfile, force_ssl = info.scheme == "https") + + def set_authinfo(self, username, password=""): + if not username: + self.__headers.pop("Authorization") + else: + self.__headers["Authorization"] = "Basic " + b64encode("%s:%s" % (quote_plus(username), password)) + + def request(self, method, path, data = None, headers = {}, args = None): + if 
isinstance(data, unicode): + data = data.encode("utf-8") + + fullpath = self.__base + path + + request_headers = self.__headers.copy() + + if args: + fullpath += ("?" in fullpath and "&" or "?") + compose_qs(args) + + if headers: + request_headers.update(headers) + + if method == "GET": + timeout = self.get_timeout + if self.__cache: + try: + etag, modified, cached = self.__cache[fullpath] + if etag: + request_headers["If-None-Match"] = etag + request_headers["If-Modified-Since"] = modified + except KeyError: + request_headers.pop("If-None-Match", None) + request_headers.pop("If-Modified-Since", None) + else: + timeout = self.timeout + + if data: + request_headers.setdefault("Content-Type", self.__content_type) + if hasattr(data, "read") and not hasattr(data, "fileno"): + data = data.read() + + log_headers = request_headers + #if self.logger.isEnabledFor(DEBUG) and "Authorization" in request_headers: + #log_headers = request_headers.copy() + #log_headers["Authorization"] = "" + + if method == "GET": + self.logger.debug("%s: %s (%s)", method, fullpath, log_headers) + else: + self.logger.debug("%s: %s (%s)\n%s", method, fullpath, log_headers, repr(data)) + + t = time() + try: + response = self.__connection_manager.request(method, fullpath, data, request_headers, timeout) + except Exception as e: + if self.logger.isEnabledFor(DEBUG): + self.logger.exception("Error during request") + if str(e) in ("", "''"): + e = repr(e) + try: + error_msg = "An error occurred while contacting the %s: %s. Request was: %s %s (%.4fs)" % (self.component_name, e, method, fullpath, time() - t) + except: + self.logger.exception("Failed to format error message.") + error_msg = "Error during request." + + raise NetworkError(error_msg) + + self.logger.debug("%s %s result: %s (%.4fs)", method, fullpath, response.status, time() - t) + r_status = response.status + if r_status == 304: + response.close() + try: + self.logger.debug("Using cached answer for %s (%s, %s):\n %s", fullpath, etag, modified, cached) + return closing(StringIO(cached)) + except NameError: + raise NetworkError("Error: The %s returned 304 though no cached version is available. Request was: %s %s" % (self.component_name, method, fullpath)) + if r_status == 302: + raise NotImplementedError("HTTP redirect") + if r_status < 200 or r_status >= 300: + with response: + via = response.getheader("Via") + try: + data = response.read(self.ERROR_RESPONSE_MAX and self.ERROR_RESPONSE_MAX + 1 or None) + if not data or (not self.logger.isEnabledFor(DEBUG) and "" in data): + data = "" + else: + if self.ERROR_RESPONSE_MAX and len(data) > self.ERROR_RESPONSE_MAX: + data = data[:self.ERROR_RESPONSE_MAX] + " (truncated)\n" + data = data.encode("utf-8") + except Exception as e: + data = "" % (e, ) + + if not data.endswith("\n"): + data += "\n" + + try: + msg = "Error during execution. The %s said: %s %s - %sRequest was: %s %s. " % (self.component_name, response.status, response.reason, data, method, fullpath) + except: + msg = "Error during execution. The %s said %s. " % (self.component_name, response.status) + + if via: + culprit = via.split(",")[0] + p = culprit.rfind("(") + if p >= 0 and culprit.endswith(")"): + culprit = culprit[p + 1:-1] + msg += "The error occurred after the request went through %s (Via: %s)." % (culprit, via) + else: + msg += "The error seems to have occurred at the %s (No Via header found in response)." 
% (self.component_name, ) + + raise HTTPError(msg=msg, status=response.status) + + if method == "DELETE": + try: + self.__cache.pop(fullpath, None) + except AttributeError: + pass + else: + etag = response.getheader("Etag") + modified = response.getheader("Last-Modified") + if self.__cache is not False and (etag or modified): + if not modified: + modified = datetime.utcnow().strftime("%a, %d %b %Y %X GMT") + response = CachingHttplibResponseWrapper(response, fullpath, etag, modified, self.__cache) + elif self.logger.isEnabledFor(DEBUG): + response = LoggingResponseWrapper(response) + + return response + + def get(self, path, headers = None, args = None): + return self.request("GET", path, headers = headers, args = args) + + def post(self, path, data, headers = None): + return self.request("POST", path, data, headers) + add = post + + def put(self, path, data, headers = None): + return self.request("PUT", path, data, headers) + update = put + + def delete(self, path, headers = None): + return self.request("DELETE", path, None, headers) diff --git a/build/lib/futile/net/http/client/RestClientAsync.py b/build/lib/futile/net/http/client/RestClientAsync.py new file mode 100644 index 0000000..8e9a491 --- /dev/null +++ b/build/lib/futile/net/http/client/RestClientAsync.py @@ -0,0 +1,327 @@ +''' +Created on 21.05.2011 + +@author: kca +''' + +from base64 import b64encode +from cStringIO import StringIO +from logging import DEBUG +from socket import getservbyname +from urllib2 import quote +from urlparse import urlparse + +#import vertx + +from aplus import Promise +from futile import ObjectProxy +from futile.logging import LoggerMixin +from futile.net.http.exc import NetworkError, HTTPError + + +def compose_qs(values): + return "&".join([ "%s=%s" % (quote(k), quote(v)) for k, v in dict(values).iteritems() ]) + +class LoggingResponseWrapper(LoggerMixin, ObjectProxy): + def __init__(self, response, *args, **kw): + super(LoggingResponseWrapper, self).__init__(proxyobject = response, *args, **kw) + self.__buffer = StringIO() + self.__finalized = False + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def __enter__(self): + return self + + def read(self, n = None): + s = self._o.read(n) + self.__buffer.write(s) + return s + + def readline(self): + s = self._o.readline() + self.__buffer.write(s) + return s + + def readlines(self, sizehint = None): + lines = self._o.readlines(sizehint) + self.__buffer.write(''.join(lines)) + return lines + + def close(self): + if self.__finalized: + self.logger.debug("%s is already finalized" % (self, )) + return + + self.__finalized = True + try: + if not self._o.isclosed(): + self.__buffer.write(self._o.read()) + self.logger.debug("Read data:\n %s", self.__buffer.getvalue()) + except: + self.logger.exception("Finalizing response failed") + finally: + self._o.close() + + self.__buffer.close() + + +class CachingHttplibResponseWrapper(ObjectProxy, LoggerMixin): + def __init__(self, response, path, tag, last_modified, cache, *args, **kw): + super(CachingHttplibResponseWrapper, self).__init__(proxyobject = response, *args, **kw) + self.__cache = cache + self.__buffer = StringIO() + self.__path = path + self.__tag = tag + self.__last_modified = last_modified + self.__finalized = False + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def __enter__(self): + return self + + def read(self, n = None): + s = self._o.read(n) + self.__buffer.write(s) + return s + + def readline(self): + s = self._o.readline() + self.__buffer.write(s) + return 
s + + def readlines(self, sizehint = None): + lines = self._o.readlines(sizehint) + self.__buffer.write(''.join(lines)) + return lines + + def close(self): + if self.__finalized: + self.logger.debug("%s is already finalized" % (self, )) + return + + self.__finalized = True + try: + if not self._o.isclosed(): + self.__buffer.write(self._o.read()) + val = self.__buffer.getvalue() + self.logger.debug("Putting to cache: %s -> %s, %s\n %s", self.__path, self.__tag, self.__last_modified, val) + self.__cache[self.__path] = (self.__tag, self.__last_modified, val) + except: + self.logger.exception("Finalizing response failed") + finally: + self._o.close() + + self.__buffer.close() + + def __getattr__(self, name): + return getattr(self._o, name) + + +class closing(ObjectProxy): + def __getattr__(self, k): + return getattr(self._o, k) + + def __enter__(self): + return self._o + + def __exit__(self, exc_type, exc_val, exc_tb): + self._o.close() + + def close(self): + self._o.close() + + +class RestClient(LoggerMixin): + ERROR_RESPONSE_MAX = 320 + + get_timeout = timeout = 120.0 + + def __init__(self, uri, username=None, password=None, certfile=None, + keyfile=None, content_type="text/plain", headers=None, + cache=True, timeout=None, get_timeout=None, + component_name = "server", connection_manager = None, + *args, **kw): + super(RestClient, self).__init__(*args, **kw) + + self.logger.debug("Creating RestClient for %s", uri) + + self.timeout = timeout or self.timeout + self.get_timeout = get_timeout or timeout or self.get_timeout + + if cache: + if cache is True: + from futile.caching import LRUCache + cache = LRUCache() + self.__cache = cache + + if "://" not in uri: + uri = "http://" + uri + + self.__content_type = content_type + self.component_name = component_name + + info = urlparse(uri) + + if info.scheme == "https": + if bool(certfile) ^ bool(keyfile): + raise ValueError("Must give both certfile and keyfile if any") + if certfile: + from os.path import exists + if not exists(certfile): + raise ValueError("Certificate file not found: %s" % (certfile, )) + if not exists(keyfile): + raise ValueError("Key file not found: %s" % (keyfile, )) + elif info.scheme != "http": + raise ValueError(info.scheme) + + port = info.port and int(info.port) or getservbyname(info.scheme) + + self.__base = info.path or "" + #if not self.__base.endswith("/"): + # self.__base += "/" + + if not username: + username = info.username + + if not headers: + headers = {} + + headers.setdefault("Accept", "*/*") + headers["Accept-Encoding"] = "identity" + + if username: + password = password or info.password or "" + headers["Authorization"] = "Basic " + b64encode("%s:%s" % (username, password)) + + self.__headers = headers + + #if not connection_manager: + # #from SimpleConnectionManager import SimpleConnectionManager as connection_manager + # from ConnectionPoolManager import ConnectionPoolManager as connection_manager + # + # self.__connection_manager = connection_manager(host = info.hostname, port = port, + # certfile = certfile, keyfile = keyfile, force_ssl = info.scheme == "https") + # + + self.client= vertx.create_http_client() + self.client.host = info.netloc.split(":")[0] + self.client.port = port + + #temporary test server + #import json + #self.srv = vertx.create_http_server() + #def srv_handle(re): + # re.response.put_header("Content-Type","application/json; charset=utf-8") + # re.response.put_header("Location","locationlocation.location") + # re.response.end(json.dumps({"One":"Two"})) + 
#self.srv.request_handler(srv_handle) + #self.srv.listen(5000) + + def request(self, method, path, data = None, headers = {}, args = None): + if isinstance(data, unicode): + data = data.encode("utf-8") + fullpath = self.__base + path + request_headers = self.__headers.copy() + + if args: + fullpath += ("?" in fullpath and "&" or "?") + compose_qs(args) + + if headers: + request_headers.update(headers) + + if method == "GET": + timeout = self.get_timeout + try: + etag, modified, cached = self.__cache[fullpath] + if etag: + request_headers["If-None-Match"] = etag + request_headers["If-Modified-Since"] = modified + except KeyError: + request_headers.pop("If-None-Match", None) + request_headers.pop("If-Modified-Since", None) + else: + timeout = self.timeout + request_headers.setdefault("Content-Type", self.__content_type) + + log_headers = request_headers + if self.logger.isEnabledFor(DEBUG) and "Authorization" in request_headers: + log_headers = request_headers.copy() + log_headers["Authorization"] = "" + + if method == "GET": + self.logger.debug("%s: %s (%s)", method, fullpath, log_headers) + else: + self.logger.debug("%s: %s (%s)\n%s", method, fullpath, log_headers, repr(data)) + + #t = time() + promise=Promise() + try: + #response = self.__connection_manager.request(method, fullpath, data, request_headers, timeout) + + def response_handler(resp): + if resp.status_code == 304: + try: + promise.fulfill(closing(StringIO(cached))) + except NameError: + promise.reject(NetworkError("Error: The %s returned 304 though no cached version is available. Request was: %s %s" % (self.component_name, method, fullpath))) + if resp.status_code < 200 or resp.status_code >= 300: + try: + promise.reject(HTTPError(msg = resp.status_message, status = resp.status_code)) + except: + promise.reject(HTTPError(msg = "Http error", status = response.status)) + else: + promise.fulfill(resp) + + req=self.client.request(method,fullpath,response_handler) + for head,value in request_headers.items(): + req.put_header(head,value) + if data: + req.chunked = True + req.write_str(data) + req.end() + + except Exception as e: + print "Exception triggered: %s"%e + promise.reject(e) + + return promise + + #if method == "DELETE": + # try: + # self.__cache.pop(fullpath, None) + # except AttributeError: + # pass + #else: + # etag = response.getheader("Etag") + # modified = response.getheader("Last-Modified") + # if etag or modified: + # if not modified: + # modified = datetime.utcnow().strftime("%a, %d %b %Y %X GMT") + # response = CachingHttplibResponseWrapper(response, fullpath, etag, modified, self.__cache) + # elif self.logger.isEnabledFor(DEBUG): + # response = LoggingResponseWrapper(response) + + + + + def get(self, path, headers = None, args = None): + p = self.request("GET", path, headers = headers, args = args) + return p + + def post(self, path, data, headers = None): + p = self.request("POST", path, data, headers) + return p + add = post + + def put(self, path, data, headers = None): + p = self.request("PUT", path, data) + return p + update = put + + def delete(self, path, headers = None): + p = self.request("DELETE", path, None, headers) + return p diff --git a/build/lib/futile/net/http/client/SimpleConnectionManager.py b/build/lib/futile/net/http/client/SimpleConnectionManager.py new file mode 100644 index 0000000..3b1b7dd --- /dev/null +++ b/build/lib/futile/net/http/client/SimpleConnectionManager.py @@ -0,0 +1,61 @@ +''' +Created on 19.03.2013 + +@author: kca +''' + +from httplib import HTTPConnection, HTTPSConnection 
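+# NOTE: httplib exists only on Python 2; a Python 3 port would mirror the fallback
+# already used in futile/net/http/__init__.py, e.g. (sketch):
+#   try:
+#       from httplib import HTTPConnection, HTTPSConnection
+#   except ImportError:
+#       from http.client import HTTPConnection, HTTPSConnection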
+from futile.logging import LoggerMixin + +class HttplibResponseWrapper(LoggerMixin): + def __init__(self, connection, *args, **kw): + super(HttplibResponseWrapper, self).__init__(*args, **kw) + + self.__response = connection.getresponse() + self.__connection = connection + + def __getattr__(self, k): + return getattr(self.__response, k) + + def __enter__(self): + return self.__response + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def close(self): + try: + self.__response.close() + except: + self.logger.exception("Error closing response") + finally: + self.__connection.close() + +class SimpleConnectionManager(LoggerMixin): + def __init__(self, host, port, certfile = None, keyfile = None, force_ssl = False, *args, **kw): + super(SimpleConnectionManager, self).__init__(*args, **kw) + + self.logger.debug("Creating SimpleConnectionManager for %s:%s", host, port) + + if keyfile or certfile or force_ssl: + self.__certfile = certfile + self.__keyfile = keyfile + self._get_connection = self._get_secure_connection + + self.__host = host + self.__port = port + + def request(self, method, path, body, headers, timeout): + connection = self._get_connection(timeout) + try: + connection.request(method, path, body, headers) + return HttplibResponseWrapper(connection) + except: + connection.close() + raise + + def _get_connection(self, timeout): + return HTTPConnection(self.__host, self.__port, timeout = timeout) + + def _get_secure_connection(self, timeout): + return HTTPSConnection(self.__host, self.__port, self.__keyfile, self.__certfile, timeout = timeout) diff --git a/build/lib/futile/net/http/client/__init__.py b/build/lib/futile/net/http/client/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/build/lib/futile/net/http/exc.py b/build/lib/futile/net/http/exc.py new file mode 100644 index 0000000..17cc26a --- /dev/null +++ b/build/lib/futile/net/http/exc.py @@ -0,0 +1,134 @@ +''' +Created on 21.07.2011 + +@author: kca +''' + + +from futile.net.exc import NetworkError + +STATUS_STRINGS = { + 100: "Continue", + 101: "Switching Protocols", + 200: "Ok", + 201: "Created", + 202: "Accepted", + 203: "Non-Authoritative Information", + 204: "No Content", + 205: "Reset Content", + 206: "Partial Content", + 300: "Multiple Choices", + 301: "Moved Permanently", + 302: "Found", + 303: "See Other", + 304: "Not Modfied", + 305: "Use Proxy", + 306: "", + 307: "Temporary Redirect", + 400: "Bad Request", + 401: "Unauthorized", + 402: "Payment Required", + 403: "Forbidden", + 404: "Not Found", + 405: "Method Not Allowed", + 406: "Not Acceptable", + 407: "Proxy Authentication Required", + 408: "Request Timeout", + 409: "Conflict", + 410: "Gone", + 411: "Length Required", + 412: "Precondition Failed", + 413: "Request Entity Too Large", + 414: "Request-URI Too Long", + 415: "Unsupported Media Type", + 416: "Requested Range Not Satisfiable", + 417: "Expectation Failed", + 500: "Internal Server Error", + 501: "Not Implemented", + 502: "Bad Gateway", + 503: "Service Unavailable", + 504: "Gateway Timeout", +} + +STATUS_MIN = 100 +STATUS_MAX = 504 +ERROR_MIN = 400 +ERROR_MAX = 504 + + +def get_error_message(statuscode): + try: + return STATUS_STRINGS[statuscode] + except KeyError: + raise ValueError(statuscode) + + +class HTTPErrorType(type): + __classes = {} + + @classmethod + def get_error_class(cls, status): + try: + status = int(status) + except (TypeError, ValueError): + raise ValueError("Not a valid HTTP error code: '%s'" % (status, )) + + try: + errorcls = cls.__classes[status] 
+ except KeyError: + if status < STATUS_MIN or status > STATUS_MAX: + raise ValueError("Not a valid HTTP error code: %s" % (status,)) + name = "HTTPError%s" % (status, ) + errorcls = cls(name, (HTTPError, ), {"__init__": + cls._make_init(status)}) + cls.__classes[status] = errorcls + globals()[name] = errorcls + + return errorcls + + def __call__(self, *args, **kw): + if self is HTTPError: + try: + status = kw.pop("status") + except KeyError: + try: + status = args[0] + args = args[1:] + except IndexError: + return super(HTTPErrorType, self).__call__(*args, **kw) + + self = self.get_error_class(status) + return super(HTTPErrorType, self).__call__(*args, **kw) + + @classmethod + def _make_init(cls, status): + def __init__(self, msg=None, reason=None, *args, **kw): + super(self.__class__, self).__init__(status=status, + reason=reason, msg=msg, *args, **kw) + return __init__ + +get_error_class = HTTPErrorType.get_error_class + + +class HTTPError(NetworkError): + __metaclass__ = HTTPErrorType + + def __init__(self, status, reason=None, msg=None, *args, **kw): + status = int(status) + if not reason: + reason = STATUS_STRINGS.get(status, "Unknown Error") + if not msg: + msg = "HTTP Error %s - %s" % (status, reason) + super(HTTPError, self).__init__(msg, status, reason, *args, **kw) + + @property + def message(self): + return self.args[0] + + @property + def status(self): + return self.args[1] + + @property + def reason(self): + return self.args[2] diff --git a/build/lib/futile/net/http/server/__init__.py b/build/lib/futile/net/http/server/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/build/lib/futile/net/http/server/ssl/__init__.py b/build/lib/futile/net/http/server/ssl/__init__.py new file mode 100644 index 0000000..3a11e0c --- /dev/null +++ b/build/lib/futile/net/http/server/ssl/__init__.py @@ -0,0 +1,54 @@ +''' +Created on 18.08.2011 + +@author: kca +''' + +from futile.logging import LoggerMixin +from ssl import wrap_socket, SSLSocket, SSLError, CERT_OPTIONAL, CERT_NONE +from socket import error +from futile import NOT_SET + +class HTTPSMixin(LoggerMixin): + certfile = keyfile = ca_certs = None + cert_reqs = CERT_NONE + + def init_https(self, certfile, keyfile = None, ca_certs = None, cert_reqs = NOT_SET, secure = True): + self.keyfile = keyfile + self.certfile = certfile + self.ca_certs = ca_certs + if cert_reqs is NOT_SET: + cert_reqs = ca_certs and CERT_OPTIONAL or CERT_NONE + self.cert_reqs = cert_reqs + if secure: + self.enable_https() + + def enable_https(self): + if not self.secure: + if not self.certfile: + raise SSLError("Certificate info missing.") + if self.cert_reqs != CERT_NONE and not self.ca_certs: + raise SSLError("Certificate validation requested but no ca certs available.") + self.logger.debug("Enabling https with certfile=%s kefile=%s ca_certs=%s cert_reqs=%s", self.certfile, self.keyfile, self.ca_certs, self.cert_reqs) + self.socket = wrap_socket(self.socket, server_side = True, keyfile = self.keyfile, certfile = self.certfile, ca_certs = self.ca_certs, cert_reqs = self.cert_reqs) + + def disable_https(self): + if self.secure: + self.socket = self.socket._sock + + def get_request(self): + try: + return self.socket.accept() + except error, e: + self.logger.exception("Error during accept(): %s", e) + raise + + def is_secure(self): + return isinstance(self.socket, SSLSocket) + def set_secure(self, s): + if s: + self.enable_https() + else: + self.disable_https() + return s + secure = property(is_secure) diff --git 
a/build/lib/futile/net/http/server/wsgi/__init__.py b/build/lib/futile/net/http/server/wsgi/__init__.py new file mode 100644 index 0000000..fc0b1e2 --- /dev/null +++ b/build/lib/futile/net/http/server/wsgi/__init__.py @@ -0,0 +1,19 @@ +''' +Created on 17.07.2011 + +@author: kca +''' + +from wsgiref.simple_server import WSGIRequestHandler, WSGIServer as _WSGIServer +from SocketServer import ThreadingMixIn, ForkingMixIn + +class WSGIServer(_WSGIServer): + def __init__(self, server_address, app = None, RequestHandlerClass = WSGIRequestHandler): + _WSGIServer.__init__(self, server_address, RequestHandlerClass) + self.set_app(app) + +class ThreadingWSGIServer(ThreadingMixIn, WSGIServer): + pass + +class ForkingWSGIServer(ForkingMixIn, WSGIServer): + pass diff --git a/build/lib/futile/net/http/server/wsgi/ssl.py b/build/lib/futile/net/http/server/wsgi/ssl.py new file mode 100644 index 0000000..f7c17b9 --- /dev/null +++ b/build/lib/futile/net/http/server/wsgi/ssl.py @@ -0,0 +1,22 @@ +''' +Created on 22.08.2011 + +@author: kca +''' + +from ..ssl import HTTPSMixin +from ..wsgi import WSGIServer +from SocketServer import ThreadingMixIn, ForkingMixIn +from wsgiref.simple_server import WSGIRequestHandler +from futile import NOT_SET + +class SecureWSGIServer(HTTPSMixin, WSGIServer): + def __init__(self, server_address, certfile, keyfile = None, ca_certs = None, cert_reqs = NOT_SET, app = None, RequestHandlerClass = WSGIRequestHandler): + WSGIServer.__init__(self, server_address, app = app, RequestHandlerClass = RequestHandlerClass) + self.init_https(certfile, keyfile, ca_certs = ca_certs, cert_reqs = cert_reqs) + +class SecureThreadingWSGIServer(ThreadingMixIn, SecureWSGIServer): + pass + +class SecureForkingWSGIServer(ForkingMixIn, SecureWSGIServer): + pass diff --git a/build/lib/futile/net/sockethelper.py b/build/lib/futile/net/sockethelper.py new file mode 100644 index 0000000..0a146f4 --- /dev/null +++ b/build/lib/futile/net/sockethelper.py @@ -0,0 +1,13 @@ +''' +Created on 14.07.2011 + +@author: kca +''' + +from socket import socket as _socket, AF_INET, SOCK_STREAM +from futile.contextlib import closing + +def socket(family = AF_INET, type = SOCK_STREAM, proto = 0): + return closing(_socket(family, type, proto)) + + \ No newline at end of file diff --git a/build/lib/futile/net/wsgi.py b/build/lib/futile/net/wsgi.py new file mode 100644 index 0000000..df6a495 --- /dev/null +++ b/build/lib/futile/net/wsgi.py @@ -0,0 +1,14 @@ +''' +Created on 21.01.2012 + +@author: kca +''' + +from wsgiref.simple_server import WSGIServer +from SocketServer import ThreadingMixIn, ForkingMixIn + +class ThreadingWSGIServer(ThreadingMixIn, WSGIServer): + pass + +class ForkingWSGIServer(ForkingMixIn, WSGIServer): + pass diff --git a/build/lib/futile/net/xmlrpc.py b/build/lib/futile/net/xmlrpc.py new file mode 100644 index 0000000..9d87cc5 --- /dev/null +++ b/build/lib/futile/net/xmlrpc.py @@ -0,0 +1,40 @@ +''' +Created on 17.07.2011 + +@author: kca +''' + +from futile import Base +from SimpleXMLRPCServer import SimpleXMLRPCDispatcher + +class WSGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher, Base): + def __init__(self, encoding=None): + SimpleXMLRPCDispatcher.__init__(self, allow_none = True, encoding = encoding) + + def __call__(self, environ, start_response): + if environ["REQUEST_METHOD"] != "POST": + headers = [("Content-type", "text/html")] + + if environ["REQUEST_METHOD"] == "HEAD": + data = "" + else: + data = "400 Bad request
" + headers.append(("Content-length", str(len(data)))) + start_response("400 Bad request", headers) + return (data, ) + + l = int(environ["CONTENT_LENGTH"]) + request = environ["wsgi.input"].read(l) + response = self._marshaled_dispatch(request) + headers = [("Content-type", "text/xml"), ("Content-length", str(len(response)))] + start_response("200 OK", headers) + return (response, ) + + def _dispatch(self, *args, **kw): + try: + result = SimpleXMLRPCDispatcher._dispatch(self, *args, **kw) + # self.logger.debug("Result: %s" % (result, )) + return result + except: + self.logger.exception("Error while processing request") + raise \ No newline at end of file diff --git a/build/lib/futile/operator/__init__.py b/build/lib/futile/operator/__init__.py new file mode 100644 index 0000000..dda3a84 --- /dev/null +++ b/build/lib/futile/operator/__init__.py @@ -0,0 +1,9 @@ +from operator import attrgetter + +def attrproperty(name): + return property(attrgetter(name)) + +def resolve_attr(obj, attr): + for name in attr.split("."): + obj = getattr(obj, name) + return obj \ No newline at end of file diff --git a/build/lib/futile/os/__init__.py b/build/lib/futile/os/__init__.py new file mode 100644 index 0000000..f5ee1e2 --- /dev/null +++ b/build/lib/futile/os/__init__.py @@ -0,0 +1,6 @@ + + +def get_fileobj(f): + if not hasattr(f, "read"): + return open(f) + return f diff --git a/build/lib/futile/os/mount.py b/build/lib/futile/os/mount.py new file mode 100644 index 0000000..34e1ec0 --- /dev/null +++ b/build/lib/futile/os/mount.py @@ -0,0 +1,53 @@ +''' +Created on 24.01.2012 + +@author: kca +''' + +from ..path import Path +from ..subprocess import check_output + +def umount(where, force = False): + cmd = [ "umount", where ] + if force: + cmd.append("-f") + check_output(cmd) +unmount = umount + +def mount(what, where, fstype = None, options = None): + return Mount(what, where, fstype, options).mount() + +class Mount(object): + def __init__(self, what, where, fstype = None, options = None): + self.what = Path(what) + self.where = Path(where) + self.fstype = fstype + options = self.options = options and set(options) or set() + if what.isfile(): + options.add("loop") + elif not what.isblockdev(): + raise ValueError("Mount source must be a file or block device: %s" % (what, )) + + def mount(self, fstype = None, options = None): + cmd = [ "mount", self.what, self.where ] + + fstype = fstype or self.fstype + if fstype: + cmd += [ "-t", self.fstype ] + + opts = self.options + if options: + opts += set(self.options) + if opts: + cmd += [ "-o", ','.join(self.options) ] + + check_output(cmd) + return self + __enter__ = mount + + def umount(self, force = False): + umount(self.where, force) + unmount = umount + + def __exit__(self, exc_type, exc_val, exc_tb): + self.umount(True) diff --git a/build/lib/futile/path/__init__.py b/build/lib/futile/path/__init__.py new file mode 100644 index 0000000..c193650 --- /dev/null +++ b/build/lib/futile/path/__init__.py @@ -0,0 +1,865 @@ +""" path.py - An object representing a path to a file or directory. + +Example: + +from path import path +d = path('/home/guido/bin') +for f in d.files('*.py'): + f.chmod(0755) + +This module requires Python 2.2 or later. + + +URL: http://www.jorendorff.com/articles/python/path +Author: Jason Orendorff (and others - see the url!) +Date: 7 Mar 2004 +""" + +# Note - this is an umodified version of Jason Orendorff's 'path' module. + +# TODO +# - Bug in write_text(). It doesn't support Universal newline mode. 
+# - Better error message in listdir() when self isn't a +# directory. (On Windows, the error message really sucks.) +# - Make sure everything has a good docstring. +# - Add methods for regex find and replace. +# - guess_content_type() method? +# - Perhaps support arguments to touch(). +# - Could add split() and join() methods that generate warnings. +# - Note: __add__() technically has a bug, I think, where +# it doesn't play nice with other types that implement +# __radd__(). Test this. + +from __future__ import generators + +def quote(p): + from urllib2 import quote + return quote(p, "") + + +import sys, os, fnmatch, glob, shutil, codecs + +__version__ = '2.0.4' +__all__ = ['path'] + +# Pre-2.3 support. Are unicode filenames supported? +_base = str +try: + if os.path.supports_unicode_filenames: + _base = unicode +except AttributeError: + pass + +# Pre-2.3 workaround for basestring. +try: + basestring +except NameError: + basestring = (str, unicode) + +# Universal newline support +_textmode = 'r' +if hasattr(file, 'newlines'): + _textmode = 'U' + + +class path(_base): + """ Represents a filesystem path. + + For documentation on individual methods, consult their + counterparts in os.path. + """ + + # --- Special Python methods. + + def __repr__(self): + return 'path(%s)' % _base.__repr__(self) + + # Adding a path and a string yields a path. + def __add__(self, more): + return path(_base(self) + more) + + def __radd__(self, other): + return path(other + _base(self)) + + # The / operator joins paths. + def __div__(self, rel): + """ fp.__div__(rel) == fp / rel == fp.joinpath(rel) + + Join two path components, adding a separator character if + needed. + """ + return path(os.path.join(self, rel)) + + # Make the / operator work even when true division is enabled. + __truediv__ = __div__ + + def getcwd(): + """ Return the current working directory as a path object. """ + return path(os.getcwd()) + getcwd = staticmethod(getcwd) + + + # --- Operations on path strings. + + def abspath(self): return path(os.path.abspath(self)) + def normcase(self): return path(os.path.normcase(self)) + def normpath(self): return path(os.path.normpath(self)) + def realpath(self): return path(os.path.realpath(self)) + def expanduser(self): return path(os.path.expanduser(self)) + def expandvars(self): return path(os.path.expandvars(self)) + def dirname(self): return path(os.path.dirname(self)) + basename = os.path.basename + + def expand(self): + """ Clean up a filename by calling expandvars(), + expanduser(), and normpath() on it. + + This is commonly everything needed to clean up a filename + read from a configuration file, for example. + """ + return self.expandvars().expanduser().normpath() + + def _get_namebase(self): + base, _ext = os.path.splitext(self.name) + return base + + def _get_ext(self): + _f, ext = os.path.splitext(_base(self)) + return ext + + def _get_drive(self): + drive, _r = os.path.splitdrive(self) + return path(drive) + + parent = property( + dirname, None, None, + """ This path's parent directory, as a new path object. + + For example, path('/usr/local/lib/libpython.so').parent == path('/usr/local/lib') + """) + + name = property( + basename, None, None, + """ The name of this file or directory without the full path. + + For example, path('/usr/local/lib/libpython.so').name == 'libpython.so' + """) + + namebase = property( + _get_namebase, None, None, + """ The same as path.name, but with one file extension stripped off. 
+ + For example, path('/home/guido/python.tar.gz').name == 'python.tar.gz', + but path('/home/guido/python.tar.gz').namebase == 'python.tar' + """) + + ext = property( + _get_ext, None, None, + """ The file extension, for example '.py'. """) + + drive = property( + _get_drive, None, None, + """ The drive specifier, for example 'C:'. + This is always empty on systems that don't use drive specifiers. + """) + + def splitpath(self): + """ p.splitpath() -> Return (p.parent, p.name). """ + parent, child = os.path.split(self) + return path(parent), child + + def splitdrive(self): + """ p.splitdrive() -> Return (p.drive, ). + + Split the drive specifier from this path. If there is + no drive specifier, p.drive is empty, so the return value + is simply (path(''), p). This is always the case on Unix. + """ + drive, rel = os.path.splitdrive(self) + return path(drive), rel + + def splitext(self): + """ p.splitext() -> Return (p.stripext(), p.ext). + + Split the filename extension from this path and return + the two parts. Either part may be empty. + + The extension is everything from '.' to the end of the + last path segment. This has the property that if + (a, b) == p.splitext(), then a + b == p. + """ + filename, ext = os.path.splitext(self) + return path(filename), ext + + def stripext(self): + """ p.stripext() -> Remove one file extension from the path. + + For example, path('/home/guido/python.tar.gz').stripext() + returns path('/home/guido/python.tar'). + """ + return self.splitext()[0] + + if hasattr(os.path, 'splitunc'): + def splitunc(self): + unc, rest = os.path.splitunc(self) + return path(unc), rest + + def _get_uncshare(self): + unc, r = os.path.splitunc(self) + return path(unc) + + uncshare = property( + _get_uncshare, None, None, + """ The UNC mount point for this path. + This is empty for paths on local drives. """) + + def joinpath(self, *args): + """ Join two or more path components, adding a separator + character (os.sep) if needed. Returns a new path + object. + """ + return path(os.path.join(self, *args)) + + def splitall(self): + """ Return a list of the path components in this path. + + The first item in the list will be a path. Its value will be + either os.curdir, os.pardir, empty, or the root directory of + this path (for example, '/' or 'C:\\'). The other items in + the list will be strings. + + path.path.joinpath(*result) will yield the original path. + """ + parts = [] + loc = self + while loc != os.curdir and loc != os.pardir: + prev = loc + loc, child = prev.splitpath() + if loc == prev: + break + parts.append(child) + parts.append(loc) + parts.reverse() + return parts + + def relpath(self): + """ Return this path as a relative path, + based from the current working directory. + """ + cwd = path(os.getcwd()) + return cwd.relpathto(self) + + def relpathto(self, dest): + """ Return a relative path from self to dest. + + If there is no relative path from self to dest, for example if + they reside on different drives in Windows, then this returns + dest.abspath(). + """ + origin = self.abspath() + dest = path(dest).abspath() + + orig_list = origin.normcase().splitall() + # Don't normcase dest! We want to preserve the case. + dest_list = dest.splitall() + + if orig_list[0] != os.path.normcase(dest_list[0]): + # Can't get here from there. + return dest + + # Find the location where the two paths start to differ. 
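+ # For example (hypothetical paths, POSIX layout): path('/a/b/c').relpathto('/a/x/y')
+ # yields orig_list == ['/', 'a', 'b', 'c'] and dest_list == ['/', 'a', 'x', 'y'];
+ # the loop below stops at i == 2, segments becomes ['..', '..', 'x', 'y'],
+ # and the result is path('../../x/y').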
+ i = 0 + for start_seg, dest_seg in zip(orig_list, dest_list): + if start_seg != os.path.normcase(dest_seg): + break + i += 1 + + # Now i is the point where the two paths diverge. + # Need a certain number of "os.pardir"s to work up + # from the origin to the point of divergence. + segments = [os.pardir] * (len(orig_list) - i) + # Need to add the diverging part of dest_list. + segments += dest_list[i:] + if len(segments) == 0: + # If they happen to be identical, use os.curdir. + return path(os.curdir) + else: + return path(os.path.join(*segments)) + + + # --- Listing, searching, walking, and matching + + def listdir(self, pattern=None): + """ D.listdir() -> List of items in this directory. + + Use D.files() or D.dirs() instead if you want a listing + of just files or just subdirectories. + + The elements of the list are path objects. + + With the optional 'pattern' argument, this only lists + items whose names match the given pattern. + """ + names = os.listdir(self) + if pattern is not None: + names = fnmatch.filter(names, pattern) + return [self / child for child in names] + + def dirs(self, pattern=None): + """ D.dirs() -> List of this directory's subdirectories. + + The elements of the list are path objects. + This does not walk recursively into subdirectories + (but see path.walkdirs). + + With the optional 'pattern' argument, this only lists + directories whose names match the given pattern. For + example, d.dirs('build-*'). + """ + return [p for p in self.listdir(pattern) if p.isdir()] + + def devs(self, pattern = None): + return [p for p in self.listdir(pattern) if p.isdev()] + + def blockdevs(self, pattern = None): + return [p for p in self.listdir(pattern) if p.isblockdev()] + + def chardevs(self, pattern = None): + return [p for p in self.listdir(pattern) if p.ischardev()] + + def files(self, pattern=None): + """ D.files() -> List of the files in this directory. + + The elements of the list are path objects. + This does not walk into subdirectories (see path.walkfiles). + + With the optional 'pattern' argument, this only lists files + whose names match the given pattern. For example, + d.files('*.pyc'). + """ + + return [p for p in self.listdir(pattern) if p.isfile()] + + def walk(self, pattern=None): + """ D.walk() -> iterator over files and subdirs, recursively. + + The iterator yields path objects naming each child item of + this directory and its descendants. This requires that + D.isdir(). + + This performs a depth-first traversal of the directory tree. + Each directory is returned just before all its children. + """ + for child in self.listdir(): + if pattern is None or child.fnmatch(pattern): + yield child + if child.isdir(): + for item in child.walk(pattern): + yield item + + def walkdirs(self, pattern=None): + """ D.walkdirs() -> iterator over subdirs, recursively. + + With the optional 'pattern' argument, this yields only + directories whose names match the given pattern. For + example, mydir.walkdirs('*test') yields only directories + with names ending in 'test'. + """ + for child in self.dirs(): + if pattern is None or child.fnmatch(pattern): + yield child + for subsubdir in child.walkdirs(pattern): + yield subsubdir + + def walkfiles(self, pattern=None): + """ D.walkfiles() -> iterator over files in D, recursively. + + The optional argument, pattern, limits the results to files + with names that match the pattern. For example, + mydir.walkfiles('*.tmp') yields only files with the .tmp + extension. 
+ """ + for child in self.listdir(): + if child.isfile(): + if pattern is None or child.fnmatch(pattern): + yield child + elif child.isdir(): + for f in child.walkfiles(pattern): + yield f + + def fnmatch(self, pattern): + """ Return True if self.name matches the given pattern. + + pattern - A filename pattern with wildcards, + for example '*.py'. + """ + return fnmatch.fnmatch(self.name, pattern) + + def glob(self, pattern): + """ Return a list of path objects that match the pattern. + + pattern - a path relative to this directory, with wildcards. + + For example, path('/users').glob('*/bin/*') returns a list + of all the files users have in their bin directories. + """ + return map(path, glob.glob(_base(self / pattern))) + + + # --- Reading or writing an entire file at once. + + def open(self, mode='r'): + """ Open this file. Return a file object. """ + return file(self, mode) + + def bytes(self): + """ Open this file, read all bytes, return them as a string. """ + f = self.open('rb') + try: + return f.read() + finally: + f.close() + + def write_bytes(self, bytes, append=False): + """ Open this file and write the given bytes to it. + + Default behavior is to overwrite any existing file. + Call this with write_bytes(bytes, append=True) to append instead. + """ + if append: + mode = 'ab' + else: + mode = 'wb' + f = self.open(mode) + try: + f.write(bytes) + finally: + f.close() + + def text(self, encoding=None, errors='strict'): + """ Open this file, read it in, return the content as a string. + + This uses 'U' mode in Python 2.3 and later, so '\r\n' and '\r' + are automatically translated to '\n'. + + Optional arguments: + + encoding - The Unicode encoding (or character set) of + the file. If present, the content of the file is + decoded and returned as a unicode object; otherwise + it is returned as an 8-bit str. + errors - How to handle Unicode errors; see help(str.decode) + for the options. Default is 'strict'. + """ + if encoding is None: + # 8-bit + f = self.open(_textmode) + try: + return f.read() + finally: + f.close() + else: + # Unicode + f = codecs.open(self, 'r', encoding, errors) + # (Note - Can't use 'U' mode here, since codecs.open + # doesn't support 'U' mode, even in Python 2.3.) + try: + t = f.read() + finally: + f.close() + return (t.replace(u'\r\n', u'\n') + .replace(u'\r\x85', u'\n') + .replace(u'\r', u'\n') + .replace(u'\x85', u'\n') + .replace(u'\u2028', u'\n')) + + def write_text(self, text, encoding=None, errors='strict', linesep=os.linesep, append=False): + """ Write the given text to this file. + + The default behavior is to overwrite any existing file; + to append instead, use the 'append=True' keyword argument. + + There are two differences between path.write_text() and + path.write_bytes(): newline handling and Unicode handling. + See below. + + Parameters: + + - text - str/unicode - The text to be written. + + - encoding - str - The Unicode encoding that will be used. + This is ignored if 'text' isn't a Unicode string. + + - errors - str - How to handle Unicode encoding errors. + Default is 'strict'. See help(unicode.encode) for the + options. This is ignored if 'text' isn't a Unicode + string. + + - linesep - keyword argument - str/unicode - The sequence of + characters to be used to mark end-of-line. The default is + os.linesep. You can also specify None; this means to + leave all newlines as they are in 'text'. + + - append - keyword argument - bool - Specifies what to do if + the file already exists (True: append to the end of it; + False: overwrite it.) 
The default is False. + + + --- Newline handling. + + write_text() converts all standard end-of-line sequences + ('\n', '\r', and '\r\n') to your platform's default end-of-line + sequence (see os.linesep; on Windows, for example, the + end-of-line marker is '\r\n'). + + If you don't like your platform's default, you can override it + using the 'linesep=' keyword argument. If you specifically want + write_text() to preserve the newlines as-is, use 'linesep=None'. + + This applies to Unicode text the same as to 8-bit text, except + there are three additional standard Unicode end-of-line sequences: + u'\x85', u'\r\x85', and u'\u2028'. + + (This is slightly different from when you open a file for + writing with fopen(filename, "w") in C or file(filename, 'w') + in Python.) + + + --- Unicode + + If 'text' isn't Unicode, then apart from newline handling, the + bytes are written verbatim to the file. The 'encoding' and + 'errors' arguments are not used and must be omitted. + + If 'text' is Unicode, it is first converted to bytes using the + specified 'encoding' (or the default encoding if 'encoding' + isn't specified). The 'errors' argument applies only to this + conversion. + + """ + if isinstance(text, unicode): + if linesep is not None: + # Convert all standard end-of-line sequences to + # ordinary newline characters. + text = (text.replace(u'\r\n', u'\n') + .replace(u'\r\x85', u'\n') + .replace(u'\r', u'\n') + .replace(u'\x85', u'\n') + .replace(u'\u2028', u'\n')) + text = text.replace(u'\n', linesep) + if encoding is None: + encoding = sys.getdefaultencoding() + bytes = text.encode(encoding, errors) + else: + # It is an error to specify an encoding if 'text' is + # an 8-bit string. + assert encoding is None + + if linesep is not None: + text = (text.replace('\r\n', '\n') + .replace('\r', '\n')) + bytes = text.replace('\n', linesep) + + self.write_bytes(bytes, append) + + def lines(self, encoding=None, errors='strict', retain=True): + """ Open this file, read all lines, return them in a list. + + Optional arguments: + encoding - The Unicode encoding (or character set) of + the file. The default is None, meaning the content + of the file is read as 8-bit characters and returned + as a list of (non-Unicode) str objects. + errors - How to handle Unicode errors; see help(str.decode) + for the options. Default is 'strict' + retain - If true, retain newline characters; but all newline + character combinations ('\r', '\n', '\r\n') are + translated to '\n'. If false, newline characters are + stripped off. Default is True. + + This uses 'U' mode in Python 2.3 and later. + """ + if encoding is None and retain: + f = self.open(_textmode) + try: + return f.readlines() + finally: + f.close() + else: + return self.text(encoding, errors).splitlines(retain) + + def write_lines(self, lines, encoding=None, errors='strict', + linesep=os.linesep, append=False): + """ Write the given lines of text to this file. + + By default this overwrites any existing file at this path. + + This puts a platform-specific newline sequence on every line. + See 'linesep' below. + + lines - A list of strings. + + encoding - A Unicode encoding to use. This applies only if + 'lines' contains any Unicode strings. + + errors - How to handle errors in Unicode encoding. This + also applies only to Unicode strings. + + linesep - The desired line-ending. This line-ending is + applied to every line. 
If a line already has any + standard line ending ('\r', '\n', '\r\n', u'\x85', + u'\r\x85', u'\u2028'), that will be stripped off and + this will be used instead. The default is os.linesep, + which is platform-dependent ('\r\n' on Windows, '\n' on + Unix, etc.) Specify None to write the lines as-is, + like file.writelines(). + + Use the keyword argument append=True to append lines to the + file. The default is to overwrite the file. Warning: + When you use this with Unicode data, if the encoding of the + existing data in the file is different from the encoding + you specify with the encoding= parameter, the result is + mixed-encoding data, which can really confuse someone trying + to read the file later. + """ + if append: + mode = 'ab' + else: + mode = 'wb' + f = self.open(mode) + try: + for line in lines: + isUnicode = isinstance(line, unicode) + if linesep is not None: + # Strip off any existing line-end and add the + # specified linesep string. + if isUnicode: + if line[-2:] in (u'\r\n', u'\x0d\x85'): + line = line[:-2] + elif line[-1:] in (u'\r', u'\n', + u'\x85', u'\u2028'): + line = line[:-1] + else: + if line[-2:] == '\r\n': + line = line[:-2] + elif line[-1:] in ('\r', '\n'): + line = line[:-1] + line += linesep + if isUnicode: + if encoding is None: + encoding = sys.getdefaultencoding() + line = line.encode(encoding, errors) + f.write(line) + finally: + f.close() + + + # --- Methods for querying the filesystem. + + exists = os.path.exists + isabs = os.path.isabs + isdir = os.path.isdir + isfile = os.path.isfile + islink = os.path.islink + ismount = os.path.ismount + + if hasattr(os.path, 'samefile'): + samefile = os.path.samefile + + getatime = os.path.getatime + atime = property( + getatime, None, None, + """ Last access time of the file. """) + + getmtime = os.path.getmtime + mtime = property( + getmtime, None, None, + """ Last-modified time of the file. """) + + if hasattr(os.path, 'getctime'): + getctime = os.path.getctime + ctime = property( + getctime, None, None, + """ Creation time of the file. """) + + getsize = os.path.getsize + size = property( + getsize, None, None, + """ Size of the file, in bytes. """) + + def isdev(self): + from stat import S_ISBLK, S_ISCHR + mode = self.__st_mode() + return S_ISBLK(mode) or S_ISCHR(mode) + + def __st_mode(self): + try: + return self.stat().st_mode + except OSError as e: + if e.errno != 2: + raise + return 0 + + def ischardev(self): + from stat import S_ISCHR + return S_ISCHR(self.__st_mode()) + + def isblockdev(self): + from stat import S_ISBLK + return S_ISBLK(self.__st_mode()) + + if hasattr(os, 'access'): + def access(self, mode): + """ Return true if current user has access to this path. + + mode - One of the constants os.F_OK, os.R_OK, os.W_OK, os.X_OK + """ + return os.access(self, mode) + + def stat(self): + """ Perform a stat() system call on this path. """ + return os.stat(self) + + def lstat(self): + """ Like path.stat(), but do not follow symbolic links. """ + return os.lstat(self) + + if hasattr(os, 'statvfs'): + def statvfs(self): + """ Perform a statvfs() system call on this path. """ + return os.statvfs(self) + + if hasattr(os, 'pathconf'): + def pathconf(self, name): + return os.pathconf(self, name) + + + # --- Modifying operations on files and directories + + def utime(self, times): + """ Set the access and modified times of this file. 
""" + os.utime(self, times) + + def chmod(self, mode): + os.chmod(self, mode) + + if hasattr(os, 'chown'): + def chown(self, uid, gid): + os.chown(self, uid, gid) + + def rename(self, new): + os.rename(self, new) + + def renames(self, new): + os.renames(self, new) + # --- Create/delete operations on directories + + def mkdir(self, mode=0750): + os.mkdir(self, mode) + + def makedirs(self, mode=0750): + os.makedirs(self, mode) + + def rmdir(self): + os.rmdir(self) + + def removedirs(self): + os.removedirs(self) + + + # --- Modifying operations on files + + def touch(self, mode = 0640): + """ Set the access/modified times of this file to the current time. + Create the file if it does not exist. + """ + fd = os.open(self, os.O_WRONLY | os.O_CREAT, mode) + os.close(fd) + os.utime(self, None) + + def remove(self): + os.remove(self) + + def unlink(self): + os.unlink(self) + + + # --- Links + + if hasattr(os, 'link'): + def link(self, newpath): + """ Create a hard link at 'newpath', pointing to this file. """ + os.link(self, newpath) + + if hasattr(os, 'symlink'): + def symlink(self, newlink): + """ Create a symbolic link at 'newlink', pointing here. """ + os.symlink(self, newlink) + + if hasattr(os, 'readlink'): + def readlink(self): + """ Return the path to which this symbolic link points. + + The result may be an absolute or a relative path. + """ + return path(os.readlink(self)) + + def readlinkabs(self): + """ Return the path to which this symbolic link points. + + The result is always an absolute path. + """ + p = self.readlink() + if p.isabs(): + return p + else: + return (self.parent / p).abspath() + + def checkdir(self): + if not self.isdir(): + raise Exception("Not a directory: '%s'" % (self, )) + + def checkfile(self): + if not self.isfile(): + raise Exception("Not a file: '%s'" % (self, )) + + def forcedir(self, mode = 0750): + if not self.isdir(): + if self.exists(): + raise Exception("Not a directory: '%s'" % (self, )) + self.makedirs(mode) + + def forcefile(self, mode = 0640): + if not self.exists(): + return self.touch(mode = 0640) + if not self.isfile(): + raise Exception("Not a file: %s" % (self ,)) + + # --- High-level functions from shutil + + copyfile = shutil.copyfile + copymode = shutil.copymode + copystat = shutil.copystat + copy = shutil.copy + copy2 = shutil.copy2 + copytree = shutil.copytree + if hasattr(shutil, 'move'): + move = shutil.move + + def rmtree(self): + if self.isdir(): + return shutil.rmtree(self) + self.unlink() + + quote = quote + + # --- Special stuff from os + + if hasattr(os, 'chroot'): + def chroot(self): + os.chroot(self) + + if hasattr(os, 'startfile'): + startfile = os.startfile + +Path = path \ No newline at end of file diff --git a/build/lib/futile/profile/__init__.py b/build/lib/futile/profile/__init__.py new file mode 100644 index 0000000..43adfb8 --- /dev/null +++ b/build/lib/futile/profile/__init__.py @@ -0,0 +1,16 @@ +from time import time + +def timeit(f): + def _timeit(*args, **kw): + _timeit.__runs__ += 1 + start = time() + try: + return f(*args, **kw) + finally: + spent = _timeit.__last_time__ = time() - start + _timeit.__total_time__ += spent + _timeit.__runs__ = 0 + _timeit.__total_time__ = 0.0 + _timeit.__last_time__ = None + _timeit.__name__ = f.__name__ + return _timeit \ No newline at end of file diff --git a/build/lib/futile/serializer/__init__.py b/build/lib/futile/serializer/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/build/lib/futile/serializer/exc.py b/build/lib/futile/serializer/exc.py new file mode 
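The timeit decorator above records its statistics as attributes on the wrapper it returns. A small usage sketch, assuming the module is importable as futile.profile:

    from futile.profile import timeit

    @timeit
    def work():
        return sum(xrange(100000))

    work(); work()
    print work.__runs__         # 2
    print work.__total_time__   # cumulative seconds across both calls
    print work.__last_time__    # duration of the most recent call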
100644 index 0000000..929535b --- /dev/null +++ b/build/lib/futile/serializer/exc.py @@ -0,0 +1,8 @@ +''' +Created on 24.09.2011 + +@author: kca +''' + +class ParseError(Exception): + pass \ No newline at end of file diff --git a/build/lib/futile/serializer/xml.py b/build/lib/futile/serializer/xml.py new file mode 100644 index 0000000..f52d672 --- /dev/null +++ b/build/lib/futile/serializer/xml.py @@ -0,0 +1,51 @@ +''' +Created on 28.08.2011 + +@author: kca +''' + +from ..logging import LoggerMixin +from logging import DEBUG +from ..etree.impl import ElementTree, XML, ParseError as XMLParseError, XMLSyntaxError, tostring +from abc import ABCMeta, abstractmethod +from futile.serializer.exc import ParseError + +class AbstractXMLSerializer(LoggerMixin): + __metaclass__ = ABCMeta + + def load(self, input): + if self.logger.isEnabledFor(DEBUG): + from cStringIO import StringIO + input = input.read() + self.logger.debug("Parsing input: %s", input) + input = StringIO(input) + root = self._load(input) + return self._parse_input(root) + + def _load(self, input): + try: + if isinstance(input, str): + return XML(input) + else: + return ElementTree().parse(input) + except Exception, e: + self._handle_parse_error(e) + raise ParseError(e) + + def _handle_parse_error(self, e): + self.logger.exception("Error parsing input: %s", e) + + @abstractmethod + def _parse_input(self, root): + raise NotImplementedError() + + def dump(self, o, pretty_print = True): + raise NotImplementedError() + + def dumps(self, o, pretty_print = True): + xml = self._dump_object(o) + return tostring(xml, pretty_print = pretty_print) + + @abstractmethod + def _dump_object(self, o): + raise NotImplementedError() diff --git a/build/lib/futile/signal/__init__.py b/build/lib/futile/signal/__init__.py new file mode 100644 index 0000000..a81e7e8 --- /dev/null +++ b/build/lib/futile/signal/__init__.py @@ -0,0 +1 @@ +from timeout import timeout, Timeout \ No newline at end of file diff --git a/build/lib/futile/signal/timeout.py b/build/lib/futile/signal/timeout.py new file mode 100644 index 0000000..40015ce --- /dev/null +++ b/build/lib/futile/signal/timeout.py @@ -0,0 +1,29 @@ +''' +Created on 20.05.2011 + +@author: kca +''' + +from signal import signal, SIGALRM, alarm +from contextlib import contextmanager +from futile import noop + + +@contextmanager +def timeout(seconds): + if not seconds: + yield + return + + original_handler = signal(SIGALRM, noop) + + try: + alarm(seconds) + yield + finally: + alarm(0) + signal(SIGALRM, original_handler) + + +def Timeout(seconds): + return lambda: timeout(seconds) diff --git a/build/lib/futile/singleton.py b/build/lib/futile/singleton.py new file mode 100644 index 0000000..e171320 --- /dev/null +++ b/build/lib/futile/singleton.py @@ -0,0 +1,30 @@ +''' +Created on 23.07.2011 + +@author: kca +''' +from futile import Base +from futile.logging import LoggerMixin + +class SingletonType(type, LoggerMixin): + __instances = {} + + def get_instance(self): + try: + i = self.__instances[self] + self.logger.debug("Reusing singleton instance for %s.%s" % (self.__module__, self.__name__)) + except KeyError: + self.logger.debug("Creating singleton instance for %s.%s" % (self.__module__, self.__name__)) + i = super(SingletonType, self).__call__() + self.__instances[self] = i + return i + +class ForcedSingletonType(SingletonType): + def __call__(self, *args, **kw): + return self.get_instance() + +class Singleton(Base): + __metaclass__ = SingletonType + +class ForcedSingleton(Base): + __metaclass__ = 
ForcedSingletonType \ No newline at end of file diff --git a/build/lib/futile/string/__init__.py b/build/lib/futile/string/__init__.py new file mode 100644 index 0000000..bb3f869 --- /dev/null +++ b/build/lib/futile/string/__init__.py @@ -0,0 +1,23 @@ +import string + +letters_digits_underscore = string.letters + string.digits + "_" + + +class InvalidIdentifier(ValueError): + pass + + +def is_identifier(s): + if not s or s[0] not in string.letters: + return False + + for c in s: + if c not in letters_digits_underscore: + return False + + return True + + +def check_identifier(s): + if not is_identifier(s): + raise InvalidIdentifier(s) diff --git a/build/lib/futile/subprocess/__init__.py b/build/lib/futile/subprocess/__init__.py new file mode 100644 index 0000000..cbf830a --- /dev/null +++ b/build/lib/futile/subprocess/__init__.py @@ -0,0 +1,46 @@ +''' +Created on 17.07.2011 + +@author: kca +''' + +import logging, sys +from futile.logging import get_logger +from subprocess import check_output as _check_output, check_call as _check_call, CalledProcessError, STDOUT, Popen + +try: + from subprocces import SubprocessError, TimeoutExpired +except ImportError: + class SubprocessError(Exception): + pass + + class TimeoutExpired(SubprocessError): + pass + +def _pre_call(args): + #needed for chroot safety + import encodings.string_escape + + cmd = ' '.join(args) + get_logger().debug("running %s" % (cmd, )) + return cmd + + +def check_output(args, stdin=None, stderr=STDOUT, shell=False, cwd=None, env=None, *popenargs, **popenkw): + cmd = _pre_call(args) + + try: + return _check_output(args, stdin=stdin, stderr=stderr, shell=shell, cwd=cwd, env=env, *popenargs, **popenkw) + except CalledProcessError as e: + get_logger().debug("Command %s returned exit code %s. This is the programs output:\n%s<>" % (cmd, e.returncode, e.output)) + raise + +def check_call(args, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, env=None, *popenargs, **popenkw): + cmd = _pre_call(args) + + try: + return _check_call(args, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env, *popenargs, **popenkw) + except CalledProcessError as e: + get_logger().debug("Command %s returned exit code %s." % (cmd, e.returncode)) + raise + diff --git a/build/lib/futile/subprocess/daemon.py b/build/lib/futile/subprocess/daemon.py new file mode 100644 index 0000000..8d6eab3 --- /dev/null +++ b/build/lib/futile/subprocess/daemon.py @@ -0,0 +1,165 @@ +''' +Created on 02.02.2012 + +@author: kca +''' + +from time import sleep +from abc import ABCMeta, abstractproperty, abstractmethod +from futile import Base +from futile.path import Path +from . 
import check_call, STDOUT + +class DaemonController(Base): + __metaclass__ = ABCMeta + + def __init__(self, sleep = 5, stop_sleep = 3, *args, **kw): + super(DaemonController, self).__init__(*args, **kw) + self.__sleep = int(sleep) + self.__stop_sleep = int(stop_sleep) + + @abstractproperty + def is_running(self): + raise NotImplementedError() + + def start(self): + self._start() + sleep(self.__sleep) + + @abstractmethod + def _start(self): + raise NotImplementedError() + + def stop(self): + self._stop() + sleep(self.__stop_sleep) + + @abstractmethod + def _stop(self): + raise NotImplementedError() + + def __enter__(self): + self.start() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.stop() + +class DummyController(DaemonController): + def __init__(self, sleep = 0, stop_sleep = 0, *args, **kw): + super(DummyController).__init__(sleep = sleep, stop_sleep = stop_sleep, *args, **kw) + + def _start(self): + pass + _stop = _start + + @property + def is_running(self): + return False + +import os +import errno + +class CheckPIDFileController(DaemonController): + def __init__(self, pidfile, *args, **kw): + super(CheckPIDFileController, self).__init__(*args, **kw) + self.__pidfile = Path(pidfile) + + @property + def pidfile(self): + return self.__pidfile + + @property + def is_running(self): + if not self.pidfile.exists(): + return False + + if not self.pidfile.isfile(): + raise Exception("pidfile '%s' is not a file" % (self.pidfile, )) + + try: + pid = int(self.__pidfile.open().readline(16)) + except: + self.logger.exception("Error reading pidfile %s" % (self.pidfile)) + raise + + try: + os.kill(pid, 0) + return True + except OSError, e: + if e.errno == errno.ESRCH: + return False + raise + +class StartStopDaemonController(CheckPIDFileController): + def __init__(self, executable, fork = False, workingdir = None, pidfile = None, makepidfile = False, daemonargs = None, ssd = "/sbin/start-stop-daemon", ldpath = None, outfile = "/dev/null", *args, **kw): + if not pidfile: + pidfile = "/tmp/" + executable.replace("/", "_") + ".pid" + super(StartStopDaemonController, self).__init__(pidfile = pidfile, *args, **kw) + + self.__executable = unicode(executable) + self.__workingdir = workingdir and unicode(workingdir) or None + + if ldpath is not None: + if not isinstance(ldpath, (list, set, tuple, frozenset)): + ldpath = [ ldpath ] + ldpath = tuple(set(ldpath)) + self.__ldpath = ldpath + + self.__makepidfile = makepidfile + self.__daemonargs = daemonargs + self.__fork = fork + self.__ssd = ssd + self.__outfile = outfile + + def get_daemonargs(self): + return self.__daemonargs + def set_daemonargs(self, da): + self.__daemonargs = da + daemonargs = property(get_daemonargs, set_daemonargs) + + def __make_cmd(self, cmd, test): + cmd = [ self.__ssd, cmd, '-x', self.__executable, '-p', self.pidfile, '-o' ] + + if self.__workingdir: + cmd += [ '-d', self.__workingdir ] + + if test: + cmd.append('-t') + + env = None + if self.__ldpath: + env = dict(LD_LIBRARY_PATH = ':'.join(self.__ldpath)) + + return cmd, env + + def __check_cmd(self, cmd, env): + self.logger.debug("ssd env: " + str(env)) + + outfile = self.__outfile + if outfile: + outfile = Path(outfile).open("a") + + try: + check_call(cmd, stdout = outfile, stderr = STDOUT, close_fds = True, cwd = self.__workingdir, env = env) + finally: + if outfile is not None: + outfile.close() + + def _start(self): + cmd, env = self.__make_cmd("-S", False) + if self.__makepidfile: + cmd.append('-m') + + if self.__fork: + cmd.append('-b') + + if 
self.__daemonargs: + cmd += [ '--' ] + list(self.__daemonargs) + + self.__check_cmd(cmd, env) + + def _stop(self): + cmd, env = self.__make_cmd("-K", False) + self.__check_cmd(cmd, env) + diff --git a/build/lib/futile/tempfile/__init__.py b/build/lib/futile/tempfile/__init__.py new file mode 100644 index 0000000..3047f4b --- /dev/null +++ b/build/lib/futile/tempfile/__init__.py @@ -0,0 +1,38 @@ +from tempfile import mkdtemp as _mkdtemp +from shutil import rmtree +from .. import Base +from futile import noop + +class TempDir(Base): + delete_on_error = delete = True + + def __init__(self, suffix='', prefix='tmp', dir=None, delete = None, delete_on_error = None, *args, **kw): + super(TempDir, self).__init__(*args, **kw) + self.__name = _mkdtemp(suffix, prefix, dir) + if delete is not None: + self.delete = delete + if delete_on_error is not None: + self.delete_on_error = delete_on_error + + @property + def name(self): + return self.__name + + def rmtree(self): + rmtree(self.__name) + self.rmtree = noop + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.delete or (exc_type and self.delete_on_error): + self.rmtree() + + def __del__(self): + self.__exit__(None, None, None) + + def __str__(self): + return self.__name + +mkdtemp = TempDir diff --git a/build/lib/futile/threading/RWLock.py b/build/lib/futile/threading/RWLock.py new file mode 100644 index 0000000..ecd483b --- /dev/null +++ b/build/lib/futile/threading/RWLock.py @@ -0,0 +1,106 @@ +#! /usr/bin/env python +''' +Created on 01.04.2011 + +@author: kca +''' +#TODO: proper timeout handling +from __future__ import with_statement + +from threading import Lock, Event +from contextlib import contextmanager + +class Timeout(Exception): + pass + +class ReverseSemaphore(object): + def __init__(self, *args, **kw): + super(ReverseSemaphore, self).__init__(*args, **kw) + + self.counter = 0 + self.lock = Lock() + self.event = Event() + self.event.set() + pass + + def acquire(self): + with self.lock: + self.counter += 1 + self.event.clear() + pass + pass + + def release(self): + with self.lock: + self.counter -= 1 + if self.counter == 0: + self.event.set() + if self.counter < 0: + self.counter = 0 + pass + pass + pass + + def wait(self): + return self.event.wait() + pass + + def __enter__(self): + self.acquire() + pass + + def __exit__ (self, type, value, tb): + self.release() + pass + pass + + +class RWLock(object): + def __init__(self, *args, **kw): + super(RWLock, self).__init__(*args, **kw) + + self.write_lock = Lock() + self.read_lock = ReverseSemaphore() + self.write_event = Event() + self.write_event.set() + + @contextmanager + def read_transaction(self, timeout = None): + self.read_acquire(timeout = timeout) + try: + yield + finally: + self.read_release() + pass + pass + + @contextmanager + def write_transaction(self, timeout = None): + self.write_acquire(timeout = timeout) + try: + yield + finally: + self.write_release() + pass + pass + + def read_acquire(self, timeout = None): + self.write_event.wait(timeout = timeout) + if not self.write_event.is_set(): + raise Timeout() + self.read_lock.acquire() + return True + + def read_release(self): + self.read_lock.release() + pass + + def write_acquire(self, timeout = None): + self.write_lock.acquire() + self.write_event.clear() + self.read_lock.wait() + pass + + def write_release(self): + self.write_event.set() + self.write_lock.release() diff --git a/build/lib/futile/threading/__init__.py b/build/lib/futile/threading/__init__.py new file mode 100644 
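The RWLock above allows any number of concurrent readers while writers get exclusive access; the context-manager helpers are the intended entry points. A minimal sketch, assuming the import path futile.threading.RWLock:

    from futile.threading.RWLock import RWLock, Timeout

    lock = RWLock()

    with lock.read_transaction():
        pass  # several threads may hold read access concurrently

    with lock.write_transaction():
        pass  # exclusive: waits for active readers to release first

    # read_transaction(timeout=...) raises Timeout if a writer still
    # holds the lock when the timeout expires.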
index 0000000..3576376 --- /dev/null +++ b/build/lib/futile/threading/__init__.py @@ -0,0 +1,18 @@ +import sys + +try: + from threading import current_thread +except ImportError: + from threading import currentThread as current_thread + + +if sys.version_info < (2, 7): + from threading import _Event + class Event(_Event): + def wait(self, timeout = None): + super(_Event, self).wait(timeout = timeout) + return self.is_set() +else: + from threading import Event + + \ No newline at end of file diff --git a/build/lib/futile/threading/synchronized.py b/build/lib/futile/threading/synchronized.py new file mode 100644 index 0000000..a6b1cf4 --- /dev/null +++ b/build/lib/futile/threading/synchronized.py @@ -0,0 +1,28 @@ +''' +Created on 08.08.2011 + +@author: kca +''' + +from threading import Condition + +def synchronized(f): + done = Condition() + f.in_progress = False + + def sync(*args, **kw): + done.acquire() + if not f.in_progress: + f.in_progress = True + done.release() + try: + return f(*args, **kw) + finally: + f.in_progress = False + with done: + done.notify_all() + else: + done.wait() + assert(not f.in_progress) + done.release() + return sync diff --git a/build/lib/futile/traceback/__init__.py b/build/lib/futile/traceback/__init__.py new file mode 100644 index 0000000..7dccf20 --- /dev/null +++ b/build/lib/futile/traceback/__init__.py @@ -0,0 +1,19 @@ +import sys +from traceback import format_exception + +def get_traceback(self, exc_info=None): + return ''.join(format_exception(*(exc_info or sys.exc_info()))) + + +def current_stack(skip=0): + try: + 1 / 0 + except ZeroDivisionError: + f = sys.exc_info()[2].tb_frame + for _ in xrange(skip + 2): + f = f.f_back + lst = [] + while f is not None: + lst.append((f, f.f_lineno)) + f = f.f_back + return lst diff --git a/build/lib/futile/types/TypeManager.py b/build/lib/futile/types/TypeManager.py new file mode 100644 index 0000000..9b08d2a --- /dev/null +++ b/build/lib/futile/types/TypeManager.py @@ -0,0 +1,9 @@ +''' +Created on 01.09.2011 + +@author: kca +''' +from futile.types import AbstractTypeManager + +class TypeManager(AbstractTypeManager): + pass \ No newline at end of file diff --git a/build/lib/futile/types/__init__.py b/build/lib/futile/types/__init__.py new file mode 100644 index 0000000..fa863be --- /dev/null +++ b/build/lib/futile/types/__init__.py @@ -0,0 +1,52 @@ +''' +Created on 01.09.2011 + +@author: kca +''' + +import sys +from types import ModuleType + +from futile.collections import get_iterable +from ..logging import LoggerMixin + + +class ImmutableType(type): + def __call__(self, *args, **kw): + if args and isinstance(args[0], self): + return args[0] + return super(ImmutableType, self).__call__(*args, **kw) + +class TypeManagerType(LoggerMixin, type): + def __init__(self, *args, **kw): + super(TypeManagerType, self).__init__(*args, **kw) + modname = self.__module__ + "." + self.__name__ + if self.__module__ != __name__: + sys.modules[modname] = self + self.__module_name__ = modname + + +class AbstractTypeManager(LoggerMixin, ModuleType): + __metaclass__ = TypeManagerType + + def __init__(self, name = None, *args, **kw): + name = name or str(id(name)) + self.modulename = self.__module_name__ + "." 
+ getattr(self, "__prefix__", self.__class__.__name__) + name + sys.modules[self.modulename] = self + + def create_type(self, name, base = (), dict = {}, metaclass = type): + try: + existing = getattr(self, name) + if not isinstance(existing, type): + raise ValueError(name) + return existing + except AttributeError: + pass + + base = get_iterable(base) + self.logger.debug("Creating %s %s(%s) with %s", metaclass.__name__, + name, base, dict) + dict["__module__"] = self.modulename + type = metaclass(name, base, dict) + setattr(self, name, type) + return type diff --git a/build/lib/openmtc/__init__.py b/build/lib/openmtc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/build/lib/openmtc/configuration.py b/build/lib/openmtc/configuration.py new file mode 100644 index 0000000..d2a768f --- /dev/null +++ b/build/lib/openmtc/configuration.py @@ -0,0 +1,178 @@ +import logging +from abc import ABCMeta, abstractmethod + +from enum import Enum + +from futile import NOT_SET, identity +from futile.logging import LoggerMixin +from openmtc.exc import OpenMTCError + + +class ConfigurationError(OpenMTCError): + pass + + +class ConfigurationKeyError(KeyError, ConfigurationError): + pass + + +class ConfigurationAttributeError(AttributeError, ConfigurationError): + pass + + +class ConfigurationValueError(ValueError, ConfigurationError): + pass + + +class ExtraOptionsStrategy(Enum): + ignore = "ignore" + warn = "warn" + prune = "prune" + fatal = "fatal" + + +class ConfigurationOption(LoggerMixin): + __metaclass__ = ABCMeta + + def __init__(self, type, default=NOT_SET, converter=identity, + *args, **kw): + super(ConfigurationOption, self).__init__(*args, **kw) + self.type = type + self.default = default + self.converter = converter + + def convert(self, v): + if v is None: + if self.default is not NOT_SET: + return self.default + raise ConfigurationValueError("Value must not be None") + + v = self._convert(v) + return self.converter(v) + + @abstractmethod + def _convert(self, v): + return v + + +class SimpleOption(ConfigurationOption): + def __init__(self, type=str, default=NOT_SET, converter=identity, + *args, **kw): + super(SimpleOption, self).__init__(type=type, default=default, + converter=converter) + + def _convert(self, v): + if isinstance(v, self.type): + return v + return self.type(v) + + +class ListOption(SimpleOption): + def __init__(self, content_type, type=list, default=NOT_SET, + converter=identity, *args, **kw): + super(ListOption, self).__init__(type=type, default=default, + converter=converter) + self.content_type = content_type + + def _convert(self, v): + v = super(ListOption, self)._convert(v) + return map(self._convert_content, v) + + def _convert_content(self, v): + if not isinstance(v, self.content_type): + v = self.content_type(v) + return v + + +class BooleanOption(ConfigurationOption): + def __init__(self, default=NOT_SET, converter=identity, *args, **kw): + super(BooleanOption, self).__init__(type=bool, default=default, + converter=converter) + + def _convert(self, v): + if isinstance(v, (bool, int)): + return bool(v) + if isinstance(v, basestring): + return v and v.lower() not in ("0", "no", "n", "f", "false") + raise ConfigurationValueError("Illegal value for boolean: %s" % (v, )) + + +class EnumOption(SimpleOption): + def _convert(self, v): + try: + return super(EnumOption, self)._convert(v) + except Exception as exc: + try: + return getattr(self.type, v) + except: + raise exc + + +class LowerCaseEnumOption(EnumOption): + def _convert(self, v): + try: + return 
super(LowerCaseEnumOption, self)._convert(v) + except Exception as exc: + try: + return getattr(self.type, v.lower()) + except: + raise exc + + +class Configuration(dict): + __options__ = {} + __name__ = "configuration" + __extra_options_strategy__ = ExtraOptionsStrategy.ignore + + def __init__(self, *args, **kw): + config = dict(*args, **kw) + options = self.__options__.copy() + + for k, v in config.copy().items(): + try: + option = options.pop(k) + except KeyError: + strategy = self.__extra_options_strategy__ + if strategy == ExtraOptionsStrategy.fatal: + raise ConfigurationError("Unknown configuration key in %s:" + " %s" % (self.__name__, k)) + if strategy == ExtraOptionsStrategy.prune: + del config[k] + elif strategy == ExtraOptionsStrategy.warn: + self.logger.warn("Unknown configuration key in %s: %s", + self.__name__, k) + else: + config[k] = option.convert(v) + + for k, v in options.items(): + if v.default is NOT_SET: + raise ConfigurationKeyError("Missing configuration key in" + " %s: %s" % + (self.__name__, k, )) + config[k] = v.default + + super(Configuration, self).__init__(config) + + def __getitem__(self, k): + try: + return dict.__getitem__(self, k) + except KeyError: + raise ConfigurationKeyError("Missing configuration key in" + " %s: %s" % + (self.__name__, k, )) + + def __getattr__(self, k, default=NOT_SET): + try: + return self[k] + except ConfigurationKeyError as exc: + if default is not NOT_SET: + return default + raise ConfigurationAttributeError(str(exc)) + + +class LogLevel(Enum): + trace = logging.DEBUG + debug = logging.DEBUG + warning = logging.WARNING + error = logging.ERROR + fatal = logging.FATAL diff --git a/build/lib/openmtc/exc.py b/build/lib/openmtc/exc.py new file mode 100644 index 0000000..7c060bc --- /dev/null +++ b/build/lib/openmtc/exc.py @@ -0,0 +1,13 @@ +from futile.net.exc import NetworkError + + +class OpenMTCError(Exception): + pass + + +class OpenMTCNetworkError(OpenMTCError, NetworkError): + pass + + +class ConnectionFailed(OpenMTCNetworkError): + pass diff --git a/build/lib/openmtc/mapper/__init__.py b/build/lib/openmtc/mapper/__init__.py new file mode 100644 index 0000000..e370ed7 --- /dev/null +++ b/build/lib/openmtc/mapper/__init__.py @@ -0,0 +1,97 @@ +from futile.logging import LoggerMixin +from futile import ObjectProxy +from openmtc.model import Collection +from openmtc.mapper.exc import MapperError + + +class MemberProxy(ObjectProxy): + def __get__(self, instance, owner=None): + if instance is None: + return self._o + + if not instance._synced: + if not _is_attached(instance) or self.name not in instance._changes: + instance._mapper._init_resource(instance) + return self._o.__get__(instance, owner) + + def __set__(self, instance, value): + if _is_attached(instance): + instance._changes.add(self._o.name) + return self._o.__set__(instance, value) + + +class MapperCollection(Collection): + def __init__(self, name, type, parent, collection=(), *args, **kw): + super(MapperCollection, self).__init__(name=name, type=type, + parent=parent, + collection=collection, *args, + **kw) + + def _handle_newitem(self, item): + if _is_attached(item) or item.path is not None: + raise NotImplementedError() + super(MapperCollection, self)._handle_newitem(item) + self._changes.added.add(item) + if _is_attached(self.parent): + self.parent._changes.collection_changes.add(self.name) + if self.parent.parent is not None: + self.parent.parent._changes.subresource_changes.add( + self.parent.name) + + +class BasicMapper(LoggerMixin): + def __init__(self, *args, **kw): 
+ super(BasicMapper, self).__init__(*args, **kw) + # self._patch_model() + self._send_request = lambda x: x + + def create(self, path, instance): + raise NotImplementedError() + + def update(self, instance, fields): + raise NotImplementedError() + + def _do_update(self, instance, fields): + raise NotImplementedError() + + def get(self, path): + raise NotImplementedError() + + def delete(self, instance): + raise NotImplementedError() + + def _get_data(self, path): + raise NotImplementedError() + + def _map(self, path, typename, data): + raise NotImplementedError() + + def _init_resource(self, res): + return self._fill_resource(res, self._get_data(res.path)[1]) + + def _make_subresource(self, type, path, parent): + subresource = type(path=path, parent=parent) + subresource._synced = False + # return self._attach_instance(subresource) + return subresource + + def _fill_resource(self, res, data): + raise NotImplementedError() + + @classmethod + def _patch_model(cls): + import openmtc.model as model + + model.Resource._synced = True + model.Resource._mapper = None + + for t in model.get_types(): + if "_initialized" not in t.__dict__: + setattr(t, "_initialized", True) + for a in t.__members__: + # TODO: deal with name differences + setattr(t, a.name, MemberProxy(a)) + for a in t.collections: + if a.type is not Collection: + raise NotImplementedError() + a.type = MapperCollection diff --git a/build/lib/openmtc/mapper/exc.py b/build/lib/openmtc/mapper/exc.py new file mode 100644 index 0000000..5d5510a --- /dev/null +++ b/build/lib/openmtc/mapper/exc.py @@ -0,0 +1,11 @@ +""" +Created on 02.06.2013 + +@author: kca +""" + +from openmtc.exc import OpenMTCError + + +class MapperError(OpenMTCError): + pass diff --git a/build/lib/openmtc/model/__init__.py b/build/lib/openmtc/model/__init__.py new file mode 100644 index 0000000..0a55181 --- /dev/null +++ b/build/lib/openmtc/model/__init__.py @@ -0,0 +1,706 @@ +from abc import ABCMeta +from collections import Sequence, OrderedDict, Mapping +from datetime import datetime +from enum import Enum +from iso8601 import parse_date, ParseError +from operator import attrgetter + +from futile import basestring, issubclass, NOT_SET +from futile.logging import LoggerMixin +from openmtc.model.exc import ModelError, ModelTypeError + + +class StrEnum(str, Enum): + pass + + +class Collection(Sequence, Mapping): + def __init__(self, name, type, parent, collection=(), *args, **kw): + super(Collection, self).__init__(*args, **kw) + self._map = OrderedDict() + self.type = type + self.parent = parent + self.name = name + for c in collection: + self.append(c) + + def __getitem__(self, index): + if isinstance(index, (int, slice)): + return self._map.values()[index] + return self._map[index] + + def __contains__(self, v): + return v in self._map or v in self._map.values() + + def append(self, v): + if not isinstance(v, self.type): + raise ModelTypeError(v) + + self._handle_newitem(v) + + assert v.name is not None, "name is None: %s %s" % (v, v.path) + self._map[v.name] = v + + add = append + + def get(self, k, default=None): + return self._map.get(k, default) + + def __iter__(self): + return self._map.itervalues() + + def __len__(self): + return len(self._map) + + def __delitem__(self, index): + if isinstance(index, int): + instance = self[index] + index = instance.name + del self._map[index] + + discard = __delitem__ + + def _handle_newitem(self, item): + if item.parent and item.parent is not self.parent: + # TODO ! 
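+ # The item is already attached to a different parent; re-parenting is
+ # not supported yet (hence the commented-out NotImplementedError below),
+ # so the collection leaves the existing parent untouched.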
+ return + # raise NotImplementedError() + item.parent = self.parent + + def __str__(self): + try: + return "openmtc.Collection(%s, %s)" % ( + self.name, self._map) + except AttributeError: + return "openmtc.Collection(%s)" % (self.__len__()) + + +class Member(LoggerMixin): + def __init__(self, type=unicode, version="1.0", *args, **kw): + super(Member, self).__init__(*args, **kw) + self.type = type + self.version = version + + def _init(self, name): + self.name = name + + def __set__(self, instance, value): + if value is not None and not isinstance(value, self.type): + value = self.convert(value, instance) + self.set_value(instance, value) + + def set_value(self, instance, value): + setattr(instance, "_" + self.name, value) + + def convert(self, value, instance): + try: + return self.type(value) + except (TypeError, ValueError): + raise ModelTypeError("Illegal value for %s (%s): %r" % + (self.name, self.type, value)) + + def __repr__(self): + return '%s(name="%s", type=%s)' % (type(self).__name__, self.name, + self.type.__name__) + + +class Attribute(Member): + RW = "RW" + RO = "RO" + WO = "WO" + + def __init__(self, type=unicode, default=None, + accesstype=None, mandatory=None, + update_mandatory=None, + id_attribute=None, path_attribute=None, + id_immutable=None, *args, **kw): + super(Attribute, self).__init__(type=type, *args, **kw) + + if path_attribute and id_attribute: + raise ModelError("Attribute can't be id_attribute and " + "path_attribute at the same time") + + self.default = default + self.id_attribute = id_attribute + self.path_attribute = path_attribute + self.id_immutable = id_immutable + + if accesstype is None: + if path_attribute: + accesstype = self.RO + elif id_attribute: + accesstype = self.WO + else: + accesstype = self.RW + self.accesstype = accesstype + + if mandatory is None: + if accesstype == self.WO: + mandatory = True + else: + mandatory = False + self.mandatory = mandatory + + if update_mandatory is None: + if accesstype == self.RW: + update_mandatory = mandatory + else: + update_mandatory = False + self.update_mandatory = update_mandatory + + def __get__(self, instance, owner=None): + if instance is None: + return self + try: + return getattr(instance, "_" + self.name) + except AttributeError: + return self.default + + +try: + unicode + + class UnicodeAttribute(Attribute): + def __init__(self, default=None, accesstype=None, + mandatory=False, *args, **kw): + super(UnicodeAttribute, self).__init__(type=unicode, + default=default, + accesstype=accesstype, + mandatory=mandatory, *args, + **kw) + + def convert(self, value, instance): + if isinstance(value, str): + return value.decode("utf-8") + return super(UnicodeAttribute, self).convert(value, instance) +except NameError: + UnicodeAttribute = Attribute + + +class DatetimeAttribute(Attribute): + def __init__(self, default=None, accesstype=None, + mandatory=False, *args, **kw): + super(DatetimeAttribute, self).__init__(type=datetime, + default=default, + accesstype=accesstype, + mandatory=mandatory, *args, + **kw) + + def convert(self, value, instance): + if isinstance(value, basestring): + try: + return parse_date(value) + except ParseError as e: + raise ValueError(str(e)) + return super(DatetimeAttribute, self).convert(value, instance) + + +class ListAttribute(Attribute): + def __init__(self, content_type=unicode, type=list, + default=NOT_SET, *args, **kw): + super(ListAttribute, self).__init__(type=type, + default=default, *args, **kw) + self.content_type = content_type + + def __get__(self, instance, 
owner=None): + if instance is None: + return self + + key = "_" + self.name + try: + return getattr(instance, key) + except AttributeError: + if self.default is NOT_SET: + subresource = self.type() + else: + subresource = self.default + setattr(instance, key, subresource) + return subresource + + def _convert_mapping(self, value, instance): + self.logger.debug("Creating %s from %s", self.content_type, value) + return self.content_type(**value) + + def convert_content(self, value, instance): + if isinstance(value, self.content_type): + return value + if issubclass(self.content_type, Entity): + if isinstance(value, Mapping): + return self._convert_mapping(value, instance) + raise ValueError("Illegal value for sequence '%s' (%s): %s (%s)" % + (self.name, self.content_type, value, type(value))) + return self.content_type(value) + + def set_value(self, instance, value): + if value: + value = self.type([self.convert_content(v, instance) + for v in value]) + super(ListAttribute, self).set_value(instance, value) + + +class StringListAttribute(Attribute): + def __init__(self, content_type=unicode, type=list, + default=NOT_SET, *args, **kw): + super(StringListAttribute, self).__init__(type=type, default=default, + *args, **kw) + self.content_type = content_type + + def __get__(self, instance, owner=None): + if instance is None: + return self + + key = "_" + self.name + try: + return getattr(instance, key) + except AttributeError: + if self.default is NOT_SET: + subresource = self.type() + else: + subresource = self.default + setattr(instance, key, subresource) + return subresource + + def convert(self, value, instance): + if isinstance(value, str): + return value.strip(' ').split(' ') + return super(StringListAttribute, self).convert(value, instance) + + def _convert_mapping(self, value, instance): + self.logger.debug("Creating %s from %s", self.content_type, value) + return self.content_type(**value) + + def convert_content(self, value, instance): + if isinstance(value, self.content_type): + return value + if issubclass(self.content_type, Entity): + if isinstance(value, Mapping): + return self._convert_mapping(value, instance) + raise ValueError("Illegal value for sequence '%s' (%s): %s (%s)" % + (self.name, self.content_type, value, type(value))) + return self.content_type(value) + + def set_value(self, instance, value): + if value: + value = self.type([self.convert_content(v, instance) + for v in value]) + super(StringListAttribute, self).set_value(instance, value) + + +class EntityAttribute(Attribute): + def __init__(self, type, default=None, accesstype=None, mandatory=None, + update_mandatory=None): + super(EntityAttribute, self).__init__(type=type, default=default, + accesstype=accesstype, + mandatory=mandatory, + update_mandatory=update_mandatory) + + def convert(self, value, instance): + if isinstance(value, Mapping): + self.logger.debug("Creating %s from %s", self.type, value) + return self.type(**value) + return super(EntityAttribute, self).convert(value, instance) + + +class CollectionMember(Member): + def __init__(self, content_type, type=Collection, *args, + **kw): # TODO: kca: use type for content_type + super(CollectionMember, self).__init__(type=type, *args, **kw) + self.content_type = content_type + + def convert(self, value, instance): + try: + return self.type(collection=value, name=self.name, + parent=instance, type=self.content_type) + except: + return super(CollectionMember, self).convert(value, instance) + + def __get__(self, instance, owner=None): + if instance is None: + 
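+            # Accessed on the class rather than on an instance: return the
+            # descriptor itself, as usual for the descriptor protocol.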
return self + + key = "_" + self.name + try: + return getattr(instance, key) + except AttributeError: + subresource = self.type(name=self.name, parent=instance, + type=self.content_type) + setattr(instance, key, subresource) + return subresource + + +class SubresourceMember(Member): + default = None + + def __init__(self, type, virtual=False, default=NOT_SET, *args, **kw): + if type and not issubclass(type, Resource): + raise TypeError(type) + + super(SubresourceMember, self).__init__(type=type, *args, **kw) + + def __get__(self, instance, owner=None): + if instance is None: + return self + + key = "_" + self.name + try: + v = getattr(instance, key) + if v is not None: + return v + except AttributeError: + pass + + # Here we automatically create missing subresources + # Might be a stupid idea to do it here + path = instance.path and instance.path + "/" + self.name or None + subresource = self.type( + path=path, + parent=instance + ) + + # TODO: needs to go into the appropriate resource type(s) + if hasattr(subresource, "creationTime"): + creation_time = instance.creationTime + subresource.creationTime = creation_time + subresource.lastModifiedTime = creation_time + + setattr(instance, key, subresource) + return subresource + + @property + def virtual(self): + return self.type.virtual + + +class ResourceType(ABCMeta): + def __init__(self, *args, **kw): + super(ResourceType, self).__init__(*args, **kw) + + if ("typename" not in self.__dict__ and + not self.__name__.endswith("Collection")): + self.typename = self.__name__[0].lower() + self.__name__[1:] + + self.id_attribute = self.path_attribute = None + attributes = self.attributes = [] + subresources = self.subresources = [] + collections = self.collections = [] + + for name in dir(self): + if name[0] != "_": + attr = getattr(self, name) + if isinstance(attr, Member): + if "_" in name: + name = name.replace("_", "-") + setattr(self, name, attr) + attr._init(name) + if isinstance(attr, SubresourceMember): + subresources.append(attr) + elif isinstance(attr, CollectionMember): + collections.append(attr) + else: + attributes.append(attr) + + if attr.id_attribute and attr.path_attribute: + raise ModelTypeError( + "Attribute %s of resource %s can only be " + "either id_attribute or path_attribute, not " + "both." 
% (name, self.__name__)) + + if attr.id_attribute: + if self.id_attribute is not None: + raise ModelTypeError( + "Resource %s defines more than one id " + "attribute: %s and %s" % + (self.__name__, self.id_attribute, name)) + self.id_attribute = attr.name + self.id_immutable = attr.id_immutable + + if attr.path_attribute: + if self.path_attribute is not None: + raise ModelTypeError( + "Resource %s defines more than one path " + "attribute: %s and %s" % + (self.__name__, self.id_attribute, name)) + self.path_attribute = attr.name + + self.__members__ = attributes + subresources + collections + + # TODO: caching + @property + def attribute_names(self): + return map(attrgetter("name"), self.attributes) + + @property + def collection_names(self): + return map(attrgetter("name"), self.collections) + + @property + def subresource_names(self): + return map(attrgetter("name"), self.subresources) + + @property + def member_names(self): + return map(attrgetter("name"), self.__members__) + + +class Entity(LoggerMixin): + __metaclass__ = ResourceType + + def __init__(self, *args, **kw): + self.set_values(kw) + + def set_values(self, values): + self.logger.debug("Setting values for entity of type %s with %s", + type(self), values) + values = values.copy() + + for member in self.__members__: + try: + v = values.pop(member.name) + if (v is not None and isinstance(member, ListAttribute) and + not isinstance(v, (list, tuple, set))): + l = [v] + v = l + setattr(self, member.name, v) + except KeyError: + try: + v = values.pop(member.name + "Reference") + # TODO: proper solution? + if (v is not None and isinstance(member, ListAttribute) and + not isinstance(v, (list, tuple, set))): + v = v.values()[0] + setattr(self, member.name, v) + except KeyError: + pass + + if values: + self._set_extra_values(values) + + def _set_extra_values(self, values): + """ + names = type(self).subresource_names + for k in values.keys(): + if k.strip("Reference") in names: + values.pop(k) + print names, values + from traceback import print_stack + print_stack() + """ + if values: + raise ModelTypeError("%s resource has no attribute %s" % + (self.typename, values.keys()[0])) + + @classmethod + def get_typename(cls): + return cls.typename + + def get_attribute_values(self, filter=False): + vals = {} + for attr in self.attributes: + a_name = attr.name + val = getattr(self, a_name) + if (val is None or val == '' or val == []) and filter: + continue + vals[a_name] = val + return vals + attribute_values = property(get_attribute_values) + + def get_values_representation(self, fields=None, internal=False): + vals = {} + id_attribute = self.id_attribute + for attr in self.attributes: + a_name = attr.name + if (fields is None or a_name == id_attribute or a_name in fields) \ + and (internal or attr.accesstype is not None): + val = getattr(self, "_" + a_name, None) + if val is None: + continue + if isinstance(attr, ListAttribute): + # TODO: return simple values. 
No representation + if attr.content_type is AnyURI: # any uri list + vals[a_name] = {"reference": val} + elif issubclass(attr.content_type, Entity): # complex list + vals[a_name] = { + a_name: [x.get_values_representation() for x in val] + } + else: # simple list + vals[a_name] = {a_name[:-1]: val} + elif isinstance(attr, EntityAttribute): + vals[a_name] = val.values + else: + try: + val = val.isoformat() + except AttributeError: + pass + vals[a_name] = val + return vals + + def get_values(self, filter=False): + return self.get_attribute_values(filter) + + @property + def values(self): + return self.get_values() + + @property + def subresource_values(self): + vals = {} + for attr in self.subresources: + vals[attr.name] = getattr(self, attr.name) + return vals + + +class ContentResource(Entity): + virtual = True + __model_name__ = None + __model_version__ = None + + def __init__(self, value, *args, **kw): + kw = {'CONTENT': value} + super(ContentResource, self).__init__(*args, **kw) + + @property + def values(self): + return self.get_values().get('CONTENT') + + +class Resource(Entity): + virtual = False + __model_name__ = None + __model_version__ = None + + def __init__(self, path=None, parent=None, *args, **kw): + if path is not None and not isinstance(path, basestring): + raise TypeError(path) + self.__path = path + self.parent = parent + super(Resource, self).__init__(*args, **kw) + + def get_path(self): + return self.__path + + def set_path(self, path): + self.__path = path + if self.id_attribute and getattr(self, self.id_attribute) is None: + setattr(self, self.id_attribute, path.rpartition("/")[-1]) + if self.path_attribute and getattr(self, self.path_attribute) is None: + setattr(self, self.path_attribute, path) + + path = property(get_path, set_path) + + @property + def parent_path(self): + if self.__path is not None: + return self.__path.rpartition("/")[0] + + # TODO: deprecated + @property + def name(self): + return self.basename + + @property + def basename(self): + if self.path is not None: + return self.path.rpartition("/")[-1] + if self.id_attribute is not None: + return getattr(self, self.id_attribute) + + def set_values(self, values): + values = values.copy() + + keys = [k for k in values.keys() if "_" in k] + for k in keys: + values[k.replace("_", "-")] = values.pop(k) + + path = self.path + if path is not None: + id_attribute = self.id_attribute + if (id_attribute is not None and + id_attribute not in values): + values[id_attribute] = path.rpartition("/")[-1] + + path_attribute = self.path_attribute + if (path_attribute is not None and + path_attribute not in values): + values[path_attribute] = path + + for member in self.__members__: + try: + v = values.pop(member.name) + # FIXME: move into de-serializer and handle dicts + if (v is not None and isinstance(member, ListAttribute) and + not isinstance(v, (list, tuple, set))): + v = v.values()[0] + setattr(self, member.name, v) + except KeyError: + try: + v = values.pop(member.name + "Reference") + # TODO: proper solution? 
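+                    # Fall back to values keyed as "<name>Reference"; for list
+                    # attributes the wrapper mapping is unwrapped by taking its
+                    # single value.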
+ if (v is not None and isinstance(member, ListAttribute) and + not isinstance(v, (list, tuple, set))): + v = v.values()[0] + setattr(self, member.name, v) + except KeyError: + pass + + if values: + self._set_extra_values(values) + + def __repr__(self): + return "%s(path='%s', name='%s')" % (type(self).__name__, self.path, + self.name) + + def __eq__(self, o): + try: + return self.path == o.path + except AttributeError: + return False + + def __ne__(self, o): + return not self.__eq__(o) + + +class FlexibleAttributesMixin(object): + def __init__(self, path=None, parent=None, *args, **kw): + self._flex_attrs = set() + + super(FlexibleAttributesMixin, self).__init__(path=path, parent=parent, + *args, **kw) + + def __setattr__(self, k, v): + if not k.startswith("_") and not hasattr(self, k) and k != "parent": + self._flex_attrs.add(k) + + return super(FlexibleAttributesMixin, self).__setattr__(k, v) + + def __delattr__(self, k): + self._flex_attrs.discard(k) + + return super(FlexibleAttributesMixin, self).__delattr__(k) + + @property + def flex_values(self): + return {k: getattr(self, k) for k in self._flex_attrs} + + def get_values(self, filter=False): + vals = super(FlexibleAttributesMixin, self).get_values(filter) + vals.update(self.flex_values) + return vals + + def get_values_representation(self, fields=None, internal=False): + r = super(FlexibleAttributesMixin, self) \ + .get_values_representation(fields=fields, internal=internal) + if fields is None: + r.update(self.flex_values) + return r + + def _set_extra_values(self, values): + for k, v in values.items(): + setattr(self, k, v) + + +class AnyURI(str): + pass + + +class AnyURIList(Entity): + reference = ListAttribute(mandatory=False) diff --git a/build/lib/openmtc/model/exc.py b/build/lib/openmtc/model/exc.py new file mode 100644 index 0000000..87fc14e --- /dev/null +++ b/build/lib/openmtc/model/exc.py @@ -0,0 +1,14 @@ +''' +Created on 26.05.2013 + +@author: kca +''' +from openmtc.exc import OpenMTCError + + +class ModelError(OpenMTCError): + pass + + +class ModelTypeError(ModelError, TypeError): + pass diff --git a/build/lib/openmtc/util.py b/build/lib/openmtc/util.py new file mode 100644 index 0000000..8465bfd --- /dev/null +++ b/build/lib/openmtc/util.py @@ -0,0 +1,39 @@ +from datetime import datetime, timedelta, tzinfo +import time + +ZERO = timedelta(0) + + +class Utc(tzinfo): + """UTC + """ + __slots__ = () + + def utcoffset(self, dt): + return ZERO + + def tzname(self, dt): + return "UTC" + + def dst(self, dt): + return ZERO + +UTC = Utc() + + +#del Utc + + +def datetime_now(): + return datetime.now(UTC) + + +def datetime_the_future(offset = 0): + """ Returns a datetime instance seconds in the future. 
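+    Illustrative example: datetime_the_future(60) returns a timezone-aware
+    UTC datetime roughly one minute from now.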
+ @note: if no offset is provided or offset == 0, this is equivalent to datetime_now + @param offset: seconds from now + @return: datetime in seconds + """ + f = time.time() + offset + return datetime.fromtimestamp(f, UTC) + diff --git a/build/lib/openmtc/version.py b/build/lib/openmtc/version.py new file mode 100644 index 0000000..06ce668 --- /dev/null +++ b/build/lib/openmtc/version.py @@ -0,0 +1 @@ +VERSION="4.0.0" \ No newline at end of file diff --git a/build/lib/openmtc_app/__init__.py b/build/lib/openmtc_app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/build/lib/openmtc_app/exc.py b/build/lib/openmtc_app/exc.py new file mode 100644 index 0000000..71ac30c --- /dev/null +++ b/build/lib/openmtc_app/exc.py @@ -0,0 +1,2 @@ +class OpenMTCAppError(Exception): + pass diff --git a/build/lib/openmtc_app/flask_runner/__init__.py b/build/lib/openmtc_app/flask_runner/__init__.py new file mode 100644 index 0000000..2b102dd --- /dev/null +++ b/build/lib/openmtc_app/flask_runner/__init__.py @@ -0,0 +1,89 @@ +from signal import SIGTERM, SIGINT + +from flask import (Flask, request, abort, redirect, url_for, + Response as FlaskResponse) + +from gevent import signal as gevent_signal +from gevent.pywsgi import WSGIServer +from geventwebsocket.handler import WebSocketHandler +from socketio import Server as SioServer, Middleware as SioMiddleware + +from futile.net.http.exc import HTTPError +from openmtc_app.runner import AppRunner + + +class Response(FlaskResponse): + pass + + +class SimpleFlaskRunner(AppRunner): + def __init__(self, m2m_app, port=None, listen_on="0.0.0.0", *args, **kw): + super(SimpleFlaskRunner, self).__init__(m2m_app=m2m_app, *args, **kw) + + self.port = port or 5050 + self.listen_on = listen_on + self.flask_app = Flask(type(self.m2m_app).__module__) + + def _get_server(self): + return WSGIServer((self.listen_on, self.port), self.flask_app) + + def _run(self): + self.m2m_app.run(self, self.m2m_ep) + + _server = self._get_server() + self.logger.debug("Serving on %s:%s", self.listen_on, self.port) + gevent_signal(SIGTERM, _server.stop) + gevent_signal(SIGINT, _server.stop) + _server.serve_forever() + + def add_route(self, route, handler, methods=("POST", "GET")): + def wrapper(): + try: + return handler(request) + except HTTPError as e: + self.logger.exception("Aborting") + abort(e.status) + + self.logger.debug("Adding route: %s -> %s" % (route, handler)) + self.flask_app.add_url_rule(route, view_func=wrapper, + endpoint=route + str(handler), + methods=methods) + + +class FlaskRunner(SimpleFlaskRunner): + def __init__(self, m2m_app, port=None, listen_on="0.0.0.0", *args, **kw): + super(FlaskRunner, self).__init__(m2m_app=m2m_app, port=port, + listen_on=listen_on, *args, **kw) + + @self.flask_app.route("/") + def home(): + return redirect(url_for('static', filename='index.html')) + + self.sio_app = SioServer(async_mode='gevent') + + @self.sio_app.on('connect') + def connect(sid, environ): + self.logger.debug('client connected: %s' % sid) + + def _get_server(self): + return WSGIServer((self.listen_on, self.port), + SioMiddleware(self.sio_app, self.flask_app), + handler_class=WebSocketHandler) + + def emit(self, event, message=None, sid=None): + self.sio_app.emit(event, message, room=sid) + + def get_handler_decorator(self, name): + return self.sio_app.on(name) + + def add_message_handler(self, name, handler, client=False, response=False): + + def wrapper(*args, **kw): + if not client: + args = args[1:] + if response: + return handler(*args, **kw) + else: + 
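+                # No response expected: the handler's return value is simply
+                # discarded.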
handler(*args, **kw) + + self.sio_app.on(name, wrapper) diff --git a/build/lib/openmtc_app/notification/__init__.py b/build/lib/openmtc_app/notification/__init__.py new file mode 100644 index 0000000..9da2470 --- /dev/null +++ b/build/lib/openmtc_app/notification/__init__.py @@ -0,0 +1,276 @@ +from gevent import spawn +from gevent.pywsgi import WSGIServer +from inspect import getargspec +from futile.logging import LoggerMixin +from openmtc_onem2m.exc import OneM2MError +from openmtc_onem2m.model import ( + EventNotificationCriteria, + NotificationEventTypeE, + Subscription, +) +from openmtc_onem2m.serializer import get_onem2m_decoder +from urlparse import urlparse + +from openmtc_onem2m.util import split_onem2m_address + +_handler_map = {} + + +def register_handler(cls, schemes=()): + _handler_map.update({ + scheme: cls for scheme in map(str.lower, schemes) + }) + + +def get_handler(scheme, poa, callback_func, ssl_certs=None): + return _handler_map[scheme](poa, callback_func, ssl_certs) + + +class NotificationManager(LoggerMixin): + handlers = [] + endpoints = [] + callbacks = {} + + def __init__(self, poas, ep, onem2m_mapper, ca_certs=None, cert_file=None, key_file=None): + """ + :param list poas: + :param str ep: + :param openmtc_onem2m.mapper.OneM2MMapper onem2m_mapper: + """ + self.mapper = onem2m_mapper + self.sp_id, self.cse_id, _ = split_onem2m_address(onem2m_mapper.originator) + self.ssl_certs = { + 'ca_certs': ca_certs, + 'cert_file': cert_file, + 'key_file': key_file + } + + for poa in map(urlparse, poas): + if poa.hostname == 'auto': + poa = poa._replace(netloc="%s:%s" % (self._get_auto_host(ep), poa.port)) + + if not poa.scheme: + poa = poa._replace(scheme='http') + + try: + self.handlers.append(get_handler(poa.scheme, poa, self._handle_callback, + self.ssl_certs)) + self.endpoints.append(poa.geturl()) + except: + pass + + self.logger.debug('Available POAs: %s' % ', '.join(self.endpoints)) + + super(NotificationManager, self).__init__() + + @staticmethod + def _get_auto_host(ep): + try: + import socket + from urlparse import urlparse + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + netloc = urlparse(ep).netloc.split(':') + s.connect((netloc[0], int(netloc[1]))) + host = s.getsockname()[0] + s.close() + except: + host = "127.0.0.1" + + return host + + def _normalize_path(self, path): + path = path[len(self.sp_id):] if path.startswith(self.sp_id) and self.sp_id else path + path = path[len(self.cse_id) + 1:] if path.startswith(self.cse_id) and self.cse_id else path + return path + + def _init(self): + for handler in self.handlers: + try: + handler.start() + except: + pass + + def nop(): + pass + self._init = nop + + def register_callback(self, func, sur): + self.callbacks[sur] = func if len(getargspec(func)[0]) > 1 \ + else lambda _, **notification: func(notification['rep']) + + def _handle_callback(self, originator, **notification): + sur = notification.pop('sur') + sur = self._normalize_path(sur) + + try: + callback = self.callbacks[sur] + except KeyError: + if not sur.startswith('/'): + # TODO(rst): maybe not the best, check alternatives + # assumes originator is always in the form //SP-ID/CSE-ID + sur = originator[originator.rfind('/'):] + '/' + sur + try: + callback = self.callbacks[sur] + except KeyError: + return + else: + return + try: + spawn(callback, originator, **notification) + except: + pass + + def get_expiration_time(self): + return None + + def subscribe(self, path, func, filter_criteria=None, expiration_time=None, + 
notification_types=(NotificationEventTypeE.updateOfResource, )): + self._init() + + event_notification_criteria = filter_criteria or EventNotificationCriteria() + event_notification_criteria.notificationEventType = ( + event_notification_criteria.notificationEventType or list(notification_types)) + + subscription = self.mapper.create(path, Subscription( + notificationURI=[self.mapper.originator], + expirationTime=expiration_time or self.get_expiration_time(), + eventNotificationCriteria=event_notification_criteria, + )) + + reference = self._normalize_path(subscription.subscriberURI or subscription.path) + self.register_callback(func, reference) + return subscription + + def unsubscribe(self, sur): + self.mapper.delete(sur) + del self.callbacks[sur] + + def shutdown(self): + for subscription in self.callbacks.keys(): + try: + self.unsubscribe(subscription) + except OneM2MError: + pass + + for handler in self.handlers: + try: + handler.stop() + except: + pass + + +class BaseNotificationHandler(object): + def __init__(self, poa, callback_func, ssl_certs=None): + self._endpoint = poa + self._callback = callback_func + self._ssl_certs = ssl_certs + + @classmethod + def _unpack_notification(cls, notification): + return { + 'sur': notification.subscriptionReference, + 'net': notification.notificationEvent.notificationEventType, + 'rep': notification.notificationEvent.representation, + } + + def start(self): + raise NotImplementedError + + def stop(self): + pass + + +class MqttNotificationHandler(BaseNotificationHandler): + _client = None + + def start(self): + from openmtc_onem2m.client.mqtt import get_client + from openmtc_onem2m.transport import OneM2MResponse + from openmtc_onem2m.exc import get_response_status + + def wrapper(request): + notification = self._unpack_notification(request.content) + self._callback(request.originator, **notification) + return OneM2MResponse(status_code=get_response_status(2002), request=request) + + self._client = get_client(self._endpoint.geturl(), handle_request_func=wrapper) + + def stop(self): + self._client.stop() + +register_handler(MqttNotificationHandler, ('mqtt', 'mqtts', 'secure-mqtt')) + + +class HttpNotificationHandler(BaseNotificationHandler): + server = None + + def __init__(self, poa, callback_func, ssl_certs=None): + super(HttpNotificationHandler, self).__init__(poa, callback_func, ssl_certs) + + self.ca_certs = ssl_certs.get('ca_certs') + self.cert_file = ssl_certs.get('cert_file') + self.key_file = ssl_certs.get('key_file') + + # TODO(rst): maybe tis needs to be tested when the server is started + if poa.scheme == 'https' and not (self.ca_certs and self.cert_file and self.key_file): + raise Exception() + + def start(self): + from flask import ( + Flask, + request, + Response, + ) + from gevent import signal as gevent_signal + from signal import ( + SIGTERM, + SIGINT, + ) + + app = Flask(__name__) + + @app.after_request + def attach_headers(response): + response.headers['x-m2m-ri'] = request.headers['x-m2m-ri'] + return response + + @app.route('/', methods=['POST']) + def index(): + assert 'x-m2m-origin' in request.headers, 'No originator set' + assert 'x-m2m-ri' in request.headers, 'Missing request id' + assert 'content-type' in request.headers, 'Unspecified content type' + + notification = self._unpack_notification(get_onem2m_decoder(request.content_type).decode(request.data)) + self._callback(request.headers['x-m2m-origin'], **notification) + + return Response( + headers={ + 'x-m2m-rsc': 2000, + }, + ) + + if self._endpoint.scheme == 'https': 
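+            # For https endpoints the WSGI server is created with the TLS
+            # material collected in __init__ (key, certificate, CA bundle);
+            # plain http endpoints get an unencrypted server. Both branches
+            # default to port 6050 when the point of access omits a port.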
+ self.server = WSGIServer( + ( + self._endpoint.hostname, + self._endpoint.port or 6050 + ), + application=app, + keyfile=self.key_file, certfile=self.cert_file, ca_certs=self.ca_certs + ) + else: + self.server = WSGIServer( + ( + self._endpoint.hostname, + self._endpoint.port or 6050 + ), + application=app, + ) + gevent_signal(SIGINT, self.server.stop) + gevent_signal(SIGTERM, self.server.stop) + spawn(self.server.serve_forever) + + def stop(self): + self.server.stop() + +register_handler(HttpNotificationHandler, ('http', 'https')) diff --git a/build/lib/openmtc_app/onem2m.py b/build/lib/openmtc_app/onem2m.py new file mode 100644 index 0000000..b0b335d --- /dev/null +++ b/build/lib/openmtc_app/onem2m.py @@ -0,0 +1,744 @@ +from base64 import ( + b64decode, + b64encode, +) +from datetime import datetime +from gevent import ( + spawn, + spawn_later, +) +from iso8601 import parse_date +from json import ( + dumps as json_dumps, + loads as json_loads, +) +from futile.logging import LoggerMixin +import logging +from openmtc.util import ( + UTC, + datetime_now, + datetime_the_future, +) +from openmtc_app.flask_runner import FlaskRunner +from openmtc_app.notification import NotificationManager +from openmtc_onem2m.exc import ( + CSENotFound, + CSENotImplemented, + STATUS_CONFLICT, +) +from openmtc_onem2m.mapper import OneM2MMapper +from openmtc_onem2m.model import ( + AE, + Container, + ContentInstance, + EncodingTypeE, + get_short_member_name, + NotificationEventTypeE, + EventNotificationCriteria) +from openmtc_onem2m.serializer import get_onem2m_decoder +from openmtc_onem2m.transport import OneM2MErrorResponse +import time +import re +from urllib import urlencode + +logging.getLogger("iso8601").setLevel(logging.ERROR) + +# fix missing SSLv3 +try: + from gevent.ssl import PROTOCOL_SSLv3 +except ImportError: + import gevent.ssl + + gevent.ssl.PROTOCOL_SSLv3 = gevent.ssl.PROTOCOL_TLSv1 + + +class XAE(LoggerMixin): + """ Generic OpenMTC application class. + Implements functionality common to all typical OpenMTC applications. 
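+    This covers registering the AE with the CSE, creating containers and
+    content instances, subscribing to resources, and periodically refreshing
+    expirationTime values so that registrations do not expire.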
+ """ + + # TODO(rst): add more features + # support several AEs using the same App-ID and appName + + name = None + containers = () + labels = () + # default_access_right = True + default_lifetime = 3600 + max_nr_of_instances = 3 + resume_registration = remove_registration = True + notification_handlers = {} + mapper = None + notification_manager = None + __app = None + + def __init__(self, name=None, cse_base=None, expiration_time=None, announce_to=None, poas=None, + originator_pre=None, ca_certs=None, cert_file=None, key_file=None, *args, **kw): + super(XAE, self).__init__(*args, **kw) + + self.__subscriptions = [] + + self.name = name or type(self).__name__ + self.cse_base = cse_base or "onem2m" + + ae_id = "C" + self.name + self.originator = (originator_pre + '/' + ae_id) if originator_pre else ae_id + + self.ca_certs = ca_certs + self.cert_file = cert_file + self.key_file = key_file + + if expiration_time is not None: + if isinstance(expiration_time, (str, unicode)): + expiration_time = parse_date(expiration_time) + elif isinstance(expiration_time, (int, float)): + expiration_time = datetime.fromtimestamp(expiration_time, UTC) + + if not isinstance(expiration_time, datetime): + raise ValueError(expiration_time) + + self.default_lifetime = (expiration_time - datetime_now()).total_seconds() + + self.announceTo = announce_to + + self.__resumed_registration = False + self.__known_containers = set() + self.__shutdown = False + + self.allow_duplicate = None + self.runner = None + self.poas = poas or [] + + self.fmt_json_regex = re.compile(r'^application/(?:[^+]+\+)?json$', re.IGNORECASE) + self.fmt_xml_regex = re.compile(r'^application/(?:[^+]+\+)?xml$', re.IGNORECASE) + + def get_expiration_time(self): + if self.default_lifetime is None: + return None + return datetime_the_future(self.default_lifetime) + + @property + def application(self): + return self.__app + + def run(self, runner, cse, allow_duplicate=True): + self.mapper = OneM2MMapper(cse, originator=self.originator, ca_certs=self.ca_certs, + cert_file=self.cert_file, key_file=self.key_file) + self.notification_manager = NotificationManager(self.poas, cse, self.mapper, + ca_certs=self.ca_certs, + cert_file=self.cert_file, + key_file=self.key_file) + + self.allow_duplicate = allow_duplicate + self.runner = runner + self.register() + + def shutdown(self): + """ Graceful shutdown. + Deletes all Applications and Subscriptions. 
+ """ + try: + self._on_shutdown() + except: + self.logger.exception("Error in shutdown handler") + + self.logger.debug("shutdown handler is finished") + + self.__shutdown = True + + self.notification_manager.shutdown() + + self._remove_apps() + + def _remove_apps(self): + if self.remove_registration: + try: + if self.__app: + self.mapper.delete(self.__app) + except: + pass + self.logger.debug("app deleted") + + @staticmethod + def run_forever(period=1000, func=None, *args, **kw): + """ executes a given function repeatingly at a given interval + :param period: (optional) frequency of repeated execution (in Hz) + :param func: (optional) function to be executed + """ + + if func is None: + def func(*_): + pass + + def run_periodically(): + func(*args, **kw) + spawn_later(period, run_periodically) + + return spawn(run_periodically) + + def periodic_discover(self, path, fc, interval, cb, err_cb=None): + """ starts periodic discovery at a given frequency + :param path: start directory inside cse for discovery + :param fc: filter criteria (what to discover) + :param interval: frequency of repeated discovery (in Hz) + :param cb: callback function to return the result of the discovery to + :param err_cb: (optional) callback function for errors to return the error of the discovery to + """ + if not isinstance(fc, dict): + fc = {} + + def run_discovery(o): + try: + cb(self.discover(path, o)) + except OneM2MErrorResponse as error_response: + if err_cb: + return err_cb(error_response) + else: + o['createdAfter'] = datetime_now() + + spawn_later(interval, run_discovery, o) + + return spawn(run_discovery, fc) + + def register(self): + """ Registers the Application with the CSE. """ + self.logger.info("Registering application as %s." % (self.name,)) + try: + poa = self.notification_manager.endpoints + except AttributeError: + poa = [] + app = AE(resourceName=self.name, labels=list(self.labels), + pointOfAccess=poa) + app.announceTo = self.announceTo + app.requestReachability = bool(poa) + + try: + registration = self.create_application(app) + except OneM2MErrorResponse as error_response: + if error_response.response_status_code is STATUS_CONFLICT: + registration = self._handle_registration_conflict(app) + if not registration: + raise + else: + self.logger.error('Error at start up') + self.logger.error(error_response.response_status_code) + raise SystemExit + self.__app = registration + + assert registration.path + + try: + self._on_register() + except (KeyboardInterrupt, SystemExit): + raise + except: + self.logger.exception("Error on initialization") + raise + + def _handle_registration_conflict(self, app): + if not self.resume_registration: + return None + # TODO(rst): update app here for expiration_time and poas + + app = self.get_application(app) + + self.__start_refresher(app) + + self.__resumed_registration = True + + return app + + def emit(self, event, message=None): + """ Websocket emit. """ + if not isinstance(self.runner, FlaskRunner): + raise RuntimeError('Runner is not supporting emit!') + self.runner.emit(event, message) + + def _on_register(self): + pass + + def _on_shutdown(self): + pass + + def get_application(self, application, path=None): + """ Retrieves an Application resource. 
+ :param application: old app instance or appId + :param path: (optional) path in the resource tree + """ + if path is None: + # FIXME(rst): use app path and not cse base path + path = self.cse_base + + if not isinstance(application, AE): + application = AE(resourceName=application) + + name = application.resourceName + + path = "%s/%s" % (path, name) if path else name + app = self.mapper.get(path) + + self.logger.debug("retrieved app: %s" % app) + + return app + + def create_application(self, application, path=None): + """ Creates an Application resource. + + :param application: Application instance or appId as str + :param path: (optional) path in the resource tree + """ + # TODO(rst): set app_id via config + # TODO(rst): set requestReachability based on used runner + if path is None: + path = self.cse_base + + def restore_app(app): + self.logger.warn("Restoring app: %s", app.path) + app.expirationTime = None + self.create_application(app, path=path) + + if not isinstance(application, AE): + application = AE(resourceName=application, App_ID='dummy', requestReachability=False) + else: + if not application.App_ID: + application.App_ID = 'dummy' + if not application.requestReachability: + application.requestReachability = False + + application.expirationTime = application.expirationTime or self.get_expiration_time() + app = self.mapper.create(path, application) + self.logger.debug("Created application at %s", app.path) + app = self.get_application(application, path) + assert app.path + self.__start_refresher(app, restore=restore_app) + self.logger.info("Registration successful: %s." % (app.path,)) + + # TODO(rst): handle when ACP is reimplemented + # if accessRight: + # if not isinstance(accessRight, AccessRight): + # accessRight = AccessRight( + # id="ar", + # selfPermissions={"permission": [{ + # "id": "perm", + # "permissionFlags": { + # "flag": ["READ", "WRITE", "CREATE", "DELETE"] + # }, + # "permissionHolders": { + # "all": "all" + # } + # }]}, + # permissions={"permission": [{ + # "id": "perm", + # "permissionFlags": { + # "flag": ["READ", "WRITE", "CREATE", "DELETE"] + # }, + # "permissionHolders": { + # "all": "all" + # } + # }]} + # ) + # accessRight = self.create_accessRight(app, accessRight) + # + # app.accessRightID = accessRight.path + # + # self.mapper.update(app, ("accessRightID",)) + + return app + + # TODO(rst): use FilterCriteria from model and convert + def discover(self, path=None, filter_criteria=None, unstructured=False): + """ Discovers Container resources. + + :param path: (optional) the target path to start the discovery + :param filter_criteria: (optional) FilterCriteria for the for the discovery + :param unstructured: (optional) set discovery_result_type + """ + if path is None: + path = self.cse_base + + # TODO(rst): use filter_criteria from model + if not filter_criteria: + filter_criteria = {} + path += "?fu=1" + if filter_criteria: + path += "&" + urlencode( + { + get_short_member_name(k): v for k, v in filter_criteria.iteritems() + }, + True + ) + + path += '&drt' + str(1 if unstructured else 2) + + discovery = self.mapper.get(path) + + return discovery.CONTENT + + def create_container(self, target, container, labels=None, max_nr_of_instances=None): + """ Creates a Container resource. 
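+        Illustrative call (names are examples only):
+            self.create_container(None, "temperature", labels=["openmtc:sensor_data"])
+        Passing None as target parents the container under this application.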
+ + :param target: the target resource/path parenting the Container + :param container: the Container resource or a valid container ID + :param labels: (optional) the container's labels + :param max_nr_of_instances: (optional) the container's maximum number + of instances (0=unlimited) + """ + + def restore_container(c): + self.logger.warn("Restoring container: %s", c.path) + c.expirationTime = None + self.__known_containers.remove(c.path) + self.create_container(target, c, labels=labels) + + if target is None: + target = self.__app + + if not isinstance(container, Container): + container = Container(resourceName=container) + + # if we got max instances..set them + if max_nr_of_instances: + container.maxNrOfInstances = max_nr_of_instances + # if we did not set max instances yet, set them + else: + container.maxNrOfInstances = self.max_nr_of_instances + + if container.expirationTime is None: + container.expirationTime = self.get_expiration_time() + + if labels: + container.labels = labels + + path = getattr(target, "path", target) + + try: + container = self.mapper.create(path, container) + except OneM2MErrorResponse as error_response: + if error_response.response_status_code is STATUS_CONFLICT: + c_path = path + '/' + container.resourceName + container.path = c_path + if (self.__resumed_registration and + c_path not in self.__known_containers): + container = self.mapper.update(container) + else: + raise error_response + else: + raise error_response + + self.__known_containers.add(container.path) + self.__start_refresher(container, restore=restore_container) + self.logger.info("Container created: %s." % (container.path,)) + return container + + # TODO(rst): handle when ACP is reimplemented + # def create_access_right(self, application, accessRight): + # """ Creates an AccessRight resource. + # + # :param application: the Application which will contain the AR + # :param accessRight: the AccessRight instance + # """ + # self.logger.debug("Creating accessRight for %s", application) + # + # if application is None: + # application = self.__app + # assert application.path + # + # path = getattr(application, "path", application) + # + # if not path.endswith("/accessRights"): + # path += "/accessRights" + # + # accessRight = self.mapper.create(path, accessRight) + # accessRight = self.mapper.get(accessRight.path) + # self.__start_refresher(accessRight, extra_fields=["selfPermissions"]) + # self.logger.info("accessRight created: %s." % (accessRight.path,)) + # return accessRight + # + # create_accessRight = create_access_right + + def get_resource(self, path, app_local=False): + if app_local: + path = self.__app.path + '/' + path + + if not path: + return None + + try: + return self.mapper.get(path) + except OneM2MErrorResponse: + return None + + def push_content(self, container, content, fmt=None, text=None): + """ Creates a ContentInstance resource in the given container, + wrapping the content. + Defaults to serialising the content as JSON and base64 encodes it. + NOTE: Will attempt to create the container, if not found. 
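+        For example, push_content(container, {"value": 21.5}) is stored as
+        base64-encoded JSON, while push_content(container, "on") is stored as
+        plain text.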
+ + :param container: Container object or container path string + :param content: the content data + :param fmt: + :param text: + """ + path = getattr(container, "path", container) + + if isinstance(content, (str, unicode)): + fmt = 'text/plain' if fmt is None else fmt + text = True if text is None else text + elif isinstance(content, (dict, list)): + fmt = 'application/json' if fmt is None else fmt + text = False if text is None else text + else: + raise CSENotImplemented("Only dict, list and str are supported!") + + if re.search(self.fmt_json_regex, fmt): + if text: + # TODO(rst): check if it should be with masked quotation marks + con = json_dumps(content) + cnf = fmt + ':' + str(EncodingTypeE.plain.value) + # raise CSENotImplemented("Only json as b64 is supported!") + else: + con = b64encode(json_dumps(content)) + cnf = fmt + ':' + str(EncodingTypeE.base64String.value) + elif fmt == 'text/plain': + if text: + con = content + cnf = fmt + ':' + str(EncodingTypeE.plain.value) + else: + con = b64encode(content) + cnf = fmt + ':' + str(EncodingTypeE.base64String.value) + else: + # TODO(rst): add handling of other formats or raise not implemented + raise CSENotImplemented("Only json and text are supported!") + + return self.mapper.create(path, ContentInstance( + content=con, + contentInfo=cnf, + )) + + @staticmethod + def _get_content_from_cin(cin): + if isinstance(cin, ContentInstance): + # TODO(rst): handle contentInfo and decode + # resource.contentInfo -> application/json:1 + # media, encodingType = split(':') + # encodingType = 1 -> base64.decodeString(resource.content) + # encodingType = 2 -> not supported + media_type, encoding_type = cin.contentInfo.split(':') + content = cin.content + try: + if int(encoding_type) == EncodingTypeE.base64String: + content = b64decode(content) + + if media_type == 'application/json': + content = json_loads(content) + except ValueError: + pass + + return content + + return cin + + def get_content(self, container): + """ Retrieve the latest ContentInstance of a Container. + + :param container: Container object or container path string + """ + return self._get_content_from_cin( + self.mapper.get( + getattr(container, 'path', container) + '/latest' + ) + ) + + def _get_notification_data(self, data, content_type): + try: + return get_onem2m_decoder(content_type).\ + decode(data).\ + notificationEvent.\ + representation + # serializer = get_onem2m_decoder(content_type) + # notification = serializer.decode(data) + # resource = notification.notificationEvent.representation + # return resource + except (KeyError, TypeError, ValueError, IndexError): + self.logger.error("Failed to get notification data from %s" % data) + return None + + def _remove_route(self, route): + self.logger.debug("removing route: %s", route) + self.runner.flask_app.url_map._rules = filter( + lambda x: x.rule != route, + self.runner.flask_app.url_map._rules + ) + + def _add_subscription(self, path, _, handler, delete_handler, filter_criteria=None, expiration_time=None): + params = { + 'filter_criteria': filter_criteria, + 'expiration_time': expiration_time, + } + self.add_subscription_handler(path, handler, **params) + # self.notification_manager.subscribe(path, handler, **params) + if delete_handler: + params['types'] = (NotificationEventTypeE.deleteOfResource,) + self.add_subscription_handler(path, delete_handler, **params) + + def add_subscription(self, path, handler, delete_handler=None): + """ Creates a subscription resource at path. 
+ And registers handler to receive notification data. + + :param path: path to subscribe to + :param handler: notification handler + :param delete_handler: reference to delete handling function + """ + self._add_subscription(path, None, handler, delete_handler) + + def add_subscription_handler(self, path, handler, types=(NotificationEventTypeE.updateOfResource, ), + filter_criteria=None, expiration_time=None): + """ + + :param path: + :param handler: + :param types: + :param filter_criteria: + :param expiration_time: + :return: + """ + def subscribe(): + return self.notification_manager.subscribe( + path, + handler, + notification_types=types, + filter_criteria=filter_criteria, + expiration_time=expiration_time + ) + + subscription = subscribe() + + def restore_subscription(): + # called to recreate the subscription + # for some reason subscription is not assigned here, + # so we make it a parameter + self.logger.warn("Restoring subscription: %s", subscription.name) + self.notification_manager.unsubscribe(subscription.subscriberURI or subscription.path) + subscribe() + + # refresh expirationTime regularly + # TODO(sho): This should rather be handled through the notification manager itself + self.__start_refresher(subscription, restore=restore_subscription) + return subscription + + def add_container_subscription(self, container, handler, + delete_handler=None, filter_criteria=None): + """ Creates a Subscription to the ContentInstances of the given + Container. + + :param container: Container object or container path string + :param handler: reference of the notification handling function + :param delete_handler: reference to delete handling function + :param filter_criteria: (optional) FilterCriteria for the subscription + """ + + path = getattr(container, "path", container) + + # check if target is container + if not isinstance(self.mapper.get(path), Container): + raise RuntimeError('Target is not a container.') + + # event notification criteria + filter_criteria = filter_criteria or EventNotificationCriteria() + filter_criteria.notificationEventType = list([ + NotificationEventTypeE.createOfDirectChildResource, + ]) + + def content_handler(cin): + handler(path, self._get_content_from_cin(cin)) + + self._add_subscription( + path, + None, + content_handler, + delete_handler, + filter_criteria + ) + + def __start_refresher(self, instance, extra_fields=(), restore=None): + """ Starts a threading.Timer chain, + to repeatedly update a resource instance's expirationTime. + NOTE: instance.expirationTime should already be set and the instance + created. + + :param instance: resource instance + :param extra_fields: additional fields, needed in the update request + :param restore: function that will restore the instance, if it has + expired accidentally. Has to restart the refresher. 
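+        The refresh timer fires roughly one minute before expirationTime for
+        lifetimes above two minutes, otherwise after about 75% of the
+        remaining lifetime (at least one second).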
+ """ + if not instance.expirationTime: + return + interval = time.mktime(instance.expirationTime.timetuple()) - (time.time() + time.timezone) + if interval > 120: + interval -= 60 + else: + interval = max(1, interval * 0.75) + + self.logger.debug("Will update expiration time of %s in %s seconds", instance, interval) + self.runner.set_timer(interval, self.__update_exp_time, instance=instance, extra_fields=extra_fields, restore=restore) + + def start_refresher(self, instance, extra_fields=(), restore=None): + self.__start_refresher(instance, extra_fields=extra_fields, restore=restore) + + def __update_exp_time(self, instance=None, the_future=None, extra_fields=(), + interval=None, offset=None, restore=None): + """ Updates a resource instance's expirationTime to the_future + or a default value sometime in the future. + + :note: If instance is not provided or None or False, self.__app is + updated. + :note: Starts a new Timer. + :param instance: resource instance to update + :param the_future: new expirationTime value + :param extra_fields: additional fields, needed in the update request + :param interval: update interval + :param offset: expirationTime offset (should be >0) + :param restore: function that will restore the instance, if it has + expired accidentally. Has to restart the refresher. + :raise CSENotFound: If the instance could not be found and no restore + was provided. + """ + self.logger.debug("updating ExpirationTime of %s", instance) + if self.__shutdown: + # not sure this ever happens. + return + + interval = interval or 60 * 10 # TODO make configurable + offset = offset or 60 * 10 # 10min default + if not the_future: + the_future = datetime.utcfromtimestamp(time.time() + interval + offset) + fields = ["expirationTime"] + fields.extend(extra_fields) + if not instance: + # does this happen if the instance was deleted? + instance = self.__app + instance.expirationTime = the_future + try: + self.mapper.update(instance, fields) + except CSENotFound as e: + self.logger.warn("ExpirationTime update of %s failed: %s", instance, e) + # subscription disappeared? + # missed the expirationTime? + # mb sync issue?; mb congestion? + if restore: + restore(instance) + return + else: + raise + # NOTE: expirationTime might have been changed by CSE at this point. + # update could/should return the updated instance in this case, but + # doesnt. => additional GET to confirm expirationTime ? 
+ + self.logger.debug("Will update expiration time in %s seconds", interval) + self.runner.set_timer( + interval, + self.__update_exp_time, + instance=instance, + extra_fields=extra_fields, + restore=restore, + ) diff --git a/build/lib/openmtc_app/runner/__init__.py b/build/lib/openmtc_app/runner/__init__.py new file mode 100644 index 0000000..099ff22 --- /dev/null +++ b/build/lib/openmtc_app/runner/__init__.py @@ -0,0 +1,51 @@ +from gevent import spawn_later, wait + +from futile.logging import LoggerMixin + + +class AppRunner(LoggerMixin): + def __init__(self, m2m_app, *args, **kw): + super(AppRunner, self).__init__(*args, **kw) + + self._timers = set() + self.m2m_app = m2m_app + self.m2m_ep = None + + def run(self, m2m_ep): + self.m2m_ep = m2m_ep + + try: + self._run() + except (KeyboardInterrupt, SystemExit): + self.logger.info("Exiting...") + except Exception: + self.logger.exception("Error") + raise + finally: + self.logger.debug("Shutting down") + self._shutdown_app() + for timer in self._timers: + timer.kill() + + def _run(self): + self.m2m_app.run(self, self.m2m_ep) + + wait() + + def _shutdown_app(self): + self.m2m_app.shutdown() + + def set_timer(self, t, f, *args, **kw): + timer = None + + def wrapper(): + self._timers.discard(timer) + f(*args, **kw) + + timer = spawn_later(t, wrapper) + self._timers.add(timer) + return timer + + def cancel_timer(self, timer): + self._timers.discard(timer) + timer.kill() diff --git a/build/lib/openmtc_app/util.py b/build/lib/openmtc_app/util.py new file mode 100644 index 0000000..4421e13 --- /dev/null +++ b/build/lib/openmtc_app/util.py @@ -0,0 +1,75 @@ +import sys +from json import load as json_load +from operator import getitem + +import futile + + +def prepare_app(parser, loader, name, default_config_file): + parser.add_argument("-v", "--verbose", action="count", default=None, + help="Increase verbosity in output. This option can be" + " specified multiple times.") + args = parser.parse_args() + + module_ = loader.fullname.split("." 
+ name).pop(0) + + futile.logging.set_default_level(futile.logging.DEBUG) + logger = futile.logging.get_logger(name) + + config_locations = (".", "/etc/openmtc/" + module_) + + try: + import os.path + for d in config_locations: + config_file = os.path.join(os.path.abspath(d), + default_config_file) + logger.debug("Trying config file location: %s", config_file) + if os.path.isfile(config_file): + break + else: + raise Exception("Configuration file %s not found in any of these " + "locations: %s" % default_config_file, + config_locations) + except Exception as e: + sys.stderr.write(str(e) + "\n") + sys.exit(2) + + try: + with open(config_file) as f: + logger.info("Reading configuration file %s.", config_file) + config = json_load(f) + except IOError as e: + logger.warning("Failed to read configuration file %s: %s", + config_file, e) + config = {} + except Exception as e: + logger.critical("Error reading configuration file %s: %s", + config_file, e) + sys.exit(2) + + if "logging" in config: # TODO init logging + log_conf = config["logging"] + if args.verbose is None: + futile.logging.set_default_level(log_conf.get("level") or + futile.logging.WARNING) + elif args.verbose >= 2: + futile.logging.set_default_level(futile.logging.DEBUG) + else: + futile.logging.set_default_level(futile.logging.INFO) + logfile = log_conf.get("file") + if logfile: + futile.logging.add_log_file(logfile) + else: + futile.logging.set_default_level(futile.logging.DEBUG) + + return args, config + + +def get_value(name, value_type, default_value, args, config): + try: + value = (getattr(args, name.replace(".", "_"), None) or + reduce(getitem, name.split("."), config)) + except KeyError: + value = None + value = value if isinstance(value, value_type) else default_value + return value diff --git a/build/lib/openmtc_onem2m/__init__.py b/build/lib/openmtc_onem2m/__init__.py new file mode 100644 index 0000000..6afdef2 --- /dev/null +++ b/build/lib/openmtc_onem2m/__init__.py @@ -0,0 +1,2 @@ +from openmtc_onem2m.transport import AdditionalInformation, MetaInformation, \ + OneM2MRequest, OneM2MResponse diff --git a/build/lib/openmtc_onem2m/client/__init__.py b/build/lib/openmtc_onem2m/client/__init__.py new file mode 100644 index 0000000..5b44da8 --- /dev/null +++ b/build/lib/openmtc_onem2m/client/__init__.py @@ -0,0 +1,23 @@ +from abc import abstractmethod +from futile import LoggerMixin + + +def normalize_path(path): + if not path: + return '' + if path.startswith('//'): + # abs CSE + return '/_' + path[1:] + elif path.startswith('/'): + # sp rel CSE + return '/~' + path + return path + + +class OneM2MClient(LoggerMixin): + def __init__(self): + super(OneM2MClient, self).__init__() + + @abstractmethod + def send_onem2m_request(self, onem2m_request): + pass diff --git a/build/lib/openmtc_onem2m/client/http.py b/build/lib/openmtc_onem2m/client/http.py new file mode 100644 index 0000000..c909682 --- /dev/null +++ b/build/lib/openmtc_onem2m/client/http.py @@ -0,0 +1,217 @@ +import urllib +import ssl +from socket import ( + gaierror, + error as socket_error, +) +from time import time +from urlparse import urlparse +from aplus import Promise +from futile.caching import LRUCache +from geventhttpclient.client import HTTPClient +from geventhttpclient.response import HTTPResponse +from openmtc.exc import ( + OpenMTCNetworkError, + ConnectionFailed, +) +from openmtc_onem2m.exc import ( + get_error_class, + get_response_status, + ERROR_MIN, +) +from openmtc_onem2m.model import ( + ResourceTypeE, + get_short_attribute_name, + 
get_short_member_name, +) +from openmtc_onem2m.serializer.util import ( + decode_onem2m_content, + encode_onem2m_content, +) +from openmtc_onem2m.transport import ( + OneM2MOperation, + OneM2MResponse, + OneM2MErrorResponse, +) +from . import ( + OneM2MClient, + normalize_path, +) + +_method_map_to_http = { + OneM2MOperation.create: 'POST', + OneM2MOperation.retrieve: 'GET', + OneM2MOperation.update: 'PUT', + OneM2MOperation.delete: 'DELETE', + OneM2MOperation.notify: 'POST', +} + +_clients = LRUCache(threadsafe=False) + +_query_params = frozenset(['rt', 'rp', 'rcn', 'da', 'drt']) + +_header_to_field_map = { + 'X-M2M-ORIGIN': 'originator', + 'X-M2M-RI': 'rqi', + 'X-M2M-GID': 'gid', + 'X-M2M-OT': 'ot', + 'X-M2M-RST': 'rset', + 'X-M2M-RET': 'rqet', + 'X-M2M-OET': 'oet', + 'X-M2M-EC': 'ec', +} + + +def get_client(m2m_ep, use_xml=False, ca_certs=None, cert_file=None, key_file=None, + insecure=False): + try: + return _clients[(m2m_ep, use_xml)] + except KeyError: + # TODO: make connection_timeout and concurrency configurable + client = _clients[(m2m_ep, use_xml)] = OneM2MHTTPClient( + m2m_ep, use_xml, ca_certs, cert_file, key_file, insecure) + return client + + +class OneM2MHTTPClient(OneM2MClient): + # defaults + DEF_SSL_VERSION = ssl.PROTOCOL_TLSv1_2 + + def __init__(self, m2m_ep, use_xml, ca_certs=None, cert_file=None, key_file=None, + insecure=False): + super(OneM2MHTTPClient, self).__init__() + + self.parsed_url = urlparse(m2m_ep) + is_https = self.parsed_url.scheme[-1].lower() == "s" + port = self.parsed_url.port or (is_https and 443 or 80) + host = self.parsed_url.hostname + self.path = self.parsed_url.path.rstrip('/') + if self.path and not self.path.endswith('/'): + self.path += '/' + + # TODO(rst): handle IPv6 host here + # geventhttpclient sets incorrect host header + # i.e "host: ::1:8000" instead of "host: [::1]:8000 + if (is_https and ca_certs is not None and cert_file is not None and + key_file is not None): + ssl_options = { + "ca_certs": ca_certs, + "certfile": cert_file, + "keyfile": key_file, + "ssl_version": self.DEF_SSL_VERSION + } + else: + ssl_options = None + + client = HTTPClient(host, port, connection_timeout=120.0, + concurrency=50, ssl=is_https, + ssl_options=ssl_options, insecure=insecure) + self.request = client.request + + self.content_type = 'application/' + ('xml' if use_xml else 'json') + + def _handle_network_error(self, exc, p, http_request, t, + exc_class=OpenMTCNetworkError): + error_str = str(exc) + if error_str in ("", "''"): + error_str = repr(exc) + method = http_request["method"] + path = http_request["request_uri"] + log_path = "%s://%s/%s" % (self.parsed_url.scheme, self.parsed_url.netloc, path) + error_msg = "Error during HTTP request: %s. 
" \ + "Request was: %s %s (%.4fs)" % (error_str, method, log_path, time() - t) + p.reject(exc_class(error_msg)) + + def map_onem2m_request_to_http_request(self, onem2m_request): + """ + Maps a OneM2M request to a HTTP request + :param onem2m_request: OneM2M request to be mapped + :return: request: the resulting HTTP request + """ + self.logger.debug("Mapping OneM2M request to generic request: %s", onem2m_request) + + params = { + param: getattr(onem2m_request, param) for param in _query_params + if getattr(onem2m_request, param) is not None + } + + if onem2m_request.fc is not None: + filter_criteria = onem2m_request.fc + params.update({ + (get_short_attribute_name(name) or get_short_member_name(name)): val + for name, val in filter_criteria.get_values(True).iteritems() + }) + + path = normalize_path(onem2m_request.to) + + if params: + path += '?' + urllib.urlencode(params, True) + + content_type, data = encode_onem2m_content(onem2m_request.content, self.content_type, path=path) + + # TODO(rst): check again + # set resource type + if onem2m_request.operation == OneM2MOperation.create: + content_type += '; ty=' + str(ResourceTypeE[onem2m_request.resource_type.typename]) + + headers = { + header: getattr(onem2m_request, field) for header, field in _header_to_field_map.iteritems() + if getattr(onem2m_request, field) is not None + } + headers['content-type'] = content_type + + self.logger.debug("Added request params: %s", params) + + return { + 'method': _method_map_to_http[onem2m_request.operation], + 'request_uri': self.path + path, + 'body': data, + 'headers': headers, + } + + def map_http_response_to_onem2m_response(self, onem2m_request, response): + """ + Maps HTTP response to OneM2M response + :param onem2m_request: the OneM2M request that created the response + :param response: the HTTP response + :return: resulting OneM2MResponse or OneM2MErrorResponse + """ + if not isinstance(response, HTTPResponse): + self.logger.error("Not a valid response: %s", response) + # return OneM2MErrorResponse(STATUS_INTERNAL_SERVER_ERROR) + self.logger.debug("Mapping HTTP response for OneM2M response: %s", response) + rsc = response.get("x-m2m-rsc", 5000) + if int(rsc) >= ERROR_MIN: + return OneM2MErrorResponse( + get_error_class(rsc).response_status_code, onem2m_request) + + return OneM2MResponse( + get_response_status(rsc), + request=onem2m_request, + rsc=rsc, + pc=decode_onem2m_content(response.read(), response.get("content-type")) + ) + + def send_onem2m_request(self, onem2m_request): + with Promise() as p: + http_request = self.map_onem2m_request_to_http_request(onem2m_request) + t = time() + + try: + response = self.request(**http_request) + except (socket_error, gaierror) as exc: + self._handle_network_error(exc, p, http_request, t, ConnectionFailed) + except Exception as exc: + self.logger.exception("Error in HTTP request") + self._handle_network_error(exc, p, http_request, t) + else: + try: + onem2m_response = self.map_http_response_to_onem2m_response(onem2m_request, response) + if isinstance(onem2m_response, OneM2MErrorResponse): + p.reject(onem2m_response) + else: + p.fulfill(onem2m_response) + finally: + response.release() + + return p diff --git a/build/lib/openmtc_onem2m/client/mqtt.py b/build/lib/openmtc_onem2m/client/mqtt.py new file mode 100644 index 0000000..fa875b8 --- /dev/null +++ b/build/lib/openmtc_onem2m/client/mqtt.py @@ -0,0 +1,431 @@ +from aplus import ( + Promise, +) +from collections import deque +from futile.caching import LRUCache +import gevent +from gevent import monkey; 
monkey.patch_all() +from . import OneM2MClient +from openmtc.exc import ConnectionFailed, OpenMTCNetworkError +from ..exc import ( + ERROR_MIN, + CSEValueError, + CSEError, +) +from ..serializer.util import ( + decode_onem2m_content, + encode_onem2m_content, +) +from ..transport import ( + OneM2MRequest, + OneM2MResponse, + OneM2MErrorResponse, + OneM2MOperation, +) +from ..model import ResourceTypeE +import paho.mqtt.client as mqtt +from simplejson import ( + JSONDecoder, + JSONEncoder, + JSONDecodeError, +) +from socket import error as SocketError +from urlparse import urlparse + +#: Dictionary mapping supported schemes to port numbers +portmap = { + 'mqtt': 1883, + 'mqtts': 8883, + # NB: The correct (i.e. registered with IANA) service-name for SSL/TLS-wrapped MQTT is 'secure-mqtt' in an effort to + # prevent confusion with MQTT-S/N. But as the entire world seems to insist on using 'mqtts' (including TS 0010, + # sec. 6.6) ... We are supporting both names here for maximum compliance and robustness. + 'secure-mqtt': 8883, +} + +MQTT_QOS_LEVEL = 1 + +_clients = LRUCache(threadsafe=False) + + +def get_client(m2m_ep, use_xml=False, client_id=None, handle_request_func=None): + """ + + :param string m2m_ep: + :param boolean use_xml: + :param string client_id: + :param fun handle_request_func: + :return OneM2MMQTTClient: + """ + try: + return _clients[(m2m_ep, use_xml)] + except KeyError: + _clients[(m2m_ep, use_xml)] = OneM2MMQTTClient(m2m_ep, use_xml, client_id, handle_request_func) + return _clients[(m2m_ep, use_xml)] + + +class OneM2MMQTTClient(OneM2MClient): + """ + This class provides for a transport over the MQTT protocol as described in TS 0010 + """ + + __request_fields = frozenset([ + 'op', + 'fr', + 'rqi', + 'ty', + 'pc', + 'rol', + 'ot', + 'rqet', + 'rset', + 'oet', + 'rt', + 'rp', + 'rcn', + 'ec', + 'da', + 'gid', + 'drt', + 'to', + ]) + + __response_fields = frozenset([ + 'rsc', + 'rqi', + 'pc', + 'fr', + 'to', + ]) + + @staticmethod + def _mqtt_mask(id): + return id.lstrip('/').replace('/', ':') + + @staticmethod + def _build_topic(originator='+', receiver='+', type='req'): + """ + Helper function to create topic strings + + :param string originator: + :param string receiver: + :param string type: + :return string: + """ + return '/'.join([ + '/oneM2M', + type, + OneM2MMQTTClient._mqtt_mask(originator), + OneM2MMQTTClient._mqtt_mask(receiver), + ]) + + def attach_callback(self): + """ + Wrapper function to attach callback handlers to the MQTT client. Functions attached in this manner are expected + to have the same name as the handler they seek to implement. 
+ :return fun: + """ + def decorator(func): + def wrapper(_self, *args, **kwargs): + func(_self, *args, **kwargs) + setattr(self._client, func.__name__, func) + return wrapper + return decorator + + def __init__(self, m2m_ep, _, client_id, handle_request_func=None, subscribe_sys_topics=False): + """ + :param str m2m_ep: + :param bool _: + :param str client_id: + :param call handle_request_func: + :param bool subscribe_sys_topics: Whether to subscribe to $SYS topics or not + (cf ) + """ + super(OneM2MMQTTClient, self).__init__() + parsed_url = urlparse(m2m_ep) + self._target_id = parsed_url.fragment + + self._encode = JSONEncoder().encode + self._decode = JSONDecoder().decode + + self._handle_request_func = handle_request_func + + self._processed_request_ids = deque([], maxlen=200) + self._request_promises = LRUCache(threadsafe=False, max_items=200) + + if client_id is None: + import random + import string + client_id = ''.join(random.sample(string.letters, 16)) + + self._client = mqtt.Client( + clean_session=False, + client_id='::'.join([ + 'C' if client_id[0].lower() in ['c', 'm'] else 'A', + self._mqtt_mask(client_id), + ]), + ) + + @self.attach_callback() + def on_connect(client, _, rc): + """ + :param mqtt.Client client: + :param All userdata: + :param integer rc: + :return void: + """ + if not rc == mqtt.CONNACK_ACCEPTED: + raise ConnectionFailed(mqtt.connack_string(rc)) + + def request_callback(client, _, message): + """ + Catch requests and + + :param mqtt.Client client: + :param All _: + :param mqtt.MQTTMessage message: + :return void: + """ + originator = message.topic.split('/')[3] + try: + request = self._decode(message.payload) + except JSONDecodeError as e: + self.logger.warn( + 'Got rubbish request from client %s: %s' + % (originator, e.message, ) + ) + return + + try: + if request['rqi'] in self._processed_request_ids: + self.logger.info('Request %s already processed; discarding duplicate.' % (request['rqi'], )) + return + else: + rqi = request['rqi'] + except KeyError: + self.logger.warn( + 'Special treatment for special request w/o request id from %s.' + % (originator, ) + ) + return + + try: + request['pc'] = decode_onem2m_content(self._encode(request['pc']), 'application/json') + request['ty'] = type(request['pc']) + except KeyError: + # No content, eh? + request['ty'] = None + + self.logger.debug('Decoded JSON request: %s' % (request, )) + + op = OneM2MOperation._member_map_.values()[request['op'] - 1] + to = request['to'] + del request['op'], request['to'] + + try: + response = self._handle_request_func( + OneM2MRequest(op, to, **request) + ).get() + except OneM2MErrorResponse as response: + self.logger.error('OneM2MError: %s' % (response.message, )) + except CSEError as e: + response = OneM2MErrorResponse(status_code=e.response_status_code, rqi=rqi) + + if not response.rqi: + # This really should not happen. No, really, it shouldn't. + self.logger.debug( + 'FIXUP! FIXUP! FIXUP! 
Adding missing request identifier to response: %s' + % (rqi, ) + ) + response.rqi = rqi + + if response.content: + response.content = self._decode( + encode_onem2m_content(response.content, 'application/json', path=response.to)[1] + ) + + self._publish_message( + self._encode({ + k: getattr(response, k) for k in self.__response_fields if getattr(response, k) is not None + }), + self._build_topic(originator, client_id, type='resp'), + ) + self._processed_request_ids.append(rqi) + + def response_callback(client, _, message): + """ + + :param mqtt.Client client: + :param All _: + :param mqtt.MQTTMessage message: + :return: + """ + try: + response = self._decode(message.payload) + except JSONDecodeError as e: + self.logger.error('Discarding response w/ damaged payload: %s', (e.message, )) + return + + promise_key = (message.topic.split('/')[4], response['rqi']) + try: + p = self._request_promises[promise_key] + except KeyError: + self.logger.debug( + 'Response %s could not be mapped to a request. Discarding.' + % (response['rqi'], ) + ) + return + + try: + response['pc'] = decode_onem2m_content(self._encode(response['pc']), 'application/json') + except KeyError: + pass + except CSEValueError as e: + self.logger.error( + 'Content of response %s could not be parsed, throwing on the trash heap: %s' + % (response['rqi'], e.message) + ) + p.reject(e) + + status_code = response['rsc'] + del response['rsc'] + if status_code >= ERROR_MIN: + p.reject(OneM2MErrorResponse(status_code, **response)) + else: + p.fulfill(OneM2MResponse(status_code, **response)) + + topics = [ + self._build_topic(originator=client_id, receiver='#', type='resp'), + ] + client.message_callback_add(topics[0], response_callback) + + if self._handle_request_func is not None: + topics.append(self._build_topic(receiver=client_id) + '/+') + client.message_callback_add(topics[1], request_callback) + + if subscribe_sys_topics: + topics.append('$SYS/#') + + self.logger.debug('Subscribing to topic(s) %s ...' % (', '.join(topics), )) + client.subscribe([ + (str(topic), MQTT_QOS_LEVEL) for topic in topics + ]) + + @self.attach_callback() + def on_disconnect(client, userdata, rc): + """ + :param mqtt.Client client: + :param All userdata: + :param int rc: + :return void: + """ + if not rc == mqtt.MQTT_ERR_SUCCESS: + self.logger.error( + 'Involuntary connection loss: %s (code %d). Waiting for reconnect ...' 
+ % (mqtt.error_string(rc), rc) + ) + + @self.attach_callback() + def on_message(client, userdata, message): + """ + :param mqtt.Client client: + :param All userdata: + :param mqtt.MQTTMessage message: + :return void: + """ + self.logger.debug('message received on topic %s' % (message.topic, )) + + @self.attach_callback() + def on_log(client, userdata, level, buf): + """ + :param mqtt.Client client: + :param All userdata: + :param integer level: + :param string buf: + :return void: + """ + self.logger.debug('pahomqtt-%d: %s' % (level, buf)) + + if parsed_url.username: + self._client.username_pw_set(parsed_url.username, parsed_url.password) + + try: + self._client.connect( + parsed_url.hostname, + parsed_url.port or portmap[parsed_url.scheme] + ) + except SocketError as e: + raise ConnectionFailed(e.message) + + def loop(): + try: + while self._client.loop(timeout=0.1) != mqtt.mqtt_cs_disconnecting: + gevent.sleep() + except (KeyboardInterrupt, SystemExit): + pass + + gevent.spawn(loop) + + def _publish_message(self, payload, topic): + (rc, mid) = self._client.publish(topic, payload, MQTT_QOS_LEVEL) + if not rc == mqtt.MQTT_ERR_SUCCESS: + self.logger.info('Code %d while sending message %d: %s' % (rc, mid, mqtt.error_string(rc))) + + def send_onem2m_request(self, request): + """ + :param openmtc_onem2m.transport.OneM2MRequest request: + :return Promise: + """ + p = Promise() + + try: + client_id = request.originator.split('/')[-1] + except (KeyError, AttributeError): + # TODO: make this configurable + client_id = 'ae0' + + request.op = 1 + OneM2MOperation._member_map_.keys().index(OneM2MOperation[request.op].name) + if request.pc: + request.pc = self._decode( + encode_onem2m_content(request.pc, 'application/json', path=request.to)[1] + ) + try: + if request.to.startswith('//'): # abs CSE + request.to = '/_' + request.to[1:] + elif request.to.startswith('/'): # sp rel CSE + request.to = '/~' + request.to + except AttributeError: + self.logger.error('Could not resolve target id; defaulting to preset') + request.to = '/' + self._target_id + + if request.ty: + request.ty = ResourceTypeE[request.resource_type.typename].value + + self.logger.debug('Preparing request for transit: %s' % (request, )) + + promises_key = (self._target_id, request.rqi) + + def cleanup(_): + self.logger.debug('Clearing request id %s ...' % (promises_key, )) + del self._request_promises[promises_key] + + p.addCallback(cleanup) + p.addErrback(cleanup) + + self._request_promises[promises_key] = p + + self._publish_message( + self._encode({ + str(k): getattr(request, k) for k in self.__request_fields if getattr(request, k) is not None + }), + self._build_topic(client_id, self._target_id) + '/json', + ) + + return p + + def stop(self): + self._client.disconnect() + # TODO(sho): this is abominable. But for the time being, there seems to be no elegant solution to this. + self._client._clean_session = True + # TS 0010, sec. 
6.3 mandates a reconnect in order to leave a clean state with the MQTT broker + self._client.reconnect() + self._client.disconnect() diff --git a/build/lib/openmtc_onem2m/exc.py b/build/lib/openmtc_onem2m/exc.py new file mode 100644 index 0000000..913a8fc --- /dev/null +++ b/build/lib/openmtc_onem2m/exc.py @@ -0,0 +1,183 @@ +""" +Created on 26.05.2013 + +@author: kca +""" +from openmtc.exc import OpenMTCError +from collections import namedtuple + + +STATUS = namedtuple("STATUS", "numeric_code description http_status_code") + +STATUS_ACCEPTED = STATUS( + 1000, "ACCEPTED", 202) +STATUS_OK = STATUS( + 2000, "OK", 200) +STATUS_CREATED = STATUS( + 2001, "CREATED", 201) +STATUS_BAD_REQUEST = STATUS( + 4000, "BAD_REQUEST", 400) +STATUS_NOT_FOUND = STATUS( + 4004, "NOT_FOUND", 404) +STATUS_OPERATION_NOT_ALLOWED = STATUS( + 4005, "OPERATION_NOT_ALLOWED", 405) +STATUS_REQUEST_TIMEOUT = STATUS( + 4008, "REQUEST_TIMEOUT", 408) +STATUS_SUBSCRIPTION_CREATOR_HAS_NO_PRIVILEGE = STATUS( + 4101, ",_SUBSCRIPTION_CREATOR_HAS_NO_PRIVILEGE", 403) +STATUS_CONTENTS_UNACCEPTABLE = STATUS( + 4102, "CONTENTS_UNACCEPTABLE", 400) +STATUS_ORIGINATOR_HAS_NO_PRIVILEGE = STATUS( + 4103, "ORIGINATOR_HAS_NO_PRIVILEGE", 403) +STATUS_GROUP_REQUEST_IDENTIFIER_EXISTS = STATUS( + 4104, "GROUP_REQUEST_IDENTIFIER_EXISTS", 409) +STATUS_CONFLICT = STATUS( + 4105, "CONFLICT", 409) +STATUS_INTERNAL_SERVER_ERROR = STATUS( + 5000, "INTERNAL_SERVER_ERROR", 500) +STATUS_NOT_IMPLEMENTED = STATUS( + 5001, "NOT_IMPLEMENTED", 501) +STATUS_TARGET_NOT_REACHABLE = STATUS( + 5103, "TARGET_NOT_REACHABLE", 404) +STATUS_NO_PRIVILEGE = STATUS( + 5105, "NO_PRIVILEGE", 403) +STATUS_ALREADY_EXISTS = STATUS( + 5106, "ALREADY_EXISTS", 403) +STATUS_TARGET_NOT_SUBSCRIBABLE = STATUS( + 5203, "TARGET_NOT_SUBSCRIBABLE", 403) +STATUS_SUBSCRIPTION_VERIFICATION_INITIATION_FAILED = STATUS( + 5204, "SUBSCRIPTION_VERIFICATION_INITIATION_FAILED", 500) +STATUS_SUBSCRIPTION_HOST_HAS_NO_PRIVILEGE = STATUS( + 5205, "SUBSCRIPTION_HOST_HAS_NO_PRIVILEGE", 403) +STATUS_NON_BLOCKING_REQUEST_NOT_SUPPORTED = STATUS( + 5206, "NON_BLOCKING_REQUEST_NOT_SUPPORTED", 501) +STATUS_EXTERNAL_OBJECT_NOT_REACHABLE = STATUS( + 6003, "EXTERNAL_OBJECT_NOT_REACHABLE", 404) +STATUS_EXTERNAL_OBJECT_NOT_FOUND = STATUS( + 6005, "EXTERNAL_OBJECT_NOT_FOUND", 404) +STATUS_MAX_NUMBER_OF_MEMBER_EXCEEDED = STATUS( + 6010, "MAX_NUMBER_OF_MEMBER_EXCEEDED", 400) +STATUS_MEMBER_TYPE_INCONSISTENT = STATUS( + 6011, "MEMBER_TYPE_INCONSISTENT", 400) +STATUS_MANAGEMENT_SESSION_CANNOT_BE_ESTABLISHED = STATUS( + 6020, "MANAGEMENT_SESSION_CANNOT_BE_ESTABLISHED", 500) +STATUS_MANAGEMENT_SESSION_ESTABLISHMENT_TIMEOUT = STATUS( + 6021, "MANAGEMENT_SESSION_ESTABLISHMENT_TIMEOUT", 500) +STATUS_INVALID_CMDTYPE = STATUS( + 6022, "INVALID_CMDTYPE", 400) +STATUS_INVALID_ARGUMENTS = STATUS( + 6023, "INVALID_ARGUMENTS", 400) +STATUS_INSUFFICIENT_ARGUMENT = STATUS( + 6024, "INSUFFICIENT_ARGUMENT", 400) +STATUS_MGMT_CONVERSION_ERROR = STATUS( + 6025, "MGMT_CONVERSION_ERROR", 500) +STATUS_CANCELLATION_FAILED = STATUS( + 6026, "CANCELLATION_FAILED", 500) +STATUS_ALREADY_COMPLETE = STATUS( + 6028, "ALREADY_COMPLETE", 400) +STATUS_COMMAND_NOT_CANCELLABLE = STATUS( + 6029, "COMMAND_NOT_CANCELLABLE", 400) +STATUS_IMPERSONATION_ERROR = STATUS( + 6101, "IMPERSONATION_ERROR", 400) + + +_status_map = {v.numeric_code: v for v in globals().values() + if isinstance(v, STATUS)} + +ERROR_MIN = STATUS_BAD_REQUEST.numeric_code + + +class OneM2MError(OpenMTCError): + pass + + +class CSEError(OneM2MError): + response_status_code = 
STATUS_INTERNAL_SERVER_ERROR + + @property + def status_code(self): + return self.response_status_code.http_status_code + + @property + def rsc(self): + return self.response_status_code.numeric_code + + +class CSENotFound(CSEError): + response_status_code = STATUS_NOT_FOUND + + +class CSEOperationNotAllowed(CSEError): + response_status_code = STATUS_OPERATION_NOT_ALLOWED + + +class CSENotImplemented(CSEError): + response_status_code = STATUS_NOT_IMPLEMENTED + + +class CSETargetNotReachable(CSEError): + response_status_code = STATUS_TARGET_NOT_REACHABLE + + +class CSEConflict(CSEError): + response_status_code = STATUS_CONFLICT + + +class CSEBadRequest(CSEError): + response_status_code = STATUS_BAD_REQUEST + + +class CSESyntaxError(CSEBadRequest): + response_status_code = STATUS_BAD_REQUEST + + +class CSEPermissionDenied(CSEError): + response_status_code = STATUS_ORIGINATOR_HAS_NO_PRIVILEGE + + +class CSEImpersonationError(CSEBadRequest): + response_status_code = STATUS_IMPERSONATION_ERROR + + +class CSEValueError(CSESyntaxError, ValueError): + pass + + +class CSETypeError(CSESyntaxError, TypeError): + pass + + +class CSEMissingValue(CSESyntaxError): + pass + + +class CSEContentsUnacceptable(CSEError): + response_status_code = STATUS_CONTENTS_UNACCEPTABLE + + +_error_map = { + STATUS_INTERNAL_SERVER_ERROR.numeric_code: CSEError +} + + +def get_error_class(rsc): + return _error_map.get(int(rsc), CSEError) + + +def get_response_status(rsc): + return _status_map.get(int(rsc), STATUS_INTERNAL_SERVER_ERROR) + + +def all_subclasses(cls): + return cls.__subclasses__() + [g for s in cls.__subclasses__() + for g in all_subclasses(s)] + + +for c in all_subclasses(CSEError): + try: + code = vars(c)["response_status_code"].numeric_code + except KeyError: + continue + _error_map[code] = c + +del c, code diff --git a/build/lib/openmtc_onem2m/mapper/__init__.py b/build/lib/openmtc_onem2m/mapper/__init__.py new file mode 100644 index 0000000..afb628f --- /dev/null +++ b/build/lib/openmtc_onem2m/mapper/__init__.py @@ -0,0 +1,118 @@ +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse + +from openmtc.mapper import BasicMapper, MapperError +from openmtc_onem2m import OneM2MRequest +from openmtc_onem2m.transport import OneM2MOperation + + +def _is_persistent(instance): + return bool(instance.path) + + +class OneM2MMapper(BasicMapper): + def __init__(self, cse, originator=None, ca_certs=None, cert_file=None, key_file=None, *args, **kw): + super(OneM2MMapper, self).__init__(*args, **kw) + + scheme = urlparse(cse).scheme.lower() + if scheme in ("", "https", "http"): + from openmtc_onem2m.client.http import get_client + self._send_request = get_client(cse, use_xml=False, ca_certs=ca_certs, cert_file=cert_file, key_file=key_file).send_onem2m_request + elif scheme in ("mqtt", "mqtts", "secure-mqtt"): + from openmtc_onem2m.client.mqtt import get_client + self._send_request = get_client(cse, use_xml=False, client_id=originator).send_onem2m_request + elif scheme == "coap": + raise NotImplementedError + else: + raise ValueError( + "Unsupported URL scheme: %s" % (scheme,) + ) + self.originator = originator + + def create(self, path, instance): + instance.__dict__.update({ + attribute.name: None for attribute in type(instance).attributes if attribute.accesstype == attribute.RO + }) + + # TODO(rst): add resource_type + response = self._send_request(OneM2MRequest( + OneM2MOperation.create, + path, + self.originator, + ty=type(instance), + pc=instance + )).get() + + try: + 
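# A rough usage sketch for the mapper defined above; the endpoint, originator
# and target path are placeholders, and keyword construction of AE (defined
# further down in model.py) is assumed:
#
#     from openmtc_onem2m.mapper import OneM2MMapper
#     from openmtc_onem2m.model import AE
#
#     mapper = OneM2MMapper("http://localhost:8000", originator="CAdmin")
#     ae = mapper.create("onem2m", AE(App_ID="demo-app", requestReachability=False))
#     retrieved = mapper.get(ae.path)
#     mapper.delete(ae)
#
# The URL scheme selects the transport: "", "http" and "https" use the
# geventhttpclient-based client, "mqtt"/"mqtts"/"secure-mqtt" the paho-based
# one, and "coap" is not implemented.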
instance.__dict__.update(response.content.values) + instance.path = path + '/' + response.content.resourceName + except (AttributeError, ): + instance.path = path + + self.logger.debug("Set instance path: %s" % (instance.path, )) + instance._synced = False + return instance + + def update(self, instance, fields=None): + if not _is_persistent(instance): + raise MapperError("Instance is not yet stored") + return self._do_update(instance, fields) + + def _do_update(self, instance, fields=None): + attributes = type(instance).attributes + fields_to_be_cleared = [a.name for a in attributes if a.accesstype in (a.WO, a.RO)] + if fields: + fields_to_be_cleared.extend([a.name for a in attributes if a.name not in fields]) + instance.childResource = [] + + # remove NP attributes + instance.__dict__.update({ + a: None for a in fields_to_be_cleared + }) + + response = self._send_request(OneM2MRequest( + OneM2MOperation.update, + instance.path, + self.originator, + pc=instance + )).get() + + try: + response.content.path = instance.path + except AttributeError: + pass + + return response.content + + def get(self, path): + response = self._get_data(path) + response.content.path = path + self.logger.debug("Received response: %s", response.content) + return response.content + + def delete(self, instance): + self._send_request(OneM2MRequest( + OneM2MOperation.delete, + getattr(instance, "path", instance), + self.originator + )) + + def _get_data(self, path): + return self._send_request(OneM2MRequest( + OneM2MOperation.retrieve, + path, + self.originator + )).get() + + # TODO(rst): check if this can be removed in parent class + @classmethod + def _patch_model(cls): + pass + + def _fill_resource(self, res, data): + pass + + def _map(self, path, typename, data): + pass diff --git a/build/lib/openmtc_onem2m/model.py b/build/lib/openmtc_onem2m/model.py new file mode 100644 index 0000000..34931cd --- /dev/null +++ b/build/lib/openmtc_onem2m/model.py @@ -0,0 +1,1657 @@ +from enum import IntEnum, unique + +from openmtc.model import (Resource as Res, UnicodeAttribute, DatetimeAttribute, + Attribute, ListAttribute, Entity, EntityAttribute, + AnyURI, StringListAttribute, ContentResource) +from openmtc.model.exc import ModelTypeError +from futile import issubclass + +LATEST_VERSION = "1.6" + + +class OneM2MIntEnum(IntEnum): + def __str__(self): + return str(self.value) + + +class OneM2MEntity(Entity): + pass + + +class OneM2MContentResource(ContentResource, OneM2MEntity): + pass + + +class OneM2MResource(Res, OneM2MEntity): + __model_name__ = "onem2m" + __model_version__ = "1.6" + + +################################################################################ +# enumerationTypes +################################################################################ + +class ResourceTypeE(OneM2MIntEnum): + accessControlPolicy = 1 + AE = 2 + container = 3 + contentInstance = 4 + CSEBase = 5 + delivery = 6 + eventConfig = 7 + execInstance = 8 + group = 9 + localPolicy = 10 + m2mServiceSubscriptionProfile = 11 + mgmtCmd = 12 + mgmtObj = 13 + node = 14 + pollingChannel = 15 + remoteCSE = 16 + request = 17 + schedule = 18 + serviceSubscribedAppRule = 19 + serviceSubscribedNode = 20 + statsCollect = 21 + statsConfig = 22 + subscription = 23 + accessControlPolicyAnnc = 10001 + AEAnnc = 10002 + containerAnnc = 10003 + contentInstanceAnnc = 10004 + groupAnnc = 10009 + locationPolicyAnnc = 10010 + mgmtObjAnnc = 10013 + nodeAnnc = 10014 + remoteCSEAnnc = 10016 + scheduleAnnc = 10018 + + +@unique +class CSETypeIDE(OneM2MIntEnum): + 
IN_CSE = 1 + MN_CSE = 2 + AEN_CSE = 3 + + +@unique +class LocationSourceE(OneM2MIntEnum): + Network_based = 1 + Device_based = 2 + Sharing_based = 3 + + +@unique +class StdEventCatsE(OneM2MIntEnum): + mmediate = 2 + BestEffort = 3 + Latest = 4 + + +@unique +class OperationE(OneM2MIntEnum): + Create = 1 + Retrieve = 2 + Update = 3 + Delete = 4 + Notify = 5 + + +@unique +class ResponseType(OneM2MIntEnum): + nonBlockingRequestSynch = 1 + nonBlockingRequestAsynch = 2 + blockingRequest = 3 + + +# @unique +# class ResultConentE(OneM2MIntEnum): +# nothing = 0 +# attributes = 1 +# hierarchical_address = 2 +# hierarchical_address_and_attributes = 3 +# attributes_and_child_resources = 4 +# attributes_and_child_resource_references = 6 +# child_resource_references = 6 +# original_resource = 7 + + +@unique +class DiscResTypeE(OneM2MIntEnum): + structured = 1 + unstructured = 2 + + +# TODO: responseStatusCode + + +@unique +class RequestStatusE(OneM2MIntEnum): + COMPLETED = 1 + FAILED = 2 + PENDING = 3 + FORWARDED = 4 + + +@unique +class MemberTypeE(OneM2MIntEnum): + accessControlPolicy = 1 + AE = 2 + container = 3 + contentInstance = 4 + CSEBase = 5 + delivery = 6 + eventConfig = 7 + execInstance = 8 + group = 9 + locationPolicy = 10 + m2mServiceSubscription = 11 + mgmtCmd = 12 + mgmtObj = 13 + node = 14 + pollingChannel = 15 + remoteCSE = 16 + request = 17 + schedule = 18 + serviceSubscribedAppRule = 19 + serviceSubscribedNode = 20 + statsCollect = 21 + statsConfig = 22 + subscription = 23 + token = 32 + dynamicAuthorizationConsultation = 34 + accessControlPolicyAnnc = 10001 + AEAnnc = 10002 + containerAnnc = 10003 + contentInstanceAnnc = 10004 + groupAnnc = 10009 + locationPolicyAnnc = 10010 + mgmtObjAnnc = 10013 + nodeAnnc = 10014 + remoteCSEAnnc = 10016 + scheduleAnnc = 10019 + dynamicAuthorizationConsultationAnnc = 10034 + mixed = 24 + # Mixed is a mixture of the resource types from 1 to 23, 10001 to 10004, 10009 to 10010, + # 10013 to 10014 and 10016 to 10018 as listed above. 
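These integer enumerations are what actually travel on the wire: OneM2MIntEnum overrides __str__ to yield the numeric code, which is how the HTTP client above builds the "ty=" Content-Type parameter. A minimal sketch, assuming the module is importable as openmtc_onem2m.model:

    from openmtc_onem2m.model import MemberTypeE, ResourceTypeE

    # str() gives the numeric code, not the symbolic name
    assert str(ResourceTypeE.AE) == "2"
    assert str(ResourceTypeE.contentInstance) == "4"

    # name-based lookup, as used for the "ty=" parameter in client/http.py
    assert ResourceTypeE["container"] is ResourceTypeE.container
    assert int(MemberTypeE.mixed) == 24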
+ + +@unique +class ConsistencyStrategyE(OneM2MIntEnum): + ABANDON_MEMBER = 1 + ABANDON_GROUP = 2 + SET_MIXED = 3 + + +@unique +class CmdTypeE(OneM2MIntEnum): + RESET = 1 + REBOOT = 2 + UPLOAD = 3 + DOWNLOAD = 4 + SOFTWAREINSTALL = 5 + SOFTWAREUNINSTALL = 6 + SOFTWAREUPDATE = 7 + + +@unique +class ExecModeTypeE(OneM2MIntEnum): + MMEDIATEONCE = 1 + IMMEDIATEREPEAT = 2 + RANDOMONCE = 3 + RANDOMREPEAT = 4 + + +@unique +class ExecStatusTypeE(OneM2MIntEnum): + INITIATED = 1 + PENDING = 2 + FINISHED = 3 + CANCELLING = 4 + CANCELLED = 5 + STATUS_NON_CANCELLABLE = 6 + + +@unique +class ExecResultTypeE(OneM2MIntEnum): + STATUS_REQUEST_UNSUPPORTED = 1 + STATUS_REQUEST_DENIED = 2 + STATUS_CANCELLATION_DENIED = 3 + STATUS_INTERNAL_ERROR = 4 + STATUS_INVALID_ARGUMENTS = 5 + STATUS_RESOURCES_EXCEEDED = 6 + STATUS_FILE_TRANSFER_FAILED = 7 + STATUS_FILE_TRANSFER_SERVER_AUTHENTICATION_FAILURE = 8 + STATUS_UNSUPPORTED_PROTOCOL = 9 + STATUS_UPLOAD_FAILED = 10 + STATUS_FILE_TRANSFER_FAILED_MULTICAST_GROUP_UNABLE_JOIN = 11 + STATUS_FILE_TRANSFER_FAILED_SERVER_CONTACT_FAILED = 12 + STATUS_FILE_TRANSFER_FAILED_FILE_ACCESS_FAILED = 13 + STATUS_FILE_TRANSFER_FAILED_DOWNLOAD_INCOMPLETE = 14 + STATUS_FILE_TRANSFER_FAILED_FILE_CORRUPTED = 15 + STATUS_FILE_TRANSFER_FILE_AUTHENTICATION_FAILURE = 16 + STATUS_FILE_TRANSFER_WINDOW_EXCEEDED = 19 + STATUS_INVALID_UUID_FORMAT = 20 + STATUS_UNKNOWN_EXECUTION_ENVIRONMENT = 21 + STATUS_DISABLED_EXECUTION_ENVIRONMENT = 22 + STATUS_EXECUTION_ENVIRONMENT_MISMATCH = 23 + STATUS_DUPLICATE_DEPLOYMENT_UNIT = 24 + STATUS_SYSTEM_RESOURCES_EXCEEDED = 25 + STATUS_UNKNOWN_DEPLOYMENT_UNIT = 26 + STATUS_INVALID_DEPLOYMENT_UNIT_STATE = 27 + STATUS_INVALID_DEPLOYMENT_UNIT_UPDATE_DOWNGRADE_DISALLOWED = 28 + STATUS_INVALID_DEPLOYMENT_UNIT_UPDATE_UPGRADE_DISALLOWED = 29 + STATUS_INVALID_DEPLOYMENT_UNIT_UPDATE_VERSION_EXISTS = 30 + + +@unique +class PendingNotificationE(OneM2MIntEnum): + sendLatest = 1 + sendAllPending = 2 + + +@unique +class NotificationContentTypeE(OneM2MIntEnum): + allAttributes = 1 + modifiedAttributes = 2 + resourceID = 3 + + +@unique +class NotificationEventTypeE(OneM2MIntEnum): + updateOfResource = 1 + deleteOfResource = 2 + createOfDirectChildResource = 3 + deleteOfDirectChildResource = 4 + + +@unique +class StatusE(OneM2MIntEnum): + Successful = 1 + Failure = 2 + In_Process = 3 + + +@unique +class BatteryStatusE(OneM2MIntEnum): + NORMAL = 1 + CHARGING = 2 + CHARGING_COMPLETE = 3 + DAMAGED = 4 + LOW_BATTERY = 5 + NOT_INSTALLED = 6 + UNKNOWN = 7 + + +@unique +class ManagementDefinitionE(OneM2MIntEnum): + firmware = 1001 + software = 1002 + memory = 1003 + areaNwkInfo = 1004 + areaNwkDeviceInfo = 1005 + battery = 1006 + deviceInfo = 1007 + deviceCapability = 1008 + reboot = 1009 + eventLog = 1010 + cmdhPolicy = 1011 + activeCmdhPolicy = 1012 + cmdhDefaults = 1013 + cmdhDefEcValue = 1014 + cmdhEcDefParamValues = 1015 + cmdhLimits = 1016 + cmdhNetworkAccessRules = 1017 + cmdhNwAccessRule = 1018 + cmdhBuffer = 1019 + Unspecified = 0 + + +@unique +class LogTypeIdE(OneM2MIntEnum): + System = 1 + Security = 2 + Event = 3 + Trace = 4 + Panic = 5 + + +@unique +class LogStatusE(OneM2MIntEnum): + Started = 1 + Stopped = 2 + Unknown = 3 + NotPresent = 4 + Error = 5 + + +@unique +class EventTypeE(OneM2MIntEnum): + DATAOPERATION = 1 + STORAGEBASED = 2 + TIMERBASED = 3 + + +@unique +class StatsRuleStatusTypeE(OneM2MIntEnum): + ACTIVE = 1 + INACTIVE = 2 + + +@unique +class StatModelTypeE(OneM2MIntEnum): + EVENTBASED = 1 + + +@unique +class EncodingTypeE(OneM2MIntEnum): + plain = 0 + 
base64String = 1 + base64Binary = 2 + + +# TODO(rkr): values are wrong? +# => see TS-0004, p.47, m2m:accessControlOperations, +# => more values in xsd enumerationTypes +@unique +class AccessControlOperationE(OneM2MIntEnum): + create = 1 + retrieve = 2 + update = 4 + delete = 8 + notify = 16 + discover = 32 + +# TODO: SRole-ID + + +@unique +class FilterUsageE(OneM2MIntEnum): + Discovery = 1 + ConditionalRetrieval = 2 + + +@unique +class CountryCodeE(OneM2MIntEnum): + india = 91 + usa = 01 + + +@unique +class SecurityInfoTypeE(OneM2MIntEnum): + # TS-0004, p.49, Table 6.3.4.2.35-1 + DynamicAuthorizationRequest = 1 + DynamicAuthorizationResponse = 2 + ReceiverE2ERandObjectRequest = 3 + ReceiverE2ERandObjectResponse = 4 + ESPrimObject = 5 + ESCertKEMessage = 6 + + +################################################################################ +# commonTypes +################################################################################ + +# simple ####################################################################### + + +class IDS(UnicodeAttribute): + pass + +# TODO: nodeID + +# TODO: deviceID + +# TODO: externalID + + +class RequestIDS(UnicodeAttribute): + pass + + +class NhURIS(UnicodeAttribute): + pass + +# TODO: acpType + + +class LabelsS(StringListAttribute): + pass + +# TODO: triggerRecipientID + +# TODO: listOfM2MID + +# TODO: longMin-1 + +# TODO: listOfMinMax + +# TODO: backOffParameters + +# TODO: poaList + + +class TimestampS(DatetimeAttribute): + pass + +# TODO: absRelTimestamp + +# TODO: typeOfContent + +# TODO: permittedMediaTypes + +# TODO: serializations + +# TODO: contentInfo + +# TODO: eventCat + +# TODO: eventCatWithDef + +# TODO: listOfEventCat + +# TODO: listOfEventCatWithDef + +# TODO: scheduleEntry + + +class ListOfURIsS(StringListAttribute): + content_type = AnyURI + + +class AttributeListS(StringListAttribute): + pass + +# complex ###################################################################### + +# TODO: deliveryMetaData + +# TODO: aggregatedRequest + +# TODO: metaInformation + +# TODO: primitiveContent + + +class FilterCriteria(OneM2MEntity): + createdBefore = TimestampS() + createdAfter = TimestampS() + modifiedSince = TimestampS() + unmodifiedSince = TimestampS() + stateTagSmaller = Attribute(int) # xs:positiveInteger + stateTagBigger = Attribute(int) # xs:nonNegativeInteger + expireBefore = TimestampS() + expireAfter = TimestampS() + labels = StringListAttribute() + resourceType = ListAttribute(ResourceTypeE) + sizeAbove = Attribute(int) # xs:nonNegativeInteger + sizeBelow = Attribute(int) # xs:positiveInteger + contentType = UnicodeAttribute() # m2m:typeOfContent + attribute = ListAttribute() # m2m:attribute + filterUsage = EntityAttribute(FilterUsageE) + limit = Attribute(int) # xs:nonNegativeInteger + +# TODO: attribute + +# TODO: scheduleEntries + +# TODO: actionStatus + +# TODO: anyArgType + +# TODO: resetArgsType + +# TODO: rebootArgsType + +# TODO: uploadArgsType + +# TODO: downloadArgsType + +# TODO: softwareInstallArgsType + +# TODO: softwareUpdateArgsType + +# TODO: softwareUninstallArgsType + +# TODO: execReqArgsListType + +# TODO: mgmtLinkRef + +# TODO: childResourceRef + +# TODO: responseTypeInfo + +# TODO: operationResult + + +# TODO(rkr): +# "This is an xs:choice. 
A locationRegion shall contain either: +# 1) A countryCode element, in which case circRegion shall not appear, or +# 2) A circRegion element, in which case countryCode shall not appear" +class LocationRegionC(OneM2MEntity): + countryCode = ListAttribute(CountryCodeE) + circRegion = ListAttribute(float) # "list of 3 xs:float": values "represent" latitude. longitude, radius + + +class LabeledResource(OneM2MResource): + labels = LabelsS() + + +class ExpiringResource(OneM2MResource): + expirationTime = TimestampS(mandatory=False) + + +class AccessControlPolicyIDHolder(OneM2MResource): + accessControlPolicyIDs = StringListAttribute() + + +class DynamicAuthorizationConsultationIDHolder(OneM2MResource): + dynamicAuthorizationConsultationIDs = ListOfURIsS() + + +class SubscribableResource(OneM2MResource): + pass + + +class AnnounceableResource(OneM2MResource): + announceTo = ListOfURIsS() + announcedAttribute = UnicodeAttribute() # TODO + + +class AnnouncedResource(OneM2MResource): + link = Attribute(AnyURI) + + +class ResourceC(LabeledResource): + __child_types__ = () + + typename = None + + resourceName = UnicodeAttribute(accesstype=Attribute.WO) + + resourceType = EntityAttribute(ResourceTypeE, accesstype=Attribute.RO) + resourceID = IDS(accesstype=Attribute.RO) + parentID = NhURIS(accesstype=Attribute.RO) + + lastModifiedTime = TimestampS(accesstype=Attribute.RO) + creationTime = TimestampS(accesstype=Attribute.RO) + + childResource = ListAttribute() + + @property + def name(self): + return self.resourceName + + @property + def id(self): + return self.resourceID + + def __repr__(self): + return "%s(path='%s', id='%s')" % (type(self).__name__, self.path, + self.id) + + +ResourceC.childResource.content_type = ResourceC + + +class RegularResourceC(ResourceC, ExpiringResource, AccessControlPolicyIDHolder, + DynamicAuthorizationConsultationIDHolder): + pass + + +class AnnounceableResourceC(RegularResourceC, AnnounceableResource): + pass + + +class AnnouncedResourceC(RegularResourceC, AnnouncedResource): + pass + + +class AnnounceableSubordinateResourceC(ResourceC, ExpiringResource, + AnnounceableResource): + pass + + +class AnnouncedSubordinateResourceC(ResourceC, ExpiringResource, + AnnouncedResource): + pass + +# TODO: mgmtResource + +# TODO: announcedMgmtResource + + +################################################################################ +# requestPrimitive +################################################################################ + +class RequestPrimitive(OneM2MEntity): + operation = EntityAttribute(OperationE) + to = Attribute(AnyURI) + from_ = IDS() + requestIdentifier = RequestIDS() + resourceType = EntityAttribute(ResourceTypeE) + name = UnicodeAttribute() + primitiveContent = UnicodeAttribute() # m2m:primitiveContent + role = UnicodeAttribute() # xs:anyType + originatingTimestamp = TimestampS() + requestExpirationTimestamp = TimestampS() # m2m::absRelTimestamp + resultExpirationTimestamp = TimestampS() # m2m::absRelTimestamp + operationExecutionTime = TimestampS() # m2m::absRelTimestamp + responseType = UnicodeAttribute() # m2m:responseTypeInfo + resultPersistence = TimestampS() # m2m::absRelTimestamp + resultContent = UnicodeAttribute() # m2m:resultContent + eventCategory = UnicodeAttribute() # m2m:eventCat + deliveryAggregation = Attribute(bool) + groupRequestIdentifier = UnicodeAttribute() + filterCriteria = EntityAttribute(FilterCriteria) + discoveryResultType = EntityAttribute(DiscResTypeE) + + +class AttributeList(OneM2MContentResource): + typename = "attributeList" 
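# Sketch of how a discovery FilterCriteria (defined above) travels on the
# wire; the label value is a placeholder and keyword construction of entities
# is assumed:
#
#     fc = FilterCriteria(filterUsage=FilterUsageE.Discovery,
#                         resourceType=[ResourceTypeE.container],
#                         labels=['tag:demo'])
#
# When attached to a request's fc field, the HTTP client (client/http.py
# above) serializes it with the short member/attribute names, roughly
# '?fu=1&ty=3&lbl=tag%3Ademo'.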
+ CONTENT = AttributeListS() + + +################################################################################ +# responsePrimitive +################################################################################ + +class ResponsePrimitive(OneM2MEntity): + responseStatusCode = UnicodeAttribute() # m2m:responseStatusCode + requestIdentifier = RequestIDS() + primitiveContent = UnicodeAttribute() # m2m:primitiveContent + to = IDS() + from_ = IDS() + originatingTimestamp = TimestampS() + resultExpirationTimestamp = TimestampS() # m2m:absRelTimestamp + eventCategory = UnicodeAttribute() # m2m:eventCat + + +class Resource(OneM2MContentResource): + pass + + +class URIList(OneM2MContentResource): + typename = "URIList" + CONTENT = ListOfURIsS() + + +class AggregatedResponse(OneM2MEntity): + responsePrimitive = ListAttribute(ResponsePrimitive) + + +################################################################################ +# notification +################################################################################ + +class OperationMonitorTypeC(OneM2MEntity): + operation = UnicodeAttribute() # m2m:operation + originator = UnicodeAttribute() # m2m:ID + + +class NotificationEventC(OneM2MEntity): + representation = EntityAttribute(ResourceC) # xs:anyType + operationMonitor = EntityAttribute(OperationMonitorTypeC) + notificationEventType = EntityAttribute(NotificationEventTypeE) + + +class Notification(OneM2MEntity): + notificationEvent = EntityAttribute(NotificationEventC) + verificationRequest = Attribute(bool) + subscriptionDeletion = Attribute(bool) + subscriptionReference = Attribute(AnyURI) + creator = UnicodeAttribute() # ID + notificationForwardingURI = Attribute(AnyURI) + + +class AggregatedNotification(OneM2MEntity): + """See TS-0004 Table 7.4.1.1-2""" + + notification = ListAttribute(Notification) + + +################################################################################ +# subscription +################################################################################ + +class EventNotificationCriteria(OneM2MEntity): + """See TS-0004 Table 6.3.2.3-1""" + + createdBefore = TimestampS() + createdAfter = TimestampS() + modifiedSince = TimestampS() + unmodifiedSince = TimestampS() + stateTagSmaller = Attribute(int) + stateTagBigger = Attribute(int) + expireBefore = TimestampS() + expireAfter = TimestampS() + sizeAbove = Attribute(int) + sizeBelow = Attribute(int) + operationMonitor = UnicodeAttribute() # ListAttribute(m2m:operation) + # attribute = Attribute(int) # enum but to be defined in the standard + attribute = UnicodeAttribute() # ListAttribute(m2m:attribute) + notificationEventType = ListAttribute(NotificationEventTypeE) + + +class BatchNotify(OneM2MEntity): + pass # TODO + + +class RateLimit(OneM2MEntity): + pass # TODO + + +class Subscription(RegularResourceC): + """ See TS-0001 section 9.6.8 + See TS-0004 Table 7.3.7.1-3""" + + eventNotificationCriteria = EntityAttribute(EventNotificationCriteria) + expirationCounter = Attribute(int) + notificationURI = ListOfURIsS(mandatory=True) + groupID = Attribute(AnyURI) + notificationForwardingURI = Attribute(AnyURI) + batchNotify = EntityAttribute(BatchNotify) + rateLimit = EntityAttribute(RateLimit) + preSubscriptionNotify = Attribute(int, accesstype=Attribute.WO, + mandatory=False) + pendingNotification = Attribute(PendingNotificationE) + notificationStoragePriority = Attribute(int) + latestNotify = Attribute(bool) + notificationContentType = Attribute(NotificationContentTypeE) + notificationEventCat = 
UnicodeAttribute() # m2m:eventCat + creator = IDS(accesstype=Attribute.WO, mandatory=False) + subscriberURI = Attribute(AnyURI, accesstype=Attribute.WO, mandatory=False) + + __child_types__ = ( + # Schedule, + ) + + +################################################################################ +# accessControlPolicy +################################################################################ + +class AccessControlObjectDetailsC(OneM2MEntity): + # specifies to which resource type the rule applies + resourceType = EntityAttribute(ResourceTypeE) + # TODO(rkr): Child resource types listed in the childResourceType component are subject of + # TODO access control for the Create operation only. Once a child resource is created, + # TODO the Access Control Policies assigned directly to it apply. + # for create operation only, list of creatable child resources + childResourceType = ListAttribute(ResourceTypeE) + specializationID = Attribute(AnyURI) # xs:anyURI + + +class AccessControlIpAddressesC(OneM2MEntity): + ipv4Addresses = ListAttribute(AnyURI) # m2m:ipv4 + ipv6Addresses = ListAttribute(AnyURI) # m2m:ipv6 + + +class AccessControlContextsC(OneM2MEntity): + accessControlWindow = StringListAttribute() # m2m:scheduleEntry + accessControlIpAddresses = EntityAttribute(AccessControlIpAddressesC) + accessControlLocationRegion = ListAttribute(LocationRegionC) # m2m:locationRegion + + +class AccessControlRuleC(OneM2MEntity): + accessControlOriginators = ListOfURIsS() # m2m:listOfURIs # Mand + accessControlOperations = ListAttribute(AccessControlOperationE) # Mand + accessControlContexts = ListAttribute(AccessControlContextsC) # Opt + # accessControlContexts = EntityAttribute(AccessControlContextsC) # Opt + # TODO(rkr): currently default of the Flag is set to False; + # TODO if not explicitly set to True the authorization is performed without authentication + # TODO when authentication is used, it should maybe set to be True by default + accessControlAuthenticationFlag = Attribute(bool, default=False) # Opt + # TODO(rkr): "ObjectDetails" only described in TS-0001, 9.6.2.4, p.121 (which is version 2.10.0) as optional + # TODO parameter of an access control rule, but not in TS-0004 and not in xsd version 2.7.0. 
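# Sketch of composing a policy from these types together with the
# AccessControlPolicy resource defined just below; the originator ID is a
# placeholder and keyword construction of entities is assumed:
#
#     rule = AccessControlRuleC(
#         accessControlOriginators=['CAdmin'],
#         accessControlOperations=[AccessControlOperationE.retrieve,
#                                  AccessControlOperationE.notify],
#         accessControlAuthenticationFlag=False,
#     )
#     acp = AccessControlPolicy(privileges=[rule], selfPrivileges=[rule])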
+ # accessControlObjectDetails = ListAttribute(AccessControlObjectDetailsC) + + +class AccessControlPolicy(AnnounceableSubordinateResourceC, + SubscribableResource): + privileges = ListAttribute(AccessControlRuleC) + selfPrivileges = ListAttribute(AccessControlRuleC) + + __child_types__ = ( + Subscription, + ) + + +class AccessControlPolicyAnnc(AnnouncedSubordinateResourceC, + SubscribableResource): + privileges = ListAttribute(AccessControlRuleC) + selfPrivileges = ListAttribute(AccessControlRuleC) + + __child_types__ = ( + Subscription, + ) + + +################################################################################ +# dynamicAuthorization +################################################################################ + +class DynamicAuthorizationConsultation(AnnounceableResourceC): + dynamicAuthorizationEnabled = Attribute(bool, mandatory=True) + dynamicAuthorizationPoA = ListOfURIsS() + dynamicAuthorizationLifetime = TimestampS() + + +class OriginatorIPTypeC(OneM2MEntity): + ipv4Address = UnicodeAttribute() # m2m:ipv4 + ipv6Address = UnicodeAttribute() # m2m:ipv6 + + +# see "CDT-notification-v2_7_0.xsd" for securityInfoType and dynAuthDasResponse +class DynamicACPInfoC(OneM2MEntity): + grantedPrivileges = ListAttribute(AccessControlRuleC) + privilegesLifetime = TimestampS() # m2m:absRelTimestamp + + +# see "CDT-commonTypes-v2_7_0.xsd" +class DynAuthDasRequestC(OneM2MEntity): + originator = UnicodeAttribute() # m2m:ID + targetedResourceType = EntityAttribute(ResourceTypeE) + operation = UnicodeAttribute() # m2m:operation + # operation = EntityAttribute(OperationE) + originatorIP = EntityAttribute(OriginatorIPTypeC) + originatorLocation = EntityAttribute(LocationRegionC) + requestTimestamp = TimestampS() + targetedResourceID = UnicodeAttribute() # xs:anyURI + #targetedResourceID = Attribute(AnyURI) + proposedPrivilegesLifetime = TimestampS() + # TODO(rkr): is this ok? + originatorRoleIDs = StringListAttribute() # list of m2m:roleID + roleIDsFromACPs = StringListAttribute() # list of m2m:roleID + tokenIDs = StringListAttribute() # list of m2m:tokeID + + +class DynAuthDasResponseC(OneM2MEntity): + # dynamicACPInfo = ListAttribute(DynamicACPInfoC) + dynamicACPInfo = EntityAttribute(DynamicACPInfoC) + tokens = StringListAttribute() # list of simpleType m2m:dynAuthJWT + + +# TODO(rkr): check if correct +class SecurityInfo(OneM2MEntity): + securityInfoType = ListAttribute(SecurityInfoTypeE) + #dasRequest = ListAttribute() # lists are wrong? + #dasResponse = ListAttribute() + dasRequest = EntityAttribute(DynAuthDasRequestC) + dasResponse = EntityAttribute(DynAuthDasResponseC) + esprimRandObject = ListAttribute() # m2m:receiverESPrimRandObject + esprimObject = ListAttribute() # m2m:e2eCompactJWE + escertkeMessage = ListAttribute() # xs:base64Binary + + +################################################################################ +# remoteCSE +################################################################################ + +class RemoteCSE(AnnounceableResourceC, SubscribableResource): + """See TS-0001 section 9.6.4""" + + cseType = Attribute(CSETypeIDE, accesstype=Attribute.WO, mandatory=False) + pointOfAccess = StringListAttribute() + CSEBase = UnicodeAttribute(accesstype=Attribute.WO) + CSE_ID = UnicodeAttribute(accesstype=Attribute.WO) # TODO: CSE-ID (minus!) + M2M_Ext_ID = UnicodeAttribute() # TODO: M2M-Ext-ID (minus!) 
+ Trigger_Recipient_ID = UnicodeAttribute() # TODO: Trigger-Recipient-ID + requestReachability = Attribute(bool) + nodeLink = UnicodeAttribute() + + __child_types__ = ( + Subscription, + ) + + +class RemoteCSEAnnc(AnnouncedResourceC, SubscribableResource): + cseType = Attribute(CSETypeIDE, accesstype=Attribute.WO, mandatory=False) + pointOfAccess = StringListAttribute() + CSEBase = UnicodeAttribute(accesstype=Attribute.WO) + CSE_ID = UnicodeAttribute(accesstype=Attribute.WO) # TODO: CSE-ID (minus!) + requestReachability = Attribute(bool) + nodeLink = UnicodeAttribute() + + __child_types__ = ( + Subscription, + # TODO + ) + + +################################################################################ +# contentInstance +################################################################################ + +class ContentInstance(AnnounceableSubordinateResourceC, + SubscribableResource): + """See TS-0001 section 9.6.7""" + + stateTag = UnicodeAttribute(accesstype=Attribute.RO) + creator = UnicodeAttribute() # m2m:ID + # contentInfo = typeOfContent(:EncodingType) + # typeOfContent => Media Types + # ex: application/json:1 + contentInfo = UnicodeAttribute() # m2m:contentInfo + contentSize = Attribute(int, accesstype=Attribute.RO) + ontologyRef = UnicodeAttribute(accesstype=Attribute.WO) + content = Attribute(bytes, accesstype=Attribute.WO, mandatory=True) + + __child_types__ = ( + Subscription, + ) + + +class ContentInstanceAnnc(AnnouncedSubordinateResourceC): + stateTag = UnicodeAttribute(accesstype=Attribute.RO) + contentInfo = UnicodeAttribute(EncodingTypeE) # m2m:contentInfo + contentSize = Attribute(int, accesstype=Attribute.WO) + ontologyRef = UnicodeAttribute(accesstype=Attribute.WO) + content = Attribute(bytes, accesstype=Attribute.WO, mandatory=True) + + +################################################################################ +# container +################################################################################ + +class Container(AnnounceableResourceC, SubscribableResource): + """See TS-0001 section 9.6.6""" + + stateTag = UnicodeAttribute(accesstype=Attribute.RO) + creator = UnicodeAttribute() + maxNrOfInstances = Attribute(int) + maxByteSize = Attribute(int) + maxInstanceAge = UnicodeAttribute(mandatory=False) # todo + currentNrOfInstances = Attribute(int, accesstype=Attribute.RO) + currentByteSize = Attribute(int, accesstype=Attribute.RO) + locationID = UnicodeAttribute() + ontologyRef = UnicodeAttribute() + latest = Attribute(ContentInstance, mandatory=False) + oldest = Attribute(ContentInstance, mandatory=False) + + __child_types__ = ( + ContentInstance, + Subscription, + ) + +Container.__child_types__ = ( + ContentInstance, + Container, + Subscription, +) + + +class ContainerAnnc(AnnouncedResourceC, SubscribableResource): + + stateTag = UnicodeAttribute(accesstype=Attribute.RO) + maxNrOfInstances = Attribute(int) + maxByteSize = Attribute(int) + maxInstanceAge = UnicodeAttribute(mandatory=False) # todo + currentNrOfInstances = Attribute(int, accesstype=Attribute.RO) + currentByteSize = Attribute(int, accesstype=Attribute.RO) + locationID = UnicodeAttribute() + ontologyRef = UnicodeAttribute() + latest = Attribute(ContentInstance, mandatory=False) + + __child_types__ = ( + ContentInstance, + ContentInstanceAnnc, + Container, + Subscription, + ) + +ContainerAnnc.__child_types__ = ( + ContentInstance, + ContentInstanceAnnc, + Container, + ContainerAnnc, + Subscription, +) + + +################################################################################ +# AE 
+################################################################################ + +class AE(AnnounceableResourceC, SubscribableResource): + """See TS-0001 section 9.6.5""" + + typename = "AE" + + appName = UnicodeAttribute() + App_ID = UnicodeAttribute(accesstype=Attribute.WO, mandatory=True) + AE_ID = UnicodeAttribute(accesstype=Attribute.RO) # m2m:ID + pointOfAccess = StringListAttribute() # m2m:poaList + ontologyRef = UnicodeAttribute() # xs:anyURI + nodeLink = UnicodeAttribute(accesstype=Attribute.RO) # xs:anyURI + requestReachability = Attribute(bool, mandatory=True) + contentSerialization = UnicodeAttribute() # TODO m2m:serializations + + __child_types__ = ( + Container, + # Group, + Subscription, + AccessControlPolicy, + # PollingChannel, + # Schedule, + DynamicAuthorizationConsultation + ) + + +class AEAnnc(AnnouncedResourceC, SubscribableResource): + + typename = "AEAnnc" + + appName = UnicodeAttribute(accesstype=Attribute.WO) + App_ID = UnicodeAttribute() + AE_ID = UnicodeAttribute() + pointOfAccess = StringListAttribute() + ontologyRef = UnicodeAttribute() + nodeLink = UnicodeAttribute() + requestReachability = Attribute(bool) + + __child_types__ = ( + Container, + ContainerAnnc, + # Group, + # GroupAnnc, + Subscription, + # AccessControlPolicy, + # AccessControlPolicyAnnc, + # PollingChannel, + # Schedule, + ) + + +################################################################################ +# CSEBase +################################################################################ + +class CSEBase(ResourceC, SubscribableResource, AccessControlPolicyIDHolder): + """See TS-0001 section 9.6.3""" + + typename = "CSEBase" + + cseType = Attribute(CSETypeIDE, accesstype=Attribute.WO) + CSE_ID = UnicodeAttribute(accesstype=Attribute.WO) # TODO: CSE-ID (minus!) 
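# The __child_types__ tuples (above for AE, below for CSEBase) encode the
# allowed resource tree, e.g. CSEBase -> AE -> container -> contentInstance.
# A hypothetical AE registration (names are placeholders, keyword construction
# of resources is assumed) is posted to the CSEBase with the resource type in
# the Content-Type, cf. client/http.py:
#
#     ae = AE(App_ID="demo-app", requestReachability=False,
#             resourceName="demo-ae")
#     # POST <cse-base>  with  Content-Type: application/json; ty=2
#     # (2 == ResourceTypeE.AE)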
+ supportedResourceType = StringListAttribute(content_type=ResourceTypeE, + accesstype=Attribute.RO) + pointOfAccess = StringListAttribute() + nodeLink = UnicodeAttribute() + + __child_types__ = ( + RemoteCSE, + # Node, + AE, + Container, + # Group, + AccessControlPolicy, + Subscription, + # MgmtCmd, + # LocationPolicy, + # StatsConfig, + # StatsCollect, + # Request, + # Delivery, + # Schedule, + # M2mServiceSubscriptionProfile, + DynamicAuthorizationConsultation + ) + + +################################################################################ +# misc +################################################################################ + +long_to_short_attribute_mapping = { + "accessControlPolicyIDs": "acpi", + "announcedAttribute": "aa", + "announceTo": "at", + "creationTime": "ct", + "expirationTime": "et", + "labels": "lbl", + "lastModifiedTime": "lt", + "parentID": "pi", + "resourceID": "ri", + "resourceType": "ty", + "stateTag": "st", + "resourceName": "rn", + "privileges": "pv", + "selfPrivileges": "pvs", + "App-ID": "api", + "AE-ID": "aei", + "appName": "apn", + "pointOfAccess": "poa", + "ontologyRef": "or", + "nodeLink": "nl", + "contentSerialization": "csz", + "creator": "cr", + "maxNrOfInstances": "mni", + "maxByteSize": "mbs", + "maxInstanceAge": "mia", + "currentNrOfInstances": "cni", + "currentByteSize": "cbs", + "locationID": "li", + "contentInfo": "cnf", + "contentSize": "cs", + "primitiveContent": "pc", + "content": "con", + "cseType": "cst", + "CSE-ID": "csi", + "supportedResourceType": "srt", + "notificationCongestionPolicy": "ncp", + "source": "sr", + "target": "tg", + "lifespan": "ls", + "eventCat": "ec", + "deliveryMetaData": "dmd", + "aggregatedRequest": "arq", + "eventID": "evi", + "eventType": "evt", + "evenStart": "evs", + "eventEnd": "eve", + "operationType": "opt", + "dataSize": "ds", + "execStatus": "exs", + "execResult": "exr", + "execDisable": "exd", + "execTarget": "ext", + "execMode": "exm", + "execFrequency": "exf", + "execDelay": "exy", + "execNumber": "exn", + "execReqArgs": "exra", + "execEnable": "exe", + "memberType": "mt", + "currentNrOfMembers": "cnm", + "maxNrOfMembers": "mnm", + "memberIDs": "mid", + "membersAccessControlPolicyIDs": "macp", + "memberTypeValidated": "mtv", + "consistencyStrategy": "csy", + "groupName": "gn", + "locationSource": "los", + "locationUpdatePeriod": "lou", + "locationTargetId": "lot", + "locationServer": "lor", + "locationContainerID": "loi", + "locationContainerName": "lon", + "locationStatus": "lost", + "serviceRoles": "svr", + "description": "dc", + "cmdType": "cmt", + "mgmtDefinition": "mgd", + "objectIDs": "onis", + "objectPaths": "obps", + "nodeID": "ni", + "hostedCSELink": "hcl", + "CSEBase": "cb", + "M2M-Ext-ID": "mei", + "Trigger-Recipient-ID": "tri", + "requestReachability": "rr", + "originator": "og", + "metaInformation": "mi", + "requestStatus": "rs", + "operationResult": "ol", + "operation": "opn", + "requestID": "rid", + "scheduleElement": "se", + "deviceIdentifier": "di", + "ruleLinks": "rlk", + "statsCollectID": "sci", + "collectingEntityID": "cei", + "collectedEntityID": "cdi", + "devStatus": "ss", + "statsRuleStatus": "srs", + "statModel": "sm", + "collectPeriod": "cp", + "eventNotificationCriteria": "enc", + "expirationCounter": "exc", + "notificationURI": "nu", + "groupID": "gpi", + "notificationForwardingURI": "nfu", + "batchNotify": "bn", + "rateLimit": "rl", + "preSubscriptionNotify": "psn", + "pendingNotification": "pn", + "notificationStoragePriority": "nsp", + "latestNotify": "ln", + 
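# These long/short pairs drive the wire format; the lookup helpers defined
# right after this table give the round trip, e.g.:
#
#     get_short_attribute_name('labels')          # -> 'lbl'
#     get_long_attribute_name('lbl')              # -> 'labels'
#     get_short_resource_name('contentInstance')  # -> 'cin'
#     get_short_attribute_name('no-such-name')    # -> None (dict.get semantics)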
"notificationContentType": "nct", + "notificationEventCat": "nec", + "subscriberURI": "su", + "version": "vr", + "URL": "url", + "update": "ud", + "updateStatus": "uds", + "install": "in", + "uninstall": "un", + "installStatus": "ins", + "activate": "act", + "deactivate": "dea", + "activeStatus": "acts", + "memAvailable": "mma", + "memTotal": "mmt", + "areaNwkType": "ant", + "listOfDevices": "idv", + "devId": "dvd", + "devType": "dvt", + "areaNwkId": "awi", + "sleepInterval": "sli", + "sleepDuration": "sld", + "listOfNeighbors": "lnh", + "batteryLevel": "btl", + "batteryStatus": "bts", + "deviceLabel": "dlb", + "manufacturer": "man", + "model": "mod", + "deviceType": "dty", + "fwVersion": "fwv", + "swVersion": "swv", + "hwVersion": "hwv", + "capabilityName": "can", + "attached": "att", + "capabilityActionStatus": "cas", + "enable": "ena", + "disable": "dis", + "currentState": "cus", + "reboot": "rbo", + "factoryReset": "far", + "logTypeId": "lgt", + "logData": "lgd", + "logActionStatus": "lgs", + "logStatus": "lgst", + "logStart": "lga", + "logStop": "lgo", + "firmwareName": "fwnnam", + "softwareName": "swn", + "cmdhPolicyName": "cpn", + "mgmtLink": "cmlk", + "activeCmdhPolicyLink": "acmlk", + "order": "od", + "defEcValue": "dev", + "requestOrigin": "ror", + "requestContext": "rct", + "requestContextNotification": "rcn", + "requestCharacteristics": "rch", + "applicableEventCategories": "aecs", + "applicableEventCategory": "aec", + "defaultRequestExpTime": "dget", + "defaultResultExpTime": "dset", + "defaultOpExecTime": "doet", + "defaultRespPersistence": "drp", + "defaultDelAggregation": "dda", + "limitsEventCategory": "lec", + "limitsRequestExpTime": "lget", + "limitsResultExpTime": "lset", + "limitsOpExecTime": "loet", + "limitsRespPersistence": "lrp", + "limitsDelAggregation": "lda", + "targetNetwork": "ttn", + "minReqVolume": "mrv", + "backOffParameters": "bop", + "otherConditions": "ohc", + "maxBufferSize": "mbfs", + "storagePriority": "sgp", + "applicableCredIDs": "apci", + "allowedApp-IDs": "aai", + "allowedAEs": "aae", + "dynamicAuthorizationConsultationIDs": "daci", + "dynamicAuthorizationEnabled": "dae", + "dynamicAuthorizationPoA": "dap", + "dynamicAuthorizationLifetime": "dal", + # TODO (rkr): resourceType is specified in Table 8.2.3-267, "Resource attribute short names", two times with + # TODO different short names "ty" and "acodTy" + # there is some issue paper from oneM2M where there have found out that this is an issue with usual resource type + # "resourceType": "acodTy" +} + +short_to_long_attribute_mapping = {v: k for k, v in + long_to_short_attribute_mapping.items()} + + +def get_long_attribute_name(n): + return short_to_long_attribute_mapping.get(n) + + +def get_short_attribute_name(n): + return long_to_short_attribute_mapping.get(n) + +long_to_short_resource_mapping = { + "accessControlPolicy": "acp", + "accessControlPolicyAnnc": "acpA", + "AE": "ae", + "AEAnnc": "aeA", + "container": "cnt", + "containerAnnc": "cntA", + "latest": "la", + "oldest": "ol", + "contentInstance": "cin", + "contentInstanceAnnc": "cinA", + "CSEBase": "cb", + "delivery": "dlv", + "eventConfig": "evcg", + "execInstance": "exin", + "fanOutPoint": "fopt", + "group": "grp", + "groupAnnc": "grpA", + "locationPolicy": "lcp", + "locationPolicyAnnc": "lcpA", + "m2mServiceSubscriptionProfile": "mssp", + "mgmtCmd": "mgc", + "mgmtObj": "mgo", + "mgmtObjAnnc": "mgoA", + "node": "nod", + "nodeAnnc": "nodA", + "pollingChannel": "pch", + "pollingChannelURI": "pcu", + "remoteCSE": "csr", + "remoteCSEAnnc": 
"csrA", + "request": "req", + "schedule": "sch", + "scheduleAnnc": "schA", + "serviceSubscribedAppRule": "asar", + "serviceSubscribedNode": "svsn", + "statsCollect": "stcl", + "statsConfig": "stcg", + "subscription": "sub", + "firmware": "fwr", + "firmwareAnnc": "fwrA", + "software": "swr", + "softwareAnnc": "swrA", + "memory": "mem", + "memoryAnnc": "memA", + "areaNwkInfo": "ani", + "areaNwkInfoAnnc": "aniA", + "areaNwkDeviceInfo": "andi", + "areaNwkDeviceInfoAnnc": "andiA", + "battery": "bat", + "batteryAnnc": "batA", + "deviceInfo": "dvi", + "deviceInfoAnnc": "dviA", + "deviceCapability": "dvc", + "deviceCapabilityAnnc": "dvcA", + "reboot": "rbo", + "rebootAnnc": "rboA", + "eventLog": "evl", + "eventLogAnnc": "evlA", + "cmdhPolicy": "cmp", + "activeCmdhPolicy": "acmp", + "cmdhDefaults": "cmdf", + "cmdhDefEcValue": "cmdv", + "cmdhEcDefParamValues": "cmpv", + "cmdhLimits": "cml", + "cmdhNetworkAccessRules": "cmnr", + "cmdhNwAccessRule": "cmwr", + "cmdhBuffer": "cmbf", + "dynamicAuthorizationConsultation": "dac" +} + +short_to_long_resource_mapping = {v: k for k, v in + long_to_short_resource_mapping.items()} + + +def get_long_resource_name(n): + return short_to_long_resource_mapping.get(n) + + +def get_short_resource_name(n): + return long_to_short_resource_mapping.get(n) + + +long_to_short_member_mapping = { + "createdBefore": "crb", + "createdAfter": "cra", + "modifiedSince": "ms", + "unmodifiedSince": "us", + "stateTagSmaller": "sts", + "stateTagBigger": "stb", + "expireBefore": "exb", + "expireAfter": "exa", + "labels": "lbl", + "resourceType": "ty", + "sizeAbove": "sza", + "sizeBelow": "szb", + "contentType": "cty", + "limit": "lim", + "attribute": "atr", + "notificationEventType": "net", + "operationMonitor": "om", + "representation": "rep", + "filterUsage": "fu", + "eventCatType": "ect", + "eventCatNo": "ecn", + "number": "num", + "duration": "dur", + "notification": "sgn", + "notificationEvent": "nev", + "verificationRequest": "vrq", + "subscriptionDeletion": "sud", + "subscriptionReference": "sur", + "creator": "cr", + "notificationForwardingURI": "nfu", + "operation": "opr", + "originator": "org", + "accessId": "aci", + "MSISDN": "msd", + "action": "acn", + "status": "sus", + "childResource": "ch", + "accessControlRule": "acr", + "accessControlOriginators": "acor", + "accessControlOperations": "acop", + "accessControlContexts": "acco", + "accessControlWindow": "actw", + "accessControlIpAddresses": "acip", + "ipv4Addresses": "ipv4", + "ipv6Addresses": "ipv6", + "accessControlLocationRegion": "aclr", + "countryCode": "accc", + "circRegion": "accr", + "name": "nm", + "value": "val", + "type": "typ", + "maxNrOfNotify": "mnn", + "timeWindow": "tww", + "scheduleEntry": "sce", + "aggregatedNotification": "agn", + "attributeList": "atrl", + "aggregatedResponse": "agr", + "resource": "rce", + "URIList": "uril", + "anyArg": "any", + "fileType": "ftyp", + "URL": "url", + "username": "unm", + "password": "pwd", + "fileSize": "fsi", + "targetFile": "tgf", + "delaySeconds": "dss", + "successURL": "surl", + "startTime": "stt", + "completeTime": "cpt", + "UUID": "uuid", + "executionEnvRef": "eer", + "version": "vr", + "reset": "rst", + "reboot": "rbo", + "upload": "uld", + "download": "dld", + "softwareInstall": "swin", + "softwareUpdate": "swup", + "softwareUninstall": "swun", + "tracingOption": "tcop", + "tracingInfo": "tcin", + "responseTypeValue": "rtv", + "notificationURI": "nu", + "accessControlAuthenticationFlag": "acaf", + "ipv4Address": "ip4", + "ipv6Address": "ip6", + 
"specializationID": "spid", + "accessControlObjectDetails": "acod", + "childResourceType": "chty", + "targetedResourceType": "trt", + "originatorIP": "oip", + "originatorLocation": "olo", + "originatorRoleIDs": "orid", + "requestTimestamp": "rts", + "targetedResourceID": "trid", + "proposedPrivilegesLifetime": "ppl", + "roleIDsFromACPs": "rfa", + "tokenIDs": "tids", + "dynamicACPInfo": "dai", + "grantedPrivileges": "gp", + "privilegesLifetime": "pl", + "tokens": "tkns", + "securityInfo": "seci", + "securityInfoType": "sit", + "dasRequest": "dreq", + "dasResponse": "dres", + "esprimRandObject": "ero", + "esprimObject": "epo", + "escertkeMessage": "eckm" +} + +short_to_long_member_mapping = {v: k for k, v in + long_to_short_member_mapping.items()} + + +def get_long_member_name(n): + return short_to_long_member_mapping.get(n) + + +def get_short_member_name(n): + return long_to_short_member_mapping.get(n) + +long_to_short_root_mapping = { + "requestPrimitive": "rqp", + "responsePrimitive": "rsp" +} + +short_to_long_root_mapping = {v: k for k, v in + long_to_short_root_mapping.items()} + + +def get_long_root_name(n): + return short_to_long_root_mapping.get(n) + + +def get_short_root_name(n): + return long_to_short_root_mapping.get(n) + +long_to_short_parameter_mapping = { + "operation": "op", + "to": "to", + "from": "fr", + "requestIdentifier": "rqi", + "resourceType": "ty", + "primitiveContent": "pc", + "role": "rol", + "originatingTimestamp": "ot", + "requestExpirationTimestamp": "rqet", + "resultExpirationTimestamp": "rset", + "operationExecutionTime": "oet", + "responseType": "rt", + "resultPersistence": "rp", + "resultContent": "rcn", + "eventCategory": "ec", + "deliveryAggregation": "da", + "groupRequestIdentifier": "gid", + "filterCriteria": "fc", + "discoveryResultType": "drt", + "responseStatusCode": "rsc" +} + +short_to_long_parameter_mapping = {v: k for k, v in + long_to_short_parameter_mapping.items()} + + +def get_long_parameter_name(n): + return short_to_long_parameter_mapping.get(n) + + +def get_short_parameter_name(n): + return long_to_short_parameter_mapping.get(n) + +_all_types = {k: v for k, v in globals().iteritems() + if issubclass(v, OneM2MEntity) and not v.__subclasses__()} + +_all_types_short = {} +_all_types_long = {} + +for k, v in _all_types.iteritems(): + if get_short_resource_name(k): + long_name = k + short_name = get_short_resource_name(k) + elif get_short_attribute_name(k): + long_name = k + short_name = get_short_attribute_name(k) + elif get_short_member_name(k): + long_name = k + short_name = get_short_member_name(k) + elif get_short_root_name(k): + long_name = k + short_name = get_short_root_name(k) + elif get_short_resource_name(k[0].lower() + k[1:]): + long_name = k[0].lower() + k[1:] + short_name = get_short_resource_name(long_name) + elif get_short_attribute_name(k[0].lower() + k[1:]): + long_name = k[0].lower() + k[1:] + short_name = get_short_attribute_name(long_name) + elif get_short_member_name(k[0].lower() + k[1:]): + long_name = k[0].lower() + k[1:] + short_name = get_short_member_name(long_name) + elif get_short_root_name(k[0].lower() + k[1:]): + long_name = k[0].lower() + k[1:] + short_name = get_short_root_name(long_name) + else: + continue + _all_types_short[short_name] = v + _all_types_long[long_name] = v + + +_resource_types = {k: v for k, v in _all_types.iteritems() + if issubclass(v, ResourceC)} + +_resource_types_short = {} +_resource_types_long = {} + +for k, v in _resource_types.iteritems(): + if get_short_resource_name(k): + long_name = k 
+ short_name = get_short_resource_name(k) + elif get_short_resource_name(k[0].lower() + k[1:]): + long_name = k[0].lower() + k[1:] + short_name = get_short_resource_name(long_name) + else: + continue + _resource_types_short[short_name] = v + _resource_types_long[long_name] = v + + +def get_onem2m_type(typename): + try: + try: + return _all_types_short[typename] + except KeyError: + return _all_types_long[typename] + except KeyError: + raise ModelTypeError("Not a valid type: %s" % (typename,)) + + +def get_onem2m_resource_type(typename): + try: + try: + return _resource_types_short[typename] + except KeyError: + return _resource_types_long[typename] + except KeyError: + raise ModelTypeError("Not a valid resource type: %s" % (typename,)) + + +def get_onem2m_types(): + return _all_types.values() + + +def get_onem2m_resource_types(): + return _resource_types.values() diff --git a/build/lib/openmtc_onem2m/serializer/__init__.py b/build/lib/openmtc_onem2m/serializer/__init__.py new file mode 100644 index 0000000..a587b67 --- /dev/null +++ b/build/lib/openmtc_onem2m/serializer/__init__.py @@ -0,0 +1,93 @@ +from .json import OneM2MJsonSerializer +from openmtc_onem2m.exc import CSEBadRequest, CSEContentsUnacceptable +from werkzeug import Accept, parse_accept_header +from futile.logging import get_logger +from openmtc.exc import OpenMTCError + +_factories = {"application/json": OneM2MJsonSerializer, + "application/vnd.onem2m-res+json": OneM2MJsonSerializer, + "application/vnd.onem2m-ntfy+json": OneM2MJsonSerializer, + "application/vnd.onem2m-attrs+json": OneM2MJsonSerializer, + "text/plain": OneM2MJsonSerializer} +_serializers = {} + + +def create_onem2m_serializer(content_type): + try: + factory = _factories[content_type] + except KeyError: + raise CSEBadRequest("Unsupported content type: %s. Try one of %s" % + (content_type, ', '.join(_factories.keys()))) + return factory() + + +def get_onem2m_supported_content_types(): + return _factories.keys() + + +def get_onem2m_decoder(content_type): + # TODO: Check if this is faster than split + content_type, _, _ = content_type.partition(";") + + content_type = content_type.strip().lower() + + try: + return _serializers[content_type] + except KeyError: + serializer = create_onem2m_serializer(content_type) + _serializers[content_type] = serializer + return serializer +get_serializer = get_onem2m_decoder + + +def get_onem2m_encoder(accept): + # TODO: optimize + if accept: + parsed_accept_header = parse_accept_header(accept, Accept) + """:type : Accept""" + supported = get_onem2m_supported_content_types() + accepted_type = parsed_accept_header.best_match(supported) + if not accepted_type: + raise CSEContentsUnacceptable("%s is not supported. 
" + "Supported content types are: %s" % + (accept, ', '.join(supported))) + else: + # TODO: use config["default_content_type"] + accepted_type = "application/json" + + # TODO: optimize + return get_serializer(accepted_type) + + +def register_onem2m_serializer(content_type, factory): + set_value = _factories.setdefault(content_type, factory) + + if set_value is not factory: + raise OpenMTCError("Content type is already registered: %s" % + (content_type, )) + +################################################################################ +# import other serializers at serializers +################################################################################ +# import impl +# import pkgutil +# +# logger = get_logger(__name__) +# +# for _importer, modname, ispkg in pkgutil.iter_modules(impl.__path__): +# modname = impl.__name__ + "." + modname +# logger.debug("Found onem2m serializer module %s (is a package: %s)" % +# (modname, ispkg)) +# try: +# __import__(modname) +# except: +# logger.error("Failed to import serializer %s", modname) +# raise +# del _importer +# del modname +# del ispkg +# +# del impl +# del pkgutil +# del logger + diff --git a/build/lib/openmtc_onem2m/serializer/base.py b/build/lib/openmtc_onem2m/serializer/base.py new file mode 100644 index 0000000..5052821 --- /dev/null +++ b/build/lib/openmtc_onem2m/serializer/base.py @@ -0,0 +1,203 @@ +from abc import ABCMeta, abstractmethod +from datetime import datetime +from re import compile as re_compile + +from futile.logging import LoggerMixin +from openmtc_onem2m.exc import CSESyntaxError, CSEBadRequest, CSEValueError +from openmtc_onem2m.model import (get_onem2m_type, ContentInstance, + ResourceTypeE, Notification, + get_onem2m_resource_type, + get_short_attribute_name, + get_short_member_name, get_long_member_name, + get_short_resource_name, + get_long_attribute_name, + OneM2MEntity, OneM2MResource, Container, + get_long_resource_name, OneM2MContentResource, + URIList, OneM2MIntEnum) + +_typename_matcher = re_compile(r'^m2m:([a-z]+)$') + + +def get_typename(tn): + return _typename_matcher.findall(tn).pop() + + +class OneM2MSerializer(LoggerMixin): + __metaclass__ = ABCMeta + + @abstractmethod + def encode_resource(self, resource, response, pretty=False, + encoding="utf-8", fields=None): + raise NotImplementedError() + + @abstractmethod + def decode_resource_values(self, s): + pass + + def decode(self, s): + resource_type, data = self.decode_resource_values(s) + if issubclass(resource_type, OneM2MContentResource): + return resource_type(data) + child_resource = data.pop("childResource", None) + if child_resource: + try: + def map_child_resource(v): + res_type = ResourceTypeE(v["type"]) + res_cls = get_onem2m_resource_type(res_type.name) + return res_cls(v["name"], resourceID=v["value"], resourceType=res_type) + child_resource = map(map_child_resource, child_resource) + except (TypeError, AttributeError, KeyError, ValueError): + raise CSEValueError("Invalid entry in child resources: %s", + child_resource) + if resource_type is Notification and "notificationEvent" in data: + representation = data["notificationEvent"]["representation"] + representation = self.decode(self.dumps(representation)) + data["notificationEvent"]["representation"] = representation + resource = resource_type(**data) + if child_resource: + resource.childResource = child_resource + return resource + + +class OneM2MDictSerializer(OneM2MSerializer): + def encode_resource(self, resource, pretty=False, path=None, encoding="utf-8", fields=None, + 
encapsulated=False): + representation = resource.values + + self.logger.debug("Encoding representation: %s", representation) + + if isinstance(resource, Notification): + # handle notifications + try: + event = representation["notificationEvent"] + if event: + e = event.values + e['representation'] = self.encode_resource( + event.representation, pretty, path, encoding, fields, True + ) + representation["notificationEvent"] = { + get_short_attribute_name(k) or get_short_member_name(k): v + for k, v in e.iteritems() + } + except (AttributeError, KeyError): + self.logger.exception("failed to encode notify") + + def make_val(val_path, resource_id): + try: + if val_path: + val_path += '/' if not val_path.endswith('/') else '' + except AttributeError: + val_path = '' + + if resource_id.startswith(val_path): + return resource_id + return val_path + resource_id + + if isinstance(resource, OneM2MResource): + + def get_child_rep(c): + return { + "val": make_val(path, c.resourceID), + "nm": c.basename, + "typ": c.resourceType + } + representation["childResource"] = map(get_child_rep, representation["childResource"]) + + if isinstance(resource, URIList): + representation = [make_val(path, x) for x in representation] + + if isinstance(resource, Container): + if isinstance(resource.latest, ContentInstance): + representation['latest'] = resource.latest.resourceID + if isinstance(resource.oldest, ContentInstance): + representation['oldest'] = resource.oldest.resourceID + + # cleans representation + def clean_representation(o): + try: + # removes empty attributes + empty_keys = [] + for k, v in o.items(): + if v is None: + empty_keys.append(k) + elif isinstance(v, OneM2MEntity): + o[k] = self.encode_resource(v, pretty, path, encoding, fields) + elif isinstance(v, list): + + def encode_list_item(item): + if isinstance(item, OneM2MEntity): + return self.encode_resource(item, pretty, path, encoding, fields) + return item + o[k] = map(encode_list_item, v) + else: + try: + if len(v) == 0: + empty_keys.append(k) + except TypeError: + pass + + for k in empty_keys: + del o[k] + + for k, v in o.items(): + if not isinstance(v, (unicode, str, bool, datetime, + OneM2MIntEnum)): + clean_representation(v) + except AttributeError: + if isinstance(o, list): + for p in o: + clean_representation(p) + + if not isinstance(resource, OneM2MContentResource): + representation = { + get_short_resource_name(k) or get_short_attribute_name(k) or + get_short_member_name(k): v for + k, v in representation.items()} + + clean_representation(representation) + + if (not isinstance(resource, OneM2MResource) and + not isinstance(resource, Notification) and + not isinstance(resource, OneM2MContentResource)): + return representation + + typename = 'm2m:' + (get_short_resource_name(resource.typename) or + get_short_member_name(resource.typename)) + + if encapsulated: + return {typename: representation} + + if pretty: + return self.pretty_dumps({typename: representation}) + + return self.dumps({typename: representation}) + + def _handle_partial_addressing(self, resource, pretty): + for k, v in resource.iteritems(): + if k in ('latest', 'oldest') and isinstance(v, ContentInstance): + resource[k] = v.resourceID + if pretty: + return self.pretty_dumps(resource) + return self.dumps(resource) + + def decode_resource_values(self, s): + + def convert_to_long_keys(d): + return {get_long_resource_name(k) or get_long_attribute_name(k) or + get_long_member_name(k) or k: v for k, v in d.iteritems()} + + try: + if hasattr(s, "read"): + data = self.load(s, 
object_hook=convert_to_long_keys) + else: + data = self.loads(s, object_hook=convert_to_long_keys) + except (ValueError, TypeError) as exc: + raise CSEBadRequest("Failed to parse input: %s" % (exc, )) + + self.logger.debug("Read data: %s", data) + + try: + typename, data = data.items()[0] + return get_onem2m_type(get_typename(typename)), data + except (AttributeError, IndexError, TypeError): + raise CSESyntaxError("Not a valid resource representation") diff --git a/build/lib/openmtc_onem2m/serializer/impl/__init__.py b/build/lib/openmtc_onem2m/serializer/impl/__init__.py new file mode 100644 index 0000000..de40ea7 --- /dev/null +++ b/build/lib/openmtc_onem2m/serializer/impl/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/build/lib/openmtc_onem2m/serializer/json/__init__.py b/build/lib/openmtc_onem2m/serializer/json/__init__.py new file mode 100644 index 0000000..8c7076a --- /dev/null +++ b/build/lib/openmtc_onem2m/serializer/json/__init__.py @@ -0,0 +1,62 @@ +from openmtc_onem2m.serializer.base import OneM2MDictSerializer +from json import JSONEncoder +from futile.logging import get_logger +from datetime import datetime +from openmtc_onem2m.model import ContentInstance + +logger = get_logger(__name__) + +# rst: ujson and yajl are not supporting object_hooks, but conversion is needed +# rst: some measurements are necessary what is better +# try: +# from ujson import load, loads +# logger.debug("using ujson for decoding JSON") +# except ImportError: +# try: +# from yajl import load, loads +# logger.debug("using yajl for decoding JSON") +# except ImportError: +try: + # simplejson is faster on decoding, tiny bit slower on encoding + from simplejson import load, loads + logger.debug("using simplejson for decoding JSON") +except ImportError: + logger.debug("using builtin json for decoding JSON") + from json import load, loads + + +del logger + + +def _default(x): + if isinstance(x, datetime): + try: + isoformat = x.isoformat + except AttributeError: + raise TypeError("%s (%s)" % (x, type(x))) + + return isoformat() + elif isinstance(x, ContentInstance): + return x.resourceID + else: + try: # handle model classes + return x.values + except AttributeError: + raise TypeError("%s (%s)" % (x, type(x))) + + +_simple_encoder = JSONEncoder(check_circular=False, separators=(',', ':'), + default=_default) + +_pretty_encoder = JSONEncoder(default=_default, indent=2, + separators=(',', ':'), + check_circular=False) + + +class OneM2MJsonSerializer(OneM2MDictSerializer): + def __init__(self, *args, **kw): + + self.loads = loads + self.load = load + self.dumps = _simple_encoder.encode + self.pretty_dumps = _pretty_encoder.encode diff --git a/build/lib/openmtc_onem2m/serializer/util.py b/build/lib/openmtc_onem2m/serializer/util.py new file mode 100644 index 0000000..9a5d7d7 --- /dev/null +++ b/build/lib/openmtc_onem2m/serializer/util.py @@ -0,0 +1,38 @@ +from futile.logging import get_logger +from openmtc_onem2m.exc import CSEValueError +from openmtc_onem2m.serializer import get_onem2m_encoder, get_onem2m_decoder + +logger = get_logger(__name__) + + +def decode_onem2m_content(content, content_type): + if content == "": + content = None + if content_type and content is not None: + serializer = get_onem2m_decoder(content_type) + try: + data = serializer.decode(content) + except CSEValueError as e: + logger.exception("Error reading input") + raise e + + return data + return None + + +def encode_onem2m_content(content, content_type, pretty=False, path=None, + 
fields=None): + logger.debug("Encoding result: %s - %s", content, content_type) + + if content is None: + return None, None + + fields = fields # TODO(rst): maybe necessary + #fields = ["resourceID"] + + serializer = get_onem2m_encoder(content_type) + + data = serializer.encode_resource(content, pretty=pretty, path=path, + fields=fields) + + return content_type + "; charset=utf-8", data diff --git a/build/lib/openmtc_onem2m/transport.py b/build/lib/openmtc_onem2m/transport.py new file mode 100644 index 0000000..4019e6e --- /dev/null +++ b/build/lib/openmtc_onem2m/transport.py @@ -0,0 +1,444 @@ +import random +import string + +from enum import Enum, unique + +from futile.logging import get_logger +from openmtc.model import StrEnum +from openmtc_onem2m.exc import OneM2MError + + +@unique +class RequestMethod(Enum): + create = "create" + retrieve = "retrieve" + update = "update" + delete = "delete" + notify = "notify" + execute = "execute" + observe = "observe" + + +_logger = get_logger(__name__) + + +class MetaInformation(object): + def __init__(self, ri=None, ot=None, rqet=None, rset=None, rt=None, rd=None, + rc=None, rp=None, oet=None, ls=None, ec=None, da=None, + gid=None, role=None): + """Meta info about request, contains: + ri (Request Identifier), + ot (optional originating timestamp), + rqet (optional request expiration timestamp), + rset (optional result expiration timestamp), + rt (optional response type), + rd (optional result destination), + rc (optional result content), + rp (optional response persistence), + oet (optional operational execution time), + ls (optional lifespan), + ec (optional event category), + da (optional delivery aggregation), + gid (optional group request identifier) + role () + """ + + @property + def ri(self): + return self.identifier + + @ri.setter + def ri(self, ri): + self.identifier = ri + + @property + def ot(self): + return self.originating_timestamp + + @ot.setter + def ot(self, ot): + self.originating_timestamp = ot + + @property + def rqet(self): + return self.request_expiration_timestamp + + @rqet.setter + def rqet(self, rqet): + self.request_expiration_timestamp = rqet + + @property + def rset(self): + return self.result_expiration_timestamp + + @rset.setter + def rset(self, rset): + self.result_expiration_timestamp = rset + + @property + def rt(self): + return self.response_type + + @rt.setter + def rt(self, rt): + self.response_type = rt + + @property + def rd(self): + return self.result_destination + + @rd.setter + def rd(self, rd): + self.result_destination = rd + + @property + def rc(self): + return self.result_content + + @rc.setter + def rc(self, rc): + self.result_content = rc + + @property + def rp(self): + return self.response_persistence + + @rp.setter + def rp(self, rp): + self.response_persistence = rp + + @property + def oet(self): + return self.operational_execution_time + + @oet.setter + def oet(self, oet): + self.operational_execution_time = oet + + @property + def ec(self): + return self.event_category + + @ec.setter + def ec(self, ec): + self.event_category = ec + + @property + def ls(self): + return self.lifespan + + @ls.setter + def ls(self, ls): + self.lifespan = ls + + @property + def da(self): + return self.delivery_aggregation + + @da.setter + def da(self, da): + self.delivery_aggregation = da + + @property + def gid(self): + return self.group_request_identifier + + @gid.setter + def gid(self, gid): + self.group_request_identifier = gid + + @property + def ro(self): + return self.role + + @ro.setter + def ro(self, ro): + 
self.role = ro + + def __str__(self): + s = '' + for k in self.__dict__: + if getattr(self, k): + s = s + ' | mi.' + str(k) + ': ' + str(self.__dict__[k]) + return s + + +MI = MetaInformation + + +class AdditionalInformation(object): + def __init__(self, cs=None, ra=None): + """Optional additional information about the request, contains: + cs (optional, status codes), + ra (optional, address for the temporary storage of end node Responses) + """ + self.cs = cs + self.ra = ra + + def __str__(self): + s = '' + for k in self.__dict__: + if getattr(self, k): + s = s + ' | ai.' + str(k) + ': ' + str(self.__dict__[k]) + return s + + +AI = AdditionalInformation + + +class OneM2MOperation(StrEnum): + create = "create" + retrieve = "retrieve" + update = "update" + delete = "delete" + notify = "notify" + + +class OneM2MRequest(object): + internal = False + cascading = False + + """Class representing a OneM2M request""" + + def __init__(self, op, to, fr=None, rqi=None, ty=None, pc=None, rol=None, + ot=None, rqet=None, rset=None, oet=None, rt=None, rp=None, + rcn=None, ec=None, da=None, gid=None, filter_criteria=None, + drt=None): + # Operation + self.operation = op + # Target uri + self.to = to + # Originator ID + self.originator = fr # original long name is from + self.request_identifier = rqi or ''.join(random.sample(string.letters + string.digits, 16)) + # Type of a created resource + self.resource_type = ty + # Resource content to be transferred. + self.content = pc + self.role = rol + self.originating_timestamp = ot + self.request_expiration_timestamp = rqet + self.result_expiration_timestamp = rset + self.operation_execution_time = oet + self.response_type = rt + self.result_persistence = rp + self.result_content = rcn + self.event_category = ec + self.delivery_aggregation = da + self.group_request_identifier = gid + self.filter_criteria = filter_criteria + # Optional Discovery result type + self.discovery_result_type = drt + + @property + def op(self): + return self.operation + + @op.setter + def op(self, op): + self.operation = op + + @property + def fr(self): + return self.originator + + @fr.setter + def fr(self, fr): + self.originator = fr + + @property + def rqi(self): + return self.request_identifier + + @rqi.setter + def rqi(self, rqi): + self.request_identifier = rqi + + @property + def ty(self): + return self.resource_type + + @ty.setter + def ty(self, ty): + self.resource_type = ty + + @property + def pc(self): + return self.content + + @pc.setter + def pc(self, pc): + self.content = pc + + @property + def rol(self): + return self.role + + @rol.setter + def rol(self, rol): + self.role = rol + + @property + def ot(self): + return self.originating_timestamp + + @ot.setter + def ot(self, ot): + self.originating_timestamp = ot + + @property + def rqet(self): + return self.request_expiration_timestamp + + @rqet.setter + def rqet(self, rqet): + self.request_expiration_timestamp = rqet + + @property + def rset(self): + return self.result_expiration_timestamp + + @rset.setter + def rset(self, rset): + self.result_expiration_timestamp = rset + + @property + def oet(self): + return self.operation_execution_time + + @oet.setter + def oet(self, oet): + self.operation_execution_time = oet + + @property + def rt(self): + return self.response_type + + @rt.setter + def rt(self, rt): + self.response_type = rt + + @property + def rp(self): + return self.result_persistence + + @rp.setter + def rp(self, rp): + self.result_persistence = rp + + @property + def rcn(self): + return self.result_content + + 
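+    # Like the other short-name properties of this class, "rcn" mirrors the
+    # long/short parameter mapping in model.py ("resultContent" <-> "rcn").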
@rcn.setter + def rcn(self, rcn): + self.result_content = rcn + + @property + def ec(self): + return self.event_category + + @ec.setter + def ec(self, ec): + self.event_category = ec + + @property + def da(self): + return self.delivery_aggregation + + @da.setter + def da(self, da): + self.delivery_aggregation = da + + @property + def gid(self): + return self.group_request_identifier + + @gid.setter + def gid(self, gid): + self.group_request_identifier = gid + + @property + def fc(self): + return self.filter_criteria + + @fc.setter + def fc(self, fc): + self.filter_criteria = fc + + @property + def drt(self): + return self.discovery_result_type + + @drt.setter + def drt(self, drt): + self.discovery_result_type = drt + + def __str__(self): + return '%s: %s' % (self.__class__.__name__, ' | '.join([ + '%s: %s' % (str(k), str(v)) for k, v in self.__dict__.iteritems() + ])) + + +class OneM2MResponse(object): + """Class representing a OneM2M response""" + + def __init__(self, status_code, request=None, rqi=None, pc=None, to=None, + fr=None, rsc=None): + # Operation result + self.response_status_code = status_code + if request: + self.request_identifier = request.rqi + # Target uri + self.to = request.to + # Originator ID + self.originator = request.fr + else: + self.request_identifier = rqi + # Target uri + self.to = to + # Originator ID + self.originator = fr + # Resource content to be transferred. + self.content = pc + + @property + def status_code(self): + return self.response_status_code.http_status_code + + @property + def rsc(self): + return self.response_status_code.numeric_code + + @property + def rqi(self): + return self.request_identifier + + @rqi.setter + def rqi(self, rqi): + self.request_identifier = rqi + + @property + def pc(self): + return self.content + + @pc.setter + def pc(self, pc): + self.content = pc + + @property + def fr(self): + return self.originator + + @fr.setter + def fr(self, fr): + self.originator = fr + + def __str__(self): + return '%s: %s' % (self.__class__.__name__, ' | '.join([ + '%s: %s' % (str(k), str(v)) for k, v in self.__dict__.iteritems() + ])) + + +class OneM2MErrorResponse(OneM2MResponse, OneM2MError): + pass diff --git a/build/lib/openmtc_onem2m/util.py b/build/lib/openmtc_onem2m/util.py new file mode 100644 index 0000000..a530fa7 --- /dev/null +++ b/build/lib/openmtc_onem2m/util.py @@ -0,0 +1,37 @@ +from re import compile as re_compile + + +def _get_regex_path_component(): + # see http://tools.ietf.org/html/rfc3986#section-3.3 + # path-abempty = *( "/" segment ) + # segment = *pchar + # pchar = unreserved / pct-encoded / sub-delims / ":" / "@" + # pct-encoded = "%" HEXDIG HEXDIG + # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" + # sub-delims = "!" / "$" / "&" / """ / "(" / ")" / "*" / "+" / "," / ";" / + # "=" + + unreserved = r"[\w\.\-~]" + pct_encoded = "%[A-Fa-f0-9][A-Fa-f0-9]" + sub_delims = r"[!$&'()\*\+,;=]" + + pchar = "(?:" + unreserved + "|" + pct_encoded + "|" + sub_delims + "|:|@)" + segment = pchar + "+" + + return segment + + +_sp_id = r'(//%s)?' % _get_regex_path_component() +_cse_id = r'(/%s)?' % _get_regex_path_component() +_path_suffix = r'(?:/?(%s(?:/%s)*))?' 
% (_get_regex_path_component(), _get_regex_path_component()) + +_onem2m_address_splitter = re_compile(r'^%s%s%s' % (_sp_id, _cse_id, _path_suffix)) + + +def split_onem2m_address(onem2m_address): + """ + + :param str onem2m_address: + :return: sp_id, cse_id, cse-relative rest + """ + return _onem2m_address_splitter.findall(onem2m_address).pop() diff --git a/build/lib/pyio.py b/build/lib/pyio.py new file mode 100644 index 0000000..830f300 --- /dev/null +++ b/build/lib/pyio.py @@ -0,0 +1 @@ +from io import * \ No newline at end of file diff --git a/bumb-version b/bumb-version new file mode 100755 index 0000000..66ec8a7 --- /dev/null +++ b/bumb-version @@ -0,0 +1,16 @@ +#!/bin/bash + +VERSION=${1} + +if ! [[ "${VERSION}" =~ ^[0-9]+\.[0-9]+(\.[0-9]+)?$ ]]; then + echo "Wrong version number! Only x.y or x.y.z is allowed." + exit 1 +fi + +SETUPS=( gevent-all sdk gevent-all-with-abs-gip arduinogip cul868gip roomui + testgip zigbeegip ) + +for setup in "${SETUPS[@]}"; do + sed -i -re 's/(^\W*SETUP_VERSION\W*=\W*")[0-9]+\.[0-9]+(\.[0-9]+)?"/\1'${VERSION}'"/' setup-${setup}.py +done + diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/__init__.py b/common/openmtc-onem2m/src/openmtc_onem2m/__init__.py new file mode 100644 index 0000000..6afdef2 --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/__init__.py @@ -0,0 +1,2 @@ +from openmtc_onem2m.transport import AdditionalInformation, MetaInformation, \ + OneM2MRequest, OneM2MResponse diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/__init__.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/__init__.pyc new file mode 100644 index 0000000..596d169 Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/__init__.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/client/__init__.py b/common/openmtc-onem2m/src/openmtc_onem2m/client/__init__.py new file mode 100644 index 0000000..5b44da8 --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/client/__init__.py @@ -0,0 +1,23 @@ +from abc import abstractmethod +from futile import LoggerMixin + + +def normalize_path(path): + if not path: + return '' + if path.startswith('//'): + # abs CSE + return '/_' + path[1:] + elif path.startswith('/'): + # sp rel CSE + return '/~' + path + return path + + +class OneM2MClient(LoggerMixin): + def __init__(self): + super(OneM2MClient, self).__init__() + + @abstractmethod + def send_onem2m_request(self, onem2m_request): + pass diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/client/__init__.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/client/__init__.pyc new file mode 100644 index 0000000..cadffee Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/client/__init__.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/client/http.py b/common/openmtc-onem2m/src/openmtc_onem2m/client/http.py new file mode 100644 index 0000000..c909682 --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/client/http.py @@ -0,0 +1,217 @@ +import urllib +import ssl +from socket import ( + gaierror, + error as socket_error, +) +from time import time +from urlparse import urlparse +from aplus import Promise +from futile.caching import LRUCache +from geventhttpclient.client import HTTPClient +from geventhttpclient.response import HTTPResponse +from openmtc.exc import ( + OpenMTCNetworkError, + ConnectionFailed, +) +from openmtc_onem2m.exc import ( + get_error_class, + get_response_status, + ERROR_MIN, +) +from openmtc_onem2m.model import ( + ResourceTypeE, + get_short_attribute_name, + get_short_member_name, +) 
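+# How requests are put on the wire (see _method_map_to_http and
+# _header_to_field_map below): a oneM2M retrieve, for example, becomes an HTTP
+# GET with the originator carried in X-M2M-ORIGIN and the request identifier
+# in X-M2M-RI. The target path in this sketch is illustrative only:
+#
+#   GET /~/mn-cse/some/resource HTTP/1.1
+#   X-M2M-ORIGIN: <originator>
+#   X-M2M-RI: <request identifier>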
+from openmtc_onem2m.serializer.util import ( + decode_onem2m_content, + encode_onem2m_content, +) +from openmtc_onem2m.transport import ( + OneM2MOperation, + OneM2MResponse, + OneM2MErrorResponse, +) +from . import ( + OneM2MClient, + normalize_path, +) + +_method_map_to_http = { + OneM2MOperation.create: 'POST', + OneM2MOperation.retrieve: 'GET', + OneM2MOperation.update: 'PUT', + OneM2MOperation.delete: 'DELETE', + OneM2MOperation.notify: 'POST', +} + +_clients = LRUCache(threadsafe=False) + +_query_params = frozenset(['rt', 'rp', 'rcn', 'da', 'drt']) + +_header_to_field_map = { + 'X-M2M-ORIGIN': 'originator', + 'X-M2M-RI': 'rqi', + 'X-M2M-GID': 'gid', + 'X-M2M-OT': 'ot', + 'X-M2M-RST': 'rset', + 'X-M2M-RET': 'rqet', + 'X-M2M-OET': 'oet', + 'X-M2M-EC': 'ec', +} + + +def get_client(m2m_ep, use_xml=False, ca_certs=None, cert_file=None, key_file=None, + insecure=False): + try: + return _clients[(m2m_ep, use_xml)] + except KeyError: + # TODO: make connection_timeout and concurrency configurable + client = _clients[(m2m_ep, use_xml)] = OneM2MHTTPClient( + m2m_ep, use_xml, ca_certs, cert_file, key_file, insecure) + return client + + +class OneM2MHTTPClient(OneM2MClient): + # defaults + DEF_SSL_VERSION = ssl.PROTOCOL_TLSv1_2 + + def __init__(self, m2m_ep, use_xml, ca_certs=None, cert_file=None, key_file=None, + insecure=False): + super(OneM2MHTTPClient, self).__init__() + + self.parsed_url = urlparse(m2m_ep) + is_https = self.parsed_url.scheme[-1].lower() == "s" + port = self.parsed_url.port or (is_https and 443 or 80) + host = self.parsed_url.hostname + self.path = self.parsed_url.path.rstrip('/') + if self.path and not self.path.endswith('/'): + self.path += '/' + + # TODO(rst): handle IPv6 host here + # geventhttpclient sets incorrect host header + # i.e "host: ::1:8000" instead of "host: [::1]:8000 + if (is_https and ca_certs is not None and cert_file is not None and + key_file is not None): + ssl_options = { + "ca_certs": ca_certs, + "certfile": cert_file, + "keyfile": key_file, + "ssl_version": self.DEF_SSL_VERSION + } + else: + ssl_options = None + + client = HTTPClient(host, port, connection_timeout=120.0, + concurrency=50, ssl=is_https, + ssl_options=ssl_options, insecure=insecure) + self.request = client.request + + self.content_type = 'application/' + ('xml' if use_xml else 'json') + + def _handle_network_error(self, exc, p, http_request, t, + exc_class=OpenMTCNetworkError): + error_str = str(exc) + if error_str in ("", "''"): + error_str = repr(exc) + method = http_request["method"] + path = http_request["request_uri"] + log_path = "%s://%s/%s" % (self.parsed_url.scheme, self.parsed_url.netloc, path) + error_msg = "Error during HTTP request: %s. 
" \ + "Request was: %s %s (%.4fs)" % (error_str, method, log_path, time() - t) + p.reject(exc_class(error_msg)) + + def map_onem2m_request_to_http_request(self, onem2m_request): + """ + Maps a OneM2M request to a HTTP request + :param onem2m_request: OneM2M request to be mapped + :return: request: the resulting HTTP request + """ + self.logger.debug("Mapping OneM2M request to generic request: %s", onem2m_request) + + params = { + param: getattr(onem2m_request, param) for param in _query_params + if getattr(onem2m_request, param) is not None + } + + if onem2m_request.fc is not None: + filter_criteria = onem2m_request.fc + params.update({ + (get_short_attribute_name(name) or get_short_member_name(name)): val + for name, val in filter_criteria.get_values(True).iteritems() + }) + + path = normalize_path(onem2m_request.to) + + if params: + path += '?' + urllib.urlencode(params, True) + + content_type, data = encode_onem2m_content(onem2m_request.content, self.content_type, path=path) + + # TODO(rst): check again + # set resource type + if onem2m_request.operation == OneM2MOperation.create: + content_type += '; ty=' + str(ResourceTypeE[onem2m_request.resource_type.typename]) + + headers = { + header: getattr(onem2m_request, field) for header, field in _header_to_field_map.iteritems() + if getattr(onem2m_request, field) is not None + } + headers['content-type'] = content_type + + self.logger.debug("Added request params: %s", params) + + return { + 'method': _method_map_to_http[onem2m_request.operation], + 'request_uri': self.path + path, + 'body': data, + 'headers': headers, + } + + def map_http_response_to_onem2m_response(self, onem2m_request, response): + """ + Maps HTTP response to OneM2M response + :param onem2m_request: the OneM2M request that created the response + :param response: the HTTP response + :return: resulting OneM2MResponse or OneM2MErrorResponse + """ + if not isinstance(response, HTTPResponse): + self.logger.error("Not a valid response: %s", response) + # return OneM2MErrorResponse(STATUS_INTERNAL_SERVER_ERROR) + self.logger.debug("Mapping HTTP response for OneM2M response: %s", response) + rsc = response.get("x-m2m-rsc", 5000) + if int(rsc) >= ERROR_MIN: + return OneM2MErrorResponse( + get_error_class(rsc).response_status_code, onem2m_request) + + return OneM2MResponse( + get_response_status(rsc), + request=onem2m_request, + rsc=rsc, + pc=decode_onem2m_content(response.read(), response.get("content-type")) + ) + + def send_onem2m_request(self, onem2m_request): + with Promise() as p: + http_request = self.map_onem2m_request_to_http_request(onem2m_request) + t = time() + + try: + response = self.request(**http_request) + except (socket_error, gaierror) as exc: + self._handle_network_error(exc, p, http_request, t, ConnectionFailed) + except Exception as exc: + self.logger.exception("Error in HTTP request") + self._handle_network_error(exc, p, http_request, t) + else: + try: + onem2m_response = self.map_http_response_to_onem2m_response(onem2m_request, response) + if isinstance(onem2m_response, OneM2MErrorResponse): + p.reject(onem2m_response) + else: + p.fulfill(onem2m_response) + finally: + response.release() + + return p diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/client/http.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/client/http.pyc new file mode 100644 index 0000000..1285807 Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/client/http.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/client/mqtt.py 
b/common/openmtc-onem2m/src/openmtc_onem2m/client/mqtt.py new file mode 100644 index 0000000..fa875b8 --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/client/mqtt.py @@ -0,0 +1,431 @@ +from aplus import ( + Promise, +) +from collections import deque +from futile.caching import LRUCache +import gevent +from gevent import monkey; monkey.patch_all() +from . import OneM2MClient +from openmtc.exc import ConnectionFailed, OpenMTCNetworkError +from ..exc import ( + ERROR_MIN, + CSEValueError, + CSEError, +) +from ..serializer.util import ( + decode_onem2m_content, + encode_onem2m_content, +) +from ..transport import ( + OneM2MRequest, + OneM2MResponse, + OneM2MErrorResponse, + OneM2MOperation, +) +from ..model import ResourceTypeE +import paho.mqtt.client as mqtt +from simplejson import ( + JSONDecoder, + JSONEncoder, + JSONDecodeError, +) +from socket import error as SocketError +from urlparse import urlparse + +#: Dictionary mapping supported schemes to port numbers +portmap = { + 'mqtt': 1883, + 'mqtts': 8883, + # NB: The correct (i.e. registered with IANA) service-name for SSL/TLS-wrapped MQTT is 'secure-mqtt' in an effort to + # prevent confusion with MQTT-S/N. But as the entire world seems to insist on using 'mqtts' (including TS 0010, + # sec. 6.6) ... We are supporting both names here for maximum compliance and robustness. + 'secure-mqtt': 8883, +} + +MQTT_QOS_LEVEL = 1 + +_clients = LRUCache(threadsafe=False) + + +def get_client(m2m_ep, use_xml=False, client_id=None, handle_request_func=None): + """ + + :param string m2m_ep: + :param boolean use_xml: + :param string client_id: + :param fun handle_request_func: + :return OneM2MMQTTClient: + """ + try: + return _clients[(m2m_ep, use_xml)] + except KeyError: + _clients[(m2m_ep, use_xml)] = OneM2MMQTTClient(m2m_ep, use_xml, client_id, handle_request_func) + return _clients[(m2m_ep, use_xml)] + + +class OneM2MMQTTClient(OneM2MClient): + """ + This class provides for a transport over the MQTT protocol as described in TS 0010 + """ + + __request_fields = frozenset([ + 'op', + 'fr', + 'rqi', + 'ty', + 'pc', + 'rol', + 'ot', + 'rqet', + 'rset', + 'oet', + 'rt', + 'rp', + 'rcn', + 'ec', + 'da', + 'gid', + 'drt', + 'to', + ]) + + __response_fields = frozenset([ + 'rsc', + 'rqi', + 'pc', + 'fr', + 'to', + ]) + + @staticmethod + def _mqtt_mask(id): + return id.lstrip('/').replace('/', ':') + + @staticmethod + def _build_topic(originator='+', receiver='+', type='req'): + """ + Helper function to create topic strings + + :param string originator: + :param string receiver: + :param string type: + :return string: + """ + return '/'.join([ + '/oneM2M', + type, + OneM2MMQTTClient._mqtt_mask(originator), + OneM2MMQTTClient._mqtt_mask(receiver), + ]) + + def attach_callback(self): + """ + Wrapper function to attach callback handlers to the MQTT client. Functions attached in this manner are expected + to have the same name as the handler they seek to implement. 
+ :return fun: + """ + def decorator(func): + def wrapper(_self, *args, **kwargs): + func(_self, *args, **kwargs) + setattr(self._client, func.__name__, func) + return wrapper + return decorator + + def __init__(self, m2m_ep, _, client_id, handle_request_func=None, subscribe_sys_topics=False): + """ + :param str m2m_ep: + :param bool _: + :param str client_id: + :param call handle_request_func: + :param bool subscribe_sys_topics: Whether to subscribe to $SYS topics or not + (cf ) + """ + super(OneM2MMQTTClient, self).__init__() + parsed_url = urlparse(m2m_ep) + self._target_id = parsed_url.fragment + + self._encode = JSONEncoder().encode + self._decode = JSONDecoder().decode + + self._handle_request_func = handle_request_func + + self._processed_request_ids = deque([], maxlen=200) + self._request_promises = LRUCache(threadsafe=False, max_items=200) + + if client_id is None: + import random + import string + client_id = ''.join(random.sample(string.letters, 16)) + + self._client = mqtt.Client( + clean_session=False, + client_id='::'.join([ + 'C' if client_id[0].lower() in ['c', 'm'] else 'A', + self._mqtt_mask(client_id), + ]), + ) + + @self.attach_callback() + def on_connect(client, _, rc): + """ + :param mqtt.Client client: + :param All userdata: + :param integer rc: + :return void: + """ + if not rc == mqtt.CONNACK_ACCEPTED: + raise ConnectionFailed(mqtt.connack_string(rc)) + + def request_callback(client, _, message): + """ + Catch requests and + + :param mqtt.Client client: + :param All _: + :param mqtt.MQTTMessage message: + :return void: + """ + originator = message.topic.split('/')[3] + try: + request = self._decode(message.payload) + except JSONDecodeError as e: + self.logger.warn( + 'Got rubbish request from client %s: %s' + % (originator, e.message, ) + ) + return + + try: + if request['rqi'] in self._processed_request_ids: + self.logger.info('Request %s already processed; discarding duplicate.' % (request['rqi'], )) + return + else: + rqi = request['rqi'] + except KeyError: + self.logger.warn( + 'Special treatment for special request w/o request id from %s.' + % (originator, ) + ) + return + + try: + request['pc'] = decode_onem2m_content(self._encode(request['pc']), 'application/json') + request['ty'] = type(request['pc']) + except KeyError: + # No content, eh? + request['ty'] = None + + self.logger.debug('Decoded JSON request: %s' % (request, )) + + op = OneM2MOperation._member_map_.values()[request['op'] - 1] + to = request['to'] + del request['op'], request['to'] + + try: + response = self._handle_request_func( + OneM2MRequest(op, to, **request) + ).get() + except OneM2MErrorResponse as response: + self.logger.error('OneM2MError: %s' % (response.message, )) + except CSEError as e: + response = OneM2MErrorResponse(status_code=e.response_status_code, rqi=rqi) + + if not response.rqi: + # This really should not happen. No, really, it shouldn't. + self.logger.debug( + 'FIXUP! FIXUP! FIXUP! 
Adding missing request identifier to response: %s' + % (rqi, ) + ) + response.rqi = rqi + + if response.content: + response.content = self._decode( + encode_onem2m_content(response.content, 'application/json', path=response.to)[1] + ) + + self._publish_message( + self._encode({ + k: getattr(response, k) for k in self.__response_fields if getattr(response, k) is not None + }), + self._build_topic(originator, client_id, type='resp'), + ) + self._processed_request_ids.append(rqi) + + def response_callback(client, _, message): + """ + + :param mqtt.Client client: + :param All _: + :param mqtt.MQTTMessage message: + :return: + """ + try: + response = self._decode(message.payload) + except JSONDecodeError as e: + self.logger.error('Discarding response w/ damaged payload: %s', (e.message, )) + return + + promise_key = (message.topic.split('/')[4], response['rqi']) + try: + p = self._request_promises[promise_key] + except KeyError: + self.logger.debug( + 'Response %s could not be mapped to a request. Discarding.' + % (response['rqi'], ) + ) + return + + try: + response['pc'] = decode_onem2m_content(self._encode(response['pc']), 'application/json') + except KeyError: + pass + except CSEValueError as e: + self.logger.error( + 'Content of response %s could not be parsed, throwing on the trash heap: %s' + % (response['rqi'], e.message) + ) + p.reject(e) + + status_code = response['rsc'] + del response['rsc'] + if status_code >= ERROR_MIN: + p.reject(OneM2MErrorResponse(status_code, **response)) + else: + p.fulfill(OneM2MResponse(status_code, **response)) + + topics = [ + self._build_topic(originator=client_id, receiver='#', type='resp'), + ] + client.message_callback_add(topics[0], response_callback) + + if self._handle_request_func is not None: + topics.append(self._build_topic(receiver=client_id) + '/+') + client.message_callback_add(topics[1], request_callback) + + if subscribe_sys_topics: + topics.append('$SYS/#') + + self.logger.debug('Subscribing to topic(s) %s ...' % (', '.join(topics), )) + client.subscribe([ + (str(topic), MQTT_QOS_LEVEL) for topic in topics + ]) + + @self.attach_callback() + def on_disconnect(client, userdata, rc): + """ + :param mqtt.Client client: + :param All userdata: + :param int rc: + :return void: + """ + if not rc == mqtt.MQTT_ERR_SUCCESS: + self.logger.error( + 'Involuntary connection loss: %s (code %d). Waiting for reconnect ...' 
+ % (mqtt.error_string(rc), rc) + ) + + @self.attach_callback() + def on_message(client, userdata, message): + """ + :param mqtt.Client client: + :param All userdata: + :param mqtt.MQTTMessage message: + :return void: + """ + self.logger.debug('message received on topic %s' % (message.topic, )) + + @self.attach_callback() + def on_log(client, userdata, level, buf): + """ + :param mqtt.Client client: + :param All userdata: + :param integer level: + :param string buf: + :return void: + """ + self.logger.debug('pahomqtt-%d: %s' % (level, buf)) + + if parsed_url.username: + self._client.username_pw_set(parsed_url.username, parsed_url.password) + + try: + self._client.connect( + parsed_url.hostname, + parsed_url.port or portmap[parsed_url.scheme] + ) + except SocketError as e: + raise ConnectionFailed(e.message) + + def loop(): + try: + while self._client.loop(timeout=0.1) != mqtt.mqtt_cs_disconnecting: + gevent.sleep() + except (KeyboardInterrupt, SystemExit): + pass + + gevent.spawn(loop) + + def _publish_message(self, payload, topic): + (rc, mid) = self._client.publish(topic, payload, MQTT_QOS_LEVEL) + if not rc == mqtt.MQTT_ERR_SUCCESS: + self.logger.info('Code %d while sending message %d: %s' % (rc, mid, mqtt.error_string(rc))) + + def send_onem2m_request(self, request): + """ + :param openmtc_onem2m.transport.OneM2MRequest request: + :return Promise: + """ + p = Promise() + + try: + client_id = request.originator.split('/')[-1] + except (KeyError, AttributeError): + # TODO: make this configurable + client_id = 'ae0' + + request.op = 1 + OneM2MOperation._member_map_.keys().index(OneM2MOperation[request.op].name) + if request.pc: + request.pc = self._decode( + encode_onem2m_content(request.pc, 'application/json', path=request.to)[1] + ) + try: + if request.to.startswith('//'): # abs CSE + request.to = '/_' + request.to[1:] + elif request.to.startswith('/'): # sp rel CSE + request.to = '/~' + request.to + except AttributeError: + self.logger.error('Could not resolve target id; defaulting to preset') + request.to = '/' + self._target_id + + if request.ty: + request.ty = ResourceTypeE[request.resource_type.typename].value + + self.logger.debug('Preparing request for transit: %s' % (request, )) + + promises_key = (self._target_id, request.rqi) + + def cleanup(_): + self.logger.debug('Clearing request id %s ...' % (promises_key, )) + del self._request_promises[promises_key] + + p.addCallback(cleanup) + p.addErrback(cleanup) + + self._request_promises[promises_key] = p + + self._publish_message( + self._encode({ + str(k): getattr(request, k) for k in self.__request_fields if getattr(request, k) is not None + }), + self._build_topic(client_id, self._target_id) + '/json', + ) + + return p + + def stop(self): + self._client.disconnect() + # TODO(sho): this is abominable. But for the time being, there seems to be no elegant solution to this. + self._client._clean_session = True + # TS 0010, sec. 
6.3 mandates a reconnect in order to leave a clean state with the MQTT broker + self._client.reconnect() + self._client.disconnect() diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/exc.py b/common/openmtc-onem2m/src/openmtc_onem2m/exc.py new file mode 100644 index 0000000..913a8fc --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/exc.py @@ -0,0 +1,183 @@ +""" +Created on 26.05.2013 + +@author: kca +""" +from openmtc.exc import OpenMTCError +from collections import namedtuple + + +STATUS = namedtuple("STATUS", "numeric_code description http_status_code") + +STATUS_ACCEPTED = STATUS( + 1000, "ACCEPTED", 202) +STATUS_OK = STATUS( + 2000, "OK", 200) +STATUS_CREATED = STATUS( + 2001, "CREATED", 201) +STATUS_BAD_REQUEST = STATUS( + 4000, "BAD_REQUEST", 400) +STATUS_NOT_FOUND = STATUS( + 4004, "NOT_FOUND", 404) +STATUS_OPERATION_NOT_ALLOWED = STATUS( + 4005, "OPERATION_NOT_ALLOWED", 405) +STATUS_REQUEST_TIMEOUT = STATUS( + 4008, "REQUEST_TIMEOUT", 408) +STATUS_SUBSCRIPTION_CREATOR_HAS_NO_PRIVILEGE = STATUS( + 4101, ",_SUBSCRIPTION_CREATOR_HAS_NO_PRIVILEGE", 403) +STATUS_CONTENTS_UNACCEPTABLE = STATUS( + 4102, "CONTENTS_UNACCEPTABLE", 400) +STATUS_ORIGINATOR_HAS_NO_PRIVILEGE = STATUS( + 4103, "ORIGINATOR_HAS_NO_PRIVILEGE", 403) +STATUS_GROUP_REQUEST_IDENTIFIER_EXISTS = STATUS( + 4104, "GROUP_REQUEST_IDENTIFIER_EXISTS", 409) +STATUS_CONFLICT = STATUS( + 4105, "CONFLICT", 409) +STATUS_INTERNAL_SERVER_ERROR = STATUS( + 5000, "INTERNAL_SERVER_ERROR", 500) +STATUS_NOT_IMPLEMENTED = STATUS( + 5001, "NOT_IMPLEMENTED", 501) +STATUS_TARGET_NOT_REACHABLE = STATUS( + 5103, "TARGET_NOT_REACHABLE", 404) +STATUS_NO_PRIVILEGE = STATUS( + 5105, "NO_PRIVILEGE", 403) +STATUS_ALREADY_EXISTS = STATUS( + 5106, "ALREADY_EXISTS", 403) +STATUS_TARGET_NOT_SUBSCRIBABLE = STATUS( + 5203, "TARGET_NOT_SUBSCRIBABLE", 403) +STATUS_SUBSCRIPTION_VERIFICATION_INITIATION_FAILED = STATUS( + 5204, "SUBSCRIPTION_VERIFICATION_INITIATION_FAILED", 500) +STATUS_SUBSCRIPTION_HOST_HAS_NO_PRIVILEGE = STATUS( + 5205, "SUBSCRIPTION_HOST_HAS_NO_PRIVILEGE", 403) +STATUS_NON_BLOCKING_REQUEST_NOT_SUPPORTED = STATUS( + 5206, "NON_BLOCKING_REQUEST_NOT_SUPPORTED", 501) +STATUS_EXTERNAL_OBJECT_NOT_REACHABLE = STATUS( + 6003, "EXTERNAL_OBJECT_NOT_REACHABLE", 404) +STATUS_EXTERNAL_OBJECT_NOT_FOUND = STATUS( + 6005, "EXTERNAL_OBJECT_NOT_FOUND", 404) +STATUS_MAX_NUMBER_OF_MEMBER_EXCEEDED = STATUS( + 6010, "MAX_NUMBER_OF_MEMBER_EXCEEDED", 400) +STATUS_MEMBER_TYPE_INCONSISTENT = STATUS( + 6011, "MEMBER_TYPE_INCONSISTENT", 400) +STATUS_MANAGEMENT_SESSION_CANNOT_BE_ESTABLISHED = STATUS( + 6020, "MANAGEMENT_SESSION_CANNOT_BE_ESTABLISHED", 500) +STATUS_MANAGEMENT_SESSION_ESTABLISHMENT_TIMEOUT = STATUS( + 6021, "MANAGEMENT_SESSION_ESTABLISHMENT_TIMEOUT", 500) +STATUS_INVALID_CMDTYPE = STATUS( + 6022, "INVALID_CMDTYPE", 400) +STATUS_INVALID_ARGUMENTS = STATUS( + 6023, "INVALID_ARGUMENTS", 400) +STATUS_INSUFFICIENT_ARGUMENT = STATUS( + 6024, "INSUFFICIENT_ARGUMENT", 400) +STATUS_MGMT_CONVERSION_ERROR = STATUS( + 6025, "MGMT_CONVERSION_ERROR", 500) +STATUS_CANCELLATION_FAILED = STATUS( + 6026, "CANCELLATION_FAILED", 500) +STATUS_ALREADY_COMPLETE = STATUS( + 6028, "ALREADY_COMPLETE", 400) +STATUS_COMMAND_NOT_CANCELLABLE = STATUS( + 6029, "COMMAND_NOT_CANCELLABLE", 400) +STATUS_IMPERSONATION_ERROR = STATUS( + 6101, "IMPERSONATION_ERROR", 400) + + +_status_map = {v.numeric_code: v for v in globals().values() + if isinstance(v, STATUS)} + +ERROR_MIN = STATUS_BAD_REQUEST.numeric_code + + +class OneM2MError(OpenMTCError): + pass + + +class 
CSEError(OneM2MError): + response_status_code = STATUS_INTERNAL_SERVER_ERROR + + @property + def status_code(self): + return self.response_status_code.http_status_code + + @property + def rsc(self): + return self.response_status_code.numeric_code + + +class CSENotFound(CSEError): + response_status_code = STATUS_NOT_FOUND + + +class CSEOperationNotAllowed(CSEError): + response_status_code = STATUS_OPERATION_NOT_ALLOWED + + +class CSENotImplemented(CSEError): + response_status_code = STATUS_NOT_IMPLEMENTED + + +class CSETargetNotReachable(CSEError): + response_status_code = STATUS_TARGET_NOT_REACHABLE + + +class CSEConflict(CSEError): + response_status_code = STATUS_CONFLICT + + +class CSEBadRequest(CSEError): + response_status_code = STATUS_BAD_REQUEST + + +class CSESyntaxError(CSEBadRequest): + response_status_code = STATUS_BAD_REQUEST + + +class CSEPermissionDenied(CSEError): + response_status_code = STATUS_ORIGINATOR_HAS_NO_PRIVILEGE + + +class CSEImpersonationError(CSEBadRequest): + response_status_code = STATUS_IMPERSONATION_ERROR + + +class CSEValueError(CSESyntaxError, ValueError): + pass + + +class CSETypeError(CSESyntaxError, TypeError): + pass + + +class CSEMissingValue(CSESyntaxError): + pass + + +class CSEContentsUnacceptable(CSEError): + response_status_code = STATUS_CONTENTS_UNACCEPTABLE + + +_error_map = { + STATUS_INTERNAL_SERVER_ERROR.numeric_code: CSEError +} + + +def get_error_class(rsc): + return _error_map.get(int(rsc), CSEError) + + +def get_response_status(rsc): + return _status_map.get(int(rsc), STATUS_INTERNAL_SERVER_ERROR) + + +def all_subclasses(cls): + return cls.__subclasses__() + [g for s in cls.__subclasses__() + for g in all_subclasses(s)] + + +for c in all_subclasses(CSEError): + try: + code = vars(c)["response_status_code"].numeric_code + except KeyError: + continue + _error_map[code] = c + +del c, code diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/exc.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/exc.pyc new file mode 100644 index 0000000..ab774a6 Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/exc.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/mapper/__init__.py b/common/openmtc-onem2m/src/openmtc_onem2m/mapper/__init__.py new file mode 100644 index 0000000..afb628f --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/mapper/__init__.py @@ -0,0 +1,118 @@ +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse + +from openmtc.mapper import BasicMapper, MapperError +from openmtc_onem2m import OneM2MRequest +from openmtc_onem2m.transport import OneM2MOperation + + +def _is_persistent(instance): + return bool(instance.path) + + +class OneM2MMapper(BasicMapper): + def __init__(self, cse, originator=None, ca_certs=None, cert_file=None, key_file=None, *args, **kw): + super(OneM2MMapper, self).__init__(*args, **kw) + + scheme = urlparse(cse).scheme.lower() + if scheme in ("", "https", "http"): + from openmtc_onem2m.client.http import get_client + self._send_request = get_client(cse, use_xml=False, ca_certs=ca_certs, cert_file=cert_file, key_file=key_file).send_onem2m_request + elif scheme in ("mqtt", "mqtts", "secure-mqtt"): + from openmtc_onem2m.client.mqtt import get_client + self._send_request = get_client(cse, use_xml=False, client_id=originator).send_onem2m_request + elif scheme == "coap": + raise NotImplementedError + else: + raise ValueError( + "Unsupported URL scheme: %s" % (scheme,) + ) + self.originator = originator + + def create(self, path, instance): + 
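+        # Illustrative usage sketch (comments only; the CSE URL, originator and
+        # resource names below are assumptions, not values taken from this repository):
+        #     mapper = OneM2MMapper("http://localhost:8000", originator="CAdmin")
+        #     ae = mapper.create("onem2m", AE(App_ID="myApp", requestReachability=False))
+        #     cnt = mapper.create(ae.path, Container(resourceName="data"))
+        # create() blanks the read-only attributes, issues a oneM2M CREATE request via
+        # the HTTP or MQTT client selected in __init__, and returns the instance updated
+        # with the values (and path) echoed back by the CSE.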
instance.__dict__.update({ + attribute.name: None for attribute in type(instance).attributes if attribute.accesstype == attribute.RO + }) + + # TODO(rst): add resource_type + response = self._send_request(OneM2MRequest( + OneM2MOperation.create, + path, + self.originator, + ty=type(instance), + pc=instance + )).get() + + try: + instance.__dict__.update(response.content.values) + instance.path = path + '/' + response.content.resourceName + except (AttributeError, ): + instance.path = path + + self.logger.debug("Set instance path: %s" % (instance.path, )) + instance._synced = False + return instance + + def update(self, instance, fields=None): + if not _is_persistent(instance): + raise MapperError("Instance is not yet stored") + return self._do_update(instance, fields) + + def _do_update(self, instance, fields=None): + attributes = type(instance).attributes + fields_to_be_cleared = [a.name for a in attributes if a.accesstype in (a.WO, a.RO)] + if fields: + fields_to_be_cleared.extend([a.name for a in attributes if a.name not in fields]) + instance.childResource = [] + + # remove NP attributes + instance.__dict__.update({ + a: None for a in fields_to_be_cleared + }) + + response = self._send_request(OneM2MRequest( + OneM2MOperation.update, + instance.path, + self.originator, + pc=instance + )).get() + + try: + response.content.path = instance.path + except AttributeError: + pass + + return response.content + + def get(self, path): + response = self._get_data(path) + response.content.path = path + self.logger.debug("Received response: %s", response.content) + return response.content + + def delete(self, instance): + self._send_request(OneM2MRequest( + OneM2MOperation.delete, + getattr(instance, "path", instance), + self.originator + )) + + def _get_data(self, path): + return self._send_request(OneM2MRequest( + OneM2MOperation.retrieve, + path, + self.originator + )).get() + + # TODO(rst): check if this can be removed in parent class + @classmethod + def _patch_model(cls): + pass + + def _fill_resource(self, res, data): + pass + + def _map(self, path, typename, data): + pass diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/mapper/__init__.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/mapper/__init__.pyc new file mode 100644 index 0000000..bd98b3c Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/mapper/__init__.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/model.py b/common/openmtc-onem2m/src/openmtc_onem2m/model.py new file mode 100644 index 0000000..a574224 --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/model.py @@ -0,0 +1,1700 @@ +from enum import IntEnum, unique + +from openmtc.model import (Resource as Res, UnicodeAttribute, DatetimeAttribute, + Attribute, ListAttribute, Entity, EntityAttribute, + AnyURI, StringListAttribute, ContentResource) +from openmtc.model.exc import ModelTypeError +from futile import issubclass + +LATEST_VERSION = "1.6" + + +class OneM2MIntEnum(IntEnum): + def __str__(self): + return str(self.value) + + +class OneM2MEntity(Entity): + pass + + +class OneM2MContentResource(ContentResource, OneM2MEntity): + pass + + +class OneM2MResource(Res, OneM2MEntity): + __model_name__ = "onem2m" + __model_version__ = "1.6" + + +################################################################################ +# enumerationTypes +################################################################################ + +class ResourceTypeE(OneM2MIntEnum): + accessControlPolicy = 1 + AE = 2 + container = 3 + contentInstance = 4 + 
CSEBase = 5 + delivery = 6 + eventConfig = 7 + execInstance = 8 + group = 9 + localPolicy = 10 + m2mServiceSubscriptionProfile = 11 + mgmtCmd = 12 + mgmtObj = 13 + node = 14 + pollingChannel = 15 + remoteCSE = 16 + request = 17 + schedule = 18 + serviceSubscribedAppRule = 19 + serviceSubscribedNode = 20 + statsCollect = 21 + statsConfig = 22 + subscription = 23 + semanticDescriptor = 24 + accessControlPolicyAnnc = 10001 + AEAnnc = 10002 + containerAnnc = 10003 + contentInstanceAnnc = 10004 + groupAnnc = 10009 + locationPolicyAnnc = 10010 + mgmtObjAnnc = 10013 + nodeAnnc = 10014 + remoteCSEAnnc = 10016 + scheduleAnnc = 10018 + + +@unique +class CSETypeIDE(OneM2MIntEnum): + IN_CSE = 1 + MN_CSE = 2 + AEN_CSE = 3 + + +@unique +class LocationSourceE(OneM2MIntEnum): + Network_based = 1 + Device_based = 2 + Sharing_based = 3 + + +@unique +class StdEventCatsE(OneM2MIntEnum): + mmediate = 2 + BestEffort = 3 + Latest = 4 + + +@unique +class OperationE(OneM2MIntEnum): + Create = 1 + Retrieve = 2 + Update = 3 + Delete = 4 + Notify = 5 + + +@unique +class ResponseType(OneM2MIntEnum): + nonBlockingRequestSynch = 1 + nonBlockingRequestAsynch = 2 + blockingRequest = 3 + + +# @unique +# class ResultConentE(OneM2MIntEnum): +# nothing = 0 +# attributes = 1 +# hierarchical_address = 2 +# hierarchical_address_and_attributes = 3 +# attributes_and_child_resources = 4 +# attributes_and_child_resource_references = 6 +# child_resource_references = 6 +# original_resource = 7 + + +@unique +class DiscResTypeE(OneM2MIntEnum): + structured = 1 + unstructured = 2 + + +# TODO: responseStatusCode + + +@unique +class RequestStatusE(OneM2MIntEnum): + COMPLETED = 1 + FAILED = 2 + PENDING = 3 + FORWARDED = 4 + + +@unique +class MemberTypeE(OneM2MIntEnum): + accessControlPolicy = 1 + AE = 2 + container = 3 + contentInstance = 4 + CSEBase = 5 + delivery = 6 + eventConfig = 7 + execInstance = 8 + group = 9 + locationPolicy = 10 + m2mServiceSubscription = 11 + mgmtCmd = 12 + mgmtObj = 13 + node = 14 + pollingChannel = 15 + remoteCSE = 16 + request = 17 + schedule = 18 + serviceSubscribedAppRule = 19 + serviceSubscribedNode = 20 + statsCollect = 21 + statsConfig = 22 + subscription = 23 + token = 32 + dynamicAuthorizationConsultation = 34 + accessControlPolicyAnnc = 10001 + AEAnnc = 10002 + containerAnnc = 10003 + contentInstanceAnnc = 10004 + groupAnnc = 10009 + locationPolicyAnnc = 10010 + mgmtObjAnnc = 10013 + nodeAnnc = 10014 + remoteCSEAnnc = 10016 + scheduleAnnc = 10019 + dynamicAuthorizationConsultationAnnc = 10034 + mixed = 24 + # Mixed is a mixture of the resource types from 1 to 23, 10001 to 10004, 10009 to 10010, + # 10013 to 10014 and 10016 to 10018 as listed above. 
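+# Illustrative note on the enumerations above (not part of the original module):
+# OneM2MIntEnum members stringify to their numeric code, e.g.
+# str(ResourceTypeE.AE) == "2" and ResourceTypeE(23) is ResourceTypeE.subscription,
+# which is the form the oneM2M request/response primitives carry.
+#
+#     >>> from openmtc_onem2m.model import ResourceTypeE
+#     >>> int(ResourceTypeE.container), str(ResourceTypeE.container)
+#     (3, '3')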
+ + +@unique +class ConsistencyStrategyE(OneM2MIntEnum): + ABANDON_MEMBER = 1 + ABANDON_GROUP = 2 + SET_MIXED = 3 + + +@unique +class CmdTypeE(OneM2MIntEnum): + RESET = 1 + REBOOT = 2 + UPLOAD = 3 + DOWNLOAD = 4 + SOFTWAREINSTALL = 5 + SOFTWAREUNINSTALL = 6 + SOFTWAREUPDATE = 7 + + +@unique +class ExecModeTypeE(OneM2MIntEnum): + MMEDIATEONCE = 1 + IMMEDIATEREPEAT = 2 + RANDOMONCE = 3 + RANDOMREPEAT = 4 + + +@unique +class ExecStatusTypeE(OneM2MIntEnum): + INITIATED = 1 + PENDING = 2 + FINISHED = 3 + CANCELLING = 4 + CANCELLED = 5 + STATUS_NON_CANCELLABLE = 6 + + +@unique +class ExecResultTypeE(OneM2MIntEnum): + STATUS_REQUEST_UNSUPPORTED = 1 + STATUS_REQUEST_DENIED = 2 + STATUS_CANCELLATION_DENIED = 3 + STATUS_INTERNAL_ERROR = 4 + STATUS_INVALID_ARGUMENTS = 5 + STATUS_RESOURCES_EXCEEDED = 6 + STATUS_FILE_TRANSFER_FAILED = 7 + STATUS_FILE_TRANSFER_SERVER_AUTHENTICATION_FAILURE = 8 + STATUS_UNSUPPORTED_PROTOCOL = 9 + STATUS_UPLOAD_FAILED = 10 + STATUS_FILE_TRANSFER_FAILED_MULTICAST_GROUP_UNABLE_JOIN = 11 + STATUS_FILE_TRANSFER_FAILED_SERVER_CONTACT_FAILED = 12 + STATUS_FILE_TRANSFER_FAILED_FILE_ACCESS_FAILED = 13 + STATUS_FILE_TRANSFER_FAILED_DOWNLOAD_INCOMPLETE = 14 + STATUS_FILE_TRANSFER_FAILED_FILE_CORRUPTED = 15 + STATUS_FILE_TRANSFER_FILE_AUTHENTICATION_FAILURE = 16 + STATUS_FILE_TRANSFER_WINDOW_EXCEEDED = 19 + STATUS_INVALID_UUID_FORMAT = 20 + STATUS_UNKNOWN_EXECUTION_ENVIRONMENT = 21 + STATUS_DISABLED_EXECUTION_ENVIRONMENT = 22 + STATUS_EXECUTION_ENVIRONMENT_MISMATCH = 23 + STATUS_DUPLICATE_DEPLOYMENT_UNIT = 24 + STATUS_SYSTEM_RESOURCES_EXCEEDED = 25 + STATUS_UNKNOWN_DEPLOYMENT_UNIT = 26 + STATUS_INVALID_DEPLOYMENT_UNIT_STATE = 27 + STATUS_INVALID_DEPLOYMENT_UNIT_UPDATE_DOWNGRADE_DISALLOWED = 28 + STATUS_INVALID_DEPLOYMENT_UNIT_UPDATE_UPGRADE_DISALLOWED = 29 + STATUS_INVALID_DEPLOYMENT_UNIT_UPDATE_VERSION_EXISTS = 30 + + +@unique +class PendingNotificationE(OneM2MIntEnum): + sendLatest = 1 + sendAllPending = 2 + + +@unique +class NotificationContentTypeE(OneM2MIntEnum): + allAttributes = 1 + modifiedAttributes = 2 + resourceID = 3 + + +@unique +class NotificationEventTypeE(OneM2MIntEnum): + updateOfResource = 1 + deleteOfResource = 2 + createOfDirectChildResource = 3 + deleteOfDirectChildResource = 4 + + +@unique +class StatusE(OneM2MIntEnum): + Successful = 1 + Failure = 2 + In_Process = 3 + + +@unique +class BatteryStatusE(OneM2MIntEnum): + NORMAL = 1 + CHARGING = 2 + CHARGING_COMPLETE = 3 + DAMAGED = 4 + LOW_BATTERY = 5 + NOT_INSTALLED = 6 + UNKNOWN = 7 + + +@unique +class ManagementDefinitionE(OneM2MIntEnum): + firmware = 1001 + software = 1002 + memory = 1003 + areaNwkInfo = 1004 + areaNwkDeviceInfo = 1005 + battery = 1006 + deviceInfo = 1007 + deviceCapability = 1008 + reboot = 1009 + eventLog = 1010 + cmdhPolicy = 1011 + activeCmdhPolicy = 1012 + cmdhDefaults = 1013 + cmdhDefEcValue = 1014 + cmdhEcDefParamValues = 1015 + cmdhLimits = 1016 + cmdhNetworkAccessRules = 1017 + cmdhNwAccessRule = 1018 + cmdhBuffer = 1019 + Unspecified = 0 + + +@unique +class LogTypeIdE(OneM2MIntEnum): + System = 1 + Security = 2 + Event = 3 + Trace = 4 + Panic = 5 + + +@unique +class LogStatusE(OneM2MIntEnum): + Started = 1 + Stopped = 2 + Unknown = 3 + NotPresent = 4 + Error = 5 + + +@unique +class EventTypeE(OneM2MIntEnum): + DATAOPERATION = 1 + STORAGEBASED = 2 + TIMERBASED = 3 + + +@unique +class StatsRuleStatusTypeE(OneM2MIntEnum): + ACTIVE = 1 + INACTIVE = 2 + + +@unique +class StatModelTypeE(OneM2MIntEnum): + EVENTBASED = 1 + + +@unique +class EncodingTypeE(OneM2MIntEnum): + plain = 0 + 
base64String = 1 + base64Binary = 2 + + +# TODO(rkr): values are wrong? +# => see TS-0004, p.47, m2m:accessControlOperations, +# => more values in xsd enumerationTypes +@unique +class AccessControlOperationE(OneM2MIntEnum): + create = 1 + retrieve = 2 + update = 4 + delete = 8 + notify = 16 + discover = 32 + +# TODO: SRole-ID + + +@unique +class FilterUsageE(OneM2MIntEnum): + Discovery = 1 + ConditionalRetrieval = 2 + + +@unique +class CountryCodeE(OneM2MIntEnum): + india = 91 + usa = 01 + + +@unique +class SecurityInfoTypeE(OneM2MIntEnum): + # TS-0004, p.49, Table 6.3.4.2.35-1 + DynamicAuthorizationRequest = 1 + DynamicAuthorizationResponse = 2 + ReceiverE2ERandObjectRequest = 3 + ReceiverE2ERandObjectResponse = 4 + ESPrimObject = 5 + ESCertKEMessage = 6 + + +################################################################################ +# commonTypes +################################################################################ + +# simple ####################################################################### + + +class IDS(UnicodeAttribute): + pass + +# TODO: nodeID + +# TODO: deviceID + +# TODO: externalID + + +class RequestIDS(UnicodeAttribute): + pass + + +class NhURIS(UnicodeAttribute): + pass + +# TODO: acpType + + +class LabelsS(StringListAttribute): + pass + +# TODO: triggerRecipientID + +# TODO: listOfM2MID + +# TODO: longMin-1 + +# TODO: listOfMinMax + +# TODO: backOffParameters + +# TODO: poaList + + +class TimestampS(DatetimeAttribute): + pass + +# TODO: absRelTimestamp + +# TODO: typeOfContent + +# TODO: permittedMediaTypes + +# TODO: serializations + +# TODO: contentInfo + +# TODO: eventCat + +# TODO: eventCatWithDef + +# TODO: listOfEventCat + +# TODO: listOfEventCatWithDef + +# TODO: scheduleEntry + + +class ListOfURIsS(StringListAttribute): + content_type = AnyURI + + +class AttributeListS(StringListAttribute): + pass + +# complex ###################################################################### + +# TODO: deliveryMetaData + +# TODO: aggregatedRequest + +# TODO: metaInformation + +# TODO: primitiveContent + + +class FilterCriteria(OneM2MEntity): + createdBefore = TimestampS() + createdAfter = TimestampS() + modifiedSince = TimestampS() + unmodifiedSince = TimestampS() + stateTagSmaller = Attribute(int) # xs:positiveInteger + stateTagBigger = Attribute(int) # xs:nonNegativeInteger + expireBefore = TimestampS() + expireAfter = TimestampS() + labels = StringListAttribute() + resourceType = ListAttribute(ResourceTypeE) + sizeAbove = Attribute(int) # xs:nonNegativeInteger + sizeBelow = Attribute(int) # xs:positiveInteger + contentType = UnicodeAttribute() # m2m:typeOfContent + attribute = ListAttribute() # m2m:attribute + filterUsage = EntityAttribute(FilterUsageE) + limit = Attribute(int) # xs:nonNegativeInteger + +# TODO: attribute + +# TODO: scheduleEntries + +# TODO: actionStatus + +# TODO: anyArgType + +# TODO: resetArgsType + +# TODO: rebootArgsType + +# TODO: uploadArgsType + +# TODO: downloadArgsType + +# TODO: softwareInstallArgsType + +# TODO: softwareUpdateArgsType + +# TODO: softwareUninstallArgsType + +# TODO: execReqArgsListType + +# TODO: mgmtLinkRef + +# TODO: childResourceRef + +# TODO: responseTypeInfo + +# TODO: operationResult + + +# TODO(rkr): +# "This is an xs:choice. 
A locationRegion shall contain either: +# 1) A countryCode element, in which case circRegion shall not appear, or +# 2) A circRegion element, in which case countryCode shall not appear" +class LocationRegionC(OneM2MEntity): + countryCode = ListAttribute(CountryCodeE) + circRegion = ListAttribute(float) # "list of 3 xs:float": values "represent" latitude. longitude, radius + + +class LabeledResource(OneM2MResource): + labels = LabelsS() + + +class ExpiringResource(OneM2MResource): + expirationTime = TimestampS(mandatory=False) + + +class AccessControlPolicyIDHolder(OneM2MResource): + accessControlPolicyIDs = StringListAttribute() + + +class DynamicAuthorizationConsultationIDHolder(OneM2MResource): + dynamicAuthorizationConsultationIDs = ListOfURIsS() + + +class SubscribableResource(OneM2MResource): + pass + + +class AnnounceableResource(OneM2MResource): + announceTo = ListOfURIsS() + announcedAttribute = UnicodeAttribute() # TODO + + +class AnnouncedResource(OneM2MResource): + link = Attribute(AnyURI) + + +class ResourceC(LabeledResource): + __child_types__ = () + + typename = None + + resourceName = UnicodeAttribute(accesstype=Attribute.WO) + + resourceType = EntityAttribute(ResourceTypeE, accesstype=Attribute.RO) + resourceID = IDS(accesstype=Attribute.RO) + parentID = NhURIS(accesstype=Attribute.RO) + + lastModifiedTime = TimestampS(accesstype=Attribute.RO) + creationTime = TimestampS(accesstype=Attribute.RO) + + childResource = ListAttribute() + + @property + def name(self): + return self.resourceName + + @property + def id(self): + return self.resourceID + + def __repr__(self): + return "%s(path='%s', id='%s')" % (type(self).__name__, self.path, + self.id) + + def has_child_type(self, resource_type): + return resource_type in self.__child_types__ + + +ResourceC.childResource.content_type = ResourceC + + +class RegularResourceC(ResourceC, ExpiringResource, AccessControlPolicyIDHolder, + DynamicAuthorizationConsultationIDHolder): + pass + + +class AnnounceableResourceC(RegularResourceC, AnnounceableResource): + pass + + +class AnnouncedResourceC(RegularResourceC, AnnouncedResource): + pass + + +class AnnounceableSubordinateResourceC(ResourceC, ExpiringResource, + AnnounceableResource): + pass + + +class AnnouncedSubordinateResourceC(ResourceC, ExpiringResource, + AnnouncedResource): + pass + +# TODO: mgmtResource + +# TODO: announcedMgmtResource + + +################################################################################ +# requestPrimitive +################################################################################ + +class RequestPrimitive(OneM2MEntity): + operation = EntityAttribute(OperationE) + to = Attribute(AnyURI) + from_ = IDS() + requestIdentifier = RequestIDS() + resourceType = EntityAttribute(ResourceTypeE) + name = UnicodeAttribute() + primitiveContent = UnicodeAttribute() # m2m:primitiveContent + role = UnicodeAttribute() # xs:anyType + originatingTimestamp = TimestampS() + requestExpirationTimestamp = TimestampS() # m2m::absRelTimestamp + resultExpirationTimestamp = TimestampS() # m2m::absRelTimestamp + operationExecutionTime = TimestampS() # m2m::absRelTimestamp + responseType = UnicodeAttribute() # m2m:responseTypeInfo + resultPersistence = TimestampS() # m2m::absRelTimestamp + resultContent = UnicodeAttribute() # m2m:resultContent + eventCategory = UnicodeAttribute() # m2m:eventCat + deliveryAggregation = Attribute(bool) + groupRequestIdentifier = UnicodeAttribute() + filterCriteria = EntityAttribute(FilterCriteria) + discoveryResultType = 
EntityAttribute(DiscResTypeE) + + +class AttributeList(OneM2MContentResource): + typename = "attributeList" + CONTENT = AttributeListS() + + +################################################################################ +# responsePrimitive +################################################################################ + +class ResponsePrimitive(OneM2MEntity): + responseStatusCode = UnicodeAttribute() # m2m:responseStatusCode + requestIdentifier = RequestIDS() + primitiveContent = UnicodeAttribute() # m2m:primitiveContent + to = IDS() + from_ = IDS() + originatingTimestamp = TimestampS() + resultExpirationTimestamp = TimestampS() # m2m:absRelTimestamp + eventCategory = UnicodeAttribute() # m2m:eventCat + + +class Resource(OneM2MContentResource): + pass + + +class URIList(OneM2MContentResource): + typename = "URIList" + CONTENT = ListOfURIsS() + + +class AggregatedResponse(OneM2MEntity): + responsePrimitive = ListAttribute(ResponsePrimitive) + + +################################################################################ +# notification +################################################################################ + +class OperationMonitorTypeC(OneM2MEntity): + operation = UnicodeAttribute() # m2m:operation + originator = UnicodeAttribute() # m2m:ID + + +class NotificationEventC(OneM2MEntity): + representation = EntityAttribute(ResourceC) # xs:anyType + operationMonitor = EntityAttribute(OperationMonitorTypeC) + notificationEventType = EntityAttribute(NotificationEventTypeE) + + +class Notification(OneM2MEntity): + notificationEvent = EntityAttribute(NotificationEventC) + verificationRequest = Attribute(bool) + subscriptionDeletion = Attribute(bool) + subscriptionReference = Attribute(AnyURI) + creator = UnicodeAttribute() # ID + notificationForwardingURI = Attribute(AnyURI) + + +class AggregatedNotification(OneM2MEntity): + """See TS-0004 Table 7.4.1.1-2""" + + notification = ListAttribute(Notification) + + +################################################################################ +# subscription +################################################################################ + +class EventNotificationCriteria(OneM2MEntity): + """See TS-0004 Table 6.3.2.3-1""" + + createdBefore = TimestampS() + createdAfter = TimestampS() + modifiedSince = TimestampS() + unmodifiedSince = TimestampS() + stateTagSmaller = Attribute(int) + stateTagBigger = Attribute(int) + expireBefore = TimestampS() + expireAfter = TimestampS() + sizeAbove = Attribute(int) + sizeBelow = Attribute(int) + operationMonitor = UnicodeAttribute() # ListAttribute(m2m:operation) + # attribute = Attribute(int) # enum but to be defined in the standard + attribute = UnicodeAttribute() # ListAttribute(m2m:attribute) + notificationEventType = ListAttribute(NotificationEventTypeE) + + +class BatchNotify(OneM2MEntity): + pass # TODO + + +class RateLimit(OneM2MEntity): + pass # TODO + + +class Subscription(RegularResourceC): + """ See TS-0001 section 9.6.8 + See TS-0004 Table 7.3.7.1-3""" + + eventNotificationCriteria = EntityAttribute(EventNotificationCriteria) + expirationCounter = Attribute(int) + notificationURI = ListOfURIsS(mandatory=True) + groupID = Attribute(AnyURI) + notificationForwardingURI = Attribute(AnyURI) + batchNotify = EntityAttribute(BatchNotify) + rateLimit = EntityAttribute(RateLimit) + preSubscriptionNotify = Attribute(int, accesstype=Attribute.WO, + mandatory=False) + pendingNotification = Attribute(PendingNotificationE) + notificationStoragePriority = Attribute(int) + latestNotify = 
Attribute(bool) + notificationContentType = Attribute(NotificationContentTypeE) + notificationEventCat = UnicodeAttribute() # m2m:eventCat + creator = IDS(accesstype=Attribute.WO, mandatory=False) + subscriberURI = Attribute(AnyURI, accesstype=Attribute.WO, mandatory=False) + + __child_types__ = ( + # Schedule, + ) + + +################################################################################ +# accessControlPolicy +################################################################################ + +class AccessControlObjectDetailsC(OneM2MEntity): + # specifies to which resource type the rule applies + resourceType = EntityAttribute(ResourceTypeE) + # TODO(rkr): Child resource types listed in the childResourceType component are subject of + # TODO access control for the Create operation only. Once a child resource is created, + # TODO the Access Control Policies assigned directly to it apply. + # for create operation only, list of creatable child resources + childResourceType = ListAttribute(ResourceTypeE) + specializationID = Attribute(AnyURI) # xs:anyURI + + +class AccessControlIpAddressesC(OneM2MEntity): + ipv4Addresses = ListAttribute(AnyURI) # m2m:ipv4 + ipv6Addresses = ListAttribute(AnyURI) # m2m:ipv6 + + +class AccessControlContextsC(OneM2MEntity): + accessControlWindow = StringListAttribute() # m2m:scheduleEntry + accessControlIpAddresses = EntityAttribute(AccessControlIpAddressesC) + accessControlLocationRegion = ListAttribute(LocationRegionC) # m2m:locationRegion + + +class AccessControlRuleC(OneM2MEntity): + accessControlOriginators = ListOfURIsS() # m2m:listOfURIs # Mand + accessControlOperations = ListAttribute(AccessControlOperationE) # Mand + accessControlContexts = ListAttribute(AccessControlContextsC) # Opt + # accessControlContexts = EntityAttribute(AccessControlContextsC) # Opt + # TODO(rkr): currently default of the Flag is set to False; + # TODO if not explicitly set to True the authorization is performed without authentication + # TODO when authentication is used, it should maybe set to be True by default + accessControlAuthenticationFlag = Attribute(bool, default=False) # Opt + # TODO(rkr): "ObjectDetails" only described in TS-0001, 9.6.2.4, p.121 (which is version 2.10.0) as optional + # TODO parameter of an access control rule, but not in TS-0004 and not in xsd version 2.7.0. 
+ # accessControlObjectDetails = ListAttribute(AccessControlObjectDetailsC) + + +class AccessControlPolicy(AnnounceableSubordinateResourceC, + SubscribableResource): + privileges = ListAttribute(AccessControlRuleC) + selfPrivileges = ListAttribute(AccessControlRuleC) + + __child_types__ = ( + Subscription, + ) + + +class AccessControlPolicyAnnc(AnnouncedSubordinateResourceC, + SubscribableResource): + privileges = ListAttribute(AccessControlRuleC) + selfPrivileges = ListAttribute(AccessControlRuleC) + + __child_types__ = ( + Subscription, + ) + + +################################################################################ +# SemanticDescriptor +################################################################################ + +class SemanticDescriptor(AnnounceableResourceC, SubscribableResource): + + # Has a default value : application/rdf+xml:1 + descriptorRepresentation = StringListAttribute(mandatory=True) + semanticOpExec = Attribute(str) + descriptor = Attribute(str, mandatory=True) + ontologyRef = Attribute(AnyURI, mandatory=False) + relatedSemantics = StringListAttribute(mandatory=False) + + __child_types__ = ( + Subscription, + ) + + +################################################################################ +# dynamicAuthorization +################################################################################ + +class DynamicAuthorizationConsultation(AnnounceableResourceC): + dynamicAuthorizationEnabled = Attribute(bool, mandatory=True) + dynamicAuthorizationPoA = ListOfURIsS() + dynamicAuthorizationLifetime = TimestampS() + + +class OriginatorIPTypeC(OneM2MEntity): + ipv4Address = UnicodeAttribute() # m2m:ipv4 + ipv6Address = UnicodeAttribute() # m2m:ipv6 + + +# see "CDT-notification-v2_7_0.xsd" for securityInfoType and dynAuthDasResponse +class DynamicACPInfoC(OneM2MEntity): + grantedPrivileges = ListAttribute(AccessControlRuleC) + privilegesLifetime = TimestampS() # m2m:absRelTimestamp + + +# see "CDT-commonTypes-v2_7_0.xsd" +class DynAuthDasRequestC(OneM2MEntity): + originator = UnicodeAttribute() # m2m:ID + targetedResourceType = Attribute(ResourceTypeE) + operation = UnicodeAttribute() # m2m:operation + # operation = EntityAttribute(OperationE) + originatorIP = EntityAttribute(OriginatorIPTypeC) + originatorLocation = EntityAttribute(LocationRegionC) + requestTimestamp = TimestampS() + targetedResourceID = UnicodeAttribute() # xs:anyURI + # targetedResourceID = Attribute(AnyURI) + proposedPrivilegesLifetime = TimestampS() + # TODO(rkr): is this ok? + originatorRoleIDs = StringListAttribute() # list of m2m:roleID + roleIDsFromACPs = StringListAttribute() # list of m2m:roleID + tokenIDs = StringListAttribute() # list of m2m:tokeID + + +class DynAuthDasResponseC(OneM2MEntity): + # dynamicACPInfo = ListAttribute(DynamicACPInfoC) + dynamicACPInfo = EntityAttribute(DynamicACPInfoC) + tokens = StringListAttribute() # list of simpleType m2m:dynAuthJWT + + +# TODO(rkr): check if correct +class SecurityInfo(OneM2MEntity): + securityInfoType = Attribute(SecurityInfoTypeE) + # dasRequest = ListAttribute() # lists are wrong? 
+ # dasResponse = ListAttribute() + dasRequest = EntityAttribute(DynAuthDasRequestC) + dasResponse = EntityAttribute(DynAuthDasResponseC) + esprimRandObject = ListAttribute() # m2m:receiverESPrimRandObject + esprimObject = ListAttribute() # m2m:e2eCompactJWE + escertkeMessage = ListAttribute() # xs:base64Binary + + +################################################################################ +# remoteCSE +################################################################################ + +class RemoteCSE(AnnounceableResourceC, SubscribableResource): + """See TS-0001 section 9.6.4""" + + cseType = Attribute(CSETypeIDE, accesstype=Attribute.WO, mandatory=False) + pointOfAccess = StringListAttribute() + CSEBase = UnicodeAttribute(accesstype=Attribute.WO) + CSE_ID = UnicodeAttribute(accesstype=Attribute.WO) # TODO: CSE-ID (minus!) + M2M_Ext_ID = UnicodeAttribute() # TODO: M2M-Ext-ID (minus!) + Trigger_Recipient_ID = UnicodeAttribute() # TODO: Trigger-Recipient-ID + requestReachability = Attribute(bool) + nodeLink = UnicodeAttribute() + + __child_types__ = ( + Subscription, + SemanticDescriptor, + ) + + +class RemoteCSEAnnc(AnnouncedResourceC, SubscribableResource): + cseType = Attribute(CSETypeIDE, accesstype=Attribute.WO, mandatory=False) + pointOfAccess = StringListAttribute() + CSEBase = UnicodeAttribute(accesstype=Attribute.WO) + CSE_ID = UnicodeAttribute(accesstype=Attribute.WO) # TODO: CSE-ID (minus!) + requestReachability = Attribute(bool) + nodeLink = UnicodeAttribute() + + __child_types__ = ( + Subscription, + SemanticDescriptor, + # TODO + ) + + +################################################################################ +# contentInstance +################################################################################ + +class ContentInstance(AnnounceableSubordinateResourceC, + SubscribableResource): + """See TS-0001 section 9.6.7""" + + stateTag = UnicodeAttribute(accesstype=Attribute.RO) + creator = UnicodeAttribute() # m2m:ID + # contentInfo = typeOfContent(:EncodingType) + # typeOfContent => Media Types + # ex: application/json:1 + contentInfo = UnicodeAttribute() # m2m:contentInfo + contentSize = Attribute(int, accesstype=Attribute.RO) + ontologyRef = UnicodeAttribute(accesstype=Attribute.WO) + content = Attribute(bytes, accesstype=Attribute.WO, mandatory=True) + + __child_types__ = ( + Subscription, + SemanticDescriptor, + ) + + +class ContentInstanceAnnc(AnnouncedSubordinateResourceC): + stateTag = UnicodeAttribute(accesstype=Attribute.RO) + contentInfo = UnicodeAttribute(EncodingTypeE) # m2m:contentInfo + contentSize = Attribute(int, accesstype=Attribute.WO) + ontologyRef = UnicodeAttribute(accesstype=Attribute.WO) + content = Attribute(bytes, accesstype=Attribute.WO, mandatory=True) + + +################################################################################ +# container +################################################################################ + +class Container(AnnounceableResourceC, SubscribableResource): + """See TS-0001 section 9.6.6""" + + stateTag = UnicodeAttribute(accesstype=Attribute.RO) + creator = UnicodeAttribute() + maxNrOfInstances = Attribute(int) + maxByteSize = Attribute(int) + maxInstanceAge = UnicodeAttribute(mandatory=False) # todo + currentNrOfInstances = Attribute(int, accesstype=Attribute.RO) + currentByteSize = Attribute(int, accesstype=Attribute.RO) + locationID = UnicodeAttribute() + ontologyRef = UnicodeAttribute() + latest = Attribute(ContentInstance, mandatory=False) + oldest = Attribute(ContentInstance, 
mandatory=False) + + __child_types__ = ( + ContentInstance, + Subscription, + SemanticDescriptor, + ) + + +Container.__child_types__ = ( + ContentInstance, + Container, + Subscription, + SemanticDescriptor, +) + + +class ContainerAnnc(AnnouncedResourceC, SubscribableResource): + + stateTag = UnicodeAttribute(accesstype=Attribute.RO) + maxNrOfInstances = Attribute(int) + maxByteSize = Attribute(int) + maxInstanceAge = UnicodeAttribute(mandatory=False) # todo + currentNrOfInstances = Attribute(int, accesstype=Attribute.RO) + currentByteSize = Attribute(int, accesstype=Attribute.RO) + locationID = UnicodeAttribute() + ontologyRef = UnicodeAttribute() + latest = Attribute(ContentInstance, mandatory=False) + + __child_types__ = ( + ContentInstance, + ContentInstanceAnnc, + Container, + Subscription, + SemanticDescriptor, + ) + + +ContainerAnnc.__child_types__ = ( + ContentInstance, + ContentInstanceAnnc, + Container, + ContainerAnnc, + Subscription, + SemanticDescriptor, +) + + +################################################################################ +# AE +################################################################################ + +class AE(AnnounceableResourceC, SubscribableResource): + """See TS-0001 section 9.6.5""" + + typename = "AE" + + appName = UnicodeAttribute() + App_ID = UnicodeAttribute(accesstype=Attribute.WO, mandatory=True) + AE_ID = UnicodeAttribute(accesstype=Attribute.RO) # m2m:ID + pointOfAccess = StringListAttribute() # m2m:poaList + ontologyRef = UnicodeAttribute() # xs:anyURI + nodeLink = UnicodeAttribute(accesstype=Attribute.RO) # xs:anyURI + requestReachability = Attribute(bool, mandatory=True) + contentSerialization = UnicodeAttribute() # TODO m2m:serializations + + __child_types__ = ( + Container, + # Group, + Subscription, + AccessControlPolicy, + # PollingChannel, + # Schedule, + DynamicAuthorizationConsultation, + SemanticDescriptor, + ) + + +class AEAnnc(AnnouncedResourceC, SubscribableResource): + + typename = "AEAnnc" + + appName = UnicodeAttribute(accesstype=Attribute.WO) + App_ID = UnicodeAttribute() + AE_ID = UnicodeAttribute() + pointOfAccess = StringListAttribute() + ontologyRef = UnicodeAttribute() + nodeLink = UnicodeAttribute() + requestReachability = Attribute(bool) + + __child_types__ = ( + Container, + ContainerAnnc, + # Group, + # GroupAnnc, + Subscription, + # AccessControlPolicy, + # AccessControlPolicyAnnc, + # PollingChannel, + # Schedule, + SemanticDescriptor, + ) + + +################################################################################ +# CSEBase +################################################################################ + +class CSEBase(ResourceC, SubscribableResource, AccessControlPolicyIDHolder): + """See TS-0001 section 9.6.3""" + + typename = "CSEBase" + + cseType = Attribute(CSETypeIDE, accesstype=Attribute.WO) + CSE_ID = UnicodeAttribute(accesstype=Attribute.WO) # TODO: CSE-ID (minus!) 
+ supportedResourceType = StringListAttribute(content_type=ResourceTypeE, + accesstype=Attribute.RO) + pointOfAccess = StringListAttribute() + nodeLink = UnicodeAttribute() + + __child_types__ = ( + RemoteCSE, + # Node, + AE, + Container, + # Group, + AccessControlPolicy, + Subscription, + # MgmtCmd, + # LocationPolicy, + # StatsConfig, + # StatsCollect, + # Request, + # Delivery, + # Schedule, + # M2mServiceSubscriptionProfile, + DynamicAuthorizationConsultation, + SemanticDescriptor, + ) + + +################################################################################ +# misc +################################################################################ + +long_to_short_attribute_mapping = { + "accessControlPolicyIDs": "acpi", + "announcedAttribute": "aa", + "announceTo": "at", + "creationTime": "ct", + "expirationTime": "et", + "labels": "lbl", + "lastModifiedTime": "lt", + "parentID": "pi", + "resourceID": "ri", + "resourceType": "ty", + "stateTag": "st", + "resourceName": "rn", + "privileges": "pv", + "selfPrivileges": "pvs", + "App-ID": "api", + "AE-ID": "aei", + "appName": "apn", + "pointOfAccess": "poa", + "ontologyRef": "or", + "nodeLink": "nl", + "contentSerialization": "csz", + "creator": "cr", + "maxNrOfInstances": "mni", + "maxByteSize": "mbs", + "maxInstanceAge": "mia", + "currentNrOfInstances": "cni", + "currentByteSize": "cbs", + "locationID": "li", + "contentInfo": "cnf", + "contentSize": "cs", + "primitiveContent": "pc", + "content": "con", + "cseType": "cst", + "CSE-ID": "csi", + "supportedResourceType": "srt", + "notificationCongestionPolicy": "ncp", + "source": "sr", + "target": "tg", + "lifespan": "ls", + "eventCat": "ec", + "deliveryMetaData": "dmd", + "aggregatedRequest": "arq", + "eventID": "evi", + "eventType": "evt", + "evenStart": "evs", + "eventEnd": "eve", + "operationType": "opt", + "dataSize": "ds", + "execStatus": "exs", + "execResult": "exr", + "execDisable": "exd", + "execTarget": "ext", + "execMode": "exm", + "execFrequency": "exf", + "execDelay": "exy", + "execNumber": "exn", + "execReqArgs": "exra", + "execEnable": "exe", + "memberType": "mt", + "currentNrOfMembers": "cnm", + "maxNrOfMembers": "mnm", + "memberIDs": "mid", + "membersAccessControlPolicyIDs": "macp", + "memberTypeValidated": "mtv", + "consistencyStrategy": "csy", + "groupName": "gn", + "locationSource": "los", + "locationUpdatePeriod": "lou", + "locationTargetId": "lot", + "locationServer": "lor", + "locationContainerID": "loi", + "locationContainerName": "lon", + "locationStatus": "lost", + "serviceRoles": "svr", + "description": "dc", + "cmdType": "cmt", + "mgmtDefinition": "mgd", + "objectIDs": "onis", + "objectPaths": "obps", + "nodeID": "ni", + "hostedCSELink": "hcl", + "CSEBase": "cb", + "M2M-Ext-ID": "mei", + "Trigger-Recipient-ID": "tri", + "requestReachability": "rr", + "originator": "og", + "metaInformation": "mi", + "requestStatus": "rs", + "operationResult": "ol", + "operation": "opn", + "requestID": "rid", + "scheduleElement": "se", + "deviceIdentifier": "di", + "ruleLinks": "rlk", + "statsCollectID": "sci", + "collectingEntityID": "cei", + "collectedEntityID": "cdi", + "devStatus": "ss", + "statsRuleStatus": "srs", + "statModel": "sm", + "collectPeriod": "cp", + "eventNotificationCriteria": "enc", + "expirationCounter": "exc", + "notificationURI": "nu", + "groupID": "gpi", + "notificationForwardingURI": "nfu", + "batchNotify": "bn", + "rateLimit": "rl", + "preSubscriptionNotify": "psn", + "pendingNotification": "pn", + "notificationStoragePriority": "nsp", + 
"latestNotify": "ln", + "notificationContentType": "nct", + "notificationEventCat": "nec", + "subscriberURI": "su", + "version": "vr", + "URL": "url", + "update": "ud", + "updateStatus": "uds", + "install": "in", + "uninstall": "un", + "installStatus": "ins", + "activate": "act", + "deactivate": "dea", + "activeStatus": "acts", + "memAvailable": "mma", + "memTotal": "mmt", + "areaNwkType": "ant", + "listOfDevices": "idv", + "devId": "dvd", + "devType": "dvt", + "areaNwkId": "awi", + "sleepInterval": "sli", + "sleepDuration": "sld", + "listOfNeighbors": "lnh", + "batteryLevel": "btl", + "batteryStatus": "bts", + "deviceLabel": "dlb", + "manufacturer": "man", + "model": "mod", + "deviceType": "dty", + "fwVersion": "fwv", + "swVersion": "swv", + "hwVersion": "hwv", + "capabilityName": "can", + "attached": "att", + "capabilityActionStatus": "cas", + "enable": "ena", + "disable": "dis", + "currentState": "cus", + "reboot": "rbo", + "factoryReset": "far", + "logTypeId": "lgt", + "logData": "lgd", + "logActionStatus": "lgs", + "logStatus": "lgst", + "logStart": "lga", + "logStop": "lgo", + "firmwareName": "fwnnam", + "softwareName": "swn", + "cmdhPolicyName": "cpn", + "mgmtLink": "cmlk", + "activeCmdhPolicyLink": "acmlk", + "order": "od", + "defEcValue": "dev", + "requestOrigin": "ror", + "requestContext": "rct", + "requestContextNotification": "rcn", + "requestCharacteristics": "rch", + "applicableEventCategories": "aecs", + "applicableEventCategory": "aec", + "defaultRequestExpTime": "dget", + "defaultResultExpTime": "dset", + "defaultOpExecTime": "doet", + "defaultRespPersistence": "drp", + "defaultDelAggregation": "dda", + "limitsEventCategory": "lec", + "limitsRequestExpTime": "lget", + "limitsResultExpTime": "lset", + "limitsOpExecTime": "loet", + "limitsRespPersistence": "lrp", + "limitsDelAggregation": "lda", + "targetNetwork": "ttn", + "minReqVolume": "mrv", + "backOffParameters": "bop", + "otherConditions": "ohc", + "maxBufferSize": "mbfs", + "storagePriority": "sgp", + "applicableCredIDs": "apci", + "allowedApp-IDs": "aai", + "allowedAEs": "aae", + "dynamicAuthorizationConsultationIDs": "daci", + "dynamicAuthorizationEnabled": "dae", + "dynamicAuthorizationPoA": "dap", + "dynamicAuthorizationLifetime": "dal", + # TODO (rkr): resourceType is specified in Table 8.2.3-267, "Resource attribute short names", two times with + # TODO different short names "ty" and "acodTy" + # there is some issue paper from oneM2M where there have found out that this is an issue with usual resource type + # "resourceType": "acodTy" + "descriptorRepresentation": "dcrp", + "semanticOpExec": "soe", + "descriptor": "dsp", + "relatedSemantics": "rels", +} + +short_to_long_attribute_mapping = {v: k for k, v in + long_to_short_attribute_mapping.items()} + + +def get_long_attribute_name(n): + return short_to_long_attribute_mapping.get(n) + + +def get_short_attribute_name(n): + return long_to_short_attribute_mapping.get(n) + + +long_to_short_resource_mapping = { + "accessControlPolicy": "acp", + "accessControlPolicyAnnc": "acpA", + "AE": "ae", + "AEAnnc": "aeA", + "container": "cnt", + "containerAnnc": "cntA", + "latest": "la", + "oldest": "ol", + "contentInstance": "cin", + "contentInstanceAnnc": "cinA", + "CSEBase": "cb", + "delivery": "dlv", + "eventConfig": "evcg", + "execInstance": "exin", + "fanOutPoint": "fopt", + "group": "grp", + "groupAnnc": "grpA", + "locationPolicy": "lcp", + "locationPolicyAnnc": "lcpA", + "m2mServiceSubscriptionProfile": "mssp", + "mgmtCmd": "mgc", + "mgmtObj": "mgo", + "mgmtObjAnnc": 
"mgoA", + "node": "nod", + "nodeAnnc": "nodA", + "pollingChannel": "pch", + "pollingChannelURI": "pcu", + "remoteCSE": "csr", + "remoteCSEAnnc": "csrA", + "request": "req", + "schedule": "sch", + "scheduleAnnc": "schA", + "serviceSubscribedAppRule": "asar", + "serviceSubscribedNode": "svsn", + "statsCollect": "stcl", + "statsConfig": "stcg", + "subscription": "sub", + "semanticDescriptor": "smd", + "firmware": "fwr", + "firmwareAnnc": "fwrA", + "software": "swr", + "softwareAnnc": "swrA", + "memory": "mem", + "memoryAnnc": "memA", + "areaNwkInfo": "ani", + "areaNwkInfoAnnc": "aniA", + "areaNwkDeviceInfo": "andi", + "areaNwkDeviceInfoAnnc": "andiA", + "battery": "bat", + "batteryAnnc": "batA", + "deviceInfo": "dvi", + "deviceInfoAnnc": "dviA", + "deviceCapability": "dvc", + "deviceCapabilityAnnc": "dvcA", + "reboot": "rbo", + "rebootAnnc": "rboA", + "eventLog": "evl", + "eventLogAnnc": "evlA", + "cmdhPolicy": "cmp", + "activeCmdhPolicy": "acmp", + "cmdhDefaults": "cmdf", + "cmdhDefEcValue": "cmdv", + "cmdhEcDefParamValues": "cmpv", + "cmdhLimits": "cml", + "cmdhNetworkAccessRules": "cmnr", + "cmdhNwAccessRule": "cmwr", + "cmdhBuffer": "cmbf", + "dynamicAuthorizationConsultation": "dac" +} + +short_to_long_resource_mapping = {v: k for k, v in + long_to_short_resource_mapping.items()} + + +def get_long_resource_name(n): + return short_to_long_resource_mapping.get(n) + + +def get_short_resource_name(n): + return long_to_short_resource_mapping.get(n) + + +long_to_short_member_mapping = { + "createdBefore": "crb", + "createdAfter": "cra", + "modifiedSince": "ms", + "unmodifiedSince": "us", + "stateTagSmaller": "sts", + "stateTagBigger": "stb", + "expireBefore": "exb", + "expireAfter": "exa", + "labels": "lbl", + "resourceType": "ty", + "sizeAbove": "sza", + "sizeBelow": "szb", + "contentType": "cty", + "limit": "lim", + "attribute": "atr", + "notificationEventType": "net", + "operationMonitor": "om", + "representation": "rep", + "filterUsage": "fu", + "eventCatType": "ect", + "eventCatNo": "ecn", + "number": "num", + "duration": "dur", + "notification": "sgn", + "notificationEvent": "nev", + "verificationRequest": "vrq", + "subscriptionDeletion": "sud", + "subscriptionReference": "sur", + "creator": "cr", + "notificationForwardingURI": "nfu", + "operation": "opr", + "originator": "org", + "accessId": "aci", + "MSISDN": "msd", + "action": "acn", + "status": "sus", + "childResource": "ch", + "accessControlRule": "acr", + "accessControlOriginators": "acor", + "accessControlOperations": "acop", + "accessControlContexts": "acco", + "accessControlWindow": "actw", + "accessControlIpAddresses": "acip", + "ipv4Addresses": "ipv4", + "ipv6Addresses": "ipv6", + "accessControlLocationRegion": "aclr", + "countryCode": "accc", + "circRegion": "accr", + "name": "nm", + "value": "val", + "type": "typ", + "maxNrOfNotify": "mnn", + "timeWindow": "tww", + "scheduleEntry": "sce", + "aggregatedNotification": "agn", + "attributeList": "atrl", + "aggregatedResponse": "agr", + "resource": "rce", + "URIList": "uril", + "anyArg": "any", + "fileType": "ftyp", + "URL": "url", + "username": "unm", + "password": "pwd", + "fileSize": "fsi", + "targetFile": "tgf", + "delaySeconds": "dss", + "successURL": "surl", + "startTime": "stt", + "completeTime": "cpt", + "UUID": "uuid", + "executionEnvRef": "eer", + "version": "vr", + "reset": "rst", + "reboot": "rbo", + "upload": "uld", + "download": "dld", + "softwareInstall": "swin", + "softwareUpdate": "swup", + "softwareUninstall": "swun", + "tracingOption": "tcop", + "tracingInfo": 
"tcin", + "responseTypeValue": "rtv", + "notificationURI": "nu", + "accessControlAuthenticationFlag": "acaf", + "ipv4Address": "ip4", + "ipv6Address": "ip6", + "specializationID": "spid", + "accessControlObjectDetails": "acod", + "childResourceType": "chty", + "targetedResourceType": "trt", + "originatorIP": "oip", + "originatorLocation": "olo", + "originatorRoleIDs": "orid", + "requestTimestamp": "rts", + "targetedResourceID": "trid", + "proposedPrivilegesLifetime": "ppl", + "roleIDsFromACPs": "rfa", + "tokenIDs": "tids", + "dynamicACPInfo": "dai", + "grantedPrivileges": "gp", + "privilegesLifetime": "pl", + "tokens": "tkns", + "securityInfo": "seci", + "securityInfoType": "sit", + "dasRequest": "dreq", + "dasResponse": "dres", + "esprimRandObject": "ero", + "esprimObject": "epo", + "escertkeMessage": "eckm" +} + +short_to_long_member_mapping = {v: k for k, v in + long_to_short_member_mapping.items()} + + +def get_long_member_name(n): + return short_to_long_member_mapping.get(n) + + +def get_short_member_name(n): + return long_to_short_member_mapping.get(n) + + +long_to_short_root_mapping = { + "requestPrimitive": "rqp", + "responsePrimitive": "rsp" +} + +short_to_long_root_mapping = {v: k for k, v in + long_to_short_root_mapping.items()} + + +def get_long_root_name(n): + return short_to_long_root_mapping.get(n) + + +def get_short_root_name(n): + return long_to_short_root_mapping.get(n) + + +long_to_short_parameter_mapping = { + "operation": "op", + "to": "to", + "from": "fr", + "requestIdentifier": "rqi", + "resourceType": "ty", + "primitiveContent": "pc", + "role": "rol", + "originatingTimestamp": "ot", + "requestExpirationTimestamp": "rqet", + "resultExpirationTimestamp": "rset", + "operationExecutionTime": "oet", + "responseType": "rt", + "resultPersistence": "rp", + "resultContent": "rcn", + "eventCategory": "ec", + "deliveryAggregation": "da", + "groupRequestIdentifier": "gid", + "filterCriteria": "fc", + "discoveryResultType": "drt", + "responseStatusCode": "rsc" +} + +short_to_long_parameter_mapping = {v: k for k, v in + long_to_short_parameter_mapping.items()} + + +def get_long_parameter_name(n): + return short_to_long_parameter_mapping.get(n) + + +def get_short_parameter_name(n): + return long_to_short_parameter_mapping.get(n) + + +_all_types = {k: v for k, v in globals().iteritems() + if issubclass(v, OneM2MEntity) and not v.__subclasses__()} + +_all_types_short = {} +_all_types_long = {} + +for k, v in _all_types.iteritems(): + if get_short_resource_name(k): + long_name = k + short_name = get_short_resource_name(k) + elif get_short_attribute_name(k): + long_name = k + short_name = get_short_attribute_name(k) + elif get_short_member_name(k): + long_name = k + short_name = get_short_member_name(k) + elif get_short_root_name(k): + long_name = k + short_name = get_short_root_name(k) + elif get_short_resource_name(k[0].lower() + k[1:]): + long_name = k[0].lower() + k[1:] + short_name = get_short_resource_name(long_name) + elif get_short_attribute_name(k[0].lower() + k[1:]): + long_name = k[0].lower() + k[1:] + short_name = get_short_attribute_name(long_name) + elif get_short_member_name(k[0].lower() + k[1:]): + long_name = k[0].lower() + k[1:] + short_name = get_short_member_name(long_name) + elif get_short_root_name(k[0].lower() + k[1:]): + long_name = k[0].lower() + k[1:] + short_name = get_short_root_name(long_name) + else: + continue + _all_types_short[short_name] = v + _all_types_long[long_name] = v + + +_resource_types = {k: v for k, v in _all_types.iteritems() + if 
issubclass(v, ResourceC)} + +_resource_types_short = {} +_resource_types_long = {} + +for k, v in _resource_types.iteritems(): + if get_short_resource_name(k): + long_name = k + short_name = get_short_resource_name(k) + elif get_short_resource_name(k[0].lower() + k[1:]): + long_name = k[0].lower() + k[1:] + short_name = get_short_resource_name(long_name) + else: + continue + _resource_types_short[short_name] = v + _resource_types_long[long_name] = v + + +def get_onem2m_type(typename): + try: + try: + return _all_types_short[typename] + except KeyError: + return _all_types_long[typename] + except KeyError: + raise ModelTypeError("Not a valid type: %s" % (typename,)) + + +def get_onem2m_resource_type(typename): + try: + try: + return _resource_types_short[typename] + except KeyError: + return _resource_types_long[typename] + except KeyError: + raise ModelTypeError("Not a valid resource type: %s" % (typename,)) + + +def get_onem2m_types(): + return _all_types.values() + + +def get_onem2m_resource_types(): + return _resource_types.values() diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/model.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/model.pyc new file mode 100644 index 0000000..85f07ee Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/model.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/serializer/__init__.py b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/__init__.py new file mode 100644 index 0000000..a587b67 --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/__init__.py @@ -0,0 +1,93 @@ +from .json import OneM2MJsonSerializer +from openmtc_onem2m.exc import CSEBadRequest, CSEContentsUnacceptable +from werkzeug import Accept, parse_accept_header +from futile.logging import get_logger +from openmtc.exc import OpenMTCError + +_factories = {"application/json": OneM2MJsonSerializer, + "application/vnd.onem2m-res+json": OneM2MJsonSerializer, + "application/vnd.onem2m-ntfy+json": OneM2MJsonSerializer, + "application/vnd.onem2m-attrs+json": OneM2MJsonSerializer, + "text/plain": OneM2MJsonSerializer} +_serializers = {} + + +def create_onem2m_serializer(content_type): + try: + factory = _factories[content_type] + except KeyError: + raise CSEBadRequest("Unsupported content type: %s. Try one of %s" % + (content_type, ', '.join(_factories.keys()))) + return factory() + + +def get_onem2m_supported_content_types(): + return _factories.keys() + + +def get_onem2m_decoder(content_type): + # TODO: Check if this is faster than split + content_type, _, _ = content_type.partition(";") + + content_type = content_type.strip().lower() + + try: + return _serializers[content_type] + except KeyError: + serializer = create_onem2m_serializer(content_type) + _serializers[content_type] = serializer + return serializer +get_serializer = get_onem2m_decoder + + +def get_onem2m_encoder(accept): + # TODO: optimize + if accept: + parsed_accept_header = parse_accept_header(accept, Accept) + """:type : Accept""" + supported = get_onem2m_supported_content_types() + accepted_type = parsed_accept_header.best_match(supported) + if not accepted_type: + raise CSEContentsUnacceptable("%s is not supported. 
" + "Supported content types are: %s" % + (accept, ', '.join(supported))) + else: + # TODO: use config["default_content_type"] + accepted_type = "application/json" + + # TODO: optimize + return get_serializer(accepted_type) + + +def register_onem2m_serializer(content_type, factory): + set_value = _factories.setdefault(content_type, factory) + + if set_value is not factory: + raise OpenMTCError("Content type is already registered: %s" % + (content_type, )) + +################################################################################ +# import other serializers at serializers +################################################################################ +# import impl +# import pkgutil +# +# logger = get_logger(__name__) +# +# for _importer, modname, ispkg in pkgutil.iter_modules(impl.__path__): +# modname = impl.__name__ + "." + modname +# logger.debug("Found onem2m serializer module %s (is a package: %s)" % +# (modname, ispkg)) +# try: +# __import__(modname) +# except: +# logger.error("Failed to import serializer %s", modname) +# raise +# del _importer +# del modname +# del ispkg +# +# del impl +# del pkgutil +# del logger + diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/serializer/__init__.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/__init__.pyc new file mode 100644 index 0000000..77c0c63 Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/__init__.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/serializer/base.py b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/base.py new file mode 100644 index 0000000..c61689a --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/base.py @@ -0,0 +1,202 @@ +from abc import ABCMeta, abstractmethod +from datetime import datetime +from re import compile as re_compile + +from futile.logging import LoggerMixin +from openmtc_onem2m.exc import CSESyntaxError, CSEBadRequest, CSEValueError +from openmtc_onem2m.model import (get_onem2m_type, ContentInstance, + ResourceTypeE, Notification, + get_onem2m_resource_type, + get_short_attribute_name, + get_short_member_name, get_long_member_name, + get_short_resource_name, + get_long_attribute_name, + OneM2MEntity, OneM2MResource, Container, + get_long_resource_name, OneM2MContentResource, + URIList, OneM2MIntEnum, SecurityInfo) + +_typename_matcher = re_compile(r'^m2m:([a-z]+)$') + + +def get_typename(tn): + return _typename_matcher.findall(tn).pop() + + +class OneM2MSerializer(LoggerMixin): + __metaclass__ = ABCMeta + + @abstractmethod + def encode_resource(self, resource, response, pretty=False, + encoding="utf-8", fields=None): + raise NotImplementedError() + + @abstractmethod + def decode_resource_values(self, s): + pass + + def decode(self, s): + resource_type, data = self.decode_resource_values(s) + if issubclass(resource_type, OneM2MContentResource): + return resource_type(data) + child_resource = data.pop("childResource", None) + if child_resource: + try: + def map_child_resource(v): + res_type = ResourceTypeE(v["type"]) + res_cls = get_onem2m_resource_type(res_type.name) + return res_cls(v["name"], resourceID=v["value"], resourceType=res_type) + child_resource = map(map_child_resource, child_resource) + except (TypeError, AttributeError, KeyError, ValueError): + raise CSEValueError("Invalid entry in child resources: %s", + child_resource) + if resource_type is Notification and "notificationEvent" in data: + representation = data["notificationEvent"]["representation"] + representation = 
self.decode(self.dumps(representation)) + data["notificationEvent"]["representation"] = representation + resource = resource_type(**data) + if child_resource: + resource.childResource = child_resource + return resource + + +class OneM2MDictSerializer(OneM2MSerializer): + def encode_resource(self, resource, pretty=False, path=None, encoding="utf-8", fields=None, + encapsulated=False): + representation = resource.values + + self.logger.debug("Encoding representation: %s", representation) + + if isinstance(resource, Notification): + # handle notifications + try: + event = representation["notificationEvent"] + if event: + e = event.values + e['representation'] = self.encode_resource( + event.representation, pretty, path, encoding, fields, True + ) + representation["notificationEvent"] = { + get_short_attribute_name(k) or get_short_member_name(k): v + for k, v in e.iteritems() + } + except (AttributeError, KeyError): + self.logger.exception("failed to encode notify") + + def make_val(val_path, resource_id): + try: + if val_path: + val_path += '/' if not val_path.endswith('/') else '' + except AttributeError: + val_path = '' + + if resource_id.startswith(val_path): + return resource_id + return val_path + resource_id + + if isinstance(resource, OneM2MResource): + + def get_child_rep(c): + return { + "val": make_val(path, c.resourceID), + "nm": c.basename, + "typ": c.resourceType + } + representation["childResource"] = map(get_child_rep, representation["childResource"]) + + if isinstance(resource, URIList): + representation = [make_val(path, x) for x in representation] + + if isinstance(resource, Container): + if isinstance(resource.latest, ContentInstance): + representation['latest'] = resource.latest.resourceID + if isinstance(resource.oldest, ContentInstance): + representation['oldest'] = resource.oldest.resourceID + + # cleans representation + def clean_representation(o): + try: + # removes empty attributes + empty_keys = [] + for k, v in o.items(): + if v is None: + empty_keys.append(k) + elif isinstance(v, OneM2MEntity): + o[k] = self.encode_resource(v, pretty, path, encoding, fields) + elif isinstance(v, list): + + def encode_list_item(item): + if isinstance(item, OneM2MEntity): + return self.encode_resource(item, pretty, path, encoding, fields) + return item + o[k] = map(encode_list_item, v) + else: + try: + if len(v) == 0: + empty_keys.append(k) + except TypeError: + pass + + for k in empty_keys: + del o[k] + + for k, v in o.items(): + if not isinstance(v, (unicode, str, bool, datetime, + OneM2MIntEnum)): + clean_representation(v) + except AttributeError: + if isinstance(o, list): + for p in o: + clean_representation(p) + + if not isinstance(resource, OneM2MContentResource): + representation = { + get_short_resource_name(k) or get_short_attribute_name(k) or + get_short_member_name(k): v for + k, v in representation.items()} + + clean_representation(representation) + + if not isinstance(resource, (OneM2MResource, Notification, + SecurityInfo, OneM2MContentResource)): + return representation + + typename = 'm2m:' + (get_short_resource_name(resource.typename) or + get_short_member_name(resource.typename)) + + if encapsulated: + return {typename: representation} + + if pretty: + return self.pretty_dumps({typename: representation}) + + return self.dumps({typename: representation}) + + def _handle_partial_addressing(self, resource, pretty): + for k, v in resource.iteritems(): + if k in ('latest', 'oldest') and isinstance(v, ContentInstance): + resource[k] = v.resourceID + if pretty: + return 
self.pretty_dumps(resource) + return self.dumps(resource) + + def decode_resource_values(self, s): + + def convert_to_long_keys(d): + return {get_long_resource_name(k) or get_long_attribute_name(k) or + get_long_member_name(k) or k: v for k, v in d.iteritems()} + + try: + if hasattr(s, "read"): + data = self.load(s, object_hook=convert_to_long_keys) + else: + data = self.loads(s, object_hook=convert_to_long_keys) + except (ValueError, TypeError) as exc: + raise CSEBadRequest("Failed to parse input: %s" % (exc, )) + + self.logger.debug("Read data: %s", data) + + try: + typename, data = data.items()[0] + return get_onem2m_type(get_typename(typename)), data + except (AttributeError, IndexError, TypeError): + raise CSESyntaxError("Not a valid resource representation") diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/serializer/base.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/base.pyc new file mode 100644 index 0000000..7a56f03 Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/base.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/serializer/impl/__init__.py b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/impl/__init__.py new file mode 100644 index 0000000..de40ea7 --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/impl/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/serializer/json/__init__.py b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/json/__init__.py new file mode 100644 index 0000000..8c7076a --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/json/__init__.py @@ -0,0 +1,62 @@ +from openmtc_onem2m.serializer.base import OneM2MDictSerializer +from json import JSONEncoder +from futile.logging import get_logger +from datetime import datetime +from openmtc_onem2m.model import ContentInstance + +logger = get_logger(__name__) + +# rst: ujson and yajl are not supporting object_hooks, but conversion is needed +# rst: some measurements are necessary what is better +# try: +# from ujson import load, loads +# logger.debug("using ujson for decoding JSON") +# except ImportError: +# try: +# from yajl import load, loads +# logger.debug("using yajl for decoding JSON") +# except ImportError: +try: + # simplejson is faster on decoding, tiny bit slower on encoding + from simplejson import load, loads + logger.debug("using simplejson for decoding JSON") +except ImportError: + logger.debug("using builtin json for decoding JSON") + from json import load, loads + + +del logger + + +def _default(x): + if isinstance(x, datetime): + try: + isoformat = x.isoformat + except AttributeError: + raise TypeError("%s (%s)" % (x, type(x))) + + return isoformat() + elif isinstance(x, ContentInstance): + return x.resourceID + else: + try: # handle model classes + return x.values + except AttributeError: + raise TypeError("%s (%s)" % (x, type(x))) + + +_simple_encoder = JSONEncoder(check_circular=False, separators=(',', ':'), + default=_default) + +_pretty_encoder = JSONEncoder(default=_default, indent=2, + separators=(',', ':'), + check_circular=False) + + +class OneM2MJsonSerializer(OneM2MDictSerializer): + def __init__(self, *args, **kw): + + self.loads = loads + self.load = load + self.dumps = _simple_encoder.encode + self.pretty_dumps = _pretty_encoder.encode diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/serializer/json/__init__.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/json/__init__.pyc new 
file mode 100644 index 0000000..04e3137 Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/json/__init__.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/serializer/util.py b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/util.py new file mode 100644 index 0000000..c9a1f6e --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/util.py @@ -0,0 +1,38 @@ +from futile.logging import get_logger +from openmtc_onem2m.exc import CSEValueError +from openmtc_onem2m.serializer import get_onem2m_encoder, get_onem2m_decoder + +logger = get_logger(__name__) + + +def decode_onem2m_content(content, content_type): + if content == "": + content = None + if content_type and content is not None: + serializer = get_onem2m_decoder(content_type) + try: + data = serializer.decode(content) + except CSEValueError as e: + logger.exception("Error reading input") + raise e + + return data + return None + + +def encode_onem2m_content(content, content_type, pretty=False, path=None, + fields=None): + logger.debug("Encoding result: %s - %s", content, content_type) + + if content is None: + return None, None + + fields = fields # TODO(rst): maybe necessary + # fields = ["resourceID"] + + serializer = get_onem2m_encoder(content_type) + + data = serializer.encode_resource(content, pretty=pretty, path=path, + fields=fields) + + return content_type + "; charset=utf-8", data diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/serializer/util.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/util.pyc new file mode 100644 index 0000000..fcadce2 Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/serializer/util.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/transport.py b/common/openmtc-onem2m/src/openmtc_onem2m/transport.py new file mode 100644 index 0000000..4019e6e --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/transport.py @@ -0,0 +1,444 @@ +import random +import string + +from enum import Enum, unique + +from futile.logging import get_logger +from openmtc.model import StrEnum +from openmtc_onem2m.exc import OneM2MError + + +@unique +class RequestMethod(Enum): + create = "create" + retrieve = "retrieve" + update = "update" + delete = "delete" + notify = "notify" + execute = "execute" + observe = "observe" + + +_logger = get_logger(__name__) + + +class MetaInformation(object): + def __init__(self, ri=None, ot=None, rqet=None, rset=None, rt=None, rd=None, + rc=None, rp=None, oet=None, ls=None, ec=None, da=None, + gid=None, role=None): + """Meta info about request, contains: + ri (Request Identifier), + ot (optional originating timestamp), + rqet (optional request expiration timestamp), + rset (optional result expiration timestamp), + rt (optional response type), + rd (optional result destination), + rc (optional result content), + rp (optional response persistence), + oet (optional operational execution time), + ls (optional lifespan), + ec (optional event category), + da (optional delivery aggregation), + gid (optional group request identifier) + role () + """ + + @property + def ri(self): + return self.identifier + + @ri.setter + def ri(self, ri): + self.identifier = ri + + @property + def ot(self): + return self.originating_timestamp + + @ot.setter + def ot(self, ot): + self.originating_timestamp = ot + + @property + def rqet(self): + return self.request_expiration_timestamp + + @rqet.setter + def rqet(self, rqet): + self.request_expiration_timestamp = rqet + + @property + def rset(self): + return 
self.result_expiration_timestamp + + @rset.setter + def rset(self, rset): + self.result_expiration_timestamp = rset + + @property + def rt(self): + return self.response_type + + @rt.setter + def rt(self, rt): + self.response_type = rt + + @property + def rd(self): + return self.result_destination + + @rd.setter + def rd(self, rd): + self.result_destination = rd + + @property + def rc(self): + return self.result_content + + @rc.setter + def rc(self, rc): + self.result_content = rc + + @property + def rp(self): + return self.response_persistence + + @rp.setter + def rp(self, rp): + self.response_persistence = rp + + @property + def oet(self): + return self.operational_execution_time + + @oet.setter + def oet(self, oet): + self.operational_execution_time = oet + + @property + def ec(self): + return self.event_category + + @ec.setter + def ec(self, ec): + self.event_category = ec + + @property + def ls(self): + return self.lifespan + + @ls.setter + def ls(self, ls): + self.lifespan = ls + + @property + def da(self): + return self.delivery_aggregation + + @da.setter + def da(self, da): + self.delivery_aggregation = da + + @property + def gid(self): + return self.group_request_identifier + + @gid.setter + def gid(self, gid): + self.group_request_identifier = gid + + @property + def ro(self): + return self.role + + @ro.setter + def ro(self, ro): + self.role = ro + + def __str__(self): + s = '' + for k in self.__dict__: + if getattr(self, k): + s = s + ' | mi.' + str(k) + ': ' + str(self.__dict__[k]) + return s + + +MI = MetaInformation + + +class AdditionalInformation(object): + def __init__(self, cs=None, ra=None): + """Optional additional information about the request, contains: + cs (optional, status codes), + ra (optional, address for the temporary storage of end node Responses) + """ + self.cs = cs + self.ra = ra + + def __str__(self): + s = '' + for k in self.__dict__: + if getattr(self, k): + s = s + ' | ai.' + str(k) + ': ' + str(self.__dict__[k]) + return s + + +AI = AdditionalInformation + + +class OneM2MOperation(StrEnum): + create = "create" + retrieve = "retrieve" + update = "update" + delete = "delete" + notify = "notify" + + +class OneM2MRequest(object): + internal = False + cascading = False + + """Class representing a OneM2M request""" + + def __init__(self, op, to, fr=None, rqi=None, ty=None, pc=None, rol=None, + ot=None, rqet=None, rset=None, oet=None, rt=None, rp=None, + rcn=None, ec=None, da=None, gid=None, filter_criteria=None, + drt=None): + # Operation + self.operation = op + # Target uri + self.to = to + # Originator ID + self.originator = fr # original long name is from + self.request_identifier = rqi or ''.join(random.sample(string.letters + string.digits, 16)) + # Type of a created resource + self.resource_type = ty + # Resource content to be transferred. 
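+ # (the oneM2M "pc"/primitive content parameter; typically a resource instance or its serialized representation)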
+ self.content = pc + self.role = rol + self.originating_timestamp = ot + self.request_expiration_timestamp = rqet + self.result_expiration_timestamp = rset + self.operation_execution_time = oet + self.response_type = rt + self.result_persistence = rp + self.result_content = rcn + self.event_category = ec + self.delivery_aggregation = da + self.group_request_identifier = gid + self.filter_criteria = filter_criteria + # Optional Discovery result type + self.discovery_result_type = drt + + @property + def op(self): + return self.operation + + @op.setter + def op(self, op): + self.operation = op + + @property + def fr(self): + return self.originator + + @fr.setter + def fr(self, fr): + self.originator = fr + + @property + def rqi(self): + return self.request_identifier + + @rqi.setter + def rqi(self, rqi): + self.request_identifier = rqi + + @property + def ty(self): + return self.resource_type + + @ty.setter + def ty(self, ty): + self.resource_type = ty + + @property + def pc(self): + return self.content + + @pc.setter + def pc(self, pc): + self.content = pc + + @property + def rol(self): + return self.role + + @rol.setter + def rol(self, rol): + self.role = rol + + @property + def ot(self): + return self.originating_timestamp + + @ot.setter + def ot(self, ot): + self.originating_timestamp = ot + + @property + def rqet(self): + return self.request_expiration_timestamp + + @rqet.setter + def rqet(self, rqet): + self.request_expiration_timestamp = rqet + + @property + def rset(self): + return self.result_expiration_timestamp + + @rset.setter + def rset(self, rset): + self.result_expiration_timestamp = rset + + @property + def oet(self): + return self.operation_execution_time + + @oet.setter + def oet(self, oet): + self.operation_execution_time = oet + + @property + def rt(self): + return self.response_type + + @rt.setter + def rt(self, rt): + self.response_type = rt + + @property + def rp(self): + return self.result_persistence + + @rp.setter + def rp(self, rp): + self.result_persistence = rp + + @property + def rcn(self): + return self.result_content + + @rcn.setter + def rcn(self, rcn): + self.result_content = rcn + + @property + def ec(self): + return self.event_category + + @ec.setter + def ec(self, ec): + self.event_category = ec + + @property + def da(self): + return self.delivery_aggregation + + @da.setter + def da(self, da): + self.delivery_aggregation = da + + @property + def gid(self): + return self.group_request_identifier + + @gid.setter + def gid(self, gid): + self.group_request_identifier = gid + + @property + def fc(self): + return self.filter_criteria + + @fc.setter + def fc(self, fc): + self.filter_criteria = fc + + @property + def drt(self): + return self.discovery_result_type + + @drt.setter + def drt(self, drt): + self.discovery_result_type = drt + + def __str__(self): + return '%s: %s' % (self.__class__.__name__, ' | '.join([ + '%s: %s' % (str(k), str(v)) for k, v in self.__dict__.iteritems() + ])) + + +class OneM2MResponse(object): + """Class representing a OneM2M response""" + + def __init__(self, status_code, request=None, rqi=None, pc=None, to=None, + fr=None, rsc=None): + # Operation result + self.response_status_code = status_code + if request: + self.request_identifier = request.rqi + # Target uri + self.to = request.to + # Originator ID + self.originator = request.fr + else: + self.request_identifier = rqi + # Target uri + self.to = to + # Originator ID + self.originator = fr + # Resource content to be transferred. 
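+ # (primitive content carried in the response, if any; also reachable through the pc property below)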
+ self.content = pc + + @property + def status_code(self): + return self.response_status_code.http_status_code + + @property + def rsc(self): + return self.response_status_code.numeric_code + + @property + def rqi(self): + return self.request_identifier + + @rqi.setter + def rqi(self, rqi): + self.request_identifier = rqi + + @property + def pc(self): + return self.content + + @pc.setter + def pc(self, pc): + self.content = pc + + @property + def fr(self): + return self.originator + + @fr.setter + def fr(self, fr): + self.originator = fr + + def __str__(self): + return '%s: %s' % (self.__class__.__name__, ' | '.join([ + '%s: %s' % (str(k), str(v)) for k, v in self.__dict__.iteritems() + ])) + + +class OneM2MErrorResponse(OneM2MResponse, OneM2MError): + pass diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/transport.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/transport.pyc new file mode 100644 index 0000000..3d8c45d Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/transport.pyc differ diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/util.py b/common/openmtc-onem2m/src/openmtc_onem2m/util.py new file mode 100644 index 0000000..a530fa7 --- /dev/null +++ b/common/openmtc-onem2m/src/openmtc_onem2m/util.py @@ -0,0 +1,37 @@ +from re import compile as re_compile + + +def _get_regex_path_component(): + # see http://tools.ietf.org/html/rfc3986#section-3.3 + # path-abempty = *( "/" segment ) + # segment = *pchar + # pchar = unreserved / pct-encoded / sub-delims / ":" / "@" + # pct-encoded = "%" HEXDIG HEXDIG + # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" + # sub-delims = "!" / "$" / "&" / """ / "(" / ")" / "*" / "+" / "," / ";" / + # "=" + + unreserved = r"[\w\.\-~]" + pct_encoded = "%[A-Fa-f0-9][A-Fa-f0-9]" + sub_delims = r"[!$&'()\*\+,;=]" + + pchar = "(?:" + unreserved + "|" + pct_encoded + "|" + sub_delims + "|:|@)" + segment = pchar + "+" + + return segment + + +_sp_id = r'(//%s)?' % _get_regex_path_component() +_cse_id = r'(/%s)?' % _get_regex_path_component() +_path_suffix = r'(?:/?(%s(?:/%s)*))?' 
% (_get_regex_path_component(), _get_regex_path_component()) + +_onem2m_address_splitter = re_compile(r'^%s%s%s' % (_sp_id, _cse_id, _path_suffix)) + + +def split_onem2m_address(onem2m_address): + """ + + :param str onem2m_address: + :return: sp_id, cse_id, cse-relative rest + """ + return _onem2m_address_splitter.findall(onem2m_address).pop() diff --git a/common/openmtc-onem2m/src/openmtc_onem2m/util.pyc b/common/openmtc-onem2m/src/openmtc_onem2m/util.pyc new file mode 100644 index 0000000..83fb1eb Binary files /dev/null and b/common/openmtc-onem2m/src/openmtc_onem2m/util.pyc differ diff --git a/common/openmtc/lib/aplus/__init__.py b/common/openmtc/lib/aplus/__init__.py new file mode 100644 index 0000000..01a2ba8 --- /dev/null +++ b/common/openmtc/lib/aplus/__init__.py @@ -0,0 +1,456 @@ +import sys +from logging import DEBUG +from threading import Thread +from traceback import print_stack + +from futile.logging import LoggerMixin +from openmtc.exc import OpenMTCError + +if sys.subversion[0] != "CPython": + from inspect import ismethod, getargspec + +# TODO: kca: can't pass in values for then/error currently + + +def log_error(error): + if isinstance(error, OpenMTCError): + return False + return True + + +class Promise(LoggerMixin): + """ + This is a class that attempts to comply with the + Promises/A+ specification and test suite: + + http://promises-aplus.github.io/promises-spec/ + """ + + __slots__ = ("_state", "value", "reason", + "_callbacks", "_errbacks", "name") + + # These are the potential states of a promise + PENDING = -1 + REJECTED = 0 + FULFILLED = 1 + + def __init__(self, name=None): + """ + Initialize the Promise into a pending state. + """ + self._state = self.PENDING + self.value = None + self.reason = None + self._callbacks = [] + self._errbacks = [] + self.name = name + + def _fulfill(self, value): + """ + Fulfill the promise with a given value. + """ + + assert self._state == self.PENDING, "Promise state is not pending" + + self._state = self.FULFILLED + self.value = value + for callback in self._callbacks: + try: + callback(value) + except Exception: + # Ignore errors in callbacks + self.logger.exception("Error in callback %s", callback) + # We will never call these callbacks again, so allow + # them to be garbage collected. This is important since + # they probably include closures which are binding variables + # that might otherwise be garbage collected. + self._callbacks = [] + self._errbacks = [] + + def fulfill(self, value): + self._fulfill(value) + return self + + def _reject(self, reason, bubbling=False): + """ + Reject this promise for a given reason. + """ + + assert self._state == self.PENDING, "Promise state is not pending" + + if not bubbling and log_error(reason): + exc_info = sys.exc_info() + self.logger.debug("Promise (%s) rejected: %s", self.name, reason, + exc_info=exc_info[0] and exc_info or None) + self.logger.debug(self._errbacks) + if self.logger.isEnabledFor(DEBUG): + print_stack() + else: + pass + + self._state = self.REJECTED + self.reason = reason + for errback in self._errbacks: + try: + errback(reason) + except Exception: + self.logger.exception("Error in errback %s", errback) + # Ignore errors in callbacks + + # We will never call these errbacks again, so allow + # them to be garbage collected. This is important since + # they probably include closures which are binding variables + # that might otherwise be garbage collected. 
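+ # (the fulfilment callbacks are dropped as well; a rejected promise never fires them)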
+ self._errbacks = [] + self._callbacks = [] + + def reject(self, reason): + self._reject(reason) + return self + + def isPending(self): + """Indicate whether the Promise is still pending.""" + return self._state == self.PENDING + + def isFulfilled(self): + """Indicate whether the Promise has been fulfilled.""" + return self._state == self.FULFILLED + + def isRejected(self): + """Indicate whether the Promise has been rejected.""" + return self._state == self.REJECTED + + def get(self, timeout=None): + """Get the value of the promise, waiting if necessary.""" + self.wait(timeout) + if self._state == self.FULFILLED: + return self.value + raise self.reason + + def wait(self, timeout=None): + """ + An implementation of the wait method which doesn't involve + polling but instead utilizes a "real" synchronization + scheme. + """ + import threading + + if self._state != self.PENDING: + return + + e = threading.Event() + self.addCallback(lambda v: e.set()) + self.addErrback(lambda r: e.set()) + e.wait(timeout) + + def addCallback(self, f): + """ + Add a callback for when this promise is fulfilled. Note that + if you intend to use the value of the promise somehow in + the callback, it is more convenient to use the 'then' method. + """ + self._callbacks.append(f) + + def addErrback(self, f): + """ + Add a callback for when this promise is rejected. Note that + if you intend to use the rejection reason of the promise + somehow in the callback, it is more convenient to use + the 'then' method. + """ + self._errbacks.append(f) + + if sys.subversion[0] != "CPython": + def _invoke(self, func, value): + try: + if value is None: + args, _, _, _ = getargspec(func) + arglen = len(args) + if not arglen or (arglen == 1 and ismethod(func)): + return func() + + return func(value) + except Exception as e: + if log_error(e): + self.logger.exception("Error in handler %s", func) + else: + self.logger.debug("Error in handler %s: %s", func, e) + raise + else: + def _invoke(self, func, value): + try: + if value is None: + try: + target = func.im_func + except AttributeError: + argcount = func.func_code.co_argcount + else: + argcount = target.func_code.co_argcount - 1 + + if argcount == 0: + return func() + + return func(value) + except Exception as e: + if log_error(e): + self.logger.exception("Error in handler %s", func) + else: + self.logger.debug("Error in handler %s: %s", func, repr(e)) + raise + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + if self.isPending(): + if exc_value is not None: + if log_error(exc_value): + self.logger.exception("Promise automatically rejected") + self._reject(exc_value, bubbling=True) + return True + else: + self.fulfill(None) + + def then(self, success=None, failure=None, name=None): + """ + This method takes two optional arguments. The first argument + is used if the "self promise" is fulfilled and the other is + used if the "self promise" is rejected. In either case, this + method returns another promise that effectively represents + the result of either the first of the second argument (in the + case that the "self promise" is fulfilled or rejected, + respectively). + + Each argument can be either: + * None - Meaning no action is taken + * A function - which will be called with either the value + of the "self promise" or the reason for rejection of + the "self promise". The function may return: + * A value - which will be used to fulfill the promise + returned by this method. 
+ * A promise - which, when fulfilled or rejected, will + cascade its value or reason to the promise returned + by this method. + * A value - which will be assigned as either the value + or the reason for the promise returned by this method + when the "self promise" is either fulfilled or rejected, + respectively. + """ + + if name is None: + try: + name = success.__name__ + except AttributeError: + name = str(success) + + ret = Promise(name=name) + + state = self._state + if state == self.PENDING: + """ + If this is still pending, then add callbacks to the + existing promise that call either the success or + rejected functions supplied and then fulfill the + promise being returned by this method + """ + + def callAndFulfill(v): + """ + A callback to be invoked if the "self promise" + is fulfilled. + """ + try: + # From 3.2.1, don't call non-functions values + if callable(success): + newvalue = self._invoke(success, v) + if _isPromise(newvalue): + newvalue.then(ret._fulfill, + ret._reject) + else: + ret._fulfill(newvalue) + else: + # From 3.2.6.4 + ret._fulfill(v) + except Exception as e: + ret._reject(e) + + def callAndReject(r): + """ + A callback to be invoked if the "self promise" + is rejected. + """ + try: + if callable(failure): + newvalue = failure(r) + if _isPromise(newvalue): + newvalue.then(ret._fulfill, + ret._reject) + else: + ret._fulfill(newvalue) + else: + # From 3.2.6.5 + ret._reject(r) + except Exception as e: + ret._reject(e) + + self._callbacks.append(callAndFulfill) + self._errbacks.append(callAndReject) + + elif state == self.FULFILLED: + # If this promise was already fulfilled, then + # we need to use the first argument to this method + # to determine the value to use in fulfilling the + # promise that we return from this method. + try: + if callable(success): + newvalue = self._invoke(success, self.value) + if _isPromise(newvalue): + newvalue.then(ret._fulfill, + lambda r: ret._reject(r, bubbling=True)) + else: + ret._fulfill(newvalue) + else: + # From 3.2.6.4 + ret._fulfill(self.value) + except Exception as e: + ret._reject(e) + else: + # If this promise was already rejected, then + # we need to use the second argument to this method + # to determine the value to use in fulfilling the + # promise that we return from this method. + try: + if callable(failure): + newvalue = self._invoke(failure, self.reason) + if _isPromise(newvalue): + newvalue.then(ret._fulfill, + ret._reject) + else: + ret._fulfill(newvalue) + else: + # From 3.2.6.5 + ret._reject(self.reason, bubbling=True) + except Exception as e: + ret._reject(e) + + return ret + + +def _isPromise(obj): + """ + A utility function to determine if the specified + object is a promise using "duck typing". + """ + if isinstance(obj, Promise): + return True + + try: + return callable(obj.fulfill) and callable(obj.reject) and\ + callable(obj.then) + except AttributeError: + return False + + +def listPromise(*args): + """ + A special function that takes a bunch of promises + and turns them into a promise for a vector of values. + In other words, this turns an list of promises for values + into a promise for a list of values. 
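+
+ Illustrative sketch (editor's addition, not part of the original commit),
+ assuming only the Promise API defined above:
+
+ p1, p2 = Promise(), Promise()
+ combined = listPromise(p1, p2)
+ p1.fulfill(1)
+ p2.fulfill(2)
+ assert combined.get() == [1, 2]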
+ """ + ret = Promise() + + def handleSuccess(v, ret): + for arg in args: + if not arg.isFulfilled(): + return + + value = map(lambda p: p.value, args) + ret._fulfill(value) + + for arg in args: + arg.addCallback(lambda v: handleSuccess(v, ret)) + arg.addErrback(lambda r: ret.reject(r)) + + # Check to see if all the promises are already fulfilled + handleSuccess(None, ret) + + return ret + + +def dictPromise(m): + """ + A special function that takes a dictionary of promises + and turns them into a promise for a dictionary of values. + In other words, this turns an dictionary of promises for values + into a promise for a dictionary of values. + """ + ret = Promise() + + def handleSuccess(v, ret): + for p in m.values(): + if not p.isFulfilled(): + return + + value = {} + for k in m: + value[k] = m[k].value + ret.fulfill(value) + + for p in m.values(): + p.addCallback(lambda v: handleSuccess(v, ret)) + p.addErrback(lambda r: ret.reject(r)) + + # Check to see if all the promises are already fulfilled + handleSuccess(None, ret) + + return ret + + +class BackgroundThread(Thread): + def __init__(self, promise, func): + self.promise = promise + self.func = func + Thread.__init__(self) + + def run(self): + try: + val = self.func() + self.promise.fulfill(val) + except Exception as e: + self.promise.reject(e) + + +def background(f): + p = Promise() + t = BackgroundThread(p, f) + t.start() + return p + + +def spawn(f): + from gevent import spawn + + p = Promise() + + def process(): + try: + val = f() + p.fulfill(val) + except Exception as e: + p.reject(e) + + spawn(process) + return p + + +def FulfilledPromise(result): + p = Promise() + p.fulfill(result) + return p + + +def RejectedPromise(error): + p = Promise() + p.reject(error) + return p diff --git a/common/openmtc/lib/aplus/__init__.pyc b/common/openmtc/lib/aplus/__init__.pyc new file mode 100644 index 0000000..36ea96c Binary files /dev/null and b/common/openmtc/lib/aplus/__init__.pyc differ diff --git a/common/openmtc/lib/openmtc_sdk.egg-info/PKG-INFO b/common/openmtc/lib/openmtc_sdk.egg-info/PKG-INFO new file mode 100644 index 0000000..d9abe54 --- /dev/null +++ b/common/openmtc/lib/openmtc_sdk.egg-info/PKG-INFO @@ -0,0 +1,24 @@ +Metadata-Version: 1.1 +Name: openmtc-sdk +Version: 4.99.0 +Summary: The OpenMTC Python SDK +Home-page: http://www.openmtc.org +Author: Konrad Campowsky +Author-email: konrad.campowsky@fraunhofer.fokus.de +License: Fraunhofer FOKUS proprietary +Description: UNKNOWN +Platform: UNKNOWN +Requires: urllib3 +Requires: gevent (>=1.0) +Requires: iso8601 (>=0.1.5) +Requires: werkzeug (>=0.9) +Requires: blist +Requires: simplejson +Requires: ujson +Requires: python_socketio +Requires: gevent_websocket +Requires: flask +Requires: pyxb (==1.2.3) +Requires: enum34 +Requires: dtls +Requires: geventhttpclient diff --git a/common/openmtc/lib/openmtc_sdk.egg-info/SOURCES.txt b/common/openmtc/lib/openmtc_sdk.egg-info/SOURCES.txt new file mode 100644 index 0000000..55ed653 --- /dev/null +++ b/common/openmtc/lib/openmtc_sdk.egg-info/SOURCES.txt @@ -0,0 +1,96 @@ +MANIFEST.in +setup-sdk.py +utils.py +common/openmtc-onem2m/src/openmtc_onem2m/__init__.py +common/openmtc-onem2m/src/openmtc_onem2m/exc.py +common/openmtc-onem2m/src/openmtc_onem2m/model.py +common/openmtc-onem2m/src/openmtc_onem2m/transport.py +common/openmtc-onem2m/src/openmtc_onem2m/util.py +common/openmtc-onem2m/src/openmtc_onem2m/client/__init__.py +common/openmtc-onem2m/src/openmtc_onem2m/client/http.py +common/openmtc-onem2m/src/openmtc_onem2m/client/mqtt.py 
+common/openmtc-onem2m/src/openmtc_onem2m/mapper/__init__.py +common/openmtc-onem2m/src/openmtc_onem2m/serializer/__init__.py +common/openmtc-onem2m/src/openmtc_onem2m/serializer/base.py +common/openmtc-onem2m/src/openmtc_onem2m/serializer/util.py +common/openmtc-onem2m/src/openmtc_onem2m/serializer/impl/__init__.py +common/openmtc-onem2m/src/openmtc_onem2m/serializer/json/__init__.py +common/openmtc/lib/pyio.py +common/openmtc/lib/aplus/__init__.py +common/openmtc/lib/openmtc_sdk.egg-info/PKG-INFO +common/openmtc/lib/openmtc_sdk.egg-info/SOURCES.txt +common/openmtc/lib/openmtc_sdk.egg-info/dependency_links.txt +common/openmtc/lib/openmtc_sdk.egg-info/requires.txt +common/openmtc/lib/openmtc_sdk.egg-info/top_level.txt +common/openmtc/src/openmtc/__init__.py +common/openmtc/src/openmtc/configuration.py +common/openmtc/src/openmtc/exc.py +common/openmtc/src/openmtc/util.py +common/openmtc/src/openmtc/version.py +common/openmtc/src/openmtc/mapper/__init__.py +common/openmtc/src/openmtc/mapper/exc.py +common/openmtc/src/openmtc/model/__init__.py +common/openmtc/src/openmtc/model/exc.py +futile/src/futile/__init__.py +futile/src/futile/abchelper.py +futile/src/futile/basictypes.py +futile/src/futile/contextlib.py +futile/src/futile/etree.py +futile/src/futile/exc.py +futile/src/futile/singleton.py +futile/src/futile/StringIO/__init__.py +futile/src/futile/caching/__init__.py +futile/src/futile/collections/OrderedSet.py +futile/src/futile/collections/__init__.py +futile/src/futile/collections/ordereddict.py +futile/src/futile/collections/sortedlist.py +futile/src/futile/logging/__init__.py +futile/src/futile/logging/handlers.py +futile/src/futile/logging/logbook.py +futile/src/futile/logging/logtap.py +futile/src/futile/multiprocess/RWLock.py +futile/src/futile/multiprocess/__init__.py +futile/src/futile/net/PortTester.py +futile/src/futile/net/__init__.py +futile/src/futile/net/exc.py +futile/src/futile/net/sockethelper.py +futile/src/futile/net/wsgi.py +futile/src/futile/net/xmlrpc.py +futile/src/futile/net/http/__init__.py +futile/src/futile/net/http/exc.py +futile/src/futile/net/http/client/ConnectionPoolManager.py +futile/src/futile/net/http/client/RestClient.py +futile/src/futile/net/http/client/RestClientAsync.py +futile/src/futile/net/http/client/SimpleConnectionManager.py +futile/src/futile/net/http/client/__init__.py +futile/src/futile/net/http/server/__init__.py +futile/src/futile/net/http/server/ssl/__init__.py +futile/src/futile/net/http/server/wsgi/__init__.py +futile/src/futile/net/http/server/wsgi/ssl.py +futile/src/futile/operator/__init__.py +futile/src/futile/os/__init__.py +futile/src/futile/os/mount.py +futile/src/futile/path/__init__.py +futile/src/futile/profile/__init__.py +futile/src/futile/serializer/__init__.py +futile/src/futile/serializer/exc.py +futile/src/futile/serializer/xml.py +futile/src/futile/signal/__init__.py +futile/src/futile/signal/timeout.py +futile/src/futile/string/__init__.py +futile/src/futile/subprocess/__init__.py +futile/src/futile/subprocess/daemon.py +futile/src/futile/tempfile/__init__.py +futile/src/futile/threading/RWLock.py +futile/src/futile/threading/__init__.py +futile/src/futile/threading/synchronized.py +futile/src/futile/traceback/__init__.py +futile/src/futile/types/TypeManager.py +futile/src/futile/types/__init__.py +openmtc-app/src/openmtc_app/__init__.py +openmtc-app/src/openmtc_app/exc.py +openmtc-app/src/openmtc_app/onem2m.py +openmtc-app/src/openmtc_app/util.py +openmtc-app/src/openmtc_app/flask_runner/__init__.py 
+openmtc-app/src/openmtc_app/notification/__init__.py +openmtc-app/src/openmtc_app/runner/__init__.py \ No newline at end of file diff --git a/common/openmtc/lib/openmtc_sdk.egg-info/dependency_links.txt b/common/openmtc/lib/openmtc_sdk.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/common/openmtc/lib/openmtc_sdk.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/common/openmtc/lib/openmtc_sdk.egg-info/requires.txt b/common/openmtc/lib/openmtc_sdk.egg-info/requires.txt new file mode 100644 index 0000000..87e3e37 --- /dev/null +++ b/common/openmtc/lib/openmtc_sdk.egg-info/requires.txt @@ -0,0 +1,14 @@ +urllib3 +gevent >= 1.0 +iso8601 >= 0.1.5 +werkzeug >= 0.9 +blist +simplejson +ujson +python_socketio +gevent_websocket +flask +pyxb == 1.2.3 +enum34 +dtls +geventhttpclient diff --git a/common/openmtc/lib/openmtc_sdk.egg-info/top_level.txt b/common/openmtc/lib/openmtc_sdk.egg-info/top_level.txt new file mode 100644 index 0000000..b02bf4f --- /dev/null +++ b/common/openmtc/lib/openmtc_sdk.egg-info/top_level.txt @@ -0,0 +1,6 @@ +aplus +futile +openmtc +openmtc_app +openmtc_onem2m +pyio diff --git a/common/openmtc/lib/pyio.py b/common/openmtc/lib/pyio.py new file mode 100644 index 0000000..830f300 --- /dev/null +++ b/common/openmtc/lib/pyio.py @@ -0,0 +1 @@ +from io import * \ No newline at end of file diff --git a/common/openmtc/src/openmtc/__init__.py b/common/openmtc/src/openmtc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/common/openmtc/src/openmtc/__init__.pyc b/common/openmtc/src/openmtc/__init__.pyc new file mode 100644 index 0000000..3a01f89 Binary files /dev/null and b/common/openmtc/src/openmtc/__init__.pyc differ diff --git a/common/openmtc/src/openmtc/configuration.py b/common/openmtc/src/openmtc/configuration.py new file mode 100644 index 0000000..d2a768f --- /dev/null +++ b/common/openmtc/src/openmtc/configuration.py @@ -0,0 +1,178 @@ +import logging +from abc import ABCMeta, abstractmethod + +from enum import Enum + +from futile import NOT_SET, identity +from futile.logging import LoggerMixin +from openmtc.exc import OpenMTCError + + +class ConfigurationError(OpenMTCError): + pass + + +class ConfigurationKeyError(KeyError, ConfigurationError): + pass + + +class ConfigurationAttributeError(AttributeError, ConfigurationError): + pass + + +class ConfigurationValueError(ValueError, ConfigurationError): + pass + + +class ExtraOptionsStrategy(Enum): + ignore = "ignore" + warn = "warn" + prune = "prune" + fatal = "fatal" + + +class ConfigurationOption(LoggerMixin): + __metaclass__ = ABCMeta + + def __init__(self, type, default=NOT_SET, converter=identity, + *args, **kw): + super(ConfigurationOption, self).__init__(*args, **kw) + self.type = type + self.default = default + self.converter = converter + + def convert(self, v): + if v is None: + if self.default is not NOT_SET: + return self.default + raise ConfigurationValueError("Value must not be None") + + v = self._convert(v) + return self.converter(v) + + @abstractmethod + def _convert(self, v): + return v + + +class SimpleOption(ConfigurationOption): + def __init__(self, type=str, default=NOT_SET, converter=identity, + *args, **kw): + super(SimpleOption, self).__init__(type=type, default=default, + converter=converter) + + def _convert(self, v): + if isinstance(v, self.type): + return v + return self.type(v) + + +class ListOption(SimpleOption): + def __init__(self, content_type, type=list, default=NOT_SET, + converter=identity, *args, **kw): + 
super(ListOption, self).__init__(type=type, default=default, + converter=converter) + self.content_type = content_type + + def _convert(self, v): + v = super(ListOption, self)._convert(v) + return map(self._convert_content, v) + + def _convert_content(self, v): + if not isinstance(v, self.content_type): + v = self.content_type(v) + return v + + +class BooleanOption(ConfigurationOption): + def __init__(self, default=NOT_SET, converter=identity, *args, **kw): + super(BooleanOption, self).__init__(type=bool, default=default, + converter=converter) + + def _convert(self, v): + if isinstance(v, (bool, int)): + return bool(v) + if isinstance(v, basestring): + return v and v.lower() not in ("0", "no", "n", "f", "false") + raise ConfigurationValueError("Illegal value for boolean: %s" % (v, )) + + +class EnumOption(SimpleOption): + def _convert(self, v): + try: + return super(EnumOption, self)._convert(v) + except Exception as exc: + try: + return getattr(self.type, v) + except: + raise exc + + +class LowerCaseEnumOption(EnumOption): + def _convert(self, v): + try: + return super(LowerCaseEnumOption, self)._convert(v) + except Exception as exc: + try: + return getattr(self.type, v.lower()) + except: + raise exc + + +class Configuration(dict): + __options__ = {} + __name__ = "configuration" + __extra_options_strategy__ = ExtraOptionsStrategy.ignore + + def __init__(self, *args, **kw): + config = dict(*args, **kw) + options = self.__options__.copy() + + for k, v in config.copy().items(): + try: + option = options.pop(k) + except KeyError: + strategy = self.__extra_options_strategy__ + if strategy == ExtraOptionsStrategy.fatal: + raise ConfigurationError("Unknown configuration key in %s:" + " %s" % (self.__name__, k)) + if strategy == ExtraOptionsStrategy.prune: + del config[k] + elif strategy == ExtraOptionsStrategy.warn: + self.logger.warn("Unknown configuration key in %s: %s", + self.__name__, k) + else: + config[k] = option.convert(v) + + for k, v in options.items(): + if v.default is NOT_SET: + raise ConfigurationKeyError("Missing configuration key in" + " %s: %s" % + (self.__name__, k, )) + config[k] = v.default + + super(Configuration, self).__init__(config) + + def __getitem__(self, k): + try: + return dict.__getitem__(self, k) + except KeyError: + raise ConfigurationKeyError("Missing configuration key in" + " %s: %s" % + (self.__name__, k, )) + + def __getattr__(self, k, default=NOT_SET): + try: + return self[k] + except ConfigurationKeyError as exc: + if default is not NOT_SET: + return default + raise ConfigurationAttributeError(str(exc)) + + +class LogLevel(Enum): + trace = logging.DEBUG + debug = logging.DEBUG + warning = logging.WARNING + error = logging.ERROR + fatal = logging.FATAL diff --git a/common/openmtc/src/openmtc/configuration.pyc b/common/openmtc/src/openmtc/configuration.pyc new file mode 100644 index 0000000..ef53c42 Binary files /dev/null and b/common/openmtc/src/openmtc/configuration.pyc differ diff --git a/common/openmtc/src/openmtc/exc.py b/common/openmtc/src/openmtc/exc.py new file mode 100644 index 0000000..7c060bc --- /dev/null +++ b/common/openmtc/src/openmtc/exc.py @@ -0,0 +1,13 @@ +from futile.net.exc import NetworkError + + +class OpenMTCError(Exception): + pass + + +class OpenMTCNetworkError(OpenMTCError, NetworkError): + pass + + +class ConnectionFailed(OpenMTCNetworkError): + pass diff --git a/common/openmtc/src/openmtc/exc.pyc b/common/openmtc/src/openmtc/exc.pyc new file mode 100644 index 0000000..b5f83ce Binary files /dev/null and 
b/common/openmtc/src/openmtc/exc.pyc differ diff --git a/common/openmtc/src/openmtc/mapper/__init__.py b/common/openmtc/src/openmtc/mapper/__init__.py new file mode 100644 index 0000000..e370ed7 --- /dev/null +++ b/common/openmtc/src/openmtc/mapper/__init__.py @@ -0,0 +1,97 @@ +from futile.logging import LoggerMixin +from futile import ObjectProxy +from openmtc.model import Collection +from openmtc.mapper.exc import MapperError + + +class MemberProxy(ObjectProxy): + def __get__(self, instance, owner=None): + if instance is None: + return self._o + + if not instance._synced: + if not _is_attached(instance) or self.name not in instance._changes: + instance._mapper._init_resource(instance) + return self._o.__get__(instance, owner) + + def __set__(self, instance, value): + if _is_attached(instance): + instance._changes.add(self._o.name) + return self._o.__set__(instance, value) + + +class MapperCollection(Collection): + def __init__(self, name, type, parent, collection=(), *args, **kw): + super(MapperCollection, self).__init__(name=name, type=type, + parent=parent, + collection=collection, *args, + **kw) + + def _handle_newitem(self, item): + if _is_attached(item) or item.path is not None: + raise NotImplementedError() + super(MapperCollection, self)._handle_newitem(item) + self._changes.added.add(item) + if _is_attached(self.parent): + self.parent._changes.collection_changes.add(self.name) + if self.parent.parent is not None: + self.parent.parent._changes.subresource_changes.add( + self.parent.name) + + +class BasicMapper(LoggerMixin): + def __init__(self, *args, **kw): + super(BasicMapper, self).__init__(*args, **kw) + # self._patch_model() + self._send_request = lambda x: x + + def create(self, path, instance): + raise NotImplementedError() + + def update(self, instance, fields): + raise NotImplementedError() + + def _do_update(self, instance, fields): + raise NotImplementedError() + + def get(self, path): + raise NotImplementedError() + + def delete(self, instance): + raise NotImplementedError() + + def _get_data(self, path): + raise NotImplementedError() + + def _map(self, path, typename, data): + raise NotImplementedError() + + def _init_resource(self, res): + return self._fill_resource(res, self._get_data(res.path)[1]) + + def _make_subresource(self, type, path, parent): + subresource = type(path=path, parent=parent) + subresource._synced = False + # return self._attach_instance(subresource) + return subresource + + def _fill_resource(self, res, data): + raise NotImplementedError() + + @classmethod + def _patch_model(cls): + import openmtc.model as model + + model.Resource._synced = True + model.Resource._mapper = None + + for t in model.get_types(): + if "_initialized" not in t.__dict__: + setattr(t, "_initialized", True) + for a in t.__members__: + # TODO: deal with name differences + setattr(t, a.name, MemberProxy(a)) + for a in t.collections: + if a.type is not Collection: + raise NotImplementedError() + a.type = MapperCollection diff --git a/common/openmtc/src/openmtc/mapper/__init__.pyc b/common/openmtc/src/openmtc/mapper/__init__.pyc new file mode 100644 index 0000000..2ad0266 Binary files /dev/null and b/common/openmtc/src/openmtc/mapper/__init__.pyc differ diff --git a/common/openmtc/src/openmtc/mapper/exc.py b/common/openmtc/src/openmtc/mapper/exc.py new file mode 100644 index 0000000..5d5510a --- /dev/null +++ b/common/openmtc/src/openmtc/mapper/exc.py @@ -0,0 +1,11 @@ +""" +Created on 02.06.2013 + +@author: kca +""" + +from openmtc.exc import OpenMTCError + + +class 
MapperError(OpenMTCError): + pass diff --git a/common/openmtc/src/openmtc/mapper/exc.pyc b/common/openmtc/src/openmtc/mapper/exc.pyc new file mode 100644 index 0000000..315d199 Binary files /dev/null and b/common/openmtc/src/openmtc/mapper/exc.pyc differ diff --git a/common/openmtc/src/openmtc/model/__init__.py b/common/openmtc/src/openmtc/model/__init__.py new file mode 100644 index 0000000..0a55181 --- /dev/null +++ b/common/openmtc/src/openmtc/model/__init__.py @@ -0,0 +1,706 @@ +from abc import ABCMeta +from collections import Sequence, OrderedDict, Mapping +from datetime import datetime +from enum import Enum +from iso8601 import parse_date, ParseError +from operator import attrgetter + +from futile import basestring, issubclass, NOT_SET +from futile.logging import LoggerMixin +from openmtc.model.exc import ModelError, ModelTypeError + + +class StrEnum(str, Enum): + pass + + +class Collection(Sequence, Mapping): + def __init__(self, name, type, parent, collection=(), *args, **kw): + super(Collection, self).__init__(*args, **kw) + self._map = OrderedDict() + self.type = type + self.parent = parent + self.name = name + for c in collection: + self.append(c) + + def __getitem__(self, index): + if isinstance(index, (int, slice)): + return self._map.values()[index] + return self._map[index] + + def __contains__(self, v): + return v in self._map or v in self._map.values() + + def append(self, v): + if not isinstance(v, self.type): + raise ModelTypeError(v) + + self._handle_newitem(v) + + assert v.name is not None, "name is None: %s %s" % (v, v.path) + self._map[v.name] = v + + add = append + + def get(self, k, default=None): + return self._map.get(k, default) + + def __iter__(self): + return self._map.itervalues() + + def __len__(self): + return len(self._map) + + def __delitem__(self, index): + if isinstance(index, int): + instance = self[index] + index = instance.name + del self._map[index] + + discard = __delitem__ + + def _handle_newitem(self, item): + if item.parent and item.parent is not self.parent: + # TODO ! 
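+ # (an item already attached to a different parent is still added to the collection, but not re-parented)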
+ return + # raise NotImplementedError() + item.parent = self.parent + + def __str__(self): + try: + return "openmtc.Collection(%s, %s)" % ( + self.name, self._map) + except AttributeError: + return "openmtc.Collection(%s)" % (self.__len__()) + + +class Member(LoggerMixin): + def __init__(self, type=unicode, version="1.0", *args, **kw): + super(Member, self).__init__(*args, **kw) + self.type = type + self.version = version + + def _init(self, name): + self.name = name + + def __set__(self, instance, value): + if value is not None and not isinstance(value, self.type): + value = self.convert(value, instance) + self.set_value(instance, value) + + def set_value(self, instance, value): + setattr(instance, "_" + self.name, value) + + def convert(self, value, instance): + try: + return self.type(value) + except (TypeError, ValueError): + raise ModelTypeError("Illegal value for %s (%s): %r" % + (self.name, self.type, value)) + + def __repr__(self): + return '%s(name="%s", type=%s)' % (type(self).__name__, self.name, + self.type.__name__) + + +class Attribute(Member): + RW = "RW" + RO = "RO" + WO = "WO" + + def __init__(self, type=unicode, default=None, + accesstype=None, mandatory=None, + update_mandatory=None, + id_attribute=None, path_attribute=None, + id_immutable=None, *args, **kw): + super(Attribute, self).__init__(type=type, *args, **kw) + + if path_attribute and id_attribute: + raise ModelError("Attribute can't be id_attribute and " + "path_attribute at the same time") + + self.default = default + self.id_attribute = id_attribute + self.path_attribute = path_attribute + self.id_immutable = id_immutable + + if accesstype is None: + if path_attribute: + accesstype = self.RO + elif id_attribute: + accesstype = self.WO + else: + accesstype = self.RW + self.accesstype = accesstype + + if mandatory is None: + if accesstype == self.WO: + mandatory = True + else: + mandatory = False + self.mandatory = mandatory + + if update_mandatory is None: + if accesstype == self.RW: + update_mandatory = mandatory + else: + update_mandatory = False + self.update_mandatory = update_mandatory + + def __get__(self, instance, owner=None): + if instance is None: + return self + try: + return getattr(instance, "_" + self.name) + except AttributeError: + return self.default + + +try: + unicode + + class UnicodeAttribute(Attribute): + def __init__(self, default=None, accesstype=None, + mandatory=False, *args, **kw): + super(UnicodeAttribute, self).__init__(type=unicode, + default=default, + accesstype=accesstype, + mandatory=mandatory, *args, + **kw) + + def convert(self, value, instance): + if isinstance(value, str): + return value.decode("utf-8") + return super(UnicodeAttribute, self).convert(value, instance) +except NameError: + UnicodeAttribute = Attribute + + +class DatetimeAttribute(Attribute): + def __init__(self, default=None, accesstype=None, + mandatory=False, *args, **kw): + super(DatetimeAttribute, self).__init__(type=datetime, + default=default, + accesstype=accesstype, + mandatory=mandatory, *args, + **kw) + + def convert(self, value, instance): + if isinstance(value, basestring): + try: + return parse_date(value) + except ParseError as e: + raise ValueError(str(e)) + return super(DatetimeAttribute, self).convert(value, instance) + + +class ListAttribute(Attribute): + def __init__(self, content_type=unicode, type=list, + default=NOT_SET, *args, **kw): + super(ListAttribute, self).__init__(type=type, + default=default, *args, **kw) + self.content_type = content_type + + def __get__(self, instance, 
owner=None): + if instance is None: + return self + + key = "_" + self.name + try: + return getattr(instance, key) + except AttributeError: + if self.default is NOT_SET: + subresource = self.type() + else: + subresource = self.default + setattr(instance, key, subresource) + return subresource + + def _convert_mapping(self, value, instance): + self.logger.debug("Creating %s from %s", self.content_type, value) + return self.content_type(**value) + + def convert_content(self, value, instance): + if isinstance(value, self.content_type): + return value + if issubclass(self.content_type, Entity): + if isinstance(value, Mapping): + return self._convert_mapping(value, instance) + raise ValueError("Illegal value for sequence '%s' (%s): %s (%s)" % + (self.name, self.content_type, value, type(value))) + return self.content_type(value) + + def set_value(self, instance, value): + if value: + value = self.type([self.convert_content(v, instance) + for v in value]) + super(ListAttribute, self).set_value(instance, value) + + +class StringListAttribute(Attribute): + def __init__(self, content_type=unicode, type=list, + default=NOT_SET, *args, **kw): + super(StringListAttribute, self).__init__(type=type, default=default, + *args, **kw) + self.content_type = content_type + + def __get__(self, instance, owner=None): + if instance is None: + return self + + key = "_" + self.name + try: + return getattr(instance, key) + except AttributeError: + if self.default is NOT_SET: + subresource = self.type() + else: + subresource = self.default + setattr(instance, key, subresource) + return subresource + + def convert(self, value, instance): + if isinstance(value, str): + return value.strip(' ').split(' ') + return super(StringListAttribute, self).convert(value, instance) + + def _convert_mapping(self, value, instance): + self.logger.debug("Creating %s from %s", self.content_type, value) + return self.content_type(**value) + + def convert_content(self, value, instance): + if isinstance(value, self.content_type): + return value + if issubclass(self.content_type, Entity): + if isinstance(value, Mapping): + return self._convert_mapping(value, instance) + raise ValueError("Illegal value for sequence '%s' (%s): %s (%s)" % + (self.name, self.content_type, value, type(value))) + return self.content_type(value) + + def set_value(self, instance, value): + if value: + value = self.type([self.convert_content(v, instance) + for v in value]) + super(StringListAttribute, self).set_value(instance, value) + + +class EntityAttribute(Attribute): + def __init__(self, type, default=None, accesstype=None, mandatory=None, + update_mandatory=None): + super(EntityAttribute, self).__init__(type=type, default=default, + accesstype=accesstype, + mandatory=mandatory, + update_mandatory=update_mandatory) + + def convert(self, value, instance): + if isinstance(value, Mapping): + self.logger.debug("Creating %s from %s", self.type, value) + return self.type(**value) + return super(EntityAttribute, self).convert(value, instance) + + +class CollectionMember(Member): + def __init__(self, content_type, type=Collection, *args, + **kw): # TODO: kca: use type for content_type + super(CollectionMember, self).__init__(type=type, *args, **kw) + self.content_type = content_type + + def convert(self, value, instance): + try: + return self.type(collection=value, name=self.name, + parent=instance, type=self.content_type) + except: + return super(CollectionMember, self).convert(value, instance) + + def __get__(self, instance, owner=None): + if instance is None: + 
return self + + key = "_" + self.name + try: + return getattr(instance, key) + except AttributeError: + subresource = self.type(name=self.name, parent=instance, + type=self.content_type) + setattr(instance, key, subresource) + return subresource + + +class SubresourceMember(Member): + default = None + + def __init__(self, type, virtual=False, default=NOT_SET, *args, **kw): + if type and not issubclass(type, Resource): + raise TypeError(type) + + super(SubresourceMember, self).__init__(type=type, *args, **kw) + + def __get__(self, instance, owner=None): + if instance is None: + return self + + key = "_" + self.name + try: + v = getattr(instance, key) + if v is not None: + return v + except AttributeError: + pass + + # Here we automatically create missing subresources + # Might be a stupid idea to do it here + path = instance.path and instance.path + "/" + self.name or None + subresource = self.type( + path=path, + parent=instance + ) + + # TODO: needs to go into the appropriate resource type(s) + if hasattr(subresource, "creationTime"): + creation_time = instance.creationTime + subresource.creationTime = creation_time + subresource.lastModifiedTime = creation_time + + setattr(instance, key, subresource) + return subresource + + @property + def virtual(self): + return self.type.virtual + + +class ResourceType(ABCMeta): + def __init__(self, *args, **kw): + super(ResourceType, self).__init__(*args, **kw) + + if ("typename" not in self.__dict__ and + not self.__name__.endswith("Collection")): + self.typename = self.__name__[0].lower() + self.__name__[1:] + + self.id_attribute = self.path_attribute = None + attributes = self.attributes = [] + subresources = self.subresources = [] + collections = self.collections = [] + + for name in dir(self): + if name[0] != "_": + attr = getattr(self, name) + if isinstance(attr, Member): + if "_" in name: + name = name.replace("_", "-") + setattr(self, name, attr) + attr._init(name) + if isinstance(attr, SubresourceMember): + subresources.append(attr) + elif isinstance(attr, CollectionMember): + collections.append(attr) + else: + attributes.append(attr) + + if attr.id_attribute and attr.path_attribute: + raise ModelTypeError( + "Attribute %s of resource %s can only be " + "either id_attribute or path_attribute, not " + "both." 
% (name, self.__name__)) + + if attr.id_attribute: + if self.id_attribute is not None: + raise ModelTypeError( + "Resource %s defines more than one id " + "attribute: %s and %s" % + (self.__name__, self.id_attribute, name)) + self.id_attribute = attr.name + self.id_immutable = attr.id_immutable + + if attr.path_attribute: + if self.path_attribute is not None: + raise ModelTypeError( + "Resource %s defines more than one path " + "attribute: %s and %s" % + (self.__name__, self.id_attribute, name)) + self.path_attribute = attr.name + + self.__members__ = attributes + subresources + collections + + # TODO: caching + @property + def attribute_names(self): + return map(attrgetter("name"), self.attributes) + + @property + def collection_names(self): + return map(attrgetter("name"), self.collections) + + @property + def subresource_names(self): + return map(attrgetter("name"), self.subresources) + + @property + def member_names(self): + return map(attrgetter("name"), self.__members__) + + +class Entity(LoggerMixin): + __metaclass__ = ResourceType + + def __init__(self, *args, **kw): + self.set_values(kw) + + def set_values(self, values): + self.logger.debug("Setting values for entity of type %s with %s", + type(self), values) + values = values.copy() + + for member in self.__members__: + try: + v = values.pop(member.name) + if (v is not None and isinstance(member, ListAttribute) and + not isinstance(v, (list, tuple, set))): + l = [v] + v = l + setattr(self, member.name, v) + except KeyError: + try: + v = values.pop(member.name + "Reference") + # TODO: proper solution? + if (v is not None and isinstance(member, ListAttribute) and + not isinstance(v, (list, tuple, set))): + v = v.values()[0] + setattr(self, member.name, v) + except KeyError: + pass + + if values: + self._set_extra_values(values) + + def _set_extra_values(self, values): + """ + names = type(self).subresource_names + for k in values.keys(): + if k.strip("Reference") in names: + values.pop(k) + print names, values + from traceback import print_stack + print_stack() + """ + if values: + raise ModelTypeError("%s resource has no attribute %s" % + (self.typename, values.keys()[0])) + + @classmethod + def get_typename(cls): + return cls.typename + + def get_attribute_values(self, filter=False): + vals = {} + for attr in self.attributes: + a_name = attr.name + val = getattr(self, a_name) + if (val is None or val == '' or val == []) and filter: + continue + vals[a_name] = val + return vals + attribute_values = property(get_attribute_values) + + def get_values_representation(self, fields=None, internal=False): + vals = {} + id_attribute = self.id_attribute + for attr in self.attributes: + a_name = attr.name + if (fields is None or a_name == id_attribute or a_name in fields) \ + and (internal or attr.accesstype is not None): + val = getattr(self, "_" + a_name, None) + if val is None: + continue + if isinstance(attr, ListAttribute): + # TODO: return simple values. 
No representation + if attr.content_type is AnyURI: # any uri list + vals[a_name] = {"reference": val} + elif issubclass(attr.content_type, Entity): # complex list + vals[a_name] = { + a_name: [x.get_values_representation() for x in val] + } + else: # simple list + vals[a_name] = {a_name[:-1]: val} + elif isinstance(attr, EntityAttribute): + vals[a_name] = val.values + else: + try: + val = val.isoformat() + except AttributeError: + pass + vals[a_name] = val + return vals + + def get_values(self, filter=False): + return self.get_attribute_values(filter) + + @property + def values(self): + return self.get_values() + + @property + def subresource_values(self): + vals = {} + for attr in self.subresources: + vals[attr.name] = getattr(self, attr.name) + return vals + + +class ContentResource(Entity): + virtual = True + __model_name__ = None + __model_version__ = None + + def __init__(self, value, *args, **kw): + kw = {'CONTENT': value} + super(ContentResource, self).__init__(*args, **kw) + + @property + def values(self): + return self.get_values().get('CONTENT') + + +class Resource(Entity): + virtual = False + __model_name__ = None + __model_version__ = None + + def __init__(self, path=None, parent=None, *args, **kw): + if path is not None and not isinstance(path, basestring): + raise TypeError(path) + self.__path = path + self.parent = parent + super(Resource, self).__init__(*args, **kw) + + def get_path(self): + return self.__path + + def set_path(self, path): + self.__path = path + if self.id_attribute and getattr(self, self.id_attribute) is None: + setattr(self, self.id_attribute, path.rpartition("/")[-1]) + if self.path_attribute and getattr(self, self.path_attribute) is None: + setattr(self, self.path_attribute, path) + + path = property(get_path, set_path) + + @property + def parent_path(self): + if self.__path is not None: + return self.__path.rpartition("/")[0] + + # TODO: deprecated + @property + def name(self): + return self.basename + + @property + def basename(self): + if self.path is not None: + return self.path.rpartition("/")[-1] + if self.id_attribute is not None: + return getattr(self, self.id_attribute) + + def set_values(self, values): + values = values.copy() + + keys = [k for k in values.keys() if "_" in k] + for k in keys: + values[k.replace("_", "-")] = values.pop(k) + + path = self.path + if path is not None: + id_attribute = self.id_attribute + if (id_attribute is not None and + id_attribute not in values): + values[id_attribute] = path.rpartition("/")[-1] + + path_attribute = self.path_attribute + if (path_attribute is not None and + path_attribute not in values): + values[path_attribute] = path + + for member in self.__members__: + try: + v = values.pop(member.name) + # FIXME: move into de-serializer and handle dicts + if (v is not None and isinstance(member, ListAttribute) and + not isinstance(v, (list, tuple, set))): + v = v.values()[0] + setattr(self, member.name, v) + except KeyError: + try: + v = values.pop(member.name + "Reference") + # TODO: proper solution? 
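+                    # NOTE: some representations deliver the value wrapped in a
+                    # dict under a "<name>Reference" key; for ListAttribute
+                    # members the dict is unwrapped below so a plain list is
+                    # stored on the instance.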
+ if (v is not None and isinstance(member, ListAttribute) and + not isinstance(v, (list, tuple, set))): + v = v.values()[0] + setattr(self, member.name, v) + except KeyError: + pass + + if values: + self._set_extra_values(values) + + def __repr__(self): + return "%s(path='%s', name='%s')" % (type(self).__name__, self.path, + self.name) + + def __eq__(self, o): + try: + return self.path == o.path + except AttributeError: + return False + + def __ne__(self, o): + return not self.__eq__(o) + + +class FlexibleAttributesMixin(object): + def __init__(self, path=None, parent=None, *args, **kw): + self._flex_attrs = set() + + super(FlexibleAttributesMixin, self).__init__(path=path, parent=parent, + *args, **kw) + + def __setattr__(self, k, v): + if not k.startswith("_") and not hasattr(self, k) and k != "parent": + self._flex_attrs.add(k) + + return super(FlexibleAttributesMixin, self).__setattr__(k, v) + + def __delattr__(self, k): + self._flex_attrs.discard(k) + + return super(FlexibleAttributesMixin, self).__delattr__(k) + + @property + def flex_values(self): + return {k: getattr(self, k) for k in self._flex_attrs} + + def get_values(self, filter=False): + vals = super(FlexibleAttributesMixin, self).get_values(filter) + vals.update(self.flex_values) + return vals + + def get_values_representation(self, fields=None, internal=False): + r = super(FlexibleAttributesMixin, self) \ + .get_values_representation(fields=fields, internal=internal) + if fields is None: + r.update(self.flex_values) + return r + + def _set_extra_values(self, values): + for k, v in values.items(): + setattr(self, k, v) + + +class AnyURI(str): + pass + + +class AnyURIList(Entity): + reference = ListAttribute(mandatory=False) diff --git a/common/openmtc/src/openmtc/model/__init__.pyc b/common/openmtc/src/openmtc/model/__init__.pyc new file mode 100644 index 0000000..ee541c9 Binary files /dev/null and b/common/openmtc/src/openmtc/model/__init__.pyc differ diff --git a/common/openmtc/src/openmtc/model/exc.py b/common/openmtc/src/openmtc/model/exc.py new file mode 100644 index 0000000..87fc14e --- /dev/null +++ b/common/openmtc/src/openmtc/model/exc.py @@ -0,0 +1,14 @@ +''' +Created on 26.05.2013 + +@author: kca +''' +from openmtc.exc import OpenMTCError + + +class ModelError(OpenMTCError): + pass + + +class ModelTypeError(ModelError, TypeError): + pass diff --git a/common/openmtc/src/openmtc/model/exc.pyc b/common/openmtc/src/openmtc/model/exc.pyc new file mode 100644 index 0000000..ed6287a Binary files /dev/null and b/common/openmtc/src/openmtc/model/exc.pyc differ diff --git a/common/openmtc/src/openmtc/util.py b/common/openmtc/src/openmtc/util.py new file mode 100644 index 0000000..8465bfd --- /dev/null +++ b/common/openmtc/src/openmtc/util.py @@ -0,0 +1,39 @@ +from datetime import datetime, timedelta, tzinfo +import time + +ZERO = timedelta(0) + + +class Utc(tzinfo): + """UTC + """ + __slots__ = () + + def utcoffset(self, dt): + return ZERO + + def tzname(self, dt): + return "UTC" + + def dst(self, dt): + return ZERO + +UTC = Utc() + + +#del Utc + + +def datetime_now(): + return datetime.now(UTC) + + +def datetime_the_future(offset = 0): + """ Returns a datetime instance seconds in the future. 
+ @note: if no offset is provided or offset == 0, this is equivalent to datetime_now + @param offset: seconds from now + @return: datetime in seconds + """ + f = time.time() + offset + return datetime.fromtimestamp(f, UTC) + diff --git a/common/openmtc/src/openmtc/util.pyc b/common/openmtc/src/openmtc/util.pyc new file mode 100644 index 0000000..a366d22 Binary files /dev/null and b/common/openmtc/src/openmtc/util.pyc differ diff --git a/common/openmtc/src/openmtc/version.py b/common/openmtc/src/openmtc/version.py new file mode 100644 index 0000000..06ce668 --- /dev/null +++ b/common/openmtc/src/openmtc/version.py @@ -0,0 +1 @@ +VERSION="4.0.0" \ No newline at end of file diff --git a/common/prep-env.sh b/common/prep-env.sh new file mode 100644 index 0000000..3a3422b --- /dev/null +++ b/common/prep-env.sh @@ -0,0 +1,11 @@ +#!/bin/sh + +for d in ../futile/src ../common/openmtc/lib ../common/*/src ../serializers/*/src ../openmtc-app/src ; do + _SRC_PATH="${d}" + _READLINK_PATH="$(readlink ${_SRC_PATH})" + PYTHONPATH=${PYTHONPATH}:$(pwd)/${_READLINK_PATH:-${_SRC_PATH}} +done + +echo PYTHONPATH: ${PYTHONPATH} + +export PYTHONPATH diff --git a/create-app-structure b/create-app-structure new file mode 100755 index 0000000..f2500e7 --- /dev/null +++ b/create-app-structure @@ -0,0 +1,537 @@ +#!/usr/bin/env bash + +# OS relevant options +case "${OSTYPE}" in + "linux-gnu") + MD5_PROG="md5sum" + ;; + "darwin") + MD5_PROG="md5 -r" + ;; + *) + echo "OS type '${OSTYPE}' not supported. Exiting now!" + exit 1 + ;; +esac + +print_help () { +cat <] APP_NAME + + APP_NAME Name of the app, used as folder and class name. + + -a If not provided, it will be created automatically by + converting the APP_NAME from camel case to snake case. + + If APP_NAME or APP_SCRIPT ends with IPE or similar, it + will be assumed as IPE and will be created under ipes. + + -w if true, FlaskRunner is used and basic index.html is + created + + -i if true, IN-AE is expected and default endpoint is + configured to backend + + -d if true, no setup file and docker files will be created +EOM +} + +optspec=":wdia:" + +WEB_APP="false" +IN_AE="false" +NO_DOCKER="false" + +while getopts "${optspec}" optchar; do + case "${optchar}" in + w) + WEB_APP="true" + ;; + i) + IN_AE="true" + ;; + d) + NO_DOCKER="true" + ;; + a) + if [[ ${OPTARG} == -* ]]; then + print_help + exit 1 + fi + APP_SCRIPT=${OPTARG} + ;; + \?|:) + print_help + exit 1 + esac +done + +shift $((OPTIND-1)) + +if [ $# -ne 1 ]; then + print_help + exit 1 +fi + +# set variables +APP_NAME="$1" +APP_SCRIPT=${APP_SCRIPT-$(printf "${APP_NAME}" | \ + perl -p -e 's/([a-z0-9])([A-Z]+)/\1-\L\2/g' | \ + perl -p -e 's/^([A-Z])/\l$1/g')} +MOD_NAME=$(printf "${APP_SCRIPT}" | tr "-" "_" | sed 's/.*/\L&/g') +PKG_NAME=$(printf "${APP_SCRIPT}" | tr -d "-" | sed 's/.*/\L&/g') + +if [[ ${PKG_NAME} == *"ipe" ]]; then + APPS_FOLDER="ipes" +else + APPS_FOLDER="apps" +fi +APP_FOLDER="${APPS_FOLDER}/${APP_NAME}" + +# check if existing +if [ -e "${APP_FOLDER}" ]; then + printf "App folder already existing. 
Aborting.\n" + exit 1 +fi + +# change to root folder +cd "$(dirname ${0})" + +# set port suffix +APP_PORT_SUFFIX=$(printf %04d $((0x$(${MD5_PROG} <<< "${APP_NAME}" | cut -c1-4) % 10000))) + +################################################################################ +# creating module files +mkdir -p "${APP_FOLDER}" + +SRC_FOLDER="${APP_FOLDER}/src/${PKG_NAME}" +mkdir -p "${SRC_FOLDER}" + +# init file +cat > "${SRC_FOLDER}/__init__.py" << EOF +""" +TODO: Add description here +""" + +__version__ = "ADD-VERSION-HERE" +__description__ = "${APP_NAME}" +__author_name__ = "ADD_AUTHOR_HERE" +__author_mail__ = "ADD_MAIL_HERE" +__requires__ = [] +EOF + +# main file +MAIN_MODULE="${SRC_FOLDER}/__main__.py" +cat > "${MAIN_MODULE}" << EOF +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser + +from openmtc_app.util import prepare_app, get_value +from openmtc_app.flask_runner import FlaskRunner as Runner +from .${MOD_NAME} import ${APP_NAME} + +# defaults +default_name = "${APP_NAME}" +default_ep = "http://localhost:8000" + +# args parser +parser = ArgumentParser( + description="An IPE called ${APP_NAME}", + prog="${APP_NAME}", + formatter_class=ArgumentDefaultsHelpFormatter) +parser.add_argument("-n", "--name", help="Name used for the AE.") +parser.add_argument("-s", "--ep", help="URL of the local Endpoint.") + +# args, config and logging +args, config = prepare_app(parser, __loader__, __name__, "config.json") + +# variables +nm = get_value("name", (unicode, str), default_name, args, config) +cb = config.get("cse_base", "onem2m") +ep = get_value("ep", (unicode, str), default_ep, args, config) +poas = config.get("poas", ["http://auto:2${APP_PORT_SUFFIX}"]) +originator_pre = config.get("originator_pre", "//openmtc.org/mn-cse-1") +ssl_certs = config.get("ssl_certs", {}) +port = int(config.get("port", 1${APP_PORT_SUFFIX})) + +# start +app = ${APP_NAME}( + name=nm, cse_base=cb, poas=poas, + originator_pre=originator_pre, **ssl_certs +) +Runner(app, port=port).run(ep) + +print ("Exiting....") +EOF + +if [ ${WEB_APP} == "false" ]; then + perl -i -p -e 's/flask_runner/runner/' "${MAIN_MODULE}" + perl -i -p -e 's/FlaskRunner/AppRunner/' "${MAIN_MODULE}" + perl -i -p -e 's/app, port=port/app/' "${MAIN_MODULE}" + perl -i -n -e 'print unless /port = int\(/' "${MAIN_MODULE}" +fi + +if [ ${IN_AE} == "true" ]; then + perl -i -p -e 's/mn-cse-1/in-cse-1/' "${MAIN_MODULE}" + perl -i -p -e 's/localhost:8000/localhost:18000/' "${MAIN_MODULE}" +fi + +# main module +cat > "${SRC_FOLDER}/${MOD_NAME}.py" << EOF +from openmtc_app.onem2m import XAE + + +class ${APP_NAME}(XAE): + interval = 10 + + def _on_register(self): + # start endless loop + self.run_forever(self.interval) +EOF + +# index html if web app +if [ ${WEB_APP} == "true" ]; then + mkdir -p "${SRC_FOLDER}/static" + cat > "${SRC_FOLDER}/static/index.html" << EOF + + + + + Title + + +Hello World! 
+ + +EOF +fi + +################################################################################ +# creating config files + +mkdir -p "${APP_FOLDER}/etc/conf" + +# main config +MAIN_CONFIG="${APP_FOLDER}/config.json" +cat > "${MAIN_CONFIG}" << EOF +{ + "name": "${APP_NAME}", + "ep": "http://localhost:8000", + "cse_base": "onem2m", + "poas": [ + "http://auto:2${APP_PORT_SUFFIX}" + ], + "originator_pre": "//openmtc.org/mn-cse-1", + "ssl_certs": { + "cert_file": null, + "key_file": null, + "ca_certs": null + }, + "port": 1${APP_PORT_SUFFIX}, + "logging": { + "level": "ERROR", + "file": null + } +} +EOF + +# dist config +DIST_CONFIG="${APP_FOLDER}/etc/conf/config.json.dist" +cat > "${DIST_CONFIG}" << EOF +{ + "name": "${APP_NAME}", + "ep": "http://localhost:8000", + "cse_base": "onem2m", + "poas": [ + "http://auto:2${APP_PORT_SUFFIX}" + ], + "originator_pre": "//openmtc.org/mn-cse-1", + "ssl_certs": { + "cert_file": "/etc/openmtc/certs/${PKG_NAME}.cert.pem", + "key_file": "/etc/openmtc/certs/${PKG_NAME}.key.pem", + "ca_certs": "/etc/openmtc/certs/ca-chain.cert.pem" + }, + "port": 1${APP_PORT_SUFFIX}, + "logging": { + "level": "INFO", + "file": "/var/log/openmtc/${PKG_NAME}.log" + } +} +EOF + +if [ ${WEB_APP} == "false" ]; then + perl -i -n -e 'print unless /"port": 1/' "${MAIN_CONFIG}" + perl -i -n -e 'print unless /"port": 1/' "${DIST_CONFIG}" +fi + +if [ ${IN_AE} == "true" ]; then + perl -i -p -e 's/mn-cse-1/in-cse-1/' "${MAIN_CONFIG}" + perl -i -p -e 's/localhost:8000/localhost:18000/' "${MAIN_CONFIG}" + perl -i -p -e 's/mn-cse-1/in-cse-1/' "${DIST_CONFIG}" + perl -i -p -e 's/localhost:8000/localhost:18000/' "${DIST_CONFIG}" +fi + +################################################################################ +# create start script +START_SCRIPT="${APPS_FOLDER}/${APP_SCRIPT}" +cat > "${START_SCRIPT}" << EOF +#!/usr/bin/env bash + +cd \$(dirname \${0}) + +. ./prep-env.sh + +cd ${APP_NAME} + +PYTHONPATH=\${PYTHONPATH}:src exec python -m ${PKG_NAME} \$@ +EOF + +chmod +x "${START_SCRIPT}" + +################################################################################ +# create bin script +mkdir -p "${APP_FOLDER}/bin" +BIN_SCRIPT="${APP_FOLDER}/bin/openmtc-${APP_SCRIPT}" +cat > "${BIN_SCRIPT}" << EOF +#!/usr/bin/env bash + +exec python -m ${PKG_NAME} \$@ +EOF + +chmod +x "${BIN_SCRIPT}" + +################################################################################ +# create systemd unit file +SYSTEMD_FOLDER="${APP_FOLDER}/etc/systemd/system" +mkdir -p ${SYSTEMD_FOLDER} +cat > "${SYSTEMD_FOLDER}/openmtc-${PKG_NAME}.service" << EOF +[Unit] +Description=OpenMTC ${APP_NAME} +After=network.target +Wants=ntp.service + +[Service] +ExecStart=/usr/local/bin/${APP_SCRIPT} + +[Install] +WantedBy=multi-user.target +EOF + +################################################################################ +# return here if docker is not needed +if [ ${NO_DOCKER} == "true" ]; then + exit 0 +fi + +################################################################################ +# create setup file +SETUP_FILE="${APP_FOLDER}/setup-${PKG_NAME}.py" +cat > "${SETUP_FILE}" << EOF +#!/usr/bin/env python + +from setuptools import setup +from distutils.core import setup +from glob import glob +import sys + +from utils import get_packages, get_pkg_files, OpenMTCSdist, move_config_files + +# name and dir +NAME = "${PKG_NAME}" +BASE_DIR = "." 
+ +# import pkg +sys.path.append(BASE_DIR + "/src") +pkg = __import__(NAME) + +# setup name and version +SETUP_NAME = "openmtc-" + NAME +SETUP_VERSION = pkg.__version__ +SETUP_DESCRIPTION = pkg.__description__ + +# meta +SETUP_AUTHOR = pkg.__author_name__ +SETUP_AUTHOR_EMAIL = pkg.__author_mail__ +SETUP_URL = "http://www.openmtc.org" +SETUP_LICENSE = "Fraunhofer FOKUS proprietary" + +# requirements +SETUP_REQUIRES = pkg.__requires__ +SETUP_INSTALL_REQUIRES = pkg.__requires__ + +# packages +PACKAGES = [NAME] +PACKAGE_DIR = {"": BASE_DIR + "/src"} +all_packages = [] +for package in PACKAGES: + all_packages.extend(get_packages(package, PACKAGE_DIR)) + +# scripts +SETUP_SCRIPTS = glob(BASE_DIR + "/bin/*") + +# package data +PACKAGE_DATA = {NAME: get_pkg_files(BASE_DIR, NAME)} + +# data files +CONFIG_FILES = ("config.json",) +CONFIG_DIR = "/etc/openmtc/" + NAME +CONFIG_DIST_FILES = (BASE_DIR + "/etc/conf/config.json.dist",) +DATA_FILES = [(CONFIG_DIR, CONFIG_DIST_FILES)] + +# cmd class +CMD_CLASS = {'sdist': OpenMTCSdist} + +if __name__ == "__main__": + if 'bdist_wheel' in sys.argv: + raise RuntimeError("This setup.py does not support wheels") + + ############################################################################ + # setup + setup(name=SETUP_NAME, + version=SETUP_VERSION, + description=SETUP_DESCRIPTION, + author=SETUP_AUTHOR, + author_email=SETUP_AUTHOR_EMAIL, + url=SETUP_URL, + license=SETUP_LICENSE, + requires=SETUP_REQUIRES, + install_requires=SETUP_INSTALL_REQUIRES, + package_dir=PACKAGE_DIR, + packages=all_packages, + scripts=SETUP_SCRIPTS, + package_data=PACKAGE_DATA, + data_files=DATA_FILES, + cmdclass=CMD_CLASS + ) + + ############################################################################ + # install + if "install" in sys.argv: + # only do this during install + move_config_files(CONFIG_DIR, CONFIG_FILES) +EOF + +chmod +x "${SETUP_FILE}" + +# copy needed helper files +cp MANIFEST.in utils.py "${APP_FOLDER}/." + +################################################################################ +# create docker files + +DOCKER_FOLDER="${APP_FOLDER}/docker" +mkdir -p ${DOCKER_FOLDER} + +# create configure script +DOCKER_SCRIPT="${DOCKER_FOLDER}/configure-${PKG_NAME}-and-start" +cat > "${DOCKER_SCRIPT}" << EOF +#!/usr/bin/env bash + +CONFIG_FILE="/etc/openmtc/${PKG_NAME}/config.json" + +NAME=\${NAME-"${APP_NAME}"} +EP=\${EP-"http://localhost:8000"} +CSE_BASE=\${CSE_BASE-"onem2m"} +POAS=\${POAS-'["http://auto:2${APP_PORT_SUFFIX}"]'} +ORIGINATOR_PRE=\${ORIGINATOR_PRE-"//openmtc.org/mn-cse-1"} +SSL_CRT=\${SSL_CRT-"/etc/openmtc/certs/${PKG_NAME}.cert.pem"} +SSL_KEY=\${SSL_KEY-"/etc/openmtc/certs/${PKG_NAME}.key.pem"} +SSL_CA=\${SSL_CA-"/etc/openmtc/certs/ca-chain.cert.pem"} +PORT=\${PORT-"1${APP_PORT_SUFFIX}"} + +# defaults logging +LOGGING_FILE=\${LOGGING_FILE-"/var/log/openmtc/${PKG_NAME}.log"} +LOGGING_LEVEL=\${LOGGING_LEVEL-"ERROR"} + +# ensure correct level +case \${LOGGING_LEVEL} in + FATAL|ERROR|WARN|INFO|DEBUG) + ;; + *) + LOGGING_LEVEL="ERROR" + ;; +esac + +# local ip +LOCAL_IP=\$(ip r get 8.8.8.8 | awk 'NR==1 {print \$NF}') + +# set hostname +HOST_NAME=\${EXTERNAL_IP-\${LOCAL_IP}} + +# Configuration of the service. +CONFIG_TEMP=\${CONFIG_FILE}".tmp" +echo -n "Configuring M2M ${PKG_NAME}..." +JQ_STRING='.' 
+ +# basics +JQ_STRING=\${JQ_STRING}' | + .name = "'\${NAME}'" | + .ep = "'\${EP}'" | + .cse_base = "'\${CSE_BASE}'" | + .poas = '\${POAS}' | + .originator_pre = "'\${ORIGINATOR_PRE}'" | + .ssl_certs.cert_file = "'\${SSL_CRT}'" | + .ssl_certs.key_file = "'\${SSL_KEY}'" | + .ssl_certs.ca_certs = "'\${SSL_CA}'" | + .port = "'\${PORT}'" | + .logging.file |= "'\${LOGGING_FILE}'" | + .logging.level |= "'\${LOGGING_LEVEL}'" +' + +cat \${CONFIG_FILE} | jq -M "\${JQ_STRING}"> \${CONFIG_TEMP} +mv \${CONFIG_TEMP} \${CONFIG_FILE} + +echo "done" + +exec python -m ${PKG_NAME} \$@ +EOF + +if [ ${WEB_APP} == "false" ]; then + perl -i -n -e 'print unless /PORT=/' "${DOCKER_SCRIPT}" + perl -i -n -e 'print unless /.port =/' "${DOCKER_SCRIPT}" +fi + +if [ ${IN_AE} == "true" ]; then + perl -i -p -e 's/mn-cse-1/in-cse-1/' "${DOCKER_SCRIPT}" + perl -i -p -e 's/localhost:8000/localhost:18000/' "${DOCKER_SCRIPT}" +fi + +chmod +x "${DOCKER_SCRIPT}" + +# create docker files +DOCKER_FILE_AMD64="${DOCKER_FOLDER}/${PKG_NAME}-amd64" +DOCKER_FILE_ARM="${DOCKER_FOLDER}/${PKG_NAME}-arm" +cat > "${DOCKER_FILE_AMD64}" << EOF +############################################################ +# Dockerfile to run openmtc ${PKG_NAME} binary +############################################################ + +# Set the base image to use openmtc/sdk +FROM openmtc/sdk-amd64:latest + +ENV MOD_NAME=${PKG_NAME} + +# Set the file maintainer +MAINTAINER rst + +# install openmtc dependencies +COPY tmp/\$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-${PKG_NAME} +COPY tmp/openmtc-\$MOD_NAME.tar.gz /tmp/openmtc-\$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-\$MOD_NAME.tar.gz -C / \\ + --owner root --group root --no-same-owner --no-overwrite-dir \\ + --transform 's/json\.dist/json/' --show-transformed + +RUN mkdir -p /var/log/openmtc + +# add change config +COPY configure-\$MOD_NAME-and-start /usr/local/bin/configure-and-start + +# entry point +ENTRYPOINT ["/usr/local/bin/configure-and-start"] +CMD [""] +EOF + +cat "${DOCKER_FILE_AMD64}" | sed 's/-amd64/-arm/g' > "${DOCKER_FILE_ARM}" diff --git a/create-binary-dist b/create-binary-dist new file mode 100755 index 0000000..87f4ff2 --- /dev/null +++ b/create-binary-dist @@ -0,0 +1,129 @@ +#!/usr/bin/env bash + +base_path=$(cd $(dirname "${0}"); pwd) + +usage () { +echo "Usage: create-binary-dist " +} + +if [ -z "${1}" ]; then + usage + exit 1 +fi + +case $1 in +backend|be) + name="backend" + setup_file="setup-gevent-all.py" + binary_prefix="openmtc-all" + ;; +gateway|gw) + name="gateway" + setup_file="setup-gevent-all.py" + binary_prefix="openmtc-all" + ;; +help) + usage + exit 0 + ;; +*) # other images will be detected by scanning setup files + name="$1" + setup_file="setup-${name}.py" + binary_prefix="openmtc-${name}" + ;; +esac + +separator_line () { +counter=${1-80} +printf '%'${counter}'s\n' | tr ' ' '#' +} + +# get setup file and set working dir +find_result=($(find ${base_path} -iname "${setup_file}")) + +if [ ${#find_result[*]} -eq 0 ]; then + echo "Setup file ${setup_file} not existing. Exiting Now!." + exit 1 +fi + +if [ ${#find_result[*]} -gt 1 ]; then + echo "Too many setup files matching the name. Exiting Now!." + exit 1 +fi + +working_dir=$(dirname ${find_result[0]}) + +# get ids if chown is needed +chown_ids=${2} +if [[ ! 
${chown_ids} =~ ^[0-9]{1,4}:[0-9]{1,4}$ ]]; then + unset chown_ids +fi + +################################################################################ +# set target file +get_target_from_setup_file () +{ +# Each setup file is assumed to hold ".py" suffix, this gets +# removed here +local module_name=${setup_file%.py} + +cd ${working_dir} +python - << END_OF_PYTHON +from importlib import import_module +setup = import_module('${module_name}', '${module_name}') +print("%s-%s" % (setup.SETUP_NAME, setup.SETUP_VERSION)) +END_OF_PYTHON +} + +# construct target file +target_file="$(get_target_from_setup_file).docker.tar.gz" +target_file="${working_dir}/dist/${target_file}" + +################################################################################ +# build binary_package +separator_line +printf "### Creating binary archive...\n" +printf "### Running \"python %s bdist\" now..." ${setup_file} +log_file="/tmp/${setup_file}_error.log" + +# clean up before +rm -f ${target_file} +rm -rf ${working_dir}/build + +# build +cd ${working_dir} +python ${setup_file} bdist --plat-name docker >/dev/null 2>${log_file} + +# clean up after +rm -rf ${working_dir}/build +rm ${working_dir}/${setup_file}c +find ${base_path} -iname "*.egg-info" -exec rm -r "{}" \; 2> /dev/null + +# check success +if [ -e ${target_file} ]; then + printf "done\n" +else + printf "error\n\n" + cat ${log_file} + exit 1 +fi + +rm ${log_file} + +################################################################################ +# clean binary_package +binary_archive="${working_dir}/dist/${binary_prefix}.docker.tar.gz" +printf "### Stripping .py files..." +cp ${target_file} ${binary_archive} +gzip -d ${binary_archive} +tar --wildcards --delete -f ${binary_archive%".gz"} "*.py" +gzip ${binary_archive%".gz"} +printf "done\n" +rm ${target_file} +printf "### Created binary archive at %s.\n" ${binary_archive} + +################################################################################ +# set correct permissions +if [ -n "${chown_ids}" ]; then + chown -R "${chown_ids}" "${working_dir}/dist" +fi diff --git a/create-binary-docker b/create-binary-docker new file mode 100755 index 0000000..dfdfed7 --- /dev/null +++ b/create-binary-docker @@ -0,0 +1,418 @@ +#!/usr/bin/env bash + +base_path=$(cd $(dirname "${0}"); pwd) + +prog_name="create-binary-docker" +prog_version="0.1" +version_str="${prog_name} v${prog_version}" +usage_str="${prog_name} [OPTS] + OPTS: + -a|--arch Choose architecture: amd64|arm + -p|--prefix Choose Docker image name prefix + -e|--export Export the Docker image to a file, after build + + -h|--help Show this help + -v|--version Show version information + + module_name: + gateway | backend +" + +version () { + echo "${version_str}" +} + +usage () { + echo "Usage: $usage_str" +} + +separator_line () { +counter=${1-80} +printf '%'${counter}'s\n' | tr ' ' '#' +} + +# +# Parsing commandline options and arguments using "getopt" +# +OPTS=$(getopt -o 'a:ehp:v' --long 'arch:,export,help,prefix:,version' -n "${prog_name}" -- "$@") +if [ 0 -ne $? 
]; then + echo 'Exiting' >&2 + exit 1 +fi +eval set -- "$OPTS" +unset OPTS + +ARCH_OPT='' +PREFIX_OPT='' +EXPORT_OPT='' +MODULE_NAME='' + +while true; do + case "$1" in + '-a' | '--arch') + ARCH_OPT="$2" + shift 2 + continue + ;; + + '-e' | '--export') + EXPORT_OPT=true + shift 1 + continue + ;; + + '-h' | '--help') + usage + exit 0 + ;; + + '-p' | '--prefix') + PREFIX_OPT="$2" + shift 2 + continue + ;; + + '-v' | '--version') + version + shift 1 + exit 0 + ;; + + '--') # Argument handling + shift # Remove '--' + if [ $# -ne 1 ]; then + echo "Error: Exactly one module name needs to be provided" >&2 + usage + exit 1 + fi + MODULE_NAME="$1" + break + ;; + *) + echo "Error: Internal problem!" + usage + exit 1 + ;; + esac +done + +# Set architecture +case ${ARCH_OPT} in + amd64) + machine="amd64" + arch="x86_64" + ;; + + arm) + machine="arm" + arch="armv7l" + if [ $(uname -m) == "x86_64" ];then + printf "check qemu" + qemu_bin=$(which qemu-arm-static) + if [ -z "$qemu_bin" ];then + printf "Package 'qemu-user-static' not found" + printf "Script is exiting now.\n" + exit 1 + fi + fi + ;; + + '') + case $(uname -m) in + x86_64) + machine="amd64" + arch="x86_64" + ;; + armv6l|armv7l) + machine="arm" + arch="armv7l" + ;; + *) + printf "Platform %s is not supported!\n" $(uname -m) + printf "Script is exiting now.\n" + exit 1 + ;; + esac + ;; + + *) + echo "Error: Unknown architecture ${ARCH_OPT}" >&2 + usage + exit 1 + ;; + +esac + +# get docker image to build +case ${MODULE_NAME} in + backend|be) + name="backend" + setup_file="setup-gevent-all.py" + binary_prefix="openmtc-all" + ;; + gateway|gw) + name="gateway" + setup_file="setup-gevent-all.py" + binary_prefix="openmtc-all" + ;; + help) + usage + exit 0 + ;; + *) # other images will be detected by scanning setup files + name="$1" + setup_file="setup-${name}.py" + binary_prefix="openmtc-${name}" + ;; +esac + +# Use export image, if set +export_image=${EXPORT_OPT:-false} + +# +# docker variables +# +docker_prefix_default="openmtc/" +if [ -n "${PREFIX_OPT}" ]; then + docker_prefix="${PREFIX_OPT%%/}/" +else + docker_prefix=${docker_prefix_default} +fi + +# get setup file and set working dir +find_result=($(find ${base_path} -iname "${setup_file}")) + +if [ ${#find_result[*]} -eq 0 ]; then + echo "Setup file ${setup_file} not existing. Exiting Now!." + exit 1 +fi + +if [ ${#find_result[*]} -gt 1 ]; then + echo "Too many setup files matching the name. Exiting Now!." 
+ exit 1 +fi + +working_dir=$(dirname ${find_result[0]}) + +# docker variables +docker_path="${working_dir}/docker" +docker_tmp="${docker_path}/tmp" + +# base image +base_docker_file="${docker_path}/base-${machine}" +base_docker_name="${docker_prefix}base-${machine}" + +# builder image +build_docker_file="${docker_path}/builder-${machine}" +build_docker_name="${docker_prefix}builder-${machine}" +build_docker_work_dir="/usr/local/src/openmtc-python/" +build_container_name="build-container" + +# docker image +target_docker_name="${docker_prefix}${name}-${machine}" +target_docker_file="${docker_path}/${name}-${machine}" +target_docker_binary="${docker_tmp}/openmtc-${name}.tar.gz" + +# export image +docker_dist="${base_path}/dist/docker" + +############################################################################## +# set docker command +# only sudo if not root and not in docker group +if [ $(id -u) -eq 0 ] || id -nG | grep -qw "docker"; then + docker_cmd=$(which docker) +else + docker_cmd="sudo "$(which docker) +fi + +############################################################################## +# trap and cleanup +cleanup () { +separator_line +printf "### Cleaning..." +rm -f "${target_docker_binary}" +rm -f "${docker_tmp}/${name}-dependencies.txt" +${docker_cmd} rm -f ${build_container_name} &> /dev/null +printf "done\n" +} + +trap cleanup SIGINT SIGTERM + +############################################################################## +# check if possible +target_setup_file="${working_dir}/${setup_file}" +if ! ([ -f "${target_setup_file}" ] && [ -f "${target_docker_file}" ]); then + if ! [ -f "${target_setup_file}" ]; then + printf "${target_setup_file} not existing\n" + fi + if ! [ -f "${target_docker_file}" ]; then + printf "${target_docker_file} not existing\n" + fi + printf "Script is exiting now.\n" + cleanup + exit 1 +fi + +############################################################################## +# Use this script to build sdk package before if necessary +if [ "${name}" != "sdk" ]; then + separator_line + printf "### Need to build SDK before.\n" + + # Run this script again to build sdk + $0 -a ${machine} -p ${docker_prefix} sdk + if [ $? -gt 0 ]; then + exit 1 + fi + separator_line + printf "### Continuing %s-%s...\n" ${name} ${machine} +fi + +############################################################################## +separator_line +printf "### Building docker image for %s-%s...\n" ${name} ${machine} + +############################################################################## +# When building SDK, build base and build container before +if [ "${name}" == "sdk" ]; then + ########################################################################## + # build base docker container + separator_line + printf "### Building base container...\n" + ${docker_cmd} build --tag ${base_docker_name} \ + --file ${base_docker_file} ${docker_path} + if [ $? -gt 0 ]; then + printf "### Building base container failed. Exiting now.\n" + cleanup + exit 1 + fi + printf "### Base container built successfully.\n" + + ########################################################################## + # build container to run setup script + separator_line + printf "### Building build container...\n" + ${docker_cmd} build --tag ${build_docker_name} \ + --build-arg OPENMTC_HOME=${build_docker_work_dir} \ + --file ${build_docker_file} ${docker_path} + if [ $? -gt 0 ]; then + printf "### Building build container failed. 
Exiting now.\n" + cleanup + exit 1 + fi + printf "### Build container built successfully.\n" +fi + +############################################################################## +# create the build container to run the script +separator_line +printf "### Create build container %s.\n" ${name} +${docker_cmd} create --name ${build_container_name} \ + --volume=${base_path}:${build_docker_work_dir} \ + ${build_docker_name} ${name} "$(id -u):$(id -g)" +if [ $? -gt 0 ]; then + printf "### Creating build container failed. Exiting now.\n" + cleanup + exit 1 +fi +printf "### Creating build container successfully.\n" + +############################################################################## +# starting container interactive to wait for finishing the script +separator_line +printf "### Starting build container...\n" +${docker_cmd} start -i ${build_container_name} +if [ $? -gt 0 ]; then + printf "### Starting build container failed. Exiting now.\n" + cleanup + exit 1 +fi +printf "### Starting build container successful.\n" + +############################################################################## +# move the file +separator_line +printf "### Move tar file...\n" +mkdir -p ${docker_tmp} +binary_archive="${binary_prefix}.docker.tar.gz" +binary_archive="${working_dir}/dist/${binary_archive}" +mv ${binary_archive} ${target_docker_binary} +printf "### Moving tar successful.\n" + +############################################################################## +# copy requirements +get_requirements_from_setup_file () +{ + # Each setup file is assumed to hold ".py" suffix, this gets + # removed here + local module_name=${setup_file%.py} + + cd ${working_dir} + python - << END_OF_PYTHON +from importlib import import_module +from re import sub +setup = import_module('${module_name}', '${module_name}') +print('\n'.join(map(lambda x: sub('[\s+]', '', x), + setup.SETUP_INSTALL_REQUIRES))) +END_OF_PYTHON +} + +printf "%s\n" $(get_requirements_from_setup_file) | tr " " "\n" > \ + "${docker_tmp}/${name}-dependencies.txt" + +############################################################################## +# build docker container +separator_line +printf "### Building %s-%s container...\n" ${name} ${machine} + +${docker_cmd} build -t ${target_docker_name} \ + -f ${target_docker_file} ${docker_path} +if [ $? -gt 0 ]; then + printf "### Building %s-%s container failed. Exiting now.\n" \ + ${name} ${machine} + cleanup + exit 1 +fi +printf "### Base %s-%s container built successfully.\n" ${name} ${machine} + +############################################################################## +# cleanup +cleanup + +############################################################################## +# remove dangling images +separator_line +printf "### Removing dangled images..." +for image in $(${docker_cmd} images -qa -f "dangling=true"); do + ${docker_cmd} rmi -f ${image} > /dev/null +done +printf "done\n" + +############################################################################## +# example to run the docker file +#${docker_cmd} run --name test -d \ +# -p 0.0.0.0:8001:8001 \ +# -e "EXTERNAL_IP=$(ip r get 8.8.8.8 | awk 'NR==1 {print $NF}')" \ +# ${target_docker_name} + +# test with curl +#curl $(ip r get 8.8.8.8 | awk 'NR==1 {print $NF}'):5001/m2m + +# stop and remove container again +#${docker_cmd} stop test && ${docker_cmd} rm test + +############################################################################## +# export docker image +if ${export_image}; then + separator_line + printf "### Exporting the image..." 
+ mkdir -p ${docker_dist} + # change / in target_docker_name to - + docker_dist_file="${docker_dist}/${target_docker_name//\//-}.tar.gz" + ${docker_cmd} save ${target_docker_name} | gzip -c > ${docker_dist_file} + printf "done\n" +fi + +# import docker image +#zcat ${target_docker_name}.tar.gz | ${docker_cmd} load + diff --git a/dist/openmtc_sdk-4.99.0-py2.7.egg b/dist/openmtc_sdk-4.99.0-py2.7.egg new file mode 100644 index 0000000..6cf43b2 Binary files /dev/null and b/dist/openmtc_sdk-4.99.0-py2.7.egg differ diff --git a/doc/authentication.md b/doc/authentication.md new file mode 100644 index 0000000..d128dbe --- /dev/null +++ b/doc/authentication.md @@ -0,0 +1,159 @@ +# Authentication Guide + +## Enable Authentication using HTTPS at the backend CSE and gateway CSE + +To enable authentication the following parameters need to be changed in the configuration files of the backend CSE and gateway CSE named `config-backend.json` and `config-gateway.json`. +The configuration files are located in the `openmtc-open-source/openmtc-gevent` directory. + +In the `plugins` section of each configuration file the HTTPTransportPlugin needs to be enabled. +Therefore, make sure the `disabled` option of the plugin is set to **false**. +Set the option `enable_https` to **true** in the config of the HTTPTransportPlugin to enable HTTPS. +If the option `require_cert` is set to **true**, the client that requests the gateway must provide a certificate. + + +### Example of the gateway CSE configuration: +```json +"plugins": { + "openmtc_cse": [ + { + "name": "HTTPTransportPlugin", + "package": "openmtc_cse.plugins.transport_gevent_http", + "disabled": false, + "config": { + "enable_https": true, + "interface": "::", + "port": 8000, + "require_cert": true + } + }, +``` + +The necessary SSL parameters used for the HTTPS/SSL connection must be specified in the `ssl_certs` option in the `onem2m` section of the corresponding configuration file. +This option includes the private key (`key`), the certificate (`crt`) and the certificate chain (`ca`). +Pre-shipped keys, certificates and a chain file which can be used at the gateway CSE and the backend CSE are located in the `openmtc-open-source/openmtc-gevent/certs` directory. + + +### Example of the SSL parameters configured at the gateway CSE: +```json +"onem2m": { + "accept_insecure_certs": false, + "cse_base": "onem2m", + "cse_id": "mn-cse-1", + "cse_type": "MN-CSE", + "overwrite_originator": { + "enabled": false, + "originator": "/openmtc.org/mn-cse-1" + }, + "sp_id": "openmtc.org", + "ssl_certs": { + "ca": "certs/ca-chain.cert.pem", + "crt": "certs/mn-cse-1-client-server.cert.pem", + "key": "certs/mn-cse-1-client-server.key.pem" + } + }, +``` + +## Registering the gateway CSE to the backend CSE using HTTPS + +The RegistrationHandler plugin is responsible to register the gateway CSE to the backend CSE. +Therefore, the plugin needs to be enabled by setting the `disabled` option to **false**. +Furthermore, when HTTPS is enabled at both CSEs the options `poa` and `own_poa` needs to be changed to **https://...** in the `config` section of the RegistrationHandler plugin. 
+
+```json
+{
+    "name": "RegistrationHandler",
+    "package": "openmtc_cse.plugins.registration_handler",
+    "disabled": false,
+    "config": {
+        "interval": 3600,
+        "labels": [
+            "openmtc"
+        ],
+        "offset": 3600,
+        "remote_cses": [
+            {
+                "cse_base": "onem2m",
+                "cse_id": "in-cse-1",
+                "cse_type": "IN_CSE",
+                "own_poa": [
+                    "https://localhost:8000"
+                ],
+                "poa": [
+                    "https://localhost:18000"
+                ]
+            }
+        ]
+    }
+},
+
+## Authentication of AEs/IPEs
+
+When HTTPS is enabled in the configuration of the CSEs, the AEs/IPEs must provide SSL information as well to perform requests to the CSEs.
+
+When using the [OpenMTC application framework](./sdk-framework.md),
+AEs and IPEs derive from the provided base class `openmtc_app.onem2m.XAE`.
+
+When creating an instance of the AE/IPE, the following additional SSL parameters can be specified:
+* `ca_certs`: the certificate chain file
+* `cert_file`: the certificate file
+* `key_file`: the private key file
+* `originator_pre`: the originator, which needs to match the subjectAltName value in the certificate
+
+If all SSL parameters are specified, the AE/IPE is started by the application framework using an HTTPS client.
+
+### Example from the [onem2m-ipe-sensors-actuators-final.py](/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-final.py) training IPE:
+
+```python
+host = 'https://localhost:8000'
+app = TestIPE(
+    poas=['https://localhost:21346'],         # adds poas in order to receive notifications
+    # SSL options
+    originator_pre='//openmtc.org/mn-cse-1',  # originator_pre, needs to match value in cert
+    ca_certs='../../openmtc-gevent/certs/ca-chain.cert.pem',
+    cert_file='certs/test-ipe.cert.pem',      # cert file, pre-shipped and should match name
+    key_file='certs/test-ipe.key.pem'
+)
+```
+
+## Request information using curl when HTTPS is enabled
+
+In general, requests are performed just the same as presented in the [REST API Guide](./overview-rest-api.md).
+However, when HTTPS is enabled, additional SSL options need to be provided by the client to the CSE.
+
+When using curl as a client, add the following options:
+* **--cert**: the client certificate
+* **--key**: the private client key
+* **--cacert**: the certificate chain
+* **-H**: set header fields, used to set the *X-M2M-Origin* header that carries the originator
+
+### Example curl request
+
+```shell
+curl https://localhost:8000/onem2m -v --cert test-ae.cert.pem --key test-ae.key.pem --cacert ca-chain.cert.pem -H "X-M2M-Origin: //openmtc.org/mn-cse-1/CTest-AE"
+```
+### Notes
+
+If the option `require_cert` in the HTTPTransportPlugin config is set to **false**, the client does not need to present a certificate.
+In that case, the curl options **--cert** and **--key** are not needed when sending a request to the CSE.
+
+**Example:**
+
+```shell
+curl https://localhost:8000/onem2m -v --cacert ca-chain.cert.pem -H "X-M2M-Origin: //openmtc.org/mn-cse-1/CTest-AE"
+```
+
+curl performs SSL certificate verification by default, using a "bundle" of Certificate Authority (CA) public keys (CA certs).
+Using the **--cacert** option, the "bundle" file which is used for verification can be specified.
+In the above example this option is used and the certificate presented by the server to the curl client is verified.
+Nevertheless, curl's verification of the server certificate can be turned off, using the **-k (or --insecure)** option.
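+
+The same request can also be issued from Python instead of curl. The following
+is a minimal sketch using the third-party `requests` library (an assumption,
+not an OpenMTC API); it reuses the pre-shipped certificate files and the
+*X-M2M-Origin* header from the curl example above.
+
+```python
+import requests
+
+# Client certificate/key pair (--cert/--key) and the CA chain (--cacert)
+response = requests.get(
+    "https://localhost:8000/onem2m",
+    cert=("test-ae.cert.pem", "test-ae.key.pem"),
+    verify="ca-chain.cert.pem",
+    headers={"X-M2M-Origin": "//openmtc.org/mn-cse-1/CTest-AE"},
+)
+print(response.status_code)
+```
+
+Passing `verify=False` instead disables the server certificate verification,
+matching the **--insecure** curl example below.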
+ + +**Example:** + +```shell +curl https://localhost:8000/onem2m -v -H "X-M2M-Origin: //openmtc.org/mn-cse-1/CTest-AE" --insecure +``` diff --git a/doc/certificates.md b/doc/certificates.md new file mode 100644 index 0000000..3a8420c --- /dev/null +++ b/doc/certificates.md @@ -0,0 +1,35 @@ +# Certificate Issuance Guide + +## How to create certificates? + +TODO: some extra documentation when issuance is changed + + +### OpenSSL commands to create certificates + +1. Create a Private Key + + ```shell + $ openssl ecparam -genkey -name prime256v1 -out intermediate/private/server.key.pem + ``` + +2. Create a Certificate Signing Request + + The private key is used to create a certificate signing request (CSR). + + ```shell + $ openssl req -new -SHA256 -nodes -config intermediate/openssl_intermediate.cnf -key intermediate/private/server.key.pem -out intermediate/csr/server.csr.pem + ``` + +3. Create a Certificate + + The Certificate Authority (CA) (in this case the intermediate CA) is used to sign the CSR and create a certificate. + + ```shell + openssl ca -config intermediate/openssl_intermediate.cnf -extensions server_cert -days 365 -notext -md sha256 -in intermediate/csr/server.csr.pem -out intermediate/certs/server.cert.pem + ``` + + +## How to setup the certificates when using Docker? + +TODO: NC diff --git a/doc/create-app-structure.md b/doc/create-app-structure.md new file mode 100644 index 0000000..8e16363 --- /dev/null +++ b/doc/create-app-structure.md @@ -0,0 +1,287 @@ +# Create App Structure Script + +This documentation explains the `create-app-structure` script in the main folder +and shows the usage. + +It follows the conventions for Apps and IPEs from the `openmtc-guidelines.md`. +It will create a complex app or an IPE based on the input. It will create a +basic set of folders and files. These will be the app structure, the setup +script and the needed docker files in order to run directly the +`create-binary-docker` script. + +## Usage + +The script shall be called from the command line: + +```bash +./create-app-structure + +Usage: $0 [-w] [-d] [-i] [-a ] APP_NAME + + APP_NAME Name of the app, used as folder and class name. + + -a If not provided, it will be created automatically by + converting the APP_NAME from camel case to snake case. + + If APP_NAME or APP_SCRIPT ends with IPE or similar, it + will be assumed as IPE and will be created under ipes. + + -w if true, FlaskRunner is used and basic index.html is + created + + -i if true, IN-AE is expected and default endpoint is + configured to backend + + -d if true, no setup file and docker files will be created +``` + +The first parameter is mandatory and shall be the name of the App in Camel Case. +From the name the script checks if the app shall be an IPE. Examples for apps +are "TestApp", "RobotControlLoop" and "ROSGUI". IPEs can be named like +"ZigBeeIPE" or "CUL868IPE". If names are not easy to guess the snake case form +the second parameter can be used. + +### Examples + +To create the structure for ZigBeeIPE: + +```bash +./create-app-structure ZigBeeIPE +``` + +This will create the folder `ipes/ZigBeeIPE`, the package `zigbeeipe`, the +module name `zig_bee_ipe` and the start script `zig-bee-ipe`. + +To create the structure for ROSGUI: + +```bash +./create-app-structure -a ros-gui ROSGUI +``` + +This will create the folder `apps/ROSGUI`, the package `rosgui`, the module name +`ros_gui` and the start script `ros-gui`. + +## Result + +The script will produce a bunch of files. 
For the example `TestApp`, it would
+create the folder `apps/TestApp` and the script `apps/test-app`.
+
+The folder `apps/TestApp` looks like the following:
+
+```
+apps
++-- TestApp
+|   +-- bin
+|   |   +-- openmtc-test-app
+|   +-- docker
+|   |   +-- configure-testapp-and-start
+|   |   +-- testapp-amd64
+|   |   +-- testapp-arm
+|   +-- etc
+|   |   +-- conf
+|   |   |   +-- config.json.dist
+|   |   +-- systemd
+|   |       +-- system
+|   |           +-- openmtc-testapp.service
+|   +-- MANIFEST.in
+|   +-- setup-testapp.py
+|   +-- src
+|   |   +-- testapp
+|   |       +-- __init__.py
+|   |       +-- __main__.py
+|   |       +-- test_app.py
+|   +-- utils.py
++-- test-app
+```
+
+The setup script can be used to create a Python install package that can be
+installed on another machine. The Docker files are needed to create a Docker
+image with the help of the `create-binary-docker` script. See the extra
+documentation for this.
+
+## Development Options
+
+There are two ways to use this script:
+
+1. Start from simple app
+2. Start with this script
+
+### Start from simple app
+
+When you want to start with a simple app, you can copy an app from
+`doc/training/apps` and name it so that it ends with `-final.py`. Then you can
+use the script located at `doc/training/start-app.sh` to start your simple app.
+
+When you are satisfied with the functionality, you can use the
+`create-app-structure` script in order to create the structure and the files.
+After the creation of the files, you can copy the code of your simple app into
+the main modules. For the example of `TestApp`, you would integrate the parts
+below
+```python
+if __name__ == '__main__':
+    from openmtc_app.runner import AppRunner as Runner
+```
+into `apps/TestApp/src/testapp/__main__.py` and the rest into
+`apps/TestApp/src/testapp/test_app.py`.
+
+Check the [later chapter](#files-to-work-on) to see which files need to be
+adjusted.
+
+### Start with script
+
+When you start from scratch, you run the script in order to create the
+structure. Then you can start developing. For the example of `TestApp`, you
+start with `apps/TestApp/src/testapp/test_app.py`.
+
+Check the [later chapter](#files-to-work-on) to see which files can be changed,
+if needed.
+
+### Files to work on
+
+The script will create the structure. Some files need changes; other files
+can be changed if necessary.
+
+#### `apps/TestApp/src/testapp/__init__.py`
+
+```python
+"""
+TODO: Add description here
+"""
+
+__version__ = "ADD-VERSION-HERE"
+__description__ = "TestApp"
+__author_name__ = "ADD_AUTHOR_HERE"
+__author_mail__ = "ADD_MAIL_HERE"
+__requires__ = []
+```
+
+* These entries will be used in the `setup-testapp.py` script to build the
+Python package.
+* All values are self-explanatory.
+* `__requires__` can be used if other Python libs are needed.
+
+#### `apps/TestApp/src/testapp/test_app.py`
+
+```python
+from openmtc_app.onem2m import XAE
+
+
+class TestApp(XAE):
+    interval = 10
+
+    def _on_register(self):
+        # start endless loop
+        self.run_forever(self.interval)
+```
+
+* This file contains the start of the program logic.
+* More modules can be added.
+* TestApp can be extended with additional parameters:
+
+```python
+class TestApp(XAE):
+
+    def __init__(self, additional_parameter, *kw, **args):
+        super(TestApp, self).__init__(*kw, **args)
+        self._additional_parameter = additional_parameter
+```
+
+* Adding `*kw, **args` makes it easier to pass the parameter both via the
+config and via the `__main__` module. See the other files and the sketch below.
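+
+As an illustration of how such an additional parameter could be used, here is a
+minimal, hypothetical sketch. The `container_name` parameter and its default
+value are made up for this example; `Container` and `create_container` are the
+model class and XAE helper used by the apps in `doc/example-apps`.
+
+```python
+from openmtc_app.onem2m import XAE
+from openmtc_onem2m.model import Container
+
+
+class TestApp(XAE):
+    interval = 10
+
+    def __init__(self, container_name="testapp_data", *kw, **args):
+        super(TestApp, self).__init__(*kw, **args)
+        # hypothetical extra parameter, e.g. taken from config.json
+        self._container_name = container_name
+
+    def _on_register(self):
+        # create a container named after the configured parameter,
+        # following the pattern used in doc/example-apps
+        self._container = self.create_container(
+            None, Container(resourceName=self._container_name))
+        # start endless loop
+        self.run_forever(self.interval)
+```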
+ +#### `apps/TestApp/config.json` + +```json +{ + "name": "TestApp", + "ep": "http://localhost:8000", + "cse_base": "onem2m", + "poas": [ + "http://auto:29260" + ], + "originator_pre": "//openmtc.org/mn-cse-1", + "ssl_certs": { + "cert_file": null, + "key_file": null, + "ca_certs": null + }, + "logging": { + "level": "ERROR", + "file": null + } +} +``` + +* Most of the parameters are quite fine. But can be changed: + * `name` is the name how the AE/IPE is registering itself. + * `ep` is the endpoint of the AE. Needs to be a Gateway or a backend. + * `cse_base` is the path to the `CSE-BASE` of the endpoint. + * `poas` is used in order to receive notifications. `http:` and `mqtt:` are + supported. + * `originator_pre` needs to be set, if SSL certificates are used. Needs to + match the CSE where the AE shall be register to. + * `ssl_certs` has to be set to the locations of the required certificate + files. + * `logging` can be used to change the log level and also to set a file to log + to. +* To add additional parameters, just add the parameter into the JSON. +* In order to use docker, all changes have to be made as well in +`apps/TestApp/etc/conf/config.json.dist`. + +#### `apps/TestApp/src/testapp/__main__.py` + +```python +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser + +from openmtc_app.util import prepare_app, get_value +from openmtc_app.runner import AppRunner as Runner +from .test_app import TestApp + +# defaults +default_name = "TestApp" +default_ep = "http://localhost:8000" + +# args parser +parser = ArgumentParser( + description="An IPE called TestApp", + prog="TestApp", + formatter_class=ArgumentDefaultsHelpFormatter) +parser.add_argument("-n", "--name", help="Name used for the AE.") +parser.add_argument("-s", "--ep", help="URL of the local Endpoint.") + +# args, config and logging +args, config = prepare_app(parser, __loader__, __name__, "config.json") + +# variables +nm = get_value("name", (unicode, str), default_name, args, config) +cb = config.get("cse_base", "onem2m") +ep = get_value("ep", (unicode, str), default_ep, args, config) +poas = config.get("poas", ["http://auto:29260"]) +originator_pre = config.get("originator_pre", "//openmtc.org/mn-cse-1") +ssl_certs = config.get("ssl_certs", {}) + +# start +app = TestApp( + name=nm, cse_base=cb, poas=poas, + originator_pre=originator_pre, **ssl_certs +) +Runner(app).run(ep) + +print ("Exiting....") +``` + +* The module is ready for starting the app itself. +* When `test_app.py` was extended with an additional parameter and it was added +to the config, the init of the App needs to be changed: +```python +ssl_certs = config.get("ssl_certs", {}) +additional_parameter = config.get("additional_parameter", None) + +# start +app = TestApp( + additional_parameter=additional_parameter, + name=nm, cse_base=cb, poas=poas, + originator_pre=originator_pre, **ssl_certs +) +Runner(app).run(ep) +``` diff --git a/doc/create-binary-docker.md b/doc/create-binary-docker.md new file mode 100644 index 0000000..479b6fb --- /dev/null +++ b/doc/create-binary-docker.md @@ -0,0 +1,62 @@ +# Create binary docker images Script + +This documentation explains the `create-binary-docker` script in the main folder +and shows the usage. + +## Description + +`create-binary-docker` is used to build Docker container images for +all available OpenMTC components. It can be used for cross-builds +too. 
+
+## Usage
+
+The script shall be called from the command line:
+
+```bash
+Usage: create-binary-docker [OPTS] <module_name>
+  OPTS:
+    -a|--arch     Choose architecture: amd64|arm
+    -p|--prefix   Choose Docker image name prefix
+    -e|--export   Export the Docker image to a file, after build
+
+    -h|--help     Show this help
+    -v|--version  Show version information
+
+  module_name:
+    gateway | backend
+```
+
+## Examples
+
+Assuming an AMD64-based host machine, building the OpenMTC Gateway
+container image can be achieved as follows:
+
+```bash
+./create-binary-docker gateway
+```
+
+To cross-build the same component for ARM (e.g. for the Raspberry Pi),
+use:
+
+```bash
+./create-binary-docker -a arm gateway
+```
+
+Any successfully built Docker image will be available in the machine's
+local Docker repository.
+
+To adjust the Docker image name for the OpenMTC Gateway,
+the common prefix can be configured like this:
+
+```bash
+./create-binary-docker -p openmtc-testv1 gateway
+```
+
+After the build has succeeded, a Docker image of the following name will
+be available in the host's Docker repository:
+`openmtc-testv1/gateway`.
+
+It is also possible to automatically save the created Docker image to the
+file system, using the `--export` flag. This will save the Docker
+image into `dist/docker` within the OpenMTC git repository.
diff --git a/doc/deployment-guide.md b/doc/deployment-guide.md
new file mode 100644
index 0000000..6ca0990
--- /dev/null
+++ b/doc/deployment-guide.md
@@ -0,0 +1,205 @@
+# OpenMTC Deployment
+
+## How to initialize OpenMTC for development?
+
+This step-by-step tutorial assumes you have [Ubuntu](https://en.wikipedia.org/wiki/Ubuntu_%28operating_system%29)
+-- or any other [Debian](https://en.wikipedia.org/wiki/Debian)-based Linux distribution -- installed.
+In particular, this guide works best with Ubuntu 16.04, Ubuntu 14.04 or Debian Jessie.
+
+Your OpenMTC development setup may be based on running OpenMTC
+components with:
+* [Docker](https://www.docker.com/) (recommended)
+* purely based on Python
+
+## How to setup a development environment using Docker?
+
+*Note:* If you do not have a running Docker setup, we provide a [Docker installation
+guide](./various.md). Alternatively, you can use the [official Docker
+documentation](https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/).
+
+For this scenario, be sure to build Docker container images for the following OpenMTC components:
+
+* sdk
+* gateway
+* backend
+
+## Building Docker container images for OpenMTC components
+
+For the following, it is expected that the OpenMTC code repository is
+prepared in a directory named:
+
+```
+openmtc-open-source.git
+```
+
+OpenMTC container images will be created through a hierarchy of
+dependent images. For example, when creating the 'sdk' image, a dependent
+'base' image will be created. After completing the build process for
+each Docker image, the image can be found in the machine's local
+Docker repository.
+
+**Be sure to have installed Docker.io in version 1.6.2 at minimum.**
+
+```
+cd openmtc-open-source.git
+./create-binary-docker sdk
+./create-binary-docker gateway
+```
+
+This will create Docker images for the machine's architecture and store
+them in the local Docker repository. These freshly generated Docker
+images will have names dependent on your architecture.
+
+## Crossbuilding OpenMTC Docker container images
+
+Similarly to the last subsection, it is possible to crossbuild, for
+example, Docker images for ARM on an AMD64 machine.
+Therefore, an additional parameter needs to be provided to the
+`create-binary-docker` script.
+
+```
+cd openmtc-open-source.git
+./create-binary-docker --arch arm sdk
+./create-binary-docker --arch arm gateway
+```
+
+Again, all freshly generated images are stored in the machine's local
+Docker repository. Note that these images will hold "/arm" in their
+names.
+
+It is possible to export these newly created Docker images and make
+them available to the target machine by importing them there.
+
+For a description of how to `save` newly created Docker
+images, see:
+[How to save crossbuild Docker images](#how-to-save-crossbuild-docker-images).
+
+For a description of how to `load` a saved Docker image on the target
+machine, see:
+[How to load saved Docker images](#how-to-load-saved-docker-images).
+
+## How to inspect freshly generated Docker images
+
+TODO
+
+## How to save crossbuild Docker images
+
+**Disclaimer:** *Docker provides commands like export/import and
+save/load. Sadly, it is easy to get confused here. The command
+`save` is used to export a Docker image, while the command `load` is
+used to import this Docker image. The commands export/import, on the
+other hand, apply to container instances.*
+
+Assuming that the Docker images for the OpenMTC Gateway and Backend are
+named `openmtc/gateway-arm` and `openmtc/backend-arm`, it is
+possible to store these two images in one file by using the
+following:
+
+``` shell
+# Assuming this to be executed on the crossbuild host machine
+
+# Additionally assuming you have enough file space in /tmp available
+cd /tmp
+
+docker save -o openmtc-docker-images.tar openmtc/gateway-arm openmtc/backend-arm
+```
+
+## How to load saved Docker images
+
+After you have copied the tar file that stores the above-mentioned
+Docker images to the destination machine, use the following command
+to load them into the local Docker repository of the destination
+machine.
+
+``` shell
+# Assuming this to be executed on the crossbuild target machine
+
+# Additionally assuming the tar file was copied to the /tmp directory
+cd /tmp
+
+docker load -i openmtc-docker-images.tar
+```
+
+### OpenMTC development setup, for field domain only
+
+The following figure describes a simple OpenMTC development setup.
+All data entities within a oneM2M system are represented as
+*resources*. All resources are hierarchically ordered within the
+*resource tree*. For this simple OpenMTC development setup, it is
+assumed that all resources from the Application Entity (AE) will be
+accessible via the OpenMTC Gateway. This means that the whole setup
+is placed in the field domain only. Via the external REST API, a user
+is able to adjust the resource tree by using an HTTP client.
+
+ ![](./pics/openmtc-simple-setup.png)
+
+In the figure above, it is also shown that a developer will use the
+OpenMTC SDK to provide their own Application Entity (AE). This
+user-defined application entity is able to access functionality of
+the OpenMTC Gateway by using the SDK's internal API.
+
+A developer that wants to develop an AE may use this simplified
+development setup. To do so, the developer will need to run an
+OpenMTC Gateway. This can be achieved easily by using the prepared
+Docker images:
+
+```shell
+docker run -d --name gateway -p 0.0.0.0:8000:8000 -e "EXTERNAL_IP=" \
+    -e "LOGGING_LEVEL=DEBUG" openmtc/gateway-arm -vv
+```
+
+- If you need to make the OpenMTC Gateway accessible outside of
+  your host machine, set the `EXTERNAL_IP` environment variable
+  accordingly.
+- Note that the Docker image name `openmtc/gateway-arm` may vary on + your machine. Use `docker images` to identify the Docker image name + you need to use here. +- If you need to know more about detail of `-d` (detach) and the port + mapping configuration `-p 0.0.0.0:8000:8000`, check out the + [documenation as provided by Docker](https://docs.docker.com/engine/reference/run/). + + If you already want to start with OpenMTC development, jump to + [How to test your development setup?](#how-to-test-your-development-setup). + +### OpenMTC development setup, for infrastructure domain + + The following figure depicts a more complex setup of OpenMTC. Here, + the focus on AE development for infrastructure domain. The following + figure depicts this setup. + + ![](./pics/openmtc-advanced-setup.png) + + Here, the simple setup from the last subsection is extended. An AE + in the field domain acts as a producer of data. Within this example, + another AE is developed that acts as data consumer in the + infrastructure domain. Therefore, an OpenMTC Backend will be used by + the AE in infrastructure domain, to subscribe to the data from the + field domain. + + For this setup the developer will need to provide, + +1. An OpenMTC Backend + ``` + docker run -ti --name backend -p 0.0.0.0:18000:18000 -e "ONEM2M_CSE_ID=" \ + -e "ONEM2M_NOTIFICATION_DISABLED=false" openmtc/backend-arm -v + ``` + - `` : The `CSE_ID` of the Backend (e.g. `backend`) +1. An OpenMTC Gateway (note that this configuration is different to + the one from the last example, due to the connection to the OpenMTC + Backend) + ``` + docker run -d --name gateway -p 0.0.0.0:8000:8000 -e "EXTERNAL_IP=" \ + -e "ONEM2M_HTTP_TRANSPORT_PORT=8000" -e "ONEM2M_CSE_ID=" \ + -e "ONEM2M_REMOTE_CSE_POA=" -e "ONEM2M_REMOTE_CSE_ID=" \ + -e "ONEM2M_NOTIFICATION_DISABLED=false" -e "ONEM2M_REGISTRATION_DISABLED=false" \ + openmtc/gateway-arm -v + ``` + - IP configuration: `` + - `` : The `CSE_ID` of the Gateway (e.g. `gateway`) + - `` : The `CSE_ID` of the Backend (e.g. `backend`) + - `` : An URL that describes the POA fo Backend (e.g. "http://localhost:18000") + - If you need to know more about detail of `-d` (detach) and the port + mapping configuration `-p 0.0.0.0:8000:8000`, check out the + [documenation as provided by Docker](https://docs.docker.com/engine/reference/run/). + diff --git a/doc/developer-faq.md b/doc/developer-faq.md new file mode 100644 index 0000000..378c4e5 --- /dev/null +++ b/doc/developer-faq.md @@ -0,0 +1,15 @@ +# Developer FAQ + +## Q: Is it possible to crossbuild OpenMTC Docker container images? + + **A**: + + Creating Docker container images on a Raspberry Pi takes quite long. + It is possible to crossbuild Docker container images of all OpenMTC + components for ARM architecture on AMD64 based systems. + + See: [Crossbuilding OpenMTC Docker container images](./deployment-guide.md). 
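
In short, assuming the repository has been prepared as described in the deployment guide linked above, the crossbuild boils down to passing the `--arch arm` flag to the build script on the AMD64 host (the guide also covers saving and loading the resulting images):

```
cd openmtc-open-source.git
./create-binary-docker --arch arm sdk
./create-binary-docker --arch arm gateway
```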
+ + + + diff --git a/doc/example-apps/IoT-data-visualization.py b/doc/example-apps/IoT-data-visualization.py new file mode 100644 index 0000000..caa0acb --- /dev/null +++ b/doc/example-apps/IoT-data-visualization.py @@ -0,0 +1,46 @@ + +import urllib +from openmtc_app.onem2m import XAE +import uuid + + +class DataVisualization(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + period = 10 + + def _on_register(self): + # init variables + self.sensor_register = {} + self.sensor_register = [] + self.sensor_values = [] + self.name = uuid.uuid1() + self.things_name = urllib.urlopen("https://dweet.io/follow/%s" % self.name) + print "Thing name :", self.name + print "link for the current data type and values :", self.things_name.geturl() + # start endless loop + self.periodic_discover(self.remote_cse, + {'labels': ["openmtc:sensor_data"]}, + self.period, self.handle_discovery_sensor) + + def handle_discovery_sensor(self, discovery): + for uri in discovery: + self.add_container_subscription(uri, self.handle_sensor_data) + + def handle_sensor_data(self, container, content): + data ={} + self.sensor_register.append(content[0]['n']) + self.sensor_values.append(content[0]['v']) + for i, k in zip(self.sensor_register , self.sensor_values): + data.update({i: k}) + params = urllib.urlencode(data) + urllib.urlopen("https://dweet.io/dweet/for/%s?%s" % (self.name, params)) + + +if __name__ == "__main__": + from openmtc_app.flask_runner import SimpleFlaskRunner as Runner + + ep = "http://localhost:8000" + Runner(DataVisualization(), port=6050, host='auto').run(ep) + + diff --git a/doc/example-apps/data-aggregation.py b/doc/example-apps/data-aggregation.py new file mode 100644 index 0000000..7ee4be2 --- /dev/null +++ b/doc/example-apps/data-aggregation.py @@ -0,0 +1,102 @@ +import time +from collections import deque +from math import sqrt + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class DataAggregation(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + period = 10 + + def _on_register(self): + # init variables + self.sensor_register = {} + self.dev_cnt_list = [] + # start endless loop + self.periodic_discover(self.remote_cse, + {'labels': ["openmtc:sensor_data"]}, + self.period, self.handle_discovery_sensor) + + @staticmethod + def _time(): + return format(round(time.time(), 3), '.3f') + + def handle_discovery_sensor(self, discovery): + for uri in discovery: + self.sensor_register[uri] = { + 'values': deque([], 10) + } + content = self.get_content(uri) + if content: + self.handle_sensor(uri, content) + self.add_container_subscription(uri, self.handle_sensor) + + def create_sensor_structure(self, sensor_entry, content): + # dev_cnt + cnt_name = '_'.join(content[0]['bn'].split(':')[2:]) + cnt_name += '_' + content[0]['n'] + dev_cnt = Container(resourceName=cnt_name) + if dev_cnt not in self.dev_cnt_list: + sensor_entry['dev_cnt'] = dev_cnt = self.create_container(None, dev_cnt) + # mean cnt + mean_cnt = Container(resourceName='mean', labels=["openmtc:mean_data"]) + sensor_entry['mean_cnt'] = self.create_container(dev_cnt, mean_cnt) + # Standard_deviation cnt + deviation_cnt = Container(resourceName='Standard_deviation', labels=["openmtc:Standard_deviation_data"]) + sensor_entry['deviation_cnt'] = self.create_container(dev_cnt, deviation_cnt) + self.dev_cnt_list.append(dev_cnt) + else: + return dev_cnt,"already exists " + + def handle_sensor(self, container, content): + sensor_entry = self.sensor_register[container] + values = 
sensor_entry['values'] + try : + values.append(content[0]['v']) + except KeyError: + return + # check if container exists + try: + sensor_entry['dev_cnt'] + except KeyError: + self.create_sensor_structure(sensor_entry, content) + + # mean value + mean = sum(values) / len(values) + data = [{ + 'bn': content[0]['bn'], + 'n': content[0]['n'] + '_mean', + 'v': mean, + 't': self._time() + }] + + # Standard_deviation value + num_item = len(values) + standard_mean = sum(values) / num_item + differences = [((x - standard_mean) ** 2) ** 2 for x in values] + ssd = sum(differences) + variance = ssd / num_item + sd = sqrt(variance) + print sd + deviation_data = [{ + 'bn': content[0]['bn'], + 'n': content[0]['n'] + '_Standard_deviation', + 'v': sd, + 't': self._time() + }] + try: + data[0]['u'] = content[0]['u'] + except KeyError: + pass + self.push_content(sensor_entry['mean_cnt'], data) + self.push_content(sensor_entry['deviation_cnt'], deviation_data) + + +if __name__ == "__main__": + from openmtc_app.flask_runner import SimpleFlaskRunner as Runner + + ep = "http://localhost:8000" + Runner(DataAggregation(), port=6050, host='auto').run(ep) diff --git a/doc/example-apps/simple-decision-2.py b/doc/example-apps/simple-decision-2.py new file mode 100644 index 0000000..ff477f2 --- /dev/null +++ b/doc/example-apps/simple-decision-2.py @@ -0,0 +1,22 @@ +from openmtc_app.onem2m import XAE + + +class SimpleDecision2(XAE): + remove_registration = True + sensor = "onem2m/zigbeeipe-0/devices/ZBS122009491/sensor_data/brightness" + actuator = "onem2m/cul868ipe-0/FS20_ST3_16108_1/Switch" + + def _on_register(self): + + def handle_brightness(container, content): + command = "ON" if content[0]['v'] < 100.0 else "OFF" + self.push_content(self.actuator, command) + + self.add_container_subscription(self.sensor, handle_brightness) + + +if __name__ == "__main__": + from openmtc_app.flask_runner import SimpleFlaskRunner as Runner + + ep = "http://localhost:8000" + Runner(SimpleDecision2(), port=6050, host='auto').run(ep) diff --git a/doc/example-apps/simple-decision.py b/doc/example-apps/simple-decision.py new file mode 100644 index 0000000..00017c9 --- /dev/null +++ b/doc/example-apps/simple-decision.py @@ -0,0 +1,50 @@ +from openmtc_app.onem2m import XAE + + +class SimpleDecision(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + period = 10 + + def _on_register(self): + # init variables + self.switchContainers = [] + # start endless loop + self.periodic_discover(self.remote_cse, + {'labels': ["openmtc:actuator_data"]}, + self.period, self.handle_discovery_switch) + self.periodic_discover(self.remote_cse, + {'labels': ["openmtc:sensor_data:command"]}, + self.period, self.handle_discovery_command) + self.periodic_discover(self.remote_cse, + {'labels': ["openmtc:sensor_data:brightness"]}, + self.period, self.handle_discovery_brightness) + + def handle_discovery_switch(self, discovery): + for uri in discovery: + self.switchContainers.append(uri) + + def handle_discovery_command(self, discovery): + for uri in discovery: + self.add_container_subscription(uri, self.handle_command) + + def handle_discovery_brightness(self, discovery): + for uri in discovery: + self.add_container_subscription(uri, self.handle_brightness) + + def handle_command(self, container, content): + command = "ON" if content[0]['v'] == 1 else "OFF" + for switch in self.switchContainers: + self.push_content(switch, command) + + def handle_brightness(self, container, content): + command = "ON" if content[0]['v'] < 500.0 else "OFF" + for 
switch in self.switchContainers: + self.push_content(switch, command) + + +if __name__ == "__main__": + from openmtc_app.flask_runner import SimpleFlaskRunner as Runner + + ep = "http://localhost:8000" + Runner(SimpleDecision(), port=6050, host='auto').run(ep) diff --git a/doc/example-apps/start-simple-app b/doc/example-apps/start-simple-app new file mode 100755 index 0000000..b50d089 --- /dev/null +++ b/doc/example-apps/start-simple-app @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +base_path=$(dirname "$(readlink -f "${0}")") + +################################################################################ +# set app_file +declare -a app_array + +app_array=($(find ${base_path} -name "*.py")) +array_length=${#app_array[@]} + +# print possibilities +for i in $(seq 1 ${array_length}); do + path=${app_array[$[${i}-1]]} + echo "[${i}] $(basename ${path})" +done + +# read choice +while true; do + read -n 2 -p "Choose the app to start: " choice + + [[ ${choice} =~ ^[0-9]+$ ]] && \ + [ ${choice} -gt 0 -a ${choice} -le ${array_length} ] && \ + echo && break + + echo " Wrong choice. Do it again." +done + +app_file=${app_array[$[${choice}-1]]} + +################################################################################ +# run app_file +cd ${base_path} +. ../../common/prep-env.sh +python ${app_file} diff --git a/doc/install-sdk.md b/doc/install-sdk.md new file mode 100644 index 0000000..46e62d6 --- /dev/null +++ b/doc/install-sdk.md @@ -0,0 +1,54 @@ +# Installation of the OpenMTC SDK + + +## Requirements + +* Python (only version 2.7 is supported) + +**Note**: Only the *CPython* implementation (the default interpreter) of Python has been tested. *PyPy* might work as well, possibly with some minor adjustments. *Jython* is known not to work since it lacks support for compiled extensions. + +In order to install the `gevent` package, development headers for both python and libev as well as a C-Compiler and associated toolchain might be required. To install these along with the pip tool the following commands might be used: + +Debian based systems (including Ubuntu): + +``` sh +$ sudo apt-get install python-pip libev-dev python-dev gcc make automake +``` + +Redhat based systems (including Fedora, Centos): + +``` sh +$ sudo yum install python-pip libev-devel python-devel gcc make automake +``` + +Additionally, some required Python packages need to be installed. The following command line should suffice to install the required packages: + +``` sh +$ pip2 install --user --requirement openmtc-open-source/openmtc-gevent/dependencies.txt +``` + +## Installing + +To install the OpenMTC SDK itself the following steps need to be performed: + +Change to the SDK's distribution directory: + +``` sh +$ cd openmtc-open-source +``` + +Run the installer command: + +``` sh +$ sudo python setup-sdk.py install +``` + +## Testing the Installation + +The following command can be used to test if the OpenMTC SDK has been correctly installed: + +``` sh +$ python2 -c "import openmtc; import openmtc_app" +``` + +If the SDK has been installed correctly, this command will exit successfully (exit code `0`) and not produce any output. diff --git a/doc/introduction.md b/doc/introduction.md new file mode 100644 index 0000000..9233a5d --- /dev/null +++ b/doc/introduction.md @@ -0,0 +1,97 @@ +# Introduction to OpenMTC + +## Using OpenMTC for IoT/M2M applications + +OpenMTC provides an implementation of the [oneM2M standard](http://www.onem2m.org/). 
+This allows you to develop new or test existing IoT/M2M applications, create +your own IoT/M2M infrastructure, or extend an existing one. + +## How is data represented in an OpenMTC-based system? + +In an OpenMTC system, all entities are represented as **resources**. Resources +are either: + +* **Containers** consisting of other containers (subcontainer) or content instances +* **Content Instances** holding actual values + +This results in a hierarchical **resource tree** with content instances as leafs. +Each resource within the resource tree is uniquely addressable by an unique identifier (URI). + +### Example + +Lets assume you have a ZigBee device providing informations about the current +temperature and its battery status. This will result in the following resource +tree: + +![Resource Tree](pics/example_resource_tree.png "Resource Tree") + +## How to access and provide data from/to an OpenMTC-based system? + +OpenMTC provides mechanisms to create, change, update, and delete +resources in the resource tree. Therefore, an application is needed +that will use one of the following two approaches: + +* **A REST API** (external interface) +* **The OpenMTC SDK** (internal interface) + +Using the *REST API*, it is possible to provide your application logic +by just using your preferred HTTP client. + +In addition, it is also possible to use the *OpenMTC SDK* to write your application. Common +problems, like how to create a subscription that will notify your +application whenever a particular resources are changed or have a new +content instance available, are easily to be solved, using the SDK. + +## How to structure your OpenMTC-based IoT system? + +Using OpenMTC, it is possible to create your own IoT/M2M infrastructure. +Consider an IoT system that collects data from different sensors, in +different field domains, and automatically distributes the data to +central nodes of multiple field domains. + +![Example OpenMTC Architecture](pics/openmtc-archtecture-overview.png "Example OpenMTC +Architecture") + +With OpenMTC you would setup an **OpenMTC Gateway** for every field domain. All +oneM2M devices within this field domain would register themself with the +gateway. Non-oneM2M devices would need an **Interworking Proxy** +inbetween to translate to oneM2M. +The gateways are registered at an **OpenMTC Backend** within the infrastructure domain. +**OpenMTC Applications** could either connect directly to the gateways or the backend to provide/collect data. + + +### Application (AE) + +An application is provided by you and others to access data within the OpenMTC-based IoT system. + +An example for an application is the server part of Graphical User Interface (GUI), that allows to +visualize data stored in the resource tree. +Another example would be an application using device data to do some analytics +and providing the results within the resource tree. + +#### Interworking Proxy (IPE) + +A special kind of application -- in a matter of speaking -- is an IPE. +An IPE allows to translate data from a non-oneM2M domain to a oneM2M +domain. + +An example for an IPE could be an application that reads out sensor +values from your sensor device by using a vendor-specific binary +interface and translates it to oneM2M. + + +### OpenMTC Gateway + +A software node that is central to a particular small field domain +that allows to collect data from various IPEs. A local application +can access the resource tree, via the OpenMTC Gateway. + +It is possible to create a hierarchy of OpenMTC Gateways. 
This +allows to travel data from one part in your IoT system to another +one. + +### OpenMTC Backend + +If you need a central software node within the infrastructure domain, +the OpenMTC Backend is the exact choice. Usually, it acts as the +root node within a hierarchy of OpenMTC Gateways. diff --git a/doc/onem2m-client-mqtt.md b/doc/onem2m-client-mqtt.md new file mode 100644 index 0000000..5225b09 --- /dev/null +++ b/doc/onem2m-client-mqtt.md @@ -0,0 +1,64 @@ +# The oneM2M MQTT Client +In addition to the [oneM2M HTTP Client](./sdk-client.md), the OpenMTC application framework features an MQTT client. + +## About MQTT +MQTT is short for “Message Queue Telemetry Transport,” a protocol that has originally been developed by IBM as a means of transportation for data in sensor networks. In contrast to HTTP, MQTT does not work in a request-response-manor; a publish-subscribe-approach is being embraced instead. + +For the protocol to work, an intermediary called “Broker” is required: Its role is to accept messages from connected clients and relay them among existing subscriptions. Through this centralised structure, communication through MQTT will always require at least one additional peer in comparison to other protocols supported by the framework. + +Setting up a broker is imperative, but due to the wealth of available solutions well outside the scope of this document. Exhaustive testing has been conducted against [Moquette](http://andsel.github.io/moquette/), which allows interoperability to be pretty much guaranteed for version 0.8+. For a quick start (or testing), you may use an openly available [public broker](https://github.com/mqtt/mqtt.github.io/wiki/public_brokers) such as `iot.eclipse.org`. It goes without saying that message confidentiality on public infrastructure is nil. Hence, those brokers do not present an option in production environments. Doing so is hereby strongly discouraged. + +In the event of a reduced-complexity, low-maintenance set-up being desired, using a [docker image](https://hub.docker.com/search/?isAutomated=0&isOfficial=0&page=1&pullCount=0&q=mqtt+broker&starCount=0) were recommended over a public broker. There is no absolute requirement on docker, though. + +![OpenMTC MQTT Architecture](./pics/OpenMTC-MQTT.png "OpenMTC MQTT Architecture") + +## Working With The MQTT Client +The `OneM2MMQTTClient` class implements protocol binding in accordance to [TS-0010](http://www.onem2m.org/images/files/deliverables/Release2/TS-0010-MQTT%20Protocol%20Binding-V2_4_1.pdf "oneM2M Technical Specification 0010 v2.4.1 – MQTT Protocol Binding") akin to `OneM2MHTTPClient`. The two classes have been designed to work interchangeably from an application standpoint; differences in behaviours should be strictly related to the respective underlying transport protocols. + +Actual low-level MQTT handling is performed by the [paho mqtt library](http://www.eclipse.org/paho/clients/python/). Users are encouraged to open issues with that project in case of pure MQTT havoc. + +### Establishing Connectivity +Although the interfaces of both, `OneM2MHTTPClient` and `OneM2MMQTTClient` are identical, addressing endpoints varies drastically. Through the necessity of a broker commonly reachable by two peers, said broker has to be the endpoint instead of the peer's machines. Subsequently, an address suitable for `OneM2MMQTTClient` can in general not be crafted by merely substituting `http://` with `mqtt://`. 
(A notable exception is a set-up in which all peers - including the broker - are located on one and the same machine.) + +For a simple set-up of one AE and one CSE, proceed as follows: +#### Gateway (CSE) +1. Locate the `config-gateway.json` configuration file +1. Find the `plugins`.`openmtc_cse` key +1. Add the following stanza to the list: + +```json +{ + "name": "MQTTTransportPlugin", + "package": "openmtc_cse.plugins.transport_gevent_mqtt", + "disabled": false, + "config": { + "interface": "iot.eclipse.org", + "port": 1883 + } +}, + ``` + +> **ℹ️ Hint:** A gateway is not locked in a single protocol. Multiple transport plugins can be active at the same time, allowing for a CSE to be reachable through a set of protocols. + + +> **⚠️ Warning:** For the sake of brevity, `iot.eclipse.org` is set as broker. Please consider the introduction on MQTT regarding the ramifications. + +4. Start the gateway through the `openmtc-gateway-gevent` script + +On a related note, enabling the plugin in the backend (NSE) is done in an almost identical way: Just read `config-backend.json` in step 1 and `openmtc-backend-gevent` in step 4. + +#### Application Entity +Programmatically, it is sufficient to create an instance of `OneM2MMQTTClient` with a given endpoint. In adoption of [example 8a](./training/onem2m-examples/onem2m-example-8a.py): +```python +from openmtc_onem2m.client.mqtt import OneM2MMQTTClient + +client = OneM2MMQTTClient("mqtt://iot.eclipse.org#mn-cse-1") +``` + +All subsequent examples should be modifiable in the same fashion in order to enable MQTT support. In general, adjusting endpoints and providing the proper client is concluding the required steps. + +Please note the particle of the endpoint's URL being the name of a CSE. Due to the addressing scheme in oneM2M/MQTT, a requesting entity has to know the responding entities name in advance. It should be duly noted that this is a workaround neither mandated nor sanctioned by TS-0010. In fact, the semantics of particles in MQTT-URLs are [entirely undefined](https://github.com/mqtt/mqtt.github.io/wiki/URI-Scheme). This inconvenience may or may not vanish in future releases. + +## Further Reading + - [Official MQTT Website](http://mqtt.org/) + - [MQTT on Wikipedia](https://en.wikipedia.org/wiki/MQTT) diff --git a/doc/openmtc-get-started.md b/doc/openmtc-get-started.md new file mode 100644 index 0000000..489bcff --- /dev/null +++ b/doc/openmtc-get-started.md @@ -0,0 +1,66 @@ +# Quick Start + +In this short tutorial you will: + +1. Setup an OpenMTC Gateway +2. Create an *application resource* and corresponding *content instance* via the REST API +3. Using the REST API to get this *content instance* + +Clone the OpenMTC repository +```sh +git clone https://gitlab.fokus.fraunhofer.de/OpenMTC/openmtc-open-source openmtc-open-source.git +``` + +All following commands should be executed from within the repo folder +```sh +cd openmtc-open-source.git +``` + +The next steps need Docker to be installed on your system. If you need help for +the installation, follow [this guide](various.md). + +Build the gateway Image +```sh +./create-binary-docker gateway +``` + +To check if this step was successful run: + +```sh +docker image ls +``` + +If there is an entry "openmtc/gateway" the image was created successfully. 
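
To narrow the listing down to the gateway image only (assuming the image name `openmtc/gateway` from the build step), the repository name can be passed to `docker image ls` directly:

```sh
docker image ls openmtc/gateway
```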
+ +Run the gateway Docker image +```sh +docker run -d --name gateway -p 0.0.0.0:8000:8000 \ + -e "LOGGING_LEVEL=DEBUG" openmtc/gateway -vv +``` + +Create an application resource on your gateway +```sh +curl -X POST localhost:8000/onem2m/ -H "Content-Type: application/vnd.onem2m-res+json" \ + -d '{"m2m:ae": {"rn": "EXAMPLE_APP_NAME", "api": "placeholder", "rr": "TRUE"}}' +``` + +Create a container resource on your gateway + +```sh +curl -X POST localhost:8000/onem2m/EXAMPLE_APP_NAME/ -H "Content-Type: application/vnd.onem2m-res+json" \ + -d '{"m2m:cnt": {"rn": "EXAMPLE_CONTAINER_NAME"}}' +``` + +Create plain text content + +```sh +curl -X POST localhost:8000/onem2m/EXAMPLE_APP_NAME/EXAMPLE_CONTAINER_NAME/ \ + -H "Content-Type: application/vnd.onem2m-res+json" \ + -d '{"m2m:cin": {"con": "EXAMPLE_VALUE", "cnf": "text/plain:0"}}' +``` + +Get the Content +```sh +curl -X GET localhost:8000/onem2m/EXAMPLE_APP_NAME/EXAMPLE_CONTAINER_NAME/latest +``` + diff --git a/doc/overview-rest-api.md b/doc/overview-rest-api.md new file mode 100644 index 0000000..cd3ffc7 --- /dev/null +++ b/doc/overview-rest-api.md @@ -0,0 +1,401 @@ +# Overview REST API + +## Introduction + +This document gives a quick overview on how the external interfaces of +an *OpenMTC Gateway* can be used to create new resources within +an OpenMTC M2M system. Based on the same external interface, it is +also shown how data can be retrieved from it. In general, all +functionality of a Gateway is exposed via the resource tree. This resource tree is accessible in a *RESTful* manner through +a HTTP interface. Thus, all examples in this document +can be performed with standard off-the-shelve HTTP client software +(curl, postman). + +### HTTP Requests and Responses as Given in this Text + +Within the step-by-step guide further below in this text, each HTTP +request and response are shown as their actual textual representation +-- unimportant details are omitted. + +A short (theoretical) example will help you to understand this. + +**Request** +``` +GET /some-example HTTP/1.1 +Host: localhost:8000 +``` + +This example, describes a `GET` request, based on HTTP/1.1, which is +send to the host `localhost` at port `8000` and requests the resource +`/some-example`. + +Another example, outlines how a `POST` request will look like. Here, +an HTTP/1.1-based `POST` request will send the below specified JSON to +host `localhost` at port `8000` and requests to send it to resource +`/some-example`. + +**Request** + +``` +POST /some-example HTTP/1.1 +Host: localhost:8000 +Content-Type: application/vnd.onem2m-res+json + +{ + "some-data": "some-string-value" +} +``` + +### Example usage of HTTP clients + +To be able to send the (theoretical) requests, given in the last +subsection, [cURL](https://curl.haxx.se/) may be used, which is a +command line program usable as HTTP client. + +#### Retrieve a resource + +```sh +curl -X GET localhost:8000/some-example +``` + +Here, the request command `GET` is send to localhost at port 8000, +requesting the resource "/some-example". If data was successfully +retrieved, by this, it will be printed to STDOUT. + +#### Push plain text data to a resource + +```sh +curl -X POST localhost:8000/some-example \ + -H "Content-Type: application/vnd.onem2m-res+json" \ + -d '{ "some-data": "some-string-value" }' +``` + +Here, plain text (JSON) data (`-d`) is pushed to the resource +"/some-example", by using the HTTP request command `POST`. 
Note that +the content type of the given data is specified via an additional HTTP +header (`-H`). If the provided JSON was successfully uploaded, a new +content instance for this resource is provided. + +#### Push base64 encoded data to a resource + +Sometimes, it is needed to encode given data -- for example a JSON -- +as a base64 string. This can be done, at the command line, in the +following way: + +```sh +echo '{ "some-data": "some-string-value" }' | base64 +``` + +Which will return following base64 encoded string: + +`eyAic29tZS1kYXRhIjogInNvbWUtc3RyaW5nLXZhbHVlIiB9Cg==` + +Pushing the same data as from the last subsection as a base64 encoded +string will work like this: + +```sh +curl -X POST localhost:8000/some-example \ + -H "Content-Type: application/vnd.onem2m-res+json" \ + -d 'eyAic29tZS1kYXRhIjogInNvbWUtc3RyaW5nLXZhbHVlIiB9Cg==' +``` + +If you need to do this as a single line command, just combine the two +last commands in the following way: + +```sh +curl -X POST localhost:8000/some-example \ + -H "Content-Type: application/vnd.onem2m-res+json" \ + -d $(echo '{ "some-data": "some-string-value" }' | base64) +``` + +### A step-by-step Guide for OpenMTC's Rest API + +**Step 1:** Create a new application entity within a given OpenMTC +Gateway: [AE creation](#create-an-application-resource). + +**Step 2:** Check the stored information about your newly created +application entity: +[Application Resource Retrieval](#retrieve-information-of-a-specific-application-resource). + +**Step 3:** To be able to structure information provided by your newly +created application entity, you should create a new oneM2M container +within the hierarchy of your application: [Create an Application Resource](#create-an-application-resource). + +**Step 4:** Check whether your newly created oneM2M container was +created correctly: [Retrieve Information of a Specific Container Resource](#retrieve-information-of-a-specific-container-resource). + +**Step 5:** Push new data (content instance) to your newly created +application container. If your new content instance is supposed to +store plain text value: +[Create plain text value Content Instance](#create-a-plain-text-content-instance). If your new content instance is supposed to store +base64-encoded values: +[Create base64-encoded Content Instance](#create-a-base64-encoded-content-instance). + +**Step 6:** Finally, check whether your data (content instance) was +pushed correctly: +[Retrieve Latest Content Instances](#retrieve-latest-content-instances). 
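
The individual steps are described in detail in the sections below. As a quick reference, the whole walkthrough condenses into a handful of cURL calls; the following is a minimal sketch assuming a Gateway listening on `localhost:8000` and the placeholder names used throughout this document:

```sh
# Step 1: create the application entity (AE)
curl -X POST localhost:8000/onem2m/ \
    -H "Content-Type: application/vnd.onem2m-res+json" \
    -d '{"m2m:ae": {"rn": "EXAMPLE_APP_NAME", "api": "placeholder", "rr": "TRUE"}}'

# Step 2: retrieve the AE to verify the registration
curl -X GET localhost:8000/onem2m/EXAMPLE_APP_NAME

# Step 3: create a container below the AE
curl -X POST localhost:8000/onem2m/EXAMPLE_APP_NAME/ \
    -H "Content-Type: application/vnd.onem2m-res+json" \
    -d '{"m2m:cnt": {"rn": "EXAMPLE_CONTAINER_NAME"}}'

# Step 4: retrieve the container to verify it was created
curl -X GET localhost:8000/onem2m/EXAMPLE_APP_NAME/EXAMPLE_CONTAINER_NAME

# Step 5: push a plain text content instance
curl -X POST localhost:8000/onem2m/EXAMPLE_APP_NAME/EXAMPLE_CONTAINER_NAME/ \
    -H "Content-Type: application/vnd.onem2m-res+json" \
    -d '{"m2m:cin": {"con": "EXAMPLE_VALUE", "cnf": "text/plain:0"}}'

# Step 6: retrieve the latest content instance
curl -X GET localhost:8000/onem2m/EXAMPLE_APP_NAME/EXAMPLE_CONTAINER_NAME/latest
```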
+ +## Create an Application Resource + +**Request** + +``` +POST /onem2m +Host: localhost:8000 +Content-Type: application/vnd.onem2m-res+json + +{ + "m2m:ae": { + "rn": "EXAMPLE_APP_NAME", + "api": "placeholder", + "rr": "TRUE" + } +} +``` + +**Response** + +```json +{ + "m2m:ae": { + "ri":"ae1", + "nl":"dummy", + "rr":true, + "ty":2, + "et":"2017-03-02T16:46:13.350093+00:00", + "lt":"2017-03-02T16:12:53.350093+00:00", + "api":"placeholder", + "aei":"C1", + "pi":"cb0", + "rn":"EXAMPLE_APP_NAME", + "ct":"2017-03-02T16:12:53.350093+00:00" + } +} +``` + +## Retrieve Information of a Specific Application Resource + +**Request** + +``` +GET /onem2m/EXAMPLE_APP_NAME HTTP/1.1 +Host: localhost:8000 +``` + +**Response** + +```json +{ + "m2m:ae": { + "ri":"ae0", + "nl":"dummy", + "rr":true, + "ty":2, + "et":"2017-03-02T16:54:10.097197+00:00", + "ch": [{ + "typ":3,"nm":"EXAMPLE_CONTAINER_NAME", + "val":"cnt0" + }], + "lt":"2017-03-02T16:20:50.097197+00:00", + "api":"placeholder", + "aei":"C0", + "pi":"cb0", + "rn":"EXAMPLE_APP_NAME", + "ct":"2017-03-02T16:20:50.097197+00:00" + } +} +``` + +## Create a Container Resource + +**Request** + +``` +POST / HTTP/1.1 +Host: localhost:8000 +Content-Type: application/vnd.onem2m-res+json + +{ "m2m:cnt": { + "rn": "EXAMPLE_CONTAINER_NAME" + } +} +``` + +**Response** + +```json +{ + "m2m:cnt": { + "cr":"nobody", + "et":"2017-03-02T16:54:19.216702+00:00", + "ty":3, + "lt":"2017-03-02T16:20:59.216702+00:00", + "rn":"EXAMPLE_CONTAINER_NAME", + "ct":"2017-03-02T16:20:59.216702+00:00", + "ri":"cnt0", + "cni":0, + "cbs":0, + "pi":"ae0", + "st":"0" + } +} +``` + +## Retrieve Information of a Specific Container Resource + +**Request** + +``` +GET /onem2m/EXAMPLE_APP_NAME/EXAMPLE_CONTAINER_NAME HTTP/1.1 +Host: localhost:8000 +``` + +**Response** + +```json +{ + "m2m:cnt": { + "cr":"nobody", + "et":"2017-03-02T16:54:19.216702+00:00", + "ty":3, + "lt":"2017-03-02T16:20:59.216702+00:00", + "rn":"EXAMPLE_CONTAINER_NAME", + "ct":"2017-03-02T16:20:59.216702+00:00", + "ri":"cnt0", + "cni":0, + "cbs":0, + "pi":"ae0", + "st":"0" + } +} +``` + + + +## Create a Plain-Text Content Instance + +**Request** + +``` +POST /onem2m/EXAMPLE_APP_NAME/EXAMPLE_CONTAINER_NAME/ HTTP/1.1 +Host: localhost:8000 +Content-Type: application/vnd.onem2m-res+json + +{ + "m2m:cin": { + "con": "EXAMPLE_VALUE", + "cnf": "text/plain:0" + } +} +``` + +**Response** + +```json +{ + "m2m:cin": { + "ri":"cin1", + "ty":4, + "st":"0", + "cnf":"text/plain:0", + "lt":"2017-03-02T16:37:23.963247+00:00", + "et":"2017-03-02T17:10:43.963247+00:00", + "cs":13, + "pi":"cnt0", + "rn":"contentInstance-v2HDJeljran3jxPX", + "con":"EXAMPLE_VALUE", + "ct":"2017-03-02T16:37:23.963247+00:00" + } +} +``` + +## Create a Base64-Encode Content Instance + + For this subsection it is assumed that data represented as JSON will + used to create a new content instance. Therefore, following + example data: + ``` + { + "foo": 42, "bar": 42 + } + ``` + needs to be trans-coded to its base64 string: + ``` + eyJmb28iOiA0MiwgImJhciI6IDQyfQo= + ``` + + To be able to execute the trans-coding, be reminded of + subsection + [Push base64 encoded data to a resource](#push-base64-encoded-data-to-a-resource). 
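
For reference, the base64 string above can be reproduced with the shell one-liner from that subsection; note that the trailing newline added by `echo` becomes part of the encoded payload:

```sh
echo '{"foo": 42, "bar": 42}' | base64
# eyJmb28iOiA0MiwgImJhciI6IDQyfQo=
```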
+ +**Request** + +``` +POST /onem2m/EXAMPLE_APP_NAME/EXAMPLE_CONTAINER_NAME/ HTTP/1.1 +Host: localhost:8000 +Content-Type: application/vnd.onem2m-res+json + +{ + "m2m:cin": { + "con": "eyJmb28iOiA0MiwgImJhciI6IDQyfQo=", + "cnf": "application/json:1" + } +} +``` + +**Response** + +```json +{ + "m2m:cin": { + "ri":"cin2", + "ty":4, + "st":"0", + "cnf":"application/json:1", + "lt":"2017-03-02T16:41:02.060806+00:00", + "et":"2017-03-02T17:14:22.060806+00:00", + "cs":32, + "pi":"cnt0", + "rn":"contentInstance-ccUmIDHZ2jvtUWyQ", + "con":"eyJmb28iOiA0MiwgImJhciI6IDQyfQo=", + "ct":"2017-03-02T16:41:02.060806+00:00" + } +} +``` + +## Retrieve Latest Content Instances + +**Request** + +``` +GET /onem2m/EXAMPLE_APP_NAME/EXAMPLE_CONTAINER_NAME/latest HTTP/1.1 +Host: localhost:8000 +``` + +**Response** + +```json +{ + "m2m:cin": { + "ri":"cin2", + "ty":4, + "st":"0", + "cnf":"application/json:1", + "lt":"2017-03-02T16:41:02.060806+00:00", + "et":"2017-03-02T17:14:22.060806+00:00", + "cs":32, + "pi":"cnt0", + "rn":"contentInstance-ccUmIDHZ2jvtUWyQ", + "con":"eyJmb28iOiA0MiwgImJhciI6IDQyfQo=", + "ct":"2017-03-02T16:41:02.060806+00:00" + } +} +``` + +## Reference: Short Resource names and Resource types + + [Resource names and types](reference-doc/resource-names-and-types.md) provides a + reference of how names of resources are mapped to their short names + but also provides a table that reveals the numerical representation + of resource types. + diff --git a/doc/pics/OpenMTC-MQTT.png b/doc/pics/OpenMTC-MQTT.png new file mode 100644 index 0000000..82990db Binary files /dev/null and b/doc/pics/OpenMTC-MQTT.png differ diff --git a/doc/pics/example_resource_tree.png b/doc/pics/example_resource_tree.png new file mode 100644 index 0000000..2ed455b Binary files /dev/null and b/doc/pics/example_resource_tree.png differ diff --git a/doc/pics/openmtc-advanced-setup.png b/doc/pics/openmtc-advanced-setup.png new file mode 100644 index 0000000..aa51199 Binary files /dev/null and b/doc/pics/openmtc-advanced-setup.png differ diff --git a/doc/pics/openmtc-archtecture-overview.png b/doc/pics/openmtc-archtecture-overview.png new file mode 100644 index 0000000..3c929ff Binary files /dev/null and b/doc/pics/openmtc-archtecture-overview.png differ diff --git a/doc/pics/openmtc-simple-setup.png b/doc/pics/openmtc-simple-setup.png new file mode 100644 index 0000000..9011d78 Binary files /dev/null and b/doc/pics/openmtc-simple-setup.png differ diff --git a/doc/pics/sources/example_resource_tree.xml b/doc/pics/sources/example_resource_tree.xml new file mode 100644 index 0000000..6cd3d9b --- /dev/null +++ b/doc/pics/sources/example_resource_tree.xml @@ -0,0 +1 @@ 
+7Zvfb5s6FMf/mjzuCmxDkse163YnrVKlPmy7L5MXXOCO4Mw4TbK/fgbsgA2ZCHEh6kilDY6N8Y8PXx/OSWbwdr3/wPAmuqcBSWbACfYz+G4GwAL44t/ccCgNnrssDSGLg9LkVobH+BeRRkdat3FAMq0ipzTh8UY3rmiakhXXbJgxutOrPdFEv+sGh6RheFzhpGn9HAc8MoaV2/8lcRipO7u+HN93vPoRMrpN5f1mAD4Vn7J4jVVbcqBZhAO6q5ng3QzeMkp5ebTe35Ikn1o1beV170+UHvvNSMq7XABVP/hBjZ0EYirkKWU8oiFNcXJXWW+K8ZG8BUecRXydiENXHJJ9zL/k5n+AJ0+/qqKUs8MXeUlx8rWoWNRLg7f5oglDSlNSWt7HSSKr/084P0hI8JZTYao69onSjbxHxhn9QW5pQlkxFOgXn2OJWkoxyzdPovlaTblI8CZkOIhF/1SZ7NATTbnsAYDyvHZ5+RH25vzLJcnolq3kDANJNGYhkbVQacrnvnaZXLMPhK6JmDFRgZEE8/hZxxRL2sNjvWrFxYFc9HYAZF+ecbIlClk/4XKIGhn+zy1VBW+yYjLeigruYrOvCouHUs1LZVQzVDP5Yf6/mN57cK/uKXpb3rYsbLBZkZev9y6KOXnc4GJed0KIdBqPz1bOUJjgLJPHDU4WTv5nUCFX/kIgmuSdROSZME72f1x+WQocr7zkoMvmrtIqVz3ZUU2nlO0SYCCYFGNwxUBNxViMpRhoIMVA7/K/pmL8F4c3hHx8uJtE4zzRmDuaaHhDigaaRGNw0Vg0RUNtFMOrxmJkPyMgz/FKvFJMmnGOZkCoOxrLITXDnzRjcM1QAqGJBhhLNFRvRlONpZcL1yQZZ0gGcnQ346gPQ2iGQnXkbeZbgDmeuDmPG994p50PyY1/OTfoEm5WojExdx/TjONUrP3Z7Dh/Mzse1DVn0ICIa9+1LVf7O+acsMM3gQTfTp7rhUgM+bqrsh0aEqYv2/As6+syvrtYecXSY1aOseZN11zp0mHWFamTz4nafM6u4a3acnotq6lsnV1TeYcHGheP7omAK/QNSsrxyKvq+RazoYXeEDAbKieh0ZBABR9q1TZ5hex0hz0f6h2WA6gALluscD7Oabe0wUT4OYSrHGafUMw4hCNTBzsT7hmPyuJlCEfQIBzZJRyqLWcivAvhoEm4ov5KCfccS4Qj74UIXy6MJ8ky4XAi/AzCYQvhXSNjIxE+t0S4B4bxUhCwTLj3ygkPcBYd3/Xyk4finTEtLMBBf4K7DvKVcQx14UPIFsfLbhz3QW3+ylGzKqYq3tYrzTCOmM5dSxD6L/TKZ7oLwLaYtsW5JsJPEd6WfZ8It0u4b5dwtc1YzAJM0f1TD8sZodylETMb8lsIqC0MYCO6n5E0o2zKFF6eKYRDZpjVK591HjhZbwjDfMt66MNfzYOZ6oFDZo5RWxBl2jNGZ8LYM9CQGWHU9hXp1+Qp2wo7dP3VyzD+MHSMaFTvsAPS/dSO8eE+HusU4OpG2nWlk6FvKcAFXR3Zl4tvobZvSE2kNUm7sryumSbtj9pcZ9Y1d0qLrL32WKq1/fO6AkqNdE5f1hDSG3LNRFZv1sRp9ePqsnr1A3Z49xs= \ No newline at end of file diff --git a/doc/pics/sources/openmtc-advanced-setup.odg b/doc/pics/sources/openmtc-advanced-setup.odg new file mode 100644 index 0000000..31d02f3 Binary files /dev/null and b/doc/pics/sources/openmtc-advanced-setup.odg differ diff --git a/doc/pics/sources/openmtc-archtecture-overview.odg b/doc/pics/sources/openmtc-archtecture-overview.odg new file mode 100644 index 0000000..f3a09f8 Binary files /dev/null and b/doc/pics/sources/openmtc-archtecture-overview.odg differ diff --git a/doc/pics/sources/openmtc-simple-setup.odg b/doc/pics/sources/openmtc-simple-setup.odg new file mode 100644 index 0000000..beb2957 Binary files /dev/null and b/doc/pics/sources/openmtc-simple-setup.odg differ diff --git a/doc/reference-doc/gateway-and-backend-configuration.md b/doc/reference-doc/gateway-and-backend-configuration.md new file mode 100644 index 0000000..f2a5373 --- /dev/null +++ b/doc/reference-doc/gateway-and-backend-configuration.md @@ -0,0 +1,234 @@ +# Gateway and backend configuration + +## global + +```json +"global": { + "additional_host_names": [], + "default_content_type": "application/json", + "default_lifetime": 3600, + "disable_forwarding": false, + "require_auth": false +} +``` + +| Name | Mandatory/Optional | Type | Default | Supported Values | Description | NOTE | +| :------- | :------------------------: | :----: | :-------- | :--------------------- | :------------- | :--------| +| additional_host_names | Mandatory | List | [] | | A list of additional hostnames (or pairs) to consider as "local" to the system. Useful when dealing with NAT. | NOT USED in OOS | +| default_content_type | Mandatory | String | *application/json* |
  • *application/json*
  • *application/vnd.onem2m-res+json*
  • *application/vnd.onem2m-ntfy+json*
  • *application/vnd.onem2m-attrs+json*
  • *text/plain*
| The default content type of the response. | VALUE NOT CHECKED; NOT REALLY USED/Overwritten | +| default_lifetime | Optional | Number | 3600 | | The default lifetime for resources in seconds. | +| disable_forwarding | ? | ? | ? | ? | ? | NOT USED in OOS | +| require_auth | ? | Boolean | true | | Reject any request that is lacking authentication information. | NOT USED in OOS | + +## database + +```json +"database": { + "driver": "openmtc_server.db.nodb2.NoDB2", + "dropDB": true +} +``` +| Name | Mandatory/Optional | Type | Default | Supported Values | Description | NOTE | +| :------- | :------------------------: | :----: | :-------- | :--------------------- | :------------- | :--------| +| driver | Mandatory | String | openmtc_server.db.nodb2.NoDB2 | openmtc_server.db.nodb2.NoDB2 | The appropriate gevent db adapter to use. | | +| dropDB | Optional | Boolean | true | true/false | Initially deletes the database. | NO EFFECT in OOS when only using the NoDB2 database adapter | + +## logging + +```json +"logging": { + "file": "/var/log/openmtc/gateway.log", + "level": "DEBUG" +} +``` +| Name | Mandatory/Optional | Type | Default | Supported Values | Description | NOTE | +| :------- | :------------------------: | :----: | :-------- | :--------------------- | :------------- | :--------| +| file | Optional | String | null |
  • null
  • path to file
| The path of a file to which log output is additionally written. The parent directory must already exist and be writable. If not specified or set to *null*, file logging is not used. | | +| level | Optional | String | WARN |
  • ERROR
  • WARN
  • INFO
  • DEBUG
| The level that is used for logging. If not specified, the default log level (WARN) is used.
 Verbose logging can be enabled when executing the *run-gateway* or *run-backend* start scripts with the **-v** option. Repeating the option selects one of the following log levels:
  • -v : INFO
  • -vv : DEBUG
| | + +## onem2m + +```json +"onem2m": { + "accept_insecure_certs": false, + "cse_base": "onem2m", + "cse_id": "mn-cse-1", + "cse_type": "MN_CSE", + "overwrite_originator": { + "enabled": false, + "originator": "/openmtc.org/mn-cse-1" + }, + "sp_id": "openmtc.org", + "ssl_certs": { + "ca": "certs/ca-chain.cert.pem", + "crt": "certs/mn-cse-1-client-server.cert.pem", + "key": "certs/mn-cse-1-client-server.key.pem", + } +} +``` + +| Name | Mandatory/Optional | Type | Default | Supported Values | Description | NOTE | +| :------- | :------------------------: | :----: | :-------- | :--------------------- | :------------- | :--------| +| accept_insecure_certs | Optional | Boolean | not set (should be false) | true/false | When set to *true* the HTTP client of the CSE will not verify if the hostname of a remote CSE/server matches any of the entries in the subjectAltName or commonName of the certificate. | TODO: DEFAULT VALUE; if missing in config => geventhttpclient connectionpool: "if not self.insecure (None)" will every time check if match of hostname and peercert info | +| cse_base | Optional | String | onem2m | | The name of the *\* resource. | | +| cse_id | Optional | String | mn-cse-1 | | The unique identifier of the CSE. | | +| cse_type | Optional | String | MN-CSE |
  • IN_CSE
  • MN_CSE
  • AEN_CSE
| The type of the CSE. | | +| overwrite_originator | Optional | | | | Enables to overwrite the originator information of the CSE. Instead of using the *sp_id* and *cse_id* which is set in the *onem2m* section of the config, the originator specified by *overwrite_originator.originator* is used. May be applied, when using certificates to match the originator of the CSE and the originator included in the certificate using the subjectAltName. | | +| overwrite_originator.enabled | Optional | Boolean | false | true/false | Enables overwriting of the originator, if set to *true*. | | +| overwrite_originator.originator | Optional | String | "" (empty string) | | The originator which is used by the CSE when sending requests. | | +| sp_id | Optional | String | openmtc.org | | The unique identifier of the M2M Service Provider. | | +| ssl_certs | ? | | | | When using SSL this section provides the private key, certificate and certificate chain. | | +| ssl_certs.ca | ? | String | | | The path of the certificate chain file. | TODO: fix when missing | +| ssl_certs.crt | ? | String | | | The path of the certificate file. | TODO: fix when missing | +| ssl_certs.key | ? | String | | | The path of the key file. | TODO: fix when missing | +| | | | | | | | + + +## plugins.openmtc_cse + +Plugins are what enriches the core component of OpenMTC with functionality. +The ``plugins`` section of the configuration file consists of a list (``[...]``) of plugin entries. +Each plugin entry is a JSON Object (``{...}``) which consists of the following common parameters: + +| Name | Mandatory/Optional | Type | Default | Description | +| :------- | :------------------------: | :----: | :-------- | :-------------- | +| name | Mandatory | String | | The name of the class providing the plugin's functionality. | +| package | Mandatory | String | | The fully qualified name of the Python where the plugin implementation resides. | +| disabled | Optional | Boolean | true | Set this to true to prevent a plugin from being loaded in the first place. Useful to disable a plugin without removing its configuration altogether. | +| config | Optional | Object | {} | The plugin specific configuration. Please see the documentation of individual plugins for details. | + +### AnnouncementHandler + +TODO: Description + +```json +{ + "name": "AnnouncementHandler", + "package": "openmtc_cse.plugins.announcement_handler", + "disabled": true, + "config": { + "auto_announce": false + } +} +``` + +| Name | Mandatory/Optional | Type | Default | Description | NOTE | +| :------- | :------------------------: | :----: | :-------- | :------------- | :--------| +| config.auto_announce | Optional | Boolean | true | ? | NOT USED (part of commented-out code) | + + +### ExpirationTimeHandler + +TODO: Description + +```json +{ + "name": "ExpirationTimeHandler", + "package": "openmtc_cse.plugins.expiration_time_handler", + "disabled": true, + "config": { + "default_lifetime": 10000 + } +} +``` + +| Name | Mandatory/Optional | Type | Default | Description | +| :------- | :------------------------: | :----: | :-------- | :-------------- | +| config.default_lifetime | Optional | Number | 86400 | The default lifetime of resources in seconds. | + + +### HistoricalData + +TODO: Description + +```json +{ + "name": "HistoricalData", + "package": "openmtc_cse.plugins.historical_data_handler", + "disabled": true +} +``` + + +### HTTPTransportPlugin + +The plugin enables HTTP/HTTPS connections of the CSE. 
+ +```json +{ + "name": "HTTPTransportPlugin", + "package": "openmtc_cse.plugins.transport_gevent_http", + "disabled": false, + "config": { + "enable_https": false, + "interface": "::", + "port": 8000, + "require_cert": true + } +} +``` + +| Name | Mandatory/Optional | Type | Default | Supported Values | Description | NOTE | +| :------- | :------------------------: | :----: | :-------- | :--------------------- | :------------- | :--------| +| config.enable_https | Optional | Boolean | false | true/false | Enables secure HTTPS connections using the ssl certificate information provided in the *onem2m * section of the configuration file in the *ssl_certs* object. Only when *ca*, *crt* and *key* are given HTTPS is enabled. | | +| config.interface | Optional | String | "" | | The HTTP/HTTPS server address. | | +| config.port | Optional | Number | 8000 | | The HTTP/HTTPS port. | | +| config.require_cert | | Boolean | true | true/false | If set to true, the client must provide a certificate. | | + +### NotificationHandler + +Entities of the onem2m system can subscribe to resources. The NotificationHandler plugin sends notifications to the notificationURI attribute of the subscription whenever the subscribed resource is changed (created, updated, deleted). + +```json +{ + "name": "NotificationHandler", + "package": "openmtc_cse.plugins.notification_handler", + "disabled": false +} +``` + + +### RegistrationHandler + +The RegistrationHandler plugin registers a CSE with another CSEs. +Therefore, a remote CSE resource which contains information about the gateway CSE is created at the backend CSE. +Furthermore, a remote CSE resource which contains information about the backend CSE is created at gateway CSE. + +```json +{ + "name": "RegistrationHandler", + "package": "openmtc_cse.plugins.registration_handler", + "disabled": false, + "config": { + "labels": [ + "openmtc" + ], + "remote_cses": [ + { + "cse_base": "onem2m", + "cse_id": "in-cse-1", + "cse_type": "IN_CSE", + "own_poa": [ + "http://localhost:8000" + ], + "poa": [ + "http://localhost:18000" + ], + } + ], + "interval": 3600, + "offset": 3600 + } +} +``` + +| Name | Mandatory/Optional | Type | Default | Supported Values | Description | NOTE | +| :------- | :------------------------: | :----: | :-------- | :--------------------- | :------------- | :--------| +| config.labels | Optional | List | [] | | A list including the labels which are set in the remote CSE resource at the backend CSE. | | +| config.remote_cses.cse_base | Optional, see NOTE | String | onem2m | | The name of the *\* resource of the backend CSE. | When missing: WARNING:RegistrationHandler:Could not register: | +| config.remote_cses.cse_id | Mandatory | String | | | The CSE-ID of the backend CSE. | | +| config.remote_cses.cse_type | Mandatory | String | | IN_CSE ??? | | | +| config.remote_cses.own_poa | Optional, see NOTE | List | | | A list including the points of access of the gateway CSE which is set in the remote CSE resource at the backend. | When missing: WARNING:RegistrationHandler:Could not register: | +| config.remote_cses.poa | Optional, see NOTE | List | | | A list including the points of access of the backend CSE. | When missing: WARNING:RegistrationHandler:Could not register: | +| interval | Optional | Number | 3600 or 5 | | The expirationTime update interval. | DUPLICATED DEFAULT VALUES in Code | +| offset | Optional | Number | 3600 or 10 | | An offset added to the expirationTime to ensure it can be met early. 
| DUPLICATED DEFAULT VALUES in Code | diff --git a/doc/reference-doc/resource-names-and-types.md b/doc/reference-doc/resource-names-and-types.md new file mode 100644 index 0000000..977e5d3 --- /dev/null +++ b/doc/reference-doc/resource-names-and-types.md @@ -0,0 +1,44 @@ +# Resource names and types + +## Mapping Names of Resource to Short Names + +The following table provides an overview of resource names +(e.g. "lastModifiedTime") and their mappings to short resource names +(e.g. "lt") as used in a given JSON. + +| Resource short name | Resource long name | +|:-------------------:|:---------------------:| +| aei | AE-ID | +| ch | childResource | +| csi | CSE-ID | +| cst | CSE type | +| ct | creationTime | +| et | expirationTime | +| fu | filterUsage | +| lt | lastModifiedTime | +| poa | pointOfAccess | +| ri | resourceID | +| rn | resourceName | +| rr | **TODO** | +| rsc | ResponseStatusCode | +| srt | supportedResourceType | +| ty | resourceType | + +## Numerical Representations of Resource Types + +The table below specifies the numerical representations of a given +resource types (selection). + +| Num | Resource type | +|:---:|:-----------------:| +| 2 | AE | +| 3 | container | +| 4 | contentInstance | +| 5 | CSEBase | +| 16 | remoteCSE | +| 23 | subscription | + + +All available resource types are implemented in the enumeration class +`ResourceTypeE`, defined in +[model.py](../../common/openmtc-onem2m/src/openmtc_onem2m/model.py). diff --git a/doc/repository-structure.md b/doc/repository-structure.md new file mode 100644 index 0000000..fc0e41d --- /dev/null +++ b/doc/repository-structure.md @@ -0,0 +1,29 @@ +# OpenMTC Code Repository Structure + +The following provides a description of the structure of the OpenMTC +code repository. + +``` +openmtc-open-source +|-- common -> common part used by CSE and AE + |-- openmtc/lib + |-- openmtc/src/openmtc + |-- openmtc-onem2m/src/openmtc_onem2m +|-- doc -> tutorials, docu, example apps +|-- docker -> docker utils for CSE and SDK +|-- futile/src/futile -> needs to be checked if needed (lib of Konrad) +|-- openmtc-app/src/openmtc_app -> app framework (AE) +|-- openmtc-gevent -> start CSE components + |-- bin, etc -> for packaging (check if needed) + |-- certs -> for auth (fake cert for out-of-the-box usage?) + |-- src/openmtc_gevent + |-- config + scripts -> start scripts for gevent platform (no other probably needed) +|-- server -> CSE components + |-- openmtc-cse/src/openmtc_cse + |-- openmtc-server/src/openmtc_server +|-- testing -> testing framework +|-- util +|-- git files +|-- scripts -> create-binary-docker +|-- setup files -> CSE, SDK +``` diff --git a/doc/sdk-client.md b/doc/sdk-client.md new file mode 100644 index 0000000..e0a8e8a --- /dev/null +++ b/doc/sdk-client.md @@ -0,0 +1,685 @@ +# SDK - The low-level CSE Client + + +## Introduction + +The OpenMTC SDK offers a client module for low-level access to the +oneM2M resource tree exposed by a CSE's reference point. Currently, +only the http and https protocols are supported. + +Basically, there are different types of Common Service Entities (CSE): + +* MN-CSE: Middle Node CSE (OpenMTC Gateway) +* IN-CSE: Infrastructure Node CSE (OpenMTC Backend) + +The client module comprises classes for representing requests, +responses as well as classes that provide an abstraction for a +connection to a CSE's reference point (the actual client itself). + + +## Requests + +Requests to a CSE are called a OneM2MRequest. 
The OpenMTC SDK provides +this class for representing the different types of requests that can +be issued towards a CSE. This class resides under the +`openmtc_onem2m.transport` package. The following requests +(OneM2MOperation) are available: + +* `retrieve` +* `delete` +* `create` +* `notify` +* `update` + + +### OneM2MRequest - Retrieve + +The most trivial case of a `OneM2MRequest` is the `retrieve`. It takes +the path of the resource to be retrieved as parameter upon +construction. + +This file can be found [here](./training/onem2m-examples/onem2m-example-3.py). +``` py +# Example 3: Retrieve OneM2MRequest + +from openmtc_onem2m.transport import OneM2MRequest + +request = OneM2MRequest("retrieve", to="onem2m") + +print request.to +#>>> onem2m +``` + + +### OneM2MRequest - Delete + +Like the `retrieve` `OneM2MRequest`, a `delete` `OneM2MRequest` merely +takes the path of the resource to be deleted as parameter upon +construction. + +This file can be found [here](./training/onem2m-examples/onem2m-example-4.py). +``` py +# Example 4: Delete OneM2MRequest + +from openmtc_onem2m.transport import OneM2MRequest + +request = OneM2MRequest("delete", to="onem2m") + +print request.to +#>>> onem2m +``` + + +### OneM2MRequest - Create + +When creating a `create` `OneM2MRequest` object we need to specify the +object to be created together with the path where it is to be +created. In most cases this is done by creating an appropriate +resource object and passing it. + +This file can be found [here](./training/onem2m-examples/onem2m-example-5a.py). +``` py +# Example 5a: Create OneM2MRequest + +from openmtc_onem2m.transport import OneM2MRequest +from openmtc_onem2m.model import AE + +my_app = AE(App_ID="myApp") + +request = OneM2MRequest("create", to="onem2m", pc="my_app") + +print request.to +#>>> onem2m +print request.pc +#>>> myApp +``` + +When creating contentInstances, we can also pass in a string of raw +data. In this case, we also need to specify the mime-type of the data +via the `resource_type` parameter (ty). + +This file can be found [here](./training/onem2m-examples/onem2m-example-5b.py). +``` py +# Example 5b: Create OneM2MRequest with data + +from openmtc_onem2m.transport import OneM2MRequest +import json + +sensor_data = {"type": "temperature", + "value": 15 } + +data_string = json.dumps(sensor_data) + +request = OneM2MRequest("create", + to="onem2m", + pc=data_string, + ty="application/json") + +print request.to +#>>> onem2m +print request.pc +#>>> {"type": "temperature", "value": 15} +``` + + +### OneM2MRequest - Notify + +For `notify` `OneM2MRequest` objects the same semantics as for +`create` `OneM2MRequest` apply. + +This file can be found [here](./training/onem2m-examples/onem2m-example-6a.py). +``` py +# Example 6a: Notify OneM2MRequest + +from openmtc_onem2m.transport import OneM2MRequest +from openmtc_onem2m.model import AE + +my_app = AE(App_ID="myApp") + +request = OneM2MRequest("notify", to="onem2m", pc=my_app) + +print request.to +#>>> onem2m +print request.pc.App_ID +#>>> myApp +``` + +This file can be found [here](./training/onem2m-examples/onem2m-example-6b.py). 
+``` py +# Example 6b: Notify OneM2MRequest with data + +from openmtc_onem2m.transport import OneM2MRequest +import json + +sensor_data = {"type": "temperature", + "value": 15 } + +data_string = json.dumps(sensor_data) + +request = OneM2MRequest("create", + to="onem2m", + pc=data_string, + ty="application/json") + +print request.to +#>>> onem2m +print request.pc +#>>> {"type": "temperature", "value": 15} +``` + + +### OneM2MRequest - Update + +The `update` `OneM2MRequest` can be used to update specific attributes +of an object (AE). If the request is legal, four different cases are +distinguished: + +* If an attribute value **is provided** in the `OneM2MRequest` that + **exists** in the target resource, the CSE will simply update that + attribute in the resource representation. +* If an attribute **is not provided** in the `OneM2MRequest`, but the + attribute **exists** in the target resource, the hosting CSE will + simply leave the value of that attribute unchanged. +* If an attribute **is provided** in the `OneM2MRequest` and does + **not exist** in the target resource, the hosting CSE will create + such attribute with the provided value. +* If an attribute is **set to NULL** in the `OneM2MRequest` and + **exists** in the target resource, the hosting CSE will delete such + attribute if the deletion of the attribute is allowed by the local + policy. + +The following example shows the creation of a `update` +`OneM2MRequest`. The CSE would either update the attribute (labels) in +the resource representation if it exists already exists there, or +create the attribute labels with the provided value if it does not +exist in the CSE resource representation yet. + +This file can be found [here](./training/onem2m-examples/onem2m-example-7.py). +``` py +# Example 7: Update OneM2MRequest + +from openmtc_onem2m.transport import OneM2MRequest +from openmtc_onem2m.model import AE + +my_app = AE(App_ID="myApp", labels=["keyword1", "keyword2"]) + +request = OneM2MRequest("update", to="onem2m", pc=my_app.labels) + +print request.to +#>>> onem2m +print request.pc +#>>> [u'keyword1', u'keyword2'] +``` + + +## Responses + +Upon servicing a request, a CSE will return a `OneM2MResponse`, which +is a class of the client module. This class is defined in the +`openmtc_onem2m.transport` module and derives from the `object` base +class. The following response types are possible: + +* `Create` +* `Retrieve` +* `Update` +* `Delete` +* `Notify` +* `Execute` +* `Observe` + +An `OneM2MResponse` has the following properties: + +* `status_code` - Denotes the result status of the operation (see + below). +* `request` - The type of the operation. One of (`create`, `retrieve`, + `update`, `delete`, `notify`, `execute`, `observe`). +* `rqi` - Denotes the request identifier (`requestIdentifier`). +* `pc` - Denotes the resource content (`primitiveContent`). +* `to` - Denotes to destination of the response. + + +### Error Responses + +If an error occurs on the CSE servicing the request, the CSE will +return a `OneM2MErrorResponse`. Note that the `OneM2MErrorResponse` +class is an `Exception`. In case that any error is reported by the CSE +during processing a request, the client will *raise* an instance of +`OneM2MErrorResponse`. The `OneM2MErrorResponse` heritates from the +classes `OneM2MResponse` and `OneM2MError` (`OpenMTCError`) and is not +yet implemented (pass). + + +### Status Codes + +The `status_code` of `OneM2MResponse` objects are defined as constants +in the `openmtc_onem2m.exc` module. 
The following constants are defined:
+
+| ``STATUS`` | numeric_code | http_status_code |
+|:-----------|:------------:|:----------------:|
+| ``STATUS_ACCEPTED`` | 1000 | 202 |
+| ``STATUS_OK`` | 2000 | 200 |
+| ``STATUS_CREATED`` | 2001 | 201 |
+| ``STATUS_BAD_REQUEST`` | 4000 | 400 |
+| ``STATUS_NOT_FOUND`` | 4004 | 404 |
+| ``STATUS_OPERATION_NOT_ALLOWED`` | 4005 | 405 |
+| ``STATUS_REQUEST_TIMEOUT`` | 4008 | 408 |
+| ``STATUS_SUBSCRIPTION_CREATOR_HAS_NO_PRIVILEGE`` | 4101 | 403 |
+| ``STATUS_CONTENTS_UNACCEPTABLE`` | 4102 | 400 |
+| ``STATUS_ACCESS_DENIED`` | 4103 | 403 |
+| ``STATUS_GROUP_REQUEST_IDENTIFIER_EXISTS`` | 4104 | 409 |
+| ``STATUS_CONFLICT`` | 4015 | 409 |
+| ``STATUS_INTERNAL_SERVER_ERROR`` | 5000 | 500 |
+| ``STATUS_NOT_IMPLEMENTED`` | 5001 | 501 |
+| ``STATUS_TARGET_NOT_REACHABLE`` | 5103 | 404 |
+| ``STATUS_NO_PRIVILEGE`` | 5105 | 403 |
+| ``STATUS_ALREADY_EXISTS`` | 5106 | 403 |
+| ``STATUS_TARGET_NOT_SUBSCRIBABLE`` | 5203 | 403 |
+| ``STATUS_SUBSCRIPTION_VERIFICATION_INITIATION_FAILED`` | 5204 | 500 |
+| ``STATUS_SUBSCRIPTION_HOST_HAS_NO_PRIVILEGE`` | 5205 | 403 |
+| ``STATUS_NON_BLOCKING_REQUEST_NOT_SUPPORTED`` | 5206 | 501 |
+| ``STATUS_EXTERNAL_OBJECT_NOT_REACHABLE`` | 6003 | 404 |
+| ``STATUS_EXTERNAL_OBJECT_NOT_FOUND`` | 6005 | 404 |
+| ``STATUS_MAX_NUMBER_OF_MEMBER_EXCEEDED`` | 6010 | 400 |
+| ``STATUS_MEMBER_TYPE_INCONSISTENT`` | 6011 | 400 |
+| ``STATUS_MANAGEMENT_SESSION_CANNOT_BE_ESTABLISHED`` | 6020 | 500 |
+| ``STATUS_MANAGEMENT_SESSION_ESTABLISHMENT_TIMEOUT`` | 6021 | 500 |
+| ``STATUS_INVALID_CMDTYPE`` | 6022 | 400 |
+| ``STATUS_INVALID_ARGUMENTS`` | 6023 | 400 |
+| ``STATUS_INSUFFICIENT_ARGUMENT`` | 6024 | 400 |
+| ``STATUS_MGMT_CONVERSION_ERROR`` | 6025 | 500 |
+| ``STATUS_CANCELLATION_FAILED`` | 6026 | 500 |
+| ``STATUS_ALREADY_COMPLETE`` | 6028 | 400 |
+| ``STATUS_COMMAND_NOT_CANCELLABLE`` | 6029 | 400 |
+
+
+## Exceptions
+
+In addition to raising an instance of `OneM2MErrorResponse`, the CSE
+client might also indicate error conditions that did not occur while
+the CSE was processing the request. This will mainly happen when the
+client was unable to contact the CSE for whatever reason.
+
+Exceptions that are raised will be subclasses of the `OpenMTCError`
+class defined in the `openmtc.exc` module.
+
+
+## Using the Client
+
+The client implementation for interfacing with the HTTP interface of
+a CSE resides in the `openmtc_onem2m.client.http` module. The
+implementing class is called `OneM2MHTTPClient`. In the current
+version of the SDK, we simply import the class directly. This is
+planned to be replaced with a more sophisticated factory pattern that
+creates appropriate clients based on the transport scheme (e.g. `http`
+or `mqtt`) that is used.
+
+Client objects expose a method called `send_onem2m_request` for
+sending `OneM2MRequest` objects to a CSE.
+
+
+### Creating a Client
+
+To create a client object, we simply import the `OneM2MHTTPClient`
+class from the `openmtc_onem2m.client.http` module and create an
+instance of it with the URI of a reference point of a oneM2M CSE.
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-8a.py).
+``` py
+# Example 8a: Creating a Client
+
+from openmtc_onem2m.client.http import OneM2MHTTPClient
+
+# create a OneM2MHTTPClient object
+client = OneM2MHTTPClient("http://localhost:8000", False)
+```
+
+
+### Making Requests
+
+To retrieve a resource from the CSE's resource tree, we can use the
+`send_onem2m_request` method and pass an appropriate `OneM2MRequest`
+object.
In this case we retrieve the `CSEBase` resource of the CSE's
+resource tree. If successful, the operation returns a promise which
+contains an `OneM2MResponse` object. The `OneM2MResponse` can be
+obtained from the promise by calling `.get()`. The `content` property
+of the `OneM2MResponse` holds the appropriate `CSEBase` object.
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-8b.py).
+``` py
+# Example 8b: Making Requests
+
+from openmtc_onem2m.client.http import OneM2MHTTPClient
+from openmtc_onem2m.transport import OneM2MRequest
+
+# create a OneM2MHTTPClient object
+client = OneM2MHTTPClient("http://localhost:8000", False)
+
+# create a OneM2MRequest object
+onem2m_request = OneM2MRequest("retrieve", to="onem2m")
+# send the OneM2MRequest to the CSE
+promise = client.send_onem2m_request(onem2m_request)
+# retrieve the OneM2MResponse from the returned promise
+onem2m_response = promise.get()
+
+print onem2m_response.to
+#>>> onem2m
+print onem2m_response.response_status_code
+#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200)
+print onem2m_response.content
+#>>> CSEBase(path='None', id='cb0')
+```
+
+**Note:** This example (and most of the following ones) will only work
+as shown if a `gateway` instance is running in the background on the
+localhost. This can be launched by running the
+`openmtc-open-source/openmtc-gevent/run_gateway` script.
+
+To create a resource on the CSE, we first create the desired resource
+object and then send a `create` `OneM2MRequest`.
+
+In the following example, we will add the optional parameter
+`resourceName="MYAPP"` to the creation of the AE in order to
+facilitate the retrieval of this AE in the browser. After executing
+the example (provided a CSE is running on the localhost), the created
+AE should be retrievable at the URL
+`http://localhost:8000/onem2m/MYAPP` in a browser on the localhost.
+Further, we add the mandatory parameter `requestReachability=False`,
+which states that the created AE has no server capability and is
+therefore not reachable by other instances.
+
+For a `create` `OneM2MRequest`, there are two additional parameters:
+`ty=AE` indicates that the resource to be created on the CSE is of
+type AE (ApplicationEntity). The statement `pc=my_app` specifies what
+resource should be created on the CSE. In this case, it is the AE
+created previously.
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-10.py).
+``` py
+# Example 10: Create a resource
+
+from openmtc_onem2m.model import AE
+from openmtc_onem2m.client.http import OneM2MHTTPClient
+from openmtc_onem2m.transport import OneM2MRequest
+
+# create a OneM2MHTTPClient object
+client = OneM2MHTTPClient("http://localhost:8000", False)
+
+# create a resource to be created on the CSE
+# resourceName: (optional) for easy check in browser
+# requestReachability: (mandatory) for server capability of the AE
+my_app = AE(App_ID="myApp",
+            labels=["keyword1", "keyword2"],
+            resourceName="MYAPP",
+            requestReachability=False)
+
+# create a OneM2MRequest object of type 'create'
+# ty: resource_type of the created resource
+# pc: Resource content to be transferred
+onem2m_request = OneM2MRequest("create", to="onem2m", ty=AE, pc=my_app)
+
+# send the 'create' OneM2MRequest to the CSE
+promise = client.send_onem2m_request(onem2m_request)
+
+# retrieve the OneM2MResponse from the returned promise
+onem2m_response = promise.get()
+
+print onem2m_response.to
+#>>> onem2m
+print onem2m_response.response_status_code
+#>>> STATUS(numeric_code=2001, description='CREATED', http_status_code=201)
+print onem2m_response.content
+#>>> AE(path='None', id='ae0')
+print onem2m_response.content.App_ID
+#>>> myApp
+print onem2m_response.content.labels
+#>>> [u'keyword1', u'keyword2']
+```
+
+**Note:** If this example throws a `OneM2MErrorResponse` with
+`response_status_code: STATUS(numeric_code=4015,
+description='CONFLICT', http_status_code=409)`, then the
+`resourceName` might already be registered at the CSE. Try to alter
+the `resourceName`. ResourceNames need to be unique on the
+CSE. Alternatively, the running CSE process can be terminated and
+restarted. This avoids the need to change the `resourceName`.
+
+**Note:** At this point the application object has been created in the
+CSE's resource tree. However, the original object we created in our
+program (`my_app`) has not been altered in any
+way. Specifically, it does not contain any attributes that may have
+been set or altered by the CSE, nor has its `path` property been set.
+
+If we want to continue working with the application object, it is good
+practice to retrieve the object again via its `resourceName`.
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-11a.py).
+``` py
+# Example 11a: Create a resource (continued)
+
+from openmtc_onem2m.model import AE
+from openmtc_onem2m.client.http import OneM2MHTTPClient
+from openmtc_onem2m.transport import OneM2MRequest
+
+client = OneM2MHTTPClient("http://localhost:8000", False)
+
+my_app = AE(App_ID="myApp",
+            labels=["keyword1", "keyword2"],
+            resourceName="MYAPP1",
+            requestReachability=False)
+
+onem2m_request = OneM2MRequest("create", to="onem2m", ty=AE, pc=my_app)
+
+promise = client.send_onem2m_request(onem2m_request)
+
+onem2m_response = promise.get()
+
+print onem2m_response.response_status_code
+#>>> STATUS(numeric_code=2001, description='CREATED', http_status_code=201)
+
+# Build path to retrieve from
+path = "onem2m/" + onem2m_response.content.resourceName
+print path
+#>>> onem2m/MYAPP1
+
+# Retrieve the AE from the CSE
+onem2m_request = OneM2MRequest("retrieve", to=path)
+promise = client.send_onem2m_request(onem2m_request)
+onem2m_response = promise.get()
+
+print onem2m_response.response_status_code
+#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200)
+print onem2m_response.content
+#>>> AE(path='None', id='ae0')
+
+# Set the local AE to the retrieved content
+my_app = None
+my_app = onem2m_response.content
+
+print my_app.App_ID
+#>>> myApp
+print my_app.resourceName
+#>>> MYAPP1
+print my_app.labels
+#>>> [u'keyword1', u'keyword2']
+```
+
+**Note:** Again, if this example throws a `OneM2MErrorResponse` with
+`response_status_code: STATUS(numeric_code=4015,
+description='CONFLICT', http_status_code=409)`, then the
+`resourceName` might already be registered at the CSE. Try to alter
+the `resourceName`. Alternatively, the running CSE process can be
+terminated and restarted. This avoids the need to change the
+`resourceName`.
+
+The following example showcases how to update some fields using an
+`update` `OneM2MRequest`.
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-11b.py).
+``` py +# Example 11b: Updating a resource using OneM2MRequest Update + +from openmtc_onem2m.model import AE +from openmtc_onem2m.client.http import OneM2MHTTPClient +from openmtc_onem2m.transport import OneM2MRequest + +client = OneM2MHTTPClient("http://localhost:8000", False) + +my_app = AE(App_ID="myApp", + labels=["keyword1", "keyword2"], + resourceName="MYAPP2", + requestReachability=False) + +# Create the AE 'my_app' at the CSE +onem2m_request = OneM2MRequest("create", to="onem2m", ty=AE, pc=my_app) +promise = client.send_onem2m_request(onem2m_request) +onem2m_response = promise.get() +print onem2m_response.content.labels +#>>> [u'keyword1', u'keyword2'] + +# Retrieve the AE from the CSE and check the labels +path = "onem2m/" + onem2m_response.content.resourceName +onem2m_request = OneM2MRequest("retrieve", to=path) +promise = client.send_onem2m_request(onem2m_request) +onem2m_response = promise.get() +print onem2m_response.content.labels +#>>> [u'keyword1', u'keyword2'] + +# Update the changes labels in the remote resource +# Therefore a temporay AE object is needed +# This temporary AE object should ONLY contian the fields that need to be updated +tmp_app = AE(labels=["foo", "bar", "coffee"]) +onem2m_request = OneM2MRequest("update", to=path, pc=tmp_app) +promise = client.send_onem2m_request(onem2m_request) +onem2m_response = promise.get() +print onem2m_response.content.labels +#>>> [u'foo', u'bar', u'coffee'] + +# Set the local AE to the retrieved content +my_app = None +my_app = onem2m_response.content +print my_app.labels +#>>> [u'foo', u'bar', u'coffee'] +``` + + +### Error Handling + +The examples above have so far omitted error handling for the sake of +clarity and brevity. Obviously however, many things can go wrong at +various stages of processing and these cases need to be dealt with. + +Any errors that are returned from the CSE will be represented in the +form of an `OneM2MErrorResponse` instance. As stated before, the +`OneM2MErrorResponse` class derives from `Exception`. Consequently, +`OneM2MErrorResponse` objects are not returned from the method, +instead they are raised as exceptions. + +In addition, it is possible that the CSE could not be contacted at all +in the first place. In this case, an instance of +`openmtc.exc.ConnectionFailed` will be raised, which also derives from +`Exception`. + +**Note:** This implies that whenever one of the client methods returns +normally, we can be sure that the operation has succeeded and continue +working with the result as planned without further inspecting the +result's status. This allows a very convenient and pythonic separation +of error and result handling. + +With this in mind we can extend *Example 8b* by simply enclosing the +invocation of the client method in a `try`/`except`/`else` block. + +This file can be found [here](./training/onem2m-examples/onem2m-example-12a.py). 
+``` py
+# Example 12a: Making Requests with error handling
+
+from openmtc_onem2m.client.http import OneM2MHTTPClient
+from openmtc_onem2m.transport import OneM2MRequest, OneM2MErrorResponse
+from openmtc.exc import OpenMTCError
+
+client = OneM2MHTTPClient("http://localhost:8000", False)
+
+try:
+    onem2m_request = OneM2MRequest("retrieve", to="onem2m")
+    promise = client.send_onem2m_request(onem2m_request)
+    onem2m_response = promise.get()
+except OneM2MErrorResponse as e:
+    print "CSE reported an error:", e
+    raise
+except OpenMTCError as e:
+    print "Failed to reach the CSE:", e
+    raise
+else:
+    pass
+
+# no exception was raised, the method returned normally.
+print onem2m_response.to
+#>>> onem2m
+print onem2m_response.response_status_code
+#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200)
+print onem2m_response.content
+#>>> CSEBase(path='None', id='cb0')
+```
+
+
+### Forwarding
+
+OpenMTC will automatically handle forwarding of a `OneM2MRequest` if
+it refers to a different CSE than the one the client is connected
+to. Forwarding in OneM2M is based on CSE-IDs, whereas the equivalent
+ETSI M2M mechanism, Retargeting, is based on IP addresses.
+
+Let's suppose that a gateway is available at `localhost:8000` and has
+the CSE-ID `mn-cse-1`, and that its backend is available at
+`localhost:18000` and has the CSE-ID `in-cse-1`.
+
+Due to forwarding, the following requests will have the same results:
+
+* `localhost:8000/onem2m` and `localhost:18000/~/mn-cse-1/onem2m`
+* `localhost:8000/onem2m` and `localhost:8000/~/mn-cse-1/onem2m`
+* `localhost:8000/~/in-cse-1/onem2m` and `localhost:18000/onem2m`
+
+The following example illustrates this:
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-12b.py).
+``` py
+# Example 12b: Forwarding
+
+from openmtc_onem2m.client.http import OneM2MHTTPClient
+from openmtc_onem2m.transport import OneM2MRequest
+
+client = OneM2MHTTPClient("http://localhost:8000", False)
+
+onem2m_request = OneM2MRequest("retrieve", to="onem2m")
+onem2m_response = client.send_onem2m_request(onem2m_request).get()
+print "---> Request to: http://localhost:8000" + "/" + onem2m_request.to
+print onem2m_response.to
+#>>> onem2m
+print onem2m_response.response_status_code
+#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200)
+print onem2m_response.content
+#>>> CSEBase(path='None', id='cb0')
+
+onem2m_request = OneM2MRequest("retrieve", to="~/mn-cse-1/onem2m")
+onem2m_response = client.send_onem2m_request(onem2m_request).get()
+print "---> Request to: http://localhost:8000" + "/" + onem2m_request.to
+print onem2m_response.to
+#>>> ~/mn-cse-1/onem2m
+print onem2m_response.response_status_code
+#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200)
+print onem2m_response.content
+#>>> CSEBase(path='None', id='cb0')
+
+client.port = 18000
+onem2m_request = OneM2MRequest("retrieve", to="~/mn-cse-1/onem2m")
+onem2m_response = client.send_onem2m_request(onem2m_request).get()
+print "---> Request to: http://localhost:18000" + "/" + onem2m_request.to
+print onem2m_response.to
+#>>> ~/mn-cse-1/onem2m
+print onem2m_response.response_status_code
+#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200)
+print onem2m_response.content
+#>>> CSEBase(path='None', id='cb0')
+```
+
diff --git a/doc/sdk-datamodel.md b/doc/sdk-datamodel.md
new file mode 100644
index 0000000..e34b7ff
--- /dev/null
+++ b/doc/sdk-datamodel.md
@@ -0,0 +1,107 @@
+# SDK - The Data Model
+
+
+## Introduction
+
+The OpenMTC data model represents the
resources of the M2M resource tree(s) through classes and objects. All model classes derive from the base class `openmtc.model.Entity`. An Entity represents a data type defined in the respective standard. Instances of these classes have all the attributes of the respective type. Also, the constructors of these classes accept all relevant attributes. Attributes that are missing on construction will be initialized with default values (mostly `None` or empty lists).
+
+
+### Entities vs. Resources
+
+As mentioned above, `Entity` is the base class for all model classes. A special case of an entity is a resource. Resources are "first class" objects that are represented directly in the resource tree, e.g. the OneM2M `AE` (Application Entity) resource.
+
+
+### Different Data Models
+
+At present, OpenMTC uses two different data models:
+
+- The OneM2M data model
+- OpenMTC's internal data model (the "unified" data model)
+
+OpenMTC uses the unified data model for storing data in a database. Objects are mapped between this model and the externally visible models as needed. The reason for this is that certain resources should be visible through multiple representations. For example, a OneM2M `AE` resource will be mapped internally to a `UnifiedApplication` resource.
+
+**Note:** This mapping of models happens mostly transparently at the database adapter level. The actual mapping is performed by the `openmtc_unified` module. Code that is specific to a certain M2M standard does not need to have knowledge about the unified data model. This means that, for example, the OneM2M controller classes only ever have to work with OneM2M resource types, even though those are mapped to unified types when stored in the database.
+
+
+## Member Variables
+
+Member variables on OpenMTC model objects are of three different varieties:
+
+- attributes - Any attributes that are directly contained within an object. Examples include `App_ID` and `appName`. Upon initializing/assigning attributes, type checking and possibly conversion is performed. Illegal values will be rejected by raising an appropriate `Exception`. Attributes can be of the following types:
+  - strings (Python type `str`). By default initialized with `None`.
+  - integers (Python type `int`). By default initialized with `None`.
+  - floating point (Python type `float`). By default initialized with `None`.
+  - dates (Python type `datetime.datetime`). By default initialized with `None`.
+  - sequences (Python type `list`). By default initialized with an empty `list` (`[]`).
+  - complex objects (Python type `dict`). By default initialized with an empty `dict` (`{}`). These are used to represent objects that are not themselves addressable within the resource tree, e.g. permission objects.
+  - resources - The `latest` and `oldest` members of `ContentInstances` objects are represented as attributes. By default initialized with `None`.
+- collections - Any variable length collection of child resources held by an object. By default initialized with an empty collection.
+- subresources - All other children of a resource instance. By default initialized with a default-initialized object.
+
+
+### Common Properties
+
+The following properties exist on instances of model classes:
+
+- `path` - The path of the object, e.g. `"/onem2m/TestGIP/devices/temp02"`
+- `parent_path` (read-only) - The path of this resource's parent resource. This is derived from the resource's path, e.g.
for a resource with `path="/onem2m/TestGIP/devices/temp02"` the `parent_path` would be `"/onem2m/TestGIP/devices"` +- `name` (read-only) - the last element of the resource's path. e.g. for a resource with `path="/onem2m/TestGIP/devices/temp02"` the `name` would be `"temp02"` + + +### Introspection + +The members of each OneM2M resource class can be inspected at run-time. For this matter, each resource class has the `attributes` property, which returns a list of all attribute members. + + +## Creating Objects + +To create an object representing a resource from the OneM2M resource tree, one simply invokes its constructor. All classes can be instantiated without arguments. The following example creates an `AE` object that represents an OneM2M Application Entity resource. + +This file can be found [here](./training/onem2m-examples/onem2m-example-1.py). +``` py +# Example 1: Creating Objects + +from openmtc_onem2m.model import AE + +my_app = AE() + +print my_app.path +#>>> None +print my_app.App_ID +#>>> None +print my_app.parent_path +#>>> None +print my_app.labels +#>>> None +print my_app.attributes +#>>> [UnicodeAttribute(name="AE-ID", type=unicode), UnicodeAttribute(name="App-ID", type=unicode), ListAttribute(name="accessControlPolicyIDs", type=list), ListAttribute(name="announceTo", type=list), UnicodeAttribute(name="announcedAttribute", type=unicode), ListAttribute(name="childResources", type=list), DatetimeAttribute(name="creationTime", type=datetime), DatetimeAttribute(name="expirationTime", type=datetime), UnicodeAttribute(name="labels", type=unicode), DatetimeAttribute(name="lastModifiedTime", type=datetime), UnicodeAttribute(name="name", type=unicode), UnicodeAttribute(name="nodeLink", type=unicode), UnicodeAttribute(name="ontologyRef", type=unicode), ListAttribute(name="pointOfAccess", type=list)] +``` + +As it can be seen, all attributes of the instance have been initialized with default values. For instance, both `path` and `App_ID` were initialized with `None`, while `labels` is an empty `list`. Since `path` is `None`, `parent_path` is also `None`. + +**Note:** It is important to understand that this only created the `AE` object internally in our program but not actually in the OneM2M resource tree of a CSE! No interaction with a CSE has taken place yet. + + +### Passing Values + +We can pass values for all the attributes that are applicable to a particular resource. + +This file can be found [here](./training/onem2m-examples/onem2m-example-2.py). 
+``` py +# Example 2: Passing Values + +from openmtc_onem2m.model import AE + +my_app = AE(App_ID="myApp", labels=["keyword1", "keyword2"]) + +print my_app.path +#>>> None +print my_app.App_ID +#>>> myApp +print my_app.parent_path +#>>> None +print my_app.labels +#>>> [u'keyword1', u'keyword2'] +print my_app.attributes +#>>> [UnicodeAttribute(name="AE-ID", type=unicode), UnicodeAttribute(name="App-ID", type=unicode), ListAttribute(name="accessControlPolicyIDs", type=list), ListAttribute(name="announceTo", type=list), UnicodeAttribute(name="announcedAttribute", type=unicode), ListAttribute(name="childResources", type=list), DatetimeAttribute(name="creationTime", type=datetime), DatetimeAttribute(name="expirationTime", type=datetime), UnicodeAttribute(name="labels", type=unicode), DatetimeAttribute(name="lastModifiedTime", type=datetime), UnicodeAttribute(name="name", type=unicode), UnicodeAttribute(name="nodeLink", type=unicode), UnicodeAttribute(name="ontologyRef", type=unicode), ListAttribute(name="pointOfAccess", type=list)] +``` diff --git a/doc/sdk-framework.md b/doc/sdk-framework.md new file mode 100644 index 0000000..ad05c95 --- /dev/null +++ b/doc/sdk-framework.md @@ -0,0 +1,312 @@ +# SDK - Using the Application Framework + + +## Introduction + +To simplify application development, the OpenMTC SDK provides an application framework for writing oneM2M compliant applications with minimal effort. + +This framework allows specifying application parameters in a declarative way and is furthermore fully event driven. + +The OpenMTC application framework resides in the `openmtc_app.onem2m` module. + + +## Application Basics + +To write an application a conventional Python class has to be implemented. There are only two restrictions on this class: + +- It must derive from the provided base class `openmtc_app.onem2m.XAE` +- It must override the \_on\_register method. + +The overall flow of operations is simple. Upon startup, the framework will register the application with the CSE and once that has successfully happened, call the `_on_register` method of the application. This is the entry point where the application can start actually doing things. This includes: + +- Creating some containers +- Creating more applications +- Subscribe for data from the CSE +- Setup any device specific communication + +Upon shutdown, the application itself and any resources it has created will automatically be deleted by the framework. + +**Note:** The framework also automatically handles expiration time updates for all resources that have been created. + + +### Properties + +The following properties are available on objects of type `openmtc_app.onem2m.XAE`: + +- `application` - the `Application` object that represents this application in the CSE's resource tree. +- `client` - an instance of `openmtc_onem2m.client.http.OneM2MHTTPClient` connected to the CSE +- `logger` - a `logging.Logger` object. Please refer to the Python [logging documentation](https://docs.python.org/2/library/logging.html) for details. + + +### Methods + +The following methods are available on an `openmtc_app.onem2m.XAE` object: + +- `get_application(application, path)` + - used to retrieve an Application resource. + - `application` old app instance or appId + - `path` (optional) path in the resource tree + - Returns the `Application` object that was retrieved + +- `create_application(application, path)` + - used to create a new Application resource in the CSE's resource tree, besides the one that has been registered at startup. 
+ - `application` Application instance or appId as str + - `path` (optional) path in the resource tree at which the new application should be created. + - Returns the `Application` object that represents the application that has been created at the CSE. + +- `discover(path, filter_criteria, unstructured)` + - used to discover resources. + - `path` the target resource/path parenting the Container. e.g. the remote CSE + - `filter_criteria` (optional) FilterCriteria for the for the discovery + - `unstructured` (optional) set `discovery_result_type` + - Returns the content of the discovery + +- `create_container(target, container, labels, max_nr_of_instances)` + - used to create a Container resource as children of an application. + - `target` the target resource/path parenting the Container + - `container` the Container resource or a valid container ID + - `labels` (optional) the container's searchStrings + - `max_nr_of_instances` the container's maximum number of instances (0=unlimited) + - Returns the container that was created + +- `get_resource(path, app_local)` + - used to retrieve a resource + - `path` is the path to the resource + - `app_local` if set to `True` path will be appended to the path of the Application + +- `push_content(container, content, fmt, text)` + - used to push actual data into a container. Therefore creates a ContentInstance resource in the given container, wrapping the content. Defaults to serialising the content as JSON and base64 encodes it. Will attempt to create the container, if not found. + - `container` container at which the content should be created + - `content` the actual data. Currently, only Python `string`, `list` and `dict` objects are supported. + - `fmt` (optional) + - `text` (optional) + - Returns the created ContentInstance resource + +- `get_content(container)` + - used to retrieve the latest ContentInstance of a Container. + - `container` container to retrieve content from + - Returns the latest ContentInstance of the specified Container + +- `add_container_subscription(container, handler, data_handler, filter_criteria)` + - used to create a Subscription to the ContentInstances of the given Container. + - `container` the Container or it's path + - `handler` reference of the notification handling function + - `data_handler` (optional) reference of the function parsing/decoding the data + - `filter_criteria` (optional) FilterCriteria for the subscription + +- `emit(message, event)` + - publish data via `socket.io` to all connected clients. + - `message` is the data to be pushed. + - `event` specifies the `socket.io` event channel name. + + +### Application Shutdown + +Upon shutdown, the `shutdown` method will be called with no arguments. Implementations may override it to implement their own clean-up facilities. + +After the `shutdown` method has finished, all resources created by the application framework, including the application itself, will be removed. + + +## Writing Applications + +This example shows how a simple device application might be written. The goal of this application is to first register itself with the CSE and then continuously read sensor data from some hardware device and push it to the CSE. + +Note that the actual sensor reading parts of the application is left out - it is represented simply by an imagined `read_sensor_data` method. This example will solely focus on the OpenMTC SDK aspects of the application. 
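+
+Since reading from real hardware is out of scope here, the later examples can be exercised with a simple stand-in for the imagined `read_sensor_data` function. The following sketch is an assumption for illustration only; the module name `somewhere` (used by the import in Example 18) and the value range are not part of the SDK:
+``` py
+# Hypothetical stand-in for the imagined read_sensor_data function.
+# Save it e.g. as somewhere.py so that the import in Example 18 resolves.
+from random import uniform
+
+
+def read_sensor_data():
+    # pretend to sample a temperature sensor and return a single value
+    return round(uniform(18.0, 25.0), 2)
+```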
+
+
+### A minimal Application
+
+A minimal application is simply a class that extends the application base class `openmtc_app.onem2m.XAE` and overrides the `_on_register` method. Such an application could look like this.
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-13.py).
+``` py
+# Example 13: Minimal application
+
+from openmtc_app.onem2m import XAE
+
+class MyAE(XAE):
+    # when this is called the application is registered
+    # and can start doing something
+    def _on_register(self):
+        pass
+```
+
+Upon running, this application would be registered under the name `MyAE`.
+
+
+### Running the Application
+
+Applications are run by encapsulating them in a so-called runner. These runners provide supporting functionality such as maintaining a server component for receiving notifications.
+
+Currently, two runner implementations are provided, both built upon the popular [Flask](http://flask.pocoo.org/) framework. These are:
+
+- `openmtc_app.flask_runner.SimpleFlaskRunner`
+- `openmtc_app.flask_runner.FlaskRunner`
+
+The difference between these two is that `FlaskRunner` provides some additional functionality, which is not included in `SimpleFlaskRunner`. This is mainly related to web-browser support/integration:
+
+- `FlaskRunner` supports websockets through the [socket.io](http://socket.io/) abstraction layer.
+- `FlaskRunner` is more scalable.
+
+However, in certain cases the simpler implementation of `SimpleFlaskRunner` can have advantages as well:
+
+- `FlaskRunner` is built upon the [gevent](http://www.gevent.org/) asynchronous I/O framework. While this provides for a very scalable solution, it might produce undesirable results and strange behaviour when applications implement their own I/O mechanisms, e.g. for communicating with a device. In contrast, `SimpleFlaskRunner` does not exhibit these pitfalls.
+- While `FlaskRunner` provides a more scalable solution when a lot of I/O happens, the resource consumption of `SimpleFlaskRunner` will be lower in cases where only a few requests are performed - especially when requests are performed strictly sequentially.
+
+**Hint:** Both implementations will automatically serve static www content from a `static` directory in the application's root directory.
+
+To invoke a runner, we have to:
+
+1. instantiate our application class
+2. instantiate the runner class, passing in the application object in the runner's constructor
+3. invoke the runner's `run` method, passing the URI of the CSE we want to communicate with
+
+The following example shows how this is done:
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-14a.py).
+``` py
+# Example 14a: Invoking a FlaskRunner
+
+from openmtc_app.onem2m import XAE
+from openmtc_app.flask_runner import FlaskRunner
+
+class MyAE(XAE):
+    def _on_register(self):
+        pass
+
+app_instance = MyAE()
+runner = FlaskRunner(app_instance)
+runner.run("http://localhost:8000")
+```
+
+**Note:** This example (and most of the following ones) will only work as shown if a `gateway` instance is running in the background on the localhost. This can be launched by running the `openmtc-open-source/openmtc-gevent/run_gateway` script.
+
+At this point the runner will start the app, which results in the app being registered at the CSE. Once that has happened, the `_on_register` method will be called, upon which our application can actually start its operation.
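+
+The invocation is the same for `SimpleFlaskRunner`: the three steps listed above apply to both runner implementations. The following sketch merely swaps the runner class; it is not shipped as a numbered training example:
+``` py
+# Sketch: using SimpleFlaskRunner instead of FlaskRunner
+# (same invocation pattern as Example 14a)
+
+from openmtc_app.onem2m import XAE
+from openmtc_app.flask_runner import SimpleFlaskRunner
+
+class MyAE(XAE):
+    def _on_register(self):
+        pass
+
+app_instance = MyAE()
+runner = SimpleFlaskRunner(app_instance)
+runner.run("http://localhost:8000")
+```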
+
+Example 14a will register the application under its default application ID, which in this case is derived from the name of the application's class: `MyAE`. In some cases - for example when multiple instances of the same application class are run against the same CSE - it may be desirable to override the default application ID. This can be achieved simply by setting the `name` parameter when instantiating the application class.
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-14b.py).
+``` py
+# Example 14b: Invoking a FlaskRunner with custom name
+
+from openmtc_app.onem2m import XAE
+from openmtc_app.flask_runner import FlaskRunner
+
+class MyAE(XAE):
+    def _on_register(self):
+        pass
+
+app_instance = MyAE(name="someAppName")
+runner = FlaskRunner(app_instance)
+runner.run("http://localhost:8000")
+```
+
+The invocation in this example tells the framework to register the application under the ID `someAppName`.
+
+
+### Providing additional Information
+
+We can provide/override some static configuration information about our application in a declarative manner. This information includes:
+
+- `app_id` - The default application ID.
+- `name` - The default application name. (default = None)
+- `labels` - The default application labels.
+- `default_lifetime` - The default application lifetime. (default = 3600)
+- `max_nr_of_instances` - The default application maximum number of instances. (default = 3)
+- `cse_base` - The default application CSE-base. (default = "onem2m")
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-15.py).
+``` py
+# Example 15: Running App with Static Information
+
+from openmtc_app.onem2m import XAE
+
+class MyAE(XAE):
+    app_id = "AnotherAppID"
+    labels = ["keyword1", "keyword2"]
+```
+
+In the above example, the application would by default be registered under the ID `AnotherAppID`. Moreover, its `labels` attribute would be set to `["keyword1", "keyword2"]`.
+
+
+### Creating a Container
+
+At this point, the next step for most, if not all, device applications will be to create a container to store the sensor data they read. This is achieved by calling the `create_container` method.
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-16.py).
+``` py
+# Example 16: Creating a simple Container
+
+from openmtc_app.onem2m import XAE
+from openmtc_app.flask_runner import FlaskRunner
+
+class MyAE(XAE):
+    def _on_register(self):
+        container = self.create_container(None, "myContainer")
+
+app_instance = MyAE()
+runner = FlaskRunner(app_instance)
+runner.run("http://localhost:8000")
+```
+
+Note how we pass `None` as the first parameter to `create_container`. This tells the framework that the container should be created as a child of the registered application (this invocation is thus equivalent to `self.create_container(self.application, "myContainer")`). We could also pass a different application object as the first parameter, which we would have obtained by calling `self.create_application`.
+
+Also note how we pass a simple string object as the second parameter. This will serve as the `id` attribute of the container. The framework will internally create a `Container` object with the specified ID which will otherwise be configured with some default parameters. If we wanted to provide more details upon container creation, we would have to pass in a full container object.
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-17.py).
+``` py
+# Example 17: Creating a custom Container
+
+from openmtc_app.onem2m import XAE
+from openmtc_app.flask_runner import FlaskRunner
+from openmtc_onem2m.model import Container
+
+class MyAE(XAE):
+    def _on_register(self):
+        # create a container
+        container = Container(
+            resourceName="myContainer",
+            maxNrOfInstances=100,
+            maxByteSize=1024 ** 3)
+        container = self.create_container(None, container)
+
+app_instance = MyAE()
+runner = FlaskRunner(app_instance)
+runner.run("http://localhost:8000")
+```
+
+Here we create an explicit `Container` object which limits the container to a maximum of 100 content instances and a maximum size of `1024 ** 3` bytes (1 GiB).
+
+
+### Pushing Data
+
+The final step for our application is to read data from the actual sensor hardware and forward it to the CSE as a content instance. As mentioned before, reading the data is represented by a fictional function `read_sensor_data`, the implementation of which is out of scope for the purpose of this document. We assume it returns a single value.
+
+The modus operandi of this particular application is to simply read sensor data using the aforementioned function, forward it to the CSE, and then wait 60 seconds before starting the whole procedure again. Since this is done over and over again, we encapsulate this scheme in an endless loop.
+
+This file can be found [here](./training/onem2m-examples/onem2m-example-18.py).
+``` py
+# Example 18: Pushing Data
+
+from openmtc_app.onem2m import XAE
+from openmtc_app.flask_runner import FlaskRunner
+from time import sleep
+from somewhere import read_sensor_data
+
+class MyAE(XAE):
+    def _on_register(self):
+        container = self.create_container(None, "myContainer")
+
+        while True:
+            value = read_sensor_data()  # read measurements
+            data = {"value": value}
+            self.push_content(container, data)
+            sleep(60)
+
+app_instance = MyAE()
+runner = FlaskRunner(app_instance)
+runner.run("http://localhost:8000")
+```
+
+Note how the actual forwarding of the data is performed by using the `push_content` method. `push_content` takes as its first argument a `Container` object that specifies the destination container of the data. The second argument is simply the data that is to be pushed in the form of a Python `dict` or `list` object. Internally, the framework will wrap this in a `ContentInstance` object and forward it to the CSE.
+
diff --git a/doc/systemd.md b/doc/systemd.md
new file mode 100644
index 0000000..7562dff
--- /dev/null
+++ b/doc/systemd.md
@@ -0,0 +1,53 @@
+# OpenMTC systemd integration
+In addition to SysV init scripts, OpenMTC provides ready-to-use service files for the [systemd](https://freedesktop.org/wiki/Software/systemd/) init system.
+
+> **ℹ️ Hint:** As OpenMTC does not require privileged ports in order to function, it can be safely run from within a user's home directory. To do so, replace `sudo systemctl` with `systemctl --user`. It bears mentioning that this is a convenience option for developers and is strongly discouraged for production.
+
+## Installing Service Files
+Please refrain from copying or editing the provided files. Instead, change into the root of the openmtc repository and proceed as follows:
+```sh
+$ sudo systemctl link $(pwd)/openmtc-gevent/etc/systemd/system/openmtc-*.service
+```
+This should provide you with two new services in the systemd search path: `openmtc-gateway` and `openmtc-backend`. Confirm this by executing `sudo systemctl status openmtc-\*`.
+
+## Adjusting `ExecStart`
+By default, the service files assume the start scripts are located in `/usr/local/bin`. Chances are, this may not be the case on your system. This is how to adjust the service files properly:
+```sh
+$ sudo systemctl edit openmtc-gateway
+```
+An editor will open. Fill the presented file with the following content:
+```ini
+[Service]
+# Clear user and group
+User=
+Group=
+# This is needed to reset ExecStart before reassigning
+ExecStart=
+# Change to location on your system
+ExecStart=/opt/openmtc-gevent/bin/run-gateway
+```
+Save the file and exit the editor. Run `sudo systemctl daemon-reload` and verify the new value got picked up by executing `sudo systemctl show --property=ExecStart --no-pager openmtc-gateway`.
+
+The procedure above is identical for the `openmtc-backend` component, save for replacing `gateway` with `backend`.
+
+## Controlling the Services
+Both the gateway and the backend can be started through `sudo systemctl start openmtc-gateway` and `sudo systemctl start openmtc-backend`, respectively. They can also be started in bulk via `sudo systemctl start openmtc-gateway openmtc-backend`. Execution order is not guaranteed, though.
+
+If you always require the two services, it may be a good idea to add a dependency from the gateway to the backend as follows:
+```sh
+$ sudo systemctl add-requires openmtc-gateway.service openmtc-backend
+```
+> **⚠️ Caveat:** Please take note that the target unit name is fully specified. This is needed as systemd would otherwise assume you wanted to add a requirement to a *target*.
+
+Verify the dependency has been set in place by running `sudo systemctl list-dependencies openmtc-gateway` and checking if `openmtc-backend.service` is listed. Starting the `openmtc-gateway` service will now also invoke `openmtc-backend`. Another benefit is that `openmtc-gateway` will be reloaded if `openmtc-backend` is.
+
+Services can be shut down through `sudo systemctl stop` in the same fashion as `systemctl start`.
+
+## Monitoring
+While `systemctl status` may reveal some log lines, more telling output generated by the services can be monitored through [`journalctl`](https://www.freedesktop.org/software/systemd/man/journalctl.html). `sudo journalctl -lf --unit openmtc-\*` should present a live feed of all running openmtc services.
+
+> **ℹ️ Hint:** The road is a bit rough for those who run OpenMTC via systemd from their homedir, unfortunately. In theory, you should be good to go with `journalctl -lf --user --unit openmtc-\*`. But due to a lot of diversity in deployed systemd versions compounded by a wealth of different configurations, this may not do the trick.
In that case, - under the assumption only a single instance of each, gateway and backend is running - the fastest way to success may be this: + ```sh + $ sudo journalctl -lf _PID=$(ps ux | grep gateway_main.py | head -n1 | awk '{ print $2; }') + \ + _PID=$(ps ux | grep backend_main.py | head -n1 | awk '{ print $2; }') + ``` \ No newline at end of file diff --git a/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-01.py b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-01.py new file mode 100644 index 0000000..061dc1e --- /dev/null +++ b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-01.py @@ -0,0 +1,44 @@ +# notes: +# - this file is equivalent to onem2m-gui-sensors-final.py +# - actuator support will be added in the following files + + +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_measurements(self, container, data): + # this function handles the new data from subscribed measurements containers + print('handle measurements..') + print('container: %s' % container) + print('data: %s' % data) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-02.py b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-02.py new file mode 100644 index 0000000..f882dc4 --- /dev/null +++ b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-02.py @@ -0,0 +1,56 @@ +# notes: +# - adds variable self.actuators +# - adds periodic discovery of 'commands' containers + + +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # init variables + self.actuators = [] + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + # start periodic discovery of 'commands' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['commands']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_commands # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device 
container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_discovery_commands(self, discovery): + pass + + def handle_measurements(self, container, data): + # this function handles the new data from subscribed measurements containers + print('handle measurements..') + print('container: %s' % container) + print('data: %s' % data) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-03.py b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-03.py new file mode 100644 index 0000000..1ed07b2 --- /dev/null +++ b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-03.py @@ -0,0 +1,60 @@ +# notes: +# - extends the commands discovery handler to append new commands containers to the actuators list + + +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # init variables + self.actuators = [] + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + # start periodic discovery of 'commands' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['commands']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_commands # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_discovery_commands(self, discovery): + # for every 'commands' container discovered + for uri in discovery: + print('discovered commands container: %s' % uri) + # add discovered commands container to known actuators list + self.actuators.append(uri) + print('') + + def handle_measurements(self, container, data): + # this function handles the new data from subscribed measurements containers + print('handle measurements..') + print('container: %s' % container) + print('data: %s' % data) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-04.py b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-04.py new file mode 100644 index 0000000..99a4a0b --- /dev/null +++ b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-04.py @@ -0,0 +1,80 @@ +# notes: +# - extended the 
handle_measurements() function to implement some simple logic for actuator control: +# - 1. open or close windows based on humidity measurements +# - 2. actuate the Air-Conditioning based on temperature measurements + + +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # init variables + self.actuators = [] + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + # start periodic discovery of 'commands' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['commands']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_commands # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_discovery_commands(self, discovery): + # for every 'commands' container discovered + for uri in discovery: + print('discovered commands container: %s' % uri) + # add discovered commands container to known actuators list + self.actuators.append(uri) + print('') + + def handle_measurements(self, container, data): + print('handle_measurements...') + print('container: %s' % container) + print('data: %s' % data) + # extract information from data set + value = data['value'] + type_ = data['type'] + # simple logic to control the AirCon + if type_ == 'temperature': + if value >= 22: + data = {'Power': 'ON'} + print('Temperature = %s >= 22. Turning AirConditioning ON' % value) + else: + data = {'Power': 'OFF'} + print('Temperature = %s < 22. Turning AirConditioning OFF' % value) + # simple logic to control the Windows + elif type_ == 'humidity': + if value >= 65: + data = {'State': 'OPEN'} + print('Humidity = %s >= 65. OPEN Window' % value) + else: + data = {'State': 'CLOSE'} + print('Humidity = %s < 65. 
CLOSE Window' % value) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-05.py b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-05.py new file mode 100644 index 0000000..c6e79d5 --- /dev/null +++ b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-05.py @@ -0,0 +1,86 @@ +# notes: +# - extend the handle_measurements() function to push new commands based on measurements to the actuators + + +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # init variables + self.actuators = [] + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + # start periodic discovery of 'commands' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['commands']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_commands # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_discovery_commands(self, discovery): + # for every 'commands' container discovered + for uri in discovery: + print('discovered commands container: %s' % uri) + # add discovered commands container to known actuators list + self.actuators.append(uri) + print('') + + def handle_measurements(self, container, data): + print('handle_measurements...') + print('container: %s' % container) + print('data: %s' % data) + # extract information from data set + value = data['value'] + type_ = data['type'] + # simple logic to control the AirCon + if type_ == 'temperature': + if value >= 22: + data = {'Power': 'ON'} + print('Temperature = %s >= 22. Turning AirConditioning ON' % value) + else: + data = {'Power': 'OFF'} + print('Temperature = %s < 22. Turning AirConditioning OFF' % value) + # push the new command based on temperature measurements to all known AirCon actuators + for actuator in self.actuators: + if 'AirCon' in actuator: + self.push_content(actuator, data) + # simple logic to control the Windows + elif type_ == 'humidity': + if value >= 65: + data = {'State': 'OPEN'} + print('Humidity = %s >= 65. OPEN Window' % value) + else: + data = {'State': 'CLOSE'} + print('Humidity = %s < 65. 
CLOSE Window' % value) + # push the new command based on humidity measurements to all known Window actuators + for actuator in self.actuators: + if 'Window' in actuator: + self.push_content(actuator, data) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-final.py b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-final.py new file mode 100644 index 0000000..f6e0323 --- /dev/null +++ b/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-final.py @@ -0,0 +1,83 @@ +# notes: +# - complete example + + +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # init variables + self.actuators = [] + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + # start periodic discovery of 'commands' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['commands']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_commands # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_discovery_commands(self, discovery): + # for every 'commands' container discovered + for uri in discovery: + print('discovered commands container: %s' % uri) + # add discovered commands container to known actuators list + self.actuators.append(uri) + print('') + + def handle_measurements(self, container, data): + print('handle_measurements...') + print('container: %s' % container) + print('data: %s' % data) + # extract information from data set + value = data['value'] + type_ = data['type'] + # simple logic to control the AirCon + if type_ == 'temperature': + if value >= 22: + data = {'Power': 'ON'} + print('Temperature = %s >= 22. Turning AirConditioning ON' % value) + else: + data = {'Power': 'OFF'} + print('Temperature = %s < 22. Turning AirConditioning OFF' % value) + # simple logic to control the Windows + elif type_ == 'humidity': + if value >= 65: + data = {'State': 'OPEN'} + print('Humidity = %s >= 65. OPEN Window' % value) + else: + data = {'State': 'CLOSE'} + print('Humidity = %s < 65. 
CLOSE Window' % value) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'], # adds poas in order to receive notifications + # SSL options + originator_pre='//openmtc.org/in-cse-1', # originator_pre, needs to match value in cert + ca_certs='../../openmtc-gevent/certs/ca-chain.cert.pem', + cert_file='certs/test-gui.cert.pem', # cert file, pre-shipped and should match name + key_file='certs/test-gui.key.pem' + ) + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-01.py b/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-01.py new file mode 100644 index 0000000..d6da511 --- /dev/null +++ b/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-01.py @@ -0,0 +1,36 @@ +# notes: +# - initial app base structure +# - starts periodic discovery on registration +# - the discovery result is printed as a whole +# - this will discover EVERY new container + + +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # start periodic discovery of EVERY container + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + None, # no filter criteria + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery # callback function to return the result of the discovery to + ) + + def handle_discovery(self, discovery): + # print the discovery + print('New discovery:') + print(discovery) + print(' ') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-02.py b/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-02.py new file mode 100644 index 0000000..a976720 --- /dev/null +++ b/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-02.py @@ -0,0 +1,37 @@ +# notes: +# - adds filter criteria, to specify what to discover +# - detailed print of every uri from the discovery +# - this will only discover new container with the specific label 'measurements' +# - renamed function handle_discovery() to handle_discovery_measurements() + + +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + print('New discovery:') + # for each device container discovered + for uri in discovery: + # print content of discovery + print('uri from discovery: %s' % uri) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-03.py b/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-03.py new file mode 100644 index 0000000..b992529 --- /dev/null +++ b/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-03.py @@ -0,0 +1,46 @@ +# notes: +# - adds subscription to 
discovered containers via returned uri
+# - adds content handler for subscribed containers
+# - this will only discover and subscribe to new containers with the specific label
+# - whenever a child is created in the subscribed containers, the content handler is called
+
+
+from openmtc_app.onem2m import XAE
+
+
+class TestGUI(XAE):
+    remove_registration = True
+    remote_cse = '/mn-cse-1/onem2m'
+
+    def _on_register(self):
+        # start periodic discovery of 'measurements' containers
+        self.periodic_discover(
+            self.remote_cse,  # start directory inside cse for discovery
+            {'labels': ['measurements']},  # filter criteria (what to discover)
+            1,  # frequency of repeated discovery (in Hz)
+            self.handle_discovery_measurements  # callback function to return the result of the discovery to
+        )
+
+    def handle_discovery_measurements(self, discovery):
+        # for each device container discovered
+        for uri in discovery:
+            # subscribe to device container with handler function
+            print('Subscribing to Resource: %s' % uri)
+            self.add_container_subscription(uri, self.handle_measurements)
+
+    def handle_measurements(self, container, data):
+        # this function handles the new data from subscribed measurements containers
+        print('handle_measurements...')
+        print('container: %s' % container)
+        print('data: %s' % data)
+        print('')
+
+
+if __name__ == '__main__':
+    from openmtc_app.runner import AppRunner as Runner
+
+    host = 'http://localhost:18000'
+    app = TestGUI(
+        poas=['http://localhost:21345']  # adds poas in order to receive notifications
+    )
+    Runner(app).run(host)
diff --git a/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-final.py b/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-final.py
new file mode 100644
index 0000000..50a38ec
--- /dev/null
+++ b/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-final.py
@@ -0,0 +1,48 @@
+# notes:
+# - complete example
+
+
+from openmtc_app.onem2m import XAE
+
+
+class TestGUI(XAE):
+    remove_registration = True
+    remote_cse = '/mn-cse-1/onem2m'
+
+    def _on_register(self):
+        # start periodic discovery of 'measurements' containers
+        self.periodic_discover(
+            self.remote_cse,  # start directory inside cse for discovery
+            {'labels': ['measurements']},  # filter criteria (what to discover)
+            1,  # frequency of repeated discovery (in Hz)
+            self.handle_discovery_measurements  # callback function to return the result of the discovery to
+        )
+
+    def handle_discovery_measurements(self, discovery):
+        # for each device container discovered
+        for uri in discovery:
+            # subscribe to device container with handler function
+            print('Subscribing to Resource: %s' % uri)
+            self.add_container_subscription(uri, self.handle_measurements)
+
+    def handle_measurements(self, container, data):
+        # this function handles the new data from subscribed measurements containers
+        print('handle_measurements...')
+        print('container: %s' % container)
+        print('data: %s' % data)
+        print('')
+
+
+if __name__ == '__main__':
+    from openmtc_app.runner import AppRunner as Runner
+
+    host = 'http://localhost:18000'
+    app = TestGUI(
+        poas=['http://localhost:21345'],  # adds poas in order to receive notifications
+        # SSL options
+        originator_pre='//openmtc.org/in-cse-1',  # originator_pre, needs to match value in cert
+        ca_certs='../../openmtc-gevent/certs/ca-chain.cert.pem',
+        cert_file='certs/test-gui.cert.pem',  # cert file, pre-shipped and should match name
+        key_file='certs/test-gui.key.pem'
+    )
+    Runner(app).run(host)
diff --git
a/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-01.py b/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-01.py new file mode 100644 index 0000000..4b163df --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-01.py @@ -0,0 +1,141 @@ +# notes: +# - this file is equivalent to onem2m-ipe-sensors-final.py +# - onem2m-ipe-sensors-final.py will be completed from this point for actuator support +# - added variable self.actuators and self._command_containers + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # available actuators + actuators = [ + 'Switch-AirCon', # AirConditioning + 'Switch-Window' # Window + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + self._command_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if sensor.startswith('Temp'): + labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print 
the new data set + print ('%s: %s' % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-02.py b/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-02.py new file mode 100644 index 0000000..ad51aaa --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-02.py @@ -0,0 +1,149 @@ +# notes: +# - adds the creation of a container for each actuator of this ipe inside the devices_container + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # available actuators + actuators = [ + 'Switch-AirCon', # AirConditioning + 'Switch-Window' # Window + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + self._command_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # create container for each actuator + for actuator in self.actuators: + actuator_container = Container(resourceName=actuator) + self.create_container( + self._devices_container.path, # the target resource/path parenting the Container + actuator_container, # the Container resource or a valid container ID + max_nr_of_instances=0, # the container's max_nr_of_instances (here: 0=unlimited) + labels=['actuator'] # (optional) the container's labels + ) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if sensor.startswith('Temp'): + 
labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print ('%s: %s' % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-03.py b/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-03.py new file mode 100644 index 0000000..e70563f --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-03.py @@ -0,0 +1,160 @@ +# notes: +# - adds the creation of a commands_container inside each actuator_container +# - adds this new commands_container to the self._command_containers variable + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # available actuators + actuators = [ + 'Switch-AirCon', # AirConditioning + 'Switch-Window' # Window + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + self._command_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # create container for each actuator + for actuator in self.actuators: + actuator_container = Container(resourceName=actuator) + self.create_container( + self._devices_container.path, # the target resource/path parenting the Container + actuator_container, # the Container resource or a valid container ID + max_nr_of_instances=0, # the container's max_nr_of_instances (here: 0=unlimited) + labels=['actuator'] # (optional) the container's labels + ) + # create container for the commands of the actuators + commands_container = Container(resourceName='commands') + commands_container = self.create_container( + actuator_container.path, + commands_container, + max_nr_of_instances=3, + labels=['commands'] + ) + # add commands_container of current actuator to self._command_containers + self._command_containers[actuator] = commands_container + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: 
+ + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if sensor.startswith('Temp'): + labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print ('%s: %s' % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-04.py b/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-04.py new file mode 100644 index 0000000..af60e5a --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-04.py @@ -0,0 +1,173 @@ +# notes: +# - adds the subscription of each commands_container to the handler handle_command() +# - adds the handler function handle_command() + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # available actuators + actuators = [ + 'Switch-AirCon', # AirConditioning + 'Switch-Window' # Window + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + self._command_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = 
self.create_container(None,
+                                                         container,
+                                                         labels=[label],
+                                                         max_nr_of_instances=0)
+
+        # create container for each actuator
+        for actuator in self.actuators:
+            actuator_container = Container(resourceName=actuator)
+            self.create_container(
+                self._devices_container.path,  # the target resource/path parenting the Container
+                actuator_container,  # the Container resource or a valid container ID
+                max_nr_of_instances=0,  # the container's max_nr_of_instances (here: 0=unlimited)
+                labels=['actuator']  # (optional) the container's labels
+            )
+            # create container for the commands of the actuators
+            commands_container = Container(resourceName='commands')
+            commands_container = self.create_container(
+                actuator_container.path,
+                commands_container,
+                max_nr_of_instances=3,
+                labels=['commands']
+            )
+            # add commands_container of current actuator to self._command_containers
+            self._command_containers[actuator] = commands_container
+            # subscribe the commands container of each actuator to the command handler
+            self.add_container_subscription(
+                commands_container.path,  # the Container or its path to be subscribed
+                self.handle_command  # reference of the notification handling function
+            )
+
+        # trigger periodically new data generation
+        self.run_forever(1, self.get_random_data)
+
+        # log message
+        self.logger.debug('registered')
+
+    def handle_command(self, container, value):
+        print('handle_command...')
+        print('container: %s' % container)
+        print('value: %s' % value)
+        print('')
+
+    def get_random_data(self):
+
+        # at random time intervals
+        if random() > self.threshold:
+
+            # select a random sensor
+            sensor = self.sensors[int(random() * len(self.sensors))]
+
+            # set parameters depending on sensor type
+            if sensor.startswith('Temp'):
+                value_range = self.temp_range
+                value_offset = self.temp_offset
+            else:
+                value_range = self.humi_range
+                value_offset = self.humi_offset
+
+            # generate random sensor data
+            value = int(random() * value_range + value_offset)
+            self.handle_sensor_data(sensor, value)
+
+    def handle_sensor_data(self, sensor, value):
+
+        # initialize sensor structure if never done before
+        if sensor not in self._recognized_sensors:
+            self.create_sensor_structure(sensor)
+        self.push_sensor_data(sensor, value)
+
+    def create_sensor_structure(self, sensor):
+        print('initializing sensor: %s' % sensor)
+
+        # create sensor container
+        device_container = Container(resourceName=sensor)
+        device_container = self.create_container(self._devices_container.path,
+                                                 device_container,
+                                                 labels=['sensor'],
+                                                 max_nr_of_instances=0)
+
+        # add sensor to _recognized_sensors
+        self._recognized_sensors[sensor] = device_container
+
+        # create measurements container
+        labels = ['measurements']
+        if sensor.startswith('Temp'):
+            labels.append('temperature')
+        else:
+            labels.append('humidity')
+        measurements_container = Container(resourceName='measurements')
+        measurements_container = self.create_container(device_container.path,
+                                                       measurements_container,
+                                                       labels=labels,
+                                                       max_nr_of_instances=3)
+
+        # add measurements_container from sensor to _recognized_measurement_containers
+        self._recognized_measurement_containers[sensor] = measurements_container
+
+    def push_sensor_data(self, sensor, value):
+
+        # build data set with value and metadata
+        if sensor.startswith('Temp'):
+            data = {
+                'value': value,
+                'type': 'temperature',
+                'unit': 'degreeC'
+            }
+        else:
+            data = {
+                'value': value,
+                'type': 'humidity',
+                'unit': 'percentage'
+            }
+
+        # print the new data set
+        print('%s: %s' % (sensor, data))
+
+        # finally, push the
data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE( + poas=['http://localhost:21346'], # adds poas in order to receive notifications + ) + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-final.py b/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-final.py new file mode 100644 index 0000000..e08aae6 --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-final.py @@ -0,0 +1,177 @@ +# notes: +# - complete example + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # available actuators + actuators = [ + 'Switch-AirCon', # AirConditioning + 'Switch-Window' # Window + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + self._command_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # create container for each actuator + for actuator in self.actuators: + actuator_container = Container(resourceName=actuator) + self.create_container( + self._devices_container.path, # the target resource/path parenting the Container + actuator_container, # the Container resource or a valid container ID + max_nr_of_instances=0, # the container's max_nr_of_instances (here: 0=unlimited) + labels=['actuator'] # (optional) the container's labels + ) + # create container for the commands of the actuators + commands_container = Container(resourceName='commands') + commands_container = self.create_container( + actuator_container.path, + commands_container, + max_nr_of_instances=3, + labels=['commands'] + ) + # add commands_container of current actuator to self._command_containers + self._command_containers[actuator] = commands_container + # subscribe to command container of each actuator to the handler command + self.add_container_subscription( + commands_container.path, # the Container or it's path to be subscribed + self.handle_command # reference of the notification handling function + ) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def handle_command(self, container, value): + print('handle_command...') + print('container: %s' % container) + print('value: %s' % value) + print('') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) 
+ + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if sensor.startswith('Temp'): + labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print ('%s: %s' % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE( + poas=['http://localhost:21346'], # adds poas in order to receive notifications + # SSL options + originator_pre='//openmtc.org/mn-cse-1', # originator_pre, needs to match value in cert + ca_certs='../../openmtc-gevent/certs/ca-chain.cert.pem', + cert_file='certs/test-ipe.cert.pem', # cert file, pre-shipped and should match name + key_file='certs/test-ipe.key.pem' + ) + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-01.py b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-01.py new file mode 100644 index 0000000..42deb9d --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-01.py @@ -0,0 +1,21 @@ +# notes: +# - added base structure + + +from openmtc_app.onem2m import XAE + + +class TestIPE(XAE): + remove_registration = True + + def _on_register(self): + # log message + self.logger.debug('registered') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-02.py b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-02.py new file mode 100644 index 0000000..d6a9816 --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-02.py @@ -0,0 +1,41 @@ +# notes: +# - adds creation of a container for devices +# - introduces function for random sensor data generation +# - introduces endless loop + + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + def _on_register(self): + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = 
self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # create some random data for a random sensor + self.get_random_data() + + # log message + self.logger.debug('registered') + + # start endless loop + self.run_forever() + + def get_random_data(self): + pass + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-03.py b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-03.py new file mode 100644 index 0000000..2c60c9f --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-03.py @@ -0,0 +1,44 @@ +# notes: +# - adds random +# - spawns run_forever with get_random_data function every one second +# - prints some random value operations + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + def _on_register(self): + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + print('---------') + random_value = random() + print(random_value) + print(random_value * 10) + print(int(random_value * 10)) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-04.py b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-04.py new file mode 100644 index 0000000..0729653 --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-04.py @@ -0,0 +1,71 @@ +# notes: +# - introducing list of sensors to create +# - introducing settings for random sensor data generation +# - adding code for random time intervals +# - adding code for random sensor selection +# - adding code for random sensor data generation + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # settings for random sensor data generation + threshold = 0.5 + value_range = 25 + value_offset = 10 + + def _on_register(self): + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + print('') + + # for random time intervals + if random() > self.threshold: + print('got some data') + + # select a random sensor + print('available sensors: %s' % self.sensors) + print('number of available sensors: %s' % len(self.sensors)) + print('some random sensor: %s' % self.sensors[int(random() * len(self.sensors))]) + + # generate random sensor data + print('random sensor data: %s' % int(random() * self.value_range + self.value_offset)) + + else: + print('no data') + + +if __name__ == '__main__': + from 
openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-05.py b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-05.py new file mode 100644 index 0000000..7b35d23 --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-05.py @@ -0,0 +1,93 @@ +# notes: +# - adds different range and offset for temperature and humidity value generation +# - introducing self._recognized_sensors variable +# - completing get_random_data() function +# - introducing handle_sensor_data() function +# - introducing create_sensor_structure() function +# - introducing push_sensor_data() function + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('I need to create a structure for the sensor %s.' % sensor) + self._recognized_sensors[sensor] = 'something useful' + + def push_sensor_data(self, sensor, value): + print('I would push the content %i of %s to the gateway.' 
% (value, sensor)) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-06.py b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-06.py new file mode 100644 index 0000000..c5ba32a --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-06.py @@ -0,0 +1,117 @@ +# notes: + +# - added create sensor container to function create_sensor_structure() +# - add sensor to _recognized_sensors +# - build data set with value and metadata +# - printing out the new data set + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print('%s: %s' % (sensor, data)) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-07.py b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-07.py new file mode 100644 index 0000000..b933b72 --- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-07.py @@ -0,0 +1,134 @@ +# notes: +# - introduced self._measurement_containers variable +# - added creation of measurements 
container in function create_sensor_structure() +# - added push of data to measurements_container of the sensor in function push_sensor_data() + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if sensor.startswith('Temp'): + labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print ('%s: %s' % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) diff --git a/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-final.py b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-final.py new file mode 100644 index 0000000..3bbbcf0 
--- /dev/null +++ b/doc/training/apps/onem2m/ipe/sensors/onem2m-ipe-sensors-final.py @@ -0,0 +1,138 @@ +# notes: +# - complete example + + +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if sensor.startswith('Temp'): + labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print ("%s: %s" % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == "__main__": + from openmtc_app.runner import AppRunner as Runner + + host = "http://localhost:8000" + app = TestIPE( + # SSL options + originator_pre='//openmtc.org/mn-cse-1', # originator_pre, needs to match value in cert + ca_certs='../../openmtc-gevent/certs/ca-chain.cert.pem', + cert_file='certs/test-ipe.cert.pem', # cert file, 
pre-shipped and should match name + key_file='certs/test-ipe.key.pem' + ) + Runner(app).run(host) diff --git a/doc/training/certs/test-gui.cert.pem b/doc/training/certs/test-gui.cert.pem new file mode 100644 index 0000000..d2d2642 --- /dev/null +++ b/doc/training/certs/test-gui.cert.pem @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICsjCCAligAwIBAgICEBcwCgYIKoZIzj0EAwIwWTELMAkGA1UEBhMCREUxDzAN +BgNVBAgMBkJlcmxpbjESMBAGA1UECgwJVFUgQmVybGluMQswCQYDVQQLDAJBVjEY +MBYGA1UEAwwPSW50ZXJtZWRpYXRlLUNBMB4XDTE3MDcyNDE5MjY1MFoXDTE4MDcy +NDE5MjY1MFowbDELMAkGA1UEBhMCREUxDzANBgNVBAgMBkJlcmxpbjEPMA0GA1UE +BwwGQmVybGluMRkwFwYDVQQKDBBGT0tVUyBGcmF1bmhvZmVyMQ0wCwYDVQQLDARO +R05JMREwDwYDVQQDDAh0ZXN0LWd1aTBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IA +BOaMXolo4A0MnNtwx4giyIjfW5Yy+3nc/BLm68fjwXGsOUM/EVq/SppEAo4Iaon0 +PDG9UFIX3QfOfXXb8tI+oomjgfwwgfkwCQYDVR0TBAIwADARBglghkgBhvhCAQEE +BAMCBsAwMwYJYIZIAYb4QgENBCYWJE9wZW5TU0wgR2VuZXJhdGVkIENsaWVudCBD +ZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUhlMnogZpn43sYj/q/yAa5MJ93C0wHwYDVR0j +BBgwFoAUp+8brctf+zSV/bHdy5Arf1WxokIwDgYDVR0PAQH/BAQDAgXgMB0GA1Ud +JQQWMBQGCCsGAQUFBwMCBggrBgEFBQcDATA1BgNVHREELjAshh8vL29wZW5tdGMu +b3JnL2luLWNzZS0xL0NUZXN0R1VJgglsb2NhbGhvc3QwCgYIKoZIzj0EAwIDSAAw +RQIgFnO68dODPkbmgIl9A4usB4bA07ZJt9avtTt+VeBGgo4CIQDcW6P3J2ydwBqh +4RYgPWVzslv4s7JaTVd8muHoC85FSQ== +-----END CERTIFICATE----- diff --git a/doc/training/certs/test-gui.key.pem b/doc/training/certs/test-gui.key.pem new file mode 100644 index 0000000..7d76d17 --- /dev/null +++ b/doc/training/certs/test-gui.key.pem @@ -0,0 +1,8 @@ +-----BEGIN EC PARAMETERS----- +BggqhkjOPQMBBw== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIKcLCVeonOtqjZcvTTRhN/FhYsTHCblXJho1JlkRV3lHoAoGCCqGSM49 +AwEHoUQDQgAE5oxeiWjgDQyc23DHiCLIiN9bljL7edz8Eubrx+PBcaw5Qz8RWr9K +mkQCjghqifQ8Mb1QUhfdB859ddvy0j6iiQ== +-----END EC PRIVATE KEY----- diff --git a/doc/training/certs/test-ipe.cert.pem b/doc/training/certs/test-ipe.cert.pem new file mode 100644 index 0000000..1e66f2b --- /dev/null +++ b/doc/training/certs/test-ipe.cert.pem @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICsTCCAligAwIBAgICEBYwCgYIKoZIzj0EAwIwWTELMAkGA1UEBhMCREUxDzAN +BgNVBAgMBkJlcmxpbjESMBAGA1UECgwJVFUgQmVybGluMQswCQYDVQQLDAJBVjEY +MBYGA1UEAwwPSW50ZXJtZWRpYXRlLUNBMB4XDTE3MDcyNDE5MjUxN1oXDTE4MDcy +NDE5MjUxN1owbDELMAkGA1UEBhMCREUxDzANBgNVBAgMBkJlcmxpbjEPMA0GA1UE +BwwGQmVybGluMRkwFwYDVQQKDBBGT0tVUyBGcmF1bmhvZmVyMQ0wCwYDVQQLDARO +R05JMREwDwYDVQQDDAh0ZXN0LWlwZTBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IA +BEur+tDxKs1Q9QiSfAr6jufibRNATBArcrIFyJ+EuVPI74DEu0yTPSbj3bc6MF22 +/LTMcWYQ+HDsThVzWF8JemmjgfwwgfkwCQYDVR0TBAIwADARBglghkgBhvhCAQEE +BAMCBsAwMwYJYIZIAYb4QgENBCYWJE9wZW5TU0wgR2VuZXJhdGVkIENsaWVudCBD +ZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUdS+z+TYkMkM8PcFDgUwDCP7ZyAcwHwYDVR0j +BBgwFoAUp+8brctf+zSV/bHdy5Arf1WxokIwDgYDVR0PAQH/BAQDAgXgMB0GA1Ud +JQQWMBQGCCsGAQUFBwMCBggrBgEFBQcDATA1BgNVHREELjAshh8vL29wZW5tdGMu +b3JnL21uLWNzZS0xL0NUZXN0SVBFgglsb2NhbGhvc3QwCgYIKoZIzj0EAwIDRwAw +RAIgV+6CNmpFq1J8pHdYdVZCqhdhWiLNUkUmhlctlLR8JMICIHTmPsOE8wP2IDIt +DcpsJM8Rh9k9LGNFJfND56d3YMZA +-----END CERTIFICATE----- diff --git a/doc/training/certs/test-ipe.key.pem b/doc/training/certs/test-ipe.key.pem new file mode 100644 index 0000000..a8487bb --- /dev/null +++ b/doc/training/certs/test-ipe.key.pem @@ -0,0 +1,8 @@ +-----BEGIN EC PARAMETERS----- +BggqhkjOPQMBBw== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIKmAV8TSduc5De7lIjJFNio5UqzARdJJ12aHhOQzOcfYoAoGCCqGSM49 +AwEHoUQDQgAES6v60PEqzVD1CJJ8CvqO5+JtE0BMECtysgXIn4S5U8jvgMS7TJM9 +JuPdtzowXbb8tMxxZhD4cOxOFXNYXwl6aQ== +-----END EC PRIVATE 
KEY----- diff --git a/doc/training/console-outputs/training-sensor-actuator-backend.md b/doc/training/console-outputs/training-sensor-actuator-backend.md new file mode 100644 index 0000000..2fe8b54 --- /dev/null +++ b/doc/training/console-outputs/training-sensor-actuator-backend.md @@ -0,0 +1,65 @@ +```sh +user@host:/git$ ./openmtc-open-source/openmtc-gevent/run-backend +PYTHONPATH: :/git/openmtc-open-source/openmtc-gevent/../futile/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/lib:/git/openmtc-open-source/openmtc-gevent/../common/openmtc-onem2m/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/src:/git/openmtc-open-source/openmtc-gevent/../serializers/*/src:/git/openmtc-open-source/openmtc-gevent/../openmtc-app/src +PYTHONPATH: :/git/openmtc-open-source/openmtc-gevent/../futile/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/lib:/git/openmtc-open-source/openmtc-gevent/../common/openmtc-onem2m/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/src:/git/openmtc-open-source/openmtc-gevent/../serializers/*/src:/git/openmtc-open-source/openmtc-gevent/../openmtc-app/src:/git/openmtc-open-source/openmtc-gevent/src:/git/openmtc-open-source/openmtc-gevent/../server/openmtc-cse/src:/git/openmtc-open-source/openmtc-gevent/../server/openmtc-server/src +DEBUG:openmtc_gevent.main:Reading config file: config-backend.json +::1 - - [2017-07-27 13:56:51] "POST /~/in-cse-1/onem2m HTTP/1.1" 201 488 0.003584 +::1 - - [2017-07-27 13:56:51] "GET /~/in-cse-1/onem2m HTTP/1.1" 200 515 0.000941 +::1 - - [2017-07-27 13:57:04] "POST /onem2m HTTP/1.1" 201 470 0.001442 +::1 - - [2017-07-27 13:57:04] "GET /onem2m/TestGUI HTTP/1.1" 200 465 0.000659 +::1 - - [2017-07-27 13:57:04] "GET /~/mn-cse-1/onem2m?fu=1&lbl=measurements&drt2 HTTP/1.1" 200 183 0.005280 +::1 - - [2017-07-27 13:57:04] "GET /~/mn-cse-1/onem2m?fu=1&lbl=commands&drt2 HTTP/1.1" 200 299 0.005015 +::1 - - [2017-07-27 13:57:05] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A04.595851%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 237 0.016746 +::1 - - [2017-07-27 13:57:05] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A04.597361%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.021630 +::1 - - [2017-07-27 13:57:05] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 200 574 0.012765 +::1 - - [2017-07-27 13:57:05] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 507 0.099880 +::1 - - [2017-07-27 13:57:06] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A05.625861%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.018821 +::1 - - [2017-07-27 13:57:06] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A05.739613%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 237 0.008851 +::1 - - [2017-07-27 13:57:06] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 200 574 0.004739 +::1 - - [2017-07-27 13:57:06] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 201 507 0.090307 +::1 - - [2017-07-27 13:57:07] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.051118 +::1 - - [2017-07-27 13:57:07] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A06.648521%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.018782 +::1 - - [2017-07-27 13:57:07] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A06.850242%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.018515 +::1 - - [2017-07-27 13:57:08] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.096046 +::1 - - [2017-07-27 13:57:08] "GET 
/~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A07.670974%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.018577 +::1 - - [2017-07-27 13:57:08] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A07.871679%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.026302 +::1 - - [2017-07-27 13:57:09] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A08.692558%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.018213 +::1 - - [2017-07-27 13:57:09] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A08.901452%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.018222 +::1 - - [2017-07-27 13:57:10] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A09.714743%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.020672 +::1 - - [2017-07-27 13:57:10] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A09.923621%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 237 0.020293 +::1 - - [2017-07-27 13:57:10] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 200 579 0.013450 +::1 - - [2017-07-27 13:57:11] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 201 508 0.095122 +::1 - - [2017-07-27 13:57:11] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A10.739403%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.021823 +::1 - - [2017-07-27 13:57:12] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A11.062693%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 237 0.005329 +::1 - - [2017-07-27 13:57:12] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 200 580 0.003225 +::1 - - [2017-07-27 13:57:12] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 508 0.095343 +::1 - - [2017-07-27 13:57:12] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A11.764429%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.006962 +::1 - - [2017-07-27 13:57:13] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A12.172755%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.021495 +::1 - - [2017-07-27 13:57:13] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.046212 +::1 - - [2017-07-27 13:57:13] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A12.772512%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.022092 +::1 - - [2017-07-27 13:57:14] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A13.197370%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.022078 +::1 - - [2017-07-27 13:57:14] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A13.798782%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.022495 +::1 - - [2017-07-27 13:57:15] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A14.222723%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.021815 +::1 - - [2017-07-27 13:57:15] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.049609 +::1 - - [2017-07-27 13:57:15] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A14.825227%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.022142 +::1 - - [2017-07-27 13:57:16] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A15.248347%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.021845 +::1 - - [2017-07-27 13:57:16] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.046456 +::1 - - [2017-07-27 13:57:16] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A15.851501%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.023678 +::1 - - [2017-07-27 13:57:17] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A16.274279%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.012253 +::1 - - [2017-07-27 13:57:17] "GET 
/~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A16.878481%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.007413 +::1 - - [2017-07-27 13:57:18] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A17.289737%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.027434 +::1 - - [2017-07-27 13:57:18] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.052167 +::1 - - [2017-07-27 13:57:18] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A17.887983%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.023076 +::1 - - [2017-07-27 13:57:19] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A18.321269%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.023393 +::1 - - [2017-07-27 13:57:19] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A18.914453%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.007408 +::1 - - [2017-07-27 13:57:20] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A19.348526%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.022457 +::1 - - [2017-07-27 13:57:20] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.056230 +::1 - - [2017-07-27 13:57:20] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A19.924024%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.026688 +::1 - - [2017-07-27 13:57:21] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A20.374134%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.025529 +::1 - - [2017-07-27 13:57:21] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A20.953640%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.006857 +::1 - - [2017-07-27 13:57:22] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A21.402722%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.025722 +::1 - - [2017-07-27 13:57:22] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A21.962632%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.022066 +::1 - - [2017-07-27 13:57:23] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A22.431626%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.023046 +::1 - - [2017-07-27 13:57:24] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A22.988582%2B00%3A00&lbl=commands&drt2 HTTP/1.1" 200 183 0.025497 +::1 - - [2017-07-27 13:57:24] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+11%3A57%3A23.458088%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.023691 +``` diff --git a/doc/training/console-outputs/training-sensor-actuator-gateway.md b/doc/training/console-outputs/training-sensor-actuator-gateway.md new file mode 100644 index 0000000..6218c16 --- /dev/null +++ b/doc/training/console-outputs/training-sensor-actuator-gateway.md @@ -0,0 +1,84 @@ +```sh +user@host:/git$ ./openmtc-open-source/openmtc-gevent/run_gateway +PYTHONPATH: :/git/openmtc-open-source/openmtc-gevent/../futile/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/lib:/git/openmtc-open-source/openmtc-gevent/../common/openmtc-onem2m/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/src:/git/openmtc-open-source/openmtc-gevent/../serializers/*/src:/git/openmtc-open-source/openmtc-gevent/../openmtc-app/src +PYTHONPATH: 
:/git/openmtc-open-source/openmtc-gevent/../futile/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/lib:/git/openmtc-open-source/openmtc-gevent/../common/openmtc-onem2m/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/src:/git/openmtc-open-source/openmtc-gevent/../serializers/*/src:/git/openmtc-open-source/openmtc-gevent/../openmtc-app/src:/git/openmtc-open-source/openmtc-gevent/src:/git/openmtc-open-source/openmtc-gevent/../server/openmtc-cse/src:/git/openmtc-open-source/openmtc-gevent/../server/openmtc-server/src +DEBUG:openmtc_gevent.main:Reading config file: config-gateway.json +::1 - - [2017-07-27 13:57:03] "POST /onem2m HTTP/1.1" 201 470 0.001538 +::1 - - [2017-07-27 13:57:03] "GET /onem2m/TestIPE HTTP/1.1" 200 465 0.000703 +::1 - - [2017-07-27 13:57:03] "POST /onem2m/TestIPE HTTP/1.1" 201 471 0.047925 +::1 - - [2017-07-27 13:57:03] "POST /onem2m/TestIPE/devices HTTP/1.1" 201 474 0.048331 +::1 - - [2017-07-27 13:57:03] "POST /onem2m/TestIPE/devices/Switch-AirCon HTTP/1.1" 201 469 0.046801 +::1 - - [2017-07-27 13:57:03] "GET /onem2m/TestIPE/devices/Switch-AirCon/commands HTTP/1.1" 200 464 0.003259 +::1 - - [2017-07-27 13:57:03] "POST /onem2m/TestIPE/devices/Switch-AirCon/commands HTTP/1.1" 201 467 0.046626 +::1 - - [2017-07-27 13:57:03] "POST /onem2m/TestIPE/devices HTTP/1.1" 201 474 0.045386 +::1 - - [2017-07-27 13:57:03] "POST /onem2m/TestIPE/devices/Switch-Window HTTP/1.1" 201 469 0.045888 +::1 - - [2017-07-27 13:57:03] "GET /onem2m/TestIPE/devices/Switch-Window/commands HTTP/1.1" 200 464 0.001493 +::1 - - [2017-07-27 13:57:03] "POST /onem2m/TestIPE/devices/Switch-Window/commands HTTP/1.1" 201 467 0.042703 +::1 - - [2017-07-27 13:57:04] "GET /~/mn-cse-1/onem2m?lbl=measurements&fu=1 HTTP/1.1" 200 183 0.001639 +::1 - - [2017-07-27 13:57:04] "GET /~/mn-cse-1/onem2m?lbl=commands&fu=1 HTTP/1.1" 200 299 0.001513 +::1 - - [2017-07-27 13:57:04] "POST /onem2m/TestIPE/devices HTTP/1.1" 201 465 0.049577 +::1 - - [2017-07-27 13:57:04] "POST /onem2m/TestIPE/devices/Humi-1 HTTP/1.1" 201 488 0.049236 +::1 - - [2017-07-27 13:57:04] "POST /onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 531 0.045790 +::1 - - [2017-07-27 13:57:05] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A04.595851%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 237 0.009508 +::1 - - [2017-07-27 13:57:05] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A04.597361%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.009083 +::1 - - [2017-07-27 13:57:05] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 200 598 0.002505 +::1 - - [2017-07-27 13:57:05] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 507 0.047818 +::1 - - [2017-07-27 13:57:05] "POST /onem2m/TestIPE/devices HTTP/1.1" 201 465 0.044975 +::1 - - [2017-07-27 13:57:05] "POST /onem2m/TestIPE/devices/Humi-2 HTTP/1.1" 201 488 0.043389 +::1 - - [2017-07-27 13:57:05] "POST /onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 201 531 0.045038 +::1 - - [2017-07-27 13:57:06] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A05.625861%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.011576 +::1 - - [2017-07-27 13:57:06] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A05.739613%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 237 0.006012 +::1 - - [2017-07-27 13:57:06] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 200 598 0.001802 +::1 - - [2017-07-27 13:57:06] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 201 507 0.044464 +::1 - - [2017-07-27 13:57:06] "POST 
/onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 531 0.047284 +::1 - - [2017-07-27 13:57:07] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A06.648521%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.012750 +::1 - - [2017-07-27 13:57:07] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A06.850242%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.012305 +::1 - - [2017-07-27 13:57:08] "POST /onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 201 531 0.045643 +::1 - - [2017-07-27 13:57:08] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A07.670974%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.012383 +::1 - - [2017-07-27 13:57:08] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A07.871679%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.019881 +::1 - - [2017-07-27 13:57:09] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A08.692558%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.012114 +::1 - - [2017-07-27 13:57:09] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A08.901452%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.012244 +::1 - - [2017-07-27 13:57:10] "POST /onem2m/TestIPE/devices HTTP/1.1" 201 465 0.043398 +::1 - - [2017-07-27 13:57:10] "POST /onem2m/TestIPE/devices/Temp-2 HTTP/1.1" 201 493 0.042360 +::1 - - [2017-07-27 13:57:10] "POST /onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 201 532 0.049459 +::1 - - [2017-07-27 13:57:10] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A09.714743%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.014616 +::1 - - [2017-07-27 13:57:10] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A09.923621%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 237 0.013724 +::1 - - [2017-07-27 13:57:10] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 200 603 0.004230 +::1 - - [2017-07-27 13:57:11] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 201 508 0.045875 +::1 - - [2017-07-27 13:57:11] "POST /onem2m/TestIPE/devices HTTP/1.1" 201 467 0.049913 +::1 - - [2017-07-27 13:57:11] "POST /onem2m/TestIPE/devices/Temp-1 HTTP/1.1" 201 494 0.049076 +::1 - - [2017-07-27 13:57:11] "POST /onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 532 0.049060 +::1 - - [2017-07-27 13:57:11] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A10.739403%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.015129 +::1 - - [2017-07-27 13:57:12] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A11.062693%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 237 0.003554 +::1 - - [2017-07-27 13:57:12] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 200 604 0.001058 +::1 - - [2017-07-27 13:57:12] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 508 0.047654 +::1 - - [2017-07-27 13:57:12] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A11.764429%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.004372 +::1 - - [2017-07-27 13:57:13] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A12.172755%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.015175 +::1 - - [2017-07-27 13:57:13] "POST /onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 531 0.043883 +::1 - - [2017-07-27 13:57:13] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A12.772512%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.016051 +::1 - - [2017-07-27 13:57:14] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A13.197370%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.015887 +::1 - - [2017-07-27 13:57:14] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A13.798782%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.015560 +::1 - - 
[2017-07-27 13:57:15] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A14.222723%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.015576 +::1 - - [2017-07-27 13:57:15] "POST /onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 531 0.049928 +::1 - - [2017-07-27 13:57:15] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A14.825227%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.016184 +::1 - - [2017-07-27 13:57:16] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A15.248347%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.015338 +::1 - - [2017-07-27 13:57:16] "POST /onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 201 532 0.045091 +::1 - - [2017-07-27 13:57:16] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A15.851501%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.017068 +::1 - - [2017-07-27 13:57:17] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A16.274279%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.008302 +::1 - - [2017-07-27 13:57:17] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A16.878481%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.004853 +::1 - - [2017-07-27 13:57:18] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A17.289737%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.018532 +::1 - - [2017-07-27 13:57:18] "POST /onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 532 0.044654 +::1 - - [2017-07-27 13:57:18] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A17.887983%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.016881 +::1 - - [2017-07-27 13:57:19] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A18.321269%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.016954 +::1 - - [2017-07-27 13:57:19] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A18.914453%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.004566 +::1 - - [2017-07-27 13:57:20] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A19.348526%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.016216 +::1 - - [2017-07-27 13:57:20] "POST /onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 534 0.047617 +::1 - - [2017-07-27 13:57:20] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A19.924024%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.019466 +::1 - - [2017-07-27 13:57:21] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A20.374134%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.018749 +::1 - - [2017-07-27 13:57:21] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A20.953640%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.004838 +::1 - - [2017-07-27 13:57:22] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A21.402722%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.019487 +::1 - - [2017-07-27 13:57:22] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A21.962632%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.015912 +::1 - - [2017-07-27 13:57:23] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A22.431626%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.016858 +::1 - - [2017-07-27 13:57:24] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A22.988582%2B00%3A00&lbl=commands&fu=1 HTTP/1.1" 200 183 0.017713 +::1 - - [2017-07-27 13:57:24] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+11%3A57%3A23.458088%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.017278 +``` diff --git a/doc/training/console-outputs/training-sensor-actuator-gui.md b/doc/training/console-outputs/training-sensor-actuator-gui.md new file mode 100644 index 0000000..98cec8d --- /dev/null +++ b/doc/training/console-outputs/training-sensor-actuator-gui.md @@ -0,0 +1,58 @@ +```sh +user@host:/git$ 
./openmtc-open-source/doc/training/start-app.sh +[1] onem2m-ipe-sensors-final.py +[2] onem2m-ipe-sensors-actuators-final.py +[3] onem2m-gui-sensors-final.py +[4] onem2m-gui-sensors-actuators-final.py +Choose the app to start: 4 +PYTHONPATH: :/git/openmtc-open-source/doc/../futile/src:/git/openmtc-open-source/doc/../common/openmtc/lib:/git/openmtc-open-source/doc/../common/openmtc-onem2m/src:/git/openmtc-open-source/doc/../common/openmtc/src:/git/openmtc-open-source/doc/../serializers/*/src:/git/openmtc-open-source/doc/../openmtc-app/src +discovered commands container: /mn-cse-1/onem2m/TestIPE/devices/Switch-Window/commands + +discovered commands container: /mn-cse-1/onem2m/TestIPE/devices/Switch-AirCon/commands + +Subscribing to Resource: /mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements +Subscribing to Resource: /mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements +127.0.0.1 - - [2017-07-27 13:57:07] "POST / HTTP/1.1" 200 160 0.001971 +handle_measurements... +container: /mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements +data: {u'type': u'humidity', u'value': 34, u'unit': u'percentage'} +Humidity = 34 < 65. CLOSE Window + +127.0.0.1 - - [2017-07-27 13:57:08] "POST / HTTP/1.1" 200 160 0.045359 +handle_measurements... +container: /mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements +data: {u'type': u'humidity', u'value': 78, u'unit': u'percentage'} +Humidity = 78 >= 65. OPEN Window + +Subscribing to Resource: /mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements +Subscribing to Resource: /mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements +127.0.0.1 - - [2017-07-27 13:57:13] "POST / HTTP/1.1" 200 160 0.043167 +handle_measurements... +container: /mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements +data: {u'type': u'humidity', u'value': 42, u'unit': u'percentage'} +Humidity = 42 < 65. CLOSE Window + +127.0.0.1 - - [2017-07-27 13:57:15] "POST / HTTP/1.1" 200 160 0.041397 +handle_measurements... +container: /mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements +data: {u'type': u'humidity', u'value': 67, u'unit': u'percentage'} +Humidity = 67 >= 65. OPEN Window + +127.0.0.1 - - [2017-07-27 13:57:16] "POST / HTTP/1.1" 200 160 0.043280 +handle_measurements... +container: /mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements +data: {u'type': u'temperature', u'value': 27, u'unit': u'degreeC'} +Temperature = 27 >= 22. Turning AirConditioning ON + +127.0.0.1 - - [2017-07-27 13:57:18] "POST / HTTP/1.1" 200 160 0.047879 +handle_measurements... +container: /mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements +data: {u'type': u'temperature', u'value': 20, u'unit': u'degreeC'} +Temperature = 20 < 22. Turning AirConditioning OFF + +^C127.0.0.1 - - [2017-07-27 13:57:20] "POST / HTTP/1.1" 200 160 0.046181 +handle_measurements... +container: /mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements +data: {u'type': u'temperature', u'value': 23, u'unit': u'degreeC'} +Temperature = 23 >= 22. 
Turning AirConditioning ON +``` diff --git a/doc/training/console-outputs/training-sensor-actuator-ipe.md b/doc/training/console-outputs/training-sensor-actuator-ipe.md new file mode 100644 index 0000000..5d5f2d4 --- /dev/null +++ b/doc/training/console-outputs/training-sensor-actuator-ipe.md @@ -0,0 +1,24 @@ +```sh +user@host:/git$ ./openmtc-open-source/doc/training/start-app.sh +[1] onem2m-ipe-sensors-final.py +[2] onem2m-ipe-sensors-actuators-final.py +[3] onem2m-gui-sensors-final.py +[4] onem2m-gui-sensors-actuators-final.py +Choose the app to start: 2 +PYTHONPATH: :/git/openmtc-open-source/doc/../futile/src:/git/openmtc-open-source/doc/../common/openmtc/lib:/git/openmtc-open-source/doc/../common/openmtc-onem2m/src:/git/openmtc-open-source/doc/../common/openmtc/src:/git/openmtc-open-source/doc/../serializers/*/src:/git/openmtc-open-source/doc/../openmtc-app/src +initializing sensor: Humi-1 +Humi-1: {'type': 'humidity', 'unit': 'percentage', 'value': 66} +initializing sensor: Humi-2 +Humi-2: {'type': 'humidity', 'unit': 'percentage', 'value': 75} +Humi-1: {'type': 'humidity', 'unit': 'percentage', 'value': 34} +Humi-2: {'type': 'humidity', 'unit': 'percentage', 'value': 78} +initializing sensor: Temp-2 +Temp-2: {'type': 'temperature', 'unit': 'degreeC', 'value': 10} +initializing sensor: Temp-1 +Temp-1: {'type': 'temperature', 'unit': 'degreeC', 'value': 10} +Humi-1: {'type': 'humidity', 'unit': 'percentage', 'value': 42} +Humi-1: {'type': 'humidity', 'unit': 'percentage', 'value': 67} +Temp-2: {'type': 'temperature', 'unit': 'degreeC', 'value': 27} +Temp-1: {'type': 'temperature', 'unit': 'degreeC', 'value': 20} +Temp-1: {'type': 'temperature', 'unit': 'degreeC', 'value': 23} +``` diff --git a/doc/training/console-outputs/training-sensor-backend.md b/doc/training/console-outputs/training-sensor-backend.md new file mode 100644 index 0000000..3724429 --- /dev/null +++ b/doc/training/console-outputs/training-sensor-backend.md @@ -0,0 +1,36 @@ +```sh +user@host:/git$ ./openmtc-open-source/openmtc-gevent/run-backend +PYTHONPATH: :/git/openmtc-open-source/openmtc-gevent/../futile/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/lib:/git/openmtc-open-source/openmtc-gevent/../common/openmtc-onem2m/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/src:/git/openmtc-open-source/openmtc-gevent/../serializers/*/src:/git/openmtc-open-source/openmtc-gevent/../openmtc-app/src +PYTHONPATH: :/git/openmtc-open-source/openmtc-gevent/../futile/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/lib:/git/openmtc-open-source/openmtc-gevent/../common/openmtc-onem2m/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/src:/git/openmtc-open-source/openmtc-gevent/../serializers/*/src:/git/openmtc-open-source/openmtc-gevent/../openmtc-app/src:/git/openmtc-open-source/openmtc-gevent/src:/git/openmtc-open-source/openmtc-gevent/../server/openmtc-cse/src:/git/openmtc-open-source/openmtc-gevent/../server/openmtc-server/src +DEBUG:openmtc_gevent.main:Reading config file: config-backend.json +::1 - - [2017-07-27 12:09:15] "POST /~/in-cse-1/onem2m HTTP/1.1" 201 488 0.004198 +::1 - - [2017-07-27 12:09:15] "GET /~/in-cse-1/onem2m HTTP/1.1" 200 515 0.000929 +::1 - - [2017-07-27 12:09:33] "POST /onem2m HTTP/1.1" 201 470 0.001444 +::1 - - [2017-07-27 12:09:33] "GET /onem2m/TestGUI HTTP/1.1" 200 465 0.000686 +::1 - - [2017-07-27 12:09:33] "GET /~/mn-cse-1/onem2m?fu=1&lbl=measurements&drt2 HTTP/1.1" 200 403 0.005261 +::1 - - [2017-07-27 12:09:33] "GET 
/~/mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 200 651 0.003173 +::1 - - [2017-07-27 12:09:33] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 201 507 0.102775 +::1 - - [2017-07-27 12:09:33] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 200 574 0.011672 +::1 - - [2017-07-27 12:09:33] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 201 507 0.101555 +::1 - - [2017-07-27 12:09:33] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 200 574 0.014317 +::1 - - [2017-07-27 12:09:34] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 507 0.100284 +::1 - - [2017-07-27 12:09:34] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 200 651 0.016302 +::1 - - [2017-07-27 12:09:34] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 507 0.099349 +::1 - - [2017-07-27 12:09:34] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.064469 +::1 - - [2017-07-27 12:09:35] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+10%3A09%3A34.227461%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.021622 +::1 - - [2017-07-27 12:09:36] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+10%3A09%3A35.252923%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.019777 +::1 - - [2017-07-27 12:09:36] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.095844 +::1 - - [2017-07-27 12:09:37] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+10%3A09%3A36.276019%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.021947 +::1 - - [2017-07-27 12:09:37] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.053290 +::1 - - [2017-07-27 12:09:38] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+10%3A09%3A37.301073%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.026539 +::1 - - [2017-07-27 12:09:38] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.053452 +::1 - - [2017-07-27 12:09:39] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+10%3A09%3A38.330637%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.020305 +::1 - - [2017-07-27 12:09:40] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+10%3A09%3A39.354340%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.020969 +::1 - - [2017-07-27 12:09:40] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.050797 +::1 - - [2017-07-27 12:09:41] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+10%3A09%3A40.378678%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.022431 +::1 - - [2017-07-27 12:09:41] "POST /_/openmtc.org/in-cse-1/CTestGUI HTTP/1.1" 200 161 0.054525 +::1 - - [2017-07-27 12:09:42] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+10%3A09%3A41.405124%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.029247 +::1 - - [2017-07-27 12:09:43] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+10%3A09%3A42.437767%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 200 183 0.003374 +::1 - - [2017-07-27 12:09:43] "DELETE /~/in-cse-1/onem2m/mn-cse-1 HTTP/1.1" 200 161 0.003337 +::1 - - [2017-07-27 12:09:44] "GET /~/mn-cse-1/onem2m?fu=1&cra=2017-07-27+10%3A09%3A43.442350%2B00%3A00&lbl=measurements&drt2 HTTP/1.1" 404 168 0.003254 +``` diff --git a/doc/training/console-outputs/training-sensor-gateway.md b/doc/training/console-outputs/training-sensor-gateway.md new file mode 100644 index 0000000..d38a0b0 --- /dev/null +++ b/doc/training/console-outputs/training-sensor-gateway.md @@ -0,0 +1,48 @@ +```sh +user@host:/git$ ./openmtc-open-source/openmtc-gevent/run_gateway +PYTHONPATH: 
:/git/openmtc-open-source/openmtc-gevent/../futile/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/lib:/git/openmtc-open-source/openmtc-gevent/../common/openmtc-onem2m/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/src:/git/openmtc-open-source/openmtc-gevent/../serializers/*/src:/git/openmtc-open-source/openmtc-gevent/../openmtc-app/src +PYTHONPATH: :/git/openmtc-open-source/openmtc-gevent/../futile/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/lib:/git/openmtc-open-source/openmtc-gevent/../common/openmtc-onem2m/src:/git/openmtc-open-source/openmtc-gevent/../common/openmtc/src:/git/openmtc-open-source/openmtc-gevent/../serializers/*/src:/git/openmtc-open-source/openmtc-gevent/../openmtc-app/src:/git/openmtc-open-source/openmtc-gevent/src:/git/openmtc-open-source/openmtc-gevent/../server/openmtc-cse/src:/git/openmtc-open-source/openmtc-gevent/../server/openmtc-server/src +DEBUG:openmtc_gevent.main:Reading config file: config-gateway.json +::1 - - [2017-07-27 12:09:24] "POST /onem2m HTTP/1.1" 201 438 0.001517 +::1 - - [2017-07-27 12:09:24] "GET /onem2m/TestIPE HTTP/1.1" 200 433 0.000697 +::1 - - [2017-07-27 12:09:24] "POST /onem2m/TestIPE HTTP/1.1" 201 471 0.050511 +::1 - - [2017-07-27 12:09:24] "POST /onem2m/TestIPE/devices HTTP/1.1" 201 465 0.049690 +::1 - - [2017-07-27 12:09:24] "POST /onem2m/TestIPE/devices/Humi-1 HTTP/1.1" 201 488 0.050775 +::1 - - [2017-07-27 12:09:24] "POST /onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 531 0.047327 +::1 - - [2017-07-27 12:09:25] "POST /onem2m/TestIPE/devices HTTP/1.1" 201 465 0.048645 +::1 - - [2017-07-27 12:09:26] "POST /onem2m/TestIPE/devices/Humi-2 HTTP/1.1" 201 488 0.042868 +::1 - - [2017-07-27 12:09:26] "POST /onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 201 531 0.044007 +::1 - - [2017-07-27 12:09:29] "POST /onem2m/TestIPE/devices HTTP/1.1" 201 465 0.046697 +::1 - - [2017-07-27 12:09:29] "POST /onem2m/TestIPE/devices/Temp-2 HTTP/1.1" 201 491 0.045791 +::1 - - [2017-07-27 12:09:29] "POST /onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 201 531 0.047106 +::1 - - [2017-07-27 12:09:31] "POST /onem2m/TestIPE/devices HTTP/1.1" 201 465 0.049131 +::1 - - [2017-07-27 12:09:31] "POST /onem2m/TestIPE/devices/Temp-1 HTTP/1.1" 201 491 0.047133 +::1 - - [2017-07-27 12:09:31] "POST /onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 531 0.046603 +::1 - - [2017-07-27 12:09:32] "POST /onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 201 531 0.043829 +::1 - - [2017-07-27 12:09:33] "POST /onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 531 0.046445 +::1 - - [2017-07-27 12:09:33] "GET /~/mn-cse-1/onem2m?lbl=measurements&fu=1 HTTP/1.1" 200 403 0.002203 +::1 - - [2017-07-27 12:09:33] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 200 675 0.000822 +::1 - - [2017-07-27 12:09:33] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements HTTP/1.1" 201 507 0.050772 +::1 - - [2017-07-27 12:09:33] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 200 598 0.005506 +::1 - - [2017-07-27 12:09:33] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 201 507 0.047209 +::1 - - [2017-07-27 12:09:33] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 200 598 0.004212 +::1 - - [2017-07-27 12:09:34] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 507 0.049078 +::1 - - [2017-07-27 12:09:34] "GET /~/mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 200 675 0.005679 +::1 - - 
[2017-07-27 12:09:34] "POST /~/mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 507 0.048767 +::1 - - [2017-07-27 12:09:34] "POST /onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 531 0.048999 +::1 - - [2017-07-27 12:09:35] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+10%3A09%3A34.227461%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.015252 +::1 - - [2017-07-27 12:09:36] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+10%3A09%3A35.252923%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.013147 +::1 - - [2017-07-27 12:09:36] "POST /onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 201 531 0.047898 +::1 - - [2017-07-27 12:09:37] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+10%3A09%3A36.276019%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.016102 +::1 - - [2017-07-27 12:09:37] "POST /onem2m/TestIPE/devices/Temp-1/measurements HTTP/1.1" 201 531 0.048973 +::1 - - [2017-07-27 12:09:38] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+10%3A09%3A37.301073%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.019646 +::1 - - [2017-07-27 12:09:38] "POST /onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 531 0.049452 +::1 - - [2017-07-27 12:09:39] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+10%3A09%3A38.330637%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.014191 +::1 - - [2017-07-27 12:09:40] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+10%3A09%3A39.354340%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.014929 +::1 - - [2017-07-27 12:09:40] "POST /onem2m/TestIPE/devices/Humi-2/measurements HTTP/1.1" 201 533 0.047670 +::1 - - [2017-07-27 12:09:41] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+10%3A09%3A40.378678%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.015533 +::1 - - [2017-07-27 12:09:41] "POST /onem2m/TestIPE/devices/Humi-1/measurements HTTP/1.1" 201 533 0.049902 +::1 - - [2017-07-27 12:09:42] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+10%3A09%3A41.405124%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.021273 +::1 - - [2017-07-27 12:09:42] "DELETE /onem2m/TestIPE HTTP/1.1" 200 161 0.029264 +::1 - - [2017-07-27 12:09:43] "GET /~/mn-cse-1/onem2m?cra=2017-07-27+10%3A09%3A42.437767%2B00%3A00&lbl=measurements&fu=1 HTTP/1.1" 200 183 0.001473 +``` diff --git a/doc/training/console-outputs/training-sensor-gui.md b/doc/training/console-outputs/training-sensor-gui.md new file mode 100644 index 0000000..027d377 --- /dev/null +++ b/doc/training/console-outputs/training-sensor-gui.md @@ -0,0 +1,42 @@ +```sh +user@host:/git$ ./openmtc-open-source/doc/training/start-app.sh +[1] onem2m-ipe-sensors-final.py +[2] onem2m-ipe-sensors-actuators-final.py +[3] onem2m-gui-sensors-final.py +[4] onem2m-gui-sensors-actuators-final.py +Choose the app to start: 3 +PYTHONPATH: :/git/openmtc-open-source/doc/../futile/src:/git/openmtc-open-source/doc/../common/openmtc/lib:/git/openmtc-open-source/doc/../common/openmtc-onem2m/src:/git/openmtc-open-source/doc/../common/openmtc/src:/git/openmtc-open-source/doc/../serializers/*/src:/git/openmtc-open-source/doc/../openmtc-app/src +Subscribing to Resource: /mn-cse-1/onem2m/TestIPE/devices/Temp-2/measurements +Subscribing to Resource: /mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements +Subscribing to Resource: /mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements +Subscribing to Resource: /mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements +127.0.0.1 - - [2017-07-27 12:09:34] "POST / HTTP/1.1" 200 160 0.008250 +handle measurements.. 
+container: /mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements +data: {u'type': u'temperature', u'value': 28, u'unit': u'degreeC'} + +127.0.0.1 - - [2017-07-27 12:09:36] "POST / HTTP/1.1" 200 160 0.045183 +handle measurements.. +container: /mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements +data: {u'type': u'humidity', u'value': 55, u'unit': u'percentage'} + +127.0.0.1 - - [2017-07-27 12:09:37] "POST / HTTP/1.1" 200 160 0.044600 +handle measurements.. +container: /mn-cse-1/onem2m/TestIPE/devices/Temp-1/measurements +data: {u'type': u'temperature', u'value': 25, u'unit': u'degreeC'} + +127.0.0.1 - - [2017-07-27 12:09:38] "POST / HTTP/1.1" 200 160 0.043332 +handle measurements.. +container: /mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements +data: {u'type': u'humidity', u'value': 66, u'unit': u'percentage'} + +127.0.0.1 - - [2017-07-27 12:09:40] "POST / HTTP/1.1" 200 160 0.044541 +handle measurements.. +container: /mn-cse-1/onem2m/TestIPE/devices/Humi-2/measurements +data: {u'type': u'humidity', u'value': 60, u'unit': u'percentage'} + +127.0.0.1 - - [2017-07-27 12:09:41] "POST / HTTP/1.1" 200 160 0.047174 +handle measurements.. +container: /mn-cse-1/onem2m/TestIPE/devices/Humi-1/measurements +data: {u'type': u'humidity', u'value': 34, u'unit': u'percentage'} +``` diff --git a/doc/training/console-outputs/training-sensor-ipe.md b/doc/training/console-outputs/training-sensor-ipe.md new file mode 100644 index 0000000..655a8d0 --- /dev/null +++ b/doc/training/console-outputs/training-sensor-ipe.md @@ -0,0 +1,25 @@ +```sh +user@host:/git$ ./openmtc-open-source/doc/training/start-app.sh +[1] onem2m-ipe-sensors-final.py +[2] onem2m-ipe-sensors-actuators-final.py +[3] onem2m-gui-sensors-final.py +[4] onem2m-gui-sensors-actuators-final.py +Choose the app to start: 1 +PYTHONPATH: :/git/openmtc-open-source/doc/../futile/src:/git/openmtc-open-source/doc/../common/openmtc/lib:/git/openmtc-open-source/doc/../common/openmtc-onem2m/src:/git/openmtc-open-source/doc/../common/openmtc/src:/git/openmtc-open-source/doc/../serializers/*/src:/git/openmtc-open-source/doc/../openmtc-app/src +initializing sensor: Humi-1 +Humi-1: {'type': 'humidity', 'unit': 'percentage', 'value': 33} +initializing sensor: Humi-2 +Humi-2: {'type': 'humidity', 'unit': 'percentage', 'value': 49} +initializing sensor: Temp-2 +Temp-2: {'type': 'temperature', 'unit': 'degreeC', 'value': 31} +initializing sensor: Temp-1 +Temp-1: {'type': 'temperature', 'unit': 'degreeC', 'value': 14} +Temp-2: {'type': 'temperature', 'unit': 'degreeC', 'value': 13} +Temp-1: {'type': 'temperature', 'unit': 'degreeC', 'value': 11} +Temp-1: {'type': 'temperature', 'unit': 'degreeC', 'value': 28} +Humi-2: {'type': 'humidity', 'unit': 'percentage', 'value': 55} +Temp-1: {'type': 'temperature', 'unit': 'degreeC', 'value': 25} +Humi-1: {'type': 'humidity', 'unit': 'percentage', 'value': 66} +Humi-2: {'type': 'humidity', 'unit': 'percentage', 'value': 60} +Humi-1: {'type': 'humidity', 'unit': 'percentage', 'value': 34} +``` diff --git a/doc/training/onem2m-examples/onem2m-example-1.py b/doc/training/onem2m-examples/onem2m-example-1.py new file mode 100644 index 0000000..a94ff09 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-1.py @@ -0,0 +1,16 @@ +# Example 1: Creating Objects + +from openmtc_onem2m.model import AE + +my_app = AE() + +print my_app.path +#>>> None +print my_app.App_ID +#>>> None +print my_app.parent_path +#>>> None +print my_app.labels +#>>> None +print my_app.attributes +#>>> [UnicodeAttribute(name="AE-ID", type=unicode), 
UnicodeAttribute(name="App-ID", type=unicode), ListAttribute(name="accessControlPolicyIDs", type=list), ListAttribute(name="announceTo", type=list), UnicodeAttribute(name="announcedAttribute", type=unicode), ListAttribute(name="childResources", type=list), DatetimeAttribute(name="creationTime", type=datetime), DatetimeAttribute(name="expirationTime", type=datetime), UnicodeAttribute(name="labels", type=unicode), DatetimeAttribute(name="lastModifiedTime", type=datetime), UnicodeAttribute(name="name", type=unicode), UnicodeAttribute(name="nodeLink", type=unicode), UnicodeAttribute(name="ontologyRef", type=unicode), ListAttribute(name="pointOfAccess", type=list)] diff --git a/doc/training/onem2m-examples/onem2m-example-10.py b/doc/training/onem2m-examples/onem2m-example-10.py new file mode 100644 index 0000000..3edc1e8 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-10.py @@ -0,0 +1,38 @@ +# Example 10: Create a resource + +from openmtc_onem2m.model import AE +from openmtc_onem2m.client.http import OneM2MHTTPClient +from openmtc_onem2m.transport import OneM2MRequest + +# create a OneM2MHTTPClient object +client = OneM2MHTTPClient("http://localhost:8000", False) + +# create a resource to be created on the CSE +# resourceName: (optional) for easy check in browser +# requestReachability: (mandatory) for servercapability of the AE +my_app = AE(App_ID="myApp", + labels=["keyword1", "keyword2"], + resourceName="MYAPP", + requestReachability=False) + +# create a OneM2MRequest object of type 'create' +# ty: resource_type of the created resource +# pc: Resource content to be transferred +onem2m_request = OneM2MRequest("create", to="onem2m", ty=AE, pc=my_app) + +# send the 'create' OneM2MRequest to the CSE +promise = client.send_onem2m_request(onem2m_request) + +# reteive the OneM2MResponse from the returned promise +onem2m_response = promise.get() + +print onem2m_response.to +#>>> onem2m +print onem2m_response.response_status_code +#>>> STATUS(numeric_code=2001, description='CREATED', http_status_code=201) +print onem2m_response.content +#>>> AE(path='None', id='ae0') +print onem2m_response.content.App_ID +#>>> myApp +print onem2m_response.content.labels +#>>> [u'keyword1', u'keyword2'] diff --git a/doc/training/onem2m-examples/onem2m-example-11a.py b/doc/training/onem2m-examples/onem2m-example-11a.py new file mode 100644 index 0000000..2ab6c25 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-11a.py @@ -0,0 +1,47 @@ +# Example 11a: Create a resource (continued) + +from openmtc_onem2m.model import AE +from openmtc_onem2m.client.http import OneM2MHTTPClient +from openmtc_onem2m.transport import OneM2MRequest + +client = OneM2MHTTPClient("http://localhost:8000", False) + +my_app = AE(App_ID="myApp", + labels=["keyword1", "keyword2"], + resourceName="MYAPP1", + requestReachability=False) + +onem2m_request = OneM2MRequest("create", to="onem2m", ty=AE, pc=my_app) + +promise = client.send_onem2m_request(onem2m_request) + +onem2m_response = promise.get() + +print onem2m_response.response_status_code +#>>> STATUS(numeric_code=2001, description='CREATED', http_status_code=201) + +# Build path to retieve from +path = "onem2m/" + onem2m_response.content.resourceName +print path +#>>> onem2m/MYAPP + +# Retrieve the AE from the CSE +onem2m_request = OneM2MRequest("retrieve", to=path) +promise = client.send_onem2m_request(onem2m_request) +onem2m_response = promise.get() + +print onem2m_response.response_status_code +#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200) 
+print onem2m_response.content +#>>> AE(path='None', id='ae0') + +# Set the local AE to the retrieved content +my_app = None +my_app = onem2m_response.content + +print my_app.App_ID +#>>> myApp +print my_app.resourceName +#>>> MYAPP1 +print my_app.labels +#>>> [u'keyword1', u'keyword2'] diff --git a/doc/training/onem2m-examples/onem2m-example-11b.py b/doc/training/onem2m-examples/onem2m-example-11b.py new file mode 100644 index 0000000..2fabb8a --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-11b.py @@ -0,0 +1,43 @@ +# Example 11b: Updating a resource using OneM2MRequest Update + +from openmtc_onem2m.model import AE +from openmtc_onem2m.client.http import OneM2MHTTPClient +from openmtc_onem2m.transport import OneM2MRequest + +client = OneM2MHTTPClient("http://localhost:8000", False) + +my_app = AE(App_ID="myApp", + labels=["keyword1", "keyword2"], + resourceName="MYAPP2", + requestReachability=False) + +# Create the AE 'my_app' at the CSE +onem2m_request = OneM2MRequest("create", to="onem2m", ty=AE, pc=my_app) +promise = client.send_onem2m_request(onem2m_request) +onem2m_response = promise.get() +print onem2m_response.content.labels +#>>> [u'keyword1', u'keyword2'] + +# Retrieve the AE from the CSE and check the labels +path = "onem2m/" + onem2m_response.content.resourceName +onem2m_request = OneM2MRequest("retrieve", to=path) +promise = client.send_onem2m_request(onem2m_request) +onem2m_response = promise.get() +print onem2m_response.content.labels +#>>> [u'keyword1', u'keyword2'] + +# Update the labels in the remote resource +# Therefore a temporary AE object is needed +# This temporary AE object should ONLY contain the fields that need to be updated +tmp_app = AE(labels=["foo", "bar", "coffee"]) +onem2m_request = OneM2MRequest("update", to=path, pc=tmp_app) +promise = client.send_onem2m_request(onem2m_request) +onem2m_response = promise.get() +print onem2m_response.content.labels +#>>> [u'foo', u'bar', u'coffee'] + +# Set the local AE to the retrieved content +my_app = None +my_app = onem2m_response.content +print my_app.labels +#>>> [u'foo', u'bar', u'coffee'] diff --git a/doc/training/onem2m-examples/onem2m-example-12a.py b/doc/training/onem2m-examples/onem2m-example-12a.py new file mode 100644 index 0000000..6cf6cc3 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-12a.py @@ -0,0 +1,28 @@ +# Example 12a: Making Requests with error handling + +from openmtc_onem2m.client.http import OneM2MHTTPClient +from openmtc_onem2m.transport import OneM2MRequest, OneM2MErrorResponse +from openmtc.exc import OpenMTCError + +client = OneM2MHTTPClient("http://localhost:8000", False) + +try: + onem2m_request = OneM2MRequest("retrieve", to="onem2m") + promise = client.send_onem2m_request(onem2m_request) + onem2m_response = promise.get() +except OneM2MErrorResponse as e: + print "CSE reported an error:", e + raise +except OpenMTCError as e: + print "Failed to reach the CSE:", e + raise +else: + pass + +# no exception was raised, the method returned normally. 
+print onem2m_response.to +#>>> onem2m +print onem2m_response.response_status_code +#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200) +print onem2m_response.content +#>>> CSEBase(path='None', id='cb0') diff --git a/doc/training/onem2m-examples/onem2m-example-12b.py b/doc/training/onem2m-examples/onem2m-example-12b.py new file mode 100644 index 0000000..7d4f625 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-12b.py @@ -0,0 +1,37 @@ +# Example 12b: Forwarding + +from openmtc_onem2m.client.http import OneM2MHTTPClient +from openmtc_onem2m.transport import OneM2MRequest + +client = OneM2MHTTPClient("http://localhost:8000", False) + +onem2m_request = OneM2MRequest("retrieve", to="onem2m") +onem2m_response = client.send_onem2m_request(onem2m_request).get() +print "---> Request to: http://localhost:8000" + "/" + onem2m_request.to +print onem2m_response.to +#>>> onem2m +print onem2m_response.response_status_code +#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200) +print onem2m_response.content +#>>> CSEBase(path='None', id='cb0') + +onem2m_request = OneM2MRequest("retrieve", to="~/mn-cse-1/onem2m") +onem2m_response = client.send_onem2m_request(onem2m_request).get() +print "---> Request to: http://localhost:8000" + "/" + onem2m_request.to +print onem2m_response.to +#>>> ~/mn-cse-1/onem2m +print onem2m_response.response_status_code +#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200) +print onem2m_response.content +#>>> CSEBase(path='None', id='cb0') + +client.port = 18000 +onem2m_request = OneM2MRequest("retrieve", to="~/mn-cse-1/onem2m") +onem2m_response = client.send_onem2m_request(onem2m_request).get() +print "---> Request to: http://localhost:18000" + "/" + onem2m_request.to +print onem2m_response.to +#>>> ~/mn-cse-1/onem2m +print onem2m_response.response_status_code +#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200) +print onem2m_response.content +#>>> CSEBase(path='None', id='cb0') diff --git a/doc/training/onem2m-examples/onem2m-example-13.py b/doc/training/onem2m-examples/onem2m-example-13.py new file mode 100644 index 0000000..6d0b3fd --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-13.py @@ -0,0 +1,9 @@ +# Example 13: Minimal application + +from openmtc_app.onem2m import XAE + +class MyAE(XAE): + # when this is called the application is registered + # and can start doing something + def _on_register(self): + pass diff --git a/doc/training/onem2m-examples/onem2m-example-14a.py b/doc/training/onem2m-examples/onem2m-example-14a.py new file mode 100644 index 0000000..f07f2b5 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-14a.py @@ -0,0 +1,12 @@ +# Example 14a: Invoking a FlaskRunner + +from openmtc_app.onem2m import XAE +from openmtc_app.flask_runner import FlaskRunner + +class MyAE(XAE): + def _on_register(self): + pass + +app_instance = MyAE() +runner = FlaskRunner(app_instance) +runner.run("http://localhost:8000") diff --git a/doc/training/onem2m-examples/onem2m-example-14b.py b/doc/training/onem2m-examples/onem2m-example-14b.py new file mode 100644 index 0000000..a78f6ac --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-14b.py @@ -0,0 +1,12 @@ +# Example 14b: Invoking a FlaskRunner with custom name + +from openmtc_app.onem2m import XAE +from openmtc_app.flask_runner import FlaskRunner + +class MyAE(XAE): + def _on_register(self): + pass + +app_instance = MyAE(name="someAppName") +runner = FlaskRunner(app_instance) 
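+# run() connects to the CSE at the given URL and registers the app; _on_register() is called once registration has completed (cf. Example 13)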
+runner.run("http://localhost:8000") diff --git a/doc/training/onem2m-examples/onem2m-example-15.py b/doc/training/onem2m-examples/onem2m-example-15.py new file mode 100644 index 0000000..50a65be --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-15.py @@ -0,0 +1,7 @@ +# Example 15: Running App with Static Information + +from openmtc_app.onem2m import XAE + +class MyAE(XAE): + app_id = "AnotherAppID" + labels =["keyword1", "keyword2"] diff --git a/doc/training/onem2m-examples/onem2m-example-16.py b/doc/training/onem2m-examples/onem2m-example-16.py new file mode 100644 index 0000000..dbdfd82 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-16.py @@ -0,0 +1,12 @@ +# Example 16: Creating a simple Container + +from openmtc_app.onem2m import XAE +from openmtc_app.flask_runner import FlaskRunner + +class MyAE(XAE): + def _on_register(self): + container = self.create_container(None, "myContainer") + +app_instance = MyAE() +runner = FlaskRunner(app_instance) +runner.run("http://localhost:8000") diff --git a/doc/training/onem2m-examples/onem2m-example-17.py b/doc/training/onem2m-examples/onem2m-example-17.py new file mode 100644 index 0000000..79e5e65 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-17.py @@ -0,0 +1,18 @@ +# Example 17: Creating a custom Container + +from openmtc_app.onem2m import XAE +from openmtc_app.flask_runner import FlaskRunner +from openmtc_onem2m.model import Container + +class MyAE(XAE): + def _on_register(self): + # create a container + container = Container( + resourceName = "myContainer", + maxNrOfInstances=100, + maxByteSize=1024 ** 3 ) + container = self.create_container(None, container) + +app_instance = MyAE() +runner = FlaskRunner(app_instance) +runner.run("http://localhost:8000") diff --git a/doc/training/onem2m-examples/onem2m-example-18.py b/doc/training/onem2m-examples/onem2m-example-18.py new file mode 100644 index 0000000..b92825d --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-18.py @@ -0,0 +1,20 @@ +# Example 18: Pushing Data + +from openmtc_app.onem2m import XAE +from openmtc_app.flask_runner import FlaskRunner +from time import sleep +from somewhere import read_sensor_data + +class MyAE(XAE): + def _on_register(self): + container = self.create_container(None, "myContainer") + + while True: + value = read_sensor_data() # read measurements + data = {"value": value} + self.push_content(container, data) + sleep(60) + +app_instance = MyAE() +runner = FlaskRunner(app_instance) +runner.run("http://localhost:8000") diff --git a/doc/training/onem2m-examples/onem2m-example-2.py b/doc/training/onem2m-examples/onem2m-example-2.py new file mode 100644 index 0000000..11b633a --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-2.py @@ -0,0 +1,16 @@ +# Example 2: Passing Values + +from openmtc_onem2m.model import AE + +my_app = AE(App_ID="myApp", labels=["keyword1", "keyword2"]) + +print my_app.path +#>>> None +print my_app.App_ID +#>>> myApp +print my_app.parent_path +#>>> None +print my_app.labels +#>>> [u'keyword1', u'keyword2'] +print my_app.attributes +#>>> [UnicodeAttribute(name="AE-ID", type=unicode), UnicodeAttribute(name="App-ID", type=unicode), ListAttribute(name="accessControlPolicyIDs", type=list), ListAttribute(name="announceTo", type=list), UnicodeAttribute(name="announcedAttribute", type=unicode), ListAttribute(name="childResources", type=list), DatetimeAttribute(name="creationTime", type=datetime), DatetimeAttribute(name="expirationTime", type=datetime), UnicodeAttribute(name="labels", 
type=unicode), DatetimeAttribute(name="lastModifiedTime", type=datetime), UnicodeAttribute(name="name", type=unicode), UnicodeAttribute(name="nodeLink", type=unicode), UnicodeAttribute(name="ontologyRef", type=unicode), ListAttribute(name="pointOfAccess", type=list)] diff --git a/doc/training/onem2m-examples/onem2m-example-3.py b/doc/training/onem2m-examples/onem2m-example-3.py new file mode 100644 index 0000000..f4ac0e8 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-3.py @@ -0,0 +1,8 @@ +# Example 3: Retrieve OneM2MRequest + +from openmtc_onem2m.transport import OneM2MRequest + +request = OneM2MRequest("retrieve", to="onem2m") + +print request.to +#>>> onem2m diff --git a/doc/training/onem2m-examples/onem2m-example-4.py b/doc/training/onem2m-examples/onem2m-example-4.py new file mode 100644 index 0000000..526f928 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-4.py @@ -0,0 +1,8 @@ +# Example 4: Delete OneM2MRequest + +from openmtc_onem2m.transport import OneM2MRequest + +request = OneM2MRequest("delete", to="onem2m") + +print request.to +#>>> onem2m diff --git a/doc/training/onem2m-examples/onem2m-example-5a.py b/doc/training/onem2m-examples/onem2m-example-5a.py new file mode 100644 index 0000000..f16338b --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-5a.py @@ -0,0 +1,13 @@ +# Example 5a: Create OneM2MRequest + +from openmtc_onem2m.transport import OneM2MRequest +from openmtc_onem2m.model import AE + +my_app = AE(App_ID="myApp") + +request = OneM2MRequest("create", to="onem2m", pc=my_app) + +print request.to +#>>> onem2m +print request.pc.App_ID +#>>> myApp diff --git a/doc/training/onem2m-examples/onem2m-example-5b.py b/doc/training/onem2m-examples/onem2m-example-5b.py new file mode 100644 index 0000000..2878d73 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-5b.py @@ -0,0 +1,19 @@ +# Example 5b: Create OneM2MRequest with data + +from openmtc_onem2m.transport import OneM2MRequest +import json + +sensor_data = {"type": "temperature", + "value": 15 } + +data_string = json.dumps(sensor_data) + +request = OneM2MRequest("create", + to="onem2m", + pc=data_string, + ty="application/json") + +print request.to +#>>> onem2m +print request.pc +#>>> {"type": "temperature", "value": 15} diff --git a/doc/training/onem2m-examples/onem2m-example-6a.py b/doc/training/onem2m-examples/onem2m-example-6a.py new file mode 100644 index 0000000..82e47af --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-6a.py @@ -0,0 +1,13 @@ +# Example 6a: Notify OneM2MRequest + +from openmtc_onem2m.transport import OneM2MRequest +from openmtc_onem2m.model import AE + +my_app = AE(App_ID="myApp") + +request = OneM2MRequest("notify", to="onem2m", pc=my_app) + +print request.to +#>>> onem2m +print request.pc.App_ID +#>>> myApp diff --git a/doc/training/onem2m-examples/onem2m-example-6b.py b/doc/training/onem2m-examples/onem2m-example-6b.py new file mode 100644 index 0000000..f50a47c --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-6b.py @@ -0,0 +1,19 @@ +# Example 6b: Notify OneM2MRequest with data + +from openmtc_onem2m.transport import OneM2MRequest +import json + +sensor_data = {"type": "temperature", + "value": 15 } + +data_string = json.dumps(sensor_data) + +request = OneM2MRequest("notify", + to="onem2m", + pc=data_string, + ty="application/json") + +print request.to +#>>> onem2m +print request.pc +#>>> {"type": "temperature", "value": 15} diff --git a/doc/training/onem2m-examples/onem2m-example-7.py 
b/doc/training/onem2m-examples/onem2m-example-7.py new file mode 100644 index 0000000..72b0b86 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-7.py @@ -0,0 +1,13 @@ +# Example 7: Update OneM2MRequest + +from openmtc_onem2m.transport import OneM2MRequest +from openmtc_onem2m.model import AE + +my_app = AE(App_ID="myApp", labels=["keyword1", "keyword2"]) + +request = OneM2MRequest("update", to="onem2m", pc=my_app.labels) + +print request.to +#>>> onem2m +print request.pc +#>>> [u'keyword1', u'keyword2'] diff --git a/doc/training/onem2m-examples/onem2m-example-8a.py b/doc/training/onem2m-examples/onem2m-example-8a.py new file mode 100644 index 0000000..eb8b127 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-8a.py @@ -0,0 +1,6 @@ +# Example 8a: Creating a Client + +from openmtc_onem2m.client.http import OneM2MHTTPClient + +# create a OneM2MHTTPClient object +client = OneM2MHTTPClient("http://localhost:8000", False) diff --git a/doc/training/onem2m-examples/onem2m-example-8b.py b/doc/training/onem2m-examples/onem2m-example-8b.py new file mode 100644 index 0000000..3c37731 --- /dev/null +++ b/doc/training/onem2m-examples/onem2m-example-8b.py @@ -0,0 +1,21 @@ +# Example 8b: Making Requests + +from openmtc_onem2m.client.http import OneM2MHTTPClient +from openmtc_onem2m.transport import OneM2MRequest + +# create a OneM2MHTTPClient object +client = OneM2MHTTPClient("http://localhost:8000", False) + +# create a OneM2MRequest object +onem2m_request = OneM2MRequest("retrieve", to="onem2m") +# send the OneM2MRequest to the CSE +promise = client.send_onem2m_request(onem2m_request) +# retrieve the OneM2MResponse from the returned promise +onem2m_response = promise.get() + +print onem2m_response.to +#>>> onem2m +print onem2m_response.response_status_code +#>>> STATUS(numeric_code=2000, description='OK', http_status_code=200) +print onem2m_response.content +#>>> CSEBase(path='None', id='cb0') diff --git a/doc/training/start-app.sh b/doc/training/start-app.sh new file mode 100755 index 0000000..b06c332 --- /dev/null +++ b/doc/training/start-app.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +base_path=$(dirname "$(readlink -f "${0}")") + +################################################################################ +# set app_file +declare -a app_array + +app_array=($(find ${base_path} -name "*-final.py")) +array_length=${#app_array[@]} + +# print possibilities +for i in $(seq 1 ${array_length}); do + path=${app_array[$[${i}-1]]} + echo "[${i}] $(basename ${path})" +done + +# read choice +while true; do + read -n 1 -p "Choose the app to start: " choice + + [[ ${choice} =~ ^[0-9]+$ ]] && \ + [ ${choice} -gt 0 -a ${choice} -le ${array_length} ] && \ + echo && break + + echo " Wrong choice. Do it again." +done + +app_file=${app_array[$[${choice}-1]]} + +################################################################################ +# run app_file +cd ${base_path} +cd .. +. ../common/prep-env.sh +cd ${base_path} +python ${app_file} diff --git a/doc/training/training-gui-sensors-actuators.md b/doc/training/training-gui-sensors-actuators.md new file mode 100644 index 0000000..ea418c9 --- /dev/null +++ b/doc/training/training-gui-sensors-actuators.md @@ -0,0 +1,351 @@ +# gui-sensors-actuators demo app + + +The [gui-sensors-actuators demo application](/doc/training/apps/onem2m/gui/sensors-actuators/) receives data from the (virtual) sensors of the ipe-demo-apps and prints it to the console. It further generates commands to control the (virtual) actuators of the ipe-demo-apps. 
The demo application is extended incrementally from the basic app frame to the complete gui-sensors-actuators demo application. + + +## Step 1: [onem2m-gui-sensors-actuators-01.py](/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-01.py) + +* this file is equivalent to onem2m-gui-sensors-final.py +* actuator support will be added in the following files + +``` py +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_measurements(self, container, data): + # this function handles the new data from subscribed measurements containers + print('handle measurements..') + print('container: %s' % container) + print('data: %s' % data) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) +``` + + +## Step 2: [onem2m-gui-sensors-actuators-02.py](/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-02.py) + +* adds variable self.actuators +* adds periodic discovery of 'commands' containers + +``` py +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # init variables + self.actuators = [] + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + # start periodic discovery of 'commands' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['commands']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_commands # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_discovery_commands(self, discovery): + pass + + def handle_measurements(self, container, data): + # this function handles the new data from subscribed measurements containers + print('handle measurements..') + print('container: %s' % container) + print('data: %s' % data) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = 
TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) +``` + + +## Step 3: [onem2m-gui-sensors-actuators-03.py](/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-03.py) + +* extends the commands discovery handler to append new commands containers to the actuators list + +``` py +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # init variables + self.actuators = [] + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + # start periodic discovery of 'commands' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_commands # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_discovery_commands(self, discovery): + # for every 'commands' container discovered + for uri in discovery: + print('discovered commands container: %s' % uri) + # add discovered commands container to known actuators list + self.actuators.append(uri) + print('') + + def handle_measurements(self, container, data): + # this function handles the new data from subscribed measurements containers + print('handle measurements..') + print('container: %s' % container) + print('data: %s' % data) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) +``` + + +## Step 4: [onem2m-gui-sensors-actuators-04.py](/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-04.py) + +* extended the handle_measurements() function to implement some simple logic for actuator control: +* 1. open or close windows based on humidity measurements +* 2. 
actuate the Air-Conditioning based on temperature measurements + +``` py +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # init variables + self.actuators = [] + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + # start periodic discovery of 'commands' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['commands']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_commands # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_discovery_commands(self, discovery): + # for every 'commands' container discovered + for uri in discovery: + print('discovered commands container: %s' % uri) + # add discovered commands container to known actuators list + self.actuators.append(uri) + print('') + + def handle_measurements(self, container, data): + print('handle_measurements...') + print('container: %s' % container) + print('data: %s' % data) + # extract information from data set + value = data['value'] + type_ = data['type'] + # simple logic to control the AirCon + if type_ == 'temperature': + if value >= 22: + data = {'Power': 'ON'} + print('Temperature = %s >= 22. Turning AirConditioning ON' % value) + else: + data = {'Power': 'OFF'} + print('Temperature = %s < 22. Turning AirConditioning OFF' % value) + # simple logic to control the Windows + elif type_ == 'humidity': + if value >= 65: + data = {'State': 'OPEN'} + print('Humidity = %s >= 65. OPEN Window' % value) + else: + data = {'State': 'CLOSE'} + print('Humidity = %s < 65. 
CLOSE Window' % value) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) +``` + + +## Step 5: [onem2m-gui-sensors-actuators-05.py](/doc/training/apps/onem2m/gui/sensors-actuators/onem2m-gui-sensors-actuators-05.py) + +* extend the handle_measurements() function to push new commands based on measurements to the actuators + +``` py +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # init variables + self.actuators = [] + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + # start periodic discovery of 'commands' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['commands']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_commands # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_discovery_commands(self, discovery): + # for every 'commands' container discovered + for uri in discovery: + print('discovered commands container: %s' % uri) + # add discovered commands container to known actuators list + self.actuators.append(uri) + print('') + + def handle_measurements(self, container, data): + print('handle_measurements...') + print('container: %s' % container) + print('data: %s' % data) + # extract information from data set + value = data['value'] + type_ = data['type'] + # simple logic to control the AirCon + if type_ == 'temperature': + if value >= 22: + data = {'Power': 'ON'} + print('Temperature = %s >= 22. Turning AirConditioning ON' % value) + else: + data = {'Power': 'OFF'} + print('Temperature = %s < 22. Turning AirConditioning OFF' % value) + # push the new command based on temperature measurements to all known AirCon actuators + for actuator in self.actuators: + if 'AirCon' in actuator: + self.push_content(actuator, data) + # simple logic to control the Windows + elif type_ == 'humidity': + if value >= 65: + data = {'State': 'OPEN'} + print('Humidity = %s >= 65. OPEN Window' % value) + else: + data = {'State': 'CLOSE'} + print('Humidity = %s < 65. 
CLOSE Window' % value) + # push the new command based on humidity measurements to all known Window actuators + for actuator in self.actuators: + if 'Window' in actuator: + self.push_content(actuator, data) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) +``` + diff --git a/doc/training/training-gui-sensors.md b/doc/training/training-gui-sensors.md new file mode 100644 index 0000000..6f94ca6 --- /dev/null +++ b/doc/training/training-gui-sensors.md @@ -0,0 +1,135 @@ +# gui-sensors demo app + + +The [gui-sensors demo applications](/doc/training/apps/onem2m/gui/sensors/) receives data from the (virtual) sensors from the ipe-demo-apps and prints it to the console. The demo application is extended incrementally from the basic app frame to the complete gui-sensors demo application. + + +## Step 1: [onem2m-gui-sensors-01.py](/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-01.py) + +* initial app base structure +* starts periodic discovery on registration +* the discovery result is printed as a whole +* this will discover EVERY new container + +``` py +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # start periodic discovery of EVERY container + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + None, # no filter criteria + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery # callback function to return the result of the discovery to + ) + + def handle_discovery(self, discovery): + # print the discovery + print('New discovery:') + print(discovery) + print(' ') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI() + Runner(app).run(host) +``` + + +## Step 2: [onem2m-gui-sensors-02.py](/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-02.py) + +* adds filter criteria, to specify what to discover +* detailed print of every uri from the discovery +* this will only discover new container with the specific label 'measurements' +* renamed function handle_discovery() to handle_discovery_measurements() + +``` py +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + print('New discovery:') + # for each device container discovered + for uri in discovery: + # print content of discovery + print('uri from discovery: %s' % uri) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:18000' + app = TestGUI() + Runner(app).run(host) +``` + + +## Step 3: [onem2m-gui-sensors-03.py](/doc/training/apps/onem2m/gui/sensors/onem2m-gui-sensors-03.py) + +* adds subscription to discovered containers via returned uri +* adds content handler for subscribed containers +* this will only discover 
and subscribe to new containers with the specific label 'measurements' +* whenever a child is created in the subscribed containers, the content handler is called + +``` py +from openmtc_app.onem2m import XAE + + +class TestGUI(XAE): + remove_registration = True + remote_cse = '/mn-cse-1/onem2m' + + def _on_register(self): + # start periodic discovery of 'measurements' containers + self.periodic_discover( + self.remote_cse, # start directory inside cse for discovery + {'labels': ['measurements']}, # filter criteria (what to discover) + 1, # frequency of repeated discovery (in Hz) + self.handle_discovery_measurements # callback function to return the result of the discovery to) + ) + + def handle_discovery_measurements(self, discovery): + # for each device container discovered + for uri in discovery: + # subscribe to device container with handler function + print('Subscribing to Resource: %s' % uri) + self.add_container_subscription(uri, self.handle_measurements) + + def handle_measurements(self, container, data): + # this function handles the new data from subscribed measurements containers + print('handle measurements..') + print('container: %s' % container) + print('data: %s' % data) + print('') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + host = 'http://localhost:18000' + app = TestGUI( + poas=['http://localhost:21345'] # adds poas in order to receive notifications + ) + Runner(app).run(host) +``` + diff --git a/doc/training/training-index.md b/doc/training/training-index.md new file mode 100644 index 0000000..ce2792a --- /dev/null +++ b/doc/training/training-index.md @@ -0,0 +1,101 @@ +# Write your first OpenMTC applications + + +## Introduction +OpenMTC is delivered with some incremental demo applications which can be consulted for further understanding or as templates for other applications. The oneM2M demo applications can be found in [/doc/training/apps/onem2m/](/doc/training/apps/onem2m/). + +The training is subdivided into examples for GUI-applications and IPE-applications. Both of these provide incremental demo applications for either sensor-actuator-applications or only sensor-applications. These are explained in the following sections. + + +## The start-app Script + +The [training folder](/doc/training) contains the [start-app.sh](/doc/training/start-app.sh) script. This script allows you to run one of the four complete demo applications: + +```sh +user@host:/git$ ./openmtc-open-source/doc/training/start-app.sh +[1] onem2m-ipe-sensors-final.py +[2] onem2m-ipe-sensors-actuators-final.py +[3] onem2m-gui-sensors-final.py +[4] onem2m-gui-sensors-actuators-final.py +Choose the app to start: +``` + +## Getting started + +First of all, OpenMTC needs to be installed on your system. If you have not yet installed OpenMTC, please do so by following these [instructions](../install-sdk.md). + +### Sensor only demo applications + +To run the sensor only demo application, you will need four consoles. Proceed in the following order, since both the IPE and the GUI require a running CSE. + +**Console 1:** Backend +* start the Backend by executing the following: +* `./openmtc-open-source/openmtc-gevent/run_backend` +* After you have started all four consoles, you should get something like [this](console-outputs/training-sensor-backend.md).
+ +**Console 2:** Gateway +* start the Gateway by executing the following: +* `./openmtc-open-source/openmtc-gevent/run_gateway` +* After you have started all four consoles, you should get something like [this](console-outputs/training-sensor-gateway.md). + +**Console 3:** IPE +* start the IPE by executing the following: +* `./openmtc-open-source/doc/training/start-app.sh` +* type `1` +* you should get something like [this](console-outputs/training-sensor-ipe.md). + +**Console 4:** GUI +* start the GUI by executing the following: +* `./openmtc-open-source/doc/training/start-app.sh` +* type `3` +* you should get something like [this](console-outputs/training-sensor-gui.md). + + +### Sensor-Actuator demo applications + +To run the sensor-actuator demo application, you will also need four consoles. Proceed in the following order, since both the IPE and the GUI require a running CSE. + +**Console 1:** Backend +* start the Backend by executing the following: +* `./openmtc-open-source/openmtc-gevent/run_backend` +* After you have started all four consoles, you should get something like [this](console-outputs/training-sensor-actuator-backend.md). + +**Console 2:** Gateway +* start the Gateway by executing the following: +* `./openmtc-open-source/openmtc-gevent/run_gateway` +* After you have started all four consoles, you should get something like [this](console-outputs/training-sensor-actuator-gateway.md). + +**Console 3:** IPE +* start the IPE by executing the following: +* `./openmtc-open-source/doc/training/start-app.sh` +* type `2` +* you should get something like [this](console-outputs/training-sensor-actuator-ipe.md). + +**Console 4:** GUI +* start the GUI by executing the following: +* `./openmtc-open-source/doc/training/start-app.sh` +* type `4` +* you should get something like [this](console-outputs/training-sensor-actuator-gui.md). + + +## IPE demo applications + +IPE stands for Interworking Proxy Application Entity. The IPE demo applications attach (virtual) sensors and (virtual) actuators to the CSE by sending (simulated) sensor readings and by receiving and processing commands meant for the (virtual) actuators attached to the hardware the IPE demo app is running on. + +The [IPE demo applications](/doc/training/apps/onem2m/ipe/) are available as sensor-actuator-applications or only as sensor-applications. + +**Incremental IPE demo applications** +* [ipe-sensors](training-ipe-sensors.md) provides virtual sensors +* [ipe-sensors-actuators](training-ipe-sensors-actuators.md) provides virtual sensors and actuators + + +## GUI demo applications + +GUI stands for Graphical User Interface. This is somewhat misleading, as these demo apps do not provide a real GUI. Instead, these GUI demo applications provide a textual user interface that receives (and displays to the user) the (virtual) sensor data provided by the ipe-demo-apps. Further, they send commands to the (virtual) actuators attached to the ipe-demo-apps. + +The [gui demo applications](/doc/training/apps/onem2m/gui/) are available as sensor-actuator-applications or only as sensor-applications.
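+
+Before diving into the incremental demo applications listed below, the following condensed sketch shows the pattern that the GUI demo apps follow: discover containers by label, subscribe to the 'measurements' containers, and push command data to the discovered 'commands' containers. It is a simplified illustration based on the demo code (the class name `MinimalGUI` is made up here, and the CSE address and ports are the training defaults, which may differ in your setup).
+
+``` py
+from openmtc_app.onem2m import XAE
+
+
+class MinimalGUI(XAE):
+    remove_registration = True
+    remote_cse = '/mn-cse-1/onem2m'
+
+    def _on_register(self):
+        self.actuators = []
+        # discover sensor data containers and actuator command containers by label
+        self.periodic_discover(self.remote_cse, {'labels': ['measurements']}, 1,
+                               self.handle_discovery_measurements)
+        self.periodic_discover(self.remote_cse, {'labels': ['commands']}, 1,
+                               self.handle_discovery_commands)
+
+    def handle_discovery_measurements(self, discovery):
+        # subscribe to every discovered 'measurements' container
+        for uri in discovery:
+            self.add_container_subscription(uri, self.handle_measurements)
+
+    def handle_discovery_commands(self, discovery):
+        # remember every discovered 'commands' container as an actuator
+        self.actuators.extend(discovery)
+
+    def handle_measurements(self, container, data):
+        # print the received sensor data and react to it, e.g. by
+        # pushing a command to all known actuators
+        print('%s: %s' % (container, data))
+        for actuator in self.actuators:
+            self.push_content(actuator, {'Power': 'ON'})
+
+
+if __name__ == '__main__':
+    from openmtc_app.runner import AppRunner as Runner
+
+    app = MinimalGUI(
+        poas=['http://localhost:21345']  # so the app can receive notifications
+    )
+    Runner(app).run('http://localhost:18000')
+```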
+ +**Incremental GUI demo applications** +* [GUI-sensors](training-gui-sensors.md) receives and displays sensor data +* [GUI-sensors-actuators](training-gui-sensors-actuators.md) receives and displays sensor data and issues commands to actuators + diff --git a/doc/training/training-ipe-sensors-actuators.md b/doc/training/training-ipe-sensors-actuators.md new file mode 100644 index 0000000..8d2c10b --- /dev/null +++ b/doc/training/training-ipe-sensors-actuators.md @@ -0,0 +1,641 @@ +# ipe-sensors-actuators demo app + + +The [ipe-sensors-actuators demo applications](/doc/training/apps/onem2m/ipe/sensors-actuators/) generates data from (virtual) sensors and sends them to the CSE. It further receives and processes commands meant for (virtual) actuators attaced to the hardware this ipe demo app is running on. The demo application is extended incrementally from the basic app frame to the complete ipe-sensors-actuators demo application. + + +## Step 1: [onem2m-ipe-sensors-actuators-01.py](/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-01.py) + +* this file is equivalent to onem2m-ipe-sensors-final.py +* onem2m-ipe-sensors-final.py will be completed from this point for actuator support +* added variable self.actuators and self._command_containers + +``` py +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # available actuators + actuators = [ + 'Switch-AirCon', # AirConditioning + 'Switch-Window' # Window + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + self._command_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if 
sensor.startswith('Temp'): + labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print ('%s: %s' % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) +``` + +## Step 2: [onem2m-ipe-sensors-actuators-02.py](/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-02.py) + +* adds the creation of a container for each actuator of this ipe inside the devices_container + +``` py +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # available actuators + actuators = [ + 'Switch-AirCon', # AirConditioning + 'Switch-Window' # Window + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + self._command_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # create container for each actuator + for actuator in self.actuators: + actuator_container = Container(resourceName=actuator) + self.create_container( + self._devices_container.path, # the target resource/path parenting the Container + actuator_container, # the Container resource or a valid container ID + max_nr_of_instances=0, # the container's max_nr_of_instances (here: 0=unlimited) + labels=['actuator'] # (optional) the container's labels + ) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + 
self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if sensor.startswith('Temp'): + labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print ('%s: %s' % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) +``` + +## Step 3: [onem2m-ipe-sensors-actuators-03.py](/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-03.py) + +* adds the creation of a commands_container inside each actuator_container +* adds this new commands_container to the self._command_containers variable + +``` py +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # available actuators + actuators = [ + 'Switch-AirCon', # AirConditioning + 'Switch-Window' # Window + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + self._command_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # create container for each actuator + for actuator in self.actuators: + actuator_container = Container(resourceName=actuator) + self.create_container( + self._devices_container.path, # the target resource/path parenting the Container + actuator_container, # the Container resource or a valid container ID + max_nr_of_instances=0, # the container's max_nr_of_instances (here: 0=unlimited) + labels=['actuator'] # (optional) the container's labels + ) + # create container for the commands of the actuators + commands_container = Container(resourceName='commands') + commands_container = self.create_container( + actuator_container.path, + commands_container, + max_nr_of_instances=3, + labels=['commands'] + ) + # add commands_container of current 
actuator to self._command_containers + self._command_containers[actuator] = commands_container + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if sensor.startswith('Temp'): + labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print ('%s: %s' % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) +``` + +## Step 4: [onem2m-ipe-sensors-actuators-04.py](/doc/training/apps/onem2m/ipe/sensors-actuators/onem2m-ipe-sensors-actuators-04.py) + +* adds the subscription of each commands_container to the handler handle_command() +* adds the handler function handle_command() + +``` py +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # available actuators + actuators = [ + 'Switch-AirCon', # AirConditioning + 'Switch-Window' # Window + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + self._command_containers = {} + + # init base 
structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # create container for each actuator + for actuator in self.actuators: + actuator_container = Container(resourceName=actuator) + self.create_container( + self._devices_container.path, # the target resource/path parenting the Container + actuator_container, # the Container resource or a valid container ID + max_nr_of_instances=0, # the container's max_nr_of_instances (here: 0=unlimited) + labels=['actuator'] # (optional) the container's labels + ) + # create container for the commands of the actuators + commands_container = Container(resourceName='commands') + commands_container = self.create_container( + actuator_container.path, + commands_container, + max_nr_of_instances=3, + labels=['commands'] + ) + # add commands_container of current actuator to self._command_containers + self._command_containers[actuator] = commands_container + # subscribe to command container of each actuator to the handler command + self.add_container_subscription( + commands_container.path, # the Container or it's path to be subscribed + self.handle_command # reference of the notification handling function + ) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def handle_command(self, container, value): + print('handle_command...') + print('container: %s' % container) + print('value: %s' % value) + print('') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if sensor.startswith('Temp'): + labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 
'percentage' + } + + # print the new data set + print ('%s: %s' % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE( + poas=['http://localhost:21346'], # adds poas in order to receive notifications + ) + Runner(app).run(host) +``` + diff --git a/doc/training/training-ipe-sensors.md b/doc/training/training-ipe-sensors.md new file mode 100644 index 0000000..662baa4 --- /dev/null +++ b/doc/training/training-ipe-sensors.md @@ -0,0 +1,553 @@ +# IPE-sensors demo app + + +The [ipe-sensors demo applications](./apps/onem2m/ipe/sensors/) generates data from (virtual) sensors and sends them to the CSE. The demo application is extended incrementally from the basic app frame to the complete IPE-sensors demo application. + + +## Step 1: [onem2m-ipe-sensors-01.py](./apps/onem2m/ipe/sensors/onem2m-ipe-sensors-01.py) + +* added base structure + +``` py +from openmtc_app.onem2m import XAE + + +class TestIPE(XAE): + remove_registration = True + + def _on_register(self): + # log message + self.logger.debug('registered') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) +``` + + +## Step 2: [onem2m-ipe-sensors-02.py](./apps/onem2m/ipe/sensors/onem2m-ipe-sensors-02.py) + +* adds creation of a container for devices +* introduces function for random sensor data generation +* introduces endless loop + +``` py +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + def _on_register(self): + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # create some random data for a random sensor + self.get_random_data() + + # log message + self.logger.debug('registered') + + # start endless loop + self.run_forever() + + def get_random_data(self): + pass + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) +``` + + +## Step 3: [onem2m-ipe-sensors-03.py](./apps/onem2m/ipe/sensors/onem2m-ipe-sensors-03.py) + +* adds random +* spawns run_forever with get_random_data function every one second +* prints some random value operations + +``` py +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + def _on_register(self): + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + print('---------') + random_value = random() + print(random_value) + print(random_value * 10) + print(int(random_value * 10)) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) +``` + + +## Step 4: 
[onem2m-ipe-sensors-04.py](./apps/onem2m/ipe/sensors/onem2m-ipe-sensors-04.py) + +* introducing list of sensors to create +* introducing settings for random sensor data generation +* adding code for random time intervals +* adding code for random sensor selection +* adding code for random sensor data generation + +``` py +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # settings for random sensor data generation + threshold = 0.5 + value_range = 25 + value_offset = 10 + + def _on_register(self): + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + print('') + + # for random time intervals + if random() > self.threshold: + print('got some data') + + # select a random sensor + print('available sensors: %s' % self.sensors) + print('number of available sensors: %s' % len(self.sensors)) + print('some random sensor: %s' % self.sensors[int(random() * len(self.sensors))]) + + # generate random sensor data + print('random sensor data: %s' % int(random() * self.value_range + self.value_offset)) + + else: + print('no data') + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) +``` + + +## Step 5: [onem2m-ipe-sensors-05.py](./apps/onem2m/ipe/sensors/onem2m-ipe-sensors-05.py) + +* adds different range and offset for temperature and humidity value generation +* introducing self._recognized_sensors variable +* completing get_random_data() function +* introducing handle_sensor_data() function +* introducing create_sensor_structure() function +* introducing push_sensor_data() function + +``` py +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # 
initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('I need to create a structure for the sensor %s.' % sensor) + self._recognized_sensors[sensor] = 'something useful' + + def push_sensor_data(self, sensor, value): + print('I would push the content %i of %s to the gateway.' % (value, sensor)) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) +``` + + +## Step 6: [onem2m-ipe-sensors-06.py](./apps/onem2m/ipe/sensors/onem2m-ipe-sensors-06.py) + +* added create sensor container to function create_sensor_structure() +* add sensor to _recognized_sensors +* build data set with value and metadata +* printing out the new data set + +``` py +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print('%s: %s' % (sensor, data)) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) +``` + + +## Step 7: [onem2m-ipe-sensors-07.py](./apps/onem2m/ipe/sensors/onem2m-ipe-sensors-07.py) + +* introduced 
self._measurement_containers variable +* added creation of measurements container in function create_sensor_structure() +* added push of data to measurements_container of the sensor in function push_sensor_data() + +``` py +from random import random + +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container + + +class TestIPE(XAE): + remove_registration = True + + # sensors to create + sensors = [ + 'Temp-1', + 'Temp-2', + 'Humi-1', + 'Humi-2' + ] + + # settings for random sensor data generation + threshold = 0.2 + temp_range = 25 + temp_offset = 10 + humi_range = 50 + humi_offset = 30 + + def _on_register(self): + + # init variables + self._recognized_sensors = {} + self._recognized_measurement_containers = {} + + # init base structure + label = 'devices' + container = Container(resourceName=label) + self._devices_container = self.create_container(None, + container, + labels=[label], + max_nr_of_instances=0) + + # trigger periodically new data generation + self.run_forever(1, self.get_random_data) + + # log message + self.logger.debug('registered') + + def get_random_data(self): + + # at random time intervals + if random() > self.threshold: + + # select a random sensor + sensor = self.sensors[int(random() * len(self.sensors))] + + # set parameters depending on sensor type + if sensor.startswith('Temp'): + value_range = self.temp_range + value_offset = self.temp_offset + else: + value_range = self.humi_range + value_offset = self.humi_offset + + # generate random sensor data + value = int(random() * value_range + value_offset) + self.handle_sensor_data(sensor, value) + + def handle_sensor_data(self, sensor, value): + + # initialize sensor structure if never done before + if sensor not in self._recognized_sensors: + self.create_sensor_structure(sensor) + self.push_sensor_data(sensor, value) + + def create_sensor_structure(self, sensor): + print('initializing sensor: %s' % sensor) + + # create sensor container + device_container = Container(resourceName=sensor) + device_container = self.create_container(self._devices_container.path, + device_container, + labels=['sensor'], + max_nr_of_instances=0) + + # add sensor to _recognized_sensors + self._recognized_sensors[sensor] = device_container + + # create measurements container + labels = ['measurements'] + if sensor.startswith('Temp'): + labels.append('temperature') + else: + labels.append('humidity') + measurements_container = Container(resourceName='measurements') + measurements_container = self.create_container(device_container.path, + measurements_container, + labels=labels, + max_nr_of_instances=3) + + # add measurements_container from sensor to _recognized_measurement_containers + self._recognized_measurement_containers[sensor] = measurements_container + + def push_sensor_data(self, sensor, value): + + # build data set with value and metadata + if sensor.startswith('Temp'): + data = { + 'value': value, + 'type': 'temperature', + 'unit': 'degreeC' + } + else: + data = { + 'value': value, + 'type': 'humidity', + 'unit': 'percentage' + } + + # print the new data set + print ('%s: %s' % (sensor, data)) + + # finally, push the data set to measurements_container of the sensor + self.push_content(self._recognized_measurement_containers[sensor], data) + + +if __name__ == '__main__': + from openmtc_app.runner import AppRunner as Runner + + host = 'http://localhost:8000' + app = TestIPE() + Runner(app).run(host) +``` + diff --git a/doc/various.md b/doc/various.md new file mode 100644 index 0000000..af0524d --- /dev/null +++ 
b/doc/various.md @@ -0,0 +1,84 @@ +# Appendix + +## How to install Docker on Ubuntu + +[Docker](https://www.docker.com/) is a software container solution used +to provide operating-system-level virtualization. It is possible for +all OpenMTC components to run within a Docker container. To make +this possible, a script is provided to allow a user to build Docker +container images. + +*Be aware that during the build process the build machine has to have +access to the Internet, since various Docker container image +dependencies are pulled.* + +For a general introduction to Docker, see: +[Get Started with Docker](https://docs.docker.com/get-started/). + +### Install and Prepare Docker + + For the build script to work, Docker needs to be installed. There + are two different approaches to installing Docker on your system. + + 1. Use the OS package manager to install the docker.io package + 2. Use the installer from Docker.com to install the latest version of Docker + + Use the first method whenever your OS package manager provides a + sufficiently recent version of Docker. If this is not the case, *e.g. in Ubuntu 14.04*, use + the second approach to install Docker. + +#### Installing the docker.io package on Ubuntu 16.04 or Debian testing via `apt` + + +``` +# Install Docker on Debian-like systems +sudo apt-get -y install docker.io + +# Prepare a user for non-root access to Docker + +# If the above installation did not create a 'docker' group, you may add +# it manually. Afterwards, restart Docker for the change to take effect +sudo groupadd docker +sudo service docker restart + +# Add a specific user to user group 'docker' +sudo usermod -aG docker USER_NAME + +# Be sure to log off and log in again as that user, to make the group change +# kick in + +# Check that Docker runs properly by using various docker commands, e.g. +docker images +docker ps + +# Use the Docker build commands in the next section for further +# testing, if the above commands worked properly. If Docker does not +# work properly in the next section, see the description below. + +# If Docker does not work properly yet, try to restart it +sudo systemctl stop docker +sudo ip link set down docker0 +sudo ip link set up docker0 +sudo systemctl start docker + +# Still not working? Is routing configured correctly? Maybe try: +sudo ip r add 172.17.0.0/16 dev docker0 +``` + +#### Installing the latest Docker version via the Docker.com installer + + *Use this particularly for Ubuntu 14.04.* + +``` +# Downloads an installation script and executes it +# Be aware that within the script `sudo` is called +curl -sSL https://get.docker.com | sh + +# Prepare your user account to use the docker command +# +# 1. Add the specified user to user group 'docker' +sudo usermod -aG docker USER_NAME + +# 2.
Logout and re-login to the system so that the user account changes +# can "kick-in" +``` diff --git a/docker/backend-amd64 b/docker/backend-amd64 new file mode 100644 index 0000000..c52da10 --- /dev/null +++ b/docker/backend-amd64 @@ -0,0 +1,30 @@ +############################################################ +# Dockerfile to run openmtc backend binary +############################################################ + +# Set the base image to use openmtc/sdk +FROM openmtc/sdk-amd64:latest + +ENV MOD_NAME=backend + +# Set the file maintainer +MAINTAINER rst/tgu + +# install openmtc dependencies +COPY tmp/$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-all +COPY tmp/openmtc-$MOD_NAME.tar.gz /tmp/openmtc-$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-$MOD_NAME.tar.gz -C / \ + --owner root --group root --no-same-owner --no-overwrite-dir \ + --transform 's/json\.dist/json/' --show-transformed + +RUN mkdir -p /var/log/openmtc + +# add change config +COPY configure-$MOD_NAME-and-start /usr/local/bin/configure-and-start + +# entry point +ENTRYPOINT ["/usr/local/bin/configure-and-start"] +CMD [""] diff --git a/docker/backend-arm b/docker/backend-arm new file mode 100644 index 0000000..0c0a905 --- /dev/null +++ b/docker/backend-arm @@ -0,0 +1,30 @@ +############################################################ +# Dockerfile to run openmtc backend binary +############################################################ + +# Set the base image to use openmtc/sdk +FROM openmtc/sdk-arm:latest + +ENV MOD_NAME=backend + +# Set the file maintainer +MAINTAINER rst/tgu + +# install openmtc dependencies +COPY tmp/$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-all +COPY tmp/openmtc-$MOD_NAME.tar.gz /tmp/openmtc-$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-$MOD_NAME.tar.gz -C / \ + --owner root --group root --no-same-owner --no-overwrite-dir \ + --transform 's/json\.dist/json/' --show-transformed + +RUN mkdir -p /var/log/openmtc + +# add change config +COPY configure-$MOD_NAME-and-start /usr/local/bin/configure-and-start + +# entry point +ENTRYPOINT ["/usr/local/bin/configure-and-start"] +CMD [""] diff --git a/docker/base-amd64 b/docker/base-amd64 new file mode 100644 index 0000000..a1637b0 --- /dev/null +++ b/docker/base-amd64 @@ -0,0 +1,33 @@ +############################################################ +# Dockerfile for base python environment +############################################################ + +# Set the base image to use debian +FROM debian:jessie + +# Set the file maintainer +MAINTAINER rst/tgu + +# install packages and clean up +RUN printf "Starting update...\n" && \ + apt-get update -qq && \ + printf "Update finished.\n" && \ + printf "Starting dist-upgrade...\n" && \ + apt-get dist-upgrade -y > /dev/null && \ + printf "Upgrade finished.\n" && \ + printf "Starting install...\n" && \ + apt-get install -y \ + build-essential \ + netbase \ + iproute2 \ + python-pip \ + python-dev \ + jq > /dev/null && \ + apt-get clean && \ + printf "Installation and cleanup finished.\n" + +# set user +USER root + +# entry point +ENTRYPOINT ["/bin/bash"] diff --git a/docker/base-arm b/docker/base-arm new file mode 100644 index 0000000..b2d49e4 --- /dev/null +++ b/docker/base-arm @@ -0,0 +1,33 @@ +############################################################ +# Dockerfile for base python environment +############################################################ 
+ +# Set the base image to use debian +FROM resin/rpi-raspbian:jessie + +# Set the file maintainer +MAINTAINER rst/tgu + +# install packages and clean up +RUN printf "Starting update...\n" && \ + apt-get update -qq && \ + printf "Update finished.\n" && \ + printf "Starting dist-upgrade...\n" && \ + apt-get dist-upgrade -y > /dev/null && \ + printf "Upgrade finished.\n" && \ + printf "Starting install...\n" && \ + apt-get install -y \ + build-essential \ + netbase \ + iproute2 \ + python-pip \ + python-dev \ + jq > /dev/null && \ + apt-get clean && \ + printf "Installation and cleanup finished.\n" + +# set user +USER root + +# entry point +ENTRYPOINT ["/bin/bash"] diff --git a/docker/builder-amd64 b/docker/builder-amd64 new file mode 100644 index 0000000..3185ec4 --- /dev/null +++ b/docker/builder-amd64 @@ -0,0 +1,9 @@ +# Set the builder image to use openmtc/base +FROM openmtc/base-amd64:latest + +ARG OPENMTC_HOME=/usr/local/src/openmtc-python/ + +WORKDIR $OPENMTC_HOME + +ENTRYPOINT ["./create-binary-dist"] +CMD [""] diff --git a/docker/builder-arm b/docker/builder-arm new file mode 100644 index 0000000..36d5e33 --- /dev/null +++ b/docker/builder-arm @@ -0,0 +1,9 @@ +# Set the builder image to use openmtc/base +FROM openmtc/base-arm:latest + +ARG OPENMTC_HOME=/usr/local/src/openmtc-python/ + +WORKDIR $OPENMTC_HOME + +ENTRYPOINT ["./create-binary-dist"] +CMD [""] diff --git a/docker/configure-backend-and-start b/docker/configure-backend-and-start new file mode 100755 index 0000000..dab86ae --- /dev/null +++ b/docker/configure-backend-and-start @@ -0,0 +1,85 @@ +#!/usr/bin/env bash + +CONFIG_FILE="/etc/openmtc/gevent/config-backend.json" + +# defaults global +REQUIRE_AUTH=${REQUIRE_AUTH-false} + +# defaults logging +LOGGING_FILE=${LOGGING_FILE-"/var/log/openmtc/backend.log"} +LOGGING_LEVEL=${LOGGING_LEVEL-"ERROR"} + +# defaults onem2m +ONEM2M_SP_ID=${ONEM2M_SP_ID-"openmtc.org"} +ONEM2M_CSE_TYPE=${ONEM2M_CSE_TYPE-"IN-CSE"} +ONEM2M_CSE_ID=${ONEM2M_CSE_ID-"in-cse-1"} +ONEM2M_CSE_BASE=${ONEM2M_CSE_BASE-"onem2m"} +ONEM2M_SSL_KEY=${ONEM2M_SSL_KEY-"/etc/openmtc/certs/in-cse-1-client-server.key.pem"} +ONEM2M_SSL_CRT=${ONEM2M_SSL_CRT-"/etc/openmtc/certs/in-cse-1-client-server.cert.pem"} +ONEM2M_SSL_CA=${ONEM2M_SSL_CA-"/etc/openmtc/certs/ca-chain.cert.pem"} +ONEM2M_ACCEPT_INSECURE_CERTS=${ONEM2M_ACCEPT_INSECURE_CERTS-false} +ONEM2M_OVERWRITE_ORIGINATOR=${ONEM2M_OVERWRITE_ORIGINATOR-false} + +# defaults onem2m plugins +ONEM2M_HTTP_TRANSPORT_DISABLED=${ONEM2M_HTTP_TRANSPORT_DISABLED-false} +ONEM2M_HTTP_TRANSPORT_PORT=${ONEM2M_HTTP_TRANSPORT_PORT-18000} +ONEM2M_HTTP_TRANSPORT_SSL_ENABLED=${ONEM2M_HTTP_TRANSPORT_SSL_ENABLED-false} +ONEM2M_HTTP_TRANSPORT_REQUIRE_CERT=${ONEM2M_HTTP_TRANSPORT_REQUIRE_CERT-true} + +ONEM2M_NOTIFICATION_DISABLED=${ONEM2M_NOTIFICATION_DISABLED-true} + +# ensure correct level +case ${LOGGING_LEVEL} in + FATAL|ERROR|WARN|INFO|DEBUG) + ;; + *) + LOGGING_LEVEL="ERROR" + ;; +esac + +# local ip +LOCAL_IP=$(ip r get 8.8.8.8 | awk 'NR==1 {print $NF}') + +# set hostname +HOST_NAME=${EXTERNAL_IP-${LOCAL_IP}} + +# Configuration of the service. +CONFIG_TEMP=${CONFIG_FILE}".tmp" +echo -n "Configuring M2M backend..." +JQ_STRING='.' 
+ +# basics +JQ_STRING=${JQ_STRING}' | + .global.require_auth = '${REQUIRE_AUTH}' | + .logging.file |= "'${LOGGING_FILE}'" | + .logging.level |= "'${LOGGING_LEVEL}'" +' + +# onem2m +JQ_STRING=${JQ_STRING}' | + .onem2m.sp_id = "'${ONEM2M_SP_ID}'" | + .onem2m.cse_type = "'${ONEM2M_CSE_TYPE}'" | + .onem2m.cse_id |= "'${ONEM2M_CSE_ID}'" | + .onem2m.cse_base |= "'${ONEM2M_CSE_BASE}'" | + .onem2m.ssl_certs.key |= "'${ONEM2M_SSL_KEY}'" | + .onem2m.ssl_certs.crt |= "'${ONEM2M_SSL_CRT}'" | + .onem2m.ssl_certs.ca |= "'${ONEM2M_SSL_CA}'" | + .onem2m.accept_insecure_certs |= '${ONEM2M_ACCEPT_INSECURE_CERTS}' | + .onem2m.overwrite_originator.enabled |= '${ONEM2M_OVERWRITE_ORIGINATOR}' +' + +# onem2m plugins +JQ_STRING=${JQ_STRING}' | + (.plugins.openmtc_cse[] | select(.name == "HTTPTransportPlugin") | .disabled) |= '${ONEM2M_HTTP_TRANSPORT_DISABLED}' | + (.plugins.openmtc_cse[] | select(.name == "HTTPTransportPlugin") | .config.port) |= '${ONEM2M_HTTP_TRANSPORT_PORT}' | + (.plugins.openmtc_cse[] | select(.name == "HTTPTransportPlugin") | .config.enable_https) |= '${ONEM2M_HTTP_TRANSPORT_SSL_ENABLED}' | + (.plugins.openmtc_cse[] | select(.name == "HTTPTransportPlugin") | .config.require_cert) |= '${ONEM2M_HTTP_TRANSPORT_REQUIRE_CERT}' | + (.plugins.openmtc_cse[] | select(.name == "NotificationHandler") | .disabled) |= '${ONEM2M_NOTIFICATION_DISABLED}' +' + +cat ${CONFIG_FILE} | jq -M "${JQ_STRING}"> ${CONFIG_TEMP} +mv ${CONFIG_TEMP} ${CONFIG_FILE} + +echo "done" + +exec python -m openmtc_gevent.backend_main $@ diff --git a/docker/configure-gateway-and-start b/docker/configure-gateway-and-start new file mode 100755 index 0000000..e2c6985 --- /dev/null +++ b/docker/configure-gateway-and-start @@ -0,0 +1,95 @@ +#!/usr/bin/env bash + +CONFIG_FILE="/etc/openmtc/gevent/config-gateway.json" + +# local ip +LOCAL_IP=$(ip r get 8.8.8.8 | awk 'NR==1 {print $NF}') + +# set hostname +HOST_NAME=${EXTERNAL_IP-${LOCAL_IP}} + +# defaults global +REQUIRE_AUTH=${REQUIRE_AUTH-false} + +# defaults logging +LOGGING_FILE=${LOGGING_FILE-"/var/log/openmtc/gateway.log"} +LOGGING_LEVEL=${LOGGING_LEVEL-"ERROR"} + +# defaults onem2m +ONEM2M_SP_ID=${ONEM2M_SP_ID-"openmtc.org"} +ONEM2M_CSE_TYPE=${ONEM2M_CSE_TYPE-"MN-CSE"} +ONEM2M_CSE_ID=${ONEM2M_CSE_ID-"mn-cse-1"} +ONEM2M_CSE_BASE=${ONEM2M_CSE_BASE-"onem2m"} +ONEM2M_SSL_KEY=${ONEM2M_SSL_KEY-"/etc/openmtc/certs/mn-cse-1-client-server.key.pem"} +ONEM2M_SSL_CRT=${ONEM2M_SSL_CRT-"/etc/openmtc/certs/mn-cse-1-client-server.cert.pem"} +ONEM2M_SSL_CA=${ONEM2M_SSL_CA-"/etc/openmtc/certs/ca-chain.cert.pem"} +ONEM2M_ACCEPT_INSECURE_CERTS=${ONEM2M_ACCEPT_INSECURE_CERTS-false} +ONEM2M_OVERWRITE_ORIGINATOR=${ONEM2M_OVERWRITE_ORIGINATOR-false} + +# defaults onem2m plugins +ONEM2M_HTTP_TRANSPORT_DISABLED=${ONEM2M_HTTP_TRANSPORT_DISABLED-false} +ONEM2M_HTTP_TRANSPORT_PORT=${ONEM2M_HTTP_TRANSPORT_PORT-8000} +ONEM2M_HTTP_TRANSPORT_SSL_ENABLED=${ONEM2M_HTTP_TRANSPORT_SSL_ENABLED-false} +ONEM2M_HTTP_TRANSPORT_REQUIRE_CERT=${ONEM2M_HTTP_TRANSPORT_REQUIRE_CERT-true} + +ONEM2M_NOTIFICATION_DISABLED=${ONEM2M_NOTIFICATION_DISABLED-true} + +ONEM2M_REGISTRATION_DISABLED=${ONEM2M_REGISTRATION_DISABLED-true} +ONEM2M_REMOTE_CSE_ID=${ONEM2M_REMOTE_CSE_ID-"in-cse-1"} +ONEM2M_REMOTE_CSE_POA=${ONEM2M_REMOTE_CSE_POA-"http://localhost:18000"} +${ONEM2M_HTTP_TRANSPORT_SSL_ENABLED} && SCHEME="https" || SCHEME="http" +ONEM2M_REMOTE_CSE_OWN_POA="${SCHEME}://${HOST_NAME}:${ONEM2M_HTTP_TRANSPORT_PORT}" +ONEM2M_REMOTE_CSE_BASE=${ONEM2M_REMOTE_CSE_BASE-"onem2m"} +ONEM2M_REMOTE_CSE_TYPE=${ONEM2M_REMOTE_CSE_TYPE-"IN-CSE"} + +# 
ensure correct level +case ${LOGGING_LEVEL} in + FATAL|ERROR|WARN|INFO|DEBUG) + ;; + *) + LOGGING_LEVEL="ERROR" + ;; +esac + +# Configuration of the service. +CONFIG_TEMP=${CONFIG_FILE}".tmp" +echo -n "Configuring M2M gateway..." +JQ_STRING='.' + +# basics +JQ_STRING=${JQ_STRING}' | + .global.require_auth = '${REQUIRE_AUTH}' | + .logging.file |= "'${LOGGING_FILE}'" | + .logging.level |= "'${LOGGING_LEVEL}'" +' + +# onem2m +JQ_STRING=${JQ_STRING}' | + .onem2m.sp_id = "'${ONEM2M_SP_ID}'" | + .onem2m.cse_type = "'${ONEM2M_CSE_TYPE}'" | + .onem2m.cse_id |= "'${ONEM2M_CSE_ID}'" | + .onem2m.cse_base |= "'${ONEM2M_CSE_BASE}'" | + .onem2m.ssl_certs.key |= "'${ONEM2M_SSL_KEY}'" | + .onem2m.ssl_certs.crt |= "'${ONEM2M_SSL_CRT}'" | + .onem2m.ssl_certs.ca |= "'${ONEM2M_SSL_CA}'" | + .onem2m.accept_insecure_certs |= '${ONEM2M_ACCEPT_INSECURE_CERTS}' | + .onem2m.overwrite_originator.enabled |= '${ONEM2M_OVERWRITE_ORIGINATOR}' +' + +# onem2m plugins +JQ_STRING=${JQ_STRING}' | + (.plugins.openmtc_cse[] | select(.name == "HTTPTransportPlugin") | .disabled) |= '${ONEM2M_HTTP_TRANSPORT_DISABLED}' | + (.plugins.openmtc_cse[] | select(.name == "HTTPTransportPlugin") | .config.port) |= '${ONEM2M_HTTP_TRANSPORT_PORT}' | + (.plugins.openmtc_cse[] | select(.name == "HTTPTransportPlugin") | .config.enable_https) |= '${ONEM2M_HTTP_TRANSPORT_SSL_ENABLED}' | + (.plugins.openmtc_cse[] | select(.name == "HTTPTransportPlugin") | .config.require_cert) |= '${ONEM2M_HTTP_TRANSPORT_REQUIRE_CERT}' | + (.plugins.openmtc_cse[] | select(.name == "NotificationHandler") | .disabled) |= '${ONEM2M_NOTIFICATION_DISABLED}' | + (.plugins.openmtc_cse[] | select(.name == "RegistrationHandler") | .disabled) |= '${ONEM2M_REGISTRATION_DISABLED}' | + (.plugins.openmtc_cse[] | select(.name == "RegistrationHandler") | .config.remote_cses) |= [{cse_id: "'${ONEM2M_REMOTE_CSE_ID}'", poa: ["'${ONEM2M_REMOTE_CSE_POA}'"], own_poa: ["'${ONEM2M_REMOTE_CSE_OWN_POA}'"], cse_base: "'${ONEM2M_REMOTE_CSE_BASE}'", cse_type: "'${ONEM2M_REMOTE_CSE_TYPE}'"}] +' + +cat ${CONFIG_FILE} | jq -M "${JQ_STRING}"> ${CONFIG_TEMP} +mv ${CONFIG_TEMP} ${CONFIG_FILE} + +echo "done" + +exec python -m openmtc_gevent.gateway_main $@ diff --git a/docker/gateway-amd64 b/docker/gateway-amd64 new file mode 100644 index 0000000..f1f8044 --- /dev/null +++ b/docker/gateway-amd64 @@ -0,0 +1,30 @@ +############################################################ +# Dockerfile to run openmtc gateway binary +############################################################ + +# Set the base image to use openmtc/sdk +FROM openmtc/sdk-amd64:latest + +ENV MOD_NAME=gateway + +# Set the file maintainer +MAINTAINER rst/tgu + +# install openmtc dependencies +COPY tmp/$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-all +COPY tmp/openmtc-$MOD_NAME.tar.gz /tmp/openmtc-$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-$MOD_NAME.tar.gz -C / \ + --owner root --group root --no-same-owner --no-overwrite-dir \ + --transform 's/json\.dist/json/' --show-transformed + +RUN mkdir -p /var/log/openmtc + +# add change config +COPY configure-$MOD_NAME-and-start /usr/local/bin/configure-and-start + +# entry point +ENTRYPOINT ["/usr/local/bin/configure-and-start"] +CMD [""] diff --git a/docker/gateway-arm b/docker/gateway-arm new file mode 100644 index 0000000..9a7495b --- /dev/null +++ b/docker/gateway-arm @@ -0,0 +1,30 @@ +############################################################ +# Dockerfile to run openmtc gateway binary 
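Compared with the backend script, the distinctive step in the gateway variant is registration: it derives the gateway's own point of access from the detected IP and the HTTP transport settings, then replaces the RegistrationHandler's config.remote_cses with a single descriptor of the IN-CSE it should register to. A small sketch of that derivation using the same defaults; the 192.0.2.10 address is only a placeholder for the LOCAL_IP the script detects:

```python
import os

# EXTERNAL_IP wins over the IP detected via `ip r get 8.8.8.8` in the script.
host_name = os.environ.get("EXTERNAL_IP", "192.0.2.10")
ssl_enabled = os.environ.get("ONEM2M_HTTP_TRANSPORT_SSL_ENABLED", "false") == "true"
port = os.environ.get("ONEM2M_HTTP_TRANSPORT_PORT", "8000")

scheme = "https" if ssl_enabled else "http"
own_poa = "%s://%s:%s" % (scheme, host_name, port)

# This dict is what the jq filter assigns to the RegistrationHandler's
# config.remote_cses list.
remote_cse = {
    "cse_id": os.environ.get("ONEM2M_REMOTE_CSE_ID", "in-cse-1"),
    "poa": [os.environ.get("ONEM2M_REMOTE_CSE_POA", "http://localhost:18000")],
    "own_poa": [own_poa],
    "cse_base": os.environ.get("ONEM2M_REMOTE_CSE_BASE", "onem2m"),
    "cse_type": os.environ.get("ONEM2M_REMOTE_CSE_TYPE", "IN-CSE"),
}

print(own_poa)
print(remote_cse)
```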
+############################################################ + +# Set the base image to use openmtc/sdk +FROM openmtc/sdk-arm:latest + +ENV MOD_NAME=gateway + +# Set the file maintainer +MAINTAINER rst/tgu + +# install openmtc dependencies +COPY tmp/$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-all +COPY tmp/openmtc-$MOD_NAME.tar.gz /tmp/openmtc-$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-$MOD_NAME.tar.gz -C / \ + --owner root --group root --no-same-owner --no-overwrite-dir \ + --transform 's/json\.dist/json/' --show-transformed + +RUN mkdir -p /var/log/openmtc + +# add change config +COPY configure-$MOD_NAME-and-start /usr/local/bin/configure-and-start + +# entry point +ENTRYPOINT ["/usr/local/bin/configure-and-start"] +CMD [""] diff --git a/docker/sdk-amd64 b/docker/sdk-amd64 new file mode 100644 index 0000000..7f3594c --- /dev/null +++ b/docker/sdk-amd64 @@ -0,0 +1,21 @@ +############################################################ +# Dockerfile for openmtc sdk binary environment +############################################################ + +# Set the base image to use openmtc/base +FROM openmtc/base-amd64:latest + +ENV MOD_NAME=sdk + +# Set the file maintainer +MAINTAINER rst/tgu + +# install openmtc dependencies +COPY tmp/$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-sdk +COPY tmp/openmtc-$MOD_NAME.tar.gz /tmp/openmtc-$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-$MOD_NAME.tar.gz -C / \ + --owner root --group root --no-same-owner --no-overwrite-dir \ + --transform 's/json\.dist/json/' --show-transformed diff --git a/docker/sdk-arm b/docker/sdk-arm new file mode 100644 index 0000000..a346068 --- /dev/null +++ b/docker/sdk-arm @@ -0,0 +1,21 @@ +############################################################ +# Dockerfile for openmtc sdk binary environment +############################################################ + +# Set the base image to use openmtc/base +FROM openmtc/base-arm:latest + +ENV MOD_NAME=sdk + +# Set the file maintainer +MAINTAINER rst/tgu + +# install openmtc dependencies +COPY tmp/$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-sdk +COPY tmp/openmtc-$MOD_NAME.tar.gz /tmp/openmtc-$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-$MOD_NAME.tar.gz -C / \ + --owner root --group root --no-same-owner --no-overwrite-dir \ + --transform 's/json\.dist/json/' --show-transformed diff --git a/futile/src/futile/StringIO/__init__.py b/futile/src/futile/StringIO/__init__.py new file mode 100644 index 0000000..40a6c72 --- /dev/null +++ b/futile/src/futile/StringIO/__init__.py @@ -0,0 +1,4 @@ +try: + from StringIO import StringIO +except ImportError: + from io import StringIO diff --git a/futile/src/futile/__init__.py b/futile/src/futile/__init__.py new file mode 100644 index 0000000..fc280c1 --- /dev/null +++ b/futile/src/futile/__init__.py @@ -0,0 +1,82 @@ +from futile.basictypes import basestring, BASE_STR +from futile.logging import LoggerMixin + +Base = LoggerMixin + + +class NOT_SET(object): + __slots__ = () + + def __bool__(self): + return False + __nonzero__ = __bool__ + + def __str__(self): + return "" + +NOT_SET = NOT_SET() +DEFAULT_ENCODING = "utf-8" +DEFAULT_CHUNK_SIZE = 128 * 1024 +THREADSAFE = True + + +def noop(*args, **kw): + pass + + +def not_implemented(*args, **kw): + raise NotImplementedError() + + +def tostr(o): + if 
isinstance(o, basestring): + return o + return BASE_STR(o) + + +if basestring == str: + uc = tostr + encstr = not_implemented +else: + def uc(s): + if isinstance(s, unicode): + return s + if isinstance(s, basestring): + return s.decode(DEFAULT_ENCODING) + return unicode(s) + + def encstr(s): + if isinstance(s, str): + return s + if not isinstance(s, unicode): + s = unicode(s) + return s.encode(DEFAULT_ENCODING) + + +def identity(x): + return x + +_isc = issubclass + + +def issubclass(o, classes): + "A safer version of __builtin__.issubclass that does not raise TypeError when called with a non-type object" + + return isinstance(o, type) and _isc(o, classes) + +try: + callable +except NameError: + def callable(x): + return hasattr(x, "__call__") + + +class ObjectProxy(object): + __slots__ = ("_o") + + def __init__(self, proxyobject, *args, **kw): + super(ObjectProxy, self).__init__(*args, **kw) + self._o = proxyobject + + def __getattr__(self, k): + return getattr(self._o, k) diff --git a/futile/src/futile/__init__.pyc b/futile/src/futile/__init__.pyc new file mode 100644 index 0000000..ec6ef68 Binary files /dev/null and b/futile/src/futile/__init__.pyc differ diff --git a/futile/src/futile/abchelper.py b/futile/src/futile/abchelper.py new file mode 100644 index 0000000..8b66cbb --- /dev/null +++ b/futile/src/futile/abchelper.py @@ -0,0 +1,12 @@ +''' +Created on 13.11.2012 + +@author: kca +''' + +try: + from abc import ABCMeta, abstractmethod, abstractproperty +except ImportError: + from futile import identity + ABCMeta = type + abstractmethod = abstractproperty = identity diff --git a/futile/src/futile/basictypes.py b/futile/src/futile/basictypes.py new file mode 100644 index 0000000..8230e95 --- /dev/null +++ b/futile/src/futile/basictypes.py @@ -0,0 +1,20 @@ +''' +Created on 11.05.2013 + +@author: kca +''' + +try: + from types import ClassType +except ImportError: + ClassType = type + +try: + basestring = basestring +except NameError: + basestring = str + +try: + BASE_STR = unicode +except NameError: + BASE_STR = str diff --git a/futile/src/futile/basictypes.pyc b/futile/src/futile/basictypes.pyc new file mode 100644 index 0000000..adffff5 Binary files /dev/null and b/futile/src/futile/basictypes.pyc differ diff --git a/futile/src/futile/caching/__init__.py b/futile/src/futile/caching/__init__.py new file mode 100644 index 0000000..0c79571 --- /dev/null +++ b/futile/src/futile/caching/__init__.py @@ -0,0 +1,63 @@ +''' +Created on 17.07.2011 + +@author: kca +''' + +from ..collections import OrderedDict +import futile + +class LRUCache(OrderedDict): + max_items = 100 + + def __init__(self, max_items = None, threadsafe = None, *args, **kw): + super(LRUCache, self).__init__(*args, **kw) + if max_items is not None: + if max_items <= 0: + raise ValueError(max_items) + self.max_items = max_items + + if threadsafe is None: + threadsafe = futile.THREADSAFE + + if threadsafe: + from threading import RLock + self.__lock = RLock() + else: + self.__lock = None + self.__getitem__ = self._getitem + self.__setitem__ = self._setitem + + def __getitem__(self, k): + if self.__lock is None: + return self._getitem(k) + with self.__lock: + return self._getitem(k) + + def get(self, k, default = None): + try: + return self[k] + except KeyError: + return default + + def _getitem(self, k): + v = super(LRUCache, self).__getitem__(k) + del self[k] + super(LRUCache, self).__setitem__(k, v) + return v + + def __iter__(self): + for k in tuple(super(LRUCache, self).__iter__()): + yield k + + def __setitem__(self, k, v): 
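futile.ObjectProxy is the delegation helper much of the rest of the package builds on: it keeps the wrapped object in _o and forwards attribute access to it. A minimal usage sketch in the spirit of futile.contextlib.closing; the LoggedProxy class and the file path are purely illustrative:

```python
from futile import ObjectProxy

class LoggedProxy(ObjectProxy):
    """Forwards everything to the wrapped object, printing each delegated attribute access."""
    def __getattr__(self, name):
        print("accessing %s" % name)
        return getattr(self._o, name)

# Any object can be wrapped; a file is just a convenient example.
proxied = LoggedProxy(open("/etc/hostname"))
data = proxied.read()    # "accessing read" is printed, then the real file's read() runs
proxied.close()
```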
+ if self.__lock is None: + return self._setitem(k, v) + with self.__lock: + self._setitem(k, v) + + def _setitem(self, k, v): + super(LRUCache, self).__setitem__(k, v) + if len(self) > self.max_items: + self.popitem(False) + \ No newline at end of file diff --git a/futile/src/futile/caching/__init__.pyc b/futile/src/futile/caching/__init__.pyc new file mode 100644 index 0000000..58d2d16 Binary files /dev/null and b/futile/src/futile/caching/__init__.pyc differ diff --git a/futile/src/futile/collections/OrderedSet.py b/futile/src/futile/collections/OrderedSet.py new file mode 100644 index 0000000..5de43a7 --- /dev/null +++ b/futile/src/futile/collections/OrderedSet.py @@ -0,0 +1,110 @@ +# Copyright (C) 2009 Raymond Hettinger + +# *** MIT License *** +# Permission is hereby granted, free of charge, to any person obtaining a copy of +# this software and associated documentation files (the "Software"), to deal in +# the Software without restriction, including without limitation the rights to +# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +# of the Software, and to permit persons to whom the Software is furnished to do +# so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +## {{{ http://code.activestate.com/recipes/576694/ (r7) + +# kca: fixed exception at interpreter shutdown +# kca: added list methods + +import collections + +KEY, PREV, NEXT = range(3) + +class OrderedSet(collections.MutableSet): + + def __init__(self, iterable=None): + self.end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.map = {} # key --> [key, prev, next] + if iterable is not None: + self |= iterable + + def __len__(self): + return len(self.map) + + def __contains__(self, key): + return key in self.map + + def add(self, key): + if key not in self.map: + end = self.end + curr = end[PREV] + curr[NEXT] = end[PREV] = self.map[key] = [key, curr, end] + append = add + + def discard(self, key): + _KEY, PREV, NEXT = 0, 1, 2 + if key in self.map: + key, prev, next = self.map.pop(key) + prev[NEXT] = next + next[PREV] = prev + + def __iter__(self): + end = self.end + curr = end[NEXT] + while curr is not end: + yield curr[KEY] + curr = curr[NEXT] + + def __reversed__(self): + KEY, PREV, NEXT = 0, 1, 2 + end = self.end + curr = end[PREV] + while curr is not end: + yield curr[KEY] + curr = curr[PREV] + + def pop(self, last=True): + # changed default to last=False - by default, treat as queue. 
+ if not self: + raise KeyError('set is empty') + key = next(reversed(self)) if last else next(iter(self)) + self.discard(key) + return key + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + if isinstance(other, OrderedSet): + return len(self) == len(other) and list(self) == list(other) + return set(self) == set(other) + + def __del__(self): + self.clear() # remove circular references + + def __getitem__(self, index): + return list(self)[index] + + +if __name__ == '__main__': + print(OrderedSet('abracadaba')) + print(OrderedSet('simsalabim')) +## end of http://code.activestate.com/recipes/576694/ }}} + +## kca: + print OrderedSet('simsalabim')[1] + + # Test case for exception at shutdown (yes, really...) + x = OrderedSet('simsalabim') + diff --git a/futile/src/futile/collections/__init__.py b/futile/src/futile/collections/__init__.py new file mode 100644 index 0000000..e5569b4 --- /dev/null +++ b/futile/src/futile/collections/__init__.py @@ -0,0 +1,44 @@ +''' +Created on 17.07.2011 + +@author: kca +''' + +import futile +from futile.basictypes import basestring + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict + +from abc import ABCMeta +from collections import Iterable, Sequence + + +def is_iterable(o): + return isinstance(o, Iterable) and not isinstance(o, basestring) + + +def get_iterable(o): + if o is None: + return () + return ((not isinstance(o, Iterable) or isinstance(o, basestring)) + and (o,) or o) + + +def get_list(o): + if o is None: + return [] + return ((not isinstance(o, Iterable) or isinstance(o, basestring)) + and [o] or list(o)) + + +def yield_buffer(buffer, chunk_size=None): + chunk_size = chunk_size or futile.DEFAULT_CHUNK_SIZE + + while True: + chunk = buffer.read(chunk_size) + if not chunk: + return + yield chunk diff --git a/futile/src/futile/collections/__init__.pyc b/futile/src/futile/collections/__init__.pyc new file mode 100644 index 0000000..3737916 Binary files /dev/null and b/futile/src/futile/collections/__init__.pyc differ diff --git a/futile/src/futile/collections/ordereddict.py b/futile/src/futile/collections/ordereddict.py new file mode 100644 index 0000000..55b4bad --- /dev/null +++ b/futile/src/futile/collections/ordereddict.py @@ -0,0 +1,127 @@ +# Copyright (c) 2009 Raymond Hettinger +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation files +# (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, +# publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
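The LRUCache above is a thin wrapper over an ordered dict: a successful read re-inserts the key at the most recently used end, and a write that pushes the size past max_items evicts the oldest entry via popitem(False). A short usage sketch, assuming the futile package from this commit is importable (it targets the Python 2 era):

```python
from futile.caching import LRUCache

cache = LRUCache(max_items=2)
cache["a"] = 1
cache["b"] = 2

_ = cache["a"]        # reading "a" moves it to the most recently used position
cache["c"] = 3        # exceeds max_items, so the least recently used key ("b") is evicted

print("b" in cache)   # False
print(list(cache))    # ['a', 'c']
```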
+ +from UserDict import DictMixin + + +class OrderedDict(dict, DictMixin): + def __init__(self, *args, **kwds): + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__end + except AttributeError: + self.clear() + self.update(*args, **kwds) + + def clear(self): + self.__end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.__map = {} # key --> [key, prev, next] + dict.clear(self) + + def __setitem__(self, key, value): + if key not in self: + end = self.__end + curr = end[1] + curr[2] = end[1] = self.__map[key] = [key, curr, end] + dict.__setitem__(self, key, value) + + def __delitem__(self, key): + dict.__delitem__(self, key) + key, prev, next = self.__map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.__end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.__end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def popitem(self, last=True): + if not self: + raise KeyError('dictionary is empty') + if last: + key = reversed(self).next() + else: + key = iter(self).next() + value = self.pop(key) + return key, value + + def __reduce__(self): + items = [[k, self[k]] for k in self] + tmp = self.__map, self.__end + del self.__map, self.__end + inst_dict = vars(self).copy() + self.__map, self.__end = tmp + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def keys(self): + return list(self) + + setdefault = DictMixin.setdefault + update = DictMixin.update + pop = DictMixin.pop + values = DictMixin.values + items = DictMixin.items + iterkeys = DictMixin.iterkeys + itervalues = DictMixin.itervalues + iteritems = DictMixin.iteritems + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + + def copy(self): + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + if isinstance(other, OrderedDict): + if len(self) != len(other): + return False + for p, q in zip(self.items(), other.items()): + if p != q: + return False + return True + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other diff --git a/futile/src/futile/collections/sortedlist.py b/futile/src/futile/collections/sortedlist.py new file mode 100644 index 0000000..3a08355 --- /dev/null +++ b/futile/src/futile/collections/sortedlist.py @@ -0,0 +1,38 @@ +try: + from blist import sortedlist +except ImportError: + from futile.logging import get_logger + from heapq import heappush, heappop, heapify + + get_logger(__name__).warning("blist.sortedlist is not available. 
Using a fallback implementation") + + class sortedlist(object): + def __init__(self, iterable=(), *args, **kw): + super(sortedlist, self).__init__(*args, **kw) + + l = self._list = list(iterable) + + if iterable is not None: + heapify(l) + + def add(self, v): + heappush(self._list, v) + + def pop(self, index=-1): + if index != 0: + raise NotImplementedError() + + return heappop(self._list) + + def remove(self, object): + self._list.remove(object) + heapify(self._list) + + def __getitem__(self, index): + if index != 0: + raise NotImplementedError() + + return self._list[index] + + def __len__(self): + return len(self._list) diff --git a/futile/src/futile/contextlib.py b/futile/src/futile/contextlib.py new file mode 100644 index 0000000..2e16135 --- /dev/null +++ b/futile/src/futile/contextlib.py @@ -0,0 +1,21 @@ +''' +Created on 14.07.2011 + +@author: kca +''' +from futile import ObjectProxy + +class closing(ObjectProxy): + def __enter__(self): + return self._o + + def __exit__(self, exc_type, exc_val, exc_tb): + self._o.close() + + +class exiting(ObjectProxy): + def __enter__(self): + return self._o + + def __exit__(self, exc_type, exc_val, exc_tb): + self._o.__exit__(exc_type, exc_val, exc_tb) \ No newline at end of file diff --git a/futile/src/futile/contextlib.pyc b/futile/src/futile/contextlib.pyc new file mode 100644 index 0000000..d80e3db Binary files /dev/null and b/futile/src/futile/contextlib.pyc differ diff --git a/futile/src/futile/etree.py b/futile/src/futile/etree.py new file mode 100644 index 0000000..da9677b --- /dev/null +++ b/futile/src/futile/etree.py @@ -0,0 +1,44 @@ +''' +Created on 25.07.2011 + +@author: kca +''' + +import sys +from .logging import get_logger + +try: + from lxml import etree as impl + from lxml.etree import tostring as _ts + + get_logger(__name__).debug("Using lxml etree implementation1.") + + def tostring(element, encoding="utf-8", pretty_print=False): + return _ts(element, encoding=encoding, pretty_print=pretty_print) +except ImportError: + logger = get_logger(__name__) + logger.warning( + "lxml library not found, trying builtin ElementTree implementations. Pretty printing will be disabled.") + try: + from xml.etree import cElementTree as impl + + try: + impl.ParseError = impl.XMLParserError + except AttributeError: + pass + logger.debug("Using native xml.etree.cElementTree") + except ImportError: + from xml.etree import ElementTree as impl + + logger.debug("Using python xml.etree.ElementTree") + + _ts = impl.tostring + + def tostring(element, encoding="utf-8", pretty_print=False): + return _ts(element, encoding=encoding) + + impl.tostring = tostring + impl.XMLSyntaxError = impl.ParseError + +sys.modules[__name__ + ".impl"] = sys.modules[__name__ + ".ElementTree"] = ElementTree = impl + diff --git a/futile/src/futile/exc.py b/futile/src/futile/exc.py new file mode 100644 index 0000000..94b89ab --- /dev/null +++ b/futile/src/futile/exc.py @@ -0,0 +1,22 @@ +''' +Created on 14.07.2011 + +@author: kca +''' + +from . 
import issubclass + +def errorstr(e): + try: + message = e.message + except AttributeError: + message = str(e) + else: + if not message: + message = str(e) + return message + +def raise_error(e): + if isinstance(e, Exception) or (isinstance(e, type) and issubclass(e, Exception)): + raise e + raise Exception(e) diff --git a/futile/src/futile/logging/__init__.py b/futile/src/futile/logging/__init__.py new file mode 100644 index 0000000..3e8a34e --- /dev/null +++ b/futile/src/futile/logging/__init__.py @@ -0,0 +1,230 @@ +""" +Created on 15.07.2011 + +@author: kca +""" +import logging +import logging.handlers +from futile.basictypes import ClassType, basestring +from futile.threading import current_thread +from logging import Filter +from futile.collections import get_iterable + +# statics +_handlers = [] +_formatter = logging.Formatter('%(asctime)s %(levelname)s - %(name)s: %(message)s') +_level = logging.NOTSET + +# log level constants for convenience +from logging import CRITICAL, FATAL, ERROR, WARNING, INFO, DEBUG, NOTSET + +CRITICAL = CRITICAL +FATAL = FATAL +ERROR = ERROR +WARNING = WARNING +INFO = INFO +DEBUG = DEBUG +NOTSET = NOTSET + + +def get_default_level(): + return _level + + +def set_default_level(l): + global _level + _level = l + logging.basicConfig(level=l) + + +# try: +# from colorlog import ColoredFormatter +# formatter = ColoredFormatter( +# "%(blue)s%(asctime)s %(log_color)s%(levelname) - 8s%(reset)s%(name)s: %(message)s", +# datefmt=None, +# reset=True, +# log_colors={ +# 'DEBUG': 'cyan', +# 'INFO': 'green', +# 'WARNING': 'yellow', +# 'ERROR': 'red', +# 'CRITICAL': 'red', +# } +# ) +# import logging +# hand = logging.StreamHandler() +# hand.setFormatter(formatter) +# futile.logging.add_handler( hand) +# except ImportError: +# pass +def get_default_formatter(): + return _formatter + + +def set_default_formatter(frmt): + global _formatter + if frmt and isinstance(frmt, logging.Formatter): + _formatter = frmt + else: + raise TypeError("Not a logging Formatter: %s" % (frmt, )) + + +def add_handler(h): + if not isinstance(h, logging.Handler): + raise TypeError(h) + + _handlers.append(h) + + +def add_log_file(path, level=None, formatter=None): + """ Adds a log file to all future loggers. + Files will be rotated depending on max_bytes and backups parameters. + + @param path: path to logfile + @param level: minimum log level + @param formatter: a logging.Formatter for this log file + """ + handler = logging.handlers.WatchedFileHandler(path) + handler.setFormatter(formatter or _formatter) + # TODO(rst): probably try/except is necessary + handler.setLevel(level or _level) + add_handler(handler) + + +def get_logger(logger_name=None, level=None): + level = level if level is not None else _level + # logging.basicConfig(level=level) + if logger_name: + if not isinstance(logger_name, basestring): + if not isinstance(logger_name, (type, ClassType)): + l_class = logger_name.__class__ + else: + l_class = logger_name + logger_name = l_class.__module__ + "." + l_class.__name__ + else: + logger_name = __name__ + + try: + logger = logging.getLogger(logger_name) + except Exception as e: + print ("Failed to get logger '%s': %s" % (logger_name, e)) + raise + + try: + logger.setLevel(level) # raises TypeError: not a valid string or int + except TypeError: + logger.setLevel(NOTSET) # TODO(rst): set another level if wrong level? 
+ for h in _handlers: + logger.addHandler(h) + return logger + + +class LoggerMixin(object): + + log_file = None + log_level = None + + def __init__(self): + self.__logger = None + + @classmethod + def _get_logger(cls, logger_name=None): + logger = get_logger(logger_name, cls.log_level) + if cls.log_file: + formatter = get_default_formatter() + handler = logging.handlers.WatchedFileHandler(cls.log_file) + handler.setFormatter(formatter) + logger.addHandler(handler) + + return logger + + def get_logger(self): + try: + if self.__logger is not None: + return self.__logger + except AttributeError: + pass + self.__logger = l = self.get_class_logger() + return l + + def set_logger(self, logger): + self.__logger = logger + logger = property(get_logger, set_logger) + + @classmethod + def get_class_logger(cls): + try: + return cls.__dict__["__logger__"] + except KeyError: + l = cls.__logger__ = cls._get_logger(cls.__name__) + return l + + def __getstate__(self): + l = getattr(self, "_LoggerMixin__logger", None) + self.__logger = None + try: + sgs = super(LoggerMixin, self).__getstate__ + except AttributeError: + state = self.__dict__.copy() + else: + state = sgs() + self.__logger = l + return state + + +class ThreadFilter(Filter): + def __init__(self, thread=None, name=''): + Filter.__init__(self, name=name) + self.thread = thread or current_thread() + + def filter(self, record): + return current_thread() == self.thread + + +class ErrorLogger(LoggerMixin): + def __init__(self, name="operation", logger=None, + level=get_default_level(), *args, **kw): + super(ErrorLogger, self).__init__(*args, **kw) + if logger is not None: + self.logger = logger + self.name = name + self.log_level = level + assert level is not None + + def __enter__(self): + self.logger.debug("Entering %s", self.name) + return self + + def __exit__(self, type, value, traceback): + if type is not None: + self.logger.exception("Error in %s", self.name) + else: + self.logger.log(self.log_level, "%s finished", self.name) + + +def log_errors(f): + def _f(*args, **kw): + with ErrorLogger(f.__name__): + result = f(*args, **kw) + get_logger(f).debug("%s returning: %s", f.__name__, result) + return result + _f.__name__ = f.__name__ + return _f + + +def sanitize_dict(d, keys=("password",), replacement="*", inplace=False): + keys = get_iterable(keys) + if not inplace: + d = dict(d) + + if replacement is None: + for k in keys: + d.pop(k, None) + else: + for k in keys: + v = d[k] + if isinstance(v, basestring): + d[k] = replacement * len(v) + else: + d[k] = replacement + return d diff --git a/futile/src/futile/logging/__init__.pyc b/futile/src/futile/logging/__init__.pyc new file mode 100644 index 0000000..5771f9b Binary files /dev/null and b/futile/src/futile/logging/__init__.pyc differ diff --git a/futile/src/futile/logging/handlers.py b/futile/src/futile/logging/handlers.py new file mode 100644 index 0000000..e8fee2c --- /dev/null +++ b/futile/src/futile/logging/handlers.py @@ -0,0 +1,14 @@ +''' +Created on 30.08.2011 + +@author: kca +''' + +from logging.handlers import BufferingHandler as _BufferingHandler + +class BufferingHandler(_BufferingHandler): + def __init__(self, capacity = None): + _BufferingHandler.__init__(self, capacity = capacity) + + def shouldFlush(self, record): + return self.capacity and super(BufferingHandler, self).shouldFlush(record) or False diff --git a/futile/src/futile/logging/logbook.py b/futile/src/futile/logging/logbook.py new file mode 100644 index 0000000..601e6bd --- /dev/null +++ 
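The logging module centralises handler and level management: get_logger() builds loggers that share the registered handlers, LoggerMixin gives every class a lazily created self.logger, and ErrorLogger is a context manager that logs entry, exit and any exception of a named operation. A brief usage sketch, assuming the futile package is importable:

```python
from futile.logging import DEBUG, ErrorLogger, LoggerMixin, set_default_level

set_default_level(DEBUG)             # also calls logging.basicConfig()

class Worker(LoggerMixin):
    def run(self):
        self.logger.info("working")  # per-class logger, created on first access

with ErrorLogger("demo operation", level=DEBUG):
    # an exception raised in this block would be logged with a traceback by __exit__
    Worker().run()
```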
b/futile/src/futile/logging/logbook.py @@ -0,0 +1,9 @@ +''' +Created on 30.08.2011 + +@author: kca +''' + +from collections import namedtuple + +Logbook = namedtuple("Logbook", ("name", "component", "entries")) diff --git a/futile/src/futile/logging/logtap.py b/futile/src/futile/logging/logtap.py new file mode 100644 index 0000000..9afbc57 --- /dev/null +++ b/futile/src/futile/logging/logtap.py @@ -0,0 +1,60 @@ +''' +Created on 29.08.2011 + +@author: kca +''' + +import logging +from . import ThreadFilter +from ..collections import get_list +from futile import NOT_SET +from logging import LogRecord, DEBUG +from futile.logging import ErrorLogger + +class LogTap(ErrorLogger): + def __init__(self, handler, logger = None, name = None, level = DEBUG, *args, **kw): + super(LogTap, self).__init__(name = name, logger = logger, level = level, *args, **kw) + handler = get_list(handler) + self.handlers = handler + self.target_logger = logger or logging.root + + def attach(self): + map(self.target_logger.addHandler, self.handlers) + + def detach(self): + for handler in self.handlers: + self.target_logger.removeHandler(handler) + handler.close() + + def emit(self, record): + for handler in self.handlers: + handler.emit(record) + + def __enter__(self): + self.attach() + return super(LogTap, self).__enter__() + + def __exit__(self, type, value, traceback): + super(LogTap, self).__exit__(type, value, traceback) + self.detach() + +class BufferingLogTap(LogTap): + log = None + + def __init__(self, handler = None, name = None, logger = None, level = DEBUG, capacity = None, memhandler = None, *args, **kw): + if not memhandler: + from handlers import BufferingHandler + memhandler = BufferingHandler(capacity) + memhandler.addFilter(ThreadFilter()) + self.memhandler = memhandler + handler = [ memhandler ] + get_list(handler) + super(BufferingLogTap, self).__init__(handler = handler, logger = logger, name = name, level = level, *args, **kw) + + def detach(self): + self.log = map(lambda r: isinstance(r, LogRecord) and self.memhandler.format(r) or r, self.memhandler.buffer) + super(BufferingLogTap, self).detach() + + def emit(self, record, level = NOT_SET): + if isinstance(record, LogRecord): + return super(BufferingLogTap, self).emit(record) + self.memhandler.buffer.append(record) diff --git a/futile/src/futile/multiprocess/RWLock.py b/futile/src/futile/multiprocess/RWLock.py new file mode 100644 index 0000000..de8ba85 --- /dev/null +++ b/futile/src/futile/multiprocess/RWLock.py @@ -0,0 +1,82 @@ +''' +Created on 30.04.2011 + +@author: kca +''' + +import os +from fcntl import lockf, LOCK_EX, LOCK_SH, LOCK_UN +from contextlib import contextmanager +from futile.signal import timeout + +class RWLock(object): + def __init__(self, path = None, threadsafe = True, *args, **kw): + if not path: + raise NotImplementedError() + + if not os.path.exists(path): + open(path, "a").close() + + self.__path = path + + if threadsafe: + import threading + self.__local = threading.local() + else: + class Local(object): + pass + self.__local = Local + + self.__local.f = None + + + @contextmanager + def read_transaction(self, timeout = None): + self.read_acquire(timeout = timeout) + try: + yield + finally: + self.read_release() + pass + pass + + @contextmanager + def write_transaction(self, timeout = None): + self.write_acquire(timeout = timeout) + try: + yield + finally: + self.write_release() + + def __acquire(self, fmode, lmode, to): + assert getattr(self.__local, "f", None) is None + f = open(self.__path, fmode) + try: + if timeout: + 
with timeout(to): + lockf(f, lmode) + else: + lockf(f, lmode) + except: + f.close() + raise + self.__local.f = f + return f + + def read_acquire(self, timeout = None): + return self.__acquire("r", LOCK_SH, timeout) + + def read_release(self): + with self.__local.f as f: + self.__local.f = None + lockf(f, LOCK_UN) + + write_release = read_release + + def write_acquire(self, timeout = None): + return self.__acquire("a", LOCK_EX, timeout) + + __enter__ = write_acquire + + def __exit__(self, *args): + self.write_release() diff --git a/futile/src/futile/multiprocess/__init__.py b/futile/src/futile/multiprocess/__init__.py new file mode 100644 index 0000000..b4a4a4e --- /dev/null +++ b/futile/src/futile/multiprocess/__init__.py @@ -0,0 +1,3 @@ +from RWLock import RWLock + +Lock = RWLock \ No newline at end of file diff --git a/futile/src/futile/net/PortTester.py b/futile/src/futile/net/PortTester.py new file mode 100644 index 0000000..56290cc --- /dev/null +++ b/futile/src/futile/net/PortTester.py @@ -0,0 +1,83 @@ +''' +Created on 15.07.2011 + +@author: kca +''' + +from asyncore import dispatcher, loop +from socket import AF_INET, SOCK_STREAM, error +from sockethelper import socket +from futile.exc import errorstr +from collections import namedtuple +import sys +from time import time + +class TestResult(namedtuple("TestResultTuple", ("result", "message"))): + def __new__(cls, result, message = ""): + return super(TestResult, cls).__new__(cls, result, message) + + def __bool__(self): + return self.result + __nonzero__ = __bool__ + + def __str__(self): + if self.message: + return "%s - %s" % (self.result, self.message) + return str(self.result) + + def __eq__(self, o): + try: + return self.result == o.result + except AttributeError: + return False + + def __ne__(self, o): + return not (self == o) + +def test_port(host, port, family = AF_INET, type = SOCK_STREAM): + try: + with socket(family, type) as s: + s.connect((host, port)) + except error, e: + return TestResult(False, "%s (%d)" % (e.strerror, e.errno)) + except Exception, e: + return TestResult(False, errorstr(e)) + return TestResult(True) + +class PortTester(dispatcher): + result = TestResult(False, "Test did not run") + + def __init__(self, host, port, family = AF_INET, type = SOCK_STREAM, map = None): + dispatcher.__init__(self, map = map) + self.create_socket(family, type) + self.connect((host, port)) + self.host = host + self.port = port + + def handle_connect(self): + self.result = TestResult(True) + self.close() + + def handle_error(self): + self.result = TestResult(False, errorstr(sys.exc_value)) + self.close() + +def run_test(map, timeout = 0.0): + if timeout and timeout > 0.0: + timeout = float(timeout) + start = time() + while True: + loop(map = map, timeout = timeout, count = 1) + if map: + now = time() + timeout -= now - start + if timeout <= 0.0: + for r in map.itervalues(): + r.result = TestResult(False, "Timeout") + break + start = now + else: + break + else: + loop(map = map) + \ No newline at end of file diff --git a/futile/src/futile/net/__init__.py b/futile/src/futile/net/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/futile/src/futile/net/__init__.pyc b/futile/src/futile/net/__init__.pyc new file mode 100644 index 0000000..3c791b6 Binary files /dev/null and b/futile/src/futile/net/__init__.pyc differ diff --git a/futile/src/futile/net/exc.py b/futile/src/futile/net/exc.py new file mode 100644 index 0000000..1a04adf --- /dev/null +++ b/futile/src/futile/net/exc.py @@ -0,0 +1,4 @@ + + +class 
NetworkError(Exception): + pass diff --git a/futile/src/futile/net/exc.pyc b/futile/src/futile/net/exc.pyc new file mode 100644 index 0000000..2a1b038 Binary files /dev/null and b/futile/src/futile/net/exc.pyc differ diff --git a/futile/src/futile/net/http/__init__.py b/futile/src/futile/net/http/__init__.py new file mode 100644 index 0000000..19ba75d --- /dev/null +++ b/futile/src/futile/net/http/__init__.py @@ -0,0 +1,96 @@ +''' +Created on 17.07.2011 + +@author: kca +''' +try: + from httplib import HTTPConnection as _HTTPConnection, HTTPSConnection as _HTTPSConnection +except ImportError: + from http.client import HTTPConnection as _HTTPConnection, HTTPSConnection as _HTTPSConnection + +from futile.contextlib import closing +from futile import NOT_SET +import socket +from . import exc as _exc +import sys +import types + +try: + from urllib import quote, quote_plus, unquote, unquote_plus +except ImportError: + from urllib.parse import quote, quote_plus, unquote, unquote_plus + +class HTTPResponseWrapper(object): + def __init__(self, connection, response, *args, **kw): + super(HTTPResponseWrapper, self).__init__(*args, **kw) + + self.__response = response + self.__connection = connection + + #def __del__(self): + # self.close() + + def __getattr__(self, k): + return getattr(self.__response, k) + + def __enter__(self): + return self.__response + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def close(self): + try: + self.__response.close() + except: + pass + finally: + self.__connection.close() + +class HTTPConnection(_HTTPConnection): + response_wrapper = closing + + def __init__(self, host, port=None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, response_wrapper = NOT_SET): + _HTTPConnection.__init__(self, host, port, strict, timeout, source_address) + if response_wrapper is not NOT_SET: + self.response_wrapper = response_wrapper + + def getresponse(self, buffering = False): + r = _HTTPConnection.getresponse(self, buffering) + if self.response_wrapper: + r = self.response_wrapper(r) + return r + +class HTTPSConnection(_HTTPSConnection): + response_wrapper = closing + + def __init__(self, host, port=None, key_file = None, cert_file = None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, response_wrapper = NOT_SET): + _HTTPSConnection.__init__(self, host, port, key_file = key_file, cert_file = cert_file, strict = strict, timeout = timeout, source_address = source_address) + if response_wrapper is not NOT_SET: + self.response_wrapper = response_wrapper + + def getresponse(self, buffering = False): + r = _HTTPSConnection.getresponse(self, buffering) + if self.response_wrapper: + r = self.response_wrapper(r) + return r + + +class exc(types.ModuleType): + def __getattr__(self, k): + try: + v = getattr(_exc, k) + except AttributeError: + if not k.startswith("HTTPError"): + raise + v = _exc.get_error_class(k[9:]) + setattr(self, k, v) + return v + + +name = __name__ + ".exc" +exc = exc(name) +sys.modules[name] = exc +del name + + \ No newline at end of file diff --git a/futile/src/futile/net/http/__init__.pyc b/futile/src/futile/net/http/__init__.pyc new file mode 100644 index 0000000..dc6ff7e Binary files /dev/null and b/futile/src/futile/net/http/__init__.pyc differ diff --git a/futile/src/futile/net/http/client/ConnectionPoolManager.py b/futile/src/futile/net/http/client/ConnectionPoolManager.py new file mode 100644 index 0000000..a33e262 --- /dev/null +++ b/futile/src/futile/net/http/client/ConnectionPoolManager.py 
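futile.net.http wraps httplib so that getresponse() hands back the response wrapped by the response_wrapper hook (futile.contextlib.closing by default), letting callers release it with a with-statement. A sketch of the intended use under Python 2, whose httplib signatures these classes rely on; example.org is only a placeholder host:

```python
from futile.net.http import HTTPConnection

conn = HTTPConnection("example.org", 80)
conn.request("GET", "/")
with conn.getresponse() as resp:   # wrapped via the response_wrapper hook
    body = resp.read()             # the wrapper closes the response on exit
conn.close()
print(len(body))
```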
@@ -0,0 +1,55 @@ +''' +Created on 19.03.2013 + +@author: kca +''' + +from logging import DEBUG, WARNING +import futile.logging +import urllib3.connectionpool +from urllib3.connectionpool import HTTPConnectionPool, HTTPSConnectionPool +from futile.logging import LoggerMixin +from futile import ObjectProxy + +if not futile.logging.get_logger().isEnabledFor(DEBUG): + urllib3.connectionpool.log.setLevel(WARNING) + +class Urllib3ResponseWrapper(ObjectProxy): + def getheader(self, header, default=None): + return self._o.getheader(header.lower(), default) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def close(self): + self._o.release_conn() + + def isclosed(self): + return False + +class ConnectionPoolManager(LoggerMixin): + def __init__(self, host, port, certfile = None, keyfile = None, cacertfile=None, force_ssl = False, *args, **kw): + super(ConnectionPoolManager, self).__init__(*args, **kw) + + self.logger.debug("Creating ConnectionPoolManager for %s:%s", host, port) + + if certfile or keyfile or force_ssl: + #https://docs.python.org/2/library/ssl.html#ssl.SSLContext + from ssl import SSLContext, PROTOCOL_SSLv23 + ssl_context=SSLContext(PROTOCOL_SSLv23) + ssl_context.load_cert_chain(certfile = certfile, keyfile = keyfile) + ssl_context.load_verify_locations(cafile=cacertfile) + #https://docs.python.org/2/library/httplib.html + self.__pool = HTTPSConnectionPool(host, port, maxsize = 16, context = ssl_context) + else: + self.__pool = HTTPConnectionPool(host, port, maxsize = 16) + + def request(self, method, path, body, headers, timeout): + return Urllib3ResponseWrapper(self.__pool.urlopen(method, path, body, + headers, timeout = timeout, pool_timeout = 30, preload_content = False, assert_same_host = False)) + + + \ No newline at end of file diff --git a/futile/src/futile/net/http/client/RestClient.py b/futile/src/futile/net/http/client/RestClient.py new file mode 100644 index 0000000..3a253a7 --- /dev/null +++ b/futile/src/futile/net/http/client/RestClient.py @@ -0,0 +1,353 @@ +''' +Created on 21.05.2011 + +@author: kca +''' + +from base64 import b64encode +from cStringIO import StringIO +from datetime import datetime +from logging import DEBUG +from socket import getservbyname +from time import time +from urllib import quote_plus +from urllib2 import quote +from urlparse import urlparse + +from futile import ObjectProxy +from futile.logging import LoggerMixin +from futile.net.http.exc import NetworkError, HTTPError + + +def compose_qs(values): + return "&".join([ "%s=%s" % (quote(k), quote(v)) for k, v in dict(values).iteritems() ]) + +class LoggingResponseWrapper(LoggerMixin, ObjectProxy): + def __init__(self, response, *args, **kw): + super(LoggingResponseWrapper, self).__init__(proxyobject = response, *args, **kw) + self.__buffer = StringIO() + self.__finalized = False + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def __enter__(self): + return self + + def read(self, n = None): + s = self._o.read(n) + self.__buffer.write(s) + return s + + def readline(self): + s = self._o.readline() + self.__buffer.write(s) + return s + + def readlines(self, sizehint = None): + lines = self._o.readlines(sizehint) + self.__buffer.write(''.join(lines)) + return lines + + def close(self): + if self.__finalized: + self.logger.debug("%s is already finalized" % (self, )) + return + + self.__finalized = True + try: + if not self._o.isclosed(): + self.__buffer.write(self._o.read()) + self.logger.debug("Read data:\n %s", 
self.__buffer.getvalue()) + except: + self.logger.exception("Finalizing response failed") + finally: + self._o.close() + + self.__buffer.close() + + +class CachingHttplibResponseWrapper(ObjectProxy, LoggerMixin): + def __init__(self, response, path, tag, last_modified, cache, *args, **kw): + super(CachingHttplibResponseWrapper, self).__init__(proxyobject = response, *args, **kw) + self.__cache = cache + self.__buffer = StringIO() + self.__path = path + self.__tag = tag + self.__last_modified = last_modified + self.__finalized = False + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def __enter__(self): + return self + + def read(self, n = None): + s = self._o.read(n) + self.__buffer.write(s) + return s + + def readline(self): + s = self._o.readline() + self.__buffer.write(s) + return s + + def readlines(self, sizehint = None): + lines = self._o.readlines(sizehint) + self.__buffer.write(''.join(lines)) + return lines + + def close(self): + if self.__finalized: + self.logger.debug("%s is already finalized" % (self, )) + return + + self.__finalized = True + try: + if not self._o.isclosed(): + self.__buffer.write(self._o.read()) + val = self.__buffer.getvalue() + self.logger.debug("Putting to cache: %s -> %s, %s\n %s", self.__path, self.__tag, self.__last_modified, val) + self.__cache[self.__path] = (self.__tag, self.__last_modified, val) + except: + self.logger.exception("Finalizing response failed") + finally: + self._o.close() + + self.__buffer.close() + + def __getattr__(self, name): + return getattr(self._o, name) + + +class closing(ObjectProxy): + def __getattr__(self, k): + return getattr(self._o, k) + + def __enter__(self): + return self._o + + def __exit__(self, exc_type, exc_val, exc_tb): + self._o.close() + + def close(self): + self._o.close() + + +class RestClient(LoggerMixin): + ERROR_RESPONSE_MAX = 320 + + get_timeout = timeout = 120.0 + + def __init__(self, uri, username=None, password=None, certfile=None, + keyfile=None, cacertfile=None, content_type="text/plain", + headers=None, + cache=True, timeout=None, get_timeout=None, + component_name = "server", connection_manager = None, + *args, **kw): + super(RestClient, self).__init__(*args, **kw) + + self.logger.debug("Creating RestClient for %s", uri) + + self.timeout = timeout or self.timeout + self.get_timeout = get_timeout or timeout or self.get_timeout + + if cache: + if cache is True: + from futile.caching import LRUCache + cache = LRUCache() + self.__cache = cache + + if "://" not in uri: + uri = "http://" + uri + + self.__content_type = content_type + self.component_name = component_name + + info = urlparse(uri) + + self.logger.debug("Restclient certfile is %s"%certfile) + if info.scheme == "https": + if bool(certfile) ^ bool(keyfile): + raise ValueError("Must give both certfile and keyfile if any") + if certfile: + from os.path import exists + if not exists(certfile): + raise ValueError("Certificate file not found: %s" % (certfile, )) + if not exists(keyfile): + raise ValueError("Key file not found: %s" % (keyfile, )) + elif info.scheme != "http": + raise ValueError(info.scheme) + else: + # In case of http, we do not want any certificates + keyfile = certfile = None + + port = info.port and int(info.port) or getservbyname(info.scheme) + + self.__base = info.path or "" + #if not self.__base.endswith("/"): + # self.__base += "/" + + if not username: + username = info.username + + if not headers: + headers = {} + + headers.setdefault("Accept", "*/*") + headers["Accept-Encoding"] = "identity" + + if 
username: + password = password or info.password or "" + headers["Authorization"] = "Basic " + b64encode("%s:%s" % (username, password)) + + self.__headers = headers + + if not connection_manager: + #from SimpleConnectionManager import SimpleConnectionManager as connection_manager + from ConnectionPoolManager import ConnectionPoolManager as connection_manager + + self.__connection_manager = connection_manager(host=info.hostname, + port=port, + certfile = certfile, keyfile = keyfile, cacertfile = cacertfile, force_ssl = info.scheme == "https") + + def set_authinfo(self, username, password=""): + if not username: + self.__headers.pop("Authorization") + else: + self.__headers["Authorization"] = "Basic " + b64encode("%s:%s" % (quote_plus(username), password)) + + def request(self, method, path, data = None, headers = {}, args = None): + if isinstance(data, unicode): + data = data.encode("utf-8") + + fullpath = self.__base + path + + request_headers = self.__headers.copy() + + if args: + fullpath += ("?" in fullpath and "&" or "?") + compose_qs(args) + + if headers: + request_headers.update(headers) + + if method == "GET": + timeout = self.get_timeout + if self.__cache: + try: + etag, modified, cached = self.__cache[fullpath] + if etag: + request_headers["If-None-Match"] = etag + request_headers["If-Modified-Since"] = modified + except KeyError: + request_headers.pop("If-None-Match", None) + request_headers.pop("If-Modified-Since", None) + else: + timeout = self.timeout + + if data: + request_headers.setdefault("Content-Type", self.__content_type) + if hasattr(data, "read") and not hasattr(data, "fileno"): + data = data.read() + + log_headers = request_headers + #if self.logger.isEnabledFor(DEBUG) and "Authorization" in request_headers: + #log_headers = request_headers.copy() + #log_headers["Authorization"] = "" + + if method == "GET": + self.logger.debug("%s: %s (%s)", method, fullpath, log_headers) + else: + self.logger.debug("%s: %s (%s)\n%s", method, fullpath, log_headers, repr(data)) + + t = time() + try: + response = self.__connection_manager.request(method, fullpath, data, request_headers, timeout) + except Exception as e: + if self.logger.isEnabledFor(DEBUG): + self.logger.exception("Error during request") + if str(e) in ("", "''"): + e = repr(e) + try: + error_msg = "An error occurred while contacting the %s: %s. Request was: %s %s (%.4fs)" % (self.component_name, e, method, fullpath, time() - t) + except: + self.logger.exception("Failed to format error message.") + error_msg = "Error during request." + + raise NetworkError(error_msg) + + self.logger.debug("%s %s result: %s (%.4fs)", method, fullpath, response.status, time() - t) + r_status = response.status + if r_status == 304: + response.close() + try: + self.logger.debug("Using cached answer for %s (%s, %s):\n %s", fullpath, etag, modified, cached) + return closing(StringIO(cached)) + except NameError: + raise NetworkError("Error: The %s returned 304 though no cached version is available. 
Request was: %s %s" % (self.component_name, method, fullpath)) + if r_status == 302: + raise NotImplementedError("HTTP redirect") + if r_status < 200 or r_status >= 300: + with response: + via = response.getheader("Via") + try: + data = response.read(self.ERROR_RESPONSE_MAX and self.ERROR_RESPONSE_MAX + 1 or None) + if not data or (not self.logger.isEnabledFor(DEBUG) and "" in data): + data = "" + else: + if self.ERROR_RESPONSE_MAX and len(data) > self.ERROR_RESPONSE_MAX: + data = data[:self.ERROR_RESPONSE_MAX] + " (truncated)\n" + data = data.encode("utf-8") + except Exception as e: + data = "" % (e, ) + + if not data.endswith("\n"): + data += "\n" + + try: + msg = "Error during execution. The %s said: %s %s - %sRequest was: %s %s. " % (self.component_name, response.status, response.reason, data, method, fullpath) + except: + msg = "Error during execution. The %s said %s. " % (self.component_name, response.status) + + if via: + culprit = via.split(",")[0] + p = culprit.rfind("(") + if p >= 0 and culprit.endswith(")"): + culprit = culprit[p + 1:-1] + msg += "The error occurred after the request went through %s (Via: %s)." % (culprit, via) + else: + msg += "The error seems to have occurred at the %s (No Via header found in response)." % (self.component_name, ) + + raise HTTPError(msg=msg, status=response.status) + + if method == "DELETE": + try: + self.__cache.pop(fullpath, None) + except AttributeError: + pass + else: + etag = response.getheader("Etag") + modified = response.getheader("Last-Modified") + if self.__cache is not False and (etag or modified): + if not modified: + modified = datetime.utcnow().strftime("%a, %d %b %Y %X GMT") + response = CachingHttplibResponseWrapper(response, fullpath, etag, modified, self.__cache) + elif self.logger.isEnabledFor(DEBUG): + response = LoggingResponseWrapper(response) + + return response + + def get(self, path, headers = None, args = None): + return self.request("GET", path, headers = headers, args = args) + + def post(self, path, data, headers = None): + return self.request("POST", path, data, headers) + add = post + + def put(self, path, data, headers = None): + return self.request("PUT", path, data, headers) + update = put + + def delete(self, path, headers = None): + return self.request("DELETE", path, None, headers) diff --git a/futile/src/futile/net/http/client/RestClientAsync.py b/futile/src/futile/net/http/client/RestClientAsync.py new file mode 100644 index 0000000..8e9a491 --- /dev/null +++ b/futile/src/futile/net/http/client/RestClientAsync.py @@ -0,0 +1,327 @@ +''' +Created on 21.05.2011 + +@author: kca +''' + +from base64 import b64encode +from cStringIO import StringIO +from logging import DEBUG +from socket import getservbyname +from urllib2 import quote +from urlparse import urlparse + +#import vertx + +from aplus import Promise +from futile import ObjectProxy +from futile.logging import LoggerMixin +from futile.net.http.exc import NetworkError, HTTPError + + +def compose_qs(values): + return "&".join([ "%s=%s" % (quote(k), quote(v)) for k, v in dict(values).iteritems() ]) + +class LoggingResponseWrapper(LoggerMixin, ObjectProxy): + def __init__(self, response, *args, **kw): + super(LoggingResponseWrapper, self).__init__(proxyobject = response, *args, **kw) + self.__buffer = StringIO() + self.__finalized = False + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def __enter__(self): + return self + + def read(self, n = None): + s = self._o.read(n) + self.__buffer.write(s) + return s + + def readline(self): + s = 
self._o.readline() + self.__buffer.write(s) + return s + + def readlines(self, sizehint = None): + lines = self._o.readlines(sizehint) + self.__buffer.write(''.join(lines)) + return lines + + def close(self): + if self.__finalized: + self.logger.debug("%s is already finalized" % (self, )) + return + + self.__finalized = True + try: + if not self._o.isclosed(): + self.__buffer.write(self._o.read()) + self.logger.debug("Read data:\n %s", self.__buffer.getvalue()) + except: + self.logger.exception("Finalizing response failed") + finally: + self._o.close() + + self.__buffer.close() + + +class CachingHttplibResponseWrapper(ObjectProxy, LoggerMixin): + def __init__(self, response, path, tag, last_modified, cache, *args, **kw): + super(CachingHttplibResponseWrapper, self).__init__(proxyobject = response, *args, **kw) + self.__cache = cache + self.__buffer = StringIO() + self.__path = path + self.__tag = tag + self.__last_modified = last_modified + self.__finalized = False + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def __enter__(self): + return self + + def read(self, n = None): + s = self._o.read(n) + self.__buffer.write(s) + return s + + def readline(self): + s = self._o.readline() + self.__buffer.write(s) + return s + + def readlines(self, sizehint = None): + lines = self._o.readlines(sizehint) + self.__buffer.write(''.join(lines)) + return lines + + def close(self): + if self.__finalized: + self.logger.debug("%s is already finalized" % (self, )) + return + + self.__finalized = True + try: + if not self._o.isclosed(): + self.__buffer.write(self._o.read()) + val = self.__buffer.getvalue() + self.logger.debug("Putting to cache: %s -> %s, %s\n %s", self.__path, self.__tag, self.__last_modified, val) + self.__cache[self.__path] = (self.__tag, self.__last_modified, val) + except: + self.logger.exception("Finalizing response failed") + finally: + self._o.close() + + self.__buffer.close() + + def __getattr__(self, name): + return getattr(self._o, name) + + +class closing(ObjectProxy): + def __getattr__(self, k): + return getattr(self._o, k) + + def __enter__(self): + return self._o + + def __exit__(self, exc_type, exc_val, exc_tb): + self._o.close() + + def close(self): + self._o.close() + + +class RestClient(LoggerMixin): + ERROR_RESPONSE_MAX = 320 + + get_timeout = timeout = 120.0 + + def __init__(self, uri, username=None, password=None, certfile=None, + keyfile=None, content_type="text/plain", headers=None, + cache=True, timeout=None, get_timeout=None, + component_name = "server", connection_manager = None, + *args, **kw): + super(RestClient, self).__init__(*args, **kw) + + self.logger.debug("Creating RestClient for %s", uri) + + self.timeout = timeout or self.timeout + self.get_timeout = get_timeout or timeout or self.get_timeout + + if cache: + if cache is True: + from futile.caching import LRUCache + cache = LRUCache() + self.__cache = cache + + if "://" not in uri: + uri = "http://" + uri + + self.__content_type = content_type + self.component_name = component_name + + info = urlparse(uri) + + if info.scheme == "https": + if bool(certfile) ^ bool(keyfile): + raise ValueError("Must give both certfile and keyfile if any") + if certfile: + from os.path import exists + if not exists(certfile): + raise ValueError("Certificate file not found: %s" % (certfile, )) + if not exists(keyfile): + raise ValueError("Key file not found: %s" % (keyfile, )) + elif info.scheme != "http": + raise ValueError(info.scheme) + + port = info.port and int(info.port) or 
getservbyname(info.scheme) + + self.__base = info.path or "" + #if not self.__base.endswith("/"): + # self.__base += "/" + + if not username: + username = info.username + + if not headers: + headers = {} + + headers.setdefault("Accept", "*/*") + headers["Accept-Encoding"] = "identity" + + if username: + password = password or info.password or "" + headers["Authorization"] = "Basic " + b64encode("%s:%s" % (username, password)) + + self.__headers = headers + + #if not connection_manager: + # #from SimpleConnectionManager import SimpleConnectionManager as connection_manager + # from ConnectionPoolManager import ConnectionPoolManager as connection_manager + # + # self.__connection_manager = connection_manager(host = info.hostname, port = port, + # certfile = certfile, keyfile = keyfile, force_ssl = info.scheme == "https") + # + + self.client= vertx.create_http_client() + self.client.host = info.netloc.split(":")[0] + self.client.port = port + + #temporary test server + #import json + #self.srv = vertx.create_http_server() + #def srv_handle(re): + # re.response.put_header("Content-Type","application/json; charset=utf-8") + # re.response.put_header("Location","locationlocation.location") + # re.response.end(json.dumps({"One":"Two"})) + #self.srv.request_handler(srv_handle) + #self.srv.listen(5000) + + def request(self, method, path, data = None, headers = {}, args = None): + if isinstance(data, unicode): + data = data.encode("utf-8") + fullpath = self.__base + path + request_headers = self.__headers.copy() + + if args: + fullpath += ("?" in fullpath and "&" or "?") + compose_qs(args) + + if headers: + request_headers.update(headers) + + if method == "GET": + timeout = self.get_timeout + try: + etag, modified, cached = self.__cache[fullpath] + if etag: + request_headers["If-None-Match"] = etag + request_headers["If-Modified-Since"] = modified + except KeyError: + request_headers.pop("If-None-Match", None) + request_headers.pop("If-Modified-Since", None) + else: + timeout = self.timeout + request_headers.setdefault("Content-Type", self.__content_type) + + log_headers = request_headers + if self.logger.isEnabledFor(DEBUG) and "Authorization" in request_headers: + log_headers = request_headers.copy() + log_headers["Authorization"] = "" + + if method == "GET": + self.logger.debug("%s: %s (%s)", method, fullpath, log_headers) + else: + self.logger.debug("%s: %s (%s)\n%s", method, fullpath, log_headers, repr(data)) + + #t = time() + promise=Promise() + try: + #response = self.__connection_manager.request(method, fullpath, data, request_headers, timeout) + + def response_handler(resp): + if resp.status_code == 304: + try: + promise.fulfill(closing(StringIO(cached))) + except NameError: + promise.reject(NetworkError("Error: The %s returned 304 though no cached version is available. 
Request was: %s %s" % (self.component_name, method, fullpath))) + if resp.status_code < 200 or resp.status_code >= 300: + try: + promise.reject(HTTPError(msg = resp.status_message, status = resp.status_code)) + except: + promise.reject(HTTPError(msg = "Http error", status = response.status)) + else: + promise.fulfill(resp) + + req=self.client.request(method,fullpath,response_handler) + for head,value in request_headers.items(): + req.put_header(head,value) + if data: + req.chunked = True + req.write_str(data) + req.end() + + except Exception as e: + print "Exception triggered: %s"%e + promise.reject(e) + + return promise + + #if method == "DELETE": + # try: + # self.__cache.pop(fullpath, None) + # except AttributeError: + # pass + #else: + # etag = response.getheader("Etag") + # modified = response.getheader("Last-Modified") + # if etag or modified: + # if not modified: + # modified = datetime.utcnow().strftime("%a, %d %b %Y %X GMT") + # response = CachingHttplibResponseWrapper(response, fullpath, etag, modified, self.__cache) + # elif self.logger.isEnabledFor(DEBUG): + # response = LoggingResponseWrapper(response) + + + + + def get(self, path, headers = None, args = None): + p = self.request("GET", path, headers = headers, args = args) + return p + + def post(self, path, data, headers = None): + p = self.request("POST", path, data, headers) + return p + add = post + + def put(self, path, data, headers = None): + p = self.request("PUT", path, data) + return p + update = put + + def delete(self, path, headers = None): + p = self.request("DELETE", path, None, headers) + return p diff --git a/futile/src/futile/net/http/client/SimpleConnectionManager.py b/futile/src/futile/net/http/client/SimpleConnectionManager.py new file mode 100644 index 0000000..3b1b7dd --- /dev/null +++ b/futile/src/futile/net/http/client/SimpleConnectionManager.py @@ -0,0 +1,61 @@ +''' +Created on 19.03.2013 + +@author: kca +''' + +from httplib import HTTPConnection, HTTPSConnection +from futile.logging import LoggerMixin + +class HttplibResponseWrapper(LoggerMixin): + def __init__(self, connection, *args, **kw): + super(HttplibResponseWrapper, self).__init__(*args, **kw) + + self.__response = connection.getresponse() + self.__connection = connection + + def __getattr__(self, k): + return getattr(self.__response, k) + + def __enter__(self): + return self.__response + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def close(self): + try: + self.__response.close() + except: + self.logger.exception("Error closing response") + finally: + self.__connection.close() + +class SimpleConnectionManager(LoggerMixin): + def __init__(self, host, port, certfile = None, keyfile = None, force_ssl = False, *args, **kw): + super(SimpleConnectionManager, self).__init__(*args, **kw) + + self.logger.debug("Creating SimpleConnectionManager for %s:%s", host, port) + + if keyfile or certfile or force_ssl: + self.__certfile = certfile + self.__keyfile = keyfile + self._get_connection = self._get_secure_connection + + self.__host = host + self.__port = port + + def request(self, method, path, body, headers, timeout): + connection = self._get_connection(timeout) + try: + connection.request(method, path, body, headers) + return HttplibResponseWrapper(connection) + except: + connection.close() + raise + + def _get_connection(self, timeout): + return HTTPConnection(self.__host, self.__port, timeout = timeout) + + def _get_secure_connection(self, timeout): + return HTTPSConnection(self.__host, self.__port, self.__keyfile, 
self.__certfile, timeout = timeout) diff --git a/futile/src/futile/net/http/client/__init__.py b/futile/src/futile/net/http/client/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/futile/src/futile/net/http/exc.py b/futile/src/futile/net/http/exc.py new file mode 100644 index 0000000..17cc26a --- /dev/null +++ b/futile/src/futile/net/http/exc.py @@ -0,0 +1,134 @@ +''' +Created on 21.07.2011 + +@author: kca +''' + + +from futile.net.exc import NetworkError + +STATUS_STRINGS = { + 100: "Continue", + 101: "Switching Protocols", + 200: "Ok", + 201: "Created", + 202: "Accepted", + 203: "Non-Authoritative Information", + 204: "No Content", + 205: "Reset Content", + 206: "Partial Content", + 300: "Multiple Choices", + 301: "Moved Permanently", + 302: "Found", + 303: "See Other", + 304: "Not Modfied", + 305: "Use Proxy", + 306: "", + 307: "Temporary Redirect", + 400: "Bad Request", + 401: "Unauthorized", + 402: "Payment Required", + 403: "Forbidden", + 404: "Not Found", + 405: "Method Not Allowed", + 406: "Not Acceptable", + 407: "Proxy Authentication Required", + 408: "Request Timeout", + 409: "Conflict", + 410: "Gone", + 411: "Length Required", + 412: "Precondition Failed", + 413: "Request Entity Too Large", + 414: "Request-URI Too Long", + 415: "Unsupported Media Type", + 416: "Requested Range Not Satisfiable", + 417: "Expectation Failed", + 500: "Internal Server Error", + 501: "Not Implemented", + 502: "Bad Gateway", + 503: "Service Unavailable", + 504: "Gateway Timeout", +} + +STATUS_MIN = 100 +STATUS_MAX = 504 +ERROR_MIN = 400 +ERROR_MAX = 504 + + +def get_error_message(statuscode): + try: + return STATUS_STRINGS[statuscode] + except KeyError: + raise ValueError(statuscode) + + +class HTTPErrorType(type): + __classes = {} + + @classmethod + def get_error_class(cls, status): + try: + status = int(status) + except (TypeError, ValueError): + raise ValueError("Not a valid HTTP error code: '%s'" % (status, )) + + try: + errorcls = cls.__classes[status] + except KeyError: + if status < STATUS_MIN or status > STATUS_MAX: + raise ValueError("Not a valid HTTP error code: %s" % (status,)) + name = "HTTPError%s" % (status, ) + errorcls = cls(name, (HTTPError, ), {"__init__": + cls._make_init(status)}) + cls.__classes[status] = errorcls + globals()[name] = errorcls + + return errorcls + + def __call__(self, *args, **kw): + if self is HTTPError: + try: + status = kw.pop("status") + except KeyError: + try: + status = args[0] + args = args[1:] + except IndexError: + return super(HTTPErrorType, self).__call__(*args, **kw) + + self = self.get_error_class(status) + return super(HTTPErrorType, self).__call__(*args, **kw) + + @classmethod + def _make_init(cls, status): + def __init__(self, msg=None, reason=None, *args, **kw): + super(self.__class__, self).__init__(status=status, + reason=reason, msg=msg, *args, **kw) + return __init__ + +get_error_class = HTTPErrorType.get_error_class + + +class HTTPError(NetworkError): + __metaclass__ = HTTPErrorType + + def __init__(self, status, reason=None, msg=None, *args, **kw): + status = int(status) + if not reason: + reason = STATUS_STRINGS.get(status, "Unknown Error") + if not msg: + msg = "HTTP Error %s - %s" % (status, reason) + super(HTTPError, self).__init__(msg, status, reason, *args, **kw) + + @property + def message(self): + return self.args[0] + + @property + def status(self): + return self.args[1] + + @property + def reason(self): + return self.args[2] diff --git a/futile/src/futile/net/http/exc.pyc b/futile/src/futile/net/http/exc.pyc new 
file mode 100644 index 0000000..88aa381 Binary files /dev/null and b/futile/src/futile/net/http/exc.pyc differ diff --git a/futile/src/futile/net/http/server/__init__.py b/futile/src/futile/net/http/server/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/futile/src/futile/net/http/server/ssl/__init__.py b/futile/src/futile/net/http/server/ssl/__init__.py new file mode 100644 index 0000000..3a11e0c --- /dev/null +++ b/futile/src/futile/net/http/server/ssl/__init__.py @@ -0,0 +1,54 @@ +''' +Created on 18.08.2011 + +@author: kca +''' + +from futile.logging import LoggerMixin +from ssl import wrap_socket, SSLSocket, SSLError, CERT_OPTIONAL, CERT_NONE +from socket import error +from futile import NOT_SET + +class HTTPSMixin(LoggerMixin): + certfile = keyfile = ca_certs = None + cert_reqs = CERT_NONE + + def init_https(self, certfile, keyfile = None, ca_certs = None, cert_reqs = NOT_SET, secure = True): + self.keyfile = keyfile + self.certfile = certfile + self.ca_certs = ca_certs + if cert_reqs is NOT_SET: + cert_reqs = ca_certs and CERT_OPTIONAL or CERT_NONE + self.cert_reqs = cert_reqs + if secure: + self.enable_https() + + def enable_https(self): + if not self.secure: + if not self.certfile: + raise SSLError("Certificate info missing.") + if self.cert_reqs != CERT_NONE and not self.ca_certs: + raise SSLError("Certificate validation requested but no ca certs available.") + self.logger.debug("Enabling https with certfile=%s kefile=%s ca_certs=%s cert_reqs=%s", self.certfile, self.keyfile, self.ca_certs, self.cert_reqs) + self.socket = wrap_socket(self.socket, server_side = True, keyfile = self.keyfile, certfile = self.certfile, ca_certs = self.ca_certs, cert_reqs = self.cert_reqs) + + def disable_https(self): + if self.secure: + self.socket = self.socket._sock + + def get_request(self): + try: + return self.socket.accept() + except error, e: + self.logger.exception("Error during accept(): %s", e) + raise + + def is_secure(self): + return isinstance(self.socket, SSLSocket) + def set_secure(self, s): + if s: + self.enable_https() + else: + self.disable_https() + return s + secure = property(is_secure) diff --git a/futile/src/futile/net/http/server/wsgi/__init__.py b/futile/src/futile/net/http/server/wsgi/__init__.py new file mode 100644 index 0000000..fc0b1e2 --- /dev/null +++ b/futile/src/futile/net/http/server/wsgi/__init__.py @@ -0,0 +1,19 @@ +''' +Created on 17.07.2011 + +@author: kca +''' + +from wsgiref.simple_server import WSGIRequestHandler, WSGIServer as _WSGIServer +from SocketServer import ThreadingMixIn, ForkingMixIn + +class WSGIServer(_WSGIServer): + def __init__(self, server_address, app = None, RequestHandlerClass = WSGIRequestHandler): + _WSGIServer.__init__(self, server_address, RequestHandlerClass) + self.set_app(app) + +class ThreadingWSGIServer(ThreadingMixIn, WSGIServer): + pass + +class ForkingWSGIServer(ForkingMixIn, WSGIServer): + pass diff --git a/futile/src/futile/net/http/server/wsgi/ssl.py b/futile/src/futile/net/http/server/wsgi/ssl.py new file mode 100644 index 0000000..f7c17b9 --- /dev/null +++ b/futile/src/futile/net/http/server/wsgi/ssl.py @@ -0,0 +1,22 @@ +''' +Created on 22.08.2011 + +@author: kca +''' + +from ..ssl import HTTPSMixin +from ..wsgi import WSGIServer +from SocketServer import ThreadingMixIn, ForkingMixIn +from wsgiref.simple_server import WSGIRequestHandler +from futile import NOT_SET + +class SecureWSGIServer(HTTPSMixin, WSGIServer): + def __init__(self, server_address, certfile, keyfile = None, ca_certs = None, cert_reqs = 
NOT_SET, app = None, RequestHandlerClass = WSGIRequestHandler): + WSGIServer.__init__(self, server_address, app = app, RequestHandlerClass = RequestHandlerClass) + self.init_https(certfile, keyfile, ca_certs = ca_certs, cert_reqs = cert_reqs) + +class SecureThreadingWSGIServer(ThreadingMixIn, SecureWSGIServer): + pass + +class SecureForkingWSGIServer(ForkingMixIn, SecureWSGIServer): + pass diff --git a/futile/src/futile/net/sockethelper.py b/futile/src/futile/net/sockethelper.py new file mode 100644 index 0000000..0a146f4 --- /dev/null +++ b/futile/src/futile/net/sockethelper.py @@ -0,0 +1,13 @@ +''' +Created on 14.07.2011 + +@author: kca +''' + +from socket import socket as _socket, AF_INET, SOCK_STREAM +from futile.contextlib import closing + +def socket(family = AF_INET, type = SOCK_STREAM, proto = 0): + return closing(_socket(family, type, proto)) + + \ No newline at end of file diff --git a/futile/src/futile/net/wsgi.py b/futile/src/futile/net/wsgi.py new file mode 100644 index 0000000..df6a495 --- /dev/null +++ b/futile/src/futile/net/wsgi.py @@ -0,0 +1,14 @@ +''' +Created on 21.01.2012 + +@author: kca +''' + +from wsgiref.simple_server import WSGIServer +from SocketServer import ThreadingMixIn, ForkingMixIn + +class ThreadingWSGIServer(ThreadingMixIn, WSGIServer): + pass + +class ForkingWSGIServer(ForkingMixIn, WSGIServer): + pass diff --git a/futile/src/futile/net/xmlrpc.py b/futile/src/futile/net/xmlrpc.py new file mode 100644 index 0000000..9d87cc5 --- /dev/null +++ b/futile/src/futile/net/xmlrpc.py @@ -0,0 +1,40 @@ +''' +Created on 17.07.2011 + +@author: kca +''' + +from futile import Base +from SimpleXMLRPCServer import SimpleXMLRPCDispatcher + +class WSGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher, Base): + def __init__(self, encoding=None): + SimpleXMLRPCDispatcher.__init__(self, allow_none = True, encoding = encoding) + + def __call__(self, environ, start_response): + if environ["REQUEST_METHOD"] != "POST": + headers = [("Content-type", "text/html")] + + if environ["REQUEST_METHOD"] == "HEAD": + data = "" + else: + data = "400 Bad request


" + headers.append(("Content-length", str(len(data)))) + start_response("400 Bad request", headers) + return (data, ) + + l = int(environ["CONTENT_LENGTH"]) + request = environ["wsgi.input"].read(l) + response = self._marshaled_dispatch(request) + headers = [("Content-type", "text/xml"), ("Content-length", str(len(response)))] + start_response("200 OK", headers) + return (response, ) + + def _dispatch(self, *args, **kw): + try: + result = SimpleXMLRPCDispatcher._dispatch(self, *args, **kw) + # self.logger.debug("Result: %s" % (result, )) + return result + except: + self.logger.exception("Error while processing request") + raise \ No newline at end of file diff --git a/futile/src/futile/operator/__init__.py b/futile/src/futile/operator/__init__.py new file mode 100644 index 0000000..dda3a84 --- /dev/null +++ b/futile/src/futile/operator/__init__.py @@ -0,0 +1,9 @@ +from operator import attrgetter + +def attrproperty(name): + return property(attrgetter(name)) + +def resolve_attr(obj, attr): + for name in attr.split("."): + obj = getattr(obj, name) + return obj \ No newline at end of file diff --git a/futile/src/futile/os/__init__.py b/futile/src/futile/os/__init__.py new file mode 100644 index 0000000..f5ee1e2 --- /dev/null +++ b/futile/src/futile/os/__init__.py @@ -0,0 +1,6 @@ + + +def get_fileobj(f): + if not hasattr(f, "read"): + return open(f) + return f diff --git a/futile/src/futile/os/mount.py b/futile/src/futile/os/mount.py new file mode 100644 index 0000000..34e1ec0 --- /dev/null +++ b/futile/src/futile/os/mount.py @@ -0,0 +1,53 @@ +''' +Created on 24.01.2012 + +@author: kca +''' + +from ..path import Path +from ..subprocess import check_output + +def umount(where, force = False): + cmd = [ "umount", where ] + if force: + cmd.append("-f") + check_output(cmd) +unmount = umount + +def mount(what, where, fstype = None, options = None): + return Mount(what, where, fstype, options).mount() + +class Mount(object): + def __init__(self, what, where, fstype = None, options = None): + self.what = Path(what) + self.where = Path(where) + self.fstype = fstype + options = self.options = options and set(options) or set() + if what.isfile(): + options.add("loop") + elif not what.isblockdev(): + raise ValueError("Mount source must be a file or block device: %s" % (what, )) + + def mount(self, fstype = None, options = None): + cmd = [ "mount", self.what, self.where ] + + fstype = fstype or self.fstype + if fstype: + cmd += [ "-t", self.fstype ] + + opts = self.options + if options: + opts += set(self.options) + if opts: + cmd += [ "-o", ','.join(self.options) ] + + check_output(cmd) + return self + __enter__ = mount + + def umount(self, force = False): + umount(self.where, force) + unmount = umount + + def __exit__(self, exc_type, exc_val, exc_tb): + self.umount(True) diff --git a/futile/src/futile/path/__init__.py b/futile/src/futile/path/__init__.py new file mode 100644 index 0000000..c193650 --- /dev/null +++ b/futile/src/futile/path/__init__.py @@ -0,0 +1,865 @@ +""" path.py - An object representing a path to a file or directory. + +Example: + +from path import path +d = path('/home/guido/bin') +for f in d.files('*.py'): + f.chmod(0755) + +This module requires Python 2.2 or later. + + +URL: http://www.jorendorff.com/articles/python/path +Author: Jason Orendorff (and others - see the url!) +Date: 7 Mar 2004 +""" + +# Note - this is an umodified version of Jason Orendorff's 'path' module. + +# TODO +# - Bug in write_text(). It doesn't support Universal newline mode. 
+# - Better error message in listdir() when self isn't a +# directory. (On Windows, the error message really sucks.) +# - Make sure everything has a good docstring. +# - Add methods for regex find and replace. +# - guess_content_type() method? +# - Perhaps support arguments to touch(). +# - Could add split() and join() methods that generate warnings. +# - Note: __add__() technically has a bug, I think, where +# it doesn't play nice with other types that implement +# __radd__(). Test this. + +from __future__ import generators + +def quote(p): + from urllib2 import quote + return quote(p, "") + + +import sys, os, fnmatch, glob, shutil, codecs + +__version__ = '2.0.4' +__all__ = ['path'] + +# Pre-2.3 support. Are unicode filenames supported? +_base = str +try: + if os.path.supports_unicode_filenames: + _base = unicode +except AttributeError: + pass + +# Pre-2.3 workaround for basestring. +try: + basestring +except NameError: + basestring = (str, unicode) + +# Universal newline support +_textmode = 'r' +if hasattr(file, 'newlines'): + _textmode = 'U' + + +class path(_base): + """ Represents a filesystem path. + + For documentation on individual methods, consult their + counterparts in os.path. + """ + + # --- Special Python methods. + + def __repr__(self): + return 'path(%s)' % _base.__repr__(self) + + # Adding a path and a string yields a path. + def __add__(self, more): + return path(_base(self) + more) + + def __radd__(self, other): + return path(other + _base(self)) + + # The / operator joins paths. + def __div__(self, rel): + """ fp.__div__(rel) == fp / rel == fp.joinpath(rel) + + Join two path components, adding a separator character if + needed. + """ + return path(os.path.join(self, rel)) + + # Make the / operator work even when true division is enabled. + __truediv__ = __div__ + + def getcwd(): + """ Return the current working directory as a path object. """ + return path(os.getcwd()) + getcwd = staticmethod(getcwd) + + + # --- Operations on path strings. + + def abspath(self): return path(os.path.abspath(self)) + def normcase(self): return path(os.path.normcase(self)) + def normpath(self): return path(os.path.normpath(self)) + def realpath(self): return path(os.path.realpath(self)) + def expanduser(self): return path(os.path.expanduser(self)) + def expandvars(self): return path(os.path.expandvars(self)) + def dirname(self): return path(os.path.dirname(self)) + basename = os.path.basename + + def expand(self): + """ Clean up a filename by calling expandvars(), + expanduser(), and normpath() on it. + + This is commonly everything needed to clean up a filename + read from a configuration file, for example. + """ + return self.expandvars().expanduser().normpath() + + def _get_namebase(self): + base, _ext = os.path.splitext(self.name) + return base + + def _get_ext(self): + _f, ext = os.path.splitext(_base(self)) + return ext + + def _get_drive(self): + drive, _r = os.path.splitdrive(self) + return path(drive) + + parent = property( + dirname, None, None, + """ This path's parent directory, as a new path object. + + For example, path('/usr/local/lib/libpython.so').parent == path('/usr/local/lib') + """) + + name = property( + basename, None, None, + """ The name of this file or directory without the full path. + + For example, path('/usr/local/lib/libpython.so').name == 'libpython.so' + """) + + namebase = property( + _get_namebase, None, None, + """ The same as path.name, but with one file extension stripped off. 
+ + For example, path('/home/guido/python.tar.gz').name == 'python.tar.gz', + but path('/home/guido/python.tar.gz').namebase == 'python.tar' + """) + + ext = property( + _get_ext, None, None, + """ The file extension, for example '.py'. """) + + drive = property( + _get_drive, None, None, + """ The drive specifier, for example 'C:'. + This is always empty on systems that don't use drive specifiers. + """) + + def splitpath(self): + """ p.splitpath() -> Return (p.parent, p.name). """ + parent, child = os.path.split(self) + return path(parent), child + + def splitdrive(self): + """ p.splitdrive() -> Return (p.drive, ). + + Split the drive specifier from this path. If there is + no drive specifier, p.drive is empty, so the return value + is simply (path(''), p). This is always the case on Unix. + """ + drive, rel = os.path.splitdrive(self) + return path(drive), rel + + def splitext(self): + """ p.splitext() -> Return (p.stripext(), p.ext). + + Split the filename extension from this path and return + the two parts. Either part may be empty. + + The extension is everything from '.' to the end of the + last path segment. This has the property that if + (a, b) == p.splitext(), then a + b == p. + """ + filename, ext = os.path.splitext(self) + return path(filename), ext + + def stripext(self): + """ p.stripext() -> Remove one file extension from the path. + + For example, path('/home/guido/python.tar.gz').stripext() + returns path('/home/guido/python.tar'). + """ + return self.splitext()[0] + + if hasattr(os.path, 'splitunc'): + def splitunc(self): + unc, rest = os.path.splitunc(self) + return path(unc), rest + + def _get_uncshare(self): + unc, r = os.path.splitunc(self) + return path(unc) + + uncshare = property( + _get_uncshare, None, None, + """ The UNC mount point for this path. + This is empty for paths on local drives. """) + + def joinpath(self, *args): + """ Join two or more path components, adding a separator + character (os.sep) if needed. Returns a new path + object. + """ + return path(os.path.join(self, *args)) + + def splitall(self): + """ Return a list of the path components in this path. + + The first item in the list will be a path. Its value will be + either os.curdir, os.pardir, empty, or the root directory of + this path (for example, '/' or 'C:\\'). The other items in + the list will be strings. + + path.path.joinpath(*result) will yield the original path. + """ + parts = [] + loc = self + while loc != os.curdir and loc != os.pardir: + prev = loc + loc, child = prev.splitpath() + if loc == prev: + break + parts.append(child) + parts.append(loc) + parts.reverse() + return parts + + def relpath(self): + """ Return this path as a relative path, + based from the current working directory. + """ + cwd = path(os.getcwd()) + return cwd.relpathto(self) + + def relpathto(self, dest): + """ Return a relative path from self to dest. + + If there is no relative path from self to dest, for example if + they reside on different drives in Windows, then this returns + dest.abspath(). + """ + origin = self.abspath() + dest = path(dest).abspath() + + orig_list = origin.normcase().splitall() + # Don't normcase dest! We want to preserve the case. + dest_list = dest.splitall() + + if orig_list[0] != os.path.normcase(dest_list[0]): + # Can't get here from there. + return dest + + # Find the location where the two paths start to differ. 
+ i = 0 + for start_seg, dest_seg in zip(orig_list, dest_list): + if start_seg != os.path.normcase(dest_seg): + break + i += 1 + + # Now i is the point where the two paths diverge. + # Need a certain number of "os.pardir"s to work up + # from the origin to the point of divergence. + segments = [os.pardir] * (len(orig_list) - i) + # Need to add the diverging part of dest_list. + segments += dest_list[i:] + if len(segments) == 0: + # If they happen to be identical, use os.curdir. + return path(os.curdir) + else: + return path(os.path.join(*segments)) + + + # --- Listing, searching, walking, and matching + + def listdir(self, pattern=None): + """ D.listdir() -> List of items in this directory. + + Use D.files() or D.dirs() instead if you want a listing + of just files or just subdirectories. + + The elements of the list are path objects. + + With the optional 'pattern' argument, this only lists + items whose names match the given pattern. + """ + names = os.listdir(self) + if pattern is not None: + names = fnmatch.filter(names, pattern) + return [self / child for child in names] + + def dirs(self, pattern=None): + """ D.dirs() -> List of this directory's subdirectories. + + The elements of the list are path objects. + This does not walk recursively into subdirectories + (but see path.walkdirs). + + With the optional 'pattern' argument, this only lists + directories whose names match the given pattern. For + example, d.dirs('build-*'). + """ + return [p for p in self.listdir(pattern) if p.isdir()] + + def devs(self, pattern = None): + return [p for p in self.listdir(pattern) if p.isdev()] + + def blockdevs(self, pattern = None): + return [p for p in self.listdir(pattern) if p.isblockdev()] + + def chardevs(self, pattern = None): + return [p for p in self.listdir(pattern) if p.ischardev()] + + def files(self, pattern=None): + """ D.files() -> List of the files in this directory. + + The elements of the list are path objects. + This does not walk into subdirectories (see path.walkfiles). + + With the optional 'pattern' argument, this only lists files + whose names match the given pattern. For example, + d.files('*.pyc'). + """ + + return [p for p in self.listdir(pattern) if p.isfile()] + + def walk(self, pattern=None): + """ D.walk() -> iterator over files and subdirs, recursively. + + The iterator yields path objects naming each child item of + this directory and its descendants. This requires that + D.isdir(). + + This performs a depth-first traversal of the directory tree. + Each directory is returned just before all its children. + """ + for child in self.listdir(): + if pattern is None or child.fnmatch(pattern): + yield child + if child.isdir(): + for item in child.walk(pattern): + yield item + + def walkdirs(self, pattern=None): + """ D.walkdirs() -> iterator over subdirs, recursively. + + With the optional 'pattern' argument, this yields only + directories whose names match the given pattern. For + example, mydir.walkdirs('*test') yields only directories + with names ending in 'test'. + """ + for child in self.dirs(): + if pattern is None or child.fnmatch(pattern): + yield child + for subsubdir in child.walkdirs(pattern): + yield subsubdir + + def walkfiles(self, pattern=None): + """ D.walkfiles() -> iterator over files in D, recursively. + + The optional argument, pattern, limits the results to files + with names that match the pattern. For example, + mydir.walkfiles('*.tmp') yields only files with the .tmp + extension. 
+ """ + for child in self.listdir(): + if child.isfile(): + if pattern is None or child.fnmatch(pattern): + yield child + elif child.isdir(): + for f in child.walkfiles(pattern): + yield f + + def fnmatch(self, pattern): + """ Return True if self.name matches the given pattern. + + pattern - A filename pattern with wildcards, + for example '*.py'. + """ + return fnmatch.fnmatch(self.name, pattern) + + def glob(self, pattern): + """ Return a list of path objects that match the pattern. + + pattern - a path relative to this directory, with wildcards. + + For example, path('/users').glob('*/bin/*') returns a list + of all the files users have in their bin directories. + """ + return map(path, glob.glob(_base(self / pattern))) + + + # --- Reading or writing an entire file at once. + + def open(self, mode='r'): + """ Open this file. Return a file object. """ + return file(self, mode) + + def bytes(self): + """ Open this file, read all bytes, return them as a string. """ + f = self.open('rb') + try: + return f.read() + finally: + f.close() + + def write_bytes(self, bytes, append=False): + """ Open this file and write the given bytes to it. + + Default behavior is to overwrite any existing file. + Call this with write_bytes(bytes, append=True) to append instead. + """ + if append: + mode = 'ab' + else: + mode = 'wb' + f = self.open(mode) + try: + f.write(bytes) + finally: + f.close() + + def text(self, encoding=None, errors='strict'): + """ Open this file, read it in, return the content as a string. + + This uses 'U' mode in Python 2.3 and later, so '\r\n' and '\r' + are automatically translated to '\n'. + + Optional arguments: + + encoding - The Unicode encoding (or character set) of + the file. If present, the content of the file is + decoded and returned as a unicode object; otherwise + it is returned as an 8-bit str. + errors - How to handle Unicode errors; see help(str.decode) + for the options. Default is 'strict'. + """ + if encoding is None: + # 8-bit + f = self.open(_textmode) + try: + return f.read() + finally: + f.close() + else: + # Unicode + f = codecs.open(self, 'r', encoding, errors) + # (Note - Can't use 'U' mode here, since codecs.open + # doesn't support 'U' mode, even in Python 2.3.) + try: + t = f.read() + finally: + f.close() + return (t.replace(u'\r\n', u'\n') + .replace(u'\r\x85', u'\n') + .replace(u'\r', u'\n') + .replace(u'\x85', u'\n') + .replace(u'\u2028', u'\n')) + + def write_text(self, text, encoding=None, errors='strict', linesep=os.linesep, append=False): + """ Write the given text to this file. + + The default behavior is to overwrite any existing file; + to append instead, use the 'append=True' keyword argument. + + There are two differences between path.write_text() and + path.write_bytes(): newline handling and Unicode handling. + See below. + + Parameters: + + - text - str/unicode - The text to be written. + + - encoding - str - The Unicode encoding that will be used. + This is ignored if 'text' isn't a Unicode string. + + - errors - str - How to handle Unicode encoding errors. + Default is 'strict'. See help(unicode.encode) for the + options. This is ignored if 'text' isn't a Unicode + string. + + - linesep - keyword argument - str/unicode - The sequence of + characters to be used to mark end-of-line. The default is + os.linesep. You can also specify None; this means to + leave all newlines as they are in 'text'. + + - append - keyword argument - bool - Specifies what to do if + the file already exists (True: append to the end of it; + False: overwrite it.) 
The default is False. + + + --- Newline handling. + + write_text() converts all standard end-of-line sequences + ('\n', '\r', and '\r\n') to your platform's default end-of-line + sequence (see os.linesep; on Windows, for example, the + end-of-line marker is '\r\n'). + + If you don't like your platform's default, you can override it + using the 'linesep=' keyword argument. If you specifically want + write_text() to preserve the newlines as-is, use 'linesep=None'. + + This applies to Unicode text the same as to 8-bit text, except + there are three additional standard Unicode end-of-line sequences: + u'\x85', u'\r\x85', and u'\u2028'. + + (This is slightly different from when you open a file for + writing with fopen(filename, "w") in C or file(filename, 'w') + in Python.) + + + --- Unicode + + If 'text' isn't Unicode, then apart from newline handling, the + bytes are written verbatim to the file. The 'encoding' and + 'errors' arguments are not used and must be omitted. + + If 'text' is Unicode, it is first converted to bytes using the + specified 'encoding' (or the default encoding if 'encoding' + isn't specified). The 'errors' argument applies only to this + conversion. + + """ + if isinstance(text, unicode): + if linesep is not None: + # Convert all standard end-of-line sequences to + # ordinary newline characters. + text = (text.replace(u'\r\n', u'\n') + .replace(u'\r\x85', u'\n') + .replace(u'\r', u'\n') + .replace(u'\x85', u'\n') + .replace(u'\u2028', u'\n')) + text = text.replace(u'\n', linesep) + if encoding is None: + encoding = sys.getdefaultencoding() + bytes = text.encode(encoding, errors) + else: + # It is an error to specify an encoding if 'text' is + # an 8-bit string. + assert encoding is None + + if linesep is not None: + text = (text.replace('\r\n', '\n') + .replace('\r', '\n')) + bytes = text.replace('\n', linesep) + + self.write_bytes(bytes, append) + + def lines(self, encoding=None, errors='strict', retain=True): + """ Open this file, read all lines, return them in a list. + + Optional arguments: + encoding - The Unicode encoding (or character set) of + the file. The default is None, meaning the content + of the file is read as 8-bit characters and returned + as a list of (non-Unicode) str objects. + errors - How to handle Unicode errors; see help(str.decode) + for the options. Default is 'strict' + retain - If true, retain newline characters; but all newline + character combinations ('\r', '\n', '\r\n') are + translated to '\n'. If false, newline characters are + stripped off. Default is True. + + This uses 'U' mode in Python 2.3 and later. + """ + if encoding is None and retain: + f = self.open(_textmode) + try: + return f.readlines() + finally: + f.close() + else: + return self.text(encoding, errors).splitlines(retain) + + def write_lines(self, lines, encoding=None, errors='strict', + linesep=os.linesep, append=False): + """ Write the given lines of text to this file. + + By default this overwrites any existing file at this path. + + This puts a platform-specific newline sequence on every line. + See 'linesep' below. + + lines - A list of strings. + + encoding - A Unicode encoding to use. This applies only if + 'lines' contains any Unicode strings. + + errors - How to handle errors in Unicode encoding. This + also applies only to Unicode strings. + + linesep - The desired line-ending. This line-ending is + applied to every line. 
If a line already has any + standard line ending ('\r', '\n', '\r\n', u'\x85', + u'\r\x85', u'\u2028'), that will be stripped off and + this will be used instead. The default is os.linesep, + which is platform-dependent ('\r\n' on Windows, '\n' on + Unix, etc.) Specify None to write the lines as-is, + like file.writelines(). + + Use the keyword argument append=True to append lines to the + file. The default is to overwrite the file. Warning: + When you use this with Unicode data, if the encoding of the + existing data in the file is different from the encoding + you specify with the encoding= parameter, the result is + mixed-encoding data, which can really confuse someone trying + to read the file later. + """ + if append: + mode = 'ab' + else: + mode = 'wb' + f = self.open(mode) + try: + for line in lines: + isUnicode = isinstance(line, unicode) + if linesep is not None: + # Strip off any existing line-end and add the + # specified linesep string. + if isUnicode: + if line[-2:] in (u'\r\n', u'\x0d\x85'): + line = line[:-2] + elif line[-1:] in (u'\r', u'\n', + u'\x85', u'\u2028'): + line = line[:-1] + else: + if line[-2:] == '\r\n': + line = line[:-2] + elif line[-1:] in ('\r', '\n'): + line = line[:-1] + line += linesep + if isUnicode: + if encoding is None: + encoding = sys.getdefaultencoding() + line = line.encode(encoding, errors) + f.write(line) + finally: + f.close() + + + # --- Methods for querying the filesystem. + + exists = os.path.exists + isabs = os.path.isabs + isdir = os.path.isdir + isfile = os.path.isfile + islink = os.path.islink + ismount = os.path.ismount + + if hasattr(os.path, 'samefile'): + samefile = os.path.samefile + + getatime = os.path.getatime + atime = property( + getatime, None, None, + """ Last access time of the file. """) + + getmtime = os.path.getmtime + mtime = property( + getmtime, None, None, + """ Last-modified time of the file. """) + + if hasattr(os.path, 'getctime'): + getctime = os.path.getctime + ctime = property( + getctime, None, None, + """ Creation time of the file. """) + + getsize = os.path.getsize + size = property( + getsize, None, None, + """ Size of the file, in bytes. """) + + def isdev(self): + from stat import S_ISBLK, S_ISCHR + mode = self.__st_mode() + return S_ISBLK(mode) or S_ISCHR(mode) + + def __st_mode(self): + try: + return self.stat().st_mode + except OSError as e: + if e.errno != 2: + raise + return 0 + + def ischardev(self): + from stat import S_ISCHR + return S_ISCHR(self.__st_mode()) + + def isblockdev(self): + from stat import S_ISBLK + return S_ISBLK(self.__st_mode()) + + if hasattr(os, 'access'): + def access(self, mode): + """ Return true if current user has access to this path. + + mode - One of the constants os.F_OK, os.R_OK, os.W_OK, os.X_OK + """ + return os.access(self, mode) + + def stat(self): + """ Perform a stat() system call on this path. """ + return os.stat(self) + + def lstat(self): + """ Like path.stat(), but do not follow symbolic links. """ + return os.lstat(self) + + if hasattr(os, 'statvfs'): + def statvfs(self): + """ Perform a statvfs() system call on this path. """ + return os.statvfs(self) + + if hasattr(os, 'pathconf'): + def pathconf(self, name): + return os.pathconf(self, name) + + + # --- Modifying operations on files and directories + + def utime(self, times): + """ Set the access and modified times of this file. 
""" + os.utime(self, times) + + def chmod(self, mode): + os.chmod(self, mode) + + if hasattr(os, 'chown'): + def chown(self, uid, gid): + os.chown(self, uid, gid) + + def rename(self, new): + os.rename(self, new) + + def renames(self, new): + os.renames(self, new) + # --- Create/delete operations on directories + + def mkdir(self, mode=0750): + os.mkdir(self, mode) + + def makedirs(self, mode=0750): + os.makedirs(self, mode) + + def rmdir(self): + os.rmdir(self) + + def removedirs(self): + os.removedirs(self) + + + # --- Modifying operations on files + + def touch(self, mode = 0640): + """ Set the access/modified times of this file to the current time. + Create the file if it does not exist. + """ + fd = os.open(self, os.O_WRONLY | os.O_CREAT, mode) + os.close(fd) + os.utime(self, None) + + def remove(self): + os.remove(self) + + def unlink(self): + os.unlink(self) + + + # --- Links + + if hasattr(os, 'link'): + def link(self, newpath): + """ Create a hard link at 'newpath', pointing to this file. """ + os.link(self, newpath) + + if hasattr(os, 'symlink'): + def symlink(self, newlink): + """ Create a symbolic link at 'newlink', pointing here. """ + os.symlink(self, newlink) + + if hasattr(os, 'readlink'): + def readlink(self): + """ Return the path to which this symbolic link points. + + The result may be an absolute or a relative path. + """ + return path(os.readlink(self)) + + def readlinkabs(self): + """ Return the path to which this symbolic link points. + + The result is always an absolute path. + """ + p = self.readlink() + if p.isabs(): + return p + else: + return (self.parent / p).abspath() + + def checkdir(self): + if not self.isdir(): + raise Exception("Not a directory: '%s'" % (self, )) + + def checkfile(self): + if not self.isfile(): + raise Exception("Not a file: '%s'" % (self, )) + + def forcedir(self, mode = 0750): + if not self.isdir(): + if self.exists(): + raise Exception("Not a directory: '%s'" % (self, )) + self.makedirs(mode) + + def forcefile(self, mode = 0640): + if not self.exists(): + return self.touch(mode = 0640) + if not self.isfile(): + raise Exception("Not a file: %s" % (self ,)) + + # --- High-level functions from shutil + + copyfile = shutil.copyfile + copymode = shutil.copymode + copystat = shutil.copystat + copy = shutil.copy + copy2 = shutil.copy2 + copytree = shutil.copytree + if hasattr(shutil, 'move'): + move = shutil.move + + def rmtree(self): + if self.isdir(): + return shutil.rmtree(self) + self.unlink() + + quote = quote + + # --- Special stuff from os + + if hasattr(os, 'chroot'): + def chroot(self): + os.chroot(self) + + if hasattr(os, 'startfile'): + startfile = os.startfile + +Path = path \ No newline at end of file diff --git a/futile/src/futile/profile/__init__.py b/futile/src/futile/profile/__init__.py new file mode 100644 index 0000000..43adfb8 --- /dev/null +++ b/futile/src/futile/profile/__init__.py @@ -0,0 +1,16 @@ +from time import time + +def timeit(f): + def _timeit(*args, **kw): + _timeit.__runs__ += 1 + start = time() + try: + return f(*args, **kw) + finally: + spent = _timeit.__last_time__ = time() - start + _timeit.__total_time__ += spent + _timeit.__runs__ = 0 + _timeit.__total_time__ = 0.0 + _timeit.__last_time__ = None + _timeit.__name__ = f.__name__ + return _timeit \ No newline at end of file diff --git a/futile/src/futile/serializer/__init__.py b/futile/src/futile/serializer/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/futile/src/futile/serializer/exc.py b/futile/src/futile/serializer/exc.py new file 
mode 100644 index 0000000..929535b --- /dev/null +++ b/futile/src/futile/serializer/exc.py @@ -0,0 +1,8 @@ +''' +Created on 24.09.2011 + +@author: kca +''' + +class ParseError(Exception): + pass \ No newline at end of file diff --git a/futile/src/futile/serializer/xml.py b/futile/src/futile/serializer/xml.py new file mode 100644 index 0000000..f52d672 --- /dev/null +++ b/futile/src/futile/serializer/xml.py @@ -0,0 +1,51 @@ +''' +Created on 28.08.2011 + +@author: kca +''' + +from ..logging import LoggerMixin +from logging import DEBUG +from ..etree.impl import ElementTree, XML, ParseError as XMLParseError, XMLSyntaxError, tostring +from abc import ABCMeta, abstractmethod +from futile.serializer.exc import ParseError + +class AbstractXMLSerializer(LoggerMixin): + __metaclass__ = ABCMeta + + def load(self, input): + if self.logger.isEnabledFor(DEBUG): + from cStringIO import StringIO + input = input.read() + self.logger.debug("Parsing input: %s", input) + input = StringIO(input) + root = self._load(input) + return self._parse_input(root) + + def _load(self, input): + try: + if isinstance(input, str): + return XML(input) + else: + return ElementTree().parse(input) + except Exception, e: + self._handle_parse_error(e) + raise ParseError(e) + + def _handle_parse_error(self, e): + self.logger.exception("Error parsing input: %s", e) + + @abstractmethod + def _parse_input(self, root): + raise NotImplementedError() + + def dump(self, o, pretty_print = True): + raise NotImplementedError() + + def dumps(self, o, pretty_print = True): + xml = self._dump_object(o) + return tostring(xml, pretty_print = pretty_print) + + @abstractmethod + def _dump_object(self, o): + raise NotImplementedError() diff --git a/futile/src/futile/signal/__init__.py b/futile/src/futile/signal/__init__.py new file mode 100644 index 0000000..a81e7e8 --- /dev/null +++ b/futile/src/futile/signal/__init__.py @@ -0,0 +1 @@ +from timeout import timeout, Timeout \ No newline at end of file diff --git a/futile/src/futile/signal/timeout.py b/futile/src/futile/signal/timeout.py new file mode 100644 index 0000000..40015ce --- /dev/null +++ b/futile/src/futile/signal/timeout.py @@ -0,0 +1,29 @@ +''' +Created on 20.05.2011 + +@author: kca +''' + +from signal import signal, SIGALRM, alarm +from contextlib import contextmanager +from futile import noop + + +@contextmanager +def timeout(seconds): + if not seconds: + yield + return + + original_handler = signal(SIGALRM, noop) + + try: + alarm(seconds) + yield + finally: + alarm(0) + signal(SIGALRM, original_handler) + + +def Timeout(seconds): + return lambda: timeout(seconds) diff --git a/futile/src/futile/singleton.py b/futile/src/futile/singleton.py new file mode 100644 index 0000000..e171320 --- /dev/null +++ b/futile/src/futile/singleton.py @@ -0,0 +1,30 @@ +''' +Created on 23.07.2011 + +@author: kca +''' +from futile import Base +from futile.logging import LoggerMixin + +class SingletonType(type, LoggerMixin): + __instances = {} + + def get_instance(self): + try: + i = self.__instances[self] + self.logger.debug("Reusing singleton instance for %s.%s" % (self.__module__, self.__name__)) + except KeyError: + self.logger.debug("Creating singleton instance for %s.%s" % (self.__module__, self.__name__)) + i = super(SingletonType, self).__call__() + self.__instances[self] = i + return i + +class ForcedSingletonType(SingletonType): + def __call__(self, *args, **kw): + return self.get_instance() + +class Singleton(Base): + __metaclass__ = SingletonType + +class ForcedSingleton(Base): + __metaclass__ 
= ForcedSingletonType \ No newline at end of file diff --git a/futile/src/futile/string/__init__.py b/futile/src/futile/string/__init__.py new file mode 100644 index 0000000..bb3f869 --- /dev/null +++ b/futile/src/futile/string/__init__.py @@ -0,0 +1,23 @@ +import string + +letters_digits_underscore = string.letters + string.digits + "_" + + +class InvalidIdentifier(ValueError): + pass + + +def is_identifier(s): + if not s or s[0] not in string.letters: + return False + + for c in s: + if c not in letters_digits_underscore: + return False + + return True + + +def check_identifier(s): + if not is_identifier(s): + raise InvalidIdentifier(s) diff --git a/futile/src/futile/subprocess/__init__.py b/futile/src/futile/subprocess/__init__.py new file mode 100644 index 0000000..cbf830a --- /dev/null +++ b/futile/src/futile/subprocess/__init__.py @@ -0,0 +1,46 @@ +''' +Created on 17.07.2011 + +@author: kca +''' + +import logging, sys +from futile.logging import get_logger +from subprocess import check_output as _check_output, check_call as _check_call, CalledProcessError, STDOUT, Popen + +try: + from subprocces import SubprocessError, TimeoutExpired +except ImportError: + class SubprocessError(Exception): + pass + + class TimeoutExpired(SubprocessError): + pass + +def _pre_call(args): + #needed for chroot safety + import encodings.string_escape + + cmd = ' '.join(args) + get_logger().debug("running %s" % (cmd, )) + return cmd + + +def check_output(args, stdin=None, stderr=STDOUT, shell=False, cwd=None, env=None, *popenargs, **popenkw): + cmd = _pre_call(args) + + try: + return _check_output(args, stdin=stdin, stderr=stderr, shell=shell, cwd=cwd, env=env, *popenargs, **popenkw) + except CalledProcessError as e: + get_logger().debug("Command %s returned exit code %s. This is the programs output:\n%s<>" % (cmd, e.returncode, e.output)) + raise + +def check_call(args, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, env=None, *popenargs, **popenkw): + cmd = _pre_call(args) + + try: + return _check_call(args, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env, *popenargs, **popenkw) + except CalledProcessError as e: + get_logger().debug("Command %s returned exit code %s." % (cmd, e.returncode)) + raise + diff --git a/futile/src/futile/subprocess/daemon.py b/futile/src/futile/subprocess/daemon.py new file mode 100644 index 0000000..8d6eab3 --- /dev/null +++ b/futile/src/futile/subprocess/daemon.py @@ -0,0 +1,165 @@ +''' +Created on 02.02.2012 + +@author: kca +''' + +from time import sleep +from abc import ABCMeta, abstractproperty, abstractmethod +from futile import Base +from futile.path import Path +from . 
import check_call, STDOUT + +class DaemonController(Base): + __metaclass__ = ABCMeta + + def __init__(self, sleep = 5, stop_sleep = 3, *args, **kw): + super(DaemonController, self).__init__(*args, **kw) + self.__sleep = int(sleep) + self.__stop_sleep = int(stop_sleep) + + @abstractproperty + def is_running(self): + raise NotImplementedError() + + def start(self): + self._start() + sleep(self.__sleep) + + @abstractmethod + def _start(self): + raise NotImplementedError() + + def stop(self): + self._stop() + sleep(self.__stop_sleep) + + @abstractmethod + def _stop(self): + raise NotImplementedError() + + def __enter__(self): + self.start() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.stop() + +class DummyController(DaemonController): + def __init__(self, sleep = 0, stop_sleep = 0, *args, **kw): + super(DummyController).__init__(sleep = sleep, stop_sleep = stop_sleep, *args, **kw) + + def _start(self): + pass + _stop = _start + + @property + def is_running(self): + return False + +import os +import errno + +class CheckPIDFileController(DaemonController): + def __init__(self, pidfile, *args, **kw): + super(CheckPIDFileController, self).__init__(*args, **kw) + self.__pidfile = Path(pidfile) + + @property + def pidfile(self): + return self.__pidfile + + @property + def is_running(self): + if not self.pidfile.exists(): + return False + + if not self.pidfile.isfile(): + raise Exception("pidfile '%s' is not a file" % (self.pidfile, )) + + try: + pid = int(self.__pidfile.open().readline(16)) + except: + self.logger.exception("Error reading pidfile %s" % (self.pidfile)) + raise + + try: + os.kill(pid, 0) + return True + except OSError, e: + if e.errno == errno.ESRCH: + return False + raise + +class StartStopDaemonController(CheckPIDFileController): + def __init__(self, executable, fork = False, workingdir = None, pidfile = None, makepidfile = False, daemonargs = None, ssd = "/sbin/start-stop-daemon", ldpath = None, outfile = "/dev/null", *args, **kw): + if not pidfile: + pidfile = "/tmp/" + executable.replace("/", "_") + ".pid" + super(StartStopDaemonController, self).__init__(pidfile = pidfile, *args, **kw) + + self.__executable = unicode(executable) + self.__workingdir = workingdir and unicode(workingdir) or None + + if ldpath is not None: + if not isinstance(ldpath, (list, set, tuple, frozenset)): + ldpath = [ ldpath ] + ldpath = tuple(set(ldpath)) + self.__ldpath = ldpath + + self.__makepidfile = makepidfile + self.__daemonargs = daemonargs + self.__fork = fork + self.__ssd = ssd + self.__outfile = outfile + + def get_daemonargs(self): + return self.__daemonargs + def set_daemonargs(self, da): + self.__daemonargs = da + daemonargs = property(get_daemonargs, set_daemonargs) + + def __make_cmd(self, cmd, test): + cmd = [ self.__ssd, cmd, '-x', self.__executable, '-p', self.pidfile, '-o' ] + + if self.__workingdir: + cmd += [ '-d', self.__workingdir ] + + if test: + cmd.append('-t') + + env = None + if self.__ldpath: + env = dict(LD_LIBRARY_PATH = ':'.join(self.__ldpath)) + + return cmd, env + + def __check_cmd(self, cmd, env): + self.logger.debug("ssd env: " + str(env)) + + outfile = self.__outfile + if outfile: + outfile = Path(outfile).open("a") + + try: + check_call(cmd, stdout = outfile, stderr = STDOUT, close_fds = True, cwd = self.__workingdir, env = env) + finally: + if outfile is not None: + outfile.close() + + def _start(self): + cmd, env = self.__make_cmd("-S", False) + if self.__makepidfile: + cmd.append('-m') + + if self.__fork: + cmd.append('-b') + + if 
self.__daemonargs: + cmd += [ '--' ] + list(self.__daemonargs) + + self.__check_cmd(cmd, env) + + def _stop(self): + cmd, env = self.__make_cmd("-K", False) + self.__check_cmd(cmd, env) + diff --git a/futile/src/futile/tempfile/__init__.py b/futile/src/futile/tempfile/__init__.py new file mode 100644 index 0000000..3047f4b --- /dev/null +++ b/futile/src/futile/tempfile/__init__.py @@ -0,0 +1,38 @@ +from tempfile import mkdtemp as _mkdtemp +from shutil import rmtree +from .. import Base +from futile import noop + +class TempDir(Base): + delete_on_error = delete = True + + def __init__(self, suffix='', prefix='tmp', dir=None, delete = None, delete_on_error = None, *args, **kw): + super(TempDir, self).__init__(*args, **kw) + self.__name = _mkdtemp(suffix, prefix, dir) + if delete is not None: + self.delete = delete + if delete_on_error is not None: + self.delete_on_error = delete_on_error + + @property + def name(self): + return self.__name + + def rmtree(self): + rmtree(self.__name) + self.rmtree = noop + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.delete or (exc_type and self.delete_on_error): + self.rmtree() + + def __del__(self): + self.__exit__(None, None, None) + + def __str__(self): + return self.__name + +mkdtemp = TempDir diff --git a/futile/src/futile/threading/RWLock.py b/futile/src/futile/threading/RWLock.py new file mode 100644 index 0000000..ecd483b --- /dev/null +++ b/futile/src/futile/threading/RWLock.py @@ -0,0 +1,106 @@ +#! /usr/bin/env python +''' +Created on 01.04.2011 + +@author: kca +''' +#TODO: proper timeout handling +from __future__ import with_statement + +from threading import Lock, Event +from contextlib import contextmanager + +class Timeout(Exception): + pass + +class ReverseSemaphore(object): + def __init__(self, *args, **kw): + super(ReverseSemaphore, self).__init__(*args, **kw) + + self.counter = 0 + self.lock = Lock() + self.event = Event() + self.event.set() + pass + + def acquire(self): + with self.lock: + self.counter += 1 + self.event.clear() + pass + pass + + def release(self): + with self.lock: + self.counter -= 1 + if self.counter == 0: + self.event.set() + if self.counter < 0: + self.counter = 0 + pass + pass + pass + + def wait(self): + return self.event.wait() + pass + + def __enter__(self): + self.acquire() + pass + + def __exit__ (self, type, value, tb): + self.release() + pass + pass + + +class RWLock(object): + def __init__(self, *args, **kw): + super(RWLock, self).__init__(*args, **kw) + + self.write_lock = Lock() + self.read_lock = ReverseSemaphore() + self.write_event = Event() + self.write_event.set() + + @contextmanager + def read_transaction(self, timeout = None): + self.read_acquire(timeout = timeout) + try: + yield + finally: + self.read_release() + pass + pass + + @contextmanager + def write_transaction(self, timeout = None): + self.write_acquire(timeout = timeout) + try: + yield + finally: + self.write_release() + pass + pass + + def read_acquire(self, timeout = None): + self.write_event.wait(timeout = timeout) + if not self.write_event.is_set(): + raise Timeout() + self.read_lock.acquire() + return True + + def read_release(self): + self.read_lock.release() + pass + + def write_acquire(self, timeout = None): + self.write_lock.acquire() + self.write_event.clear() + self.read_lock.wait() + pass + + def write_release(self): + self.write_event.set() + self.write_lock.release() diff --git a/futile/src/futile/threading/__init__.py b/futile/src/futile/threading/__init__.py new file mode 
100644 index 0000000..3576376 --- /dev/null +++ b/futile/src/futile/threading/__init__.py @@ -0,0 +1,18 @@ +import sys + +try: + from threading import current_thread +except ImportError: + from threading import currentThread as current_thread + + +if sys.version_info < (2, 7): + from threading import _Event + class Event(_Event): + def wait(self, timeout = None): + super(Event, self).wait(timeout = timeout) + return self.is_set() +else: + from threading import Event + + \ No newline at end of file diff --git a/futile/src/futile/threading/__init__.pyc b/futile/src/futile/threading/__init__.pyc new file mode 100644 index 0000000..04cc0b5 Binary files /dev/null and b/futile/src/futile/threading/__init__.pyc differ diff --git a/futile/src/futile/threading/synchronized.py b/futile/src/futile/threading/synchronized.py new file mode 100644 index 0000000..a6b1cf4 --- /dev/null +++ b/futile/src/futile/threading/synchronized.py @@ -0,0 +1,28 @@ +''' +Created on 08.08.2011 + +@author: kca +''' + +from threading import Condition + +def synchronized(f): + done = Condition() + f.in_progress = False + + def sync(*args, **kw): + done.acquire() + if not f.in_progress: + f.in_progress = True + done.release() + try: + return f(*args, **kw) + finally: + f.in_progress = False + with done: + done.notify_all() + else: + done.wait() + assert(not f.in_progress) + done.release() + return sync diff --git a/futile/src/futile/traceback/__init__.py b/futile/src/futile/traceback/__init__.py new file mode 100644 index 0000000..7dccf20 --- /dev/null +++ b/futile/src/futile/traceback/__init__.py @@ -0,0 +1,19 @@ +import sys +from traceback import format_exception + +def get_traceback(self, exc_info=None): + return ''.join(format_exception(*(exc_info or sys.exc_info()))) + + +def current_stack(skip=0): + try: + 1 / 0 + except ZeroDivisionError: + f = sys.exc_info()[2].tb_frame + for _ in xrange(skip + 2): + f = f.f_back + lst = [] + while f is not None: + lst.append((f, f.f_lineno)) + f = f.f_back + return lst diff --git a/futile/src/futile/types/TypeManager.py b/futile/src/futile/types/TypeManager.py new file mode 100644 index 0000000..9b08d2a --- /dev/null +++ b/futile/src/futile/types/TypeManager.py @@ -0,0 +1,9 @@ +''' +Created on 01.09.2011 + +@author: kca +''' +from futile.types import AbstractTypeManager + +class TypeManager(AbstractTypeManager): + pass \ No newline at end of file diff --git a/futile/src/futile/types/__init__.py b/futile/src/futile/types/__init__.py new file mode 100644 index 0000000..fa863be --- /dev/null +++ b/futile/src/futile/types/__init__.py @@ -0,0 +1,52 @@ +''' +Created on 01.09.2011 + +@author: kca +''' + +import sys +from types import ModuleType + +from futile.collections import get_iterable +from ..logging import LoggerMixin + + +class ImmutableType(type): + def __call__(self, *args, **kw): + if args and isinstance(args[0], self): + return args[0] + return super(ImmutableType, self).__call__(*args, **kw) + +class TypeManagerType(LoggerMixin, type): + def __init__(self, *args, **kw): + super(TypeManagerType, self).__init__(*args, **kw) + modname = self.__module__ + "." + self.__name__ + if self.__module__ != __name__: + sys.modules[modname] = self + self.__module_name__ = modname + + +class AbstractTypeManager(LoggerMixin, ModuleType): + __metaclass__ = TypeManagerType + + def __init__(self, name = None, *args, **kw): + name = name or str(id(name)) + self.modulename = self.__module_name__ + "."
+ getattr(self, "__prefix__", self.__class__.__name__) + name + sys.modules[self.modulename] = self + + def create_type(self, name, base = (), dict = {}, metaclass = type): + try: + existing = getattr(self, name) + if not isinstance(existing, type): + raise ValueError(name) + return existing + except AttributeError: + pass + + base = get_iterable(base) + self.logger.debug("Creating %s %s(%s) with %s", metaclass.__name__, + name, base, dict) + dict["__module__"] = self.modulename + type = metaclass(name, base, dict) + setattr(self, name, type) + return type diff --git a/futile/src/futile/types/__init__.pyc b/futile/src/futile/types/__init__.pyc new file mode 100644 index 0000000..545b66c Binary files /dev/null and b/futile/src/futile/types/__init__.pyc differ diff --git a/ipes/CUL868IPE/MANIFEST.in b/ipes/CUL868IPE/MANIFEST.in new file mode 100644 index 0000000..8bb307b --- /dev/null +++ b/ipes/CUL868IPE/MANIFEST.in @@ -0,0 +1 @@ +include utils.py diff --git a/ipes/CUL868IPE/README.md b/ipes/CUL868IPE/README.md new file mode 100644 index 0000000..6caeec4 --- /dev/null +++ b/ipes/CUL868IPE/README.md @@ -0,0 +1,266 @@ +# Prerequisites + +## Hardware + +* OpenMTC Gateway: Raspberry Pi 3 incl. SD card with Raspbian (Jessie) and Power Plug +* OpenMTC Backend: Raspberry Pi 3 incl. SD card with Raspbian (Jessie) and Power Plug +* USB-Stick: [Busware CUL v3](http://busware.de/tiki-index.php?page=CUL) +* FS-20 sensor +* FS-20 Actuator: Power Plug + +## Software + +Both Raspberry Pis need [Docker](https://www.docker.com/) to be installed. + +``` +curl -sSL https://get.docker.com | sh +``` + +``` +sudo systemctl enable docker +sudo systemctl start docker +sudo usermod -aG docker +``` + +After that, reboot your Raspberry Pi and verify the installation with the following command: + +``` +docker ps +``` + +If an empty table is returned, the installation was successful. + +After that, make sure the following tools are installed: + +``` +sudo apt install git jq +``` + +# Raspberry Pi 1: Set Up the OpenMTC Backend + +Clone the OpenMTC repository to your Raspberry Pi and change into that directory. + +``` +git clone <repository-url> +cd <repository-name> +``` + +Create the Docker image for the OpenMTC backend: + +``` +./create-binary-docker -a arm backend +``` + +Start the Docker container: + +``` +docker run --name backend --rm -it -p 0.0.0.0:18000:18000 -e "ONEM2M_HTTP_TRANSPORT_PORT=18000" -e "ONEM2M_NOTIFICATION_DISABLED=false" openmtc/backend-arm -v +``` + +This should give you an output similar to this: + +``` +INFO:HTTPTransportPlugin:Starting plugin HTTPTransportPlugin +INFO:GEventServerRack:WSGIServer started on ('::', 18000, 0, 0) +INFO:NotificationHandler:Starting plugin NotificationHandler +INFO:openmtc_gevent.main:OpenMTC is running +``` + +# Raspberry Pi 2: Set Up the OpenMTC Gateway/IPE + +Clone the OpenMTC repository to your Raspberry Pi and change into that directory. + +```sh +git clone <repository-url> +cd <repository-name>
+``` + +Create the Docker image for the OpenMTC gateway and IPE: + +``` +./create-binary-docker -a arm gateway +./create-binary-docker -a arm cul868ipe +``` + +Start the Docker gateway container: + +``` +docker run --name gateway --rm -it -p 0.0.0.0:8000:8000\ + -e "ONEM2M_HTTP_TRANSPORT_PORT=8000"\ + -e "ONEM2M_NOTIFICATION_DISABLED=false"\ + -e "ONEM2M_REGISTRATION_DISABLED=false"\ + -e "ONEM2M_REMOTE_CSE_POA=http://:18000"\ + -e "EXTERNAL_IP="\ + openmtc/gateway-arm -v +``` + +This should give you an output similar to this: + +``` +NFO:GEventServerRack:WSGIServer started on ('::', 8000, 0, 0) +INFO:NotificationHandler:Starting plugin NotificationHandler +INFO:RegistrationHandler:Starting plugin RegistrationHandler +INFO:RegistrationHandler:registering /mn-cse-1 at /in-cse-1 +INFO:RemoteCSEController:Created resource of type 'remoteCSE' at onem2m/in-cse-1 +INFO:openmtc_gevent.main:Gateway is running +``` + +On your Raspberry Pi running the backend you should see something like this: + +``` +INFO:RemoteCSEController:Created resource of type 'remoteCSE' at onem2m/mn-cse-1 +::ffff:10.147.66.103 - - [2016-06-16 13:29:28] "POST /~/in-cse-1/onem2m HTTP/1.1" 201 431 0.036645 +::ffff:10.147.66.103 - - [2016-06-16 13:29:28] "GET /~/in-cse-1/onem2m HTTP/1.1" 200 503 0.007546 +``` + +Now read this [site](http://busware.de/tiki-index.php?page=CUL) to +configure your RF USB-Stick (Busware CUL v3). + +After that, attach the stick to your Raspberry Pi. To find out its device +name, run: + +```sh +dmesg | grep "tty" +``` + +This should give you an output like this: + +``` +[ 1.810324] 3f201000.uart: ttyAMA0 at MMIO 0x3f201000 (irq = 87, base_baud = 0) is a PL011 rev2 +[ 5.931064] cdc_acm 1-1.4:1.0: ttyACM0: USB ACM device +``` + +In this example "ttyACM0" would be the device name. Now start the IPE container: + +```sh +docker run --name cul868ipe --link gateway --rm -it \ + -e "EP=http://:8000" \ + --device=/dev/ttyACM0:/dev/ttyACM0 \ + openmtc/culgip-arm -v +``` + +The output should show something similar to: + +``` +Configuring M2M cul868gip...done +DEBUG:__main__:Trying config file location: /config.json +DEBUG:__main__:Trying config file location: /etc/openmtc/cul868gip/config.json +INFO:__main__:Reading configuration file /etc/openmtc/cul868gip/config.json. +INFO:CUL868Gip:Registering application as CUL868Gip. +INFO:CUL868Gip:Registration successful: onem2m/CUL868Gip. +INFO:CUL868Gip:Container created: onem2m/CUL868Gip/S300TH_1. +INFO:CUL868Gip:Container created: onem2m/CUL868Gip/S300TH_1/Temperature. +INFO:CUL868Gip:Container created: onem2m/CUL868Gip/S300TH_1/Humidity. +INFO:CUL868Gip:Container created: onem2m/CUL868Gip/S300TH_1/NGSI. +INFO:CUL868Gip:Container created: onem2m/CUL868Gip/FS20_ST3_16108_1. +INFO:CUL868Gip:Container created: onem2m/CUL868Gip/FS20_ST3_16108_1/Switch. +INFO:CUL868Gip:Container created: onem2m/CUL868Gip/FS20_ST3_16108_1/State. +``` + +The IPE (AE) registers itself at the gateway.
The gateway console shows: + +``` +INFO:AEController:Created resource of type 'AE' at onem2m/CUL868Gip +::ffff:172.17.0.3 - - [2016-06-16 13:55:57] "POST /onem2m HTTP/1.1" 201 447 0.011879 +::ffff:172.17.0.3 - - [2016-06-16 13:55:57] "GET /onem2m/CUL868Gip HTTP/1.1" 200 442 0.003415 +INFO:ContainerController:Created resource of type 'container' at onem2m/CUL868Gip/S300TH_1 +::ffff:172.17.0.3 - - [2016-06-16 13:55:57] "POST /onem2m/CUL868Gip HTTP/1.1" 201 437 0.043352 +INFO:ContainerController:Created resource of type 'container' at onem2m/CUL868Gip/S300TH_1/Temperature +::ffff:172.17.0.3 - - [2016-06-16 13:55:57] "POST /onem2m/CUL868Gip/S300TH_1 HTTP/1.1" 201 467 0.038131 +INFO:ContainerController:Created resource of type 'container' at onem2m/CUL868Gip/S300TH_1/Humidity +::ffff:172.17.0.3 - - [2016-06-16 13:55:57] "POST /onem2m/CUL868Gip/S300TH_1 HTTP/1.1" 201 461 0.040906 +... +``` + +# Retrieve Sensor Data + +Example: + +``` +curl http://:8000/onem2m/CUL868IPE/S300TH_1/Temperature/latest -s | jq -r '."m2m:cin".con' | base64 -d | jq -r . +``` + +# Configure Actuator + +Press and hold the button on the power plug until its light is flashing. Send the following command from a terminal session on the gateway to configure the power plug. The device code and house code are already preconfigured in the IPE. The power plug adopts the configuration when it receives the command. + +```sh +curl -H content-type:application/json -d '{"m2m:cin":{"con":"OFF", "cnf":"text/plain:0"}}' http://:8000/onem2m/CUL868IPE/FS20_ST3_16108_1/Switch +``` + +The flashing light of the power plug should stop. Afterwards, the power plug is +ready to use. Send commands to control the power plug: + +**ON** +```sh +curl -H content-type:application/json -d '{"m2m:cin":{"con":"ON", "cnf":"text/plain:0"}}' http://:8000/onem2m/CUL868IPE/FS20_ST3_16108_1/Switch +``` + +**OFF** +```sh +curl -H content-type:application/json -d '{"m2m:cin":{"con":"OFF", "cnf":"text/plain:0"}}' http://:8000/onem2m/CUL868IPE/FS20_ST3_16108_1/Switch +``` + +**TOGGLE** +```sh +curl -H content-type:application/json -d '{"m2m:cin":{"con":"TOGGLE", "cnf":"text/plain:0"}}' http://:8000/onem2m/CUL868IPE/FS20_ST3_16108_1/Switch +``` + +# Simulation Mode + +If you do not have any FS20 devices at hand but want to check whether the +OpenMTC setup is working, you can run the IPE Docker container in +simulation mode. + +``` +docker run --name cul868ipe --link gateway --rm -it \ + -e "EP=http://:8000" \ + -e "SIM=true" \ + openmtc/culgip-arm -v +``` + +After that, you should see something like this: + +**IPE** +``` +INFO:CUL868IPE:Registering application as CUL868IPE. +INFO:CUL868IPE:Registration successful: onem2m/CUL868IPE. +INFO:CUL868IPE:Container created: onem2m/CUL868IPE/FS20_ST3_16108_1. +INFO:CUL868IPE:Container created: onem2m/CUL868IPE/FS20_ST3_16108_1/Switch. +INFO:CUL868IPE:Container created: onem2m/CUL868IPE/FS20_sender_21111111-1321. +INFO:CUL868IPE:Container created: +onem2m/CUL868IPE/FS20_sender_21111111-1321/Command. +``` + +**Gateway** + +``` +NFO:AEController:Created resource of type 'AE' at onem2m/CUL868IPE +::ffff:172.17.0.4 - - [2017-09-13 15:32:05] "POST /onem2m HTTP/1.1" 201 524 0.011283 +::ffff:172.17.0.4 - - [2017-09-13 15:32:05] "GET /onem2m/CUL868IPE HTTP/1.1" 200 519 0.004515 +WARNING:ContainerController:expirationTime is too low.
Adjusting +INFO:ContainerController:Created resource of type 'container' at onem2m/CUL868IPE/FS20_ST3_16108_1 +::ffff:172.17.0.4 - - [2017-09-13 15:32:05] "POST /onem2m/CUL868IPE HTTP/1.1" 201 583 0.050967 +WARNING:ContainerController:expirationTime is too low. Adjusting +INFO:ContainerController:Created resource of type 'container' at onem2m/CUL868IPE/FS20_ST3_16108_1/Switch +::ffff:172.17.0.4 - - [2017-09-13 15:32:05] "POST /onem2m/CUL868IPE/FS20_ST3_16108_1 HTTP/1.1" 201 553 0.052137 +::ffff:172.17.0.4 - - [2017-09-13 15:32:06] "GET /onem2m/CUL868IPE/FS20_ST3_16108_1/Switch HTTP/1.1" 200 548 0.006070 +INFO:SubscriptionController:Created resource of type 'subscription' at onem2m/CUL868IPE/FS20_ST3_16108_1/Switch/subscription-pFoSGnSNxZNXCWnW +::ffff:172.17.0.4 - - [2017-09-13 15:32:06] "POST /onem2m/CUL868IPE/FS20_ST3_16108_1/Switch HTTP/1.1" 201 509 0.055830 +WARNING:ContainerController:expirationTime is too low. Adjusting +INFO:ContainerController:Created resource of type 'container' at onem2m/CUL868IPE/FS20_sender_21111111-1321 +::ffff:172.17.0.4 - - [2017-09-13 15:32:06] "POST /onem2m/CUL868IPE HTTP/1.1" 201 577 0.053110 +WARNING:ContainerController:expirationTime is too low. Adjusting +INFO:ContainerController:Created resource of type 'container' at onem2m/CUL868IPE/FS20_sender_21111111-1321/Command +::ffff:172.17.0.4 - - [2017-09-13 15:32:06] "POST /onem2m/CUL868IPE/FS20_sender_21111111-1321 HTTP/1.1" 201 551 0.052217 +WARNING:ContainerController:expirationTime is too low. Adjusting +INFO:ContainerController:Created resource of type 'container' at onem2m/CUL868IPE/FS20_sender_21111111-1322 +::ffff:172.17.0.4 - - [2017-09-13 15:33:46] "POST /onem2m/CUL868IPE HTTP/1.1" 201 577 0.062213 +WARNING:ContainerController:expirationTime is too low. Adjusting +INFO:ContainerController:Created resource of type 'container' at onem2m/CUL868IPE/FS20_sender_21111111-1322/Command +::ffff:172.17.0.4 - - [2017-09-13 15:33:46] "POST /onem2m/CUL868IPE/FS20_sender_21111111-1322 HTTP/1.1" 201 551 0.062236 +``` + +# Additional Information + +More information can be found on the [official webpage](http://www.openmtc.org/dev_center.html).
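The curl pipelines in the README above can also be scripted. Below is a minimal Python sketch, not part of this repository, that mirrors the two examples: it assumes the third-party `requests` library is installed, that the `GATEWAY` placeholder is replaced with your gateway's address, that the resource paths match the curl examples, and that the decoded `con` payload is JSON, as the `base64 -d | jq` pipeline implies. The helper names `get_latest_temperature` and `set_switch` are illustrative only.

```python
import base64
import json

import requests  # assumption: not a dependency of this repository

GATEWAY = "http://<gateway-ip>:8000"  # placeholder, adjust to your setup


def get_latest_temperature():
    # Mirrors: curl .../S300TH_1/Temperature/latest | jq '."m2m:cin".con' | base64 -d | jq
    url = GATEWAY + "/onem2m/CUL868IPE/S300TH_1/Temperature/latest"
    cin = requests.get(url).json()["m2m:cin"]
    return json.loads(base64.b64decode(cin["con"]))


def set_switch(state):
    # Mirrors the ON/OFF/TOGGLE curl commands against the Switch container.
    url = GATEWAY + "/onem2m/CUL868IPE/FS20_ST3_16108_1/Switch"
    body = {"m2m:cin": {"con": state, "cnf": "text/plain:0"}}
    return requests.post(url, json=body).status_code


if __name__ == "__main__":
    print(get_latest_temperature())
    print(set_switch("TOGGLE"))
```

The resource paths are the ones used in the curl examples; adjust them if your containers are named differently, for example in simulation mode.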
diff --git a/ipes/CUL868IPE/bin/openmtc-cul-868-ipe b/ipes/CUL868IPE/bin/openmtc-cul-868-ipe new file mode 100755 index 0000000..f92e5f3 --- /dev/null +++ b/ipes/CUL868IPE/bin/openmtc-cul-868-ipe @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +exec python -m cul868ipe $@ diff --git a/ipes/CUL868IPE/config.json b/ipes/CUL868IPE/config.json new file mode 100644 index 0000000..d620c94 --- /dev/null +++ b/ipes/CUL868IPE/config.json @@ -0,0 +1,33 @@ +{ + "name": "CUL868IPE", + "ep": "http://localhost:8000", + "cse_base": "onem2m", + "poas": [ + "http://auto:28728" + ], + "originator_pre": "//openmtc.org/mn-cse-1", + "ssl_certs": { + "cert_file": null, + "key_file": null, + "ca_certs": null + }, + "logging": { + "level": "ERROR", + "file": null + }, + "sim": true, + "sim_period": 3, + "cul_device": "/dev/ttyACM0", + "device_mappings": { + "ZBS12345678": "Fenster", + "ZBS12341234": "Dach", + "FS20_ST3_16108_1": "Wohnzimmer" + }, + "devices": [ + "s300th:1", + "fs20_motion:21111111-1321", + "fs20_brightness:21111111-3322", + "fs20:16108-1", + "fs20:16108-0" + ] +} diff --git a/ipes/CUL868IPE/docker/configure-cul868ipe-and-start b/ipes/CUL868IPE/docker/configure-cul868ipe-and-start new file mode 100755 index 0000000..c1557b0 --- /dev/null +++ b/ipes/CUL868IPE/docker/configure-cul868ipe-and-start @@ -0,0 +1,67 @@ +#!/usr/bin/env bash + +CONFIG_FILE="/etc/openmtc/cul868ipe/config.json" + +NAME=${NAME-"CUL868IPE"} +EP=${EP-"http://localhost:8000"} +CSE_BASE=${CSE_BASE-"onem2m"} +POAS=${POAS-'["http://auto:28728"]'} +ORIGINATOR_PRE=${ORIGINATOR_PRE-"//openmtc.org/mn-cse-1"} +SSL_CRT=${SSL_CRT-"/etc/openmtc/certs/cul868ipe.cert.pem"} +SSL_KEY=${SSL_KEY-"/etc/openmtc/certs/cul868ipe.key.pem"} +SSL_CA=${SSL_CA-"/etc/openmtc/certs/ca-chain.cert.pem"} +SIM=${SIM-false} +SIM_PERIOD=${SIM_PERIOD-300} +CUL_DEVICE=${CUL_DEVICE-"/dev/ttyACM0"} +DEVICES=${DEVICES-'["s300th:1", "fs20:16108-1"]'} +DEVICE_MAPPINGS=${DEVICE_MAPPINGS-'{}'} + +# defaults logging +LOGGING_FILE=${LOGGING_FILE-"/var/log/openmtc/cul868ipe.log"} +LOGGING_LEVEL=${LOGGING_LEVEL-"ERROR"} + +# ensure correct level +case ${LOGGING_LEVEL} in + FATAL|ERROR|WARN|INFO|DEBUG) + ;; + *) + LOGGING_LEVEL="ERROR" + ;; +esac + +# local ip +LOCAL_IP=$(ip r get 8.8.8.8 | awk 'NR==1 {print $NF}') + +# set hostname +HOST_NAME=${EXTERNAL_IP-${LOCAL_IP}} + +# Configuration of the service. +CONFIG_TEMP=${CONFIG_FILE}".tmp" +echo -n "Configuring M2M cul868ipe..." +JQ_STRING='.' 
+ +# basics +JQ_STRING=${JQ_STRING}' | + .name = "'${NAME}'" | + .ep = "'${EP}'" | + .device_mappings = '''${DEVICE_MAPPINGS}''' | + .cse_base = "'${CSE_BASE}'" | + .poas = '${POAS}' | + .originator_pre = "'${ORIGINATOR_PRE}'" | + .ssl_certs.cert_file = "'${SSL_CRT}'" | + .ssl_certs.key_file = "'${SSL_KEY}'" | + .ssl_certs.ca_certs = "'${SSL_CA}'" | + .sim = '${SIM}' | + .sim_period = "'${SIM_PERIOD}'" | + .cul_device = "'${CUL_DEVICE}'" | + .devices = '${DEVICES}' | + .logging.file |= "'${LOGGING_FILE}'" | + .logging.level |= "'${LOGGING_LEVEL}'" +' + +cat ${CONFIG_FILE} | jq -M "${JQ_STRING}"> ${CONFIG_TEMP} +mv ${CONFIG_TEMP} ${CONFIG_FILE} + +echo "done" + +exec python -m cul868ipe $@ diff --git a/ipes/CUL868IPE/docker/cul868ipe-amd64 b/ipes/CUL868IPE/docker/cul868ipe-amd64 new file mode 100644 index 0000000..0ba70ff --- /dev/null +++ b/ipes/CUL868IPE/docker/cul868ipe-amd64 @@ -0,0 +1,30 @@ +############################################################ +# Dockerfile to run openmtc cul868ipe binary +############################################################ + +# Set the base image to use openmtc/sdk +FROM openmtc/sdk-amd64:latest + +ENV MOD_NAME=cul868ipe + +# Set the file maintainer +MAINTAINER rst + +# install openmtc dependencies +COPY tmp/$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-cul868ipe +COPY tmp/openmtc-$MOD_NAME.tar.gz /tmp/openmtc-$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-$MOD_NAME.tar.gz -C / \ + --owner root --group root --no-same-owner --no-overwrite-dir \ + --transform 's/json\.dist/json/' --show-transformed + +RUN mkdir -p /var/log/openmtc + +# add change config +COPY configure-$MOD_NAME-and-start /usr/local/bin/configure-and-start + +# entry point +ENTRYPOINT ["/usr/local/bin/configure-and-start"] +CMD [""] diff --git a/ipes/CUL868IPE/docker/cul868ipe-arm b/ipes/CUL868IPE/docker/cul868ipe-arm new file mode 100644 index 0000000..088b1fa --- /dev/null +++ b/ipes/CUL868IPE/docker/cul868ipe-arm @@ -0,0 +1,30 @@ +############################################################ +# Dockerfile to run openmtc cul868ipe binary +############################################################ + +# Set the base image to use openmtc/sdk +FROM openmtc/sdk-arm:latest + +ENV MOD_NAME=cul868ipe + +# Set the file maintainer +MAINTAINER rst + +# install openmtc dependencies +COPY tmp/$MOD_NAME-dependencies.txt /tmp/requirements.txt +RUN pip install --upgrade --requirement /tmp/requirements.txt + +# install openmtc-cul868ipe +COPY tmp/openmtc-$MOD_NAME.tar.gz /tmp/openmtc-$MOD_NAME.tar.gz +RUN tar xzf /tmp/openmtc-$MOD_NAME.tar.gz -C / \ + --owner root --group root --no-same-owner --no-overwrite-dir \ + --transform 's/json\.dist/json/' --show-transformed + +RUN mkdir -p /var/log/openmtc + +# add change config +COPY configure-$MOD_NAME-and-start /usr/local/bin/configure-and-start + +# entry point +ENTRYPOINT ["/usr/local/bin/configure-and-start"] +CMD [""] diff --git a/ipes/CUL868IPE/etc/conf/config.json.dist b/ipes/CUL868IPE/etc/conf/config.json.dist new file mode 100644 index 0000000..0bd4e85 --- /dev/null +++ b/ipes/CUL868IPE/etc/conf/config.json.dist @@ -0,0 +1,25 @@ +{ + "name": "CUL868IPE", + "ep": "http://localhost:8000", + "cse_base": "onem2m", + "poas": [ + "http://auto:28728" + ], + "originator_pre": "//openmtc.org/mn-cse-1", + "ssl_certs": { + "cert_file": "/etc/openmtc/certs/cul868ipe.cert.pem", + "key_file": "/etc/openmtc/certs/cul868ipe.key.pem", + "ca_certs": 
"/etc/openmtc/certs/ca-chain.cert.pem" + }, + "logging": { + "level": "INFO", + "file": "/var/log/openmtc/cul868ipe.log" + }, + "sim": false, + "sim_period": 3, + "cul_device": "/dev/ttyACM0", + "devices": [ + "fs20:16108-1", + "fs20:16108-0" + ] +} diff --git a/ipes/CUL868IPE/etc/systemd/system/openmtc-cul868ipe.service b/ipes/CUL868IPE/etc/systemd/system/openmtc-cul868ipe.service new file mode 100644 index 0000000..662adca --- /dev/null +++ b/ipes/CUL868IPE/etc/systemd/system/openmtc-cul868ipe.service @@ -0,0 +1,10 @@ +[Unit] +Description=OpenMTC CUL868IPE +After=network.target +Wants=ntp.service + +[Service] +ExecStart=/usr/local/bin/cul-868-ipe + +[Install] +WantedBy=multi-user.target diff --git a/ipes/CUL868IPE/firmware/CUL_V3.hex b/ipes/CUL868IPE/firmware/CUL_V3.hex new file mode 100644 index 0000000..03f7f80 --- /dev/null +++ b/ipes/CUL868IPE/firmware/CUL_V3.hex @@ -0,0 +1,1453 @@ +:1000000044C200005EC200005CC200000C945A0FA3 +:1000100058C2000056C2000054C2000052C2000084 +:1000200050C200004EC200000C94EE264AC20000EE +:1000300048C2000046C2000044C2000042C20000A4 +:1000400040C200000C949B103CC200003AC2000069 +:1000500038C200005FC5000034C2000032C2000098 +:1000600030C200002EC200002CC200002AC20000D4 +:1000700028C2000026C2000024C2000022C20000E4 +:1000800020C200001EC200001CC200001AC20000F4 +:1000900018C2000016C2000014C2000012C2000004 +:1000A00010C200000EC200000CC20000425A0862DA +:1000B0000A28433B03461F0C699E1B4155175AD023 +:1000C0001945D01A47650A4D210B4B9D0A55360A32 +:1000D00059D01C52090854E02056E10757D108585E +:1000E000861365900866AE226D9D236C8108742985 +:1000F0000675191578630400000012011001020052 +:100100000008EB034B20000001020001090243003C +:1001100002010080320904000001020201000524EE +:100120000001100524010301042402060524060031 +:1001300001070582030800FF09040100020A00000C +:100140000007050402400000070583024000000488 +:1001500003090416036200750073007700610072E2 +:100160000065002E006400650000002A034300556E +:10017000004C0038003600380000002A03430055C8 +:10018000004C00340033003300000000030F1E2633 +:1001900027000000030F1E2536386700030F256770 +:1001A00040605000030F275188838100030F2750C0 +:1001B000C8C3C20027000000000000006700000064 +:1001C000000000005000000000000000810000005E +:1001D00000000000C20000000000000D2E2D07D31B +:1001E000913D04320000060021656A55E43023B9D0 +:1001F00000070018146C070090876BF85611E92A65 +:10020000001F4100072E050DD391FF080500000CCB +:100210000021656A2D3B1322F862073F181D1CC799 +:1002200000B0876BF8B611EA2A001F4100202F208A +:1002300000203D2000504C4C310D0A00504C4C30F9 +:100240000D0A005620312E36312043554C383638B1 +:10025000005620312E36312043554C343333004381 +:10026000554C5F563300202F2000203D20004C4F7E +:1002700056460D0A00424F56460D0A000007022E50 +:10028000030D04E905CA070C0B060D210E650F6A64 +:1002900010C81193120315341733181819161B437D +:1002A00021562500261129592C812D353EC30B08D6 +:1002B000105B11F81547191D1A1C1BC71C001DB235 +:1002C00021B623EA0007024604C605260B0610C81D +:1002D0001193120313221534173F182819161B43C4 +:1002E0002156250026110D210E650F6A070C1607F1 +:1002F00020F81E871F6B29592C812D353EC3FF0026 +:100300004C454E4552520D0A004C4F56460D0A00C0 +:100310004C454E4552520D0A0000070246030D049B +:100320009A057D0642070C0B060D210E650F6A101B +:10033000C81193120315341700181819161B431C03 +:100340006825002611FF00000000000000000000EA +:1003500000000000000D2E2D07D3913D0432000057 +:10036000060010B07155E43023B900070018146C72 +:10037000070090876BF85611E92A001F41000D2EE7 +:100380002D07D3913D04320000060010ABA5550A9D +:100390003023B900070018146C070090876BF856DB 
+:1003A00011E92A001F410000535300547D7DFFFFD7 +:1003B0004B4B7E7E00454F420D0A004E2F41004EB2 +:1003C0002F410020697320756E6B6E6F776E292048 +:1003D000557365206F6E65206F66003F202800040E +:1003E0007605961700000602000B080C000D210E82 +:1003F000650F6A106A114A1206132214F80A0015D2 +:100400004721B622101818192E1A6D1B041C091D3D +:10041000B223EA242A2500261F29592C812D352EA6 +:100420000907000800090006FF3EC2FF0454053D0D +:100430001700000602000B080C000D210E6B0FD0F8 +:10044000105C11041206132214F80A00154421B698 +:1004500022101818192E1ABF1B431C091DB523EAB8 +:10046000242A2500261F29592C812D352E09070005 +:100470000800090006FF3EC2FF4F464600544D4F9C +:10048000444500534D4F4445000011241FBECFEF9B +:10049000DAE0DEBFCDBF11E0A0E0B1E0E6E2FAE5D0 +:1004A00002C005900D92AE38B107D9F718E0AEE85A +:1004B000B1E001C01D92A63DB107E1F70CD00C944C +:1004C000112D9ECDF89481E085BF82E085BFE0E0EC +:1004D000F8E3099508952FE088E190E00FB6F894CD +:1004E000A895809360000FBE2093600080E090E0AC +:1004F00020E80FB6F89420936100809361000FBE4E +:100500002E9A2D9A04B603FE0AC083E390E069D5C3 +:10051000882329F083E390E060E0C7D5D3DF89EF3B +:1005200087BD84E085BD82E084BD80936E0010921B +:1005300080008AE08093810084B7877F84BF6E9AB1 +:100540000E9469231BD60E94D5260E94BF200E94CC +:10055000220C86E293E290938E0380938D030E9497 +:10056000BB1589E08093930176980E949E270DD257 +:100570000E94091111D30E9449220E94A0130E94D7 +:10058000CE150E9457190E94181A0E949A28EDCF82 +:10059000DA01292F33272230310559F02330310574 +:1005A00069F02130310561F54AEF50E022E130E099 +:1005B0002BC04CE051E023E430E026C09070813045 +:1005C000910551F08130910518F00297C9F40BC0E4 +:1005D000EFE4F1E0849103C0E3E5F1E08491282F9A +:1005E00030E0AF0111C01E9B03C04BE651E002C0DA +:1005F0004BE751E0EBE6F1E0E4912E2F30E004C050 +:1006000040E050E020E030E04D935C93C901089554 +:100610008EBD0DB407FEFDCF8EB508952898806C71 +:100620008EBD0DB407FEFDCF8EB51EBC0DB407FE0A +:10063000FDCF8EB5289A0895289880648EBD0DB49C +:1006400007FEFDCF8EB56EBD0DB407FEFDCF8EB596 +:10065000289A089528988EBD0DB407FEFDCF8EB55B +:10066000289A089586E3F6DF10928F0310923C02D9 +:1006700010923E020895A1E0B0E0E1E4F3E00C94B2 +:10068000CD2B0196BE016F5F7F4F41E036D4892BA1 +:1006900041F18981893989F400E010E0802FBEDFC3 +:1006A000EBD3C80187709070079709F493D30F5F5D +:1006B0001F4F0033110591F714C0B0DF182F83E4EA +:1006C00054D38981D9D381E392E088D3812FD4D3C5 +:1006D0008DE292E083D3812F90E062E040E2D0D3BC +:1006E00079D32196E4E00C94E92B1F931FEF05C00A +:1006F0008AE090E0BBD2112331F084E3ABDF1150EC +:1007000080778031A9F7EA9A1F9108951F93EA989C +:1007100086E3A0DF1FEF05C08AE090E0A7D2112397 +:1007200031F085E397DF115080778032A9F71F9170 +:100730000895EF92FF920F93CF93DF93082F8A30A3 +:1007400008F008E0C02FD0E083E0CC0FDD1F8A95D1 +:10075000E1F7C557DE4F8BE290E07C010894E11C85 +:10076000F11CFE016491A1D42196C7018333910548 +:10077000A1F70550053010F067E101C061E184E2A6 +:1007800090E093D4DF91CF910F91FF90EF90089577 +:10079000CF93DF9382E090E0EC012196FE01E852D6 +:1007A000FE4FE4916E2F81D4CB32D10511F0CE01F2 +:1007B000F3CF87E390E0EC012196FE01E453FE4F76 +:1007C000E4916E2F72D4C036D10511F0CE01F3CF73 +:1007D0001E9918C08FE090E060E167D480E190E05E +:1007E00060EB63D481E190E061E75FD484E490E062 +:1007F00060E15BD485E490E060EB57D486E490E060 +:1008000061E753D488E095DFDF91CF9108950F938E +:100810001F93CF93DF938C0110923E02EA98209AA7 +:10082000289A8EE190E022D228988EE190E01ED2A4 +:10083000289A8DE290E01AD280E30CDF84E690E003 +:1008400015D2289880E48EBD0DB407FEFDCF8EB57D +:10085000C0E0D0E0C8018C0F9D1FC3D38EBD0DB486 +:1008600007FEFDCF8EB52196C932D10599F7289A9A +:1008700028988EE78EBD0DB407FEFDCF8EB58BE2B6 +:1008800090E0EC012196ADD38EBD0DB407FEFDCFF7 
+:100890008EB5C333D10511F0CE01F3CF289A83E38F +:1008A000D9DE81E0E8D1DF91CF911F910F910895BA +:1008B00082E090E0ACDF81E080938F0310923C02F5 +:1008C00010923E020895A1E0B0E0E9E6F4E00C9455 +:1008D000CF2B22E029830196BE016F5F7F4F41E05D +:1008E0000CD3898126DF82E090E091DF2196E2E05F +:1008F0000C94EB2BFF920F931F93F82E062F1FEFF4 +:1009000085E38CDE8F1549F0802F90E0AFD1112365 +:1009100011F481E003C01150F3CF80E01F910F91DB +:10092000FF90089585E27ADE8F33F1F486E392DE5C +:1009300080918E01882321F48CE392E04FD202C093 +:100940008DE388DE83E386DE81E064E0D3DF85E249 +:1009500065DE8F3349F480918E01882319F485E395 +:1009600092E03CD281E0089580E0089585E356DE70 +:100970008D3051F485E252DE8F3331F4D3DF84E3DE +:1009800069DE80778031D9F70895809152058430EF +:1009900009F069C08093E90080918F01882351F5A7 +:1009A0008091E80085FD06C025C06091F1008BECC8 +:1009B00094E045D48091F2009091F300892BA9F73F +:1009C0008091E800982F9B7F9093E8008F77809329 +:1009D000E80081E080938F0185EC94E090938C0394 +:1009E00080938B03E0918D03F0918E0381E0099554 +:1009F00010928F0183E08093E90080914A0488235C +:100A000091F18091E80085FF2EC0F89405C088E43C +:100A100094E02DD48093F10080914A04882339F02A +:100A20008091F2009091F3008034910580F37894E6 +:100A30008091F2009091F3002091E800322F3E7FE8 +:100A40003093E8002F772093E8008034910559F423 +:100A50000E9476258091E800982F9E7F9093E80071 +:100A60008F778093E8000895909153058091540505 +:100A7000813229F0823261F1803299F512C0913AC7 +:100A800081F58091E800877F8093E80080E091E025 +:100A900067E070E00E94A1258091E800982F9B7F7D +:100AA00011C09132F1F48091E800877F8093E800D3 +:100AB00080E091E067E070E00E9443268091E800CA +:100AC000982F9E7F9093E8008F778093E800089599 +:100AD000913239F48091E800877F8093E8000C948C +:100AE0004D25089582E061EC42E00E94232583E0D9 +:100AF00061E842E30E94232584E060E842E30E942B +:100B0000232510920001109201011092020110920F +:100B1000030108951F920F920FB60F9211248F9325 +:100B20009F93AF93BF938091910390919203A09173 +:100B30009303B09194030196A11DB11D809391037D +:100B400090939203A0939303B093940380919003A6 +:100B50008F5F80939003809190038D3710F01092F7 +:100B600090038091330490913404009729F0019709 +:100B70009093340480933304809146048F3F19F09E +:100B8000815080934604BF91AF919F918F910F90B8 +:100B90000FBE0F901F901895809191039091920332 +:100BA000A0919303B091940320919101281709F427 +:100BB0004FC08091910390919203A0919303B091C3 +:100BC000940380939101A89580913304909134040B +:100BD000892B11F40E945A2080914604882311F435 +:100BE0000E94412080912304882339F08150809312 +:100BF0002304882311F40E94FD148091900388231C +:100C000039F58091070181FF04C08EB190E48927F6 +:100C10008EB9809195039091960323E084389207D2 +:100C200028F4019690939603809395038091900108 +:100C30008F5F809390018C3359F4109290018091D2 +:100C400030048F5F809330048C3310F010923004A6 +:100C500008958091940310D1809193030DD18091D8 +:100C600092030AD18091910307D1B4C0880F991FD4 +:100C70000197F1F7089520ED37E004C0F9013197AD +:100C8000F1F781508823D1F70895CF93DF93AC011A +:100C9000DC0113968C911397843509F063C09C9105 +:100CA0002DE001C02F5F291730F4FA01E20FF11D8A +:100CB00083818B33B9F730E0E92FF0E0CF010F9754 +:100CC0002817390709F04EC03E979A012E0F3F1F93 +:100CD0003E96D90113968C91843509F043C03B9719 +:100CE000EA01CE0FDF1F3B96DA0114969C91149710 +:100CF00089819817B9F515969C9115978A8198174F +:100D000089F516969C9116978B81981759F5179629 +:100D10009C918C81981731F57197E40FF51F8D81A7 +:100D20008383F90131968E81D90111968C93DF016D +:100D3000119689858183FD0131968A8511968C9360 +:100D4000DF0111968B8581838C8511968C93119789 +:100D50008D8512968C93FA0180818A5080838281DE +:100D60008A508283DF91CF9108951F93182F80912D +:100D70005205843081F48091930180FF0CC08091F2 
+:100D80004A0487FF01C001DE88E494E0612F57D256 +:100D90001A3009F4FADD80912E048823C9F080917D +:100DA000930183FF15C01A3099F01D3011F46BE3E5 +:100DB00001C0612F8EE993E042D21D3019F48EE913 +:100DC00093E063DF83E08093230482E0809329042F +:100DD0001F9108958DE0C9DF8AE0C7CFCF93DF93DD +:100DE000EC0102C0C2DF2196FE0184918823D1F775 +:100DF000DF91CF910895CF93DF93EC0102C021964C +:100E0000B4DF88818823D9F7DF91CF910895A5E0D9 +:100E1000B0E0EDE0F7E00C94CF2B1D8234E0DE0172 +:100E20001196282F2F7031502A3010F4205D01C008 +:100E3000295CFD01E30FF11D208324E09695879541 +:100E40002A95E1F76150009761F706C03150FC0127 +:100E5000E30FF11D408302C0CE010196615067FD92 +:100E600002C0332399F7CE010196830F911DC3DF92 +:100E70002596E2E00C94EB2B90E062E040E3C7CFD4 +:100E8000A6E0B0E0E6E4F7E00C94CB2B9C01162F33 +:100E9000042F1E8245E07E010894E11CF11C4150A4 +:100EA000F701E40FF11DC9016AE070E00E94982B80 +:100EB000805D8083C9016AE070E00E94982B9B01ED +:100EC00011506115710549F0442349F706C041509E +:100ED000FC01E40FF11D008302C0CE010196115008 +:100EE00017FD02C0442399F7CE010196840F911D8E +:100EF00082DF2696E6E00C94E72BDC01FB0123C0A1 +:100F0000822F80538A3010F050E002C0382B51E01D +:100F1000822F8154863020F42753322B295C51E0F4 +:100F2000822F8156863018F42755322B07C055235F +:100F300029F4203271F02A3361F00EC0992331F088 +:100F40003193415049F030E090E003C03295307F5A +:100F500091E02D912223A1F6CF01861B970B0895D6 +:100F6000FC01DB0120E030E014C0842F80538A3084 +:100F700080F4C90163E0880F991F6A95E1F7220F99 +:100F8000331F820F931F242F30E020533040280F4F +:100F9000391F4191442349F72D933C930895FB0158 +:100FA000982F92959F709A3010F4905D01C0995CD3 +:100FB00090838F708A3010F4805D01C0895C8183DA +:100FC0000895FC018181883419F48FE592E007C00F +:100FD0001E9903C081E592E002C083E492E0FEDE48 +:100FE000F9CE0C94FB2C08951F93CF93DF93EC0163 +:100FF000162F09C01150CE01F4DF3EDF112311F08E +:101000008AE3B3DE21961123A9F7DF91CF911F91D7 +:101010000895EF92FF921F93DF93CF9300D0CDB747 +:10102000DEB7FC0181818D3421F48AEF93E066E024 +:1010300005C0803529F48AED93E060E1D5DF2CC04E +:101040001A821982CF010196BE016F5F7F4F42E085 +:1010500054DF2981823041F4922F80E02A81E22EF0 +:10106000FF24E82AF92A02C0E22EFF24C701B9DFD3 +:10107000182F82E57ADEC70164E040E3C8DE8AE625 +:1010800092E0ACDE812FF8DE86E692E0A7DE812FCB +:1010900090E062E040E2F4DE9DDE0F900F90CF9191 +:1010A000DF911F91FF90EF9008950E94032DF99911 +:1010B000FECF0895A1E0B0E0E0E6F8E00C94CF2B7D +:1010C0001982009731F00196BE016F5F7F4F41E0BA +:1010D00014DF8981882321F083E390E061E0E5DF7C +:1010E0000E94C32610926E00F89428E088E190E0F8 +:1010F0000FB6F894A895809360000FBE209360000F +:10110000FFCF019667E071E041E0F7DE60910701F3 +:1011100060FF02C0769A01C0769884E390E0C5CF64 +:101120000F931F938C0180E090E061E0BEDF81E0CF +:1011300090E06DE3BADF2CDB80E690E060E0B5DFA5 +:1011400081E690E060E0B1DF83E390E060E0ADDF56 +:1011500084E390E062E0A9DF80E690E060E0A5DF54 +:1011600081E690E060E0A1DFF8018181883719F025 +:1011700080E090E09FDF1F910F91089580E090E064 +:1011800030DF813029F481E090E02BDF8D3319F0DE +:1011900080E090E0C5DF84E390E023DF80930701E7 +:1011A0000895A6E0B0E0E7EDF8E00C94C92B6C01DF +:1011B0000196BE016F5F7F4F43E09FDE082F8230B4 +:1011C000A8F12981823019F4E22EFF2407C0922F62 +:1011D00080E02A81E22EFF24E82AF92A10E0FE01AD +:1011E000E00FF11FC701608160DF000F111F0F5F6B +:1011F0001F4F0C0D1D1D0894E11CF11C6E0108947D +:10120000C11CD11C0FC0C801B60141E076DE892B9C +:1012100069F0C701698149DF0E5F1F4F82E090E0EE +:10122000E80EF91EF8018081882369F72696E8E028 +:101230000C94E52BFC011282108211820895DC01CE +:101240009FB7F89412968C91129787FD0EC08F5F0E +:1012500012968C9312978C91FD01E80FF11D638318 +:101260008F5F8C93803809F41C929FBF0895DC0136 
+:101270002FB7F89412968C911297882319F42FBFE8 +:1012800090E014C0815012968C93129711968C9115 +:101290001197FD01E80FF11D93818F5F11968C93DB +:1012A0001197803811F411961C922FBF892F089541 +:1012B000CF93DF93DB01EA01322F4881882369F065 +:1012C000FB01E40FF11D81E090E002C0880F991F3F +:1012D0002A95E2F72081282B2083332319F0832FCE +:1012E000815009C0842F8F5F8C3008F48883A80F49 +:1012F000B11D1C9287E090E0DF91CF9108951F937C +:10130000182F5B9A882319F08091960102C0809172 +:10131000940190E034E0880F991F3A95E1F7A6DC3C +:101320005B98112319F08091970102C0809195017B +:1013300090E024E0880F991F2A95E1F797DC1F9130 +:101340000895A0E0B0E0E7EAF9E00C94C12B4C016D +:10135000562ED42E122FC02E4E2C20919503309154 +:10136000960382E0D80ED09E802D11249EEFD90ED8 +:10137000992787FD90952817390720F48EE692E08B +:101380002DDD6FC0281B390B3093960320939503F6 +:10139000769A80913E02882311F4EE2408C01092C0 +:1013A0003E0288E091E00E948613EE24E39480914F +:1013B0008F03882309F47CDAA9D941E0642E712CCB +:1013C000A12EBB2410E003C080E099DF1F5F15153C +:1013D000D9F7552011F081E092DF8401FF2415C078 +:1013E000C7E0D0E093010C2E02C0220F331F0A94F5 +:1013F000E2F7F8018081822382DF2197FFEFCF3F60 +:10140000DF0781F7F3940F5F1F4FFD1449F7C7E023 +:10141000D0E00D2D112707FD1095080D191D0CC0EA +:1014200093010C2E02C0220F331F0A94E2F7F80139 +:101430008081822364DF2197AC16BD068CF3842D56 +:101440001ADCCA9409F0BECF80919703882311F06B +:101450004CD902C086E3FED8EE2011F00E946D1731 +:101460007698CDB7DEB7E0E10C94DD2BEF920F93C9 +:101470001F93DF93CF9300D000D0CDB7DEB7019696 +:101480008E010F5F1F4FB80143E037DD9AE690935E +:10149000940184E28093950181E280939601909378 +:1014A000970180E88C83C80160E043E026E003E018 +:1014B0005FE0E52E46DF0F900F900F900F90CF91D9 +:1014C000DF911F910F91EF900895EF920F93DF93AB +:1014D000CF93CDB7DEB760970FB6F894DEBF0FBEDF +:1014E000CDBF0196BE016F5F7F4F40E106DD698190 +:1014F0004A810B818C81809394018D818093950129 +:101500008E81809396018F8180939701242F2F7075 +:10151000E02EE2947FE0E722CE01089642954F70DC +:101520000F700FDF60960FB6F894DEBF0FBECDBF11 +:10153000CF91DF910F91EF900895A9E1B0E0E3EA38 +:10154000FAE00C94BF2B8E01025F1F4F0196B80189 +:101550004BE0D3DCF82E95E39093940186E18093E1 +:1015600095018093960190939701C8016F2D9AD1B0 +:10157000F801EF0DF11D80836F2C639419821A829C +:101580007724E7E0A1E02A2E312C2801F2E08F2E0B +:10159000912C8C0E9D1E5E010894A11CB11C41C0B3 +:1015A00000E010E0C72CDD247201EC0CFD1C910161 +:1015B000002E02C0220F331F0A94E2F7D7018C914C +:1015C0008223B401A5012E2F73DEE82F0F5F1F4F7A +:1015D0000430110561F781E0B401A5012E2F68DE0A +:1015E000982F04E010E07201EC0CFD1C9101002E1C +:1015F00002C0220F331F0A94E2F7F7018081822391 +:10160000B401A501292F54DE982F0F5F1F4F08301A +:10161000110561F781E0B401A501292F49DEE82F0A +:101620007394761408F4BCCFCE0102966AE0498127 +:101630002E2F03E0EAE0EE2E84DE6996E2E10C94C0 +:10164000DB2BA9E1B0E0E7E2FBE00C94C32B3EE02A +:10165000E32EF12CEC0EFD1E0196B7014BE04DDCA4 +:10166000893009F05BC089E18093950180939401F2 +:101670008093960182E380939701C70169E005D1C9 +:101680008F8B19821A8207E0A7E010E027E0622E14 +:10169000712C91E0892E912C570182E0C82ED12C1B +:1016A000CC0EDD1E7E010894E11CF11C01C007E098 +:1016B000802F992787FD90959301281B390BC40132 +:1016C00002C0880F991F2A95E2F79C01F501E10FEE +:1016D000F11D80818223B601A7012A2FE9DDA82F01 +:1016E000002311F00150E4CF81E0B601A7012A2FB9 +:1016F000DFDDA82F1F5F1A30D0F2882319F0282FC2 +:10170000215004C089818F5F898327E0CE01029632 +:101710006CE0498103E0BAE0EB2E13DE6996EEE05F +:101720000C94DF2BAEE0B0E0E8E9FBE00C94BF2BBB +:101730007C01162F842F2E87B701412F9AD0F701F5 +:10174000E10FF11D8083A12EA39419821A82470113 +:10175000BB24E7E0F1E02F2E312C72E0472E512C14 
+:101760004C0E5D1E3E010894611C711C2FC007E0E9 +:1017700010E06701CB0CD11C9101002E02C0220F9A +:10178000331F0A94E2F7D6018C918223B201A301A0 +:101790002E2F8EDDE82F01501040BFEF0F3F1B07AB +:1017A00059F7D4018D914D01082E82958025082E80 +:1017B00086958695802590E0019695958795817010 +:1017C000B201A3012E2F74DDE82FB394BA1478F27E +:1017D000EE2319F02E2F215004C089818F5F898359 +:1017E00027E089E1809395018093940185E28093BD +:1017F000970180939601CE0102966CE049810E8597 +:101800005AE0E52E9EDD2E96E2E10C94DB2BACE057 +:10181000B0E0EDE0FCE00C94CB2BF62EE42E01962C +:101820008E010F5F1F4FB8014BE067DB682FC801C7 +:101830004F2D2E2D77DF2C96E6E00C94E72B66E0FB +:1018400043E0E5CF539A5B985298E9E6F0E0808157 +:101850008061808382EC91E090939603809395035E +:101860001092B4011092D0011092EC0110928F03EB +:101870000895982F06C04150FB01E40FF11D8081AF +:10188000980F4423C1F7892F0895AC0120E006C0CA +:101890006150FA01E60FF11D808128276623C1F708 +:1018A000822F0895DC019091200230E025E040E095 +:1018B00013C06150FD01E60FF11DE0818E2F82956E +:1018C0008F703827280F992311F0442321F08E2F91 +:1018D0008F703827280F4F5F662359F7230F229503 +:1018E000207F822F832B0895A0E0B0E0EAE7FCE0A0 +:1018F0000C94C92BDC011092200210920B0297E08D +:1019000013968C911397981B11961C911197110F98 +:10191000110F110F190F10920C02FF24EE2407E093 +:1019200057E040E070E0E7E0CE2ED12CC1E0D0E0FF +:101930007FC0FD01EE0DF11D848190E0002E02C0FC +:10194000959587950A94E2F7282F2170002311F0CE +:10195000015002C0E39407E06B3469F4533059F44A +:101960007723B9F5222311F071E062C040930B0296 +:10197000109220021EC05F3F69F56634E1F4E42F47 +:10198000F0E0E45FFD4F8081082E82958025082ECF +:1019900086958695802590E001969595879581702E +:1019A000907030E08217930761F040930B027093C0 +:1019B000200280E057C0653411F06B3411F422230B +:1019C000A1F34F5FE42FF0E0E45FFD4F108257E09A +:1019D0002FC070E0222359F1663479F4E42FF0E04F +:1019E000E45FFD4F9E01052E02C0220F331F0A94B3 +:1019F000E2F78081822B80831AC0653411F06B344A +:101A0000B1F4E42FF0E0E45FFD4F852F992787FDC7 +:101A100090959601281B390BC9019E0102C0220F27 +:101A2000331F8A95E2F7C9012081282B208351506A +:101A3000F3948F2D8150811708F47BCF40930B02D4 +:101A4000709320021F1508F0B4CF653411F45F3F86 +:101A500011F0772319F04F5F40930B0280910B0236 +:101A6000811181E0CDB7DEB7E8E00C94E52BDC0115 +:101A700012968C911297ED91FC911197E80FF11D40 +:101A8000E08113962C911397215013962C93139762 +:101A90002F5F39F48F5F12968C93129787E013961D +:101AA0008C938E2F90E002C0959587952A95E2F74A +:101AB00081700895A0E0B0E0E0E6FDE00C94C52B55 +:101AC0006C01962E842EFF24C0E0D0E0F1E0AF2E12 +:101AD000B12C062F10E00150104019C0C601C7DF1D +:101AE0008823A1F0882059F0C8018C1B9D0B95011B +:101AF00002C0220F331F8A95E2F7C90106C0C50153 +:101B000002C0880F991FEA94E2F7F82A2196EC2E7A +:101B1000C91520F38F2DCDB7DEB7ECE00C94E12B87 +:101B2000A4E0B0E0E6E9FDE00C94C72BFC0187E0FF +:101B30008C833496FA83E983349710920B02818167 +:101B400090E023E0880F991F2A95E1F72381821BFB +:101B50009109CE970CF45AC01B82CC24DD245E017F +:101B60000894A11CB11C2EC0ED2CFF24C50168E017 +:101B700040E0A0DF082F8CE092E0E80EF91EF701AC +:101B80000083002E02950025002E0695069500255F +:101B9000C5016DDF10E00F5F1F4F159507950170B0 +:101BA000107090E00817190731F4C50160DF9D2D12 +:101BB0009F5F882319F0D0920B0228C0F701808123 +:101BC000C826D92EF5E0FD1578F6D0920B02C50196 +:101BD00068E040E06FDFF82E082F002E0295002508 +:101BE000002E069506950025C50141DF10E00F5F28 +:101BF0001F4F159507950170107090E00817190791 +:101C000029F480E0CF1419F481E001C080E024962B +:101C1000EAE00C94E32BA4E0B0E0E1E1FEE00C94F8 +:101C2000C72BFC011B8287E08C833496FA83E983FF +:101C3000349710920B028081853009F053C0818166 +:101C400090E043E0880F991F4A95E1F72381821BBA 
+:101C500091098938910509F045C019E83FE0C32E84 +:101C600030EFD32E80E05E010894A11CB11C10C09F +:101C7000C50168E041E01EDFC80ED11C2CE032E057 +:101C8000E20EF31E1827F7011083182F1C5D802F1A +:101C9000E82EFF24082F0F5F8F3050F3C50168E056 +:101CA00041E008DF182F2CE032E0E20EF31E8095B1 +:101CB000F701808300930B02C50168E041E0FADE82 +:101CC000082FC10ED11CC50168E041E0F3DE40E001 +:101CD000302F20E0C21AD30A90E0C816D90619F4B2 +:101CE00041E001C040E0842F2496EAE00C94E32B0D +:101CF000EF92FF921F93DF93CF9300D000D0CDB728 +:101D0000DEB7FC013496FA83E9833497818184300D +:101D1000B9F587E08C838381813091F51B8210E0D7 +:101D20007E010894E11CF11C12C0112331F4C7019B +:101D300067E041E0BFDE806804C0C70168E041E0C1 +:101D4000B9DEE12FF0E0E45FFD4F80831F5F1430C8 +:101D500060F3CE01019667E041E0ACDEE12FF0E0F8 +:101D6000E45FFD4F880F80831F5F10930B0290E0AC +:101D700080910C0282958F708A3019F491E001C035 +:101D800090E0892F0F900F900F900F90CF91DF91DF +:101D90001F91FF90EF900895FC018181A82FB0E082 +:101DA00050E0E081CD01841B950B0B964797D8F44A +:101DB0008E2F90E070E09C01261B370B2C303105F4 +:101DC00094F4255F3F4F7CF08A0F9B1F861B970B77 +:101DD000841B950B8C30910534F420E0855F9F4F78 +:101DE0001CF021E001C020E0822F0895282F30E070 +:101DF000C901880F991F820F931F860F911D95951A +:101E00008795959587950895DC0111968C9111978A +:101E1000843190F010926F008091080290E02CE1E4 +:101E200030E0829FF001839FF00D929FF00D11240E +:101E3000E856FE4F10820895662381F0FD01E80FF9 +:101E4000F11D81E090E013960C90139702C0880F6B +:101E5000991F0A94E2F72481282B248313968C91EE +:101E60001397815013968C9313978F5F79F487E0C3 +:101E700013968C93139711968C9111978F5F1196EF +:101E80008C931197A80FB11D14961C920895282FBA +:101E90008552873160F46A3050F0673240F490E048 +:101EA000621708F091E081E09827892F089580E07B +:101EB00090E008951F920F920FB60F921124EF92A7 +:101EC000FF920F931F932F933F934F935F936F93C3 +:101ED0007F938F939F93AF93BF93CF93DF93EF93B2 +:101EE000FF9380914704882321F082E08093470488 +:101EF000F0C080912404823021F483E08093240494 +:101F0000E8C08091840090918500E4E096958795E3 +:101F1000EA95E1F7182FE09008028CE1E89EE001D5 +:101F20001124C856DE4F9881943031F41E3208F4E3 +:101F3000D0C01F3448F0BDC0953031F4173108F4DB +:101F4000C8C0183208F0B5C0892F84504A990CC017 +:101F5000823038F4CE0161E057DF10928500109294 +:101F6000840010933A02B5C000913A02F12EF01AA3 +:101F7000F0923B021092850010928400823020F48F +:101F8000CE0160E041DFA5C082E086BB992371F4F9 +:101F9000053608F09EC084E68F1508F49AC0088FB5 +:101FA000F98E81E08A8382E0888393C0923009F0C1 +:101FB0005AC0CE014896602F4F2DEEDE1A8188233D +:101FC00059F0888D602F12DF888F898D6F2D0EDF7D +:101FD000898F1F5F1A837DC0143008F442C080EAE5 +:101FE0009FE090938900809388001C3050F0298DE9 +:101FF000888D90E0820F911D8536910514F084E064 +:1020000025C01A3080F0298D888D90E0820F911DB7 +:1020100085974CF485E0888388EE93E0909389005F +:102020008093880014C080919D03882371F0802FD5 +:102030006F2D2DDF882349F0898D688D28DF882357 +:1020400021F081E08093240434C083E088830A8FE8 +:10205000FB8E198287E08B831C8282E080936F0065 +:1020600038C0188295CFCE014A96602F4F2D94DE4E +:10207000882361F0CE0161E0C7DE8A8D602FB6DE75 +:102080008A8F8B8D6F2DB2DE8B8F23C0CE01489649 +:10209000602F4F2D81DE882361F0CE0160E0B4DE39 +:1020A000888D602FA3DE888F898D6F2D9FDE898FAD +:1020B00010C010926F002E2D30E08CE190E0289F30 +:1020C000F001299FF00D389FF00D1124E856FE4FC6 +:1020D0001082FF91EF91DF91CF91BF91AF919F91CE +:1020E0008F917F916F915F914F913F912F911F91B0 +:1020F0000F91FF90EF900F900FBE0F901F901895CB +:102100008091080290E02CE130E0829FF001839FF3 +:10211000F00D929FF00D1124E856FE4F8081882328 +:1021200031F420E030E0809146048F3F11F021E04F +:1021300030E0822F08951F920F920FB60F92112454 
+:102140002F933F934F935F936F937F938F939F93BF +:10215000AF93BF93EF93FF9310926F00809197031B +:1021600083FF03C08EE20E94B50640910802242F2F +:1021700030E08CE190E0289FF001299FF00D389F1E +:10218000F00D1124E856FE4F8081833018F08181D4 +:10219000823028F410926F008CE190E013C080919F +:1021A0000A028330D1F48091970382FF04C085E74F +:1021B00092E00E94EE0610926F008091080290E07B +:1021C0002CE130E0829FF001839FF00D929FF00D93 +:1021D0001124E856FE4F10820BC08F5F80930A02D5 +:1021E000842F8F5F80930802843011F410920802CC +:1021F000FF91EF91BF91AF919F918F917F916F91DF +:102200005F914F913F912F910F900FBE0F901F90B4 +:1022100018950F931F93CF93DF9380913B028823F0 +:10222000B9F18091970387FF17C084E30E940E03E2 +:102230008058803410F480E00CC0803C10F08FE0B7 +:1022400008C090E08055904003E0959587950A95E9 +:10225000E1F78F590E94B5068091970383FF16C05E +:1022600082E70E94B5068091970384FF04C08091A5 +:102270003A020E94B50686E60E94B5068091970351 +:1022800084FF04C080913B020E94B50610923B027D +:1022900080910A02882309F42AC2769A9091090251 +:1022A0008CE1989FE0011124C856DE4FCE01B3DCCB +:1022B000882311F003E5A8C0CE0166E415DB88236E +:1022C00009F4ECC140910B02415040930B02942F52 +:1022D00056E007C09150E92FF0E0E45FFD4F8081A8 +:1022E000580F9923B9F7A42FB0E0FD01E45FFD4F2B +:1022F0006081561711F4443068F4852F90E0019600 +:10230000262F30E08217930779F4443068F0A45FF9 +:10231000BD4F5C9306E478C09150E92FF0E0E45F94 +:10232000FD4F8081280F02C0942F2CE09923A1F744 +:10233000261709F0B3C1443008F4B0C1ADC1809193 +:102340000B02815080930B02893009F0ADC199E0F6 +:1023500020E09150E92FF0E0E45FFD4F80812827D5 +:102360009923B9F780911502281709F09DC105E45A +:102370004BC060910B02615060930B028CE092E0C5 +:1023800091DAE0910B02F0E090912002E91BF10953 +:10239000E45FFD4F9081891761F08B818F5F8B83A4 +:1023A0008150873009F09EC11B82898181508983C9 +:1023B00099C10BE429C08B81843009F0E4C0CE01BF +:1023C00048966CE34EE1E8DC882309F4DCC0C801E0 +:1023D00060913A024EE1E0DC682FCE0115DD1092EB +:1023E0000B020DC0E92FF0E0DE01AE0FBF1F149607 +:1023F0008C91E45FFD4F80839F5F90930B029091DF +:102400000B02953078F302E580919703682F80FFE7 +:10241000BBC081FF02C020E05FC09091210280918B +:102420000B02981799F5109221020EC0E22FF0E0EE +:10243000DF01A45FBD4FEE5DFD4F90818C91981739 +:1024400049F42F5F209321022091210230910B0249 +:10245000231760F32317D1F4809191039091920395 +:10246000A0919303B09194032091360230913702EA +:102470004091380250913902821B930BA40BB50B8B +:102480008697A105B10510F421E001C020E010926B +:1024900021020CC0E92FF0E0DF01A45FBD4F8C9159 +:1024A000EE5DFD4F80839F5F90932102909121020A +:1024B00080910B02981770F3809191039091920391 +:1024C000A0919303B09194038093360290933702C6 +:1024D000A0933802B09339020435F1F466FD1CC0B4 +:1024E00080910B028530C0F080910E028B3409F48C +:1024F0004BC0893609F448C0833509F445C084359A +:1025000009F442C08D3709F43FC08E37E9F180915C +:102510000F0280778037C1F12223B1F5802F0E940E +:10252000B50680912002882329F080910B0281500A +:1025300080930B0210E008C0E12FF0E0E45FFD4F54 +:1025400080810E943C071F5FE0910B021E17A0F3E1 +:1025500080912002882359F0F0E0E45FFD4F8081F4 +:1025600090E08F70907061E040E30E9407078091D7 +:10257000970385FF05C084E30E940E030E943C0779 +:102580000E94EA0601C000E08091970382FF6DC0BF +:1025900080E70E94B506888190E062E040E20E94F8 +:1025A0004007888D90E014E0880F991F1A95E1F795 +:1025B00065E040E20E944007898D90E0B4E0880F1A +:1025C000991FBA95E1F765E040E20E9440078A8DC5 +:1025D00090E0A4E0880F991FAA95E1F765E040E23A +:1025E0000E9440078B8D90E0F4E0880F991FFA95C8 +:1025F000E1F765E040E20E9440078A8190E063E0F5 +:1026000040E20E944007898190E063E040E20E943E +:1026100040072B8187E090E0821B910962E040E255 +:102620000E94400780E20E94B5068091970385FFD3 
+:1026300008C084E30E940E030E943C0780E20E94CF +:10264000B5068B81873019F089818F5F898310E00F +:1026500007C0FE01E10FF11D84810E943C071F5F4E +:1026600089811817B0F30E94EA06188280910A0245 +:10267000815080930A02809109028F5F8093090242 +:10268000843011F4109209027698043581F58CE0BB +:1026900092E00E945D1F2BC004E5B6CECE0165E43A +:1026A00023D9882309F04BCECE013ADA882311F0E2 +:1026B00008E4AACECE011CDB882311F004E7A4CEE7 +:1026C0008E01065E1F4FC80160913A024B8D64DB9C +:1026D000682FCE0199DBCE016BE406D9882309F47B +:1026E0005CCE47CE8981843009F04DCF64CEDF9136 +:1026F000CF911F910F91089580919703882321F026 +:102700000E9458040C9475030C943203FC018181DF +:10271000882371F4809197030E943C078091950370 +:102720009091960365E040E20E9440070C94EA060F +:10273000CF01019667E973E041E00E947D07DCCF9D +:10274000A1E0B0E0E6EAF3E10C94CB2B8091240405 +:10275000882309F484C0E0909103F09092030091E3 +:10276000930310919403833009F060C08FE30E94BB +:102770000E03182F863010F400E018C08BEC94E0A4 +:102780000E941A0928988FEF0E94080380E00E9497 +:102790000803082F08C080E00E940803682F8BEC14 +:1027A00094E00E941F091150B1F7289AA5DF1092FA +:1027B0002404053709F053C08EEC94E0BE016F5F2E +:1027C0007F4F41E00E947D07019759F599818091E3 +:1027D0009D03981731F58091D204853539F00DC0ED +:1027E0008BEC94E00E9437090E94B5068091CD04DD +:1027F0008823B1F70E94EA0632C08091CD0484504C +:102800008093CD0484E08093CC048BEC94E06AE068 +:102810000E941F09E0918D03F0918E0388E00995D5 +:102820001EC08BEC94E00E941A0919C09E2D8230C4 +:1028300049F480919C03981B983088F05DDF1092DA +:1028400024040DC0813059F487E390E00E9407040E +:102850000E94750382E080932404E0929C032196F9 +:10286000E6E00C94E72B5B980C9436065B9A0C948C +:102870003606A7E0B0E0EFE3F4E10C94CA2B282F72 +:102880008091A603843551F4809127049091280407 +:102890000196909328048093270439C0863451F41C +:1028A00080912504909126040196909326048093AC +:1028B00025042DC0853451F480919A0390919B0397 +:1028C000019690939B0380939A0321C08B3451F41B +:1028D00080912A0490912B04019690932B0480936D +:1028E0002A0415C0883451F480912C0490912D0451 +:1028F000019690932D0480932C0409C08091980335 +:10290000909199030196909399038093980385E79A +:102910008983222319F4DD24D39412C080912E04DC +:10292000BE016E5F7F4F0E94CF0780919D03BE0165 +:102930006C5F7F4F0E94CF0785E58E8386E0D82E9F +:102940000E9458040E94860310E080E891E08EDF28 +:1029500080E093E088DF1F5F1630B9F780E093E0F6 +:1029600085DF80E891E07FDF80E891E07FDF5B98A2 +:1029700087E390E00E94070483E00E943B062898CA +:102980008FE70E9408038091A0038D0D0E94080329 +:102990007E010894E11CF11C8701F80181918F01EF +:1029A0000E940803802F8E198D15B8F306C08EE99A +:1029B00093E00E9437090E9408038091A0038823B6 +:1029C000B1F7289A0E9486038EE993E00E941A09C3 +:1029D00014E104C0115081E00E943B068AE30E948A +:1029E0000E0390E08F779070892B11F0112391F7EF +:1029F00083DE2796E7E00C94E62B82DB882361F0E8 +:102A000083E080932904809121049091220401960F +:102A10009093220480932104809129048150809313 +:102A20002904882321F083E080932304089581E022 +:102A300020CFCF93DF93EC018981882339F48091F3 +:102A40009D030E943C0780912E0413C08436A1F49C +:102A50008091910390919203A0919303B09194037C +:102A600064E040E30E9407078EE20E94B506809171 +:102A700023040E943C074DC0833709F04DC080916C +:102A800027049091280461E040E30E9407078EE24A +:102A90000E94B506809125049091260461E040E3F0 +:102AA0000E9407078EE20E94B50680919A039091DA +:102AB0009B0361E040E30E9407078EE20E94B50697 +:102AC00080912A0490912B0461E040E30E94070763 +:102AD0008EE20E94B50680912C0490912D0461E055 +:102AE00040E30E9407078EE20E94B506809198039A +:102AF0009091990361E040E30E9407078EE20E94F3 +:102B0000B506809121049091220461E040E30E9487 +:102B100007070E94EA062CC08936D9F4CE01029636 
+:102B20006DE973E041E00E947D0780E690E06091EE +:102B30009D030E945508CE0104966EE274E041E0C8 +:102B40000E947D0781E690E060912E040E94550866 +:102B50000FC08EE993E00E941A0904C08EE993E049 +:102B60000E941F09219668816623C1F780E081DEFB +:102B7000DF91CF91089580E690E00E94F107809365 +:102B80009D0381E690E00E94F10780932E04882344 +:102B900021F081E280939703AFCD0895A2E3B0E0E6 +:102BA000E4EDF5E10C94CA2B80913C02882309F4F2 +:102BB0009FC04A9B88C08FE30E940E038F778233A9 +:102BC00068F08AE30E942A0386E30E942A038DE3C9 +:102BD0000E942A0384E30E942A038AC089832898DA +:102BE0008FEF0E94080310E082E0E82EF12CEC0E3B +:102BF000FD1E08C080E00E940803F701E10FF11DEF +:102C000080831F5F89811817A8F380E00E94080362 +:102C1000D82E80E00E940803289A84E30E942A03A9 +:102C200085E30E940E038D30C1F79A8186E78927DC +:102C30008A8322E0AE014F5F5F4F08C0E40FF51FAB +:102C400080819452982790832F5F982F8981E22F5B +:102C5000F0E0281798F3E40FF51F80819B81892706 +:102C600080838091970384FF13C081E60E94B5069C +:102C7000098110E07E010894E11CF11CF701E10FCD +:102C8000F11D80810E94B5061F5F0117B8F71BC0B8 +:102C900081E40E94B506098110E07E010894E11CE0 +:102CA000F11CF701E10FF11D80810E943C071F5FBD +:102CB0000117B8F78091970385FF03C08D2D0E94FF +:102CC0003C070E94EA0685E30E940E03813029F04A +:102CD000813161F48AE30E942A0386E30E942A0379 +:102CE0008DE30E942A0384E30E942A030E94B60413 +:102CF000E296E7E00C94E62BCF93DF93EA98209AD4 +:102D0000289A8EE190E00E94360628988EE190E0A5 +:102D10000E943606289A8DE290E00E94360680E3F3 +:102D20000E942A0384E690E00E943606CCE7D2E0B7 +:102D3000FE0184912196FE0164910E941C0321965C +:102D4000F2E0CE3ADF07A1F780913D02882371F0CF +:102D5000CEEAD2E0FE0184912196FE0164910E94A8 +:102D60001C032196F2E0C43CDF07A1F783E30E9435 +:102D70002A0384E00E943B0684E30E942A0385E341 +:102D80000E940E038D30C1F7DF91CF910895A2E329 +:102D9000B0E0EDECF6E10C94CA2B0196BE016F5F3A +:102DA0007F4F41E30E947D07D82E282F30E021502D +:102DB0003040898190E02817390709F072C080916E +:102DC0003C02882321F498DF83E00E943B061B81AC +:102DD0008A8196E789278A8322E0AE014F5F5F4FA1 +:102DE0000BC0FA01EA0FFB1FAC0FBD1F8C91845280 +:102DF0009081892780832F5F8981A22FB0E02817D7 +:102E000080F3FA01EA0FFB1F80818127808385E32D +:102E10000E942A0385E30E940E038331C1F714FF49 +:102E200008C088E70E943B0688E70E943B0688E7C7 +:102E300001C08AE00E943B0628988FE70E940803A1 +:102E40007E010894E11CF11C870105C0F801819105 +:102E50008F010E940803802F8E198D15B8F3289AD0 +:102E600085E30E940E038331D9F385E30E940E03AC +:102E7000863149F48BE30E942A0386E30E942A03E9 +:102E80008DE30E942A0380913C02882349F084E369 +:102E90000E942A0385E30E940E038D30C1F701C012 +:102EA0002BDCE296E7E00C94E62BFC0101962181F5 +:102EB000223731F0223559F481E080933D0202C07F +:102EC00010923D0219DF81E080933C020895233780 +:102ED00009F45DCF10923C0208951F93CF93DF93C6 +:102EE000EA98209A289A8EE190E00E943606289867 +:102EF0008EE190E00E943606289A8DE290E00E94D2 +:102F0000360680E30E942A0384E690E00E9436069B +:102F1000C4ECD2E0FE018491813450F42196FE018C +:102F200064910E941C032196F3E0C030DF0791F703 +:102F300083E30E942A0384E00E943B0610E01150C4 +:102F400041F084E30E942A038AE090E00E94360662 +:102F5000F6CF81E080933E02DF91CF911F910895DB +:102F6000EF92FF920F931F93CF93DF937C01EB01BE +:102F7000FC0100810F5F6115710519F420E030E05C +:102F800002C024E630E0802F90E043E0880F991FD4 +:102F90004A95E1F76AE070E00E94AC2B260F371FDC +:102FA000A901662757FD6095762F209195033091F2 +:102FB0009603C901A0E0B0E084179507A607B707FC +:102FC00028F489E093E00E94EE06CEC0241B350B66 +:102FD000309396032093950380913E02882309F451 +:102FE0007CDF85E30E940E038D3071F08AE50E943C +:102FF000B50685E40E94B50682E50E94B50682E525 +:103000000E94B50681E34AC0809145029091460234 
+:10301000A0914702B09148020097A105B10521F4A3 +:1030200029C081E00E943B06809191039091920318 +:10303000A0919303B09194032091450230914602F0 +:10304000409147025091480282179307A407B507A1 +:1030500041F38091910390919203A0919303B091D9 +:1030600094032F5F3F4F4F4F5F4F82179307A40783 +:10307000B507B9F20E94860385E30E940E038331EF +:10308000C1F08AE50E94B50685E40E94B50682E596 +:103090000E94B50682E50E94B50682E30E94B5064D +:1030A00085E30E940E030E943C070E94EA0615DF9A +:1030B0005BC0CD2B49F0C0E0D0E084E60E943B0627 +:1030C0002196CA30D105C9F728988FE70E940803D6 +:1030D000E70110E004C089910E9408031F5F1017E8 +:1030E000D0F3289A10E085E30E940E038D3059F04A +:1030F00085E30E940E03833131F481E00E943B0698 +:103100001F5F183C81F785E30E940E038D30B9F0F4 +:103110008AE50E94B50685E40E94B50682E50E9414 +:10312000B50682E50E94B50683E30E94B50685E3F5 +:103130000E940E030E943C070E94EA06CEDE8091A8 +:103140003E02882309F4D8DA80919103909192038A +:10315000A0919303B091940380934502909346020B +:10316000A0934702B0934802CDB7DEB7E6E00C94D7 +:10317000E72BEF92FF920F93DF93CF93CDB7DEB79C +:103180002C970FB6F894DEBF0FBECDBFFC018BE0CD +:10319000898381818A831B8282E08C8387818D83EE +:1031A00080858E8381858F838481888785818987C7 +:1031B00086818A871B861C8684E10E943B067E01ED +:1031C0000894E11CF11CC70160E070E0C9DE8AE5EB +:1031D0000E94B50600E006C0EE0DFF1D80810E9432 +:1031E0003C070F5FE02FF0E0898190E08E179F078A +:1031F0009CF78091970385FF03C080E00E943C0705 +:103200000E94EA062C960FB6F894DEBF0FBECDBF23 +:10321000CF91DF910F91FF90EF900895CF93DF93BF +:10322000EC0190913F02992341F480914002882360 +:1032300021F4809141028823B1F48B81803321F005 +:10324000803411F0803579F48F81891761F4988585 +:1032500080914002981739F499858091410298171E +:1032600011F4CE0186DF90914202992341F48091BE +:103270004302882321F4809144028823A9F08881A5 +:103280008B3091F48B81803479F48F81891761F4CC +:10329000988580914302981739F49985809144026A +:1032A000981711F4CE0165DFDF91CF910895AEE15B +:1032B000B0E0EDE5F9E10C94CB2B80913E02882340 +:1032C00009F46AC04A9B5AC08FE30E940E038F77AD +:1032D00089838E3110F08DE1898328988FEF0E94C9 +:1032E000080310E052E0E52EF12CEC0EFD1E08C0A4 +:1032F00080E00E940803F701E10FF11D80831F5F4A +:1033000089811817A8F380E00E940803082F80E045 +:103310000E940803289A7E010894E11CF11CC70151 +:103320007DDF8091970384FF10C08AE70E94B50675 +:1033300010E007C0F701E10FF11D80810E94B50682 +:103340001F5F89818117B0F727C08AE50E94B50603 +:1033500010E007C0F701E10FF11D80810E943C07DA +:103360001F5F89818117B0F78091970385FF03C0A4 +:10337000802F0E943C070E94EA060EC085E30E944F +:103380000E03813149F48AE30E942A0386E30E94F6 +:103390002A0384E30E942A036E96E6E00C94E72B4E +:1033A000AEE1B0E0E6EDF9E10C94CD2B8C01FC012F +:1033B0008181823711F491DD37C0833711F0863671 +:1033C00001F5C8010296BE016F5F7F4F4DE10E947B +:1033D0007D0790E00197298130E08217930729F05B +:1033E00080E093E00E94EE061FC060E070E0F8010C +:1033F0008181833711F461E070E0CE010196B0DD88 +:1034000013C0813629F4C80102966FE372E006C04A +:10341000873741F4C801029662E472E043E00E94FB +:103420007D0702C010923E026E96E4E00C94E92BF8 +:10343000AEE1B0E0EEE1FAE10C94CA2B80914902D2 +:10344000882309F476C04A9B61C08FE30E940E0373 +:103450008F7789838E3110F08DE1898328988FEFE3 +:103460000E94080310E082E0E82EF12CEC0EFD1E15 +:1034700008C080E00E940803F701E10FF11D80837E +:103480001F5F09811017A8F380E00E940803D82E5F +:10349000289A8AE30E942A0386E30E942A038DE386 +:1034A0000E942A0384E30E942A038091970384FFE9 +:1034B00012C087E70E94B50610E07E010894E11C67 +:1034C000F11CF701E10FF11D80810E94B5061F5F1D +:1034D0000117B8F72EC087E50E94B50610E07E01FF +:1034E0000894E11CF11CF701E10FF11D80810E949D +:1034F0003C071F5F0117B8F78091970385FF03C052 
+:103500008D2D0E943C070E94EA0613C085E30E94AD +:103510000E03813011F0813161F48AE30E942A03A5 +:1035200086E30E942A038DE30E942A0384E30E941B +:103530002A036E96E7E00C94E62BCF93DF93EA988C +:10354000209A289A8EE190E00E94360628988EE113 +:1035500090E00E943606289A8DE290E00E9436069E +:1035600080E30E942A0384E690E00E943606C9E1C7 +:10357000D3E0FE018491813450F42196FE016491E0 +:103580000E941C032196F3E0C535DF0791F783E322 +:103590000E942A0381E00E943B06DF91CF910895AB +:1035A000FC018181823729F4C8DF81E080934902E0 +:1035B0000895833711F0109249020895282F813021 +:1035C00091F45B9A20910B0130910C01C901880F95 +:1035D000991F820F931F0E9436065B9880910B0102 +:1035E00090910C0134C080910B0190910C01222329 +:1035F00009F55B9A0E9436065B9820910B01309189 +:103600000C01C901880F991F820F931F0E94360673 +:103610005B9A80910B0190910C010E9436065B9899 +:1036200020910B0130910C01C901880F991F820F65 +:10363000931F20C05B9A0E9436065B9820910B0175 +:1036400030910C01C901880F991F820F931F0E94AE +:1036500036065B9A20910B0130910C01C901880F4D +:10366000991F820F931F0E9436065B9880910B0171 +:1036700090910C010C9436060F931F93CF93DF9318 +:10368000EA98209A289A8EE190E00E9436062898BF +:103690008EE190E00E943606289A8DE290E00E942A +:1036A000360680E30E942A0384E690E00E943606F4 +:1036B000289880E40E940803C5E5D3E0FE018491C8 +:1036C0000E9408032196F3E0C236DF07B9F7809124 +:1036D0000F010E940803809110010E94080380914D +:1036E00011010E9408032396FE0184910E940803A1 +:1036F0002196F3E0CE37DF07B9F7289A28988EE7AE +:103700000E9408038BE290E08C010F5F1F4F0E9424 +:10371000F1070E940803C80103331105A9F7289A8D +:1037200083E30E942A0381E00E943B0681E08093AC +:103730008F03DF91CF911F910F910895CF92DF9268 +:10374000EF92FF921F93CF93DF937C01FC01818165 +:10375000843759F4C70102966BE071E00E94B0070C +:1037600080910B0190910C0111C0833709F0A5C025 +:10377000F7018281823789F4C70103966DE071E019 +:103780000E94B00780910D0190910E0160E040E22F +:103790000E9440070E94EA06FBC0769A80914C0284 +:1037A0008823B1F480913C02882329F081E0809342 +:1037B0004A0210923C0280913E02882329F081E067 +:1037C00080934B0210923E0257DF83E00E943B063B +:1037D00086E30E942A038AE30E942A038BE30E9465 +:1037E0002A030E948603CC24DD2424C08A818033EE +:1037F00011F480E005C0813311F481E001C082E062 +:10380000DDDE1F5F21961D3089F75B9A80910B01E9 +:1038100090910C010E9436065B9810E080910B019C +:1038200090910C010E9436061F5F1F31B9F7089472 +:10383000C11CD11C80910D0190910E01C816D906B2 +:1038400018F4E70111E0D2CF80914C02882341F0B7 +:1038500080919703882371F486E30E942A0317C09E +:1038600080914A02882349F010924A0245DA81E0A9 +:1038700080933C020E9475030AC080914B0288230A +:1038800021F010924B0229DB02C00E947C13769833 +:1038900089E60E94B50683E70E94B506E70111E0BC +:1038A0008A81803319F0813309F086E40E94B506DD +:1038B0001F5F21961D30A1F76DCF8237B9F480913B +:1038C0003C02882329F081E080934A0210923C0256 +:1038D00080913E02882329F081E080934B02109270 +:1038E0003E02CADE81E080934C0252C0863659F512 +:1038F000F7018281803351F480E180930F0180EBE6 +:103900008093100181E78093110107C0C7010296DF +:103910006FE071E043E00E947D0789E60E94B506F2 +:1039200086E60E94B5068AE30E94B50680910F01E3 +:103930000E943C07809110010E943C078091110178 +:103940000E943C0727CF883719F580914A028823C7 +:1039500039F010924A02D0D981E080933C0210C025 +:1039600080914B02882321F010924B02B6DA0EC0F0 +:1039700082E090E00E94070480919703882319F069 +:103980000E94750303C086E30E942A0310924C0232 +:10399000DF91CF911F91FF90EF90DF90CF9008952E +:1039A0002F923F924F925F926F927F929F92AF922F +:1039B000BF92CF92DF92EF92FF920F931F93DF930C +:1039C000CF930F92CDB7DEB77C01FC0181818337A5 +:1039D00009F075C1198287E090E00E94F62B6C0116 +:1039E000C70102968E010F5F1F4FB80141E00E9490 
+:1039F0007D078981F60181935F01C7010496B801B3 +:103A000041E00E947D078981807FF6018183C701A3 +:103A10000696B80141E00E947D078981F601828304 +:103A2000C7010896B80141E00E947D078981F6012F +:103A30008383C7010A96B80141E00E947D0789810E +:103A4000F6018683C7010C96B80141E00E947D070C +:103A50008981F6018583C7010E96B80141E00E9475 +:103A60007D078981F60184833181160120E057E0CA +:103A7000452E512C4C0C5D1C9191892F82958F7095 +:103A800089272827E415F505B9F72F70232BF501B1 +:103A90002083F60196012A5F3F4F8181908189271B +:103AA00081833196E217F307C1F7769A80914F022E +:103AB000882309F059C080913C02882329F081E0D5 +:103AC00080934D0210923C0280913E02882329F09F +:103AD00081E080934E0210923E02EA98209A289A42 +:103AE0008EE190E00E94360628988EE190E00E94D8 +:103AF0003606289A8DE290E00E94360680E30E9406 +:103B00002A0384E690E00E943606289880E40E940A +:103B100008030EE713E0F80184910E9408030F5F89 +:103B20001F4FF3E0073A1F07B1F7289A28988EE74E +:103B30000E9408038BE290E08C010F5F1F4F0E94F0 +:103B4000F1070E940803C80103331105A9F7289A59 +:103B500083E30E942A0381E00E943B0681E0809378 +:103B60008F0383E00E943B0686E30E942A038AE3D8 +:103B70000E942A038BE30E942A030E9486035B9A19 +:103B800087EC94E20E9436065B9889E50E943B062A +:103B900085E392E00E943606AA24BB2456C0AA20E0 +:103BA00011F402E001C007E010E05B9A86EF99E0B3 +:103BB0000E9436065B9886EF99E00E9436061F5FEA +:103BC00010179CF35B9A8CEF92E10E9436065B988B +:103BD00080911501909116010E9436068601F80128 +:103BE000808140E8E42EF12C9924682E7724C701C7 +:103BF000862197212091150130911601892B31F4EE +:103C00005B9AC9010E9436065B9805C05B98C901A2 +:103C10000E9436065B9A80911501909116010E94D0 +:103C200036069394F8E09F1619F0F694E794DFCFE8 +:103C30000F5F1F4F0415150591F65B9880E090E02B +:103C40000E9436060894A11CB11C8091120190E0DC +:103C5000A816B9060CF4A3CF80914F02882341F037 +:103C600080919703882371F486E30E942A0317C08A +:103C700080914D02882349F010924D023DD881E099 +:103C800080933C020E9475030AC080914E028823F3 +:103C900021F010924E0221D902C00E947C13769826 +:103CA00089E50E94B50683E70E94B506F10181917E +:103CB0001F010E943C0724143504C1F718C0823745 +:103CC000C9F4C701029662E171E00E94B00789E57C +:103CD0000E94B50682E70E94B5068AE30E94B506F7 +:103CE0008091120190E060E040E20E9440070E9453 +:103CF000EA0652C0843751F5F7018281803339F4E6 +:103D000088ED94E0909314018093130106C0C701DD +:103D1000029663E171E00E94B007809113019091D7 +:103D2000140196958795909316018093150189E566 +:103D30000E94B50684E70E94B5068AE30E94B50694 +:103D4000809113019091140166E0CECF883721F560 +:103D500080914D02882341F010924D020E947C1602 +:103D600081E080933C0210C080914E02882321F0B4 +:103D700010924E02B2D80EC082E090E00E9407047A +:103D800080919703882319F00E94750303C086E38E +:103D90000E942A0310924F020F90CF91DF911F9142 +:103DA0000F91FF90EF90DF90CF90BF90AF909F90DA +:103DB0007F906F905F904F903F902F900895DC011F +:103DC000E8E5F2E00EC0109751F091818C919817C0 +:103DD00031F4928111968C9111979817A1F0E20F0E +:103DE000F11D2081222321F083E0E630F80758F30B +:103DF00083E0E630F80719F010F0E6E0F3E0AB2BD3 +:103E000011F0E0E0F0E0CF010895CF93DF93EC01F3 +:103E1000D6DFFC01009779F0909106038081981716 +:103E200050F4E90FF11D80818A8389E78B838181BA +:103E30008C8381E001C080E0DF91CF9108950F93E2 +:103E40001F93CF93DF938C018091970386FF18C057 +:103E500084E50E94B506C0E0D0E0F801EC0FFD1F3C +:103E600080810E943C072196C530D105B1F7809131 +:103E7000970385FF03C08AEF0E943C070E94EA0671 +:103E8000DF91CF911F910F91089586E30E942A033D +:103E900080915302833511F08BE401C08BE90E94BD +:103EA0003B0681E592E065E04CE021E00E94920B48 +:103EB0000E94750381E592E0C2CF1F93FC0140810F +:103EC00031811281938124818FE580934604109281 +:103ED000570250912F04552329F4809145048823DB 
+:103EE00009F4CDC080915102841721F480915202CF +:103EF000831771F0109231041092500283E0809386 +:103F000006038FEF80934604409351023093520290 +:103F1000809131048F3F09F4B2C010935302909303 +:103F20005402209355021123E1F08823D1F4251780 +:103F300061F0243651F0812F8355823010F01D3707 +:103F400021F48FEF809331049AC0143509F497C09F +:103F5000892F807F803609F092C090679093540239 +:103F600083C081E592E051DF882329F480913104F8 +:103F7000882309F484C090915002992311F0892F6D +:103F800006C0E0913104F0E0E955FC4F849183359F +:103F900041F4143531F4809131048E5F8093310403 +:103FA0000BC0811749F0109231041092500283E047 +:103FB000809306038FEF61C0992379F08091060307 +:103FC0008E5F8093060381E592E01FDF882371F402 +:103FD000809131048E5F8093310490913104E92FF8 +:103FE000F0E0E855FC4F14911F3F59F480915302C3 +:103FF000809350028436C1F580913004809355023D +:1040000033C01093530287E78093540280912F04AA +:1040100080935502892F8E5F809331041092500255 +:104020008E3001F581E592E0CADEFC01009781F057 +:10403000808190E026E033E0281B390BDF01A80FD8 +:10404000B91F02C08D918193E217F307D8F3108254 +:10405000109231041092500283E0809306038FEF98 +:1040600080934604112359F010DF80915302893662 +:1040700031F081E08093570289E2809346041F91DA +:10408000089580915702882351F0FFDE89E28093E2 +:1040900046048091570281508093570208951092F0 +:1040A00031041092500283E0809306038FEF8093D7 +:1040B00046040895A6E0B0E0E0E6F0E20C94C92BD7 +:1040C0004091450480913204882341F0833019F0F7 +:1040D0008DE790E015C087E791E012C0842F90E053 +:1040E000877090702DE730E0BC01629FC001639F34 +:1040F000900D729F900D1124825B9F489595879536 +:1041000090933404809333044A831B8205E314E0C4 +:10411000FF246E010894C11CD11C8CE2E82EF8012A +:1041200090819F3FB1F080912F048F0D8983809102 +:104130003204882311F0EC8203C09C83F801818152 +:104140008D83C60165E04CE021E00E94920BC60120 +:1041500076DEF3940E5F1F4FF8E0FF1601F78091B3 +:104160003204882341F0833019F41092320403C0E2 +:104170008250809332042696E8E00C94E52B85E388 +:1041800090E00E94F10780932F0486E390E00E9464 +:10419000F10780934504E5E3F4E09FEF90833296C6 +:1041A00084E0E534F807D1F7109231041092500200 +:1041B00083E080930603909346041092580208957A +:1041C0006F927F929F92AF92BF92CF92DF92EF92C7 +:1041D000FF920F931F93DF93CF9300D000D000D0B6 +:1041E000CDB7DEB73C017C010894E11CF11CC7018E +:1041F000BE016F5F7F4F45E00E947D07E9818430FB +:1042000008F0ADC0E130A9F4833061F485E390E0BB +:104210006A810E94550886E390E06B810E945508F0 +:10422000AEDF23C180912F040E943C07809145049A +:1042300091C0E23009F04DC0813029F020E030E03B +:104240008A81813011F421E030E0E22E8091580221 +:10425000882321F48FEB93E00E94EE0608E512E03C +:104260002EC082E00835180719F080E20E94B506DA +:10427000F80181810E943C07F80182810E943C077D +:10428000EE20C9F08AE30E94B50653E0F52E0FC078 +:10429000F3E0FF1528F4F0FE03C08CE20E94B5069F +:1042A000F801EF0DF11DF39480810E943C07F801A5 +:1042B0008081F81668F3F8018081080F111DF8015C +:1042C0008081882309F448C0F3E006301F0748F2D4 +:1042D00043C0E33049F480E090E071DD28E532E04E +:1042E0002255281B822F36C0E03149F505E314E042 +:1042F000EE24FF24F80180818F3FA1F0EE2019F019 +:1043000080E20E94B5068F2D0E943C078AE30E943E +:10431000B506F80180810E943C07F80181810E9466 +:104320003C07E3940E5F1F4FF394F8E0FF1611F77C +:10433000EE2091F48BEB93E00E94EE060DC0E1318C +:1043400059F480913304909134046DE770E00E9439 +:10435000982B862F0E943C070E94EA0686C0809117 +:104360002F04E817D0F1482F50E0F0E0CA0107967B +:104370008E179F0794F19A8180914504981769F5EB +:104380008C818F3231F4C3016CE042E00E94070C53 +:104390006CC02D818C32C1F42093320481E090E016 +:1043A000909334048093330430E0220F331F220FA4 +:1043B000331F8091950390919603820F931F9093E2 +:1043C00096038093950351C0E41BF50BEE0FF0E0CC 
+:1043D000EB5CFB4F8083218348C080E090E0EFDC02 +:1043E0008C01D7010D900020E9F71197AE19BF0994 +:1043F000B695A7959A2E9394890D911D8855924054 +:104400008F3A910574F5F80190827801DD24D394F8 +:10441000CC24C3941BC057010894A11CB11CC30138 +:104420008C0D911DB50141E00E947D07F2E0FD1564 +:1044300048F4D0FC07C0F7018081843619F48181EB +:1044400080933004F2E0CF0ED3947501D91418F3A1 +:1044500083E00630180748F40D0D111DF801108295 +:1044600004C085EB93E00E94EE0626960FB6F89402 +:10447000DEBF0FBECDBFCF91DF911F910F91FF9097 +:10448000EF90DF90CF90BF90AF909F907F906F9014 +:104490000895A0E8B0E0EFE4F2E20C94C92B80911B +:1044A0004704882309F455C0813011F580919103A8 +:1044B00090919203A0919303B091940320910703EC +:1044C000281709F446C085E30E940E03813129F4C0 +:1044D0008AE30E942A030E947503809191039091C0 +:1044E0009203A0919303B09194038093070331C08A +:1044F0008FE30E940E03D82E87FD28C028988FEFE7 +:104500000E9408037E010894E11CF11C8701CE2C57 +:1045100006C080E00E940803F80181938F01802F7C +:104520008C198D15B0F3289A81E08093930105C012 +:10453000F70181917F010E94B5068E2D8C198D1592 +:10454000B8F30E94EA068FEF8093930181E0809395 +:104550004704C058DF4FE8E00C94E52B0F931F93FE +:10456000CF93DF93EC01DC010D900020E9F7119768 +:10457000A81BB90B8981823751F487E390E00E9430 +:1045800007040E94750381E08093470429C08337A4 +:1045900029F50A2F28988FE70E940803802F825060 +:1045A0000E94080312E004C089810E9408031F5F73 +:1045B00021961017C8F3289A0E94860304C084E647 +:1045C00090E00E9436068AE30E940E0390E08F7707 +:1045D0009070892BA1F70E94750302C010924704C6 +:1045E000DF91CF911F910F9108950F931F93CF9358 +:1045F000DF93EC0100E0802F90E09C01220F331F3D +:10460000280F391FF901E455FF4F1491F901E355C3 +:10461000FF4F25913491112399F0209739F480E2CE +:104620000E94B506812F0E94B50608C088818117B7 +:1046300029F4CE01F901099581E003C00F5FDBCFBA +:1046400080E0DF91CF911F910F9108951F931091FA +:1046500093018093930135C08BEC94E00E9437095D +:104660008A3011F08D3009F5E0910803EE2349F10D +:10467000F0E0E75FFC4F108289E093E0B6DF88232B +:1046800089F48BED93E00E94EE0689E093E00E94AE +:10469000FB0683EC93E00E94EE0680E090E0A5DF4D +:1046A0000E94EA06109208030CC09091080397FD3F +:1046B00008C0E92FF0E0E75FFC4F80839F5F909395 +:1046C00008038091CD04882339F6109393011F913C +:1046D0000895E4E6F0E080818B7F8083299A84B19D +:1046E000876084B9239880E58CBD8DB581608DBDD0 +:1046F00008954091D2085091D3084115510521F4F5 +:104700004091890150918A0120918B0130918C0157 +:104710002115310541F42DB73EB780918701909165 +:104720008801281B390B4217530718F020E030E0AE +:1047300002C0241B350BC901089582E40E94B5060E +:104740008091D2089091D30865E040E20E94400732 +:104750000E94EA0683E50E94B50680918901909146 +:104760008A0165E040E20E9440070E94EA0685E473 +:104770000E94B50680918B0190918C0165E040E22A +:104780000E9440070E94EA0686E40E94B506B1DF57 +:1047900065E040E20E9440070C94EA06A2E0B0E027 +:1047A000E3EDF3E2FCC7E3E5F5E08091F1008193EE +:1047B00085E0EB35F807C9F7809153053091540532 +:1047C000353009F49FC0363048F4313009F44BC01D +:1047D000313070F0333009F023C145C0383009F46E +:1047E000D7C0393009F4FCC0363009F019C1B4C063 +:1047F000803821F0823809F013C108C090914E052D +:1048000080914F05882391F0926010C080915705E8 +:104810008093E9008091EB0090E025E096958795E4 +:104820002A95E1F7982F91701092E9008091E800A5 +:10483000877F8093E8009093F1001092F1008091BF +:10484000E800982F9E7F9093E8008F778093E80090 +:1048500005C080915205882309F4E4C08091E800E6 +:1048600082FFF7CFB3C0882319F0823009F0D8C097 +:1048700090E08F719070009719F00297B9F50CC015 +:1048800080915505813009F0CDC010924F0533302D +:1048900069F580934F052AC080915505882331F52D +:1048A00020915705277009F4BDC02093E90080913D +:1048B000EB0080FF1BC03130A1F48091EB008061E0 
+:1048C0008093EB0081E090E002C0880F991F2A9549 +:1048D000E2F78093EA001092EA008091EB00886092 +:1048E00003C08091EB0080628093EB001092E9009E +:1048F0008091E800877F8093E8008091E800982FFE +:104900009E7F68C0882309F08BC020915505809157 +:10491000E800877F8093E8008091E800982F9E7FD1 +:104920009093E8008F778093E80005C0809152054E +:10493000882309F477C08091E80080FFF7CF922F99 +:104940009F77892F80688093E300992309F46AC0D8 +:1049500083E08093520566C08058823008F060C0C2 +:10496000809155059091560560915705AE014F5FB6 +:104970005F4F0E94C802BC01009709F453C08091A8 +:10498000E800877F8093E80089819A812AD11EC040 +:10499000803809F045C08091E800877F8093E80067 +:1049A000809150058093F1008091E800982F9E7FC0 +:1049B0009093E8008F778093E80004C080915205BF +:1049C000882381F18091E80082FFF8CF8091E80090 +:1049D000982F9B7F9093E8008F778093E80022C008 +:1049E0008823F1F4209155052230E0F48091E8000D +:1049F000877F8093E800209350058091E800982FEE +:104A00009E7F9093E8008F778093E800222311F037 +:104A100084E001C083E0809352050E94720502C0C9 +:104A20000E9434058091E80083FF0AC08091EB006A +:104A300080628093EB008091E800877F8093E8009C +:104A40002296E2E0C8C68093E9008091EB00816085 +:104A50008093EB001092ED006093EC004093ED002A +:104A60008091EE00881F8827881F08951092F40017 +:104A700090E09093E9001092F0001092E80080918D +:104A8000ED008D7F8093ED008091EB008E7F809311 +:104A9000EB009F5F973069F7089580915305882355 +:104AA000BCF404C0809152058823F9F08091E8009D +:104AB00082FFF8CF8091E800982F9B7F9093E800C9 +:104AC0008F778093E800089580915205882369F0DC +:104AD0008091E80080FFF8CF8091E800982F9E7FBA +:104AE0009093E8008F778093E800089594E6809192 +:104AF000EC0080FF05C08091E80080FF05C01FC06A +:104B00008091E80082FD1BC080915205882311F43A +:104B100082E008958091EB0085FF02C081E0089556 +:104B20008091E10082FFE3CF8091E1008B7F809351 +:104B3000E100992311F483E008959150D8CF80E0EB +:104B40000895FC014091590550915A059B01461763 +:104B5000570708F49A0140E032C08091E80083FFD3 +:104B600002C081E008958091E80082FD32C080910A +:104B700052058823A1F18091E80080FF20C005C084 +:104B800081918093F100215030402115310531F0A1 +:104B90008091F2009091F300089790F38091F200D9 +:104BA0009091F30040E0089709F441E08091E8001B +:104BB000982F9E7F9093E8008F778093E8002115CF +:104BC000310559F6442349F604C0809152058823E3 +:104BD00031F08091E80082FFF8CF80E0089582E014 +:104BE0000895DC014091590550915A059B014617E3 +:104BF000570708F49A0140E034C08091E80083FF31 +:104C000002C081E008958091E80082FD34C0809167 +:104C100052058823B1F18091E80080FF22C007C0CF +:104C2000FD011196E491E093F100215030402115EF +:104C3000310531F08091F2009091F300089780F3F4 +:104C40008091F2009091F30040E0089709F441E070 +:104C50008091E800982F9E7F9093E8008F77809353 +:104C6000E8002115310549F6442339F604C0809146 +:104C70005205882331F08091E80082FFF8CF80E070 +:104C8000089582E00895FC0124C08091E80083FF2C +:104C900002C081E0089580915205882341F18091FE +:104CA000E80082FFF2CF06C08091F100819361504D +:104CB000704031F08091F2009091F300892BA1F7C0 +:104CC0008091E800982F9B7F9093E8008F778093E6 +:104CD000E80061157105C9F604C08091520588236A +:104CE00031F08091E80080FFF8CF80E0089582E005 +:104CF000089567D06ED01092520510925005109210 +:104D00004F0510924E058AE482BF19BC89B58260B6 +:104D100089BD09B400FEFDCF8091D800982F9F7700 +:104D20009093D80080688093D8008091D7008160EC +:104D30008093D7008091D8008F7D8093D800809198 +:104D4000E0008B7F8093E0008091E0008E7F809375 +:104D5000E0008091E1008E7F8093E1008091E2008D +:104D600081608093E2008091E100877F8093E10081 +:104D70008091E20088608093E2008091D800816099 +:104D80008093D8000895B5DFE0EEF0E08081816087 +:104D90008083E8EDF0E080818F77808319BC80818B +:104DA0008F7E8083109251050895A3DFE8EDF0E037 
+:104DB00080818061808381E080935105789408959B +:104DC0000895E8EDF0E080818E7F80831092E2000C +:104DD00008951092DA001092E10008951F920F9248 +:104DE0000FB60F9211242F933F934F935F936F93BE +:104DF0007F938F939F93AF93BF93EF93FF93809194 +:104E0000DA0080FF15C08091D80080FF11C080912A +:104E1000DA008E7F8093DA008091D90080FF05C090 +:104E200081E080935205CCDF03C010925205C8DFA9 +:104E30008091E10080FF1DC08091E20080FF19C0D9 +:104E40008091E1008E7F8093E1008091E2008E7F6F +:104E50008093E2008091E20080618093E200809183 +:104E6000D80080628093D80019BC85E080935205F9 +:104E7000A7DF8091E10084FF29C08091E20084FFD8 +:104E800025C019BC89B5826089BD09B400FEFDCF7B +:104E90008091D8008F7D8093D8008091E1008F7E33 +:104EA0008093E1008091E2008F7E8093E200809108 +:104EB000E20081608093E20080915005882311F424 +:104EC00083E001C084E0809352057ADF8091E100A5 +:104ED00083FF23C08091E20083FF1FC08091E10027 +:104EE000877F8093E10082E0809352051092500505 +:104EF0008091E1008E7F8093E1008091E2008E7FBF +:104F00008093E2008091E20080618093E200AEDD58 +:104F100080E060E042E097DD53DFFF91EF91BF91C9 +:104F2000AF919F918F917F916F915F914F913F9141 +:104F30002F910F900FBE0F901F9018951F93809187 +:104F40005205882361F01091E9001092E9008091E8 +:104F5000E80083FF01C022DC17701093E9001F9165 +:104F600008951F93CF93DF93182F53985298109260 +:104F70008A03EA98209A289A8EE190E00E943606E9 +:104F800028988EE190E00E943606289A8DE290E003 +:104F90000E94360680E30E942A0384E690E00E9485 +:104FA0003606113019F0123089F511C0CFEDD3E07B +:104FB000FE0184918134D8F42196FE0164910E940F +:104FC0001C032196F4E0C73ADF0791F710C0CCE24A +:104FD000D4E0FE018491813450F42196FE01649165 +:104FE0000E941C032196F4E0C43FDF0791F7109361 +:104FF0008A0383E30E942A038CE0EEE7F6E0DF01F8 +:105000001D928A95E9F784E00E943B06DF91CF91DB +:105010001F910895FC018181823771F48281882378 +:1050200069F0833711F481E003C0843719F482E01A +:1050300098DF04C080E0FCCF833799F080918A0329 +:10504000813019F0823039F403C083E894E005C060 +:105050008DE794E002C089E794E00E94EE060C948C +:10506000EA060895FF920F931F93CF93DF93182FB3 +:10507000092FF62E28988FEF0E940803212F302F3A +:10508000C901EC0110E005C080E00E94080389938B +:105090001F5F1F15C8F3289ACDB7DEB7E5E098C3A8 +:1050A0001F93882339F485E30E940E038D3011F499 +:1050B00080E03EC01092890686E30E942A0385E3C1 +:1050C0000E940E03182F8130D1F78BE30E942A0330 +:1050D0008AE30E942A0310927E06109280061092A4 +:1050E0007F0610928206109281068AE896E09093DD +:1050F000840680938306109285061093860610928C +:10510000870680918A038093880683E060E00E948E +:105110001C0388E062E00E941C0384E30E942A03CF +:1051200085E30E940E038D30D9F781E080938906D4 +:105130001F910895A2E0B0E0EFE9F8E22FC380915B +:105140008A03882309F4ECC080918906813051F0EC +:10515000813030F0823061F0833009F09DC06FC043 +:1051600081E0DDC04A9B98C082E08093890694C0AC +:105170004B9B92C0809183069091840663E072DF1E +:10518000809188062091830630918406BE016F5F6E +:105190007F4F813061F4C901C6D0882309F0BBC0BC +:1051A000898180937E067CD1AC0181E00CC0C9016D +:1051B00040E008D1882309F0AEC0898180937E0643 +:1051C0006FD1AC0180E060E083D190938006809342 +:1051D0007F0660917F067091800682E069347807CF +:1051E00008F099C06F3F710509F058F486E00E94FD +:1051F0001C0388E060E00E941C0381E08093850628 +:1052000003C086E00E941C0380918306909184066F +:105210000396909384068093830680917F069091F5 +:1052200080060397909382068093810610928606EB +:1052300083E08093890667E00E941C032DC04B9B8E +:105240002BC080918106909182068F3F910509F0D5 +:1052500058F480918506882339F488E060E00E9444 +:105260001C0381E0809385068091830690918406DB +:105270006FE1F8DE80918106909182064F979093BE +:1052800082068093810680918306909184064F96D2 +:1052900090938406809383064A9940C08091890642 
+:1052A000823008F43BC0809183069091840660911F +:1052B0008106D8DE81E08093870680918806813060 +:1052C00051F480917E06ECD0AC018AE896E06BE563 +:1052D00075E0A2D109C080917E06E2D0AC018AE8D7 +:1052E00096E06BE575E00FD1892BA9F482E60E9468 +:1052F000B50610E006C0E55AFA4F80810E943C07CF +:105300001F5F80915B05CCD0E12FF0E0E817F90733 +:1053100090F30E94EA061092890602C080E0C0DE87 +:105320002296E3E057C2CF93DF93FC01EB01A0810B +:105330008181EA2FE295EF70F0E0E95EFE4F508147 +:10534000B0E0AF70B070A95EBE4FAC91E82FE295AF +:10535000EF70F0E0E95EFE4F4081E82FF0E0EF7083 +:10536000F070E95EFE4FE08120E030E0EF3F11F4A5 +:1053700021E030E080E090E04F3F11F481E090E0E8 +:10538000282B392B80E05F3F09F481E090E0AF3FAC +:1053900009F491E0892B90E0282B392B232B11F075 +:1053A00081E00DC05295550F550F507CA295A07FFE +:1053B000A52BAE2B440F440F4A2B488380E0DF918E +:1053C000CF9108950F931F93CF93DF93EC018B013F +:1053D000442319F050E060E01AC08A81E82FF0E021 +:1053E000EF73F070E95CFE4F6081E981F0E0EF70EF +:1053F000F070EE0FFF1FEE0FFF1F829586958695CA +:10540000837090E0E82BF92BE95CFE4F5081A88176 +:10541000E981E295EF70F0E08A2F90E08370907060 +:1054200024E0880F991F2A95E1F7E82BF92BE95C16 +:10543000FE4FE081A695A695B0E0A95CBE4FAC9169 +:1054400020E030E0EF3F11F421E030E080E090E038 +:10545000AF3F11F481E090E0282B392B80E06F3FC3 +:1054600009F481E090E05F3F09F491E0892B90E03E +:10547000282B392B232B11F081E00DC0A295A07FA2 +:10548000AE2BF801A083442329F45295507F562B6C +:10549000F801518380E0DF91CF911F910F91089522 +:1054A000482F8A3110F422E009C090E04A9754E076 +:1054B000959587955A95E1F7282F2D5F842F90E0D9 +:1054C000019630E0220F331F280F391FC9010895BC +:1054D000882351F0662321F09A012F5F3F4F01C0CE +:1054E0009A01220F331F0DC09A01220F331F240F80 +:1054F000351F36952795662311F440FF02C02F5FB4 +:105500003F4FC9010895A0E0B0E0E8E8FAE23FC1EA +:105510005C018B017A01CC24DD24C0E0D0E06BC0BB +:1055200081E0E816F10491F4C501B80141E04ADFD9 +:10553000882309F067C0C095D095D070F8018081AC +:1055400090E0C817D90709F45AC05FC0C501B80177 +:1055500040E038DF882309F055C08EEF9FEFE80E5A +:10556000F91EE2E0F0E0CE0EDF1EE114F10459F086 +:10557000FBE0CF16D104E0F0C6010C9762E170E0C9 +:10558000D7D0892BA9F4AE01409550959A0130707F +:10559000F801818190E028173907B9F5252F3327C5 +:1055A000808190E02817390781F5C0E0D0E01DC068 +:1055B000F1E0EF16F10489F4CE01F80160819DD08D +:1055C000EC019C0120953095232F3327F801818130 +:1055D00090E02817390749F018C0CE01F801608122 +:1055E0008CD0F801618189D0EC0183E090E0A80EB5 +:1055F000B91E0E5F1F4FE114F10409F091CF20E0B6 +:1056000030E005C021E030E002C022E030E0C90116 +:10561000CDB7DEB7EAE0D7C0A0E0B0E0E1E1FBE261 +:10562000B2C03C016B014A01AA24BB24C0E0D0E017 +:1056300000E010E0EE24FF2452C0C301B60173DE87 +:10564000882319F021E030E053C082E088169104ED +:1056500069F09AE0A916B10458F000E010E0C50125 +:105660000A9762E170E064D0892B11F401E010E048 +:10567000E114F10471F0C095D095D070F6018081ED +:1056800090E0C817D90791F5C0E0D0E0EE24FF24E0 +:1056900019C00115110589F09E0120953095232F21 +:1056A0003327F601808190E028173907F9F400E0EC +:1056B00010E041E0E42EF12C05C0CE01F60160813E +:1056C0001CD0EC010894810891080894A11CB11C1D +:1056D00082E090E0680E791E0894C11CD11C8114F0 +:1056E000910409F0AACF20E030E002C022E030E0CF +:1056F000C901CDB7DEB7EEE062C0AC0170E0E5E60F +:10570000FDE39A0120703078232F3327862F90E015 +:1057100080789070440F551F2817390711F04E27D5 +:105720005F277F5F783011F0660FEBCFCA010895D5 +:10573000AA1BBB1B51E107C0AA1FBB1FA617B707B7 +:1057400010F0A61BB70B881F991F5A95A9F78095D3 +:105750009095BC01CD01089597FB092E07260AD02C +:1057600077FD04D0E5DF06D000201AF4709561952E +:105770007F4F0895F6F7909581959F4F08952F924A +:105780003F924F925F926F927F928F929F92AF92D1 
+:10579000BF92CF92DF92EF92FF920F931F93CF931E +:1057A000DF93CDB7DEB7CA1BDB0B0FB6F894DEBFB5 +:1057B0000FBECDBF09942A88398848885F846E84DB +:1057C0007D848C849B84AA84B984C884DF80EE8025 +:1057D000FD800C811B81AA81B981CE0FD11D0FB62E +:1057E000F894DEBF0FBECDBFED010895CF93DF93D8 +:1057F000BC018230910510F462E070E0A091D40801 +:10580000B091D508ED01E0E0F0E040E050E021C0CB +:10581000888199818617970769F48A819B813097DF +:1058200019F09383828304C09093D5088093D408A1 +:10583000FE0134C06817790738F44115510519F095 +:105840008417950708F4AC01FE018A819B819C01B5 +:10585000E9012097E9F641155105A9F1CA01861B16 +:10586000970B049708F4BA01E0E0F0E02AC08D91AC +:105870009C91119784179507F9F46417750781F4C3 +:1058800012968D919C911397309719F09383828390 +:1058900004C09093D5088093D408FD0132964FC080 +:1058A000CA01861B970BFD01E80FF91F61937193E5 +:1058B00002978D939C9343C0FD01828193819C014B +:1058C000D9011097A1F68091D2089091D308892B25 +:1058D00041F48091890190918A019093D30880933B +:1058E000D20840918B0150918C014115510541F432 +:1058F0004DB75EB78091870190918801481B590B85 +:105900002091D2083091D30824173507B0F4CA018A +:10591000821B930B8617970780F0AB014E5F5F4F9A +:105920008417950750F0420F531F5093D3084093AC +:10593000D208F9016193719302C0E0E0F0E0CF0179 +:10594000DF91CF910895CF93DF93009709F450C072 +:10595000EC0122971B821A82A091D408B091D5083D +:10596000109709F140E050E0AC17BD0708F1BB8388 +:10597000AA83FE0121913191E20FF31FAE17BF07F9 +:1059800079F48D919C911197280F391F2E5F3F4F0D +:105990003983288312968D919C9113979B838A83D8 +:1059A0004115510571F4D093D508C093D40820C097 +:1059B00012968D919C911397AD01009711F0DC0127 +:1059C000D3CFFA01D383C28321913191E20FF31F28 +:1059D000CE17DF0769F488819981280F391F2E5F60 +:1059E0003F4FFA01318320838A819B819383828395 +:1059F000DF91CF910895F999FECF92BD81BDF89ABC +:105A0000992780B50895262FF999FECF1FBA92BD28 +:105A100081BD20BD0FB6F894FA9AF99A0FBE01968F +:065A20000895F894FFCF89 +:105A26008025000000000802323100A401060010A3 +:105A3600B07106D8046C02FFFFFFFFFF0302FFFFF1 +:105A46000100FFFFFFFFFFAAA9A6A59A9996956AEE +:105A56006966655A595655FFFFFFFFFFFFFFFFFFB7 +:105A6600FFFF03FF0102FFFFFFFF07FFFF00FFFF2E +:105A76000506FF04FFFFFFFFFFFF0BFF090AFFFFFD +:105A86000FFFFF08FFFFFFFF0D0EFF0CFFFFFFFFDD +:105A9600FFFFFFFFFFFFFF160D0E0B1C191A132C3D +:0E5AA600252623343132292000D608000000C6 +:00000001FF diff --git a/ipes/CUL868IPE/firmware/flash_cul.sh b/ipes/CUL868IPE/firmware/flash_cul.sh new file mode 100755 index 0000000..0ac39c7 --- /dev/null +++ b/ipes/CUL868IPE/firmware/flash_cul.sh @@ -0,0 +1,9 @@ +#! /bin/sh + +set -e + +cd `dirname "$0"` + +dfu-programmer atmega32u4 erase --force +dfu-programmer atmega32u4 flash CUL_V3.hex +dfu-programmer atmega32u4 reset diff --git a/ipes/CUL868IPE/setup-cul868ipe.py b/ipes/CUL868IPE/setup-cul868ipe.py new file mode 100755 index 0000000..0219687 --- /dev/null +++ b/ipes/CUL868IPE/setup-cul868ipe.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python + +from setuptools import setup +from distutils.core import setup +from glob import glob +import sys + +from utils import get_packages, get_pkg_files, OpenMTCSdist, move_config_files + +# name and dir +NAME = "cul868ipe" +BASE_DIR = "." 
+ +# import pkg +sys.path.append(BASE_DIR + "/src") +pkg = __import__(NAME) + +# setup name and version +SETUP_NAME = "openmtc-" + NAME +SETUP_VERSION = pkg.__version__ +SETUP_DESCRIPTION = pkg.__description__ + +# meta +SETUP_AUTHOR = pkg.__author_name__ +SETUP_AUTHOR_EMAIL = pkg.__author_mail__ +SETUP_URL = "http://www.openmtc.org" +SETUP_LICENSE = "Fraunhofer FOKUS proprietary" + +# requirements +SETUP_REQUIRES = pkg.__requires__ +SETUP_INSTALL_REQUIRES = pkg.__requires__ + +# packages +PACKAGES = [NAME] +PACKAGE_DIR = {"": BASE_DIR + "/src"} +all_packages = [] +for package in PACKAGES: + all_packages.extend(get_packages(package, PACKAGE_DIR)) + +# scripts +SETUP_SCRIPTS = glob(BASE_DIR + "/bin/*") + +# package data +PACKAGE_DATA = {NAME: get_pkg_files(BASE_DIR, NAME)} + +# data files +CONFIG_FILES = ("config.json",) +CONFIG_DIR = "/etc/openmtc/" + NAME +CONFIG_DIST_FILES = (BASE_DIR + "/etc/conf/config.json.dist",) +DATA_FILES = [(CONFIG_DIR, CONFIG_DIST_FILES)] + +# cmd class +CMD_CLASS = {'sdist': OpenMTCSdist} + +if __name__ == "__main__": + if 'bdist_wheel' in sys.argv: + raise RuntimeError("This setup.py does not support wheels") + + ############################################################################ + # setup + setup(name=SETUP_NAME, + version=SETUP_VERSION, + description=SETUP_DESCRIPTION, + author=SETUP_AUTHOR, + author_email=SETUP_AUTHOR_EMAIL, + url=SETUP_URL, + license=SETUP_LICENSE, + requires=SETUP_REQUIRES, + install_requires=SETUP_INSTALL_REQUIRES, + package_dir=PACKAGE_DIR, + packages=all_packages, + scripts=SETUP_SCRIPTS, + package_data=PACKAGE_DATA, + data_files=DATA_FILES, + cmdclass=CMD_CLASS + ) + + ############################################################################ + # install + if "install" in sys.argv: + # only do this during install + move_config_files(CONFIG_DIR, CONFIG_FILES) diff --git a/ipes/CUL868IPE/src/cul868ipe/__init__.py b/ipes/CUL868IPE/src/cul868ipe/__init__.py new file mode 100644 index 0000000..e9dc627 --- /dev/null +++ b/ipes/CUL868IPE/src/cul868ipe/__init__.py @@ -0,0 +1,9 @@ +""" +Interworking Proxy for Cul868 devices. 
+""" + +__version__ = "4.9.9" +__description__ = "The OpenMTC Cul868IPE" +__author_name__ = "Ronny Kreuch" +__author_mail__ = "ronny.kreuch@fokus.fraunhofer.de" +__requires__ = [] diff --git a/ipes/CUL868IPE/src/cul868ipe/__init__.pyc b/ipes/CUL868IPE/src/cul868ipe/__init__.pyc new file mode 100644 index 0000000..e051663 Binary files /dev/null and b/ipes/CUL868IPE/src/cul868ipe/__init__.pyc differ diff --git a/ipes/CUL868IPE/src/cul868ipe/__main__.py b/ipes/CUL868IPE/src/cul868ipe/__main__.py new file mode 100644 index 0000000..4ef5bce --- /dev/null +++ b/ipes/CUL868IPE/src/cul868ipe/__main__.py @@ -0,0 +1,56 @@ +import sys +import os +if 'threading' in sys.modules and not os.environ.get('SUPPORT_GEVENT'): + raise Exception('threading module loaded before monkey patching!') +os.environ.setdefault("GEVENT_RESOLVER", "thread") +import gevent.monkey +gevent.monkey.patch_all() + +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser + +from openmtc_app.util import prepare_app, get_value +from openmtc_app.runner import AppRunner as Runner +from .cul_868_ipe import CUL868IPE + +# defaults +default_name = "CUL868IPE" +default_device = "/dev/ttyACM0" +default_ep = "http://localhost:8000" + +# args parser +parser = ArgumentParser( + description="An IPE for the FS20 device connected on a CUL868", + prog="CUL868IPE", + formatter_class=ArgumentDefaultsHelpFormatter) +parser.add_argument("-n", "--name", help="Name used for the AE.") +parser.add_argument("-s", "--ep", help="URL of the local Endpoint.") +parser.add_argument("-d", "--cul-device", help="Device Node of the CUL868.") +parser.add_argument("devices", nargs="*") + +# args, config and logging +args, config = prepare_app(parser, __loader__, __name__, "config.json") + +# variables +nm = get_value("name", (unicode, str), default_name, args, config) +cb = config.get("cse_base", "onem2m") +ep = get_value("ep", (unicode, str), default_ep, args, config) +poas = config.get("poas", ["http://auto:28728"]) +originator_pre = config.get("originator_pre", "//openmtc.org/mn-cse-1") +ssl_certs = config.get("ssl_certs", {}) + +s = config.get("sim", False) +p = int(config.get("sim_period")) +cul_device = get_value('cul_device', (unicode, str), default_device, args, config) +device_mappings = get_value('device_mappings', dict, {}, args, config) +devices = get_value('devices', list, [], args, config) + +# start +app = CUL868IPE( + devices, device=cul_device, sim=s, sim_period=p, + device_mappings=device_mappings, + name=nm, cse_base=cb, poas=poas, + originator_pre=originator_pre, **ssl_certs +) +Runner(app).run(ep) + +print ("Exiting....") diff --git a/ipes/CUL868IPE/src/cul868ipe/cul_868_coordinator.py b/ipes/CUL868IPE/src/cul868ipe/cul_868_coordinator.py new file mode 100644 index 0000000..0d07133 --- /dev/null +++ b/ipes/CUL868IPE/src/cul868ipe/cul_868_coordinator.py @@ -0,0 +1,150 @@ +import select +from collections import defaultdict +from os import system +from random import choice + +from gevent import spawn + +from futile.logging import LoggerMixin +from .parsers import (EM1000EMParser, S300THParser, FS20Parser, HMSParser, + SIMParser) + + +def _hex(n): + s = hex(n)[2:] + l = len(s) + return s.zfill(l + l % 2) + + +class CUL868Coordinator(LoggerMixin): + COMMAND_ON = "11" + COMMAND_OFF = "00" + COMMAND_TOGGLE = "12" + + PROTOCOL_S300TH = "K" + PROTOCOL_EM1000EM = "E" + PROTOCOL_FS20 = "F" + PROTOCOL_HMS = "H" + + def __init__(self, device="/dev/ttyACM1"): + super(CUL868Coordinator, self).__init__() + self.running = False + self.device = device 
+ self.handlers = defaultdict(lambda: defaultdict(list)) + self.parsers = { + "K": S300THParser(), + "E": EM1000EMParser(), + "F": FS20Parser(), + "H": HMSParser() + } + + self.sim_parsers = { + # "K": SIMParser(), + # "E": SIMParser(), + "F": SIMParser(), + # "H": SIMParser() + } + + self.write_handle = self.device_handle = None + + def start(self): + self.device_handle = open(self.device, "r", 0) + self.write_handle = open(self.device, "w", 0) + self.write_handle.write("X01\r\n") + self.running = True + + spawn(self._listener) + + def start_simulation(self, run_forever, period): + run_forever(period, self._generate_simulated_data) + + def _generate_simulated_data(self): + p = choice(self.sim_parsers.keys()) + fake_parser = self.sim_parsers[p] + dev_id, data = fake_parser(p) + handler = self.handlers[p] + + try: + handler(dev_id, data) + except Exception: + self.logger.exception("Error in data handler.") + + def _listener(self): + system("stty -echo -echok -echoke -echoe -echonl < %s" % + (self.device,)) + while self.running: + try: + rlist, _, _ = select.select([self.device_handle], [], [], 0.25) + if not rlist: + continue + line = self.device_handle.readline() # TODO: Make this interruptable + except Exception: + self.logger.exception("Error reading from %s.", self.device) + return + if len(line) == 0: + self.logger.info("Received empty line. Aborting") + return + + if len(line) == 1: + continue + self.logger.debug("Read CUL data: %r", line) + + protocol = line[0] + + if protocol in "*X": + continue + + try: + parser = self.parsers[protocol] + except KeyError: + self.logger.warn("No parser for %s", line) + continue + + self.logger.debug("Have parser for %s: %s", protocol, parser) + + try: + dev_id, data = parser(line) + except Exception: + self.logger.exception("Error parsing line: %s", line) + continue + + self.logger.debug("Parsed data: %s %s", dev_id, data) + + self.logger.debug("%s %s %s", self.handlers, + self.handlers[protocol]) + + handler = self.handlers[protocol] + self.logger.debug("Calling handler: %s", handler) + try: + handler(dev_id, data) + except Exception: + self.logger.exception("Error in data handler.") + + def shutdown(self): + self.logger.info("Shutting down.") + self.running = False + if self.device_handle is not None: + self.device_handle.close() + if self.write_handle is not None: + self.write_handle.close() + + def add_handler(self, protocol, handler): + self.handlers[protocol] = handler + + def _send_fs20(self, house_code, device_code, command): + self.logger.debug("Send FS20: house_code=%s device_code=%s command=%s", + house_code, device_code, command) + s = ''.join( + ("F", _hex(int(house_code)), _hex(int(device_code)), command)) + self.logger.debug("Sending FS20 command: %s", s) + self.write_handle.write(s + "\r\n") + self.logger.debug("Command sent") + + def switch_on(self, house_code, device_code): + self._send_fs20(house_code, device_code, self.COMMAND_ON) + + def switch_off(self, house_code, device_code): + self._send_fs20(house_code, device_code, self.COMMAND_OFF) + + def toggle(self, house_code, device_code): + self._send_fs20(house_code, device_code, self.COMMAND_TOGGLE) diff --git a/ipes/CUL868IPE/src/cul868ipe/cul_868_coordinator.pyc b/ipes/CUL868IPE/src/cul868ipe/cul_868_coordinator.pyc new file mode 100644 index 0000000..8daa47c Binary files /dev/null and b/ipes/CUL868IPE/src/cul868ipe/cul_868_coordinator.pyc differ diff --git a/ipes/CUL868IPE/src/cul868ipe/cul_868_ipe.py b/ipes/CUL868IPE/src/cul868ipe/cul_868_ipe.py new file mode 100644 index 
0000000..529ec4d --- /dev/null +++ b/ipes/CUL868IPE/src/cul868ipe/cul_868_ipe.py @@ -0,0 +1,464 @@ +import time +from openmtc_app.onem2m import XAE +from openmtc_onem2m.model import Container +from collections import namedtuple +from cul_868_coordinator import CUL868Coordinator + +BAUD_RATE = 9600 +NODE_DISCOVER_INTERVAL = 6 +SLEEP_INTERVAL = 0.1 + +CULDevice = namedtuple("CULDevice", ("type", "device_id")) + + +class CUL868IPE(XAE): + max_nr_of_instances = 30 + default_access_right = False + + def __init__(self, cul_devices, device="/dev/ttyACM0", + sim=False, sim_period=3, device_mappings={}, *args, **kw): + super(CUL868IPE, self).__init__(*args, **kw) + self.device = device + + self.fs20 = [] + self.em1000em = [] + self.s300th = [] + self.fs20_sender = [] + self.fs20_brightness = [] + self.fs20_door = [] + self.fs20_window = [] + self.fs20_motion = [] + self.hms100t = [] + self.hms100tf = [] + + self.containers = {} + self.dev_containers = {} + + self.sim = sim + self.sim_period = sim_period + + self.device_mappings = device_mappings + + self._old_fs20_values = {} + + self.cul = CUL868Coordinator(device=device) + + for d in map(lambda s: CULDevice(*s.split(":")[:2]), cul_devices): + if d.type == "fs20": + house_code, device_code = d.device_id.split("-") + self.fs20.append((house_code, device_code)) + elif d.type == "em1000em": + self.em1000em.append(d.device_id) + elif d.type == "s300th": + self.s300th.append(d.device_id) + elif d.type == "fs20_sender": + self.fs20_sender.append(d.device_id) + elif d.type == "fs20_brightness": + self.fs20_brightness.append(d.device_id) + elif d.type == "fs20_door": + self.fs20_door.append(d.device_id) + elif d.type == "fs20_window": + self.fs20_window.append(d.device_id) + elif d.type == "fs20_motion": + self.fs20_motion.append(d.device_id) + elif d.type == "hms100t": + self.hms100t.append(d.device_id) + elif d.type == "hms100tf": + self.hms100tf.append(d.device_id) + else: + raise ValueError("Unknown device type: %s" % (d.type,)) + + def _on_shutdown(self): + self.cul.shutdown() + + def add_device(self, cnt_id, labels, sub_containers): + labels += ["openmtc:device", "openmtc:device:cul868"] + cse_id = self.get_resource(self.cse_base).CSE_ID[1:] + try: + tenant_id, instance_id = cse_id.split('~') + except ValueError: + tenant_id = cse_id + instance_id = 'None' + context = (self.device_mappings[cnt_id] + if cnt_id in self.device_mappings.keys() else None) + + dev_cnt = Container(resourceName=cnt_id, maxNrOfInstances=0, + labels=labels) + dev_cnt = self.create_container(None, dev_cnt) + self.dev_containers[cnt_id] = dev_cnt + + for c_id, l, func in sub_containers: + s_id = cnt_id + '_' + c_id.upper() + + if func: + l = (map(lambda x: "openmtc:actuator_data:%s" % x, l) + if l else []) + l.append('openmtc:actuator_data') + l.append('openmtc:sensor_data') + # if in device mappings, add smart orchestra labels + if context: + l.extend(( + '{}'.format(tenant_id), + '{}/{}'.format(tenant_id, instance_id), + '{}/{}/{}'.format(tenant_id, instance_id, context), + '{}/{}/{}/{}'.format(tenant_id, instance_id, context, c_id) + )) + sub_cnt = Container(resourceName=c_id, maxNrOfInstances=0, + labels=l) + else: + l = map(lambda x: "openmtc:sensor_data:%s" % x, l) if l else [] + l.append('openmtc:actuator_data') + l.append('openmtc:sensor_data') + # if in device mappings, add smart orchestra labels + if context: + l.extend(( + '{}'.format(tenant_id), + '{}/{}'.format(tenant_id, instance_id), + '{}/{}/{}'.format(tenant_id, instance_id, context), + 
'{}/{}/{}/{}'.format(tenant_id, instance_id, context, c_id) + )) + sub_cnt = Container(resourceName=c_id, labels=l) + + self.containers[s_id] = s_cnt = self.create_container(dev_cnt, + sub_cnt) + + if func: + self.add_container_subscription(s_cnt, func) + + return dev_cnt + + def _on_register(self): + for house_code, device_code in self.fs20: + d = "%s_%s" % (house_code, device_code) + handle_switch = self._get_handle_switch(house_code, device_code) + self.add_device('FS20_ST3_%s' % d, ["FS20_ST3", "ST3", "PowerPlug"], + (("switch", ["switch"], handle_switch),)) + + self.cul.add_handler(self.cul.PROTOCOL_S300TH, self._handle_s300th_data) + self.cul.add_handler(self.cul.PROTOCOL_FS20, + self._handle_fs20_sender_data) + self.cul.add_handler(self.cul.PROTOCOL_EM1000EM, + self._handle_em1000em_data) + self.cul.add_handler(self.cul.PROTOCOL_HMS, self._handle_hms_data) + + if self.sim: + self.cul.start_simulation(self.run_forever, self.sim_period) + else: + self.cul.start() + + def _get_handle_switch(self, house_code, device_code): + + def handle_switch(container, content): + if isinstance(content, (str, unicode)): # fallback to old behavior + if content == 'TOGGLE': + self.cul.toggle(house_code, device_code) + elif content == 'ON': + self.cul.switch_on(house_code, device_code) + elif content == 'OFF': + self.cul.switch_off(house_code, device_code) + elif isinstance(content, list): # senml + try: + value = round(float(content[0]['v'])) + if value == 1.0: + self.cul.switch_on(house_code, device_code) + elif value == 0.0: + self.cul.switch_off(house_code, device_code) + except (KeyError, ValueError): + pass + + return handle_switch + + @staticmethod + def _time(): + return format(round(time.time(), 3), '.3f') + + def _get_sensor_data(self, dev_name, measure, unit, value): + entry = { + "bn": "urn:dev:" + dev_name, # basename + "n": measure, # name + "t": self._time() # timestamp + } + + if unit: # unit + entry['u'] = unit + + try: + entry['v'] = float(value) # value + except ValueError: + if isinstance(value, bool): + entry['vb'] = value + elif value.lower() == "true": + entry['vb'] = True + elif value.lower() == "false": + entry['vb'] = False + else: + entry['vs'] = str(value) + + return [entry] + + def _handle_em1000em_data(self, dev_id, data): + self.logger.debug("Handling EM1000EM data: %s", data) + + cnt_id = "EM1000EM_%s" % dev_id + dev_name = "em100em:%s" % dev_id + + try: + self.dev_containers[cnt_id] + except KeyError: + if dev_id in self.em1000em or len(self.em1000em) == 0: + self.add_device(cnt_id, ["EM1000EM"], + (("load", False, None), + ("work", False, None))) + else: + return + + # Load + container_id = cnt_id + "_LOAD" + ci = self._get_sensor_data(dev_name, "load", "W", data.last) + self.push_content(self.containers[container_id], ci) + + # Work + container_id = cnt_id + "_WORK" + ci = self._get_sensor_data(dev_name, "work", "kWh", data.cumulated) + self.push_content(self.containers[container_id], ci) + + def _handle_s300th_data(self, dev_id, data): + self.logger.debug("Handling S300TH data: %s", data) + + cnt_id = "S300TH_%s" % dev_id + dev_name = "s300th:%s" % dev_id + + try: + self.dev_containers[cnt_id] + except KeyError: + if dev_id in self.s300th or len(self.s300th) == 0: + self.add_device(cnt_id, ["S300TH"], + (("temperature", ["temperature"], None), + ("humidity", ["humidity"], None))) + else: + return + + # Temperature + container_id = cnt_id + "_TEMPERATURE" + ci = self._get_sensor_data(dev_name, "temperature", "Cel", + data.temperature) + 
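+ # ci is a single-entry, SenML-style list built by _get_sensor_data (bn/n/t/u plus v, vb or vs); it is pushed as one content instance below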
self.push_content(self.containers[container_id], ci) + + # Humidity + container_id = cnt_id + "_HUMIDITY" + ci = self._get_sensor_data(dev_name, "humidity", "%RH", data.humidity) + self.push_content(self.containers[container_id], ci) + + def _get_fs20_value(self, dev_id, value): + # TODO(rst): handle more command strings (toggle, dim_*, timer) + try: + old_fs20_value = self._old_fs20_values[dev_id] + except KeyError: + old_fs20_value = '' + + if value == 'off': + fs20_value = '0.0' + elif value == 'on_old': + if old_fs20_value.startswith('-'): + fs20_value = old_fs20_value[1:] + else: + fs20_value = old_fs20_value or '1.0' + elif value.startswith('on_'): + fs20_value = str(int(value[3:]) / 16.0) + else: + fs20_value = None + + if fs20_value is not None: + if value == 'off': + self._old_fs20_values[dev_id] = '-' + old_fs20_value + else: + self._old_fs20_values[dev_id] = fs20_value + + return fs20_value + + def _handle_fs20_sender_data(self, dev_id, data): + self.logger.debug("Handling FS20_sender data: %s", data) + self.logger.debug("data is of type" + str(type(data))) + + # motion + if (len(self.fs20_motion) > 0 or len(self.fs20_brightness) > 0 or + len(self.fs20_door) > 0 or len(self.fs20_window) > 0): + if dev_id in self.fs20_motion: + self._handle_fs20_motion_data(dev_id, data) + elif dev_id in self.fs20_brightness: + self._handle_fs20_brightness_data(dev_id, data) + elif dev_id in self.fs20_door: + self._handle_fs20_door_data(dev_id, data) + elif dev_id in self.fs20_window: + self._handle_fs20_window_data(dev_id, data) + else: + cnt_id = "FS20_sender_%s" % dev_id + dev_name = "fs20:%s" % dev_id + + try: + self.dev_containers[cnt_id] + except KeyError: + if dev_id in self.fs20_sender or len(self.fs20_sender) == 0: + self.add_device(cnt_id, ["FS20_sender"], + (("command", ["command"], None),)) + else: + return + + # Command + container_id = cnt_id + "_COMMAND" + value = self._get_fs20_value(dev_id, data.command) + if value is not None: + ci = self._get_sensor_data(dev_name, "command", "%", value) + self.push_content(self.containers[container_id], ci) + + def _handle_fs20_motion_data(self, dev_id, data): + self.logger.debug("Handling FS20_motion data: %s", data) + + cnt_id = "FS20_motion_%s" % (dev_id,) + dev_name = "fs20:%s" % dev_id + + try: + self.dev_containers[cnt_id] + except KeyError: + if dev_id in self.fs20_motion: + self.add_device(cnt_id, ["FS20_sender", "FS20_motion"], + (("motion", ["motion", "command"], None),)) + else: + return + + # Motion + container_id = cnt_id + "_MOTION" + value = self._get_fs20_value(dev_id, data.command) + if value is not None: + ci = self._get_sensor_data(dev_name, "command", "%", value) + self.push_content(self.containers[container_id], ci) + + def _handle_fs20_brightness_data(self, dev_id, data): + self.logger.debug("Handling FS20_brightness data: %s", data) + + cnt_id = "FS20_brightness_%s" % dev_id + dev_name = "fs20:%s" % dev_id + + try: + self.dev_containers[cnt_id] + except KeyError: + if dev_id in self.fs20_brightness or len(self.fs20_brightness) == 0: + self.add_device(cnt_id, ["FS20_sender", "FS20_brightness"], + (("brightness", ["brightness", "command"], + None),)) + else: + return + + # Brightness + container_id = cnt_id + "_BRIGHTNESS" + value = self._get_fs20_value(dev_id, data.command) + if value is not None: + ci = self._get_sensor_data(dev_name, "command", "%", value) + self.push_content(self.containers[container_id], ci) + + def _handle_fs20_door_data(self, dev_id, data): + self.logger.debug("Handling FS20_door data: %s", data) + + 
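+ # mirrors the motion/brightness/window handlers: the device container is created lazily on the first telegram, either for a configured door sensor or for any sender when no door sensors are configured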
cnt_id = "FS20_door_%s" % dev_id + dev_name = "fs20:%s" % dev_id + + try: + self.dev_containers[cnt_id] + except KeyError: + if dev_id in self.fs20_door or len(self.fs20_door) == 0: + self.add_device(cnt_id, + ["FS20_sender", "FS20_door"], + (("door", ["door", "command"], None),)) + else: + return + + # Door + container_id = cnt_id + "_DOOR" + value = self._get_fs20_value(dev_id, data.command) + if value is not None: + ci = self._get_sensor_data(dev_name, "command", "%", value) + self.push_content(self.containers[container_id], ci) + + def _handle_fs20_window_data(self, dev_id, data): + self.logger.debug("Handling FS20_window data: %s", data) + + cnt_id = "FS20_window_%s" % dev_id + dev_name = "fs20:%s" % dev_id + + try: + self.dev_containers[cnt_id] + except KeyError: + if dev_id in self.fs20_window or len(self.fs20_window) == 0: + self.add_device(cnt_id, ["FS20_sender", "FS20_window"], + (("window", ["window", "command"], None),)) + else: + return + + # Window + container_id = cnt_id + "_WINDOW" + value = self._get_fs20_value(dev_id, data.command) + if value is not None: + ci = self._get_sensor_data(dev_name, "command", "%", value) + self.push_content(self.containers[container_id], ci) + + def _handle_hms_data(self, dev_id, data): + if data.device == "HMS100T": + self._handle_hms100t_data(dev_id, data) + elif data.device == " HMS100TF": + self._handle_hms100tf_data(dev_id, data) + + def _handle_hms100t_data(self, dev_id, data): + self.logger.debug("Handling HMS100T data: %s", data) + + cnt_id = "HMS100T_%s" % dev_id + dev_name = "hms100t:%s" % dev_id + + try: + self.dev_containers[cnt_id] + except KeyError: + if dev_id in self.hms100t or len(self.hms100t) == 0: + self.add_device(cnt_id, ["HMS100T"], + (("temperature", ["temperature"], None), + ("battery", False, None))) + else: + return + + # Temperature + container_id = cnt_id + "_TEMPERATURE" + ci = self._get_sensor_data(dev_name, "temperature", "Cel", + data.temperature) + self.push_content(self.containers[container_id], ci) + + # TODO(rst): handle battery + pass + + def _handle_hms100tf_data(self, dev_id, data): + self.logger.debug("Handling HMS100TF data: %s", data) + + cnt_id = "HMS100TF_%s" % dev_id + dev_name = "hms100tf:%s" % dev_id + + try: + self.dev_containers[cnt_id] + except KeyError: + if dev_id in self.hms100t or len(self.hms100t) == 0: + self.add_device(cnt_id, ["HMS100TF"], + (("temperature", ["temperature"], None), + ("humidity", ["humidity"], None), + ("battery", True, None))) + else: + return + + # Temperature + container_id = cnt_id + "_TEMPERATURE" + ci = self._get_sensor_data(dev_name, "temperature", "Cel", + data.temperature) + self.push_content(self.containers[container_id], ci) + + # Humidity + container_id = cnt_id + "_HUMIDITY" + ci = self._get_sensor_data(cnt_id, "humidity", "%RH", data.humidity) + self.push_content(self.containers[container_id], ci) + + # TODO(rst): handle battery + pass diff --git a/ipes/CUL868IPE/src/cul868ipe/cul_868_ipe.pyc b/ipes/CUL868IPE/src/cul868ipe/cul_868_ipe.pyc new file mode 100644 index 0000000..bc08c90 Binary files /dev/null and b/ipes/CUL868IPE/src/cul868ipe/cul_868_ipe.pyc differ diff --git a/ipes/CUL868IPE/src/cul868ipe/parsers.py b/ipes/CUL868IPE/src/cul868ipe/parsers.py new file mode 100644 index 0000000..c87a00b --- /dev/null +++ b/ipes/CUL868IPE/src/cul868ipe/parsers.py @@ -0,0 +1,222 @@ +from abc import abstractmethod +from collections import namedtuple +import random +from futile.logging import LoggerMixin +from random import choice + +S300THData = 
namedtuple("S300THData", ("temperature", "humidity")) +EM1000EMData = namedtuple("EM1000EMData", + ("counter", "cumulated", "last", "top")) +FS20Data = namedtuple("FS20Data", ("command", "duration")) +HMSData = namedtuple("HMSData", + ("device", "temperature", "humidity", "battery")) + + +class Parser(LoggerMixin): + def __call__(self, line): + self.logger.debug("Parsing: %s", line) + result = self._parse(line) + + self.logger.debug("Parse result: %s", result) + return result + + @abstractmethod + def _parse(self, line): + pass + + +class SIMParser(Parser): + def _parse(self, line): + if line == 'K': + dev_id = '1' + temperature = random.uniform(0, 30) + humidity = random.uniform(0, 30) + return dev_id, S300THData(temperature, humidity) + elif line == 'F': + dev_id = '21111111-132' + choice(['1', '2']) + duration = '' + command = choice(['11', '12', '14']) + return dev_id, FS20Data(command, duration) + # elif line == 'E': + # dev_id = line + # counter = random.uniform(0,30) + # cumulated = random.uniform(0,30) + # last =random.uniform(0,30) + # top = random.uniform(0,30) + # + # return dev_id, EM1000EMData(counter, cumulated, last, top) + # elif line == 'H': + # dev_id = '' + # temperature = random.uniform(0,30) + # humidity = random.uniform(0,30) + # battery = random.uniform(0,100) + # return dev_id, HMSData(temperature, humidity, battery) + + +class S300THParser(Parser): + def _parse(self, line): + l1 = int(line[1]) + dev_id = str(int(line[2]) + (l1 & 7)) + + temp = float(line[6] + line[3] + "." + line[4]) + + if l1 > 8: # TODO: Check if this works + temp *= -1 + + humidity = float(line[7] + line[8] + "." + line[5]) + + return dev_id, S300THData(temp, humidity) + + +class EM1000EMParser(Parser): + def _parse(self, line): + dev_id = str(int(line[3:5], 16)) + + counter = int(line[5:7], 16) + cumulated = int(line[9:11] + line[7:9], 16) / 1000.0 + last = int(line[13:15] + line[11:13], 16) * 10 + top = int(line[17:19] + line[15:17], 16) * 10 + + return dev_id, EM1000EMData(counter, cumulated, last, top) + + +class FS20Parser(Parser): + @staticmethod + def int2base(x, base): + import string + + digs = string.digits + string.lowercase + if x < 0: + sign = -1 + elif x == 0: + return '0' + else: + sign = 1 + x *= sign + digits = [] + while x: + digits.append(digs[x % base]) + x /= base + if sign < 0: + digits.append('-') + digits.reverse() + return ''.join(digits) + + def base_convert(self, value, from_base, to_base): + return self.int2base(int(value, from_base), to_base) + + @staticmethod + def base4_digit_increaser(base4): + base4_i = base4.replace("3", "4") + base4_i = base4_i.replace("2", "3") + base4_i = base4_i.replace("1", "2") + base4_i = base4_i.replace("0", "1") + return base4_i + + def _parse(self, line): + # from http://fhz4linux.info/tiki-index.php?page=FS20%20Protocol + + commands = [ + 'off', # 00 + 'on_1', # 01 + 'on_2', # 02 + 'on_3', # 03 + 'on_4', # 04 + 'on_5', # 05 + 'on_6', # 06 + 'on_7', # 07 + 'on_8', # 08 + 'on_9', # 09 + 'on_10', # 10 + 'on_11', # 11 + 'on_12', # 12 + 'on_13', # 13 + 'on_14', # 14 + 'on_15', # 15 + 'on_16', # 16 + 'on_old', # 17 + 'toggle', # 18 + 'dim_up', # 19 + 'dim_down', # 20 + 'dim_up_down', # 21 + 'time_set', # 22 + 'send_state' # 23 + 'off_for_timer', # 24 + 'on_16_for_timer', # 25 + 'on_old_for_timer', # 26 + 'reset', # 27 + 'free', # 28 + 'free', # 29 + 'on_16_for_timer_pre', # 30 + 'on_old_for_timer_pre' # 31 + ] + + line = line.rstrip() + # convert hex string (minus identifier "F") to elv String (base4 + # with digits 1-4) + elv = 
self.base4_digit_increaser( + self.base_convert(line[1:7], 16, 4).zfill(6 * 2)) + + # grab data + hc1 = elv[0:4] + hc2 = elv[4:8] + address = elv[8:10] + sub_address = elv[10:12] + command = int(line[7:9], 16) + extended = command & 0x20 + command &= ~0xE0 + if extended: + duration = ((2 ** min(int(line[9:10], 16), 12)) * + int(line[10:11], 16) * 0.25) + else: + duration = '' + + # make sensor ID consist of house_code and address + dev_id = str(hc1) + str(hc2) + "-" + str(address) + str(sub_address) + + return dev_id, FS20Data(commands[command], duration) + + +class HMSParser(Parser): + devices = { + "0": "HMS100TF", + "1": "HMS100T", + "2": "HMS100WD", + "3": "RM100-2", + "4": "HMS100TFK", # Depending on the onboard jumper it is 4 or 5 + "5": "HMS100TFK", + "6": "HMS100MG", + "8": "HMS100CO", + "e": "HMS100FIT" + } + + def _parse(self, line): + dev_id = line[1:5] + val = line[5:] + + device = self.devices.get(val[1]) + status = int(val[0], 16) + sign = -1 if (status & 8) else 1 + + if device == "HMS100T": + temperature = sign * float(val[5] + val[2] + '.' + val[3]) + humidity = None + battery = 0 + if status & 2: + battery = 1 + if status & 4: + battery = 2 + elif device == "HMS100TF": + temperature = sign * float(val[5] + val[2] + '.' + val[3]) + humidity = sign * float(val[6] + val[7] + '.' + val[4]) + battery = 0 + if status & 2: + battery = 1 + if status & 4: + battery = 2 + else: + temperature = None + humidity = None + battery = None + + return dev_id, HMSData(device, temperature, humidity, battery) diff --git a/ipes/CUL868IPE/src/cul868ipe/parsers.pyc b/ipes/CUL868IPE/src/cul868ipe/parsers.pyc new file mode 100644 index 0000000..c8922b1 Binary files /dev/null and b/ipes/CUL868IPE/src/cul868ipe/parsers.pyc differ diff --git a/ipes/CUL868IPE/src/cul868ipe/test_parsers.py b/ipes/CUL868IPE/src/cul868ipe/test_parsers.py new file mode 100644 index 0000000..91e2174 --- /dev/null +++ b/ipes/CUL868IPE/src/cul868ipe/test_parsers.py @@ -0,0 +1,91 @@ +from parsers import S300THParser, EM1000EMParser, FS20Parser + +__author__ = 'ren-local' + + +def test_parsers(): + # ('1', S300THData(temperature=22.5, humidity=31.3)) + sensor_id, data = (S300THParser()("K01253231")) + assert sensor_id == '1' + assert data.temperature == 22.5 + assert data.humidity == 31.3 + + # ('1', S300THData(temperature=29.7, humidity=38.6)) + sensor_id, data = (S300THParser()("K01976238")) + assert sensor_id == '1' + assert data.temperature == 29.7 + assert data.humidity == 38.6 + + # ('5', S300THData(temperature=27.1, humidity=38.4)) + sensor_id, data = (S300THParser()("K41714238")) + assert sensor_id == '5' + assert data.temperature == 27.1 + assert data.humidity == 38.4 + + # ('6', EM1000EMData(counter=95, cumulated=0.257, last=30, top=60)) + sensor_id, data = (EM1000EMParser()("E02065F010103000600")) + assert sensor_id == '6' + assert data.counter == 95 + assert data.cumulated == 0.257 + assert data.last == 30 + assert data.top == 60 + + # ('6', EM1000EMData(counter=96, cumulated=0.26, last=30, top=60)) + sensor_id, data = (EM1000EMParser()("E020660040103000600")) + assert sensor_id == '6' + assert data.counter == 96 + assert data.cumulated == 0.26 + assert data.last == 30 + assert data.top == 60 + + # ('6', EM1000EMData(counter=97, cumulated=0.263, last=30, top=60)) + sensor_id, data = (EM1000EMParser()("E020661070103000600")) + assert sensor_id == '6' + assert data.counter == 97 + assert data.cumulated == 0.263 + assert data.last == 30 + assert data.top == 60 + + # ('6', EM1000EMData(counter=98, cumulated=0.266, 
last=30, top=50)) + sensor_id, data = (EM1000EMParser()("E0206620A0103000500")) + assert sensor_id == '6' + assert data.counter == 98 + assert data.cumulated == 0.266 + assert data.last == 30 + assert data.top == 50 + + # ('11111112-1414', FS20Data(command='14', duration='3321')) + sensor_id, data = (FS20Parser()("F0001333A4F")) + assert sensor_id == '11111112-1414' + assert data.command == '14' + assert data.duration == '3321' + + # ('11111112-1111', FS20Data(command='11', duration='')) + sensor_id, data = (FS20Parser()("F00010000")) + assert sensor_id == '11111112-1111' + assert data.command == '11' + assert data.duration == '' + + # ('22222222-1211', FS20Data(command='14', duration='3321')) + sensor_id, data = (FS20Parser()("F5555103A4F")) + assert sensor_id == '22222222-1211' + assert data.command == '14' + assert data.duration == '3321' + + # ('11111112-1212', FS20Data(command='14', duration='3321')) + sensor_id, data = (FS20Parser()("F0001113A4F")) + assert sensor_id == '11111112-1212' + assert data.command == '14' + assert data.duration == '3321' + + # ('11111112-4343', FS20Data(command='14', duration='3321')) + sensor_id, data = (FS20Parser()("F0001EE3A4F")) + assert sensor_id == '11111112-4343' + assert data.command == '14' + assert data.duration == '3321' + + # ('11111112-4444', FS20Data(command='14', duration='3321')) + sensor_id, data = (FS20Parser()("F0001FF3A4F")) + assert sensor_id == '11111112-4444' + assert data.command == '14' + assert data.duration == '3321' diff --git a/ipes/CUL868IPE/utils.py b/ipes/CUL868IPE/utils.py new file mode 100644 index 0000000..d8a733f --- /dev/null +++ b/ipes/CUL868IPE/utils.py @@ -0,0 +1,148 @@ +import distutils.command.sdist +import distutils.command.build_py +import os +import subprocess +import sys + + +def echo(msg, *args): + if args: + msg = msg % args + sys.stdout.write(msg + "\n") + + +def get_packages(package, package_dir, excluded_list=None, included_list=None): + included_list = included_list or [] + excluded_list = excluded_list or [] + + try: + root = package_dir[package] + except KeyError: + root = package_dir.get("", ".") + "/" + package + + if not os.path.exists(root): + sys.stderr.write( + "Directory for package %s does not exist: %s\n" % (package, root)) + sys.exit(1) + + def on_error(error): + sys.stderr.write( + "Error while collecting packages for %s: %s\n" % (package, error)) + sys.exit(1) + + packages = [package] + + r_prefix = len(root) + 1 + for path, dirs, files in os.walk(root, onerror=on_error): + is_module = "__init__.py" in files and path != root + excluded = any(map(lambda x: x in path, excluded_list)) + included = any(map(lambda x: x in path, included_list)) + if is_module and (not excluded or included): + packages.append(package + "." 
+ path[r_prefix:].replace("/", ".")) + + return packages + + +def get_pkg_files(base_dir, name): + package_files = [] + pkg_dir = os.path.join(base_dir, 'src', name) + pkg_data_dir = os.path.join(pkg_dir, 'static') + for (path, directories, filenames) in os.walk(pkg_data_dir): + for filename in filenames: + package_files.append(os.path.join(os.path.relpath(path, pkg_dir), + filename)) + return package_files + + +def enable_init_files(init_dir, init_dist_files): + for f in init_dist_files: + os.chmod(os.path.join(init_dir, os.path.basename(f)), 0755) + + +def move_config_files(config_dir, config_files): + for f in config_files: + target_file = os.path.join(config_dir, f) + if not os.path.exists(target_file): + echo("Installing config file %s", target_file) + os.rename(target_file + ".dist", target_file) + # os.chmod(target_file, 0644) + else: + echo("Not overwriting config file %s", target_file) + + +def create_openmtc_user(db_dir=None, log_dir=None): + try: + from pwd import getpwnam + except ImportError: + print "Could not import the 'pwd' module. Skipping user management" + else: + # assuming DB_DIR was created by setup already + try: + pw = getpwnam('openmtc') + except KeyError as e: + try: + # add system user openmtc:openmtc + # useradd --system -UM openmtc + useradd = "useradd --system -UM openmtc" + retcode = subprocess.call(useradd, shell=True) + if retcode: + raise Exception("Failed to add user 'openmtc'") + pw = getpwnam('openmtc') + except Exception as e: + sys.stderr.write("Error creating user: %s\n" % (e, )) + sys.exit(1) + uid = pw.pw_uid + gid = pw.pw_gid + + # set path permissions + if db_dir: + os.chown(db_dir, uid, gid) + if log_dir: + os.chown(log_dir, uid, gid) + + +class OpenMTCSdist(distutils.command.sdist.sdist): + def make_release_tree(self, base_dir, files): + distutils.command.sdist.sdist.make_release_tree(self, base_dir, files) + + script_name = os.path.basename(sys.argv[0]) + + if script_name != "setup.py": + os.rename(base_dir + "/" + script_name, base_dir + "/setup.py") + self.filelist.files.remove(script_name) + self.filelist.files.append("setup.py") + + +class OpenMTCSdistBinary(OpenMTCSdist, object): + def make_release_tree(self, base_dir, files): + super(OpenMTCSdistBinary, self).make_release_tree(base_dir, files) + + script_name = os.path.basename(sys.argv[0]) + + build_py = self.get_finalized_command('build_py') + build_py.compile = 1 + build_py.optimize = 2 + build_py.retain_init_py = 1 + build_py.build_lib = base_dir + build_py.byte_compile( + [base_dir + "/" + f for f in self.filelist.files if + f != script_name and f.endswith(".py")]) + + +class OpenMTCBuildPy(distutils.command.build_py.build_py): + retain_init_py = 0 + + def byte_compile(self, files): + distutils.command.build_py.build_py.byte_compile(self, files) + + +class OpenMTCBuildPyBinary(OpenMTCBuildPy, object): + retain_init_py = 0 + + def byte_compile(self, files): + super(OpenMTCBuildPyBinary, self).byte_compile(files) + + for f in files: + if (f.endswith('.py') and (os.path.basename(f) != "__init__.py" or + not self.retain_init_py)): + os.unlink(f) diff --git a/ipes/cul-868-ipe b/ipes/cul-868-ipe new file mode 100755 index 0000000..4c9fe0c --- /dev/null +++ b/ipes/cul-868-ipe @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +cd $(dirname ${0}) + +. 
./prep-env.sh + +cd CUL868IPE + +PYTHONPATH=${PYTHONPATH}:src exec python -m cul868ipe $@ diff --git a/ipes/prep-env.sh b/ipes/prep-env.sh new file mode 100644 index 0000000..861a0ee --- /dev/null +++ b/ipes/prep-env.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env sh + +. ../common/prep-env.sh + +_SRC_PATH="../openmtc-app/src" +_READLINK_PATH="$(readlink ${_SRC_PATH})" +PYTHONPATH=${PYTHONPATH}:$(pwd)/${_READLINK_PATH:-${_SRC_PATH}} + +echo PYTHONPATH: ${PYTHONPATH} + +export PYTHONPATH diff --git a/openmtc-app/src/openmtc_app/__init__.py b/openmtc-app/src/openmtc_app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/openmtc-app/src/openmtc_app/__init__.pyc b/openmtc-app/src/openmtc_app/__init__.pyc new file mode 100644 index 0000000..e07905e Binary files /dev/null and b/openmtc-app/src/openmtc_app/__init__.pyc differ diff --git a/openmtc-app/src/openmtc_app/exc.py b/openmtc-app/src/openmtc_app/exc.py new file mode 100644 index 0000000..71ac30c --- /dev/null +++ b/openmtc-app/src/openmtc_app/exc.py @@ -0,0 +1,2 @@ +class OpenMTCAppError(Exception): + pass diff --git a/openmtc-app/src/openmtc_app/flask_runner/__init__.py b/openmtc-app/src/openmtc_app/flask_runner/__init__.py new file mode 100644 index 0000000..2b102dd --- /dev/null +++ b/openmtc-app/src/openmtc_app/flask_runner/__init__.py @@ -0,0 +1,89 @@ +from signal import SIGTERM, SIGINT + +from flask import (Flask, request, abort, redirect, url_for, + Response as FlaskResponse) + +from gevent import signal as gevent_signal +from gevent.pywsgi import WSGIServer +from geventwebsocket.handler import WebSocketHandler +from socketio import Server as SioServer, Middleware as SioMiddleware + +from futile.net.http.exc import HTTPError +from openmtc_app.runner import AppRunner + + +class Response(FlaskResponse): + pass + + +class SimpleFlaskRunner(AppRunner): + def __init__(self, m2m_app, port=None, listen_on="0.0.0.0", *args, **kw): + super(SimpleFlaskRunner, self).__init__(m2m_app=m2m_app, *args, **kw) + + self.port = port or 5050 + self.listen_on = listen_on + self.flask_app = Flask(type(self.m2m_app).__module__) + + def _get_server(self): + return WSGIServer((self.listen_on, self.port), self.flask_app) + + def _run(self): + self.m2m_app.run(self, self.m2m_ep) + + _server = self._get_server() + self.logger.debug("Serving on %s:%s", self.listen_on, self.port) + gevent_signal(SIGTERM, _server.stop) + gevent_signal(SIGINT, _server.stop) + _server.serve_forever() + + def add_route(self, route, handler, methods=("POST", "GET")): + def wrapper(): + try: + return handler(request) + except HTTPError as e: + self.logger.exception("Aborting") + abort(e.status) + + self.logger.debug("Adding route: %s -> %s" % (route, handler)) + self.flask_app.add_url_rule(route, view_func=wrapper, + endpoint=route + str(handler), + methods=methods) + + +class FlaskRunner(SimpleFlaskRunner): + def __init__(self, m2m_app, port=None, listen_on="0.0.0.0", *args, **kw): + super(FlaskRunner, self).__init__(m2m_app=m2m_app, port=port, + listen_on=listen_on, *args, **kw) + + @self.flask_app.route("/") + def home(): + return redirect(url_for('static', filename='index.html')) + + self.sio_app = SioServer(async_mode='gevent') + + @self.sio_app.on('connect') + def connect(sid, environ): + self.logger.debug('client connected: %s' % sid) + + def _get_server(self): + return WSGIServer((self.listen_on, self.port), + SioMiddleware(self.sio_app, self.flask_app), + handler_class=WebSocketHandler) + + def emit(self, event, message=None, sid=None): + self.sio_app.emit(event, 
message, room=sid) + + def get_handler_decorator(self, name): + return self.sio_app.on(name) + + def add_message_handler(self, name, handler, client=False, response=False): + + def wrapper(*args, **kw): + if not client: + args = args[1:] + if response: + return handler(*args, **kw) + else: + handler(*args, **kw) + + self.sio_app.on(name, wrapper) diff --git a/openmtc-app/src/openmtc_app/flask_runner/__init__.pyc b/openmtc-app/src/openmtc_app/flask_runner/__init__.pyc new file mode 100644 index 0000000..0e24657 Binary files /dev/null and b/openmtc-app/src/openmtc_app/flask_runner/__init__.pyc differ diff --git a/openmtc-app/src/openmtc_app/notification/__init__.py b/openmtc-app/src/openmtc_app/notification/__init__.py new file mode 100644 index 0000000..6490e22 --- /dev/null +++ b/openmtc-app/src/openmtc_app/notification/__init__.py @@ -0,0 +1,273 @@ +from gevent import spawn +from gevent.pywsgi import WSGIServer +from inspect import getargspec +from futile.logging import LoggerMixin +from openmtc_onem2m.exc import OneM2MError +from openmtc_onem2m.model import ( + EventNotificationCriteria, + NotificationEventTypeE, + Subscription, +) +from openmtc_onem2m.serializer import get_onem2m_decoder +from urlparse import urlparse + +from openmtc_onem2m.util import split_onem2m_address + +_handler_map = {} + + +def register_handler(cls, schemes=()): + _handler_map.update({ + scheme: cls for scheme in map(str.lower, schemes) + }) + + +def get_handler(scheme, poa, callback_func, ssl_certs=None): + return _handler_map[scheme](poa, callback_func, ssl_certs) + + +class NotificationManager(LoggerMixin): + handlers = [] + endpoints = [] + callbacks = {} + + def __init__(self, poas, ep, onem2m_mapper, ca_certs=None, cert_file=None, key_file=None): + """ + :param list poas: + :param str ep: + :param openmtc_onem2m.mapper.OneM2MMapper onem2m_mapper: + """ + self.mapper = onem2m_mapper + self.sp_id, self.cse_id, _ = split_onem2m_address(onem2m_mapper.originator) + self.ssl_certs = { + 'ca_certs': ca_certs, + 'cert_file': cert_file, + 'key_file': key_file + } + + for poa in map(urlparse, poas): + if poa.hostname == 'auto': + poa = poa._replace(netloc="%s:%s" % (self._get_auto_host(ep), poa.port)) + + if not poa.scheme: + poa = poa._replace(scheme='http') + + try: + self.handlers.append(get_handler(poa.scheme, poa, self._handle_callback, + self.ssl_certs)) + self.endpoints.append(poa.geturl()) + except: + pass + + self.logger.debug('Available POAs: %s' % ', '.join(self.endpoints)) + + super(NotificationManager, self).__init__() + + @staticmethod + def _get_auto_host(ep): + try: + import socket + from urlparse import urlparse + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + netloc = urlparse(ep).netloc.split(':') + s.connect((netloc[0], int(netloc[1]))) + host = s.getsockname()[0] + s.close() + except: + host = "127.0.0.1" + + return host + + def _normalize_path(self, path): + path = path[len(self.sp_id):] if path.startswith(self.sp_id) and self.sp_id else path + path = path[len(self.cse_id) + 1:] if path.startswith(self.cse_id) and self.cse_id else path + return path + + def _init(self): + for handler in self.handlers: + try: + handler.start() + except: + pass + + def nop(): + pass + + self._init = nop + + def register_callback(self, func, sur): + self.callbacks[sur] = func if len(getargspec(func)[0]) > 1 \ + else lambda _, **notification: func(notification['rep']) + + def _handle_callback(self, originator, **notification): + sur = notification.pop('sur') + sur = self._normalize_path(sur) + + try: + 
callback = self.callbacks[sur] + except KeyError: + if not sur.startswith('/'): + # TODO(rst): maybe not the best, check alternatives + # assumes originator is always in the form //SP-ID/CSE-ID + sur = originator[originator.rfind('/'):] + '/' + sur + try: + callback = self.callbacks[sur] + except KeyError: + return + else: + return + try: + spawn(callback, originator, **notification) + except: + pass + + def get_expiration_time(self): + return None + + def subscribe(self, path, func, filter_criteria=None, expiration_time=None, + notification_types=(NotificationEventTypeE.updateOfResource,)): + self._init() + + event_notification_criteria = filter_criteria or EventNotificationCriteria() + event_notification_criteria.notificationEventType = ( + event_notification_criteria.notificationEventType or list(notification_types)) + + subscription = self.mapper.create(path, Subscription( + notificationURI=[self.mapper.originator], + expirationTime=expiration_time or self.get_expiration_time(), + eventNotificationCriteria=event_notification_criteria, + )) + + reference = self._normalize_path(subscription.subscriberURI or subscription.path) + self.register_callback(func, reference) + return subscription + + def unsubscribe(self, sur): + self.mapper.delete(sur) + del self.callbacks[sur] + + def shutdown(self): + for subscription in self.callbacks.keys(): + try: + self.unsubscribe(subscription) + except OneM2MError: + pass + + for handler in self.handlers: + try: + handler.stop() + except: + pass + + +class BaseNotificationHandler(object): + def __init__(self, poa, callback_func, ssl_certs=None): + self._endpoint = poa + self._callback = callback_func + self._ssl_certs = ssl_certs + + @classmethod + def _unpack_notification(cls, notification): + return { + 'sur': notification.subscriptionReference, + 'net': notification.notificationEvent.notificationEventType, + 'rep': notification.notificationEvent.representation, + } + + def start(self): + raise NotImplementedError + + def stop(self): + pass + + +class MqttNotificationHandler(BaseNotificationHandler): + _client = None + + def start(self): + from openmtc_onem2m.client.mqtt import get_client + from openmtc_onem2m.transport import OneM2MResponse + from openmtc_onem2m.exc import get_response_status + + def wrapper(request): + notification = self._unpack_notification(request.content) + self._callback(request.originator, **notification) + return OneM2MResponse(status_code=get_response_status(2002), request=request) + + self._client = get_client(self._endpoint.geturl(), handle_request_func=wrapper) + + def stop(self): + self._client.stop() + + +register_handler(MqttNotificationHandler, ('mqtt', 'mqtts', 'secure-mqtt')) + + +class HttpNotificationHandler(BaseNotificationHandler): + server = None + + def __init__(self, poa, callback_func, ssl_certs=None): + super(HttpNotificationHandler, self).__init__(poa, callback_func, ssl_certs) + + self.ca_certs = ssl_certs.get('ca_certs') + self.cert_file = ssl_certs.get('cert_file') + self.key_file = ssl_certs.get('key_file') + + # TODO(rst): maybe tis needs to be tested when the server is started + if poa.scheme == 'https' and not (self.ca_certs and self.cert_file and self.key_file): + raise Exception() + + def start(self): + from flask import ( + Flask, + request, + Response, + ) + + app = Flask(__name__) + + @app.after_request + def attach_headers(response): + response.headers['x-m2m-ri'] = request.headers['x-m2m-ri'] + return response + + @app.route('/', methods=['POST']) + def index(): + assert 'x-m2m-origin' in 
request.headers, 'No originator set' + assert 'x-m2m-ri' in request.headers, 'Missing request id' + assert 'content-type' in request.headers, 'Unspecified content type' + + notification = self._unpack_notification( + get_onem2m_decoder(request.content_type).decode(request.data)) + self._callback(request.headers['x-m2m-origin'], **notification) + + return Response( + headers={ + 'x-m2m-rsc': 2000, + }, + ) + + if self._endpoint.scheme == 'https': + self.server = WSGIServer( + ( + self._endpoint.hostname, + self._endpoint.port or 6050 + ), + application=app, + keyfile=self.key_file, certfile=self.cert_file, ca_certs=self.ca_certs + ) + else: + self.server = WSGIServer( + ( + self._endpoint.hostname, + self._endpoint.port or 6050 + ), + application=app, + ) + spawn(self.server.serve_forever) + + def stop(self): + self.server.stop() + + +register_handler(HttpNotificationHandler, ('http', 'https')) diff --git a/openmtc-app/src/openmtc_app/notification/__init__.pyc b/openmtc-app/src/openmtc_app/notification/__init__.pyc new file mode 100644 index 0000000..96aae55 Binary files /dev/null and b/openmtc-app/src/openmtc_app/notification/__init__.pyc differ diff --git a/openmtc-app/src/openmtc_app/onem2m.py b/openmtc-app/src/openmtc_app/onem2m.py new file mode 100644 index 0000000..ea083d4 --- /dev/null +++ b/openmtc-app/src/openmtc_app/onem2m.py @@ -0,0 +1,744 @@ +from base64 import ( + b64decode, + b64encode, +) +from datetime import datetime +from gevent import ( + spawn, + spawn_later, +) +from iso8601 import parse_date +from json import ( + dumps as json_dumps, + loads as json_loads, +) +from futile.logging import LoggerMixin +import logging +from openmtc.util import ( + UTC, + datetime_now, + datetime_the_future, +) +from openmtc_app.flask_runner import FlaskRunner +from openmtc_app.notification import NotificationManager +from openmtc_onem2m.exc import ( + CSENotFound, + CSENotImplemented, + STATUS_CONFLICT, +) +from openmtc_onem2m.mapper import OneM2MMapper +from openmtc_onem2m.model import ( + AE, + Container, + ContentInstance, + EncodingTypeE, + get_short_member_name, + NotificationEventTypeE, + EventNotificationCriteria) +from openmtc_onem2m.serializer import get_onem2m_decoder +from openmtc_onem2m.transport import OneM2MErrorResponse +import time +import re +from urllib import urlencode + +logging.getLogger("iso8601").setLevel(logging.ERROR) + +# fix missing SSLv3 +try: + from gevent.ssl import PROTOCOL_SSLv3 +except ImportError: + import gevent.ssl + + gevent.ssl.PROTOCOL_SSLv3 = gevent.ssl.PROTOCOL_TLSv1 + + +class XAE(LoggerMixin): + """ Generic OpenMTC application class. + Implements functionality common to all typical OpenMTC applications. 
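+
+    Illustrative usage (a minimal sketch, with placeholder names; assumes a
+    CSE reachable at http://localhost:8000 as in config-gateway.json)::
+
+        from openmtc_app.onem2m import XAE
+        from openmtc_app.runner import AppRunner
+
+        class MyAE(XAE):
+            def _on_register(self):
+                # create a container under the registered AE and push a value
+                cnt = self.create_container(None, "measurements")
+                self.push_content(cnt, {"value": 23})
+
+        AppRunner(MyAE(name="MyAE")).run("http://localhost:8000")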
+ """ + + # TODO(rst): add more features + # support several AEs using the same App-ID and appName + + name = None + containers = () + labels = () + # default_access_right = True + default_lifetime = 3600 + max_nr_of_instances = 3 + resume_registration = remove_registration = True + notification_handlers = {} + mapper = None + notification_manager = None + __app = None + + def __init__(self, name=None, cse_base=None, expiration_time=None, announce_to=None, poas=None, + originator_pre=None, ca_certs=None, cert_file=None, key_file=None, *args, **kw): + super(XAE, self).__init__(*args, **kw) + + self.__subscriptions = [] + + self.name = name or type(self).__name__ + self.cse_base = cse_base or "onem2m" + + ae_id = "C" + self.name + self.originator = (originator_pre + '/' + ae_id) if originator_pre else ae_id + + self.ca_certs = ca_certs + self.cert_file = cert_file + self.key_file = key_file + + if expiration_time is not None: + if isinstance(expiration_time, (str, unicode)): + expiration_time = parse_date(expiration_time) + elif isinstance(expiration_time, (int, float)): + expiration_time = datetime.fromtimestamp(expiration_time, UTC) + + if not isinstance(expiration_time, datetime): + raise ValueError(expiration_time) + + self.default_lifetime = (expiration_time - datetime_now()).total_seconds() + + self.announceTo = announce_to + + self.__resumed_registration = False + self.__known_containers = set() + self.__shutdown = False + + self.allow_duplicate = None + self.runner = None + self.poas = poas or [] + + self.fmt_json_regex = re.compile(r'^application/(?:[^+]+\+)?json$', re.IGNORECASE) + self.fmt_xml_regex = re.compile(r'^application/(?:[^+]+\+)?xml$', re.IGNORECASE) + + def get_expiration_time(self): + if self.default_lifetime is None: + return None + return datetime_the_future(self.default_lifetime) + + @property + def application(self): + return self.__app + + def run(self, runner, cse, allow_duplicate=True): + self.mapper = OneM2MMapper(cse, originator=self.originator, ca_certs=self.ca_certs, + cert_file=self.cert_file, key_file=self.key_file) + self.notification_manager = NotificationManager(self.poas, cse, self.mapper, + ca_certs=self.ca_certs, + cert_file=self.cert_file, + key_file=self.key_file) + + self.allow_duplicate = allow_duplicate + self.runner = runner + self.register() + + def shutdown(self): + """ Graceful shutdown. + Deletes all Applications and Subscriptions. 
+ """ + try: + self._on_shutdown() + except: + self.logger.exception("Error in shutdown handler") + + self.logger.debug("shutdown handler is finished") + + self.__shutdown = True + + self.notification_manager.shutdown() + + self._remove_apps() + + def _remove_apps(self): + if self.remove_registration: + try: + if self.__app: + self.mapper.delete(self.__app) + except: + pass + self.logger.debug("app deleted") + + @staticmethod + def run_forever(period=1000, func=None, *args, **kw): + """ executes a given function repeatingly at a given interval + :param period: (optional) frequency of repeated execution (in Hz) + :param func: (optional) function to be executed + """ + + if func is None: + def func(*_): + pass + + def run_periodically(): + func(*args, **kw) + spawn_later(period, run_periodically) + + return spawn(run_periodically) + + def periodic_discover(self, path, fc, interval, cb, err_cb=None): + """ starts periodic discovery at a given frequency + :param path: start directory inside cse for discovery + :param fc: filter criteria (what to discover) + :param interval: frequency of repeated discovery (in Hz) + :param cb: callback function to return the result of the discovery to + :param err_cb: (optional) callback function for errors to return the error of the discovery to + """ + if not isinstance(fc, dict): + fc = {} + + def run_discovery(o): + try: + cb(self.discover(path, o)) + except OneM2MErrorResponse as error_response: + if err_cb: + return err_cb(error_response) + else: + o['createdAfter'] = datetime_now() + + spawn_later(interval, run_discovery, o) + + return spawn(run_discovery, fc) + + def register(self): + """ Registers the Application with the CSE. """ + self.logger.info("Registering application as %s." % (self.name,)) + try: + poa = self.notification_manager.endpoints + except AttributeError: + poa = [] + app = AE(resourceName=self.name, labels=list(self.labels), + pointOfAccess=poa) + app.announceTo = self.announceTo + app.requestReachability = bool(poa) + + try: + registration = self.create_application(app) + except OneM2MErrorResponse as error_response: + if error_response.response_status_code is STATUS_CONFLICT: + registration = self._handle_registration_conflict(app) + if not registration: + raise + else: + self.logger.error('Error at start up') + self.logger.error(error_response.response_status_code) + raise SystemExit + self.__app = registration + + assert registration.path + + try: + self._on_register() + except (KeyboardInterrupt, SystemExit): + raise + except: + self.logger.exception("Error on initialization") + raise + + def _handle_registration_conflict(self, app): + if not self.resume_registration: + return None + # TODO(rst): update app here for expiration_time and poas + + app = self.get_application(app) + + self.__start_refresher(app) + + self.__resumed_registration = True + + return app + + def emit(self, event, message=None): + """ Websocket emit. """ + if not isinstance(self.runner, FlaskRunner): + raise RuntimeError('Runner is not supporting emit!') + self.runner.emit(event, message) + + def _on_register(self): + pass + + def _on_shutdown(self): + pass + + def get_application(self, application, path=None): + """ Retrieves an Application resource. 
+ :param application: old app instance or appId + :param path: (optional) path in the resource tree + """ + if path is None: + # FIXME(rst): use app path and not cse base path + path = self.cse_base + + if not isinstance(application, AE): + application = AE(resourceName=application) + + name = application.resourceName + + path = "%s/%s" % (path, name) if path else name + app = self.mapper.get(path) + + self.logger.debug("retrieved app: %s" % app) + + return app + + def create_application(self, application, path=None): + """ Creates an Application resource. + + :param application: Application instance or appId as str + :param path: (optional) path in the resource tree + """ + # TODO(rst): set app_id via config + # TODO(rst): set requestReachability based on used runner + if path is None: + path = self.cse_base + + def restore_app(app): + self.logger.warn("Restoring app: %s", app.path) + app.expirationTime = None + self.create_application(app, path=path) + + if not isinstance(application, AE): + application = AE(resourceName=application, App_ID='dummy', requestReachability=False) + else: + if not application.App_ID: + application.App_ID = 'dummy' + if not application.requestReachability: + application.requestReachability = False + + application.expirationTime = application.expirationTime or self.get_expiration_time() + app = self.mapper.create(path, application) + self.logger.debug("Created application at %s", app.path) + app = self.get_application(application, path) + assert app.path + self.__start_refresher(app, restore=restore_app) + self.logger.info("Registration successful: %s." % (app.path,)) + + # TODO(rst): handle when ACP is reimplemented + # if accessRight: + # if not isinstance(accessRight, AccessRight): + # accessRight = AccessRight( + # id="ar", + # selfPermissions={"permission": [{ + # "id": "perm", + # "permissionFlags": { + # "flag": ["READ", "WRITE", "CREATE", "DELETE"] + # }, + # "permissionHolders": { + # "all": "all" + # } + # }]}, + # permissions={"permission": [{ + # "id": "perm", + # "permissionFlags": { + # "flag": ["READ", "WRITE", "CREATE", "DELETE"] + # }, + # "permissionHolders": { + # "all": "all" + # } + # }]} + # ) + # accessRight = self.create_accessRight(app, accessRight) + # + # app.accessRightID = accessRight.path + # + # self.mapper.update(app, ("accessRightID",)) + + return app + + # TODO(rst): use FilterCriteria from model and convert + def discover(self, path=None, filter_criteria=None, unstructured=True): + """ Discovers Container resources. + + :param path: (optional) the target path to start the discovery + :param filter_criteria: (optional) FilterCriteria for the for the discovery + :param unstructured: (optional) set discovery_result_type + """ + if path is None: + path = self.cse_base + + # TODO(rst): use filter_criteria from model + if not filter_criteria: + filter_criteria = {} + path += "?fu=1" + if filter_criteria: + path += "&" + urlencode( + { + get_short_member_name(k): v for k, v in filter_criteria.iteritems() + }, + True + ) + + path += '&drt=' + str(1 if unstructured else 2) + + discovery = self.mapper.get(path) + + return discovery.CONTENT + + def create_container(self, target, container, labels=None, max_nr_of_instances=None): + """ Creates a Container resource. 
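+        Example (an illustrative sketch; names and values are placeholders):
+
+            # target None parents the container under the registered AE
+            cnt = self.create_container(None, "sensor_data",
+                                        labels=["home", "temperature"],
+                                        max_nr_of_instances=10)
+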
+ + :param target: the target resource/path parenting the Container + :param container: the Container resource or a valid container ID + :param labels: (optional) the container's labels + :param max_nr_of_instances: (optional) the container's maximum number + of instances (0=unlimited) + """ + + def restore_container(c): + self.logger.warn("Restoring container: %s", c.path) + c.expirationTime = None + self.__known_containers.remove(c.path) + self.create_container(target, c, labels=labels) + + if target is None: + target = self.__app + + if not isinstance(container, Container): + container = Container(resourceName=container) + + # if we got max instances..set them + if max_nr_of_instances: + container.maxNrOfInstances = max_nr_of_instances + # if we did not set max instances yet, set them + else: + container.maxNrOfInstances = self.max_nr_of_instances + + if container.expirationTime is None: + container.expirationTime = self.get_expiration_time() + + if labels: + container.labels = labels + + path = getattr(target, "path", target) + + try: + container = self.mapper.create(path, container) + except OneM2MErrorResponse as error_response: + if error_response.response_status_code is STATUS_CONFLICT: + c_path = path + '/' + container.resourceName + container.path = c_path + if (self.__resumed_registration and + c_path not in self.__known_containers): + container = self.mapper.update(container) + else: + raise error_response + else: + raise error_response + + self.__known_containers.add(container.path) + self.__start_refresher(container, restore=restore_container) + self.logger.info("Container created: %s." % (container.path,)) + return container + + # TODO(rst): handle when ACP is reimplemented + # def create_access_right(self, application, accessRight): + # """ Creates an AccessRight resource. + # + # :param application: the Application which will contain the AR + # :param accessRight: the AccessRight instance + # """ + # self.logger.debug("Creating accessRight for %s", application) + # + # if application is None: + # application = self.__app + # assert application.path + # + # path = getattr(application, "path", application) + # + # if not path.endswith("/accessRights"): + # path += "/accessRights" + # + # accessRight = self.mapper.create(path, accessRight) + # accessRight = self.mapper.get(accessRight.path) + # self.__start_refresher(accessRight, extra_fields=["selfPermissions"]) + # self.logger.info("accessRight created: %s." % (accessRight.path,)) + # return accessRight + # + # create_accessRight = create_access_right + + def get_resource(self, path, app_local=False): + if app_local: + path = self.__app.path + '/' + path + + if not path: + return None + + try: + return self.mapper.get(path) + except OneM2MErrorResponse: + return None + + def push_content(self, container, content, fmt=None, text=None): + """ Creates a ContentInstance resource in the given container, + wrapping the content. + Defaults to serialising the content as JSON and base64 encodes it. + NOTE: Will attempt to create the container, if not found. 
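+        Example (an illustrative sketch; cnt refers to a Container created
+        beforehand via create_container):
+
+            # dict/list payloads default to application/json, base64-encoded
+            self.push_content(cnt, {"temperature": 22.5})
+            # str payloads default to text/plain and are stored unencoded
+            self.push_content(cnt, "22.5")
+            # text=True stores the JSON without base64 encoding
+            self.push_content(cnt, {"temperature": 22.5}, text=True)
+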
+ + :param container: Container object or container path string + :param content: the content data + :param fmt: + :param text: + """ + path = getattr(container, "path", container) + + if isinstance(content, (str, unicode)): + fmt = 'text/plain' if fmt is None else fmt + text = True if text is None else text + elif isinstance(content, (dict, list)): + fmt = 'application/json' if fmt is None else fmt + text = False if text is None else text + else: + raise CSENotImplemented("Only dict, list and str are supported!") + + if re.search(self.fmt_json_regex, fmt): + if text: + # TODO(rst): check if it should be with masked quotation marks + con = json_dumps(content) + cnf = fmt + ':' + str(EncodingTypeE.plain.value) + # raise CSENotImplemented("Only json as b64 is supported!") + else: + con = b64encode(json_dumps(content)) + cnf = fmt + ':' + str(EncodingTypeE.base64String.value) + elif fmt == 'text/plain': + if text: + con = content + cnf = fmt + ':' + str(EncodingTypeE.plain.value) + else: + con = b64encode(content) + cnf = fmt + ':' + str(EncodingTypeE.base64String.value) + else: + # TODO(rst): add handling of other formats or raise not implemented + raise CSENotImplemented("Only json and text are supported!") + + return self.mapper.create(path, ContentInstance( + content=con, + contentInfo=cnf, + )) + + @staticmethod + def _get_content_from_cin(cin): + if isinstance(cin, ContentInstance): + # TODO(rst): handle contentInfo and decode + # resource.contentInfo -> application/json:1 + # media, encodingType = split(':') + # encodingType = 1 -> base64.decodeString(resource.content) + # encodingType = 2 -> not supported + media_type, encoding_type = cin.contentInfo.split(':') + content = cin.content + try: + if int(encoding_type) == EncodingTypeE.base64String: + content = b64decode(content) + + if media_type == 'application/json': + content = json_loads(content) + except ValueError: + pass + + return content + + return cin + + def get_content(self, container): + """ Retrieve the latest ContentInstance of a Container. + + :param container: Container object or container path string + """ + return self._get_content_from_cin( + self.mapper.get( + getattr(container, 'path', container) + '/latest' + ) + ) + + def _get_notification_data(self, data, content_type): + try: + return get_onem2m_decoder(content_type).\ + decode(data).\ + notificationEvent.\ + representation + # serializer = get_onem2m_decoder(content_type) + # notification = serializer.decode(data) + # resource = notification.notificationEvent.representation + # return resource + except (KeyError, TypeError, ValueError, IndexError): + self.logger.error("Failed to get notification data from %s" % data) + return None + + def _remove_route(self, route): + self.logger.debug("removing route: %s", route) + self.runner.flask_app.url_map._rules = filter( + lambda x: x.rule != route, + self.runner.flask_app.url_map._rules + ) + + def _add_subscription(self, path, _, handler, delete_handler, filter_criteria=None, expiration_time=None): + params = { + 'filter_criteria': filter_criteria, + 'expiration_time': expiration_time, + } + self.add_subscription_handler(path, handler, **params) + # self.notification_manager.subscribe(path, handler, **params) + if delete_handler: + params['types'] = (NotificationEventTypeE.deleteOfResource,) + self.add_subscription_handler(path, delete_handler, **params) + + def add_subscription(self, path, handler, delete_handler=None): + """ Creates a subscription resource at path. 
+ And registers handler to receive notification data. + + :param path: path to subscribe to + :param handler: notification handler + :param delete_handler: reference to delete handling function + """ + self._add_subscription(path, None, handler, delete_handler) + + def add_subscription_handler(self, path, handler, types=(NotificationEventTypeE.updateOfResource, ), + filter_criteria=None, expiration_time=None): + """ + + :param path: + :param handler: + :param types: + :param filter_criteria: + :param expiration_time: + :return: + """ + def subscribe(): + return self.notification_manager.subscribe( + path, + handler, + notification_types=types, + filter_criteria=filter_criteria, + expiration_time=expiration_time + ) + + subscription = subscribe() + + def restore_subscription(): + # called to recreate the subscription + # for some reason subscription is not assigned here, + # so we make it a parameter + self.logger.warn("Restoring subscription: %s", subscription.name) + self.notification_manager.unsubscribe(subscription.subscriberURI or subscription.path) + subscribe() + + # refresh expirationTime regularly + # TODO(sho): This should rather be handled through the notification manager itself + self.__start_refresher(subscription, restore=restore_subscription) + return subscription + + def add_container_subscription(self, container, handler, + delete_handler=None, filter_criteria=None): + """ Creates a Subscription to the ContentInstances of the given + Container. + + :param container: Container object or container path string + :param handler: reference of the notification handling function + :param delete_handler: reference to delete handling function + :param filter_criteria: (optional) FilterCriteria for the subscription + """ + + path = getattr(container, "path", container) + + # check if target is container + if not isinstance(self.mapper.get(path), Container): + raise RuntimeError('Target is not a container.') + + # event notification criteria + filter_criteria = filter_criteria or EventNotificationCriteria() + filter_criteria.notificationEventType = list([ + NotificationEventTypeE.createOfDirectChildResource, + ]) + + def content_handler(cin): + handler(path, self._get_content_from_cin(cin)) + + self._add_subscription( + path, + None, + content_handler, + delete_handler, + filter_criteria + ) + + def __start_refresher(self, instance, extra_fields=(), restore=None): + """ Starts a threading.Timer chain, + to repeatedly update a resource instance's expirationTime. + NOTE: instance.expirationTime should already be set and the instance + created. + + :param instance: resource instance + :param extra_fields: additional fields, needed in the update request + :param restore: function that will restore the instance, if it has + expired accidentally. Has to restart the refresher. 
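+
+        For example, with the interval logic below, an expirationTime 3600 s
+        in the future is refreshed after roughly 3540 s, while one only 80 s
+        ahead is refreshed after about 60 s.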
+ """ + if not instance.expirationTime: + return + interval = time.mktime(instance.expirationTime.timetuple()) - (time.time() + time.timezone) + if interval > 120: + interval -= 60 + else: + interval = max(1, interval * 0.75) + + self.logger.debug("Will update expiration time of %s in %s seconds", instance, interval) + self.runner.set_timer(interval, self.__update_exp_time, instance=instance, extra_fields=extra_fields, restore=restore) + + def start_refresher(self, instance, extra_fields=(), restore=None): + self.__start_refresher(instance, extra_fields=extra_fields, restore=restore) + + def __update_exp_time(self, instance=None, the_future=None, extra_fields=(), + interval=None, offset=None, restore=None): + """ Updates a resource instance's expirationTime to the_future + or a default value sometime in the future. + + :note: If instance is not provided or None or False, self.__app is + updated. + :note: Starts a new Timer. + :param instance: resource instance to update + :param the_future: new expirationTime value + :param extra_fields: additional fields, needed in the update request + :param interval: update interval + :param offset: expirationTime offset (should be >0) + :param restore: function that will restore the instance, if it has + expired accidentally. Has to restart the refresher. + :raise CSENotFound: If the instance could not be found and no restore + was provided. + """ + self.logger.debug("updating ExpirationTime of %s", instance) + if self.__shutdown: + # not sure this ever happens. + return + + interval = interval or 60 * 10 # TODO make configurable + offset = offset or 60 * 10 # 10min default + if not the_future: + the_future = datetime.utcfromtimestamp(time.time() + interval + offset) + fields = ["expirationTime"] + fields.extend(extra_fields) + if not instance: + # does this happen if the instance was deleted? + instance = self.__app + instance.expirationTime = the_future + try: + self.mapper.update(instance, fields) + except CSENotFound as e: + self.logger.warn("ExpirationTime update of %s failed: %s", instance, e) + # subscription disappeared? + # missed the expirationTime? + # mb sync issue?; mb congestion? + if restore: + restore(instance) + return + else: + raise + # NOTE: expirationTime might have been changed by CSE at this point. + # update could/should return the updated instance in this case, but + # doesnt. => additional GET to confirm expirationTime ? 
+ + self.logger.debug("Will update expiration time in %s seconds", interval) + self.runner.set_timer( + interval, + self.__update_exp_time, + instance=instance, + extra_fields=extra_fields, + restore=restore, + ) diff --git a/openmtc-app/src/openmtc_app/onem2m.pyc b/openmtc-app/src/openmtc_app/onem2m.pyc new file mode 100644 index 0000000..cd96f85 Binary files /dev/null and b/openmtc-app/src/openmtc_app/onem2m.pyc differ diff --git a/openmtc-app/src/openmtc_app/runner/__init__.py b/openmtc-app/src/openmtc_app/runner/__init__.py new file mode 100644 index 0000000..099ff22 --- /dev/null +++ b/openmtc-app/src/openmtc_app/runner/__init__.py @@ -0,0 +1,51 @@ +from gevent import spawn_later, wait + +from futile.logging import LoggerMixin + + +class AppRunner(LoggerMixin): + def __init__(self, m2m_app, *args, **kw): + super(AppRunner, self).__init__(*args, **kw) + + self._timers = set() + self.m2m_app = m2m_app + self.m2m_ep = None + + def run(self, m2m_ep): + self.m2m_ep = m2m_ep + + try: + self._run() + except (KeyboardInterrupt, SystemExit): + self.logger.info("Exiting...") + except Exception: + self.logger.exception("Error") + raise + finally: + self.logger.debug("Shutting down") + self._shutdown_app() + for timer in self._timers: + timer.kill() + + def _run(self): + self.m2m_app.run(self, self.m2m_ep) + + wait() + + def _shutdown_app(self): + self.m2m_app.shutdown() + + def set_timer(self, t, f, *args, **kw): + timer = None + + def wrapper(): + self._timers.discard(timer) + f(*args, **kw) + + timer = spawn_later(t, wrapper) + self._timers.add(timer) + return timer + + def cancel_timer(self, timer): + self._timers.discard(timer) + timer.kill() diff --git a/openmtc-app/src/openmtc_app/runner/__init__.pyc b/openmtc-app/src/openmtc_app/runner/__init__.pyc new file mode 100644 index 0000000..8e64f3f Binary files /dev/null and b/openmtc-app/src/openmtc_app/runner/__init__.pyc differ diff --git a/openmtc-app/src/openmtc_app/util.py b/openmtc-app/src/openmtc_app/util.py new file mode 100644 index 0000000..4421e13 --- /dev/null +++ b/openmtc-app/src/openmtc_app/util.py @@ -0,0 +1,75 @@ +import sys +from json import load as json_load +from operator import getitem + +import futile + + +def prepare_app(parser, loader, name, default_config_file): + parser.add_argument("-v", "--verbose", action="count", default=None, + help="Increase verbosity in output. This option can be" + " specified multiple times.") + args = parser.parse_args() + + module_ = loader.fullname.split("." 
+ name).pop(0) + + futile.logging.set_default_level(futile.logging.DEBUG) + logger = futile.logging.get_logger(name) + + config_locations = (".", "/etc/openmtc/" + module_) + + try: + import os.path + for d in config_locations: + config_file = os.path.join(os.path.abspath(d), + default_config_file) + logger.debug("Trying config file location: %s", config_file) + if os.path.isfile(config_file): + break + else: + raise Exception("Configuration file %s not found in any of these " + "locations: %s" % default_config_file, + config_locations) + except Exception as e: + sys.stderr.write(str(e) + "\n") + sys.exit(2) + + try: + with open(config_file) as f: + logger.info("Reading configuration file %s.", config_file) + config = json_load(f) + except IOError as e: + logger.warning("Failed to read configuration file %s: %s", + config_file, e) + config = {} + except Exception as e: + logger.critical("Error reading configuration file %s: %s", + config_file, e) + sys.exit(2) + + if "logging" in config: # TODO init logging + log_conf = config["logging"] + if args.verbose is None: + futile.logging.set_default_level(log_conf.get("level") or + futile.logging.WARNING) + elif args.verbose >= 2: + futile.logging.set_default_level(futile.logging.DEBUG) + else: + futile.logging.set_default_level(futile.logging.INFO) + logfile = log_conf.get("file") + if logfile: + futile.logging.add_log_file(logfile) + else: + futile.logging.set_default_level(futile.logging.DEBUG) + + return args, config + + +def get_value(name, value_type, default_value, args, config): + try: + value = (getattr(args, name.replace(".", "_"), None) or + reduce(getitem, name.split("."), config)) + except KeyError: + value = None + value = value if isinstance(value, value_type) else default_value + return value diff --git a/openmtc-app/src/openmtc_app/util.pyc b/openmtc-app/src/openmtc_app/util.pyc new file mode 100644 index 0000000..e49ec52 Binary files /dev/null and b/openmtc-app/src/openmtc_app/util.pyc differ diff --git a/openmtc-gevent/bin/openmtc-backend-gevent b/openmtc-gevent/bin/openmtc-backend-gevent new file mode 100755 index 0000000..06bdf53 --- /dev/null +++ b/openmtc-gevent/bin/openmtc-backend-gevent @@ -0,0 +1,3 @@ +#!/bin/sh + +exec python -m openmtc_gevent.backend_main $@ diff --git a/openmtc-gevent/bin/openmtc-gateway-gevent b/openmtc-gevent/bin/openmtc-gateway-gevent new file mode 100755 index 0000000..f53a58b --- /dev/null +++ b/openmtc-gevent/bin/openmtc-gateway-gevent @@ -0,0 +1,3 @@ +#!/bin/sh + +exec python -m openmtc_gevent.gateway_main $@ diff --git a/openmtc-gevent/certs/ca-chain.cert.pem b/openmtc-gevent/certs/ca-chain.cert.pem new file mode 100644 index 0000000..3cc177b --- /dev/null +++ b/openmtc-gevent/certs/ca-chain.cert.pem @@ -0,0 +1,28 @@ +-----BEGIN CERTIFICATE----- +MIICETCCAbegAwIBAgICEAEwCgYIKoZIzj0EAwIwYjELMAkGA1UEBhMCREUxDzAN +BgNVBAgMBkJlcmxpbjEPMA0GA1UEBwwGQmVybGluMRIwEAYDVQQKDAlUVSBCZXJs +aW4xCzAJBgNVBAsMAkFWMRAwDgYDVQQDDAdSb290LUNBMB4XDTE3MDMyMDIyNDUz +NFoXDTIxMDkyNTIyNDUzNFowWTELMAkGA1UEBhMCREUxDzANBgNVBAgMBkJlcmxp +bjESMBAGA1UECgwJVFUgQmVybGluMQswCQYDVQQLDAJBVjEYMBYGA1UEAwwPSW50 +ZXJtZWRpYXRlLUNBMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEGYPonMzGYwAw +1rvEMi/mk4BtkVMPTtRpdar/oM2mShiyBzy+d61pc0geq/NHkHzmJi6J9xswNBVl +idljkYKrEKNmMGQwHQYDVR0OBBYEFKfvG63LX/s0lf2x3cuQK39VsaJCMB8GA1Ud +IwQYMBaAFINTz6R8cGHuVKG07DnLwZs2C/DNMBIGA1UdEwEB/wQIMAYBAf8CAQAw +DgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA0gAMEUCIBSRqMrk/0M5HQrYtQ4g +ZZBwIreYjlniv6fqAlBGwtfIAiEAuzxVfwdZxgpZl04zkdU5AQNG25GizkrRwGjJ +y9ZJzGY= +-----END CERTIFICATE----- 
+-----BEGIN CERTIFICATE----- +MIICHzCCAcSgAwIBAgIJAOTZ9WwRMdvQMAoGCCqGSM49BAMCMGIxCzAJBgNVBAYT +AkRFMQ8wDQYDVQQIDAZCZXJsaW4xDzANBgNVBAcMBkJlcmxpbjESMBAGA1UECgwJ +VFUgQmVybGluMQswCQYDVQQLDAJBVjEQMA4GA1UEAwwHUm9vdC1DQTAeFw0xNzAz +MjAyMjQ0NThaFw0yNzAzMTgyMjQ0NThaMGIxCzAJBgNVBAYTAkRFMQ8wDQYDVQQI +DAZCZXJsaW4xDzANBgNVBAcMBkJlcmxpbjESMBAGA1UECgwJVFUgQmVybGluMQsw +CQYDVQQLDAJBVjEQMA4GA1UEAwwHUm9vdC1DQTBZMBMGByqGSM49AgEGCCqGSM49 +AwEHA0IABEPEi5rFtbypZ56HBUDKAxqOrBbQPyahd3fcV7fzHo+K3AKDujQiw+7Y +6pFapS1b14QUc3ScJETvQTa63MYkIcqjYzBhMB0GA1UdDgQWBBSDU8+kfHBh7lSh +tOw5y8GbNgvwzTAfBgNVHSMEGDAWgBSDU8+kfHBh7lShtOw5y8GbNgvwzTAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAKBggqhkjOPQQDAgNJADBGAiEA +r2xYsi2wuHgytIVqvHVZAdSh5wMLdp/mISmDT7o0AVgCIQCZijClEKQbNloJwC5g +KR4LWGX+3W/5YAY6xQdGETeq3A== +-----END CERTIFICATE----- diff --git a/openmtc-gevent/certs/in-cse-1-client-server.cert.pem b/openmtc-gevent/certs/in-cse-1-client-server.cert.pem new file mode 100644 index 0000000..856ad2a --- /dev/null +++ b/openmtc-gevent/certs/in-cse-1-client-server.cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDKDCCAs6gAwIBAgICEBQwCgYIKoZIzj0EAwIwWTELMAkGA1UEBhMCREUxDzAN +BgNVBAgMBkJlcmxpbjESMBAGA1UECgwJVFUgQmVybGluMQswCQYDVQQLDAJBVjEY +MBYGA1UEAwwPSW50ZXJtZWRpYXRlLUNBMB4XDTE3MDcyNDE5MTYzNloXDTE4MDcy +NDE5MTYzNlowejELMAkGA1UEBhMCREUxDzANBgNVBAgMBkJlcmxpbjEPMA0GA1UE +BwwGQmVybGluMRkwFwYDVQQKDBBGT0tVUyBGcmF1bmhvZmVyMQ0wCwYDVQQLDARO +R05JMR8wHQYDVQQDDBZpbi1jc2UtMS1jbGllbnQtc2VydmVyMFkwEwYHKoZIzj0C +AQYIKoZIzj0DAQcDQgAEpFb8yavwuIy49Cq12i+X95Ey8zMeRwuRv7qOXZMFs7D/ +QY5vuLoHQDPz1t4RzmoFWhj7DVGF1JNzkIg2z52nM6OCAWMwggFfMAkGA1UdEwQC +MAAwEQYJYIZIAYb4QgEBBAQDAgbAMDMGCWCGSAGG+EIBDQQmFiRPcGVuU1NMIEdl +bmVyYXRlZCBTZXJ2ZXIgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFAWMmlsZhru2/GEu +Em1iVCkFuWMAMIGNBgNVHSMEgYUwgYKAFKfvG63LX/s0lf2x3cuQK39VsaJCoWak +ZDBiMQswCQYDVQQGEwJERTEPMA0GA1UECAwGQmVybGluMQ8wDQYDVQQHDAZCZXJs +aW4xEjAQBgNVBAoMCVRVIEJlcmxpbjELMAkGA1UECwwCQVYxEDAOBgNVBAMMB1Jv +b3QtQ0GCAhABMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAgYI +KwYBBQUHAwEwLAYDVR0RBCUwI4YWLy9vcGVubXRjLm9yZy9pbi1jc2UtMYIJbG9j +YWxob3N0MAoGCCqGSM49BAMCA0gAMEUCIQCZ20ckL8neJxE4seaAtOL3IyCqBQPG +TPAU9lK7mhx1VwIgIznBnnVVwt2BUREUD7jedNzyOkVv8Lgj9H6WC4Ko0go= +-----END CERTIFICATE----- diff --git a/openmtc-gevent/certs/in-cse-1-client-server.key.pem b/openmtc-gevent/certs/in-cse-1-client-server.key.pem new file mode 100644 index 0000000..2096325 --- /dev/null +++ b/openmtc-gevent/certs/in-cse-1-client-server.key.pem @@ -0,0 +1,8 @@ +-----BEGIN EC PARAMETERS----- +BggqhkjOPQMBBw== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIO0cUFpqRn+CqWba7EZIgvSHYgj0kaTbC8Yi4S6gx2u4oAoGCCqGSM49 +AwEHoUQDQgAEpFb8yavwuIy49Cq12i+X95Ey8zMeRwuRv7qOXZMFs7D/QY5vuLoH +QDPz1t4RzmoFWhj7DVGF1JNzkIg2z52nMw== +-----END EC PRIVATE KEY----- diff --git a/openmtc-gevent/certs/mn-cse-1-client-server.cert.pem b/openmtc-gevent/certs/mn-cse-1-client-server.cert.pem new file mode 100644 index 0000000..42a591e --- /dev/null +++ b/openmtc-gevent/certs/mn-cse-1-client-server.cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDKTCCAs6gAwIBAgICEBUwCgYIKoZIzj0EAwIwWTELMAkGA1UEBhMCREUxDzAN +BgNVBAgMBkJlcmxpbjESMBAGA1UECgwJVFUgQmVybGluMQswCQYDVQQLDAJBVjEY +MBYGA1UEAwwPSW50ZXJtZWRpYXRlLUNBMB4XDTE3MDcyNDE5MjAxNFoXDTE4MDcy +NDE5MjAxNFowejELMAkGA1UEBhMCREUxDzANBgNVBAgMBkJlcmxpbjEPMA0GA1UE +BwwGQmVybGluMRkwFwYDVQQKDBBGT0tVUyBGcmF1bmhvZmVyMQ0wCwYDVQQLDARO +R05JMR8wHQYDVQQDDBZtbi1jc2UtMS1jbGllbnQtc2VydmVyMFkwEwYHKoZIzj0C +AQYIKoZIzj0DAQcDQgAEulrJS0iS9Nfm+QN5T9BANNxPpwHwH330v7YYR/hPHQil 
+HQEXP4uwMzMmrByFZF5xluZNt2DySftfMSvwUQRtAKOCAWMwggFfMAkGA1UdEwQC +MAAwEQYJYIZIAYb4QgEBBAQDAgbAMDMGCWCGSAGG+EIBDQQmFiRPcGVuU1NMIEdl +bmVyYXRlZCBTZXJ2ZXIgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFBaxMYEY0uq3+sx4 +F5GMu9g4MPheMIGNBgNVHSMEgYUwgYKAFKfvG63LX/s0lf2x3cuQK39VsaJCoWak +ZDBiMQswCQYDVQQGEwJERTEPMA0GA1UECAwGQmVybGluMQ8wDQYDVQQHDAZCZXJs +aW4xEjAQBgNVBAoMCVRVIEJlcmxpbjELMAkGA1UECwwCQVYxEDAOBgNVBAMMB1Jv +b3QtQ0GCAhABMA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAgYI +KwYBBQUHAwEwLAYDVR0RBCUwI4YWLy9vcGVubXRjLm9yZy9tbi1jc2UtMYIJbG9j +YWxob3N0MAoGCCqGSM49BAMCA0kAMEYCIQCb4PoHrb96MtW8trbKaaYoZKyF69y/ +HXeaDJ0piW3uUAIhANypn4BX0ELPNVR1zIn29X+GaYGgwlOJfzpW0eLQtQ+l +-----END CERTIFICATE----- diff --git a/openmtc-gevent/certs/mn-cse-1-client-server.key.pem b/openmtc-gevent/certs/mn-cse-1-client-server.key.pem new file mode 100644 index 0000000..fac977c --- /dev/null +++ b/openmtc-gevent/certs/mn-cse-1-client-server.key.pem @@ -0,0 +1,8 @@ +-----BEGIN EC PARAMETERS----- +BggqhkjOPQMBBw== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEINGtUU4c3FW/RdjmeWHHbJmEc8iWZZR5mV5MsbqcMAuQoAoGCCqGSM49 +AwEHoUQDQgAEulrJS0iS9Nfm+QN5T9BANNxPpwHwH330v7YYR/hPHQilHQEXP4uw +MzMmrByFZF5xluZNt2DySftfMSvwUQRtAA== +-----END EC PRIVATE KEY----- diff --git a/openmtc-gevent/certs/test-ae.cert.pem b/openmtc-gevent/certs/test-ae.cert.pem new file mode 100644 index 0000000..3ff8249 --- /dev/null +++ b/openmtc-gevent/certs/test-ae.cert.pem @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICsTCCAlegAwIBAgICEB0wCgYIKoZIzj0EAwIwWTELMAkGA1UEBhMCREUxDzAN +BgNVBAgMBkJlcmxpbjESMBAGA1UECgwJVFUgQmVybGluMQswCQYDVQQLDAJBVjEY +MBYGA1UEAwwPSW50ZXJtZWRpYXRlLUNBMB4XDTE3MDgxNDE2MDUxN1oXDTE4MDgx +NDE2MDUxN1owazELMAkGA1UEBhMCREUxDzANBgNVBAgMBkJlcmxpbjEPMA0GA1UE +BwwGQmVybGluMRkwFwYDVQQKDBBGT0tVUyBGcmF1bmhvZmVyMQ0wCwYDVQQLDARO +R05JMRAwDgYDVQQDDAd0ZXN0LWFlMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE +gucEU4BXYtPUPfFKuBgyJzfQXe2hLcW4JkY7OP8hQtOxgn2Ex+v8PbifqAKLb0XF +yMKqDiYDZ/mbyeX8r0DMdaOB/DCB+TAJBgNVHRMEAjAAMBEGCWCGSAGG+EIBAQQE +AwIGwDAzBglghkgBhvhCAQ0EJhYkT3BlblNTTCBHZW5lcmF0ZWQgQ2xpZW50IENl +cnRpZmljYXRlMB0GA1UdDgQWBBQ/HBO72oRbiYoHVENuQJz5j8O5xjAfBgNVHSME +GDAWgBSn7xuty1/7NJX9sd3LkCt/VbGiQjAOBgNVHQ8BAf8EBAMCBeAwHQYDVR0l +BBYwFAYIKwYBBQUHAwIGCCsGAQUFBwMBMDUGA1UdEQQuMCyGHy8vb3Blbm10Yy5v +cmcvbW4tY3NlLTEvQ1Rlc3QtQUWCCWxvY2FsaG9zdDAKBggqhkjOPQQDAgNIADBF +AiEA2rNTfHdKezvSgIUtDhJb/WgrbS/YmhfvZJsybEOteJcCIDlqyqfCW0Z4FGwm +qkNiQ28FnLtqri/bYbQIdCkAJHFA +-----END CERTIFICATE----- diff --git a/openmtc-gevent/certs/test-ae.key.pem b/openmtc-gevent/certs/test-ae.key.pem new file mode 100644 index 0000000..de77426 --- /dev/null +++ b/openmtc-gevent/certs/test-ae.key.pem @@ -0,0 +1,8 @@ +-----BEGIN EC PARAMETERS----- +BggqhkjOPQMBBw== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIH3gf3E4haYAvIRiDRw8BC+aaMYZlm7uyBNbgP5ofo+yoAoGCCqGSM49 +AwEHoUQDQgAEgucEU4BXYtPUPfFKuBgyJzfQXe2hLcW4JkY7OP8hQtOxgn2Ex+v8 +PbifqAKLb0XFyMKqDiYDZ/mbyeX8r0DMdQ== +-----END EC PRIVATE KEY----- diff --git a/openmtc-gevent/config-backend.json b/openmtc-gevent/config-backend.json new file mode 100644 index 0000000..8a3ea3f --- /dev/null +++ b/openmtc-gevent/config-backend.json @@ -0,0 +1,79 @@ +{ + "global": { + "disable_forwarding": false, + "default_lifetime": 2000, + "additional_host_names": [], + "require_auth": false, + "default_content_type": "application/json" + }, + "database": { + "driver": "openmtc_server.db.nodb2.NoDB2", + "dropDB": true + }, + "logging": { + "level": "ERROR", + "file": null + }, + "onem2m": { + "sp_id": "openmtc.org", + "cse_type": 
"IN-CSE", + "cse_id": "in-cse-1", + "cse_base": "onem2m", + "ssl_certs": { + "key": "certs/in-cse-1-client-server.key.pem", + "crt": "certs/in-cse-1-client-server.cert.pem", + "ca": "certs/ca-chain.cert.pem" + }, + "accept_insecure_certs": false, + "overwrite_originator": { + "enabled": false, + "originator": "/openmtc.org/in-cse-1" + }, + "default_privileges": [ + { + "accessControlOperations": [2, 32], + "accessControlOriginators": ["all"] + } + ], + "dynamic_authorization": { + "enabled": false, + "poa": [] + } + }, + "plugins": { + "openmtc_cse": [ + { + "name": "HTTPTransportPlugin", + "package": "openmtc_cse.plugins.transport_gevent_http", + "disabled": false, + "config": { + "interface": "::", + "port": 18000, + "enable_https": false, + "require_cert": true + } + }, + { + "name": "NotificationHandler", + "package": "openmtc_cse.plugins.notification_handler", + "disabled": false + }, + { + "name": "ExpirationTimeHandler", + "package": "openmtc_cse.plugins.expiration_time_handler", + "disabled": true, + "config": { + "default_lifetime": 10000 + } + }, + { + "name": "AnnouncementHandler", + "package": "openmtc_cse.plugins.announcement_handler", + "disabled": true, + "config": { + "auto_announce": false + } + } + ] + } +} diff --git a/openmtc-gevent/config-gateway.json b/openmtc-gevent/config-gateway.json new file mode 100644 index 0000000..5e6dbe4 --- /dev/null +++ b/openmtc-gevent/config-gateway.json @@ -0,0 +1,104 @@ +{ + "global": { + "disable_forwarding": false, + "default_lifetime": 2000, + "additional_host_names": [], + "require_auth": false, + "default_content_type": "application/json" + }, + "database": { + "driver": "openmtc_server.db.nodb2.NoDB2", + "dropDB": true + }, + "logging": { + "level": "ERROR", + "file": null + }, + "onem2m": { + "sp_id": "openmtc.org", + "cse_type": "MN-CSE", + "cse_id": "mn-cse-1", + "cse_base": "onem2m", + "ssl_certs": { + "key": "certs/mn-cse-1-client-server.key.pem", + "crt": "certs/mn-cse-1-client-server.cert.pem", + "ca": "certs/ca-chain.cert.pem" + }, + "accept_insecure_certs": false, + "overwrite_originator": { + "enabled": false, + "originator": "/openmtc.org/mn-cse-1" + }, + "default_privileges": [ + { + "accessControlOperations": [2, 32], + "accessControlOriginators": ["all"] + } + ], + "dynamic_authorization": { + "enabled": false, + "poa": [] + } + }, + "plugins": { + "openmtc_cse": [ + { + "name": "HTTPTransportPlugin", + "package": "openmtc_cse.plugins.transport_gevent_http", + "disabled": false, + "config": { + "interface": "::", + "port": 8000, + "enable_https": false, + "require_cert": true + } + }, + { + "name": "NotificationHandler", + "package": "openmtc_cse.plugins.notification_handler", + "disabled": false + }, + { + "name": "ExpirationTimeHandler", + "package": "openmtc_cse.plugins.expiration_time_handler", + "disabled": true, + "config": { + "default_lifetime": 10000 + } + }, + { + "name": "RegistrationHandler", + "package": "openmtc_cse.plugins.registration_handler", + "disabled": false, + "config": { + "labels": [ + "openmtc" + ], + "remote_cses": [ + { + "cse_id": "in-cse-1", + "poa": [ + "http://localhost:18000" + ], + "own_poa": [ + "http://localhost:8000" + ], + "cse_base": "onem2m", + "cse_type": "IN_CSE" + } + ], + "interval": 3600, + "offset": 3600 + } + }, + { + "name": "AnnouncementHandler", + "package": "openmtc_cse.plugins.announcement_handler", + "disabled": true, + "config": { + "auto_announce": false + } + } + ] + } +} diff --git a/openmtc-gevent/dependencies.txt b/openmtc-gevent/dependencies.txt new file 
mode 100644 index 0000000..14cf4ab --- /dev/null +++ b/openmtc-gevent/dependencies.txt @@ -0,0 +1,29 @@ +# Dont install via the operating system's package manager +# Install all dependencies via the "pip" tool +# e.g. $ sudo pip install urllib3 +# or $ sudo pip install --requirement dependencies.txt + +urllib3 +gevent>=1.0 +iso8601>=0.1.5 +werkzeug>=0.9 +funcy +netifaces +decorator +mimeparse +gevent_websocket +python_socketio>=1.7.5 +flask +geventhttpclient +enum34 +pyxb==1.2.3 +dtls +blist +simplejson +ujson +pymongo +paho-mqtt +coapthon +rdflib +fyzz +yapps diff --git a/openmtc-gevent/etc/conf/config-backend.json.dist b/openmtc-gevent/etc/conf/config-backend.json.dist new file mode 100644 index 0000000..9d5efcd --- /dev/null +++ b/openmtc-gevent/etc/conf/config-backend.json.dist @@ -0,0 +1,79 @@ +{ + "global": { + "disable_forwarding": false, + "default_lifetime": 2000, + "additional_host_names": [], + "require_auth": false, + "default_content_type": "application/json" + }, + "database": { + "driver": "openmtc_server.db.nodb2.NoDB2", + "dropDB": true + }, + "logging": { + "level": "INFO", + "file": "/var/log/openmtc/backend.log" + }, + "onem2m": { + "sp_id": "openmtc.org", + "cse_type": "IN-CSE", + "cse_id": "in-cse-1", + "cse_base": "onem2m", + "ssl_certs": { + "key": "/etc/openmtc/certs/in-cse-1-client-server.key.pem", + "crt": "/etc/openmtc/certs/in-cse-1-client-server.cert.pem", + "ca": "/etc/openmtc/certs/ca-chain.cert.pem" + }, + "accept_insecure_certs": false, + "overwrite_originator": { + "enabled": false, + "originator": "/openmtc.org/in-cse-1" + }, + "default_privileges": [ + { + "accessControlOperations": [2, 32], + "accessControlOriginators": ["all"] + } + ], + "dynamic_authorization": { + "enabled": false, + "poa": [] + } + }, + "plugins": { + "openmtc_cse": [ + { + "name": "HTTPTransportPlugin", + "package": "openmtc_cse.plugins.transport_gevent_http", + "disabled": false, + "config": { + "interface": "::", + "port": 18000, + "enable_https": false, + "require_cert": true + } + }, + { + "name": "NotificationHandler", + "package": "openmtc_cse.plugins.notification_handler", + "disabled": false + }, + { + "name": "ExpirationTimeHandler", + "package": "openmtc_cse.plugins.expiration_time_handler", + "disabled": true, + "config": { + "default_lifetime": 10000 + } + }, + { + "name": "AnnouncementHandler", + "package": "openmtc_cse.plugins.announcement_handler", + "disabled": true, + "config": { + "auto_announce": false + } + } + ] + } +} diff --git a/openmtc-gevent/etc/conf/config-gateway.json.dist b/openmtc-gevent/etc/conf/config-gateway.json.dist new file mode 100644 index 0000000..5a7b828 --- /dev/null +++ b/openmtc-gevent/etc/conf/config-gateway.json.dist @@ -0,0 +1,104 @@ +{ + "global": { + "disable_forwarding": false, + "default_lifetime": 2000, + "additional_host_names": [], + "require_auth": false, + "default_content_type": "application/json" + }, + "database": { + "driver": "openmtc_server.db.nodb2.NoDB2", + "dropDB": true + }, + "logging": { + "level": "INFO", + "file": "/var/log/openmtc/gateway.log" + }, + "onem2m": { + "sp_id": "openmtc.org", + "cse_type": "MN-CSE", + "cse_id": "mn-cse-1", + "cse_base": "onem2m", + "ssl_certs": { + "key": "/etc/openmtc/certs/mn-cse-1-client-server.key.pem", + "crt": "/etc/openmtc/certs/mn-cse-1-client-server.cert.pem", + "ca": "/etc/openmtc/certs/ca-chain.cert.pem" + }, + "accept_insecure_certs": false, + "overwrite_originator": { + "enabled": false, + "originator": "/openmtc.org/mn-cse-1" + }, + "default_privileges": [ + { + 
"accessControlOperations": [2, 32], + "accessControlOriginators": ["all"] + } + ], + "dynamic_authorization": { + "enabled": false, + "poa": [] + } + }, + "plugins": { + "openmtc_cse": [ + { + "name": "HTTPTransportPlugin", + "package": "openmtc_cse.plugins.transport_gevent_http", + "disabled": false, + "config": { + "interface": "::", + "port": 8000, + "enable_https": false, + "require_cert": true + } + }, + { + "name": "NotificationHandler", + "package": "openmtc_cse.plugins.notification_handler", + "disabled": false + }, + { + "name": "ExpirationTimeHandler", + "package": "openmtc_cse.plugins.expiration_time_handler", + "disabled": true, + "config": { + "default_lifetime": 10000 + } + }, + { + "name": "RegistrationHandler", + "package": "openmtc_cse.plugins.registration_handler", + "disabled": false, + "config": { + "labels": [ + "openmtc" + ], + "remote_cses": [ + { + "cse_id": "in-cse-1", + "poa": [ + "http://localhost:18000" + ], + "own_poa": [ + "http://localhost:8000" + ], + "cse_base": "onem2m", + "cse_type": "IN_CSE" + } + ], + "interval": 3600, + "offset": 3600 + } + }, + { + "name": "AnnouncementHandler", + "package": "openmtc_cse.plugins.announcement_handler", + "disabled": true, + "config": { + "auto_announce": false + } + } + ] + } +} diff --git a/openmtc-gevent/etc/init.d/openmtc-backend b/openmtc-gevent/etc/init.d/openmtc-backend new file mode 100755 index 0000000..a858335 --- /dev/null +++ b/openmtc-gevent/etc/init.d/openmtc-backend @@ -0,0 +1,106 @@ +#!/bin/sh + +### BEGIN INIT INFO +# Provides: openmtc-backend +# Required-Start: $local_fs $network ntp +# Required-Stop: +# Default-Start: 2 3 4 5 +# Default-Stop: 0 1 6 +# Short-Description: openmtc Network Service Capability Layer +### END INIT INFO + +set -e + +# /etc/init.d/openmtc-backend: start and stop the openmtc Backend + +NAME="OpenMTC Backend" +DAEMON=/usr/local/bin/openmtc-backend-gevent +PID_FILE=/var/run/openmtc-backend.pid +CREDENTIALS=openmtc:openmtc + +MAX_RETRIES=3 +MAX_SLEEP=30 + +test -x || exit 0 + +. /lib/lsb/init-functions + +openmtc_start() { + if ! /usr/sbin/ntpdate -u de.pool.ntp.org > /dev/null 2>&1; then + log_warning_msg "Could not sync time."\ + "Consider setting the time manually." + fi + if start-stop-daemon --start --quiet --background --pidfile "${PID_FILE}" --make-pidfile --exec "${DAEMON}" + then + rc=0 + counter=0 + while [ ${counter} -lt ${MAX_SLEEP} ]; do + rc=1 + if [ $(netstat -tulpen | grep $(cat ${PID_FILE})/python | wc -l) -gt 0 ]; then + rc=0 + break + fi + counter=$((counter+1)) + sleep 1 + done + if ! kill -0 `cat "${PID_FILE}"` >/dev/null 2>&1; then + log_failure_msg " failed to start" + rm -f "${PID_FILE}" + rc=1 + fi + else + rc=1 + fi + if [ "${rc}" -eq 0 ]; then + log_end_msg 0 + else + log_end_msg 1 + rm -f "${PID_FILE}" + fi +} + +case "$1" in + start) + log_daemon_msg "Starting " "${NAME}" + if [ -s "${PID_FILE}" ] && kill -0 `cat "${PID_FILE}"` >/dev/null 2>&1; then + log_progress_msg "${NAME} apparently already running." + log_end_msg 0 + exit 0 + fi + openmtc_start + ;; + stop) + log_daemon_msg "Stopping " "${NAME}" + start-stop-daemon --stop --signal INT --quiet --oknodo --pidfile "${PID_FILE}" + log_end_msg 0 + rm -f "${PID_FILE}" + ;; + restart) + set +e + log_daemon_msg "Restarting " "${NAME}" + if [ -s "${PID_FILE}" ] && kill -0 `cat "${PID_FILE}"` >/dev/null 2>&1; then + start-stop-daemon --stop --signal INT --quiet --oknodo --pidfile "${PID_FILE}" || true + sleep 1 + else + log_warning_msg "${NAME} not running, attempting to start." 
+ rm -f "${PID_FILE}" + fi + openmtc_start + ;; + status) + status_of_proc -p "${PID_FILE}" "${DAEMON}" "${NAME}" + if [ $? -eq 0 ]; then + if [ $(netstat -tulpen | grep $(cat ${PID_FILE})/python | wc -l) -gt 0 ]; then + log_success_msg "${NAME} port is reachable" + exit 0 # notreached due to set -e + fi + log_failure_msg "${NAME} port not reachable." + exit 1 + fi + ;; + *) + echo "Usage: /etc/init.d/openmtc-backend {start|stop|restart|status}" + exit 1 +esac + +exit 0 diff --git a/openmtc-gevent/etc/init.d/openmtc-gateway b/openmtc-gevent/etc/init.d/openmtc-gateway new file mode 100755 index 0000000..33dc93f --- /dev/null +++ b/openmtc-gevent/etc/init.d/openmtc-gateway @@ -0,0 +1,106 @@ +#!/bin/sh + +### BEGIN INIT INFO +# Provides: openmtc-gateway +# Required-Start: $local_fs $network ntp +# Required-Stop: +# Default-Start: 2 3 4 5 +# Default-Stop: 0 1 6 +# Short-Description: openmtc Gateway Service Capability Layer +### END INIT INFO + +set -e + +# /etc/init.d/openmtc-gateway: start and stop the openmtc Gateway + +NAME="OpenMTC Gateway" +DAEMON=/usr/local/bin/openmtc-gateway-gevent +PID_FILE=/var/run/openmtc-gateway.pid +CREDENTIALS=openmtc:openmtc + +MAX_RETRIES=3 +MAX_SLEEP=30 + +test -x || exit 0 + +. /lib/lsb/init-functions + +openmtc_start() { + if ! /usr/sbin/ntpdate -u de.pool.ntp.org > /dev/null 2>&1; then + log_warning_msg "Could not sync time."\ + "Consider setting the time manually." + fi + if start-stop-daemon --start --quiet --background --pidfile "${PID_FILE}" --make-pidfile --exec "${DAEMON}" + then + rc=0 + counter=0 + while [ ${counter} -lt ${MAX_SLEEP} ]; do + rc=1 + if [ $(netstat -tulpen | grep $(cat ${PID_FILE})/python | wc -l) -gt 0 ]; then + rc=0 + break + fi + counter=$((counter+1)) + sleep 1 + done + if ! kill -0 `cat "${PID_FILE}"` >/dev/null 2>&1; then + log_failure_msg " failed to start" + rm -f "${PID_FILE}" + rc=1 + fi + else + rc=1 + fi + if [ "${rc}" -eq 0 ]; then + log_end_msg 0 + else + log_end_msg 1 + rm -f "${PID_FILE}" + fi +} + +case "$1" in + start) + log_daemon_msg "Starting " "${NAME}" + if [ -s "${PID_FILE}" ] && kill -0 `cat "${PID_FILE}"` >/dev/null 2>&1; then + log_progress_msg "${NAME} apparently already running." + log_end_msg 0 + exit 0 + fi + openmtc_start + ;; + stop) + log_daemon_msg "Stopping " "${NAME}" + start-stop-daemon --stop --signal INT --quiet --oknodo --pidfile "${PID_FILE}" + log_end_msg 0 + rm -f "${PID_FILE}" + ;; + restart) + set +e + log_daemon_msg "Restarting " "${NAME}" + if [ -s "${PID_FILE}" ] && kill -0 `cat "${PID_FILE}"` >/dev/null 2>&1; then + start-stop-daemon --stop --signal INT --quiet --oknodo --pidfile "${PID_FILE}" || true + sleep 1 + else + log_warning_msg "${NAME} not running, attempting to start." + rm -f "${PID_FILE}" + fi + openmtc_start + ;; + status) + status_of_proc -p "${PID_FILE}" "${DAEMON}" "${NAME}" + if [ $? -eq 0 ]; then + if [ $(netstat -tulpen | grep $(cat ${PID_FILE})/python | wc -l) -gt 0 ]; then + log_success_msg "${NAME} port is reachable" + exit 0 # notreached due to set -e + fi + log_failure_msg "${NAME} port not reachable." 
+ exit 1 + fi + ;; + *) + echo "Usage: /etc/init.d/openmtc-gateway {start|stop|restart|status}" + exit 1 +esac + +exit 0 diff --git a/openmtc-gevent/etc/logrotate.d/openmtc b/openmtc-gevent/etc/logrotate.d/openmtc new file mode 100644 index 0000000..0184bc3 --- /dev/null +++ b/openmtc-gevent/etc/logrotate.d/openmtc @@ -0,0 +1,9 @@ +/var/log/openmtc/*.log { + size 10M + rotate 7 + notifempty + missingok + compress + delaycompress +} + diff --git a/openmtc-gevent/etc/systemd/system/openmtc-backend.service b/openmtc-gevent/etc/systemd/system/openmtc-backend.service new file mode 100644 index 0000000..9936a79 --- /dev/null +++ b/openmtc-gevent/etc/systemd/system/openmtc-backend.service @@ -0,0 +1,13 @@ +[Unit] +Description=OpenMTC Backend +Documentation=http://www.openmtc.org +Wants=ntp.service + +[Service] +User=openmtc +Group=openmtc +Environment=PYTHONUNBUFFERED=true +ExecStart=/usr/local/bin/run-backend + +[Install] +WantedBy=network.target diff --git a/openmtc-gevent/etc/systemd/system/openmtc-gateway.service b/openmtc-gevent/etc/systemd/system/openmtc-gateway.service new file mode 100644 index 0000000..56a68f2 --- /dev/null +++ b/openmtc-gevent/etc/systemd/system/openmtc-gateway.service @@ -0,0 +1,13 @@ +[Unit] +Description=OpenMTC Gateway +Documentation=http://www.openmtc.org +Wants=ntp.service + +[Service] +User=openmtc +Group=openmtc +Environment=PYTHONUNBUFFERED=true +ExecStart=/usr/local/bin/run-gateway + +[Install] +WantedBy=network.target diff --git a/openmtc-gevent/kill-backend b/openmtc-gevent/kill-backend new file mode 100755 index 0000000..cddc7a5 --- /dev/null +++ b/openmtc-gevent/kill-backend @@ -0,0 +1,7 @@ +#!/bin/sh + +# make it possible to specify other signal values, defaults to kill +KILL_SIGNAL=${1:--9} + +pkill ${KILL_SIGNAL} -f backend_main.py + diff --git a/openmtc-gevent/kill-gateway b/openmtc-gevent/kill-gateway new file mode 100755 index 0000000..486e07a --- /dev/null +++ b/openmtc-gevent/kill-gateway @@ -0,0 +1,6 @@ +#!/bin/sh + +# make it possible to specify other signal values, defaults to kill +KILL_SIGNAL=${1:--9} + +pkill ${KILL_SIGNAL} -f gateway_main.py diff --git a/openmtc-gevent/prep-env.sh b/openmtc-gevent/prep-env.sh new file mode 100755 index 0000000..bb43fa9 --- /dev/null +++ b/openmtc-gevent/prep-env.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +. ../common/prep-env.sh + +for d in src ../server/*/src ; do + _SRC_PATH="${d}" + _READLINK_PATH="$(readlink ${_SRC_PATH})" + PYTHONPATH=${PYTHONPATH}:$(pwd)/${_READLINK_PATH:-${_SRC_PATH}} +done + +echo PYTHONPATH: ${PYTHONPATH} + +export PYTHONPATH diff --git a/openmtc-gevent/run-backend b/openmtc-gevent/run-backend new file mode 100755 index 0000000..879fff3 --- /dev/null +++ b/openmtc-gevent/run-backend @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +cd $(dirname ${0}) + +. ./prep-env.sh + +exec python src/openmtc_gevent/backend_main.py -f config-backend.json "$@" diff --git a/openmtc-gevent/run-gateway b/openmtc-gevent/run-gateway new file mode 100755 index 0000000..7ed0dca --- /dev/null +++ b/openmtc-gevent/run-gateway @@ -0,0 +1,7 @@ +#!/bin/sh + +cd $(dirname ${0}) + +. 
./prep-env.sh + +exec python src/openmtc_gevent/gateway_main.py -f config-gateway.json "$@" diff --git a/openmtc-gevent/src/openmtc_gevent/GEventNetworkManager.py b/openmtc-gevent/src/openmtc_gevent/GEventNetworkManager.py new file mode 100644 index 0000000..3b0478a --- /dev/null +++ b/openmtc-gevent/src/openmtc_gevent/GEventNetworkManager.py @@ -0,0 +1,360 @@ +import netifaces +import time +from collections import namedtuple + +from aplus import Promise +from openmtc_server.exc import InterfaceNotFoundException +from openmtc_server.NetworkManager import NetworkManager + +Interface = namedtuple("Interface", ("name", "addresses", "hwaddress")) +Address = namedtuple("Address", ("address", "family")) + + +class GEventNetworkManager(NetworkManager): + def __init__(self, config, *args, **kw): + super(GEventNetworkManager, self).__init__(*args, **kw) + + self._api = None + + self.config = config + + self.polling = True + + self.logger.info("GEventNetworkManager loaded") + + def initialize(self, api): + self._api = api + self.logger.info("GEventNetworkManager initialized") + self.start() + + def start(self): + # self.api.register_connectivity_handler(self.connectivity_request) + self.polling = True + self._api.run_task(self.start_polling) + self.logger.info("GEventNetworkManager started") + + def stop(self): + self.polling = False + self.logger.info("GEventNetworkManager stopped") + + def connectivity_request(self): + """Handles connectivity requests""" + # please note: normally we get an rcat argument, default: rcat=0 + with Promise() as p: + blacklist = ['lo'] + interfaces = netifaces.interfaces() + + interface = next((x for x in interfaces if (x not in blacklist)), + None) + + if interface is None: + p.reject(InterfaceNotFoundException( + "No interfaces found matching request")) + else: + p.fulfill((self._get_interface(interface), 0)) + + return p + + def start_polling(self, timeout=1): + """Poll netifaces information and check for differences, for as long as + self.polling == True. 
+ + :param timeout: Amount of time to wait between polling + """ + last_interfaces = cur_interfaces = netifaces.interfaces() + cur_interfaces_copy = list(cur_interfaces) + + last_ifaddresses = {} + for iface in last_interfaces: + last_ifaddresses[iface] = netifaces.ifaddresses(iface) + + self.logger.debug("polling started") + while self.polling: + try: + cur_interfaces = netifaces.interfaces() + cur_interfaces_copy = list(cur_interfaces) + + intersection = set(last_interfaces) ^ set(cur_interfaces) + if len(intersection) > 0: + self.logger.debug("difference detected") + self.logger.debug("last interfaces: %s", last_interfaces) + self.logger.debug("current interfaces: %s", cur_interfaces) + + for isetface in intersection: + if isetface in cur_interfaces: + # new interface + self.logger.debug("Firing %s event for %s", + "interface_created", isetface) + self._api.events.interface_created.fire( + self._create_interface( + isetface, netifaces.ifaddresses(isetface))) + else: + # removed interface + self.logger.debug("Firing %s event for %s", + "interface_removed", isetface) + self._api.events.interface_removed.fire( + self._create_interface( + isetface, last_ifaddresses[isetface])) + + for iface in cur_interfaces: + cur_ifaddresses = netifaces.ifaddresses(iface) + if (iface in last_ifaddresses and + last_ifaddresses[iface] != cur_ifaddresses): + self._check_ifaddresses_diff(last_ifaddresses[iface], + cur_ifaddresses, iface) + + last_ifaddresses[iface] = cur_ifaddresses + except Exception as e: + self.logger.exception("Something went wrong during polling: %s", + e) + finally: + # updating last stuff to current stuff + last_interfaces = cur_interfaces_copy + time.sleep(timeout) + + self.logger.debug("polling done") + + def get_interfaces(self): + """Returns all known network interfaces + + :return Promise([Interface]): a promise for a list of interfaces + """ + with Promise() as p: + interfaces = [] + for iface in netifaces.interfaces(): + interfaces.append(self._get_interface(iface)) + + # check if array has duplicates + # does this even work with namedtuple(s)? 
+ # interfaces = list(set(interfaces)) + p.fulfill(interfaces) + return p + + def get_interface(self, name): + """Returns an Interface object identified by name + + :param name: name of interface + :return Promise(Interface): a promise for an interface + :raise InterfaceNotFoundException: if interface was not found + """ + with Promise() as p: + + if name not in netifaces.interfaces(): + p.reject(InterfaceNotFoundException("%s was not found" % name)) + else: + p.fulfill(self._get_interface(name)) + return p + + def get_addresses(self, interface=None): + """Get addresses of a given interface or all addresses if :interface: is + None + + :param interface: name of interface + :return: Promise([Address]): a promise for a list of addresses + """ + with Promise() as p: + p.fulfill(self._get_addresses(interface)) + + return p + + def _get_addresses_from_ifaddresses(self, ifaddresses): + """Get addresses of a given interface + + :param ifaddresses: raw addresses of interface (from netifaces) + :return: list of addresses + """ + addresses = [] + for family in ifaddresses: + if family != netifaces.AF_LINK: # no hwaddr + for addr in ifaddresses[family]: + a = addr["addr"] + if family == netifaces.AF_INET6: + a = self._remove_ipv6_special_stuff(a) + addresses.append( + Address(address=a, family=family)) + + return addresses + + def _get_addresses(self, iface=None): + """Get addresses of a given interface + + :param iface: name of interface + :return: list of addresses + """ + + if iface is None: + interfaces = netifaces.interfaces() + else: + interfaces = [iface] + + addresses = [] + + for interface in interfaces: + n_addresses = netifaces.ifaddresses(interface) + addresses += self._get_addresses_from_ifaddresses(n_addresses) + + # check if array has duplicates + # addresses = list(set(addresses)) + + return addresses + + def _create_interface(self, name, ifaddresses): + """Create Interface tuple based on given interfaces addresses. 
(function + independent of netifaces) + + :param name: + :param ifaddresses: + :return: + """ + addresses = self._get_addresses_from_ifaddresses(ifaddresses) + try: + hwaddress = ifaddresses[netifaces.AF_LINK][0]["addr"] + except (IndexError, KeyError): + self.logger.debug("No hardware address found for %s!", name) + hwaddress = None + + return Interface(name=name, + addresses=addresses, + hwaddress=hwaddress) + + def _get_interface(self, name): + """Returns an Interface object identified by name + + :param name: name of interface + :return Interface: interface + :raise UnknownInterface: if interface was not found + """ + if name not in netifaces.interfaces(): + raise InterfaceNotFoundException("%s was not found" % name) + else: + ifaddresses = netifaces.ifaddresses(name) + addresses = self._get_addresses_from_ifaddresses(ifaddresses) + try: + hwaddress = ifaddresses[netifaces.AF_LINK][0]["addr"] + except (IndexError, KeyError): + self.logger.debug("No hardware address found for %s!", name) + hwaddress = None + return Interface(name=name, + addresses=addresses, + hwaddress=hwaddress) + + def _check_ifaddresses_diff(self, lifaddr, cifaddr, iface): + """parses last and current interface addresses of a given interface and + fires events for discovered differences + + :param lifaddr: dict of family:addresses (last addresses) + :param cifaddr: dict of family:addresses (curr addresses) + :param iface: str name of interface (needed only to create interface for + event firing) + """ + self.logger.debug("checking difference of \r\n%s vs \r\n%s", lifaddr, + cifaddr) + + intersection = set(lifaddr.keys()) ^ set(cifaddr.keys()) + if len(intersection) > 0: + self.logger.debug( + "Sensing a change in address families of interface %s", iface) + # first check if new address family + self.logger.debug("Iterating through %s", intersection) + for isectkey in intersection: + if isectkey in cifaddr.keys(): + for addr in cifaddr.get(isectkey, []): + self.logger.debug("Firing %s event for %s of %s", + "address_created", addr, iface) + a = Address(address=addr["addr"], family=isectkey) + self._api.events.address_created.fire(iface, a) + elif isectkey in lifaddr.keys(): + for addr in lifaddr.get(isectkey, []): + self.logger.debug("Firing %s event for %s of %s", + "address_removed", addr, iface) + a = Address(address=addr["addr"], family=isectkey) + self._api.events.address_removed.fire(iface, a) + + else: + for key in lifaddr.keys(): + # check for removed addresses (contained only in lifaddr) + removed_addr = [] + for laddr in lifaddr.get(key): + for caddr in cifaddr.get(key): + d = DictDiffer(caddr, laddr) + + if len(d.changed()) == 0: + # this means both addresses are the same -> remove + # from removed_addr list + if laddr in removed_addr: + removed_addr.remove(laddr) + break + + else: + # else add address to unknown/removed addresses + if laddr not in removed_addr: + removed_addr.append(laddr) + + if len(removed_addr) > 0: + self.logger.debug("removed addresses found: %s", + removed_addr) + for raddr in removed_addr: + self.logger.debug("Firing %s event for %s of %s", + "address_removed", raddr, iface) + a = Address(address=raddr["addr"], family=key) + self._api.events.address_removed.fire(iface, a) + + # now check for added addresses (contained only in cifaddr) + added_addr = [] + for caddr in cifaddr.get(key): + for laddr in lifaddr.get(key): + d = DictDiffer(caddr, laddr) + + if len(d.changed()) == 0: + # this means both addresses are the same -> remove + # from added_addr list + if caddr in added_addr: + 
added_addr.remove(caddr) + break + + else: + # else add address to unknown/added addresses + if caddr not in added_addr: + added_addr.append(caddr) + + if len(added_addr) > 0: + self.logger.debug("added addresses found: %s", added_addr) + for aaddr in added_addr: + self.logger.debug("Firing %s event for %s of %s", + "address_created", aaddr, iface) + a = Address(address=aaddr["addr"], family=key) + self._api.events.address_created.fire(iface, a) + + @staticmethod + def _remove_ipv6_special_stuff(address): + return address.split("%")[0] + + +class DictDiffer(object): + """ + Calculate the difference between two dictionaries as: + (1) items added + (2) items removed + (3) keys same in both but changed values + (4) keys same in both and unchanged values + """ + + def __init__(self, current_dict, past_dict): + self.current_dict, self.past_dict = current_dict, past_dict + self.set_current, self.set_past = set(current_dict.keys()), set( + past_dict.keys()) + self.intersect = self.set_current.intersection(self.set_past) + + def added(self): + return self.set_current - self.intersect + + def removed(self): + return self.set_past - self.intersect + + def changed(self): + return set(o for o in self.intersect if + self.past_dict[o] != self.current_dict[o]) + + def unchanged(self): + return set(o for o in self.intersect if + self.past_dict[o] == self.current_dict[o]) diff --git a/openmtc-gevent/src/openmtc_gevent/GEventNetworkManager.pyc b/openmtc-gevent/src/openmtc_gevent/GEventNetworkManager.pyc new file mode 100644 index 0000000..efc407f Binary files /dev/null and b/openmtc-gevent/src/openmtc_gevent/GEventNetworkManager.pyc differ diff --git a/openmtc-gevent/src/openmtc_gevent/ServerRack.py b/openmtc-gevent/src/openmtc_gevent/ServerRack.py new file mode 100644 index 0000000..6e11fc3 --- /dev/null +++ b/openmtc-gevent/src/openmtc_gevent/ServerRack.py @@ -0,0 +1,63 @@ +""" +Created on 31.12.2013 + +@author: kca +""" + +# Adapted from https://gist.github.com/denik/1008826 + +from futile.logging import LoggerMixin +import sys +import gevent +import signal +from gevent.event import Event + + +class GEventServerRack(LoggerMixin): + + def __init__(self, servers, *args, **kw): + super(GEventServerRack, self).__init__(*args, **kw) + self.servers = servers + e = self._shutdown_event = Event() + e.set() + + def start(self): + started = [] + try: + for server in self.servers: + name = self._server_name(server) + server.start() + started.append(server) + self.logger.info('%s started on %s', name, server.address) + except: + self.logger.exception("Failed to start server %s", name) + self.stop(started) + raise + + self._shutdown_event.clear() + + @staticmethod + def _server_name(server): + return getattr(server, 'name', None) or server.__class__.__name__ or 'Server' + + def stop(self, servers=None): + self.logger.info("Stopping listeners...") + if servers is None: + servers = self.servers + for server in servers: + try: + server.stop() + except: + if hasattr(server, 'loop'): # gevent >= 1.0 + server.loop.handle_error(server.stop, *sys.exc_info()) + else: # gevent <= 0.13 + self.logger.exception("Error stopping server %s", + self._server_name(server)) + + self._shutdown_event.set() + + def serve_forever(self): + gevent.signal(signal.SIGTERM, self.stop) + gevent.signal(signal.SIGINT, self.stop) + self.start() + self._shutdown_event.wait() diff --git a/openmtc-gevent/src/openmtc_gevent/ServerRack.pyc b/openmtc-gevent/src/openmtc_gevent/ServerRack.pyc new file mode 100644 index 0000000..78921d0 Binary files /dev/null 
and b/openmtc-gevent/src/openmtc_gevent/ServerRack.pyc differ diff --git a/openmtc-gevent/src/openmtc_gevent/TaskRunner.py b/openmtc-gevent/src/openmtc_gevent/TaskRunner.py new file mode 100644 index 0000000..c4ef066 --- /dev/null +++ b/openmtc-gevent/src/openmtc_gevent/TaskRunner.py @@ -0,0 +1,22 @@ +from gevent.pool import Pool, Greenlet +from openmtc_server.TaskRunner import TaskRunner + + +class GEventTaskRunner(TaskRunner): + timeout = 5 + + def __init__(self, pool_size=200, *args, **kw): + super(GEventTaskRunner, self).__init__(*args, **kw) + self._pool = Pool(pool_size) + + def run_task(self, func, *args, **kw): + self.logger.debug("Adding task %s to pool of size %s", func, + self._pool.free_count()) + self._pool.start(Greenlet(func, *args, **kw)) + self.logger.debug("Task added") + + def stop(self): + self.logger.debug("Waiting for background queue to finish") + self._pool.join(self.timeout) + self.logger.debug("background queue finished") + super(GEventTaskRunner, self).stop() diff --git a/openmtc-gevent/src/openmtc_gevent/TaskRunner.pyc b/openmtc-gevent/src/openmtc_gevent/TaskRunner.pyc new file mode 100644 index 0000000..2867442 Binary files /dev/null and b/openmtc-gevent/src/openmtc_gevent/TaskRunner.pyc differ diff --git a/openmtc-gevent/src/openmtc_gevent/__init__.py b/openmtc-gevent/src/openmtc_gevent/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/openmtc-gevent/src/openmtc_gevent/__init__.pyc b/openmtc-gevent/src/openmtc_gevent/__init__.pyc new file mode 100644 index 0000000..ded449e Binary files /dev/null and b/openmtc-gevent/src/openmtc_gevent/__init__.pyc differ diff --git a/openmtc-gevent/src/openmtc_gevent/backend_main.py b/openmtc-gevent/src/openmtc_gevent/backend_main.py new file mode 100644 index 0000000..f2cb1b3 --- /dev/null +++ b/openmtc-gevent/src/openmtc_gevent/backend_main.py @@ -0,0 +1,3 @@ +import openmtc_gevent.main + +openmtc_gevent.main.main("config-backend.json", False) diff --git a/openmtc-gevent/src/openmtc_gevent/gateway_main.py b/openmtc-gevent/src/openmtc_gevent/gateway_main.py new file mode 100644 index 0000000..fa3936e --- /dev/null +++ b/openmtc-gevent/src/openmtc_gevent/gateway_main.py @@ -0,0 +1,3 @@ +import openmtc_gevent.main + +openmtc_gevent.main.main("config-gateway.json", True) diff --git a/openmtc-gevent/src/openmtc_gevent/main.py b/openmtc-gevent/src/openmtc_gevent/main.py new file mode 100644 index 0000000..c5cce00 --- /dev/null +++ b/openmtc-gevent/src/openmtc_gevent/main.py @@ -0,0 +1,417 @@ +# added this safety exception due to problems with sphinx autodoc when module +# load order is not strict. 
See: +# http://stackoverflow.com/questions/8774958/keyerror-in-module-threading-after-a-successful-py-test-run +import sys +import os +import gevent.monkey + +if 'threading' in sys.modules and not os.environ.get('SUPPORT_GEVENT'): + raise Exception('threading module loaded before monkey patching in ' + 'gevent_main!') + +os.environ.setdefault("GEVENT_RESOLVER", "ares,thread") +gevent.monkey.patch_all() + +# ssl fixes +################################################################################ +import gevent.ssl + +__ssl__ = __import__('ssl') + +try: + _ssl = __ssl__._ssl +except AttributeError: + _ssl = __ssl__._ssl2 + + +if not hasattr(_ssl, 'sslwrap'): + import inspect + + def new_sslwrap(sock, server_side=False, keyfile=None, certfile=None, + cert_reqs=__ssl__.CERT_NONE, + ssl_version=__ssl__.PROTOCOL_SSLv23, ca_certs=None, + ciphers=None): + context = __ssl__.SSLContext(ssl_version) + context.verify_mode = cert_reqs or __ssl__.CERT_NONE + if ca_certs: + context.load_verify_locations(ca_certs) + if certfile: + context.load_cert_chain(certfile, keyfile) + if ciphers: + context.set_ciphers(ciphers) + + caller_self = inspect.currentframe().f_back.f_locals['self'] + return context._wrap_socket(sock, server_side=server_side, + ssl_sock=caller_self) + + _ssl.sslwrap = new_sslwrap + + del inspect + del new_sslwrap + del __ssl__ + del _ssl + +# bugfix for geventhttpclient, many thanks to kca +gevent.ssl.PROTOCOL_SSLv3 = gevent.ssl.PROTOCOL_TLSv1 + + +# TODO: kca: look at http://www.gevent.org/servers.html + +# gevent main +################################################################################ +from openmtc.configuration import ConfigurationError +from openmtc_server.util.async import async_all + +_components = [] +_plugins = [] + +logger = None + + +def load_plugin(api, category, descriptor, global_config, onem2m_config, + is_gateway): + from re import sub as re_sub + + def convert(n): + s1 = re_sub('(.)([A-Z][a-z]+)', r'\1_\2', n) + return re_sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() + + try: + name = descriptor["name"] + except KeyError: + raise ConfigurationError('"name" missing in plugin entry: %s' % + (descriptor,)) + + if descriptor.get("disabled"): + logger.info("Plugin %s is disabled", name) + return + + try: + try: + package = descriptor["package"] + except KeyError: + package = category + ".plugins." 
+ convert(name) + + from importlib import import_module + module_ = import_module(package) + + cls = getattr(module_, name) + + config = descriptor.get("config", {}) + + if config.setdefault("global", global_config) is not global_config: + raise ConfigurationError("Invalid (reserved) key in configuration " + "for plugin %s: 'global'", name) + if config.setdefault("onem2m", onem2m_config) is not onem2m_config: + raise ConfigurationError("Invalid (reserved) key in configuration " + "for plugin %s: 'onem2m'", name) + + try: + if is_gateway: + config_class = cls.__gateway_configuration__ + else: + config_class = cls.__backend_configuration__ + except AttributeError: + config_class = cls.__configuration__ + + config = config_class(config) + + _plugins.append(cls(api, config)) + except ConfigurationError as e: + raise ConfigurationError("Error loading plugin %s: %s" % (name, e)) + except Exception as e: + logger.exception("Error loading plugin %s: %s", name, e) + raise Exception("Error loading plugin %s: %s" % (name, e)) + + +def load_plugins(api, plugins, global_config, onem2m_config, is_gateway): + for category in plugins.values(): + for plugin in category: + load_plugin(api, category, plugin, global_config, onem2m_config, + is_gateway) + + +def init_plugins(): + logger.info("Initializing plugins") + async_all([plugin.initialize() for plugin in _plugins]).get() + + +def start_plugins(): + logger.info("Starting plugins") + async_all([plugin.start() for plugin in _plugins]).get() + + +def load_config(name): + logger.debug("Reading config file: %s", name) + from openmtc_server.configuration import MainConfiguration, SimpleOption + from openmtc_cse.configuration import OneM2MConfiguration + + MainConfiguration.__options__["onem2m"] = SimpleOption(OneM2MConfiguration) + try: + with open(name) as f: + from json import load as j_load + config = j_load(f) + config = MainConfiguration(config) + except Exception as e: + raise ConfigurationError("Failed to load config file %s: %s" % + (name, e)) + + return config + + +def stop_component(component): + logger.debug("Stopping component: %s", component) + try: + component.stop() + except BaseException: + logger.exception("Failed to stop component %s", component) + + +def stop_components(): + map(stop_component, reversed(_components)) + logger.debug("Components stopped") + + +def stop_plugin(plugin): + if plugin.started: + stop_component(plugin) + + +def stop_plugins(): + map(stop_plugin, _plugins) + + +def init_component(component, api): + logger.debug("Initializing component: %s", component) + component.initialize(api) + _components.append(component) + + +def main(default_config_file, is_gateway): + global logger + + import futile.logging + logger = futile.logging.get_logger(__name__) + + from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + + config_locations = (".", "/etc/openmtc/gevent", "/etc/openmtc") + + parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter) + parser.add_argument("-f", "--configfile", default=None, + help="Location of the configuration file. If " + "unspecified the system will look for a file called %s" + " in these locations: %s" % + (default_config_file, ', '.join(config_locations))) + parser.add_argument("-v", "--verbose", action="count", default=None, + help="Increase verbosity in output. 
This option can be" + " specified multiple times.") + parser.add_argument("--profiler", action="store_true", + help="Use GreenletProfiler") + args = parser.parse_args() + + configfile = args.configfile + futile.logging.set_default_level(futile.logging.DEBUG) + + try: + if not configfile: + import os.path + for d in config_locations: + configfile = os.path.join(os.path.abspath(d), + default_config_file) + logger.debug("Trying config file location: %s", configfile) + if os.path.isfile(configfile): + break + else: + raise ConfigurationError("Configuration file %s not found in " + "any of these locations: %s" % + (default_config_file, config_locations)) + + config = load_config(configfile) + except ConfigurationError as e: + sys.stderr.write(str(e) + "\n") + sys.exit(2) + + import openmtc_cse.api + openmtc_cse.api.config = config + import openmtc_server.api + openmtc_server.api.config = config + + # TODO: kca: + # Also: set global (non-futile) level? + if "logging" in config: # TODO init logging + # FIXME: This won't work, needs translation to log levels + log_conf = config["logging"] + if args.verbose is None: + futile.logging.set_default_level(log_conf.get("level") or + futile.logging.WARNING) + elif args.verbose >= 2: + futile.logging.set_default_level(futile.logging.DEBUG) + else: + futile.logging.set_default_level(futile.logging.INFO) + logfile = log_conf.get("file") + if logfile: + futile.logging.add_log_file(logfile) + else: + futile.logging.set_default_level(futile.logging.DEBUG) + + # make iso8601 logging shut up + logger = futile.logging.get_logger(__name__) + futile.logging.get_logger("iso8601").setLevel(futile.logging.ERROR) + logger.debug("Running OpenMTC") + + from itertools import starmap + + import signal + + from gevent import spawn_later + from gevent.event import Event as GEventEvent + + from openmtc_gevent.TaskRunner import GEventTaskRunner + + from openmtc_cse.methoddomain import OneM2MMethodDomain + + from openmtc_cse.transport import OneM2MTransportDomain + + from openmtc_server.platform.default.Event import (ResourceFinishEvent, + NetworkEvent) + + from GEventNetworkManager import GEventNetworkManager + + from openmtc_server.util.db import load_db_module + + omd = OneM2MMethodDomain(config=config) + + otd = OneM2MTransportDomain(config=config) + + nm = GEventNetworkManager(config=config.get("network_manager", {})) + + task_runner = GEventTaskRunner() + _components.append(task_runner) + + _timers = set() + + db = load_db_module(config) + + class Api(object): + PLATFORM = "gevent" + + class events(object): + resource_created = ResourceFinishEvent(task_runner.run_task) + resource_deleted = ResourceFinishEvent(task_runner.run_task) + resource_updated = ResourceFinishEvent(task_runner.run_task) + resource_announced = ResourceFinishEvent(task_runner.run_task) + + # fired when a network interface appeared + # called with + interface_created = NetworkEvent(task_runner.run_task) + # fired when a network interface was disappeared + # called with + interface_removed = NetworkEvent(task_runner.run_task) + # fired when an address appeared on an existing interface + # called with ,
+ address_created = NetworkEvent(task_runner.run_task) + # fired when an address disappeared on an existing interface + # called with ,
+ address_removed = NetworkEvent(task_runner.run_task) + + start_onem2m_session = db.start_onem2m_session + get_shelve = db.get_shelve + + # handle request + handle_onem2m_request = omd.handle_onem2m_request + + # send request + send_onem2m_request = otd.send_onem2m_request + send_notify = otd.send_notify + + register_point_of_access = otd.register_point_of_access + + # connectors and endpoints + register_onem2m_client = otd.register_client + get_onem2m_endpoints = otd.get_endpoints + add_poa_list = otd.add_poa_list + + network_manager = nm + + run_task = task_runner.run_task + + @staticmethod + def set_timer(t, f, *args, **kw): + timer = None + + def wrapper(): + _timers.discard(timer) + f(*args, **kw) + timer = spawn_later(t, wrapper) + _timers.add(timer) + return timer + + @staticmethod + def cancel_timer(timer): + _timers.discard(timer) + timer.kill() + + map = map + + @staticmethod + def starmap(c, l): + return tuple(starmap(c, l)) + + Api.db = db + + openmtc_cse.api.api = Api + openmtc_cse.api.events = Api.events + openmtc_server.api.api = Api + openmtc_server.api.events = Api.events + + shutdown_event = GEventEvent() + gevent.signal(signal.SIGTERM, shutdown_event.set) + gevent.signal(signal.SIGINT, shutdown_event.set) + + try: + init_component(otd, Api) + init_component(omd, Api) + init_component(nm, Api) + + force = config["database"].get("dropDB") + if force or not db.is_initialized(): + db.initialize(force) + omd.init_cse_base() + + omd.start() + + load_plugins(Api, config.get("plugins", ()), + config["global"], config["onem2m"], is_gateway) + init_plugins() + start_plugins() + + logger.info("OpenMTC is running") + except: + logger.exception("Error during startup") + else: + if args.profiler: + import GreenletProfiler + GreenletProfiler.set_clock_type("cpu") + GreenletProfiler.start() + + # wait for shutdown event + shutdown_event.wait() + + if args.profiler: + GreenletProfiler.stop() + stats = GreenletProfiler.get_func_stats() + stats.print_all() + stats.save('profile.callgrind', type='callgrind') + + stop_plugins() + stop_components() + + for timer in _timers: + try: + timer.kill() + except: + logger.exception("Failed to kill timer %s", timer) + +if __name__ == "__main__": + main() diff --git a/openmtc-gevent/src/openmtc_gevent/main.pyc b/openmtc-gevent/src/openmtc_gevent/main.pyc new file mode 100644 index 0000000..8020c48 Binary files /dev/null and b/openmtc-gevent/src/openmtc_gevent/main.pyc differ diff --git a/pylint.cfg b/pylint.cfg new file mode 100644 index 0000000..bbec387 --- /dev/null +++ b/pylint.cfg @@ -0,0 +1,280 @@ +[MASTER] + +# Specify a configuration file. +#rcfile= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Profiled execution. +profile=no + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Pickle collected data for later comparisons. +persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + + +[MESSAGES CONTROL] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time. See also the "--disable" option for examples. +#enable= + +# Disable the message, report, category or checker with the given id(s). 
You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +#disable= + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html. You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". +files-output=no + +# Tells whether to display a full report or only the messages +reports=yes + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Add a comment according to your evaluation note. This is used by the global +# evaluation report (RP0004). +comment=no + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg} + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=4 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the beginning of the name of dummy variables +# (i.e. not used). +dummy-variables-rgx=_$|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of classes names for which member attributes should not be checked +# (useful for classes with attributes dynamically set). +ignored-classes=SQLObject + +# When zope mode is activated, add a predefined set of Zope acquired attributes +# to generated-members. +zope=no + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E0201 when accessed. Python regular +# expressions are accepted. +generated-members=REQUEST,acl_users,aq_parent + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=80 + +# Regexp for a line that is allowed to be longer than the limit. 
+ignore-long-lines=^\s*(# )??$ + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + +# List of optional constructs for which whitespace checking is disabled +no-space-check=trailing-comma,dict-separator + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + + +[BASIC] + +# Required attributes for module, separated by a comma +required-attributes= + +# List of builtins function names that should not be used, separated by a comma +bad-functions=map,filter,apply,input + +# Regular expression which should only match correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression which should only match correct module level names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression which should only match correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression which should only match correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct instance attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct attribute names in class +# bodies +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression which should only match correct list comprehension / +# generator expression variable names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=__.*__ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of statements in function / method body +max-statements=50 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,TERMIOS,Bastion,rexec + +# Create a graph of every (i.e. 
internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + + +[CLASSES] + +# List of interface methods to ignore, separated by a comma. This is used for +# instance to not check methods defines in Zope's Interface base class. +ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/server/openmtc-cse/src/openmtc_cse/__init__.py b/server/openmtc-cse/src/openmtc_cse/__init__.py new file mode 100644 index 0000000..f99fb6e --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/__init__.py @@ -0,0 +1,5 @@ +from collections import namedtuple + + +OneM2MEndPoint = namedtuple("OneM2MEndPointBase", + ("scheme", "server_address", "port")) diff --git a/server/openmtc-cse/src/openmtc_cse/__init__.pyc b/server/openmtc-cse/src/openmtc_cse/__init__.pyc new file mode 100644 index 0000000..69725c4 Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/__init__.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/api.py b/server/openmtc-cse/src/openmtc_cse/api.py new file mode 100644 index 0000000..e69de29 diff --git a/server/openmtc-cse/src/openmtc_cse/api.pyc b/server/openmtc-cse/src/openmtc_cse/api.pyc new file mode 100644 index 0000000..cb615bc Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/api.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/configuration.py b/server/openmtc-cse/src/openmtc_cse/configuration.py new file mode 100644 index 0000000..1dfe046 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/configuration.py @@ -0,0 +1,9 @@ +from openmtc.configuration import Configuration, SimpleOption + + +class OneM2MConfiguration(Configuration): + __name__ = "onem2m configuration" + __options__ = {"sp_id": SimpleOption(default="openmtc.org"), + "cse_type": SimpleOption(default="MN_CSE"), + "cse_id": SimpleOption(default="mn-cse-1"), + "cse_base": SimpleOption(default="onem2m")} diff --git a/server/openmtc-cse/src/openmtc_cse/configuration.pyc b/server/openmtc-cse/src/openmtc_cse/configuration.pyc new file mode 100644 index 0000000..81cbcc0 Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/configuration.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/methoddomain/__init__.py b/server/openmtc-cse/src/openmtc_cse/methoddomain/__init__.py new file mode 100644 index 0000000..5fafc14 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/methoddomain/__init__.py @@ -0,0 +1,251 @@ +import controller +import openmtc_onem2m.model as model +from aplus import Promise +from openmtc.util import datetime_now +from openmtc_cse.methoddomain.controller import 
OneM2MDefaultController +from openmtc_onem2m.exc import (STATUS_INTERNAL_SERVER_ERROR, CSEConflict, + CSENotFound, CSENotImplemented) +from openmtc_onem2m.model import (CSEBase, CSETypeIDE) +from openmtc_onem2m.transport import (OneM2MErrorResponse, OneM2MOperation) +from openmtc_server import Component +from openmtc_server.db.exc import DBConflict, DBNotFound +from openmtc_server.exc import ConfigurationError +from openmtc_server.util import log_error + + +class OneM2MMethodDomain(Component): + def __init__(self, config, *args, **kw): + super(OneM2MMethodDomain, self).__init__(*args, **kw) + + self._api = None + self.events = None + self.config = config + + self.controller_classes = { + model.CSEBase: controller.CSEBaseController, + model.RemoteCSE: controller.RemoteCSEController, + model.AE: controller.AEController, + model.Subscription: controller.SubscriptionController, + model.ContentInstance: controller.ContentInstanceController, + model.Container: controller.ContainerController, + model.AccessControlPolicy: controller.AccessControlPolicyController, + model.SemanticDescriptor: controller.SemanticDescriptorController, + } + + self._cse_base = None + self._rel_cse_id = None + self._abs_cse_id = None + + def initialize(self, api): + self._api = api + self.events = self._api.events + + self._api.handle_onem2m_request = self.handle_onem2m_request + + def start(self): + pass + + def stop(self): + pass + + def init_cse_base(self): + # get config values + onem2m_config = self.config["onem2m"] + self._cse_base = cse_base_name = onem2m_config.get("cse_base", "onem2m") + + # TODO(rst): check later + # node_link = 'dummy' + + # cse type + cse_type = onem2m_config.get("cse_type") + try: + cse_type = getattr(CSETypeIDE, + str(cse_type).replace("-", "_").upper()) + except (AttributeError, TypeError, ValueError): + raise ConfigurationError("Invalid value for 'cse_type': %s" % + (cse_type,)) + cse_type = CSETypeIDE(cse_type) + + # cse id + try: + self._rel_cse_id = "/" + onem2m_config["cse_id"] + except KeyError: + raise ConfigurationError("Missing configuration key: cse_id") + + # sp id + try: + self._abs_cse_id = "//" + onem2m_config["sp_id"] + self._rel_cse_id + except KeyError: + raise ConfigurationError("Missing configuration key: sp_id") + + # time + now = datetime_now() + + # resource + cse_base = CSEBase( + resourceName=cse_base_name, + resourceID='cb0', + parentID=None, + resourceType=model.ResourceTypeE['CSEBase'], + creationTime=now, + lastModifiedTime=now, + cseType=cse_type, + CSE_ID=self._rel_cse_id, + supportedResourceType=[model.ResourceTypeE[x.typename] + for x in self.controller_classes.keys()], + pointOfAccess=[], + path=cse_base_name + ) + db_session = self._api.start_onem2m_session() + + try: + result = db_session.store(cse_base) + except Exception as error: + self.logger.exception("Initialization error") + db_session.rollback() + raise error + else: + db_session.commit() + return result + + def handle_onem2m_request(self, onem2m_request): + self.logger.debug("handling request:\r\n\t%s", onem2m_request) + + db_session = self._api.start_onem2m_session() + try: + result = self._handle_onem2m_request(db_session, onem2m_request) + db_session.commit() + return result + except Exception as error: + if log_error(error): + self.logger.exception("Error during request: %r", error) + else: + self.logger.debug("Error during request: %r", error) + try: + status_code = error.response_status_code + except AttributeError: + status_code = 500 + + p = Promise() + result = 
OneM2MErrorResponse(status_code=status_code, + request=onem2m_request) + db_session.rollback() + p.reject(result) + return p + + def _forward(self, onem2m_request, path): + operation = onem2m_request.op + + # TODO(rst): optimize this (handling of CSE-relative references) + pre = (self._abs_cse_id if path.startswith('//') + else self._rel_cse_id) + '/' + if operation == OneM2MOperation.create: + if isinstance(onem2m_request.content, model.Subscription): + cn = onem2m_request.content + cn.notificationURI = [(pre if not uri.startswith('/') else '') + uri + for uri in cn.notificationURI] + if cn.subscriberURI and not cn.subscriberURI.startswith('/'): + cn.subscriberURI = pre + cn.subscriberURI + + return self._api.send_onem2m_request(onem2m_request) + + def _get_controller_class(self, resource_type): + try: + return self.controller_classes[resource_type] + except KeyError: + raise CSENotImplemented() + + def _run_controller(self, ctrl, request, resource): + with Promise() as p: + try: + p.fulfill(ctrl(request, resource)) + except Exception as error: + self.logger.debug("Handling %s: %s", type(error).__name__, + error) + if isinstance(error, OneM2MErrorResponse): + p.reject(error) + elif isinstance(error, DBConflict): + p.reject(CSEConflict()) + elif isinstance(error, DBNotFound): + p.reject(CSENotFound()) + else: + try: + status_code = error.response_status_code + except AttributeError: + status_code = STATUS_INTERNAL_SERVER_ERROR + + result = OneM2MErrorResponse(status_code, request=request) + p.reject(result) + return p + + def _normalize_path(self, path): + if path.startswith(self._rel_cse_id): + return '/'.join(path.split('/')[2:]) + elif path.startswith(self._abs_cse_id): + return '/'.join(path.split('/')[4:]) + else: + return path + + def _check_existence_and_get_resource(self, db_session, path): + if path.startswith('.'): + path = self._cse_base + path[1:] + + def get_resource(p): + try: + r = db_session.get(p) + except DBNotFound: + try: + p_segment, ch_segments = p.split('/', 1) + except ValueError: + raise CSENotFound() + try: + p = db_session.get(p_segment) + r = db_session.get('/'.join([p.path, ch_segments])) + except DBNotFound: + raise CSENotFound() + return r + + # virtual resource handling, see TS-0004 6.8 + # oldest, latest -> Container + # TODO(rst): fanOutPoint -> group + # TODO(rst): pollingChannelURI -> pollingChannel + if path.endswith(('latest', 'oldest')): + parent_path, virtual = path.rsplit('/', 1) + parent = get_resource(parent_path) + if isinstance(parent, model.Container): + resource = getattr(parent, virtual) + if resource is None: + raise CSENotFound() + return resource + + return get_resource(path) + + def _handle_onem2m_request(self, db_session, request): + self.logger.debug("_handling request:\r\n\t%s", request) + + operation = request.operation + + # strip trailing slashes + request.to = request.to.rstrip('/') + + # TS-0004 7.3.2.6 -> forwarding + path = self._normalize_path(request.to) + if path.startswith('/'): + return self._forward(request, path) + + def handle_onem2m_request(req): + return self._handle_onem2m_request(db_session, req) + + def _handle_resource(res): + if operation in (OneM2MOperation.create, OneM2MOperation.update): + resource_type = request.resource_type + else: + resource_type = type(res) + + ctrl_class = self._get_controller_class(resource_type) + ctrl = ctrl_class(db_session, resource_type, handle_onem2m_request) + return self._run_controller(ctrl, request, res) + + # TS-0004 7.3.3.2 -> check existence + resource = 
self._check_existence_and_get_resource(db_session, path) + return _handle_resource(resource) diff --git a/server/openmtc-cse/src/openmtc_cse/methoddomain/__init__.pyc b/server/openmtc-cse/src/openmtc_cse/methoddomain/__init__.pyc new file mode 100644 index 0000000..5231d60 Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/methoddomain/__init__.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/methoddomain/controller/__init__.py b/server/openmtc-cse/src/openmtc_cse/methoddomain/controller/__init__.py new file mode 100644 index 0000000..528b887 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/methoddomain/controller/__init__.py @@ -0,0 +1,1426 @@ +import string +from datetime import datetime +from itertools import chain +from operator import attrgetter +from random import choice +from urlparse import urlparse +import binascii +import base64 + +from iso8601.iso8601 import parse_date, ParseError +from fyzz import parse +from rdflib import Graph + +import openmtc_cse.api as api +from futile import uc +from futile.logging import LoggerMixin +from openmtc.exc import OpenMTCError +from openmtc.model import FlexibleAttributesMixin +from openmtc.util import datetime_now, datetime_the_future +from openmtc_cse.methoddomain.filtercriteria import check_match +from openmtc_onem2m.exc import (CSEOperationNotAllowed, STATUS_OK, CSETypeError, + CSEMissingValue, CSEValueError, STATUS_CREATED, + CSEError, CSESyntaxError, CSEBadRequest, + CSEPermissionDenied, STATUS_NOT_FOUND, CSEConflict, + CSEContentsUnacceptable, CSETargetNotReachable) +from openmtc_onem2m.model import (ExpiringResource, Notification, + AccessControlOperationE, ResourceTypeE, + NotificationContentTypeE, FilterUsageE, + get_short_resource_name, URIList, + DiscResTypeE, Container, AccessControlPolicy, + AccessControlPolicyIDHolder, AccessControlRuleC, + DynAuthDasRequestC, SecurityInfo, SecurityInfoTypeE, AE) +from openmtc_onem2m.transport import (OneM2MResponse, OneM2MRequest, + OneM2MOperation, OneM2MErrorResponse) +from openmtc_onem2m.util import split_onem2m_address +from openmtc_server.db import DBError +from openmtc_server.db.exc import DBNotFound +from openmtc_server.util import match_now_cron +from openmtc_server.util import uri_safe +from openmtc_server.util.async import async_all + + +_resource_id_counter = {} + + +class OneM2MDefaultController(LoggerMixin): + RANDOM_SOURCE = string.letters + string.digits + + result_content_type = None + + def __init__(self, db_session, resource_type, handle_onem2m_request): + super(OneM2MDefaultController, self).__init__() + self.resource_type = resource_type + self.db_session = db_session + self.handle_onem2m_request = handle_onem2m_request + + def __call__(self, request, target_resource): + self.logger.debug("%s servicing request", type(self).__name__) + + self.request = request + self.resource = target_resource + + self.global_config = api.config["global"] + self.onem2m_config = api.config["onem2m"] + self.api = api.api + self.events = api.events + + self.values = None + + self._require_auth = self.global_config.get("require_auth", True) + + # TODO(rkr): maybe make subjectAltName as mandatory in the certificate, + # TODO before handling it to the WSGI application + # differentiate between authN and impersonation + # authN: a valid ssl handshake was performed, but the subjectAltName may not provided + # impersonation: which means that subjectAltName exists in cert and matches the request + # originator id + self.is_authenticated = getattr(request, "_authenticated", 
None) + self.remote_ip_addr = getattr(request, "_remote_ip_addr", None) + + self._sp_id = "//" + self.onem2m_config["sp_id"] # //openmtc.org + self._rel_cse_id = "/" + self.onem2m_config["cse_id"] # /mn-cse-1 + self._abs_cse_id = self._sp_id + self._rel_cse_id # //openmtc.org/mn-cse-1 + + # default policies + self._default_privileges = map(lambda x: AccessControlRuleC(**x), + self.onem2m_config.get("default_privileges", [])) + + # dynamic authorization + dynamic_authorization = self.onem2m_config.get("dynamic_authorization", {}) + self._dynamic_authorization_supported = dynamic_authorization.get('enabled', False) + self._dynamic_authorization_poa = dynamic_authorization.get('poa', []) + + return self._handle_request() + + def _handle_request(self): + try: + handler = getattr(self, "_handle_" + self.request.operation.name) + except AttributeError as e: + raise CSEOperationNotAllowed(e) + return handler() + + # AUTHORIZATION + + def _check_authorization(self, resource=None): + """This method performs the access control decision (TS-0003 7.1.4). + The result should be "res_acrs= TRUE || FALSE" for "Permit" or "Deny". + + :return: True or False + """ + + if not self._require_auth: + return + + # ========================================== + # Part I: Getting the accessControlPolicyIDs + # ========================================== + # 1. get accessControlPolicyIDs of target resource + # 2. if not exist, get from parent resource + # 3. some resources specific handling => TS-0001 clause 9.6 for , + # , + # =>> get from parent + # 4. apply system default policies, if accessControlPolicyIDs attribute is: + # - not set + # - not point to valid resource + # - not reachable + + # ===================================================================== + # Part II: Get Rules (acrs) from selfPrivileges or privileges attribute + # ===================================================================== + # 1. if target is the resource + # =>> get rules from "selfPrivileges" + # 2. if target is another resource: + # =>> get accessControlRules (acr) from "privilege" attribute of the + # which is linked in the accessControlPolicyIDs + + # ==================================== + # Part III: Evaluate against the rules + # ==================================== + # Check the following conditions: + # 1. check if accessControlOriginators of the rule includes the originator of the request + # ==>> evaluate fr parameter of request vs. acor + # 2. check if accessControlOperations of the rule includes the operation type of the request + # ==>> evaluate op parameter of request vs. acop + # 3. 
check if accessControlContexts of the rule includes the request context + + self.logger.debug("checking authorization...") + + try: + urlparse(self.request.originator) + except AttributeError: + raise CSEPermissionDenied("No or not a valid originator given!") + + # cse is authorized: needed to perform actions like sending DAS response, otherwise loop + if self.request.originator == self._abs_cse_id: + return + + if not resource: + resource = self.parent if self.request.op == OneM2MOperation.create else self.resource + + # AE is allowed on own resources + if isinstance(resource, AE): + _, _, ae_id = split_onem2m_address(self.request.originator) + if ae_id == resource.resourceID: + return + elif getattr(resource, 'creator', '') == self.request.originator: + return + + # if the resource is a policy, check selfPrivileges + if isinstance(resource, AccessControlPolicy): + self._check_auth_acp(resource) + elif isinstance(resource, AccessControlPolicyIDHolder): + self._check_auth_acp_holder(resource) + else: + self._check_auth_other(resource) + + def _check_auth_acp(self, resource): + self.logger.debug("resource is AccessControlPolicy, checking selfPrivileges '%s'" % + resource.selfPrivileges) + + # TODO(rst): check if default policies are also valid for selfPrivileges + if self._perform_evaluation([resource], "selfPrivileges"): + return + + raise CSEPermissionDenied("Authentication failed. Cause: selfPrivileges") + + def _check_auth_acp_holder(self, resource): + self.logger.debug("Resource is AccessControlPolicyIDHolder, getting policies...") + + self._check_privileges(resource) + + def _check_auth_other(self, resource): + self.logger.debug("Resource has no attribute 'accessControlPolicyIDs'. Checking parent...") + parent = self._get_parent_of_resource(resource) + + if not isinstance(parent, AccessControlPolicyIDHolder): + self.logger.debug("Parent is not an AccessControlPolicyIDHolder") + raise CSEPermissionDenied("Authorization failed.") + + self._check_privileges(parent) + + def _get_parent_of_resource(self, resource): + return self.db_session.get(resource.parentID) + + def _check_privileges(self, resource): + # get all ACPs + policies = [] # acpi, accessControlPolicyIDs + if resource.accessControlPolicyIDs: + policies = self._get_policies(resource.accessControlPolicyIDs) + elif isinstance(resource, Container): + policies = self._get_parent_policies_of_container(resource) + + # perform evaluation based on policies/default policies + if self._perform_evaluation(policies, "privileges"): + return + + if self._dynamic_authorization_supported: + self.logger.debug("Notifying DAS Server...") + if self._notify_das_server(resource): + return + + raise CSEPermissionDenied("Authorization failed.") + + def _get_parent_policies_of_container(self, resource): + policies = [] + parent_resource = self._get_parent_of_resource(resource) + if parent_resource.accessControlPolicyIDs: + policies = self._get_policies(parent_resource.accessControlPolicyIDs) + return policies + else: + if isinstance(parent_resource, Container): + return self._get_parent_policies_of_container(parent_resource) + else: + return policies + + def _get_policies(self, access_control_policy_ids): + + def get_policy(aid): + try: + return self.api.handle_onem2m_request( + OneM2MRequest( + OneM2MOperation.retrieve, aid, fr=self._abs_cse_id + ) + ).get().content + except OneM2MErrorResponse as error: + if error.response_status_code == STATUS_NOT_FOUND: + self.logger.debug("Policy '%s' NOT FOUND.", aid) + else: + self.logger.debug("Error getting 
policy: %s:", error) + return None + + return filter(None, map(get_policy, access_control_policy_ids)) + + # def _notify_das_server(self, notify_uri, payload): + def _notify_das_server(self, resource): + # 7.5.1.2.10 Notification for Dynamic Authorization + # When the Originator(i.e. Hosting CSE) is triggered to perform dynamic authorization for + # an incoming request that it receives, then it performs the following steps in order: + # 1) Configure the To parameter with the address of the corresponding DAS Server + # associated with the resource targeted by the received request. The Hosting CSE + # shall use the DAS Server address information configured within the + # dynamicAuthorizationPoA attribute of the < dynamicAuthorizationConsultation > + # resource associated with the targeted resource. The Hosting CSE shall determine the + # corresponding < dynamicAuthorizationConsultation > resource using the + # dynamicAuthorizationConsultationIDs attribute of the targeted resource. If the + # attribute is not supported by the targeted resource, or it is not set, or it has a + # value that does not correspond to a valid < dynamicAuthorizationConsultation > + # resource(s), or it refers to a < dynamicAuthorizationConsultation > resource(s) + # that is not reachable, then based on system policies, the + # dynamicAuthorizationConsultationIDs associated with the parent may apply to the + # child resource if present, or a system default < dynamicAuthorizationConsultation > + # may apply if present. If a dynamicAuthorizationConsultationID attribute and + # corresponding < dynamicAuthorizationConsultation > resource can not be found or if + # the dynamicAuthorizationEnabled of a < dynamicAuthorizationConsultation > has a + # value of FALSE, the Hosting CSE shall reject the request by returning an + # "ORIGINATOR_HAS_NO_PRIVILEGE" Response Status Code to the Originator of the + # received request and no additional steps shall be performed. + + dac = self._get_dynamic_authorization_consultation(resource) + + if dac is None: + dyn_auth_poa = self._dynamic_authorization_poa + else: + dyn_auth_poa = dac.dynamicAuthorizationPoA + + if not dyn_auth_poa: + return False + + # 2) Configure the From parameter with the ID of the Hosting CSE which hosts the resource + # targeted by the received request. + # 3) Configure the mandatory sub-elements of the securityInfo element of the notification + # data + # a. The securityInfoType element shall be configured as "1" (Dynamic Authorization + # Request) in the Notify request primitive. + # b.The originator element shall be configured with the ID of the Originator of the + # received request. + # c. The targetedResourceType element shall be configured with the type of resource + # targeted by the received request. + # d. The operation element shall be configured with the type of operation targeted by + # the received request. + # 4) Optionally configure one or more optional sub-elements of the securityInfo element + # of the notification data + # ... 
+ + das_req = DynAuthDasRequestC( + originator=self.request.originator, + operation=self.request.operation, + targetedResourceType=resource.resourceType, + targetedResourceID=self._rel_cse_id + '/' + resource.resourceID + ) + content = SecurityInfo( + dasRequest=das_req, + securityInfoType=SecurityInfoTypeE.DynamicAuthorizationRequest + ) + + request = OneM2MRequest(OneM2MOperation.notify, '', fr=self._abs_cse_id, + ty=SecurityInfo, pc=content) + + # 5) The Hosting CSE shall send the notification request for dynamic authorization to the + # targeted DAS Server. + try: + resp = self.api.send_notify(request, dyn_auth_poa).get() + except (OneM2MErrorResponse, CSETargetNotReachable): + return False + + # Originator: + # When the Hosting CSE receives a notification response for dynamic authorization, it + # performs the following steps in order: + # 1) The Hosting CSE shall verify that the securityInfoType element of the securityInfo + # element of the notification is configured as "2" (Dynamic Authorization Response). + # If it is not, the Hosting CSE shall not grant privileges to the Originator of the + # request for which the Hosting CSE was attempting dynamic authorization. The Hosting + # CSE shall reject the request by returning an "ORIGINATOR_HAS_NO_PRIVILEGE" Response + # Status Code to the Originator of the received request and no additional steps shall + # be performed. + + sec_info_resp = resp.content + + try: + if sec_info_resp.securityInfoType != SecurityInfoTypeE.DynamicAuthorizationResponse: + return False + except AttributeError: + return False + + # 2) The Hosting CSE shall check whether the response contains a dynamicACPInfo element. + # If present, the Hosting CSE shall create a child resource + # under the targeted resource and configure its privileges using the dynamicACPInfo. + # In this case, the Hosting CSE shall configure the privileges attribute with the + # grantedPrivileges and the expirationTime attribute with the privilegesLifetime. The + # Hosting CSE shall also configure the selfPrivileges attribute to allow itself to + # perform Update/Retrieve/Delete operations on the newly created + # resource. + + if not sec_info_resp.dasResponse: + return False + + acp_info = sec_info_resp.dasResponse.dynamicACPInfo + # disabled for debug + # if acp_info: + # self._create_dynamic_policy(resource, acp_info) + + # 3) The Hosting CSE shall check whether the response contains a tokens element. If + # present the Hosting CSE shall perform verification and caching of the token as + # specified in clause 7.3.2 in TS-0003 [7]. + + # Not implemented yet. + + # NOTE: The Hosting CSE uses the information in the DAS response for authorization, + # see clause 7.3.3.15. 
+ + return self._perform_evaluation([acp_info], "grantedPrivileges") + + def _get_dynamic_authorization_consultation(self, resource): + try: + for dac_id in resource.dynamicAuthorizationConsultationIDs: + try: + dac = self.db_session.get(dac_id) + if dac.dynamicAuthorizationEnabled: + return dac + except DBNotFound: + pass + + pid = resource.parentID + except AttributeError: + return None + else: + return self._get_dynamic_authorization_consultation(self.db_session.get(pid)) + + def _create_dynamic_policy(self, resource, acp_info): + acp = AccessControlPolicy( + selfPrivileges=[AccessControlRuleC( + accessControlOriginators=[self._abs_cse_id], + accessControlOperations=[ + AccessControlOperationE.retrieve, + AccessControlOperationE.update, + AccessControlOperationE.delete + ] + )], + privileges=acp_info.grantedPrivileges, + expirationTime=acp_info.privilegesLifetime or datetime_the_future(3600) + ) + + req = OneM2MRequest(OneM2MOperation.create, resource.resourceID, + pc=acp, + fr=self._abs_cse_id, + ty=AccessControlPolicy) + resp = self.api.handle_onem2m_request(req).get() + dyn_acp_id = resp.content.resourceID + resource.accessControlPolicyIDs.append(dyn_acp_id) + self.db_session.update(resource, ['accessControlPolicyIDs']) + + def _perform_evaluation(self, policies, privilege_type): + + def _perform_access_decision(access_control_rules): + for acr in access_control_rules: + if self._is_authorized(self.request, acr): + self.logger.debug("SUCCESS: At least one match in accessControlRules.") + return True + return False + + if policies: + self.logger.debug("Performing evaluation of resource policies...") + privileges = list(chain.from_iterable(map(attrgetter(privilege_type), policies))) + return _perform_access_decision(privileges) + elif self._default_privileges: + self.logger.debug("Performing evaluation using default privileges...") + return _perform_access_decision(self._default_privileges) + return False + + def _is_authorized(self, request, acr): + """This method performs access control decision (TS-0003 7.1.5) for a single acr. + res_acr(k) = res_authn(k) AND res_origs(k) AND res_ops(k) AND res_ctxts(k). + + :param request: + :param acr: + :return: + """ + self.logger.debug("_is_authorized -> keys: %s", acr.__dict__) + + # get enum value of requested operation name + request_op_val = getattr(AccessControlOperationE, request.op) + + # discover operation is indicated by op = retrieve AND fc and Discrestype parameters + # therefore set request operation value to 32 and check it against acop + if request.op == OneM2MOperation.retrieve and request.drt and request.fc: + try: + if request.drt in ["1", "2"] and request.fc.filterUsage == 1: + request_op_val = 32 + except AttributeError: + pass + + # results for each part in the acr + res_origs = False + res_ops = False + res_authn = False + res_ctxts = False + + # 1st - request originator in acor? 
+ # TS-0001, p.119, Table 9.6.2.1-1: possible parameters + # acr.accessControlOriginators = ["all" || "" (CSE-ID, AE-ID, Group-ID + # || Role-ID (optional?)(TS0001 - 7.1.14) ] + + def orig_matches(orig, to_match): + if to_match == 'all': + self.logger.debug("all originators are valid for this acr") + return True + + def get_cse_relative_originator(o): + if o.startswith(self._sp_id): + return o[len(self._sp_id):] + return o + + if get_cse_relative_originator(orig) == get_cse_relative_originator(to_match): + self.logger.debug("request originator matches originator/domain") + return True + + self.logger.debug("invalid originator: '%s' != '%s'", orig, to_match) + return False + + if hasattr(acr, "accessControlOriginators") and acr.accessControlOriginators: + res_origs = any(orig_matches(request.fr, o) for o in acr.accessControlOriginators) + if not res_origs: + return False + else: + # the set of acor is empty (or not available) => fr is not member of it => False + self.logger.debug("no accessControlOriginators to check") + return False + + # 2nd - op allowed? + # acr.accessControlOperations e.g. [1, 2, 4] <- intEnums type AccessControlOperation + # acop vs. op: for create, delete, request, update, notify + # acop vs. op AND fc (Disrestype parameters): for discover + if hasattr(acr, "accessControlOperations") and acr.accessControlOperations: + # check request operation value against acop and check combined values (like 3, 7, etc.) + for v in acr.accessControlOperations: + if (request_op_val & v) != 0: + res_ops = True + break + if res_ops: + self.logger.debug("request operation '%s' is part of allowed operations '%s'", + request.op, + acr.accessControlOperations) + else: + self.logger.debug("request operation '%s' is not part of allowed operations '%s'", + request.op, + acr.accessControlOperations) + return False + else: + self.logger.debug("no accessControlOperations to check") + + # 3rd - request matches authenticationFlag + if hasattr(acr, "accessControlAuthenticationFlag"): + if acr.accessControlAuthenticationFlag: + if self.is_authenticated: + res_authn = True + else: + self.logger.debug("accessControlAuthenticationFlag is set True, " + "but originator is not authenticated") + res_authn = False + else: + res_authn = True + + # 4th - request matches context criteria? 
(time window || location || ipAddress) + # TODO(rkr): future implementation: location + if hasattr(acr, "accessControlContexts") and acr.accessControlContexts: + for context in acr.accessControlContexts: + if hasattr(context, "accessControlWindow") and context.accessControlWindow: + window_match = False + for window in context.accessControlWindow: + if match_now_cron(window): + window_match = True + break + + if window_match: + self.logger.debug("time window is open for request") + else: + self.logger.debug("time window closed for request") + return False + + if hasattr(context, "accessControlIpAddresses") and context.accessControlIpAddresses: + if hasattr(context.accessControlIpAddresses, "ipv4Addresses") and \ + context.accessControlIpAddresses.ipv4Addresses: + ip_match = False + for ipv4 in context.accessControlIpAddresses.ipv4Addresses: + remote_ipv4_addr = self.remote_ip_addr[7:] + if remote_ipv4_addr == ipv4: + ip_match = True + break + if ip_match: + self.logger.debug("ip match for request") + else: + self.logger.debug("no ip match for request") + return False + + # went through all stages -> success, return True + if res_origs and res_ops and res_authn: + return True + else: + return False + + def _is_authenticated(self, request_originator): + # TODO(rkr): implement 1st and 2nd; the 3rd is not a currently needed use case in our + # TODO deployments + # TODO(rkr): see TS-0003 p.33; 7.1.2-3 + # rq_authn = TRUE || FALSE + + # 1st - Originator = AE registered to Hosting CSE + # if originator is AE registered to the Hosting CSE then this decision is + # deployment/implementation specific. In some cases it is appropriate to expect TLS or DTLS + # to be used. In other cases, TLS or DTLS may be un-necessary. + + # 2nd - Originator = CSE registered to Hosting CSE + # If originator is CSE registered with the Hosting CSE, originator shall always be + # considered authenticated, because the Mcc is always required to be protected by TLS or + # DTLS. (according to an SAEF like described in TS-0003 8.2) + + # 3rd - AE/CSE registered with an other CSE that is not the Hosting CSE + # If the Originator is an AE or CSE registered with a CSE other than the Hosting CSE, then + # the Originator is considered authenticated by the Hosting CSE if and only if the request + # primitive is protected using End - to - End Security of Primitives(ESPrim) as described + # in clause 8.4. + + # TODO(rkr): AE Impersonation Prevention - TS-0003 p.37; 7.2.1 + # 0. Security association establishment may be performed.Clause 6.1.2.2.1 describes the + # scenarios when security association establishment between an AE and CSE is mandatory, and + # describes the scenarios when security association establishment between an AE and CSE is + # recommended.The subsequent procedures shall be performed if a security association has + # been established. + # + # 1. The AE shall send a request to Hosting CSE via its Registrar CSE(Hosting CSE is not + # represented on this figure and can either be the Registrar CSE or another CSE). + # + # 2. The Registrar CSE shall check if the value in the From parameter is the same as the ID + # associated in security association. + # + # 3. If the value is not the same, the Registrar CSE shall send a response with error + # response code '6101' (Security error - impersonation error). + # + # 4. 
If the values is the same, the Registrar CSE performs procedures specified in clause + # 8.2 of oneM2M TS - 0001[1].Depending on the number of Transit CSEs, the Registrar CSE + # shall either process the request or forward it to the Hosting CSE or to another Transit + # CSE. + + # defaults + AE_TLS_DTLS_connected = False + CSE_TLS_DTLS_connected = False + + # TODO(rkr): implement + # AE_TLS_DTLS_connected = True + # CSE_TLS_DTLS_connected = True + + if AE_TLS_DTLS_connected or CSE_TLS_DTLS_connected: + self.request.rq_authn = True + return True + else: + self.request.rq_authn = False + return False + + # NOTIFY + + def _handle_notify(self): + raise CSEOperationNotAllowed() + + # CREATE + + def _handle_create(self): + self.parent = self.resource + del self.resource + + self.now = datetime_now() + self.fields = [] + + self._check_authorization() + self._check_create_representation() + self._create_resource() + self._finalize_create() + return self._send_create_response() + + def _check_create_representation(self): + rt = self.resource_type + if not self.parent.has_child_type(rt): + raise CSEBadRequest() + + # TODO(rst): change controller to work on resource itself + values = self.request.content.get_values(True) + + self.logger.debug("_check_create_representation: %s", values) + + # TODO: move this to expiration time handler plugin + # but needs to be set to a value even if plugin is disabled + if issubclass(self.resource_type, ExpiringResource): + expiration_time = values.get("expirationTime") + if not expiration_time: + expiration_time = self.now + self.global_config[ + "default_lifetime"] + self.fields.append("expirationTime") + else: + if not isinstance(expiration_time, datetime): + try: + expiration_time = parse_date(expiration_time) + except ParseError as e: + raise CSEValueError( + "Illegal value for expirationTime: %s" % (e,)) + if expiration_time < self.now + self.global_config[ + "min_lifetime"]: + self.logger.warn("expirationTime is too low. Adjusting") + expiration_time = self.now + self.global_config[ + "min_lifetime"] + self.fields.append("expirationTime") + elif expiration_time > self.now + self.global_config[ + "max_lifetime"]: + self.logger.warn("expirationTime is too high. 
Adjusting") + expiration_time = self.now + self.global_config[ + "max_lifetime"] + self.fields.append("expirationTime") + + values["expirationTime"] = expiration_time + + rt_attributes = rt.attributes + ignore_extra = True # todo(rst): check this later with flexContainer + is_flex = ignore_extra and issubclass(self.resource_type, + FlexibleAttributesMixin) + + # TODO: optimize + if ignore_extra and not is_flex: + names = rt.attribute_names + for k in values.keys(): + if k not in names: + values.pop(k) + + for attribute in rt_attributes: + have_attr = (attribute.name in values and + values[attribute.name] is not None) + # TODO(rkr): check mandatory attributes + if not have_attr and attribute.mandatory: + raise CSEMissingValue("Missing attribute: %s" % + (attribute.name,)) + if have_attr and attribute.accesstype == attribute.RO: + self._handle_ro_attribute(attribute) + + self.values = values + + def _handle_ro_attribute(self, attribute): + if not self.request.internal: + raise CSETypeError("Attribute must not be specified: %s" % + (attribute.name,)) + + def _create_resource(self): + # TODO(rst): change controller to work on resource itself + if not self.values: + values = self.request.content.get_values(True) + else: + values = self.values + + self._set_mandatory_create_attributes(values) + + self.logger.debug("Creating resource of type '%s' with values: %s", + self.resource_type, values) + + resource_type = self.resource_type + + if "stateTag" in resource_type.attribute_names: + values["stateTag"] = 0 + + resource = resource_type(**values) + resource.path = self._get_resource_path() + + self.logger.info("Created resource of type '%s' at %s", + resource.typename, resource.path) + + self.resource = resource + + return self.db_session.store(resource) + + def _set_resource_id(self, values): + short_name = get_short_resource_name(self.resource_type.typename) + try: + _resource_id_counter[short_name] += 1 + except KeyError: + _resource_id_counter[short_name] = 0 + values["resourceID"] = short_name + str( + _resource_id_counter[short_name]) + + def _set_mandatory_create_attributes(self, values): + # time attributes + values["creationTime"] = values["lastModifiedTime"] = self.now + + # set values for parentID and resourceID + values["parentID"] = self.parent.resourceID + self._set_resource_id(values) + + # resource name + try: + name = uc(values["resourceName"]) + except KeyError: + name = "%s-%s" % (self.resource_type.typename, self._create_id()) + self.name = values["resourceName"] = name + + # resource type + values["resourceType"] = ResourceTypeE[self.resource_type.typename] + + def _create_id(self): + return ''.join([choice(self.RANDOM_SOURCE) for _ in range(16)]) + + def _get_resource_path(self): + try: + return self.__resource_path + except AttributeError: + # TODO: current uri_safe is insufficient. 
need a better strategy + rp = self.__resource_path = self.parent.path + "/" + uri_safe( + self.name) + return rp + + def _finalize_create(self): + events = self.api.events + events.resource_created.fire(self.resource, + self.request) + if self.parent is not None: + events.resource_updated.fire(self.parent, + self.request) + + def _send_create_response(self): + return OneM2MResponse(STATUS_CREATED, pc=self.resource, + request=self.request) + + # RETRIEVE + + def _prepare_fields(self): + """ + Make sure fields is a list + :return: + """ + + self.fields = self.request.content and self.request.content.values + + def _handle_retrieve(self): + try: + fu = self.request.filter_criteria.filterUsage + except AttributeError: + fu = None + self._prepare_fields() + if fu == FilterUsageE.Discovery: + self._prepare_discovery() + else: + self._check_authorization() + # TODO(rkr): if authorization from accessControlPolicies failed || False + # TODO perform Dynamic Authorization if the Hosting CSE does support it + self._prepare_resource() + return self._send_retrieve_response() + + def _prepare_discovery(self): + self.limit = None + self.truncated = False + + try: + self.drt = DiscResTypeE(int(self.request.drt)) + except TypeError: + self.drt = DiscResTypeE.structured + except ValueError: + raise CSEBadRequest() + + if hasattr(self.request.filter_criteria, 'limit'): + self.limit = self.request.filter_criteria.limit + + self.logger.debug("_prepare_resource -> _handle_result: %s" % + self.resource) + + self.discovered = [] + self.result = URIList(self.discovered) + self._discovery() + + def _discovery(self): + try: + return self._do_discovery(self.resource) + except OpenMTCError: + self.logger.exception("Error during discovery") + raise CSEError("Error during discovery") + + def _do_discovery(self, node): + self.logger.debug("_do_discovery: %s", node) + + if self.limit and len(self.discovered) >= self.limit: + self.logger.debug("stopping discovery: limit reached") + self.truncated = True + return True + + if check_match(node, self.request.filter_criteria): + try: + self._check_authorization(node) + if self.drt == DiscResTypeE.unstructured: + self.discovered.append(node.resourceID) + else: + self.discovered.append(node.path) + except CSEPermissionDenied: + pass + + if not self.truncated: + self._retrieve_children_for_resource(node) + self.logger.debug("checking sub resources of: %s", node) + self.logger.debug("childResource: %s", node.childResource) + for s in node.childResource: + self.logger.debug("is resource '%s' virtual? 
-> %s", s.name, + s.virtual) + if not s.virtual: + sub_node = self.db_session.get(s.path) + self._do_discovery(sub_node) + + def _prepare_resource(self): + self.logger.debug("preparing resource.") + res = self.result = self.resource + try: + res.resourceType = getattr(ResourceTypeE, + type(res).__name__) + except AttributeError: + self.logger.debug("no resourceType for %s", res) + + if self.fields and isinstance(self.fields, list): + res.set_values({k: v if k in self.fields else None for + k, v in res.get_values().items()}) + return self._retrieve_children() + + def _send_retrieve_response(self): + return OneM2MResponse(STATUS_OK, pc=self.result, request=self.request) + + def _retrieve_children(self): + return self._retrieve_children_for_resource(self.resource) + + def _retrieve_children_for_resource(self, resource): + self.logger.debug("getting children of: %s", resource) + children = self.db_session.get_collection(None, resource) + resource.childResource = children + + # UPDATE + + def _handle_update(self): + self.now = datetime_now() + self.fields = [] + + self._check_authorization() + self._check_update_representation() + self._update_resource() + self._finalize_update() + return self._send_update_response() + + def _check_update_representation(self): + rt = self.resource_type + + # TODO(rst): change controller to work on resource itself + values = self.request.content.get_values(True) + + self.logger.debug("_check_update_representation: %s", values) + + for k in ("lastModifiedTime", "stateTag", "childResource"): + values.pop(k, None) + + # TODO: move this to expiration time handler plugin + # but needs to be set to a value even if plugin is disabled + if issubclass(self.resource_type, ExpiringResource): + expiration_time = values.get("expirationTime") + if not expiration_time: + expiration_time = self.now + self.global_config[ + "default_lifetime"] + self.fields.append("expirationTime") + else: + if not isinstance(expiration_time, datetime): + try: + expiration_time = parse_date(expiration_time) + except ParseError as e: + raise CSEValueError( + "Illegal value for expirationTime: %s" % (e,)) + if expiration_time < self.now + self.global_config[ + "min_lifetime"]: + self.logger.warn("expirationTime is too low. Adjusting") + expiration_time = self.now + self.global_config[ + "min_lifetime"] + self.fields.append("expirationTime") + elif expiration_time > self.now + self.global_config[ + "max_lifetime"]: + self.logger.warn("expirationTime is too high. 
Adjusting") + expiration_time = self.now + self.global_config[ + "max_lifetime"] + self.fields.append("expirationTime") + + values["expirationTime"] = expiration_time + + rt_attributes = rt.attributes + ignore_extra = True # todo(rst): check this later with flexContainer + is_flex = ignore_extra and issubclass(self.resource_type, + FlexibleAttributesMixin) + + # TODO: optimize + if ignore_extra and not is_flex: + names = rt.attribute_names + for k in values.keys(): + if k not in names: + values.pop(k) + + for attribute in rt_attributes: + have_attr = attribute.name in values + if have_attr and attribute.accesstype == attribute.WO: + self._handle_wo_attribute(attribute) + + def _handle_wo_attribute(self, attribute): + if not self.request.internal: + raise CSETypeError("Attribute must not be specified: %s" % + (attribute.name,)) + + def _update_resource(self): + # TODO(rst): change controller to work on resource itself (partly done) + values = self.request.content.get_values(True) + + self._set_mandatory_update_attributes(values) + + self.logger.debug("Updating resource of type '%s' with values: %s", + self.resource_type, values) + + resource_type = self.resource_type + + if "stateTag" in resource_type.attribute_names: + values["stateTag"] = 0 + + resource = resource_type(**values) + + for v in values.keys(): + setattr(self.resource, v, values[v]) + + # resource.path = self.resource.path + +# self.logger.info("Updated resource of type '%s' at %s", +# resource.typename, resource.path) + self.logger.info("Updated resource of type '%s' at %s", + self.resource.typename, self.resource.path) + + # self.resource = resource + + return self.db_session.update(self.resource) + # return self.db_session.update(resource, values.keys()) + + def _set_mandatory_update_attributes(self, values): + values["lastModifiedTime"] = self.now + + def _finalize_update(self): + events = self.api.events + events.resource_updated.fire(self.resource, + self.request) + + def _send_update_response(self): + return OneM2MResponse(STATUS_OK, pc=self.resource, + request=self.request) + + # DELETE + + def _handle_delete(self): + self._check_authorization() + self._delete_resource() + if not self.request.cascading: + self._get_parent() + self._finalize_delete() + return self._send_delete_response() + + def _get_parent(self): + self.parent = self.db_session.get(self.resource.parent_path) + + def _delete_resource(self): + self._delete_children() + self._do_delete_resource() + + def _do_delete_resource(self): + return self.db_session.delete(self.resource) + + def _delete_children(self): + self._retrieve_children() + self._do_delete_children() + + def _do_delete_children(self): + child_promises = [] + + for child in self.resource.childResource: + request = OneM2MRequest(OneM2MOperation.delete, child.path, fr=self._abs_cse_id, + rqi=self.request.rqi) + request.cascading = True + child_promises.append(self.handle_onem2m_request(request)) + + async_all(child_promises, fulfill_with_none=True).get() + + def _finalize_delete(self): + if not self.request.cascading: + self.events.resource_updated.fire(self.parent, + self.request) + self.events.resource_deleted.fire(self.resource, self.request) + + def _send_delete_response(self): + return OneM2MResponse(STATUS_OK, request=self.request) + + +# see TS-0004 7.4.4 +class CSEBaseController(OneM2MDefaultController): + def _handle_create(self): + raise CSEOperationNotAllowed() + + def _prepare_resource(self): + super(CSEBaseController, self)._prepare_resource() + self.resource.pointOfAccess = 
self.api.get_onem2m_endpoints() + + def _handle_update(self): + raise CSEOperationNotAllowed() + + def _handle_delete(self): + raise CSEOperationNotAllowed() + + +# see TS-0004 7.4.5 +class RemoteCSEController(OneM2MDefaultController): + # TODO(rst): add Mca check -> 7.4.5.2.1 Create + + def _handle_create(self): + self.parent = self.resource + del self.resource + + self.now = datetime_now() + self.fields = [] + + self._check_create_representation() + self._create_resource() + self._finalize_create() + return self._send_create_response() + + +class AEController(OneM2MDefaultController): + def _handle_notify(self): + return self.api.send_notify(self.request, self.resource.pointOfAccess).get() + + def _handle_create(self): + self.parent = self.resource + del self.resource + + self.now = datetime_now() + self.fields = [] + + self._check_create_representation() + self._create_resource() + self._finalize_create() + return self._send_create_response() + + def _set_resource_id(self, values): + + def get_generic_ae_id(): + try: + _resource_id_counter["ae"] += 1 + except KeyError: + _resource_id_counter["ae"] = 0 + return "CAE" + str(_resource_id_counter["ae"]) + + try: + _, _, ae_id = split_onem2m_address(self.request.originator) + except TypeError: + ae_id = get_generic_ae_id() + + if not ae_id.startswith('C'): + ae_id = get_generic_ae_id() + + try: + self.db_session.get(ae_id) + except DBNotFound: + pass + else: + raise CSEConflict() + + values["resourceID"] = ae_id + + def _set_mandatory_create_attributes(self, values): + super(AEController, self)._set_mandatory_create_attributes(values) + + values["AE-ID"] = values["resourceID"] + + # TODO(rst): set nodeLink + values["nodeLink"] = "dummy" + + +class SubscriptionController(OneM2MDefaultController): + def _handle_create(self): + self.parent = self.resource + del self.resource + + self.now = datetime_now() + self.fields = [] + + self._check_authorization() + self._check_originator_access() + self._check_notification_uri() + self._check_create_representation() + self._create_resource() + self._finalize_create() + return self._send_create_response() + + # def _check_syntax_create(self): + # super(SubscriptionController, self)._check_syntax_create() + # try: + # criterias = self.request.pc["eventNotificationCriteria"] + # if criterias: + # from openmtc_cse.methoddomain.filtercriteria import filters + # + # self.logger.debug("validating filter criterias: %s", criterias) + # for crit in criterias: + # if crit != "attribute": + # if hasattr(filters, crit): + # self.logger.debug("criterion '%s' is valid", crit) + # pass # valid filter + # else: + # self.logger.error("criterion '%s' is invalid", crit) + # raise CSESyntaxError("unknown criterion: %s", crit) + # except KeyError as e: + # pass + # # self.logger.warn(e) + + def _check_originator_access(self): + # TODO(rst): TS-004 7.3.8.2.1 + # 3. Check if the subscribed-to resource, addressed in To parameter in + # the Request, is subscribable. Subscribable resource types are defined + # in TS-0001 Functional Architecture [6], they have + # resource types as their child resources. + # If it is not subscribable, the Hosting CSE shall return the Notify + # response primitive with a Response Status Code indicating + # "TARGET_NOT_SUBSCRIBABLE" error. + + # 4. Check if the Originator has privileges for retrieving the + # subscribed-to resource. 
+ # If the Originator does not have the privilege, the Hosting CSE shall + # return the Notify response primitive with Response Status Code + # indicating "NO_PRIVILEGE" error. + return + + def _check_notification_uri(self): + # TODO(rst): TS-004 7.3.8.2.1 + # 5. If the notificationURI is not the Originator, the Hosting CSE + # should send a Notify request primitive to the notificationURI with + # verificationRequest parameter set as TRUE (clause 7.4.1.2.2). + + # debug only + if self.request.originator is None: + return + + try: + self.logger.debug("Checking notificationURI: %s", + self.request.content.notificationURI) + uris = [uri for uri in + self.request.content.notificationURI if + not uri.startswith(self.request.originator)] + # TODO(rst): change the check that it should be a valid AE-ID + # for uri in uris: + # if not urlparse(uri).scheme: + # raise CSESyntaxError("Invalid notificationURI") + except KeyError: + raise CSESyntaxError("Invalid notificationURI") + + # a. If the Hosting CSE cannot send the Notify request primitive, the + # Hosting CSE shall return the Notify response primitive with a Response + # Status Code indicating "SUBSCRIPTION_VERIFICATION_INITIATION_FAILED" + # error. + + def send_verification(notify_uri): + notification = Notification( + verificationRequest=True, + creator=self.request.originator + ) + + send_notify_request = OneM2MRequest(OneM2MOperation.notify, notify_uri, + self.request.originator, pc=notification) + return self.api.send_onem2m_request(send_notify_request) + + # b. If the Hosting CSE sent the primitive, the Hosting CSE shall + # check if the Notify response primitive contains a Response Status Code + # indicating "SUBSCRIPTION_CREATOR_HAS_NO_PRIVILEGE" or + # "SUBSCRIPTION_HOST_HAS_NO_PRIVILEGE" error. If so, the Hosting CSE + # shall return the Create response primitive with a Response Status Code + # indicating the same error from the Notify response primitive to the + # Originator. + + def handle_error(error): + self.logger.info("Subscription verification failed: %s", error) + raise CSEError + # TODO(rst): check subscription error + # if error.status_code in [ + # STATUS_REQUEST_TIMEOUT, + # STATUS_BAD_GATEWAY, + # STATUS_SERVICE_UNAVAILABLE, + # STATUS_GATEWAY_TIMEOUT + # ]: + # raise CannotInitiateSubscriptionVerification(error) + # elif error.status_code == STATUS_SUBSCRIPTION_VERIFICATION_FAILED: + # raise SubscriptionVerificationFailed(error) + # else: + # raise CSEBadGateway(error) + + # TODO(rst): verification request needs to be checked + # try: + # async_all(map(send_verification, uris), + # fulfill_with_none=True).get() + # except Exception as error: + # handle_error(error) + + def _set_mandatory_create_attributes(self, values): + super(SubscriptionController, + self)._set_mandatory_create_attributes(values) + # TODO(rst): TS-004 7.3.8.2.1 + # 7. If the notificationURI is not the Originator, the Hosting CSE shall + # store Originator ID to creator attribute. 
+ if (self.request.originator not in + values["notificationURI"]): + values["creator"] = self.request.originator + + # set notificationContentType if not set + if "notificationContentType" not in values: + values["notificationContentType"] = \ + NotificationContentTypeE.allAttributes + + +class ContainerController(OneM2MDefaultController): + def _set_mandatory_create_attributes(self, values): + super(ContainerController, + self)._set_mandatory_create_attributes(values) + values["creator"] = self.request.originator or 'nobody' + values["currentNrOfInstances"] = 0 + values["currentByteSize"] = 0 + + +class ContentInstanceController(OneM2MDefaultController): + def _create_resource(self): + super(ContentInstanceController, self)._create_resource() + + # handle_old_instances + max_nr_of_instances = self.parent.maxNrOfInstances + current_nr_of_instances = self.parent.currentNrOfInstances + if 0 < max_nr_of_instances <= current_nr_of_instances: + self.parent.currentNrOfInstances -= 1 + self.parent.currentByteSize -= self.parent.oldest.contentSize + + self.db_session.delete(self.parent.oldest) + + if self.parent.currentNrOfInstances >= 1: + oldest = self.db_session.get_oldest_content_instance( + self.parent) + self.logger.debug("Setting new oldest: %s", oldest) + self.parent.oldest = oldest + else: + self.logger.debug("Setting oldest to None") + self.parent.oldest = None + + # handle_new_instance + self.parent.currentNrOfInstances += 1 + self.parent.currentByteSize += self.resource.contentSize + if self.parent.oldest is None: + self.logger.debug("Setting new resource as oldest: %s", + self.resource) + self.parent.oldest = self.resource + self.parent.latest = self.resource + self.db_session.update(self.parent) + + def _set_mandatory_create_attributes(self, vals): + self.request.name = None + super(ContentInstanceController, + self)._set_mandatory_create_attributes(vals) + + vals["contentSize"] = len(vals["content"].encode('utf-8')) + if not vals.get("contentInfo"): + vals["contentInfo"] = 'text/plain:0' + + def _delete_resource(self): + super(ContentInstanceController, self)._delete_resource() + + cnt = self.db_session.get(self.resource.parentID) + # TODO(rst): handle byte size + try: + ci_l = self.db_session.get_latest_content_instance(cnt) + ci_o = self.db_session.get_oldest_content_instance(cnt) + except (DBError, KeyError): + cnt.latest = None + cnt.oldest = None + cnt.currentNrOfInstances = 0 + else: + cnt.latest = ci_l + cnt.oldest = ci_o + cnt.currentNrOfInstances -= 1 + + return self.db_session.update(cnt) + + +class AccessControlPolicyController(OneM2MDefaultController): + def _set_mandatory_create_attributes(self, vals): + super(AccessControlPolicyController, + self)._set_mandatory_create_attributes(vals) + + if vals.get("selfPrivileges") is None: + vals["selfPrivileges"] = [{ + "accessControlOperations": [ + AccessControlOperationE.create, + AccessControlOperationE.retrieve, + AccessControlOperationE.update, + AccessControlOperationE.delete, + AccessControlOperationE.notify, + AccessControlOperationE.discover + ], + "accessControlOriginators": ["/mn-cse-1"] + }] + + +class DynamicAuthorizationConsultationController(OneM2MDefaultController): + def _set_mandatory_create_attributes(self, values): + super(DynamicAuthorizationConsultationController, + self)._set_mandatory_create_attributes(values) + + # TODO(rkr): values is set here, but it's not set in the resource at the end when resource + # TODO(rkr): is created + if not values.get("dynamicAuthorizationPoA"): + 
values["dynamicAuthorizationPoA"] = [] + # if not values.get("dynamicAuthorizationLifetime"): + # values["dynamicAuthorizationLifetime"] = "" + + +class SemanticDescriptorController(OneM2MDefaultController): + + @staticmethod + def _check_descriptor_data(descriptor_data): + try: + data = base64.b64decode(descriptor_data) + except binascii.Error: + raise CSEContentsUnacceptable("The descriptor was not correctly base64 encoded.") + + try: + g = Graph() + g.parse(data=data, format="application/rdf+xml") + except Exception: + raise CSEContentsUnacceptable("The descriptor attribute does not conform to the " + "RDF/XML syntax as defined in RDF 1.1 XML Syntax.") + + def _check_create_representation(self): + super(SemanticDescriptorController, self)._check_create_representation() + self._check_descriptor_data(self.values["descriptor"]) + + def _prepare_resource(self): + super(SemanticDescriptorController, self)._prepare_resource() + res = self.result + + # delete "semanticOpExec" from the response. + del res.attribute_values["semanticOpExec"] + setattr(res, "semanticOpExec", None) + + def _check_update_representation(self): + super(SemanticDescriptorController, self)._check_update_representation() + + values = self.request.content.get_values(True) + if all(k in values for k in ("semanticOpExec", "descriptor")): + # check if both attribute exist at the same time + raise CSEContentsUnacceptable("bad request: both semanticOpExec and descriptor exist") + elif "descriptor" in values: + # verify if the descriptor conform to the RDF syntax or not + self._check_descriptor_data(self.values["descriptor"]) + elif "semanticOpExec" in values: + # verify if the semanticOpExec has a correct SPAROL syntax + try: + parse(values["semanticOpExec"]) + except Exception: + raise CSEContentsUnacceptable("The semanticOpExec attribute does not conform to " + "the SPARQL query syntax.") + else: + raise CSESyntaxError("Please provide an updated descriptor or a semanticOpExec") diff --git a/server/openmtc-cse/src/openmtc_cse/methoddomain/controller/__init__.pyc b/server/openmtc-cse/src/openmtc_cse/methoddomain/controller/__init__.pyc new file mode 100644 index 0000000..b696c5c Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/methoddomain/controller/__init__.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/__init__.py b/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/__init__.py new file mode 100644 index 0000000..98b2b35 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/__init__.py @@ -0,0 +1,52 @@ +from futile.logging import get_logger +from openmtc.model import ModelTypeError +from openmtc_cse.methoddomain.filtercriteria import filters +from openmtc_onem2m.exc import CSEBadRequest +from openmtc_onem2m.model import FilterCriteria + +_logger = get_logger(__name__) + + +def check_match(resource, filter_criteria): + _logger.debug("checking if filter criteria '%s' are matched by " + "resource '%s'", filter_criteria, resource) + for criteria, value in filter_criteria.get_values(True).iteritems(): + if not value: + continue + _logger.debug("checking if resource matches: %s=%s", criteria, value) + try: + filter_function = getattr(filters, criteria) + except AttributeError: + _logger.error("'%s' is not a valid filter criterion", criteria) + return False + else: + if not filter_function(resource, value): + _logger.debug("resource '%s' does not match criterion '%s=%s'", + resource, criteria, value) + return False + + 
_logger.debug("resource '%s' matches filter criteria '%s'", + resource, filter_criteria) + return True + + +def parse_filter_criteria(filter_criteria): + if filter_criteria is None: + filter_criteria = {} + _logger.debug("parsing '%s'", filter_criteria) + int_criteria = ('stateTagSmaller', 'stateTagBigger', 'resourceType', + 'sizeAbove', 'sizeBelow', 'filterUsage', 'limit') + parsed_criteria = {} + for k, v in filter_criteria.iteritems(): + if k in int_criteria: + if isinstance(v, list): + parsed_criteria[k] = map(int, v) + else: + parsed_criteria[k] = int(v) + else: + parsed_criteria[k] = v + + try: + return FilterCriteria(**parsed_criteria) + except ModelTypeError: + raise CSEBadRequest() diff --git a/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/__init__.pyc b/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/__init__.pyc new file mode 100644 index 0000000..01991c6 Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/__init__.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/filters.py b/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/filters.py new file mode 100644 index 0000000..b2271f7 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/filters.py @@ -0,0 +1,219 @@ +from futile.logging import get_logger +from futile.collections import get_iterable + +_logger = get_logger(__name__) + +""" +check TS-0001-oneM2M-Functional-Architecture-V-2014-08, page 59 +""" + + +def modifiedSince(resource, value): + try: + return resource.lastModifiedTime > value + except AttributeError: + pass + + +def unmodifiedSince(resource, value): + try: + return resource.lastModifiedTime < value + except AttributeError: + pass + + +def createdAfter(resource, value): + try: + return resource.creationTime > value + except AttributeError: + pass + + +def createdBefore(resource, value): + try: + return resource.creationTime < value + except AttributeError: + pass + + +def contentType(resource, value): + try: + return any(map(resource.contentType.__contains__, get_iterable(value))) + except AttributeError: + pass + + +def stateTagSmaller(resource, value): + """ + Check if the stateTag attribute of the resource is smaller than the specified value. + + :param resource: + :param value: + :return: + :rtype: bool + """ + try: + return resource.stateTag < value + except AttributeError: + pass + + +def stateTagBigger(resource, value): + """ + Check if the stateTag attribute of the resource is bigger than the specified value. + :param resource: + :type resource: + :param value: + :type value: + :return: + :rtype: bool + """ + try: + return resource.stateTag > value + except AttributeError: + pass + + +def expireBefore(resource, value): + """ + Check if the expirationTime attribute of the resource is chronologically before the specified value. + + :param resource: + :type resource: + :param value: + :type value: + :return: + :rtype: + """ + try: + return resource.expirationTime < value + except AttributeError: + pass + + +def expireAfter(resource, value): + """ + Check if the expirationTime attribute of the resource is chronologically after the specified value. + + :param resource: + :type resource: + :param value: + :type value: + :return: + :rtype: + """ + try: + return resource.expirationTime > value + except AttributeError: + pass + + +def labels(resource, values): + """ + Check if the labels attributes of the resource matches the specified value. 
+ :param resource: + :type resource: + :param values: + :type values: + :return: + :rtype: + """ + def test(value): + try: + return value in resource.labels + except (AttributeError, TypeError): + return False + + return any(map(test, values)) + + +def resourceType(resource, values): + """ + The resourceType attribute of the resource is the same as the specified + value. It also allows discriminating between normal and announced resources. + + :param resource: + :type resource: + :param values: + :type values: + :return: + :rtype: + """ + def test(value): + try: + return resource.resourceType == int(value) + except AttributeError: + return False + + return any(map(test, values)) + + +def sizeAbove(resource, value): + """ + Check if the contentSize attribute of the resource is + equal to or greater than the specified value. + :param resource: + :type resource: + :param value: + :type value: + :return: + :rtype: + """ + try: + return resource.contentSize >= value + except AttributeError: + pass + + +def sizeBelow(resource, value): + """ + Check if the contentSize attribute of the resource is smaller than the specified value. + :param resource: + :type resource: + :param value: + :type value: int + :return: + :rtype: + """ + try: + return resource.contentSize < value + except AttributeError: + pass + + +def limit(resource, value): + """ + Check if this is a valid limit for the number of matching resources to be specified. + + :param resource: + :type resource: + :param value: specified limit + :type value: int + :return: True if valid limit, False otherwise + :rtype: bool + """ + return value > 0 + + +def filterUsage(resource, value): + """ + Indicates how the filter criteria is used. + E.g., if this parameter is not provided, the Retrieve operation is for generic retrieve operation. + If filterUsage is provided, the Retrieve operation is for resource . 
+ + :param resource: + :type resource: + :param value: + :type value: bool + :return: + :rtype: bool + """ + # if value: + # return True + # else: + # return False + return True + + +filters = [stateTagSmaller, stateTagBigger, expireBefore, expireAfter, labels, + resourceType, sizeAbove, sizeBelow, limit, filterUsage] diff --git a/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/filters.pyc b/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/filters.pyc new file mode 100644 index 0000000..a325838 Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/methoddomain/filtercriteria/filters.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/__init__.py b/server/openmtc-cse/src/openmtc_cse/plugins/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/__init__.pyc b/server/openmtc-cse/src/openmtc_cse/plugins/__init__.pyc new file mode 100644 index 0000000..c46743f Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/plugins/__init__.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/announcement_handler/__init__.py b/server/openmtc-cse/src/openmtc_cse/plugins/announcement_handler/__init__.py new file mode 100644 index 0000000..d8cbc2a --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/plugins/announcement_handler/__init__.py @@ -0,0 +1,603 @@ +from collections import namedtuple + +from openmtc_onem2m.exc import CSEError +from openmtc_onem2m.model import AnnounceableResource, get_onem2m_type, \ + CSEBase, RemoteCSE + +from openmtc_onem2m.transport import OneM2MRequest, MetaInformation, \ + OneM2MOperation +from openmtc_server.Plugin import Plugin +from copy import deepcopy +from openmtc_server.util.async import async_all +from re import sub +from urlparse import urlparse +# url join with coap compatibility +from urlparse import urljoin, uses_relative, uses_netloc + +uses_relative.append('coap') +uses_netloc.append('coap') + +AnncResult = namedtuple('AnncResult', ['cse_uri', 'res_con']) + + +class AnnouncementHandler(Plugin): + # method constants + _CREATE = 'create' + _UPDATE = 'update' + _DELETE = 'delete' + + def __init__(self, api, config, *args, **kw): + super(AnnouncementHandler, self).__init__(api, config, *args, **kw) + self._announcements = {} + self._cse_base = urlparse(self.config['global']['cse_base']).path + # TODO_oneM2M: self._cse_links should be filled with registration plugin, using a static value in the mean time + # self._cse_links = {} + self._cse_links = { + 'http://localhost:15000/onem2m': 'http://localhost:15000/onem2m'} + + def _init(self): + # subscribe for announceable resources + self.events.resource_created.register_handler(self._update_annc_create, + AnnounceableResource) + self.events.resource_updated.register_handler(self._update_annc_update, + AnnounceableResource) + self.events.resource_deleted.register_handler(self._delete_annc, + AnnounceableResource) + + # We keep track of the CSEs that are registered, + # in case we need to set default announceTo + self.events.resource_created.register_handler(self._cse_created, + CSEBase) + self.events.resource_deleted.register_handler(self._cse_deleted, + CSEBase) + + self._initialized() + + def _start(self): + def retrieve_remote_cse_list(): + def get_collection(session): + p = session.get_collection(None, RemoteCSE, + CSEBase(path="/onem2m")) + session.commit() + return p + + return self.api.db.start_session().then(get_collection) + + def get_cse(cse): + cse_req = 
OneM2MRequest(OneM2MOperation.retrieve, cse, None, + MetaInformation(None)) + return (self.api.handle_onem2m_request(cse_req) + .then(lambda r: r.resource)) + + def handle_remote_cse_list(remote_cse_list): + self.logger.debug("Loaded RemoteCSE list is %s" % remote_cse_list) + for cse in remote_cse_list: + self.logger.debug("Adding CSE %s in the list" % cse) + self._cse_links[cse.path] = cse.link + + return retrieve_remote_cse_list() \ + .then( + lambda remote_cse_list: async_all(map(get_cse, remote_cse_list))) \ + .then(handle_remote_cse_list) \ + .then(self._started) + # return self._started() + + def _cse_created(self, cse, req): + # TODO_oneM2M: Test this with RemoteCSE registration + self._cse_links[cse.path] = cse.link + + def _cse_deleted(self, resource, req): + self._cse_links.pop(req.to, None) + + def _update_annc_create(self, resource, onem2m_req): + self.logger.debug("_update_annc_create: resource.announceTo is %s", + resource.announceTo) + self.logger.debug("_update_annc_create: request is %s", onem2m_req) + self._announcements[resource.path] = { + 'resource': deepcopy(resource), + 'uris': {} + } + try: + # self._announcements[resource.path]['resource'].announceTo[ + # 'cseList']['reference'] = [] + self._announcements[resource.path]['resource'].announceTo = [] + except KeyError: + pass + # except TypeError: + # self._announcements[resource.path]['resource'].announceTo = { + # 'cseList': { + # 'reference': [] + # } + # } + + if resource.typename == 'AE': + req_ent = resource.path + else: + # req_ent = onem2m_req.requestingEntity + req_ent = onem2m_req.originator + self._update_annc(resource, self._CREATE, req_ent) + + def _update_annc_update(self, resource, onem2m_req): + self.logger.debug("_update_annc_update: %s", resource) + # if not req_ind.do_announce: + # return + # self._update_annc(resource, self._UPDATE, req_ind.requestingEntity) + self._update_annc(resource, self._UPDATE, onem2m_req.originator) + + def _update_annc(self, resource, method, req_ent): + self.logger.debug("_update_annc %s - %s: ", resource, + resource.announceTo) + + if resource.announceTo: + # This action shall apply if the following conditions are met: + # * A new announceable resource is created in the local CSE and the + # CREATE request contains an announceTo attribute. + # * The announceTo attribute of the announceable resource is + # added/changed. + self._update_annc_by_app(resource, method, req_ent) + # else: + # # This action shall apply if the following conditions are met: + # # * A new announceable resource is created in the local CSE, the + # # CREATE request does not contain an announceTo attribute. + # # * The announceTo attribute is removed from the resource + # # representation by an XXXUpdateRequestIndication (where XXX + # # represents the resource that contains the announceTo attribute). + # self._update_annc_by_cse(resource, req_ent) + + def _update_annc_by_app(self, resource, method, req_ent): + + # 1) If the announceTo attribute contains the activated element set + # to FALSE and the original request that triggered this action is + # not a CREATE, then the local CSE shall reject the original + # request with STATUS_NOT_FORBIDDEN and then this procedure stops. + # if method != self._CREATE: + # resource.announce_to.set('activated', True) + + # 2) If the announceTo attribute contains the activated element set + # to FALSE and the original request that trigger this action is a + # CREATE, then the announceTo attribute value is stored as-is in the + # resource. 
No further actions are required. + # else: + # self._update_resource(resource) + + # In oneM2M, if announceTo is present, it has to be handled. (no ignoring) + # TODO_oneM2M: Deal w/ updates + force = method == 'update' + self._handle_announce_to(resource, req_ent) \ + .then(lambda l: self._update_resource(resource, l[0], l[1], force)) + + """ TODO_oneM2M: Check if this really is obsolete + def _update_annc_by_cse(self, resource, req_ent): + # 1) It checks if the issuer is an application registered to this local + # CSE, if not the procedure stops here, otherwise the local CSE checks + # if global element is set to TRUE. (If global is set to FALSE the + # procedure does not apply). + def get_app(): + get_app_req_ind = RetrieveRequestIndication(urlparse(req_ent).path) + return self.api.handle_request_indication(get_app_req_ind) \ + .then(lambda r: r.resource) + + def handle_app(app): + # Todo: get application if resource is not an application + # 2) The cseList of announceTo of the application Registration is + # used (note that the list may be empty, in case the issuer wants + # to forbid announcing any created resources). + + # 3) If the activate element of the announceTo attribute from the + # application resource is set to FALSE, then the local CSE adds the + # announceTo attribute in the announceable resource (that trigger + # this procedure) and it sets the activate element to FALSE. The + # action returns. + + # 4) Otherwise (i.e. global set to FALSE or no announceTo is + # present), the CSE determines based on its policies, if the + # resource shall be announced: + #resource.announceTo = {'cseList': {'reference': []}} + resource.announceTo = [] + + # a) If the policies state the resource shall not be announced, + # this action returns and the original procedure is continued. + + # b) Otherwise, the CSE determines which CSEs shall be in the + # cseList element of the announceTo attribute. The CSE also set the + # active element to TRUE. + + # Then, the local CSE shall announce the resource to the CSEs in the + # cseList element. This procedure is described in step 3, from a to e + # in clause 10.3.2.8.1, with the exception that the requestingEntity + # for any initiated request is set to the CSE hosting the announcing + # resource. + # 5) The announceTo with the updated cseList is set on the + # original resource (that trigger this procedure). This may + # trigger notifications to resources that have subscribed to + # changes in the original resource or its announceTo attribute. 
+ pass + + # get_app().then(handle_app) + #if self.config.get("auto_announce", True): + # #resource.announceTo = { + # # 'cseList': {'reference': self._cse_links.values()}, + # # 'activated': True + # #} + # resource.announceTo = self._cse_links.values() + # # TODO: update the resource internally + #else: + # #resource.announceTo = {'cseList': {'reference': []}} + # resource.announceTo = [] + + self.logger.debug('resource announceTo is %s'%resource.announceTo) + self._handle_announce_to(resource, req_ent) \ + .then(lambda l: self._update_resource(resource, l[0], l[1], True)) + """ + + def _delete_annc(self, resource, onem2m_req): + # if req_ind.expired: + # return self._announcements.pop(req_ind.path, None) + + # req_ent = req_ind.requestingEntity + originator = onem2m_req.originator + try: + resource = deepcopy(self._announcements[onem2m_req.to]['resource']) + except KeyError: + return + # resource.announceTo = {'cseList': {'reference': []}} + resource.announceTo = [] + + self._handle_announce_to(resource, originator) \ + .then(lambda r: self._announcements.pop(onem2m_req.to, None)) + + def _handle_announce_to(self, resource, req_ent): + self.logger.debug("Handle annouceTo called %s" % resource) + try: + old_resource = self._announcements[resource.path]['resource'] + except KeyError: + # todo: handle missing resource + raise CSEError() + try: + # old_cse_list = old_resource.announceTo.get('cseList') \ + # .get('reference') \ + # if old_resource.announceTo.get('activated', True) else [] + old_cse_list = old_resource.announceTo + except AttributeError: + old_cse_list = [] + + # db_cse_list = resource.announceTo.get('cseList', { + # "reference": [] + # }).get('reference') + db_cse_list = resource.announceTo + + annc_model = get_onem2m_type(resource.typename + "Annc") + + resource_id = getattr(resource, resource.id_attribute) + annc_id = resource_id + 'Annc' + + # a) Check if the CSEs indicated in the cseList element of the + # announceTo attribute are registered to/from this local CSE. If any of + # the CSEs in the cseList is not registered then those CSEs are removed + # from the cseList and no further actions for those CSEs are performed. + def check_cse_list(): + self.logger.debug("check_cse_list: %s vs %s" % ( + db_cse_list, self._cse_links.values())) + return filter(lambda x: x in db_cse_list, self._cse_links.values()) + + # b) Send createXXXAnnouncementResourceRequestIndication (where XXX is + # replaced by the type of the resource to be announced) for each CSE in + # the cseLists element of the announceTo attribute that is NOT yet + # included in the previous-announceTo. 
The request includes: + def send_create_annc_pre(cse_uri): + try: + if resource.accessRightID is None: + return send_create_annc(cse_uri) + except AttributeError: + return send_create_annc(cse_uri) + + return self.api.is_local_path(resource.accessRightID) \ + .then(lambda local_ar: send_create_annc(cse_uri, local_ar)) + + def send_create_annc(cse_uri, local_ar=None): + annc = annc_model() + # endpoint = self.api.get_endpoint('mid', urlparse(cse_uri).scheme) + endpoint = self.config.get('endpoint', '') + + # * labels from the original resource; + annc.labels = resource.labels + + # * accessRightID from the original resource; + if local_ar: + annc.accessRightID = urljoin(endpoint, urlparse( + resource.accessRightID).path) + elif local_ar is None: + annc.accessRightID = local_ar + else: + annc.accessRightID = resource.accessRightID + + # * link is set to the URI of the original resource; + annc.link = urljoin(endpoint, resource.path) + # annc.link = self._cse_base + resource.path + + # * requestingEntity is set to the application; + # req_ent from from outer scope + + # * issuer is set to its own CSE ID (the local CSE performing the + # action); + # rst: not needed probably + + # * id of the resource shall be set to the id of the original + # resource postfixed with Annc. I.e. if the original resource has id + # "myApp", the announced resource shall have the id "myAppAnnc"; + annc.AE_ID = annc_id + annc.name = annc_id + + # * expirationTime handling is to the discretion of the CSE + # implementation. It is the responsibility of the local CSE to keep + # the announced resource in sync with the lifetime of the original + # resource, as long as the announcement is active. One strategy to + # minimize signalling would be to request the same expiration from + # the original resource. If this is accepted by the remote CSE, then + # no explicit de-announce is needed in case of expiration of the + # original resource; + annc.expirationTime = resource.expirationTime + + # * targetID is set as follow. + # RODO: inline + def get_target_id(): + cse_path = cse_uri # + '/cses/' + self.config['global']['cse_id'] + apps_path = self._cse_base # + '/applications/' + if resource.typename == 'AE': # 'application': # is appAnnc + return cse_path # + '/applications/' + else: + # TODO_oneM2M: Translate to onem2m + parent = sub(r'^locationC', 'c', resource.typename) + 's/' + if resource.path.find(apps_path) == 0: # is under appAnnc + # todo: lookup appAnnc in self._announcements + return cse_path + '/applications/' + \ + sub(apps_path, '', resource.path).split('/')[0] + \ + 'Annc/' + parent + else: # is other Annc + return cse_path + '/' + parent + + target_id = get_target_id() + # try: + # req_ent_mid = urljoin(endpoint, urlparse(req_ent).path) + # except AttributeError: + # self.logger.exception("Could not midify") + # req_ent_mid = None + req_ent_mid = None + + # create_annc_req_ind =\ + # CreateRequestIndication(target_id, annc, + # requestingEntity=req_ent_mid) + # + # return self.api.send_request_indication(create_annc_req_ind) + cse_req = OneM2MRequest(OneM2MOperation.create, target_id, req_ent_mid, + MetaInformation(None), cn=annc, ty=annc_id) + self.logger.debug('Sending Announcement %s' % cse_req) + return self.api.send_onem2m_request(cse_req) + + # c) Ignore all CSEs in the cseList element of the announceTo attribute + # that were already included in the previous-announceTo. 
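+
+        # Taken together, step b) above and step d) below boil down to a set
+        # difference between the previously announced CSE list and the one
+        # requested now. A minimal sketch with hypothetical URIs (the real
+        # lists are computed the same way inside send_anncs() further down):
+        #
+        #   previous  = ['http://cse-a:8000/onem2m']
+        #   requested = ['http://cse-a:8000/onem2m', 'http://cse-b:8000/onem2m']
+        #   to_create = [c for c in requested if c not in set(previous)]
+        #   to_delete = [c for c in previous if c not in set(requested)]
+        #   # -> announce to cse-b only, nothing to de-announce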
+ + # d) Send deleteXXXAnnouncementResourceRequestIndication (where XXX is + # replaced by the type of resource to be de-announced) for each CSE in + # the previous-announceTo that is not included in the cseList of the + # provided announceTo attribute. The request shall include the URI of + # the announcement resource to be removed. The request includes: + def send_delete_annc(cse_uri): + # * requestingEntity is set to the application; + # req_ent from from outer scope + + # * issuer is set to its own CSE ID (the local CSE performing the + # action); + # rst: not needed probably + + # * targetID is set to the resource URI of the previously + # announced-resource on the remote CSE. The local CSE received and + # stored the URI of the announced resource after it was created. + self.logger.debug("announcements %s" % self._announcements) + annc_path = self._announcements[resource.path]['uris'][cse_uri] + target_id = urljoin(cse_uri, '/onem2m/' + annc_path) + + cse_req = OneM2MRequest(OneM2MOperation.delete, target_id, None, + MetaInformation(None)) + self.logger.debug('Deleting Announcement %s' % cse_req) + return self.api.send_onem2m_request(cse_req) + # delete_annc_req_ind =\ + # DeleteRequestIndication(target_id, requestingEntity=req_ent) + # + # return self.api.send_request_indication(delete_annc_req_ind) + + # e) Waits until all the createXXXAnnouncementResourceResponseConfirm + # and/or deleteXXXAnnouncementResourceResponseConfirm are received and + # it acts as follow: + def send_anncs(cse_list): + # i) For each unsuccessful + # createXXXAnnouncementResourceResponseIndication, the remote CSE is + # removed from the cseList in the announceTo attribute. + def handle_create_err(res): + return res.cse_uri + + # ii) For each successful + # createXXXAnnouncementResourceResponseIndication, the local CSE + # shall internally store the resourceURI of the created announced + # resource. This URI is needed for delete the resource later on. + def handle_create(res): + # self._announcements[resource.path]['uris'][res.cse_uri] = \ + # res.res_con.resourceURI + self._announcements[resource.path]['uris'][ + res.cse_uri] = annc_id + return False + + # iii) For each unsuccessful + # deleteXXXAnnouncementResourceRequestIndication with the statusCode + # STATUS_NOT_FOUND, the remote CSE is removed from the cseList in + # the announceTo attribute. + # For all other statusCode value, no action is performed. + def handle_delete_err(res): + try: + if res.res_con.statusCode != 'STATUS_NOT_FOUND': + return res.cse_uri + finally: + del self._announcements[resource.path]['uris'][res.cse_uri] + return False + + # iv) For each successful + # deleteXXXAnnouncementResourceRequestIndication, the remote CSE is + # removed from the cseList in the announceTo attribute. 
+ def handle_delete(res): + del self._announcements[resource.path]['uris'][res.cse_uri] + return False + + create_list = [x for x in cse_list if x not in set(old_cse_list)] + delete_list = [x for x in old_cse_list if x not in set(cse_list)] + self.logger.debug( + 'create list %s \n delete list %s' % (cse_list, old_cse_list)) + + filtered_cses = [x for x in db_cse_list if x not in set(cse_list)] + + # links the send funcs with the handle result funcs + create_func = lambda s: send_create_annc_pre(s) \ + .then(lambda r: handle_create(AnncResult(s, r)), + lambda r: handle_create_err(AnncResult(s, r))) + delete_func = lambda s: send_delete_annc(s) \ + .then(lambda r: handle_delete(AnncResult(s, r)), + lambda r: handle_delete_err(AnncResult(s, r))) + + # filters out all False in the list + def filter_func(l): + return filter(None, l) + + return async_all([ + (async_all(map(create_func, create_list)).then(filter_func) + .then(lambda l: l + filtered_cses)), + async_all(map(delete_func, delete_list)).then(filter_func) + ]) + + return send_anncs(check_cse_list()) + + def _update_resource(self, resource, remove_list=None, add_list=None, + force=False): + self.logger.debug('Trying to update resource %s' % resource) + if not add_list: + add_list = [] + if not remove_list: + remove_list = [] + + old_resource = self._announcements[resource.path]['resource'] + old_resource.announceTo = resource.announceTo + + def update_announce_to(): + # update_req_ind = UpdateRequestIndication(resource.path + + # '/announceTo', resource, + # fields=['announceTo'], + # do_announce=False) + + # self.api.handle_request_indication(update_req_ind) + # TODO_oneM2M: Update the resource by sending the request + cse_req = OneM2MRequest(OneM2MOperation.update, resource.path, str(self), + MetaInformation(None), cn=resource, + ty=resource) + # self.api.handle_onem2m_request(cse_req) + + if len(remove_list) or len(add_list): + cse_list = resource.announceTo # .get('cseList').get('reference') + for s in remove_list: + cse_list.remove(s) + cse_list.extend(add_list) + # if not len(cse_list): + # try: + # del resource.announceTo['activated'] + # except KeyError: + # pass + old_resource.announceTo = resource.announceTo + update_announce_to() + + if force: + return update_announce_to() + + return self._update_announcements(resource, add_list) + + def _update_announcements(self, resource, add_list): + old_resource = self._announcements[resource.path]['resource'] + uris = self._announcements[resource.path]['uris'] + + attributes_changed = False + + try: + if resource.expirationTime != old_resource.expirationTime or \ + resource.labels != old_resource.labels or \ + resource.accessRightID != old_resource.accessRightID: + attributes_changed = True + except AttributeError: + if resource.expirationTime != old_resource.expirationTime or \ + resource.labels != old_resource.labels: + attributes_changed = True + + if attributes_changed: + + annc_model = get_onem2m_type(resource.typename + "Annc") + + def send_update_annc_pre(cse_uri): + # TODO_oneM2M: Needs updating + try: + if not resource.accessRightID: + return send_update_annc(cse_uri) + except AttributeError: + return send_update_annc(cse_uri) + + return self.api.is_local_path(resource.accessRightID) \ + .then(lambda local_ar: send_update_annc(cse_uri, local_ar)) + + def send_update_annc(cse_uri, local_ar=None): + # TODO_oneM2M: Update to oneM2M + # endpoint = self.api.get_endpoint('mid', + # urlparse(cse_uri).scheme) + endpoint = self.config.get('endpoint', '') + + annc = annc_model() + + # 
link hast to be set + annc.link = urljoin(endpoint, resource.path) + + # * labels from the original resource; + annc.labels = resource.labels + + # * accessRightID from the original resource; + if local_ar: + annc.accessRightID = urljoin(endpoint, urlparse( + resource.accessRightID).path) + elif local_ar is None: + annc.accessRightID = local_ar + else: + annc.accessRightID = resource.accessRightID + + # * expirationTime handling is to the discretion of the CSE + # implementation. It is the responsibility of the local CSE to + # keep the announced resource in sync with the lifetime of the + # original resource, as long as the announcement is active. One + # strategy to minimize signalling would be to request the same + # expiration from the original resource. If this is accepted by + # the remote CSE, then no explicit de-announce is needed in case + # of expiration of the original resource; + annc.expirationTime = resource.expirationTime + + # TODO(rst): fix this later + # update_req_ind = UpdateRequestIndication(uris[cse_uri], annc) + # + # # todo investigate response for not accepted expirationTime + # return self.api.send_request_indication(update_req_ind) + + old_resource.labels = resource.labels + try: + old_resource.accessRightID = resource.accessRightID + except AttributeError: + pass + old_resource.expirationTime = resource.expirationTime + + cse_list = resource.announceTo # .get('cseList', {}).get('reference') + # TODO: conversion to set() is questionable + update_list = [x for x in cse_list if x not in set(add_list)] + + return async_all(map(send_update_annc_pre, update_list)) + + self.logger.debug('No attributes changed, returning None') + return None diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/expiration_time_handler/__init__.py b/server/openmtc-cse/src/openmtc_cse/plugins/expiration_time_handler/__init__.py new file mode 100644 index 0000000..168b067 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/plugins/expiration_time_handler/__init__.py @@ -0,0 +1,122 @@ +from openmtc_server.Plugin import Plugin +from datetime import timedelta +from futile.collections.sortedlist import sortedlist +from openmtc_onem2m.model import ExpiringResource +from openmtc.util import datetime_now + + +class ExpirationTimeHandler(Plugin): + DEFAULT_LIFETIME = 24 * 60 * 60 # 1day + + timeout = 5 + + _timer = None + + def _init(self): + # todo(rst): transform to onem2m + raise RuntimeError('needs to be revised.') + self.events.resource_created.register_handler( + self._handle_expiration_time, ExpiringResource) + self.events.resource_deleted.register_handler(self._handle_delete, + ExpiringResource) + self.events.resource_updated.register_handler(self._handle_update, + ExpiringResource) + + self.default_lifetime = timedelta( + seconds=self.config.get("default_lifetime", self.DEFAULT_LIFETIME)) + + self._timetable = sortedlist() + self._purged = set() + + shelve = self.get_shelve("resources") + + for path, expiration_time in shelve.items(): + self._do_handle_expiration_time(path, expiration_time, shelve) + + shelve.commit() + self._initialized() + + def _start(self): + self._running = True + self._check_timetable() + self._started() + + def _stop(self): + self._running = False + if self._timer is not None: + self.api.cancel_timer(self._timer) + self._stopped() + + def _handle_expiration_time(self, instance, req_ind): + shelve = self.get_shelve("resources") + self._do_handle_expiration_time(instance.path, instance.expirationTime, + shelve) + shelve.commit() + + def 
_do_handle_expiration_time(self, path, expiration_time, shelve): + if expiration_time is not None: + shelve[path] = expiration_time + self.logger.debug("Adding resource to timetable: %s", path) + self._timetable.add((expiration_time, path)) + + def _handle_delete(self, instance, req_ind): + self._purged.discard(req_ind.path) + shelve = self.get_shelve("resources") + self._do_delete(req_ind.path, shelve) + shelve.commit() + + def _do_delete(self, path, shelve): + try: + expiration_time = shelve.pop(path) + except KeyError: + self.logger.debug("Resource %s is unknown", path) + else: + self.logger.debug("Removing resource from timetable: %s", path) + try: + self._timetable.remove((expiration_time, path)) + except ValueError: + pass + + def _handle_update(self, instance, req_ind): + if instance.path in self._purged: + return + + shelve = self.get_shelve("resources") + try: + self._do_delete(instance.path, shelve) + self._do_handle_expiration_time(instance.path, + instance.expirationTime, shelve) + shelve.commit() + except: + shelve.rollback() + raise + + def _purge(self, path): + self._purged.add(path) + # ri = DeleteRequestIndication(path, reason="expired") + # return self.api.handle_request_indication(ri) + + def _check_timetable(self): + if not self._running: + return + + now = datetime_now() + sleeptime = self.timeout + + while len(self._timetable) > 0 and now >= self._timetable[0][0]: + expired_path = self._timetable.pop(0)[1] + # self._resources.pop(expired_path) + self.logger.info("Resource has expired: %s", expired_path) + self._purge(expired_path) + try: + td = self._timetable[0][0] - now + try: + td = td.total_seconds() + except AttributeError: + # Jython does not have timedelta.total_seconds() + td = td.seconds + (td.days * 24 * 60 * 60) + sleeptime = min(td, sleeptime) + except IndexError: + pass + + self._timer = self.api.set_timer(sleeptime, self._check_timetable) diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/notification_handler/__init__.py b/server/openmtc-cse/src/openmtc_cse/plugins/notification_handler/__init__.py new file mode 100644 index 0000000..db9a1ba --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/plugins/notification_handler/__init__.py @@ -0,0 +1,372 @@ +from openmtc_onem2m import OneM2MRequest +from openmtc_onem2m.exc import CSENotFound +from openmtc_onem2m.model import ( + Subscription, + Notification, + SubscribableResource, + NotificationEventC, + NotificationContentTypeE, + EventNotificationCriteria, + NotificationEventTypeE, +) +from openmtc_onem2m.transport import OneM2MOperation +from openmtc_server.Plugin import Plugin + + +def get_event_notification_criteria(subscription): + # If the eventNotificationCriteria attribute is set, then the Originator + # shall check whether the corresponding event matches with the event + # criteria. + event_notification_criteria = subscription.eventNotificationCriteria \ + or EventNotificationCriteria() + # If the eventNotificationCriteria attribute is not configured, the + # Originator shall use the default setting of Update_of_Resource for + # notificationEventType and then continue with the step 2.0 + + # If notificationEventType is not set within the eventNotificationCriteria + # attribute, the Originator shall use the default setting of + # Update_of_Resource to compare against the event. + if not event_notification_criteria.notificationEventType: + event_notification_criteria.notificationEventType = [NotificationEventTypeE.updateOfResource] + + # If the event matches, go to the step 2.0. 
Otherwise, the Originator shall + # discard the corresponding event + return event_notification_criteria + + +class NotificationHandler(Plugin): + def __init__(self, api, config, *args, **kw): + super(NotificationHandler, self).__init__(api, config, *args, **kw) + + # subscriptions_info contains the relevant info of a current + # subscriptions, i.e. {pid: {rid: {net: value, ...}}} + self.subscriptions_info = {} + cse_id = config.get('onem2m', {}).get('cse_id', 'mn-cse-1') + sp_id = config.get('onem2m', {}).get('sp_id', 'openmtc.org') + self._rel_cse_id = '/' + cse_id + self._abs_cse_id = '//' + sp_id + '/' + cse_id + + def _init(self): + # subscription created + self.events.resource_created.register_handler(self._handle_subscription_created, Subscription) + + # subscription updated + self.events.resource_updated.register_handler(self._handle_subscription_updated, Subscription) + + # subscription deleted + self.events.resource_deleted.register_handler(self._handle_subscription_deleted, Subscription) + + # resource updated + self.events.resource_updated.register_handler(self._handle_subscribable_resource_updated, SubscribableResource) + + # resource created + self.events.resource_created.register_handler(self._handle_subscribable_resource_created, SubscribableResource) + + # resource deleted + self.events.resource_deleted.register_handler(self._handle_subscribable_resource_deleted, SubscribableResource) + + self._initialized() + + def _get_sub_list(self, pid, net): + return [ + v['sub'] for v in self.subscriptions_info.itervalues() + if v['pid'] == pid and net in v['enc'].notificationEventType + ] + + def _delete_subs_from_parent(self, pid): + self.subscriptions_info = { + k: v for k, v in self.subscriptions_info.iteritems() if v["pid"] != pid + } + + def _handle_subscription_created(self, subscription, _): + # todo: store somewhere + # if not self.subscriptions_info.get(subscription.resourceID): + # self.subscriptions_info[subscription.resourceID] = {} + + self.subscriptions_info[subscription.resourceID] = { + "pid": subscription.parentID, + "enc": get_event_notification_criteria(subscription), + "sub": subscription, + } + + def _handle_subscription_updated(self, subscription, _): + self.subscriptions_info[subscription.resourceID].update({ + "enc": get_event_notification_criteria(subscription), + # TODO(rst): test this + "sub": subscription, + }) + + def _handle_subscription_deleted(self, subscription, req): + # only when subscription is deleted directly + if not req.cascading: + try: + del self.subscriptions_info[subscription.resourceID] + except KeyError: + pass + + # 7.5.1.2.4 Notification for Subscription Deletion + # Originator: + # When the resource is deleted and subscriberURI of the + # resource is configured, the Originator shall send a + # Notify request primitive with subscriptionDeletion element of the + # notification data object set as TRUE and subscriptionReference element + # set as the URI of the resource to the entity indicated + # in subscriberURI. + + su = subscription.subscriberURI + if not su: + return + + try: + self.api.handle_onem2m_request(OneM2MRequest( + OneM2MOperation.notify, + su, + pc=Notification( + subscriptionDeletion=True, + subscriptionReference=subscription.path, + ), + )) + except CSENotFound: + self.logger.debug("subscription target %s already deleted or not existing." 
% su) + + def _handle_subscribable_resource_updated(self, resource, _): + self.logger.debug("_handle_subscribable_resource_updated for %s", resource) + + map( + lambda sub: self._handle_subscription(resource, sub), + self._get_sub_list( + resource.resourceID, + NotificationEventTypeE.updateOfResource, + ) + ) + + def _handle_subscribable_resource_created(self, resource, _): + self.logger.debug("_handle_subscribable_resource_created for %s", resource) + + map( + lambda sub: self._handle_subscription(resource, sub), + self._get_sub_list( + resource.parentID, + NotificationEventTypeE.createOfDirectChildResource, + ) + ) + + def _handle_subscribable_resource_deleted(self, resource, _): + self.logger.debug("_handle_subscribable_resource_deleted for %s", resource) + + rid = resource.resourceID + net_delete = NotificationEventTypeE.deleteOfResource + pid = resource.parentID + net_delete_child = NotificationEventTypeE.deleteOfDirectChildResource + + sub_list = (self._get_sub_list(rid, net_delete) + self._get_sub_list(pid, net_delete_child)) + + for sub in sub_list: + self._handle_subscription(resource, sub) + + # delete remaining subscriptions of parent from subscriptions_info + self._delete_subs_from_parent(resource.resourceID) + + def _handle_subscription(self, resource, sub): + self.logger.debug("_handle_subscription: %s", sub.get_values()) + + # 7.5.1.2.2 Notification for modification of subscribed resources + # When the notification message is forwarded or aggregated by transit + # CSEs, the Originator or a transit CSE shall check whether there are + # notification policies to enforce between subscription resource Hosting + # CSE and the notification target. In that case, the transit CSE as well + # as the Originator shall process Notify request primitive(s) by using + # the corresponding policy and send processed Notify request + # primitive(s) to the next CSE with notification policies related to the + # enforcement so that the transit CSE is able to enforce the policy + # defined by the subscriber. The notification policies related to the + # enforcement at this time is verified by using the subscription + # reference in the Notify request primitive. In the notification + # policies, the latestNotify attribute is only enforced in the transit + # CSE as well as the Originator. + + # If Event Category parameter is set to ''latest' in the notification + # request primitive, the transit CSE as well as Originator shall cache + # the most recent Notify request. That is, if a new Notify request is + # received by the CSE with a subscription reference that has already + # been buffered for a pending Notify request, the newer Notify request + # will replace the buffered older Notify request. + + # Originator: When an event is generated, the Originator shall execute + # the following steps in order: + + # Step 1.0 Check the eventNotificationCriteria attribute of the + # resource associated with the modified resource: + + def __check_event_notification_criteria(): + # return check_match() + return True + + if not __check_event_notification_criteria(): + return + + # step 2.0 + # The Originator shall check the notification policy as described in the + # below steps, but the notification policy may be checked in different + # order. After checking the notification policy in step 2.0 (i.e., from + # step 2.1to step 2.6), then continue with step 3.0 + + # Step 2.1 The Originator shall determine the type of the notification + # per the notificationContentType attribute. 
The possible values of for + # notificationContentType attribute are 'Modified Attributes', 'All + # Attributes', and or optionally 'ResourceID'. This attribute may be + # used joint with eventType attribute in the eventNotificationCriteria + # to determine if it is the attributes of the subscribed-to resource or + # the attributes of the child resource of the subscribed-to resource + # that shall be returned in the notification. + notification_content_type = sub.notificationContentType or NotificationContentTypeE.allAttributes + + # - If the value of notificationContentType is set to 'All Attributes', + # the Notify request primitive shall include the whole subscribed-to + # resource + # - If the notificationContentType attribute is not configured, the + # default value is set to 'All Attributes' + # - If the value of notificationContentType is set to 'Modified + # Attribute', the Notify request primitive shall include the modified + # attribute(s) only + # - If the value of notificationContentType is set to 'ResourceID', the + # Notify request primitive shall include the resourceID of the + # subscribed-to resource + if notification_content_type is ( + NotificationContentTypeE.allAttributes, + NotificationContentTypeE.modifiedAttributes, + NotificationContentTypeE.resourceID, + ): + pass + + # Step 2.2 Check the notificationEventCat attribute: + try: + # notification_event_cat = sub.notificationEventCat + # - If the notificationEventCat attribute is set, the Notify request + # primitive shall employ the Event Category parameter as given in the + # notificationEventCat attribute. Then continue with the next step + + if sub.notificationEventCat: + pass + # todo: what does this mean? + + # - If the notificationEventCat attribute is not configured,then + # continue with other step + else: + pass + except AttributeError: + pass + + try: + latest_notify = sub.latestNotify + # Step 2.3 Check the latestNotify attribute: + # - If the latestNotify attribute is set, the Originator shall assign + # Event Category parameter of value 'latest' of the notifications + # generated pertaining to the subscription created. Then continue with + # other step + if latest_notify: + pass + except AttributeError: + pass + + # NOTE: The use of some attributes such as rateLimit, batchNotify and + # preSubscriptionNotify is not supported in this release of the + # document. + + # Step 3.0 The Originator shall check the notification and reachability + # schedules, but the notification schedules may be checked in different + # order. + # - If the resource associated with the modified resource + # includes a child resource, the Originator + # shall check the time periods given in the scheduleElement attribute + # of the child resource. + # - Also, the Originator shall check the reachability schedule + # associated with the Receiver by exploring its resource. + # If reachability schedules are not present in a Node then that Entity + # is considered to be always reachable + # - If notificationSchedule and reachability schedule indicate that + # message transmission is allowed, then proceed with step 5.0. 
+ # Otherwise, proceed with step 4.0 + # - In particular, if the notificationEventCat attribute is set to + # ''immediate'' and the resource does not allow + # transmission, then go to step 5.0 and send the corresponding Notify + # request primitive by temporarily ignoring the Originator''s + # notification schedule + + # Step 4.0 Check the pendingNotification attribute: + # - If the pendingNotification attribute is set, then the Originator + # shall cache pending Notify request primitives according to the + # pendingNotification attribute. The possible values are + # ''sendLatest'' and ''sendAllPending''. If the value of + # pendingNotification is set to ''sendLatest'', the most recent Notify + # request primitive shall be cached by the Originator and it shall set + # the Event Category parameter to ''latest''. If pendingNotification + # is set to ''sendAllPending'', all Notify request primitives shall be + # cached by the Originator. If the pendingNotification attribute is + # not configured, the Originator shall discard the corresponding + # Notify request primitive. The processed Notify request primitive by + # the pendingNotification attribute is sent to the Receiver after the + # reachability recovery (see the step 6.0) + + # Step 5.0 Check the expirationCounter attribute: + # - If the expirationCounter attribute is set, then it shall be + # decreased by one when the Originator successfully sends the Notify + # request primitive. If the counter equals to zero('0'), the + # corresponding resource shall be deleted. Then end the + # 'Compose Notify Request Primitive' procedure If the + # expirationCounter attribute is not configured, then end the 'Compose + # Notify Request Primitive' procedure + + # Originator: After reachability recovery, the Originator shall execute + # the following steps in order: + + # Step 6.0 If the pendingNotification attribute is set, the Originator + # shall send the processed Notify request primitive by the + # pendingNotification attribute and, then continue with the step 7.0 + + # Step 7.0 Check the expirationCounter attribute: + # If the expirationCounter attribute is set, then its value shall be + # decreased by one when the Originator successfully sends the Notify + # request primitive. If the counter meets zero, the corresponding + # resource shall be deleted. Then end the 'Compose Notify + # Request Primitive' procedure. + # - If the expirationCounter attribute is not configured, then end the + # 'Compose Notify Request Primitive' procedure + + # Receiver: When the Hosting CSE receives a Notify request primitive, + # the Hosting CSE shall check validity of the primitive parameters. In + # case the Receiver is a transit CSE which forwards or aggregates Notify + # request primitives before sending to the subscriber or other transit + # CSEs, upon receiving the Notify request primitive with the Event + # Category parameter set to 'latest', the Receiver shall identify the + # latest Notify request primitive with the same subscription reference + # while storing Notify request primitives locally. When the Receiver as + # a transit CSE needs to send pending Notify request primitives, it + # shall send the latest Notify request primitive. 
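+
+        # Putting the steps above together: for every entry in
+        # sub.notificationURI, _send_notification() below issues a Notify
+        # primitive roughly like this sketch (the target URI is a
+        # hypothetical example):
+        #
+        #   OneM2MRequest(
+        #       op=OneM2MOperation.notify,
+        #       to='http://subscriber.example:6050/notify',
+        #       pc=Notification(
+        #           notificationEvent=NotificationEventC(representation=resource),
+        #           subscriptionReference=sub.path,  # CSE-ID prefixed when the target is CSE- or SP-relative
+        #           creator=sub.creator,
+        #       ),
+        #   )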
+ self._send_notification(resource, sub) + + def _send_notification(self, resource, sub): + self.logger.debug("sending notification for resource: %s", resource) + + def get_subscription_reference(to, path): + if to.startswith('//'): + return self._abs_cse_id + '/' + path + elif to.startswith('/'): + return self._rel_cse_id + '/' + path + else: + return path + + for uri in sub.notificationURI: + self.api.handle_onem2m_request(OneM2MRequest( + op=OneM2MOperation.notify, + to=uri, + pc=Notification( + notificationEvent=NotificationEventC( + representation=resource + ), + subscriptionReference=get_subscription_reference(uri, sub.path), + # TODO(rst): check if this is the sub creator or the creator of the notification + # TODO in this case the CSE + creator=sub.creator + ), + )) diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/notification_handler/__init__.pyc b/server/openmtc-cse/src/openmtc_cse/plugins/notification_handler/__init__.pyc new file mode 100644 index 0000000..c03dc69 Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/plugins/notification_handler/__init__.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/registration_handler/__init__.py b/server/openmtc-cse/src/openmtc_cse/plugins/registration_handler/__init__.py new file mode 100644 index 0000000..2215751 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/plugins/registration_handler/__init__.py @@ -0,0 +1,329 @@ +from openmtc_onem2m.exc import (CSENotFound, CSEError, STATUS_CONFLICT, + CSETargetNotReachable, CSEConflict) +from openmtc_onem2m.model import RemoteCSE, CSETypeIDE, CSEBase, Subscription +from openmtc_onem2m.transport import (OneM2MRequest, OneM2MOperation, + OneM2MErrorResponse) +from openmtc_server.Plugin import Plugin +from openmtc_server.exc import ConfigurationError + +from openmtc_cse.util import ExpTimeUpdater + + +class RegistrationHandler(Plugin): + """ Plugin to register this CSE with other CSEs. + """ + # defaults: + DEF_INTERVAL = 60 * 60 + DEF_OFFSET = 60 * 60 + + DEF_INTERVAL = 5 + DEF_OFFSET = 10 + + @property + def remote_cses(self): + return self.config.get("remote_cses") or () + + @property + def get_interval(self): + """ ExpirationTime update interval. + """ + return self.config.get("interval", self.DEF_INTERVAL) + + @property + def originator(self): + return self.cse_id + + @property + def get_offset(self): + """ Offset added to ExpirationTime to ensure it can be met early. + """ + return self.config.get("offset", self.DEF_OFFSET) + + def _init(self): + try: + onem2m_config = self.config["onem2m"] + except KeyError: + raise ConfigurationError("No onem2m config part!") + + # cse id + try: + self.cse_id = "/" + onem2m_config["cse_id"] + except KeyError: + raise ConfigurationError("Missing configuration key: cse_id") + + # cse type + cse_type = onem2m_config.get("cse_type") + try: + cse_type = getattr(CSETypeIDE, + str(cse_type).replace("-", "_").upper()) + except (AttributeError, TypeError, ValueError): + raise ConfigurationError("Invalid value for 'cse_type': %s" % + (cse_type,)) + self.cse_type = CSETypeIDE(cse_type) + + self.cse_base = onem2m_config.get("cse_base", "onem2m") + self.labels = self.config.get("labels", []) + + self.__registrations = [] + self._initialized() + + def _start(self): + """ Creates the CSE resource on the registered-to CSE. + Starts an ExpTimeUpdater for this resource. 
+ """ + self.refresher = None + self._registered = False + self._register() + self._started() + + def _handle_registration_error(self, error): + self.logger.warn("Could not register: %s", error) + self.__timer = self.api.set_timer(12000, self._register) + + def _register(self): + try: # todo: put this into own method + self.refresher = ExpTimeUpdater(interval=self.get_interval, + offset=self.get_offset) + except Exception as e: + self.logger.warn("The refresher was not started: %s", e) + + try: + self._handle_remote_cses() + except CSEError as e: + self._handle_registration_error(e) + + def _handle_remote_cses(self, handle_remote_cse_method=None): + remote_cses = self.remote_cses + + # default handle method like it used to be + if not handle_remote_cse_method: + handle_remote_cse_method = self._handle_remote_cse + + if not remote_cses: + self.logger.info("No remote CSEs configured") + return + + # add poa information + for remote_cse in remote_cses: + remote_cse_id = remote_cse.get("cse_id") + if remote_cse_id: + remote_cse_poa = remote_cse.get("poa", []) + self.api.add_poa_list(remote_cse_id, remote_cse_poa) + + return map(handle_remote_cse_method, remote_cses) + + def _handle_remote_cse_delete(self, remote_cse): + """ Sends a delete request for the RemoteCSE resource. + """ + try: + remote_cse_id = "/" + remote_cse["cse_id"] + except KeyError: + raise ConfigurationError('Missing parameter (cse_id) in %s' % + remote_cse) + remote_cse_base = remote_cse.get("cse_base", "onem2m") + + def _delete_remote_cse_base(): + to = remote_cse_id + '/' + remote_cse_base + self.cse_id + request = OneM2MRequest(OneM2MOperation.delete, to, + self.originator) + return self.api.send_onem2m_request(request) + + _delete_remote_cse_base() + + def _handle_remote_cse(self, remote_cse): + """ Sends a create request for the RemoteCSE resource. + Retrieves resource data afterwards. + + @return: RemoteCSE instance representing the created resource. 
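+
+        A remote CSE entry in the plugin configuration is expected to look
+        roughly like this (illustrative values only):
+
+            {"cse_id": "in-cse-1",
+             "cse_type": "IN_CSE",
+             "cse_base": "onem2m",
+             "poa": ["http://localhost:18000"]}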
+ """ + + try: + remote_cse_id = "/" + remote_cse["cse_id"] + except KeyError: + raise ConfigurationError('Missing parameter (cse_id) in %s' % + remote_cse) + + # cse type + remote_cse_type = remote_cse.get("cse_type") + try: + remote_cse_type = getattr(CSETypeIDE, str( + remote_cse_type).replace("-", "_").upper()) + except (AttributeError, TypeError, ValueError): + raise ConfigurationError("Invalid value for 'cse_type': %s" % + (remote_cse_type,)) + remote_cse_type = CSETypeIDE(remote_cse_type) + + remote_cse_base = remote_cse.get("cse_base", "onem2m") + + remote_cse_uri = remote_cse_id + '/' + remote_cse_base + + self.logger.info("registering %s at %s", self.cse_id, remote_cse_id) + + def _create_own_remote_cse_remotely(): + endpoints = self.api.get_onem2m_endpoints() + + from openmtc.util import datetime_the_future + + # init RemoteCSE object + cse = RemoteCSE(resourceName=self.cse_id[1:], + labels=self.labels, + cseType=self.cse_type, + pointOfAccess=endpoints, + CSEBase=self.cse_base, + CSE_ID=self.cse_id, + requestReachability=bool(len(endpoints)), + expirationTime=datetime_the_future(self.get_offset) + ) + if remote_cse.get('own_poa'): + cse.pointOfAccess = remote_cse.get('own_poa') + + request = OneM2MRequest(OneM2MOperation.create, remote_cse_uri, + self.originator, + ty=RemoteCSE, + pc=cse) + + return self.api.send_onem2m_request(request) + + def _retrieve_remote_cse_base(): + request = OneM2MRequest(OneM2MOperation.retrieve, remote_cse_uri, + self.originator, + ty=CSEBase) + return self.api.send_onem2m_request(request) + + def _create_remote_cse_locally(cse_base): + cse = RemoteCSE(resourceName=remote_cse_id[1:], + CSEBase=remote_cse_base, + CSE_ID=remote_cse_id, + cseType=remote_cse_type, + pointOfAccess=cse_base.pointOfAccess) + cse.pointOfAccess = remote_cse.get('poa') + + request = OneM2MRequest(OneM2MOperation.create, self.cse_base, + self.originator, + ty=RemoteCSE, + pc=cse) + + return self.api.handle_onem2m_request(request) + + try: + instance = _create_own_remote_cse_remotely().get().content + + def _update_function(updated_instance): + self._handle_remote_cse_update_expiration_time(remote_cse, + updated_instance) + + self.refresher.start(instance, send_update=_update_function) + except CSETargetNotReachable as e_not_reachable: + # TODO(rst): print error message + raise e_not_reachable + except OneM2MErrorResponse as error_response: + if error_response.response_status_code == STATUS_CONFLICT: + # TODO(rst): handle conflict here + raise CSEConflict() + else: + retrieved_cse_base = _retrieve_remote_cse_base().get().content + if retrieved_cse_base is None: + raise CSENotFound() + _create_remote_cse_locally(retrieved_cse_base).get() + + def _handle_remote_cse_update_expiration_time(self, remote_cse, + instance=None): + """ Sends a update request for the RemoteCSE resource. + Retrieves resource data afterwards. + + @return: RemoteCSE instance representing the created resource. + """ + try: + remote_cse_id = "/" + remote_cse["cse_id"] + except KeyError: + raise ConfigurationError('Missing parameter (cse_id) in %s' % + remote_cse) + # cse type + remote_cse_type = remote_cse.get("cse_type") + try: + remote_cse_type = getattr(CSETypeIDE, str( + remote_cse_type).replace("-", "_").upper()) + except (AttributeError, TypeError, ValueError): + raise ConfigurationError("Invalid value for 'cse_type': %s" % + (remote_cse_type,)) + + def _update_own_remote_cse_remotely(): + cse = RemoteCSE( + expirationTime=instance.expirationTime + ) + # todo: need to check format here? 
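+            # For illustration (hypothetical identifiers): with a remote
+            # cse_id of "in-cse-1" and a RemoteCSE resourceID of
+            # "remoteCSE-mn-cse-1", the update below goes to
+            # "/in-cse-1/remoteCSE-mn-cse-1" and carries nothing but the
+            # refreshed expirationTime.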
+ to = remote_cse_id + "/" + instance.resourceID + request = OneM2MRequest(OneM2MOperation.update, to, + fr=self.originator, + ty=RemoteCSE, + pc=cse + ) + return self.api.send_onem2m_request(request) + + try: + _update_own_remote_cse_remotely().get() + except CSETargetNotReachable as e_not_reachable: + raise e_not_reachable + except OneM2MErrorResponse as error_response: + if error_response.response_status_code == STATUS_CONFLICT: + raise CSEConflict() + else: + pass + + def _handle_remote_cse_update_endpoints(self, remote_cse, instance): + self.logger.debug("Updating endpoints in remote cse: %s", remote_cse) + self.logger.warn("Not implemented yet") + # todo: implement this + + def _handle_endpoint_change(self, csebase, req): + self._handle_remote_cses(self._handle_endpoint_change) + + def _register_events(self): + self.events.resource_updated.register_handler( + self._handle_endpoint_change, CSEBase) + + def _subscribe_remote_endpoints(self): + """ subscribe to the remote basecse resources, to get notified if + something changes + todo: fix and activate, needs notification server + """ + def __subscribe(remote_cse): + try: + remote_cse_id = "/" + remote_cse["cse_id"] + except KeyError: + raise ConfigurationError('Missing parameter (cse_id) in %s' % + remote_cse) + subs = Subscription(notificationURI=self.api.get_onem2m_endpoints()) + to = remote_cse_id + "/" + remote_cse.get("cse_base", "onem2m") + request = OneM2MRequest(OneM2MOperation.create, to, + self.originator, + ty=Subscription, + pc=subs) + return self.api.send_onem2m_request(request) + + self._handle_remote_cses(handle_remote_cse_method=__subscribe) + + def _start_refresher(self, instance): + self.refresher.start(instance) + + def _stop(self): + """ Stops the plugin. + DELETES CSE resource. 
+ """ + self._handle_remote_cses( + handle_remote_cse_method=self._handle_remote_cse_delete) + + try: + self.api.cancel_timer(self.__timer) + except AttributeError: + pass + + try: + self.refresher.stop() + except AttributeError: + pass + + if self._registered: + pass + + self._stopped() diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/registration_handler/__init__.pyc b/server/openmtc-cse/src/openmtc_cse/plugins/registration_handler/__init__.pyc new file mode 100644 index 0000000..d9cc57a Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/plugins/registration_handler/__init__.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/__init__.py b/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/__init__.py new file mode 100644 index 0000000..54c58ce --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/__init__.py @@ -0,0 +1,91 @@ +import ssl +from socket import getservbyname + +from openmtc_cse import OneM2MEndPoint +from openmtc_onem2m.client.http import get_client +from openmtc_server.Plugin import Plugin +from openmtc_server.configuration import Configuration, SimpleOption +from openmtc_server.platform.gevent.ServerRack import GEventServerRack +from .wsgi import OpenMTCWSGIServer, OpenMTCWSGIApplication, OpenMTCWSGIHandler + + +class HTTPTransportPluginConfiguration(Configuration): + __name__ = "HTTPTransportPluginConfiguration configuration" + __options__ = { + "port": SimpleOption(int, default=8000) + } + + +class HTTPTransportPlugin(Plugin): + __configuration__ = HTTPTransportPluginConfiguration + + def _init(self): + self._initialized() + + def _start_server_rack(self): + servers = [] + + interface = self.config.get("interface", "") + + ssl_certs = self.config.get("onem2m", {}).get("ssl_certs") + key_file = ssl_certs.get("key") + cert_file = ssl_certs.get("crt") + ca_file = ssl_certs.get("ca") + enable_https = self.config.get("enable_https", False) + require_cert = self.config.get("require_cert", True) + + is_https = enable_https and key_file and cert_file and ca_file + + scheme = "https" if is_https else "http" + + port = self.config.get("port", getservbyname(scheme)) + + default_content_type = self.config.get("global", {}).get( + "default_content_type", False) + + pretty = self.config.get("global", {}).get("pretty", False) + + # the __call__ of the OpenMTCWSGIApplication should return a function, + # which is given to the WSGIServer as + # the function that is called by the server for each incoming request + application = OpenMTCWSGIApplication( + self.api.handle_onem2m_request, server_address=interface, + default_content_type=default_content_type, pretty=pretty, + require_cert=require_cert + ) + + if is_https: + # in WSGI the application consists of a single function + # this function is called by the server for each request that the + # server has to handle + servers.append(OpenMTCWSGIServer((interface, port), application, + keyfile=key_file, certfile=cert_file, + ca_certs=ca_file, cert_reqs=ssl.CERT_OPTIONAL, + environ={'SERVER_NAME': 'openmtc.local'}, + handler_class=OpenMTCWSGIHandler, + ssl_version=ssl.PROTOCOL_TLSv1_2 + )) + else: + servers.append(OpenMTCWSGIServer((interface, port), application, + environ={'SERVER_NAME': 'openmtc.local'}, + handler_class=OpenMTCWSGIHandler)) + + rack = self.__rack = GEventServerRack(servers) + + rack.start() + + return scheme, interface, port + + def _start(self): + self.api.register_onem2m_client(("http", "https"), get_client) + + scheme, 
interface, port = self._start_server_rack() + + self.api.register_point_of_access( + OneM2MEndPoint(scheme=scheme, server_address=interface, port=port)) + + self._started() + + def _stop(self): + self.__rack.stop() + self._stopped() diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/__init__.pyc b/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/__init__.pyc new file mode 100644 index 0000000..f9bfd62 Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/__init__.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/wsgi.py b/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/wsgi.py new file mode 100644 index 0000000..dffae63 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/wsgi.py @@ -0,0 +1,478 @@ +import urlparse +import ssl +from _socket import gaierror +from datetime import datetime +from operator import itemgetter +from socket import AF_INET, AF_INET6, getaddrinfo, SOCK_STREAM, inet_pton + +from funcy import pluck +from gevent.wsgi import WSGIHandler, WSGIServer +from werkzeug.wrappers import (BaseRequest, CommonRequestDescriptorsMixin, + UserAgentMixin, AcceptMixin, Response) + +from futile.collections import get_iterable +from futile.logging import LoggerMixin +from openmtc.model import ModelTypeError +from openmtc_cse.methoddomain.filtercriteria import parse_filter_criteria +from openmtc_onem2m.exc import (CSEError, CSEContentsUnacceptable, + STATUS_INTERNAL_SERVER_ERROR, CSEBadRequest, + STATUS_IMPERSONATION_ERROR) +from openmtc_onem2m.model import (Notification, AttributeList, + get_long_attribute_name) +from openmtc_onem2m.model import get_long_member_name +from openmtc_onem2m.serializer import get_onem2m_supported_content_types +from openmtc_onem2m.serializer.util import (decode_onem2m_content, + encode_onem2m_content) +from openmtc_onem2m.transport import (OneM2MErrorResponse, OneM2MOperation, + OneM2MRequest, OneM2MResponse) + +_method_map_from_http = { + 'POST': OneM2MOperation.create, + 'GET': OneM2MOperation.retrieve, + 'PUT': OneM2MOperation.update, + 'DELETE': OneM2MOperation.delete +} + + +def is_ipv4(address): + try: + inet_pton(AF_INET, address) + except: + return False + return True + + +def is_ipv6(address): + try: + inet_pton(AF_INET6, address) + except: + return False + return True + + +class Request(BaseRequest, CommonRequestDescriptorsMixin, UserAgentMixin, + AcceptMixin): + pass + + +class OpenMTCWSGIServer(WSGIServer, LoggerMixin): + def do_read(self): + return super(OpenMTCWSGIServer, self).do_read() + + def wrap_socket_and_handle(self, client_socket, address): + try: + return super(OpenMTCWSGIServer, self).wrap_socket_and_handle(client_socket, address) + except ssl.SSLError as ssl_error: + # TODO(rst): add better handling of SSL errors + self.logger.debug(ssl_error) + return None + + +class OpenMTCWSGIApplication(LoggerMixin): + __cached_addresses = {} + + def __init__(self, request_handler, server_address, default_content_type, + pretty=False, require_cert=True): + super(OpenMTCWSGIApplication, self).__init__() + + self.request_handler = request_handler + self.__cache = set() + + if server_address == "::": + self.__addresses = self._get_addresses(AF_INET6) | \ + self._get_addresses(AF_INET) + self._resolve_host = self._resolve_host_ipv6 + elif server_address in ("", "0.0.0.0"): + self.__addresses = self._get_addresses(AF_INET) + else: + self.__addresses = get_iterable(server_address) + + 
self.logger.debug("All listening addresses: %s", self.__addresses) + + self.default_content_type = default_content_type + self.pretty = pretty + self.require_cert = require_cert + + def _get_addresses(self, family): + try: + return self.__cached_addresses[family] + except KeyError: + from netifaces import interfaces, ifaddresses + + addresses = self.__cached_addresses[family] = set() + + for interface in interfaces(): + try: + ifdata = ifaddresses(interface)[family] + ifaddrs = map(lambda x: x.split("%")[0], pluck("addr", + ifdata)) + addresses.update(ifaddrs) + except KeyError: + pass + + return addresses + + def _get_addr_info(self, host, family): + self.logger.debug("Resolving %s", host) + try: + info = getaddrinfo(host, 0, family, SOCK_STREAM) + return set(map(itemgetter(0), map(itemgetter(-1), info))) + except gaierror as e: + self.logger.error("Failed to resolve %s: %s", host, e) + return set() + + def _resolve_host(self, host): + if is_ipv4(host): + return {host} + return self._get_addr_info(host, AF_INET) + + def _resolve_host_ipv6(self, host): + self.logger.debug("Resolving: %s", host) + if is_ipv6(host): + return {host} + # TODO: kca: optimize + return (self._get_addr_info(host, AF_INET) | + self._get_addr_info(host, AF_INET6)) + + # environ is a dict that holds information about the request and the server + # start_response is the function that sets status and headers on the http + # response + def __call__(self, environ, start_response, subject_alt_name=None): + with Request(environ) as request: + # item assignment not supported for Request + # request['SubjectAltName'] = "test-cse_ae_id" + # just for testing + # subject_alt_name = "test_cse_ae_id" + + # handle no client certificate provided + if subject_alt_name is False: + if self.require_cert: + return Response( + status=403 + )(environ, start_response) + else: + subject_alt_name = None + + # our handle function for incoming requests + response = (self._handle_options(request) + if request.method == "OPTIONS" + else self._handle_http_request(request, + subject_alt_name)) + return response(environ, start_response) + + @staticmethod + def _handle_options(request): + # TODO: use full list of supported encodings + accept = request.accept_mimetypes + if not ('*/*' in accept or not accept or + 'application/json' in accept or + 'application/xml' in accept): + return Response('Only application/json and ' + 'application/xml are supported.', + status=400) + if not ('application/json' in accept or + 'application/xml' in accept): + return Response('Only application/json and ' + 'application/xml are supported.', + status=405) + return Response( + status=204 + ) + + def map_http_request_to_onem2m_request(self, http_request): + """Maps a HTTP request to a OneM2M request. + + :param http_request: the HTTP Request object to be mapped + :returns: OneM2MRequest -- the resulting OneM2M request object + :raises: OpenMTCError + """ + self.logger.debug("Mapping HTTP request '%s' to OneM2M request", + http_request) + + op = _method_map_from_http[http_request.method] + to = http_request.path[1:].lstrip('/') + if to.startswith('~/'): + to = to[1:] + elif to.startswith('_/'): + to = '/' + to[1:] + + get_header = http_request.headers.get + + # The X-M2M-Origin header shall be mapped to the From parameter of + # request and response primitives and vice versa, if applicable. + fr = get_header("x-m2m-origin") + + # The X-M2M-RI header shall be mapped to the Request Identifier + # parameter of request and response primitives and vice versa. 
+ rqi = get_header("x-m2m-ri") + + # primitive content + pc = decode_onem2m_content(http_request.input_stream.read(), + http_request.content_type) + + # resource type + # get out of content-type or from resource + ty = type(pc) if pc else None + if ty is Notification: + op = OneM2MOperation.notify + + # The X-M2M-GID header shall be mapped to the Group Request Identifier + # parameter of request primitives and vice versa, if applicable. + gid = get_header("x-m2m-gid") + + # The X-M2M-RTU header shall be mapped to the notificationURI element of + # the Response Type parameter of request primitives and vice versa, if + # applicable. If there are more than one value in the element, then the + # values shall be combined with "&" character. + rt = get_header("x-m2m-rtu") + + # The X-M2M-OT header shall be mapped to the Originating Timestamp + # parameter of request and response primitives, and vice versa, if + # applicable. + ot = get_header("x-m2m-ot") + + # The X-M2M-RST header shall be mapped to the Result Expiration + # Timestamp parameter of request and response primitives, and vice + # versa, if applicable. + rset = get_header("x-m2m-rst") + + # The X-M2M-RET header shall be mapped to the Request Expiration + # Timestamp parameter of request primitives and vice versa, if + # applicable. + rqet = get_header("x-m2m-ret") + + # The X-M2M-OET header shall be mapped to the Operation Execution Time + # parameter of request primitives and vice versa, if applicable + oet = get_header("x-m2m-oet") + + # The X-M2M-EC header shall be mapped to the Event Category parameter of + # request and response primitives, and vice versa, if applicable. + ec = get_header("x-m2m-ec") + + onem2m_request = OneM2MRequest(op=op, to=to, fr=fr, rqi=rqi, ty=ty, + pc=pc, ot=ot, rqet=rqet, rset=rset, + oet=oet, rt=rt, ec=ec, gid=gid) + + not_filter_params = ('rt', 'rp', 'rcn', 'da', 'drt') + multiple_params = ('lbl', 'ty', 'cty', 'atr') + + if http_request.query_string: + from openmtc_cse.methoddomain.filtercriteria import filters + params = urlparse.parse_qs(http_request.query_string) + get_param = params.get + f_c = {} + + for param in params: + self.logger.debug("checking '%s'", param) + + values = get_param(param) + + if param not in multiple_params and len(values) > 1: + raise CSEBadRequest("Multiple field names not permitted " + "for parameter %s" % param) + + param_long_name = get_long_member_name(param) + + # TODO(rst): handle attributes with get_long_attribute_name + if param in not_filter_params: + setattr(onem2m_request, param, values[0]) + elif param_long_name == 'attributeList': + onem2m_request.pc = AttributeList( + map(get_long_attribute_name, values[0].split(' '))) + elif param_long_name and hasattr(filters, param_long_name): + self.logger.debug("got values for '%s' ('%s'): %s", + param_long_name, param, values) + + if param in multiple_params: + f_c[param_long_name] = values + else: + f_c[param_long_name] = values[0] + else: + raise CSEBadRequest("Unknown parameter: %s" % param) + onem2m_request.filter_criteria = parse_filter_criteria(f_c) + + return onem2m_request + + def map_onem2m_response_to_http_response(self, request, response): + """Maps a OneM2M response to a HTTP response. 
+ + :param request: the HTTP request + :param response: the OneM2MResponse object to be mapped + :returns: Response -- the resulting HTTP Response + :raises: OpenMTCError + """ + self.logger.debug("Mapping OneM2M response: %s", response) + + status_code = response.status_code + + # resourceID prefix + if response.to.startswith('//'): + resource_id_pre = '/'.join(response.to.split('/')[:4]) + '/' + elif response.to.startswith('/'): + resource_id_pre = '/'.join(response.to.split('/')[:2]) + '/' + else: + resource_id_pre = '' + + headers = { + "x-m2m-ri": str(response.rqi), + "x-m2m-rsc": str(response.rsc) + } + + try: + headers['Content-Location'] = (resource_id_pre + response.content.resourceID) + except (AttributeError, TypeError): + pass + + pretty = self.pretty + if pretty is None: + user_agent = request.user_agent + pretty = user_agent and ("opera" in user_agent or + "mozilla" in user_agent) + supported = get_onem2m_supported_content_types() + if request.accept_mimetypes: + accept = request.accept_mimetypes.best_match(supported) + if accept is None: + # TODO(rst): raise 406 or similar + accept = self.default_content_type + else: + accept = self.default_content_type + try: + content_type, payload = encode_onem2m_content(response.content, + accept, pretty, + path=resource_id_pre) + except CSEContentsUnacceptable as e: + status_code = e.status_code + content_type = "text/plain" + payload = str(e) + + return Response( + payload, + status=status_code, + headers=headers, + content_type=content_type + ) + + def map_onem2m_error_to_http_error(self, response): + """Maps a OneM2M error response to a HTTP response. + + :param response: the OneM2MResponse object to be mapped + :returns: Response -- the resulting Response object + :raises: OpenMTCError + """ + self.logger.debug("Mapping OneM2M error: %s", response) + # This is strange, rs is defined as a string (Okay, Not Okay), and + # status codes are optional in onem2m req + + # TODO(rst): handle this better + try: + status_code = response.status_code + + headers = { + "x-m2m-ri": str(response.rqi), + "x-m2m-rsc": str(response.rsc) + } + except AttributeError: + status_code = STATUS_INTERNAL_SERVER_ERROR.http_status_code + headers = None + + return Response(status=status_code, headers=headers) + + def _handle_http_request(self, request, subject_alt_name): + onem2m_request = OneM2MRequest(None, None) + + remote_ip_addr = request.environ.get("REMOTE_ADDR", None) + + # check for impersonation error + # the from parameter of the onem2m request is verified against the + # association info of the cert + # subjectAltName of certificate vs. 
from parameter of onem2m request + impersonation_error = False + try: + onem2m_request = self.map_http_request_to_onem2m_request(request) + setattr(onem2m_request, "_remote_ip_addr", remote_ip_addr) + setattr(onem2m_request, "_authenticated", False) + if subject_alt_name is not None: + setattr(onem2m_request, "_authenticated", True) + impersonation_error = not bool(len(filter( + lambda x: x[0] == 'URI' and x[1] == onem2m_request.fr, + subject_alt_name))) + if impersonation_error: + onem2m_response = OneM2MResponse(STATUS_IMPERSONATION_ERROR, + request=onem2m_request) + response = self.map_onem2m_response_to_http_response( + request, onem2m_response) + + return response + + onem2m_response = self.request_handler(onem2m_request).get() + except OneM2MErrorResponse as error_response: + response = self.map_onem2m_error_to_http_error(error_response) + except CSEError as error: + error_response = OneM2MErrorResponse(error.response_status_code, + onem2m_request) + response = self.map_onem2m_error_to_http_error(error_response) + except (ModelTypeError, ValueError): + error_response = OneM2MErrorResponse(CSEBadRequest.response_status_code, + onem2m_request) + response = self.map_onem2m_error_to_http_error(error_response) + else: + response = self.map_onem2m_response_to_http_response( + request, onem2m_response) + + return response + + +class OpenMTCWSGIHandler(WSGIHandler, LoggerMixin): + + @staticmethod + def _is_cert_expired(not_after): + # TODO(rkr): timestamp issue when server checks the notAfter timestamp + # TODO of the certificate? + """ + Checks, if the notAfter field of the certificate is expired. + Note: The gevent ssl2 module is performing this check while method + do_handshake(), but looks like without regard + to the timestamps, therefore cert is valid for another hour. 
+ + :param not_after: + :return: True | False + """ + ssl_date_fmt = r'%b %d %H:%M:%S %Y %Z' + valid_until = datetime.strptime(not_after, ssl_date_fmt) + now = datetime.now() + + if valid_until < now: + return True + else: + return False + + def verify_path(self): + pass + + def run_application(self): + from ssl import SSLSocket + + if type(self.socket) == SSLSocket: + self.logger.debug("ssl socket connection established") + cert = self.socket.getpeercert() + if cert: + not_after = cert.get('notAfter', None) + subject = cert.get('subject', None) + subject_alt_name = cert.get('subjectAltName', None) + if subject_alt_name is not None: + self.logger.debug("san: %s", subject_alt_name) + if subject is not None: + self.logger.debug("subject: %s", subject) + if not_after is not None: + if self._is_cert_expired(not_after): + # raise + pass + else: + # no client certificate provided + subject_alt_name = False + self.result = self.application(self.environ, self.start_response, + subject_alt_name) + else: + self.result = self.application(self.environ, self.start_response) + + self.process_result() diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/wsgi.pyc b/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/wsgi.pyc new file mode 100644 index 0000000..04e7846 Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_http/wsgi.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_mqtt/__init__.py b/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_mqtt/__init__.py new file mode 100644 index 0000000..8c8efd6 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/plugins/transport_gevent_mqtt/__init__.py @@ -0,0 +1,39 @@ +from openmtc_server.Plugin import Plugin +from openmtc_server.configuration import Configuration +from openmtc_onem2m.client.mqtt import get_client, portmap + + +class MQTTTransportPluginConfiguration(Configuration): + __name__ = "MQTTTransportPluginConfiguration configuration" + + +class MQTTTransportPlugin(Plugin): + __configuration__ = MQTTTransportPluginConfiguration + + def _init(self): + self._initialized() + + def _start(self): + self.api.register_onem2m_client(portmap.keys(), get_client) + interface = self.config.get('interface', '127.0.0.1') + port = self.config.get('port', 1883) + try: + scheme = portmap.keys()[portmap.values().index(port)] + except (KeyError, ValueError): + scheme = 'mqtt' + self._client = get_client( + ''.join([ + scheme, + '://', + interface, + ':', + str(port), + ]), + handle_request_func=self.api.handle_onem2m_request, + client_id=self.config['onem2m'].get('cse_id'), + ) + self._started() + + def _stop(self): + self._client.stop() + self._stopped() diff --git a/server/openmtc-cse/src/openmtc_cse/transport/__init__.py b/server/openmtc-cse/src/openmtc_cse/transport/__init__.py new file mode 100644 index 0000000..600f4e5 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/transport/__init__.py @@ -0,0 +1,215 @@ +from urlparse import urlparse +from netifaces import AF_INET, AF_INET6 + +from aplus import Promise +from openmtc.exc import OpenMTCNetworkError +from openmtc_onem2m.model import RemoteCSE +from futile.collections import get_iterable +from openmtc_onem2m.transport import OneM2MErrorResponse +from openmtc_server import Component +from openmtc_onem2m.exc import CSETargetNotReachable, CSENotImplemented +from openmtc_onem2m.util import split_onem2m_address + + +class OneM2MTransportDomain(Component): + def __init__(self, config, *args, 
**kw): + super(OneM2MTransportDomain, self).__init__(*args, **kw) + + self._api = None + self.events = None + + self.config = config + + self.sp_id = self.config.get("onem2m", {}).get("sp_id", "") + self.cse_id = self.config.get("onem2m", {}).get("cse_id", "") + + do_overwrite = (self.config.get("onem2m", {}).get("overwrite_originator", {}) + .get("enabled", False)) + orig_overwrite = (self.config.get("onem2m", {}).get("overwrite_originator", {}) + .get("originator", "")) + if do_overwrite: + self.originator = orig_overwrite + else: + self.originator = "//" + self.sp_id + "/" + self.cse_id + + ssl_certs = self.config.get("onem2m", {}).get("ssl_certs") + self.key_file = ssl_certs.get("key") + self.cert_file = ssl_certs.get("crt") + self.ca_file = ssl_certs.get("ca") + self.accept_insecure_certs = self.config.get("onem2m", {}).get("accept_insecure_certs") + + self._addresses = {} + + self._poa_templates = [] + + self._endpoints = [] + + self._poa_lists = {} + + self._get_clients = {} + + def initialize(self, api): + self._api = api + self.events = api.events + + # addresses + self._api.events.interface_created.register_handler( + self._handle_interface_created) + self._api.events.interface_removed.register_handler( + self._handle_interface_removed) + self._api.events.address_created.register_handler( + self._handle_address_created) + self._api.events.address_removed.register_handler( + self._handle_address_removed) + + # remote CSEs + self.events.resource_created.register_handler( + self._handle_cse_created, RemoteCSE) + self.events.resource_updated.register_handler( + self._handle_cse_updated, RemoteCSE) + self.events.resource_deleted.register_handler( + self._handle_cse_deleted, RemoteCSE) + + interfaces = self._api.network_manager.get_interfaces().get() + self._addresses = {i.name: filter(self._filter_out_link_local, + i.addresses) + for i in interfaces} + + @staticmethod + def _filter_out_link_local(address): + return not address.address.startswith("fe80:") + + def _get_address_list(self): + return [i for s in self._addresses.values() for i in s] + + def _create_endpoints(self): + self._endpoints = [] + + for poa_t in self._poa_templates: + def map_func(address): + if address.family == AF_INET6: + a = '[' + address.address + ']' + else: + a = address.address + return poa_t.scheme + '://' + a + ':' + str(poa_t.port) + + if poa_t.server_address == "::": + def filter_func(x): + return x + elif poa_t.server_address in ("", "0.0.0.0"): + def filter_func(x): + return x.family == AF_INET + else: + def filter_func(x): + return x.address == poa_t.server_address + + self._endpoints += map(map_func, filter(filter_func, + self._get_address_list())) + + # interface handling + def _handle_interface_created(self, interface): + self._addresses[interface.name] = filter(self._filter_out_link_local, + interface.addresses) + self._create_endpoints() + + def _handle_interface_removed(self, interface): + del self._addresses[interface.name] + self._create_endpoints() + + def _handle_address_created(self, interface_name, address): + if self._filter_out_link_local(address): + self._addresses[interface_name].append(address) + self._create_endpoints() + + def _handle_address_removed(self, interface_name, address): + if self._filter_out_link_local(address): + self._addresses[interface_name].remove(address) + self._create_endpoints() + + # cse handling + # TODO(rst): find out if IDs starting with slash or not + def _handle_cse_created(self, instance, req=None): + self.logger.debug("_handle_cse_created(instance=%s, 
req=%s)", + instance, req) + self.add_poa_list(instance.CSE_ID[1:], instance.pointOfAccess) + + def _handle_cse_updated(self, instance, req=None): + self.logger.debug("_handle_cse_updated(instance=%s, req=%s)", + instance, req) + # self._remove_poas(req_ind.path) + # self.add_poa_list(instance.pointOfAccess, instance.path) + + def _handle_cse_deleted(self, instance, req): + self.logger.debug("_handle_cse_deleted(req_ind=%s)", req) + # self._remove_poas(req.path) + + # api functions + def register_client(self, schemes, get_client): + """Registers a specific client for the given schemes.""" + schemes = set(map(str.lower, get_iterable(schemes))) + + for scheme in schemes: + self._get_clients[scheme] = get_client + + def register_point_of_access(self, poa): + """Registers a point of access.""" + self._poa_templates.append(poa) + self._create_endpoints() + + def _send_request_to_endpoints(self, onem2m_request, poa_list): + with Promise() as p: + if not poa_list: + p.reject(CSETargetNotReachable()) + + onem2m_request.originator = self.originator + + for poa in poa_list: + use_xml = False # TODO(rst): check how this needs to be handled + ssl_certs = { + 'ca_certs': self.ca_file, + 'cert_file': self.cert_file, + 'key_file': self.key_file + } + # TODO(hve): add scheme test. + client = self._get_clients[urlparse(poa).scheme]( + poa, use_xml, insecure=self.accept_insecure_certs, **ssl_certs) + try: + response = client.send_onem2m_request(onem2m_request).get() + p.fulfill(response) + break + except OpenMTCNetworkError: + continue + except OneM2MErrorResponse as error_response: + p.reject(error_response) + + if p.isPending(): + p.reject(CSETargetNotReachable()) + return p + + def send_notify(self, notify_request, poa_list=None): + notify_request.to = '' + return self._send_request_to_endpoints(notify_request, poa_list) + + def send_onem2m_request(self, onem2m_request): + path = onem2m_request.to + + sp_id, cse_id, _ = split_onem2m_address(path) + + if not cse_id: + poa_list = [] + else: + if sp_id and sp_id[2:] != self.sp_id: + # inter-domain routing + raise CSENotImplemented() + else: + # intra-domain routing + poa_list = self._poa_lists.get(cse_id[1:], []) + + return self._send_request_to_endpoints(onem2m_request, poa_list) + + # TODO(rst): add here more options to retrieve only non-local addresses etc. 
+ def get_endpoints(self): + return self._endpoints + + def add_poa_list(self, identifier, poa_list): + self._poa_lists[identifier] = poa_list diff --git a/server/openmtc-cse/src/openmtc_cse/transport/__init__.pyc b/server/openmtc-cse/src/openmtc_cse/transport/__init__.pyc new file mode 100644 index 0000000..9da940d Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/transport/__init__.pyc differ diff --git a/server/openmtc-cse/src/openmtc_cse/util.py b/server/openmtc-cse/src/openmtc_cse/util.py new file mode 100644 index 0000000..6017355 --- /dev/null +++ b/server/openmtc-cse/src/openmtc_cse/util.py @@ -0,0 +1,170 @@ +import aplus +from openmtc.exc import OpenMTCNetworkError +from openmtc_onem2m.exc import CSEError +from openmtc_onem2m.transport import OneM2MErrorResponse +from futile.logging import LoggerMixin +from threading import Timer +import time +from openmtc.util import datetime_the_future +import abc + + +def log_error(error): + try: + return error.status_code == 500 + except AttributeError: + return not isinstance(error, (OpenMTCNetworkError, CSEError, + OneM2MErrorResponse)) + +aplus.log_error = log_error + + +class ResourceUpdater(LoggerMixin): + """ Baseclass for automatic resource updating""" + + def __init__(self, send_update=None, interval=None, offset=None, + *args, **kw): + """ + @param send_update: send_update function for update requests + @param interval: refresh interval in seconds + @param offset: additional offset for expirationTime in seconds + """ + super(ResourceUpdater, self).__init__(*args, **kw) + self.__interval = interval or 60 * 60 + self.__offset = offset or 60 * 60 + self.__timers = {} + self.__shutdown = False + self.send_update = send_update + + @abc.abstractmethod + def start(self, instance, fields=None, restore=None, send_update=None ): + return + + @abc.abstractmethod + def stop(self): + return + + @property + def offset(self): + return self.__offset + + @property + def interval(self): + return self.__interval + + @property + def timers(self): + return self.__timers + + +class ExpTimeUpdater(ResourceUpdater): + """ Utility class to update mtc resources' expirationTime periodically. + Based on python Timers. + """ + def __init__(self, send_update=None, interval=None, offset=None, + *args, **kw): + """ + @param send_update: send_update function for update requests + @param interval: refresh interval in seconds + @param offset: additional offset for expirationTime in seconds + """ + super(ExpTimeUpdater, self).__init__(send_update=send_update, + interval=interval, offset=offset, + *args, **kw) + + def start(self, instance, fields=None, restore=None, send_update=None): + """ Starts a threading.Timer chain, + to repeatedly update a resource instances's expirationTime. + + @param instance: resource instance + @param fields: additional fields mandatory during update + @param restore: function that will restore the instance, if it has + expired accidentally. Has to restart the refresher. 
+ @param send_update: + """ + self.logger.debug("starting expTimeUpdater: %s %s" % (instance, + fields)) + self.__shutdown = False + interval = (time.mktime(instance.expirationTime.timetuple()) - + (time.time() + time.timezone)) + if interval > self.offset: + interval -= self.offset + else: + interval -= (interval/10) + + send_update = send_update or self.send_update or None + + kwargs = {"instance": instance, "fields": fields, "restore": restore, + "send_update": send_update} + t = Timer(interval, self.__updateExpTime, kwargs=kwargs) + t.start() + + self.timers[instance.path] = t + + def __updateExpTime(self, instance, future=None, fields=[], restore=None, + send_update=None): + """ Updates a resource instance's expirationTime to future. + + @note: Starts a new Timer. + @param instance: resource instance to update + @param future: new expirationTime value (optional) + @param fields: additional fields mandatory during update + @param restore: function that will restore the instance, if it has + expired accidentally. Has to restart the refresher. + """ + self.logger.debug("__updateExpTime: %s" % instance ) + if self.__shutdown: + return + + interval = self.interval + offset = self.offset + future = datetime_the_future(interval + offset) + + instance.expirationTime = future + + if send_update: + send_update(instance) + else: + return + + # NOTE: expirationTime might have been changed by CSE at this point. + # update could/should return the updated instance in this case, + # but does it? + # => additional GET to confirm expirationTime ? + kwargs = {"instance": instance, "fields": fields, "restore": restore, + "send_update": send_update} + t = Timer(interval, self.__updateExpTime, kwargs=kwargs) + t.start() + self.timers[instance.path] = t + # hopefully, GC will delete the old timer + + def stop(self): + self.__shutdown = True + self.logger.debug("Killing timers: %s" % self.timers) + for t in self.timers.values(): + t.cancel() + + +def main(): + """ + Testing code + """ + + def update_function(instance): + pass + + e = ExpTimeUpdater(send_update=update_function, interval=5, offset=1) + + from openmtc_onem2m.model import Container + from openmtc.util import datetime_now + from time import sleep + + instance = Container( + expirationTime=datetime_now(), + path="/test/path" + ) + e.start(instance) + sleep(10) + +if __name__ == "__main__": + main() diff --git a/server/openmtc-cse/src/openmtc_cse/util.pyc b/server/openmtc-cse/src/openmtc_cse/util.pyc new file mode 100644 index 0000000..7150340 Binary files /dev/null and b/server/openmtc-cse/src/openmtc_cse/util.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/Event.py b/server/openmtc-server/src/openmtc_server/Event.py new file mode 100644 index 0000000..6a3ca16 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/Event.py @@ -0,0 +1,95 @@ +from abc import abstractmethod, ABCMeta + +from futile import issubclass as safe_issubclass +from futile.logging import LoggerMixin +from openmtc.model import Resource + + +class Event(LoggerMixin): + __metaclass__ = ABCMeta + + @abstractmethod + def fire(self, *event_data): + raise NotImplementedError() + + @abstractmethod + def register_handler(self, handler, *args, **kw): + raise NotImplementedError() + + +class EventSpec(object): + __metaclass__ = ABCMeta + + @abstractmethod + def matches(self, item): + raise NotImplementedError() + + +class BasicEvent(Event): + def __init__(self): + super(BasicEvent, self).__init__() + + self._handlers = [] + + def _add_handler_data(self, data): + 
handler = data + if handler in self._handlers: + self.logger.warn("Handler %s is already registered", handler) + else: + self._handlers.append(handler) + + def register_handler(self, handler, **kw): + self._add_handler_data(handler) + + def _execute_handler(self, handler, *event_data): + self.logger.debug("Running handler %s with %s", handler, event_data) + try: + handler(*event_data) + except Exception: + self.logger.exception("Error in event handler") + self.logger.debug("handler %s finished", handler) + + def _fired(self, *event_data): + for handler in self._handlers: + self._execute_handler(handler, *event_data) + + def fire(self, *event_data): + self.logger.debug("Fired: %s with %s", self, event_data) + self._fired(*event_data) + + +class ResourceTreeEvent(BasicEvent): + def _add_handler_data(self, data): + resource_type = data[0] + handler = data[1] + + # TODO: kca: error messages + if resource_type is not None and not safe_issubclass(resource_type, + Resource): + raise TypeError(resource_type) + + if not callable(handler): + raise TypeError(handler) + + if data in self._handlers: + self.logger.warn("Handler %s is already registered for type %s", + handler, resource_type or "") + else: + self._handlers.append(data) + + def register_handler(self, handler, resource_type=None, **kw): + self._add_handler_data((resource_type, handler)) + + def _execute_handler(self, data, *event_data): + handler = data[1] + self.logger.debug("Running handler %s with %s", handler, event_data) + handler(*event_data) + self.logger.debug("handler finished") + + def _fired(self, resource_type, *event_data): + for data in self._handlers: + handled_type = data[0] + if handled_type is None or issubclass(resource_type, handled_type): + self._execute_handler(data, *event_data) + else: + pass diff --git a/server/openmtc-server/src/openmtc_server/Event.pyc b/server/openmtc-server/src/openmtc_server/Event.pyc new file mode 100644 index 0000000..c3635ba Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/Event.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/NetworkManager.py b/server/openmtc-server/src/openmtc_server/NetworkManager.py new file mode 100644 index 0000000..f795cb5 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/NetworkManager.py @@ -0,0 +1,34 @@ +from abc import abstractmethod +from collections import namedtuple +from openmtc_server import Component + + +Interface = namedtuple("Interface", ("name", "addresses", "hw_address")) +Address = namedtuple("Address", ("address", "family")) + + +class NetworkManager(Component): + + @abstractmethod + def get_interfaces(self): + """Returns all known network interfaces + + :return Promise([Interface]): a promise for a list of interfaces + """ + + @abstractmethod + def get_interface(self, name): + """Returns an Interface object identified by name + + :param name: name of interface + :return Promise(Interface): a promise for an interface + :raise UnknownInterface: if interface was not found + """ + + @abstractmethod + def get_addresses(self, interface=None): + """Get addresses of a given interface or all addresses if :interface: is None + + :param interface: name of interface + :return: Promise([Address]): a promise for a list of addresses + """ diff --git a/server/openmtc-server/src/openmtc_server/NetworkManager.pyc b/server/openmtc-server/src/openmtc_server/NetworkManager.pyc new file mode 100644 index 0000000..56c8bce Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/NetworkManager.pyc differ diff 
--git a/server/openmtc-server/src/openmtc_server/Plugin.py b/server/openmtc-server/src/openmtc_server/Plugin.py new file mode 100644 index 0000000..0573ac8 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/Plugin.py @@ -0,0 +1,102 @@ +from futile.logging import LoggerMixin +from aplus import Promise +from openmtc.exc import OpenMTCError +from openmtc_server.configuration import Configuration + + +class BasicPlugin(LoggerMixin): + started = False + initialized = False + + def __init__(self, config, *args, **kw): + super(BasicPlugin, self).__init__(*args, **kw) + + self.config = config or {} + + self.logger.debug("Plugin %s created with config %s" % + (type(self).__name__, self.config)) + + self.__promise = None + + def initialize(self): + self.logger.info("Initializing plugin %s", type(self).__name__) + p = self.__promise = Promise() + try: + self._init() + except BaseException as e: + self.logger.exception("Failed to initialize plugin") + self._error(e) + + return p + init = initialize + + def start(self): + self.logger.info("Starting plugin %s", type(self).__name__) + p = self.__promise = Promise() + try: + self._start() + except BaseException as e: + self.logger.exception("Failed to start plugin") + self._error(e) + + return p + + def stop(self): + p = Promise() + + if not self.started: + p.reject(OpenMTCError("Plugin %s was not started")) + else: + self.__promise = p + try: + self._stop() + except BaseException as e: + self.logger.exception("Failed to stop plugin") + self._error(e) + + return p + + def _init(self): + self._initialized() + + def _start(self): + self._started() + + def _stop(self): + self._stopped() + + def _ready(self): + p = self.__promise + del self.__promise + p.fulfill(self) + + def _initialized(self): + self.logger.debug("Plugin %s is initialized", self) + self.initialized = True + self._ready() + + def _started(self): + self.logger.debug("Plugin %s is started", self) + self.started = True + self._ready() + + def _stopped(self): + del self.started + self._ready() + + def _error(self, e): + self.__promise.reject(e) + del self.__promise + + +class Plugin(BasicPlugin): + __configuration__ = Configuration + + def __init__(self, api, config, *args, **kw): + super(Plugin, self).__init__(config=config, *args, **kw) + + self.api = api + self.events = api.events + + def get_shelve(self, name): + return self.api.db.get_shelve("%s_%s" % (type(self).__name__, name)) diff --git a/server/openmtc-server/src/openmtc_server/Plugin.pyc b/server/openmtc-server/src/openmtc_server/Plugin.pyc new file mode 100644 index 0000000..73e0c0b Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/Plugin.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/TaskRunner.py b/server/openmtc-server/src/openmtc_server/TaskRunner.py new file mode 100644 index 0000000..5b5116d --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/TaskRunner.py @@ -0,0 +1,19 @@ +from abc import ABCMeta, abstractmethod +from futile.logging import LoggerMixin + + +class TaskRunner(LoggerMixin): + __metaclass__ = ABCMeta + + @abstractmethod + def run_task(self, task, *args, **kw): + raise NotImplementedError() + + def stop(self): + pass + + def _execute(self, func, args, kw): + try: + func(*args, **kw) + except Exception: + self.logger.exception("Error in background task") diff --git a/server/openmtc-server/src/openmtc_server/TaskRunner.pyc b/server/openmtc-server/src/openmtc_server/TaskRunner.pyc new file mode 100644 index 0000000..898628f Binary files /dev/null and 
b/server/openmtc-server/src/openmtc_server/TaskRunner.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/__init__.py b/server/openmtc-server/src/openmtc_server/__init__.py new file mode 100644 index 0000000..dbeb488 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/__init__.py @@ -0,0 +1,25 @@ +from futile.logging import LoggerMixin +from abc import ABCMeta, abstractmethod + + +class Component(LoggerMixin): + def initialize(self, api): + pass + + def start(self): + pass + + def stop(self): + pass + + +class Serializer(LoggerMixin): + __metaclass__ = ABCMeta + + @abstractmethod + def encode(self, resource): + raise NotImplementedError() + + @abstractmethod + def decode(self, input): + raise NotImplementedError() diff --git a/server/openmtc-server/src/openmtc_server/__init__.pyc b/server/openmtc-server/src/openmtc_server/__init__.pyc new file mode 100644 index 0000000..2362cc2 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/__init__.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/api.py b/server/openmtc-server/src/openmtc_server/api.py new file mode 100644 index 0000000..e69de29 diff --git a/server/openmtc-server/src/openmtc_server/api.pyc b/server/openmtc-server/src/openmtc_server/api.pyc new file mode 100644 index 0000000..4f782c2 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/api.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/configuration.py b/server/openmtc-server/src/openmtc_server/configuration.py new file mode 100644 index 0000000..0339dfa --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/configuration.py @@ -0,0 +1,32 @@ +from datetime import timedelta +from openmtc.configuration import (Configuration, BooleanOption, ListOption, + LowerCaseEnumOption, LogLevel, SimpleOption) + + +class GlobalConfiguration(Configuration): + __name__ = "global configuration" + __options__ = {"disable_forwarding": BooleanOption(default=False), + "default_lifetime": SimpleOption(type=int, + default=timedelta(60 * 60), + converter=timedelta), + "max_lifetime": SimpleOption(type=int, + default=timedelta(60 * 60 * 24), + converter=timedelta), + "min_lifetime": SimpleOption(type=int, + default=timedelta(5), + converter=timedelta), + "additional_host_names": ListOption(str), + "require_auth": BooleanOption(default=False), + "default_content_type": SimpleOption()} + + +class LoggingConfiguration(Configuration): + __name__ = "logging configuration" + __options__ = {"level": LowerCaseEnumOption(default=LogLevel.error), + "file": SimpleOption(default=None)} + + +class MainConfiguration(Configuration): + __name__ = "main configuration" + __options__ = {"global": SimpleOption(GlobalConfiguration), + "logging": SimpleOption(LoggingConfiguration)} diff --git a/server/openmtc-server/src/openmtc_server/configuration.pyc b/server/openmtc-server/src/openmtc_server/configuration.pyc new file mode 100644 index 0000000..a357a3d Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/configuration.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/db/__init__.py b/server/openmtc-server/src/openmtc_server/db/__init__.py new file mode 100644 index 0000000..7601254 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/db/__init__.py @@ -0,0 +1,135 @@ +from abc import ABCMeta, abstractmethod +from futile.logging import LoggerMixin +from openmtc_onem2m.model import ContentInstance as Cin +from openmtc_server.db.exc import DBError +from collections import MutableMapping + + +class 
DBAdapter(LoggerMixin): + __metaclass__ = ABCMeta + + def __init__(self, config, *args, **kw): + super(DBAdapter, self).__init__(*args, **kw) + + self.config = config + + @abstractmethod + def start_session(self, std_type): + raise NotImplementedError() + + def start_onem2m_session(self): + return self.start_session('onem2m') + + @abstractmethod + def get_shelve(self, name): + raise NotImplementedError() + + @abstractmethod + def is_initialized(self): + raise NotImplementedError() + + @abstractmethod + def initialize(self, force=False): + raise NotImplementedError() + + def stop(self): + pass + + +class Shelve(LoggerMixin, MutableMapping): + __metaclass__ = ABCMeta + + @abstractmethod + def commit(self): + raise NotImplementedError() + + @abstractmethod + def rollback(self): + raise NotImplementedError() + + +class Session(object): + __metaclass__ = ABCMeta + + @abstractmethod + def store(self, resource): + raise NotImplementedError() + + @abstractmethod + def get(self, resource): + raise NotImplementedError() + + @abstractmethod + def get_collection(self, resource_type, parent, filter_criteria=None): + raise NotImplementedError() + + @abstractmethod + def get_oldest_content_instance(self, parent): + raise NotImplementedError() + + @abstractmethod + def get_latest_content_instance(self, parent): + raise NotImplementedError() + + @abstractmethod + def exists(self, resource_type, fields): + raise NotImplementedError() + + @abstractmethod + def update(self, resource, fields=None): + raise NotImplementedError() + + @abstractmethod + def delete(self, resource): + raise NotImplementedError() + + @abstractmethod + def delete_children(self, resource_type, parent): + raise NotImplementedError() + + @abstractmethod + def commit(self): + raise NotImplementedError() + + @abstractmethod + def rollback(self): + raise NotImplementedError() + + +class BasicSession(Session, LoggerMixin): + __metaclass__ = ABCMeta + + def __init__(self, resource_type, *args, **kw): + super(BasicSession, self).__init__(*args, **kw) + + if resource_type == 'onem2m': + self.cinType = Cin + else: + raise DBError('no valid type: %s' % type) + + @staticmethod + def _filter_oldest(collection): + if not collection: + raise DBError("ContentInstance collection is empty") + return collection[0] + + def get_oldest_content_instance(self, parent): + collection = self._get_content_instances(parent) + return self._filter_oldest(collection) + + @staticmethod + def _filter_latest(collection): + if not collection: + raise DBError("ContentInstance collection is empty") + return collection[-1] + + def get_latest_content_instance(self, parent): + collection = self._get_content_instances(parent) + return self._filter_latest(collection) + + def delete_children(self, resource_type, parent): + children = self.get_collection(resource_type, parent) + map(self.delete, children) + + def _get_content_instances(self, parent): + return self.get_collection(self.cinType, parent) diff --git a/server/openmtc-server/src/openmtc_server/db/__init__.pyc b/server/openmtc-server/src/openmtc_server/db/__init__.pyc new file mode 100644 index 0000000..5afe670 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/db/__init__.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/db/exc.py b/server/openmtc-server/src/openmtc_server/db/exc.py new file mode 100644 index 0000000..d3079a6 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/db/exc.py @@ -0,0 +1,17 @@ +from openmtc.exc import OpenMTCError + + +class DBError(OpenMTCError): + 
status_code = 500 + + @property + def statusCode(self): + return self.status_code + + +class DBConflict(DBError): + status_code = 409 + + +class DBNotFound(DBError): + status_code = 404 diff --git a/server/openmtc-server/src/openmtc_server/db/exc.pyc b/server/openmtc-server/src/openmtc_server/db/exc.pyc new file mode 100644 index 0000000..08dc9f2 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/db/exc.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/db/nodb2.py b/server/openmtc-server/src/openmtc_server/db/nodb2.py new file mode 100644 index 0000000..5d61816 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/db/nodb2.py @@ -0,0 +1,172 @@ +from openmtc_server.db import DBAdapter, Shelve, DBError +from openmtc_server.db import BasicSession +from copy import copy +from collections import defaultdict, OrderedDict +from openmtc_server.db.exc import DBConflict, DBNotFound +from openmtc_onem2m.model import OneM2MResource + + +class NoDB2Session(BasicSession): + def __init__(self, db, std_type, *args, **kw): + super(NoDB2Session, self).__init__(std_type, *args, **kw) + + self.db = db + self.std_type = std_type + + if std_type == 'onem2m': + self.resources = db.onem2m_resources + self.children = db.onem2m_children + self.resource_type = OneM2MResource + else: + raise DBError('no valid type: %s' % type) + + def store(self, resource): + path = resource.path + + resource_type = type(resource) + + self.logger.debug("Adding resource to db: %s -> %s (%s)", + path, resource, resource_type) + + if path and path in self.resources: + raise DBConflict(path) + + if resource_type is not self.resource_type: + parent_path = resource.parent_path + self.logger.debug("parent path: %s", parent_path) + try: + children = self.children[parent_path] + except KeyError: + self.logger.debug("No parent found") + else: + children[resource_type][resource.path] = resource + + self.children[path] = defaultdict(OrderedDict) + self.resources[path] = resource + if self.std_type == 'onem2m': + self.resources[resource.resourceID] = resource + + def _get(self, path): + try: + return self.resources[path] + except KeyError: + self.logger.debug("Resource not found") + raise DBNotFound(path) + + def get(self, path): + assert path is not None + self.logger.debug("Getting resource: %s", path) + resource = self._get(path) + return copy(resource) + + def get_collection(self, resource_type, parent, filter_criteria=None): + self.logger.debug("Getting %s children of %s (%s)", resource_type, + parent, parent.__model_name__) + + if parent.__model_name__ == "onem2m": + if resource_type is None: + resources = self.children[parent.path].values() + resources = map(dict.values, resources) + resources = reduce(list.__add__, resources, []) + elif isinstance(resource_type, (list, tuple, set)): + resources = [v for k, v in self.children[parent.path].items() + if k in resource_type] + resources = map(dict.values, resources) + resources = reduce(list.__add__, resources, []) + else: + resources = self.children[parent.path][resource_type].values() + else: + resources = self.children[parent.path][resource_type].values() + self.logger.debug("Found children: %s", resources) + return resources + + def exists(self, resource_type, fields): + self.logger.debug("Checking existence of %s with %s", resource_type, + fields) + fields = dict(fields) + + if not fields: + raise ValueError(fields) + + if len(fields) != 1: + raise NotImplementedError("exist() only works for path") + + if resource_type is not None: + if 
issubclass(resource_type, self.resource_type): + path = fields.get("path") + if not path: + raise NotImplementedError("exist() only works for path") + return path in self.resources + + path = fields.get("path") + if path: + return path in self.resources + + try: + return fields["path"] in self.resources + except KeyError: + raise NotImplementedError("exist() only works for path") + + def update(self, resource, fields=None): + old_resource = self._get(resource.path) + self.logger.debug("Updating resource %s with %s", resource.path, fields) + + if fields is None: + old_resource.set_values(resource.values) + else: + for field in fields: + setattr(old_resource, field, getattr(resource, field)) + + def delete(self, resource): + self.logger.debug("Deleting: %s", resource) + + del self.resources[resource.path] + if self.std_type == 'onem2m': + del self.resources[resource.resourceID] + del self.children[resource.path] + try: + children = self.children[resource.parent_path] + except KeyError: + self.logger.debug("No parent found") + else: + del children[type(resource)][resource.path] + + def commit(self): + pass + + def rollback(self): + pass + + +class NoDB2Shelve(dict, Shelve): + def commit(self): + pass + + def rollback(self): + pass + + +class NoDB2(DBAdapter): + def __init__(self, *args, **kw): + super(NoDB2, self).__init__(*args, **kw) + self.onem2m_resources = None + self.onem2m_children = None + self.shelves = None + self.initialized = False + + def initialize(self, force=False): + if not force and self.is_initialized(): + raise Exception("Already initialized") + self.onem2m_resources = {} + self.onem2m_children = {} + self.shelves = defaultdict(NoDB2Shelve) + self.initialized = True + + def get_shelve(self, name): + return self.shelves[name] + + def start_session(self, std_type): + return NoDB2Session(self, std_type) + + def is_initialized(self): + return self.initialized diff --git a/server/openmtc-server/src/openmtc_server/db/nodb2.pyc b/server/openmtc-server/src/openmtc_server/db/nodb2.pyc new file mode 100644 index 0000000..a943a1d Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/db/nodb2.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/exc.py b/server/openmtc-server/src/openmtc_server/exc.py new file mode 100644 index 0000000..0bfaf97 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/exc.py @@ -0,0 +1,16 @@ +from openmtc.exc import OpenMTCError + + +class ConfigurationError(OpenMTCError): + pass + + +class NetworkManagerException(OpenMTCError): + """Generic Network Manager exception""" + pass + + +class InterfaceNotFoundException(NetworkManagerException): + """Exception raised if no interface was found matching request""" + pass + diff --git a/server/openmtc-server/src/openmtc_server/exc.pyc b/server/openmtc-server/src/openmtc_server/exc.pyc new file mode 100644 index 0000000..fed18c0 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/exc.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/platform/__init__.py b/server/openmtc-server/src/openmtc_server/platform/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/openmtc-server/src/openmtc_server/platform/__init__.pyc b/server/openmtc-server/src/openmtc_server/platform/__init__.pyc new file mode 100644 index 0000000..49bd2a4 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/platform/__init__.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/platform/default/Event.py 
b/server/openmtc-server/src/openmtc_server/platform/default/Event.py new file mode 100644 index 0000000..97d83ab --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/platform/default/Event.py @@ -0,0 +1,26 @@ +from openmtc_server.Event import ResourceTreeEvent, BasicEvent + + +class NetworkEvent(BasicEvent): + def __init__(self, run_task): + super(NetworkEvent, self).__init__() + + self._run_task = run_task + + def _execute_handler(self, data, *event_data): + self._run_task(super(NetworkEvent, self)._execute_handler, data, + *event_data) + + +class ResourceFinishEvent(ResourceTreeEvent): + def __init__(self, run_task): + super(ResourceFinishEvent, self).__init__() + + self._run_task = run_task + + def _execute_handler(self, data, *event_data): + self._run_task(super(ResourceFinishEvent, self)._execute_handler, data, + *event_data) + + def fire(self, resource, req): + self._fired(type(resource), resource, req) diff --git a/server/openmtc-server/src/openmtc_server/platform/default/Event.pyc b/server/openmtc-server/src/openmtc_server/platform/default/Event.pyc new file mode 100644 index 0000000..f2d13d4 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/platform/default/Event.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/platform/default/TaskRunner.py b/server/openmtc-server/src/openmtc_server/platform/default/TaskRunner.py new file mode 100644 index 0000000..12dd9e8 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/platform/default/TaskRunner.py @@ -0,0 +1,36 @@ +from Queue import Queue, Empty +from threading import Thread + +from openmtc_server.TaskRunner import TaskRunner + + +class AsyncTaskRunner(TaskRunner): + timeout = 5 + + def __init__(self, pool_size=20, *args, **kw): + super(AsyncTaskRunner, self).__init__(*args, **kw) + self._queue = Queue() + self._running = True + worker = self._worker = Thread(target=self._dispatcher) + + worker.start() + + def _dispatcher(self): + #TODO: kca: Should we process the whole queue before exiting? 
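# The get() call below blocks for at most `timeout` (5 s) so that the
# worker wakes up periodically even when the queue is empty; every wake-up
# re-checks self._running, which is how stop() eventually terminates this
# loop.  Queue.Empty is therefore expected here and simply restarts the
# wait instead of signalling an error.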
+ while self._running: + try: + func, args, kw = self._queue.get(timeout=self.timeout) + except Empty: + continue + + self._execute(func, args, kw) + + def run_task(self, func, *args, **kw): + self._queue.put((func, args, kw)) + + def stop(self): + self._running = False + self.logger.debug("Waiting for worker to finish") + self._worker.join(self.timeout + 1) + self.logger.debug("Worker finished") + super(AsyncTaskRunner, self).stop() diff --git a/server/openmtc-server/src/openmtc_server/platform/default/__init__.py b/server/openmtc-server/src/openmtc_server/platform/default/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/openmtc-server/src/openmtc_server/platform/default/__init__.pyc b/server/openmtc-server/src/openmtc_server/platform/default/__init__.pyc new file mode 100644 index 0000000..9f89e55 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/platform/default/__init__.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/platform/gevent/ServerRack.py b/server/openmtc-server/src/openmtc_server/platform/gevent/ServerRack.py new file mode 100644 index 0000000..6afa2af --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/platform/gevent/ServerRack.py @@ -0,0 +1,6 @@ + +from futile.logging import get_logger + +get_logger(__name__).warning("Deprecated import") + +from openmtc_gevent.ServerRack import GEventServerRack \ No newline at end of file diff --git a/server/openmtc-server/src/openmtc_server/platform/gevent/ServerRack.pyc b/server/openmtc-server/src/openmtc_server/platform/gevent/ServerRack.pyc new file mode 100644 index 0000000..33a3883 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/platform/gevent/ServerRack.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/platform/gevent/TaskRunner.py b/server/openmtc-server/src/openmtc_server/platform/gevent/TaskRunner.py new file mode 100644 index 0000000..51e32d3 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/platform/gevent/TaskRunner.py @@ -0,0 +1 @@ +from openmtc_gevent.TaskRunner import GEventTaskRunner as AsyncTaskRunner \ No newline at end of file diff --git a/server/openmtc-server/src/openmtc_server/platform/gevent/__init__.py b/server/openmtc-server/src/openmtc_server/platform/gevent/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/openmtc-server/src/openmtc_server/platform/gevent/__init__.pyc b/server/openmtc-server/src/openmtc_server/platform/gevent/__init__.pyc new file mode 100644 index 0000000..5b90063 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/platform/gevent/__init__.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/util/__init__.py b/server/openmtc-server/src/openmtc_server/util/__init__.py new file mode 100644 index 0000000..7c9e6a2 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/util/__init__.py @@ -0,0 +1,128 @@ +from urllib import quote +from datetime import datetime + +from mimeparse import parse_mime_type + +import aplus +from futile.logging import get_logger +from openmtc.exc import OpenMTCNetworkError +from openmtc_onem2m.exc import CSEError +from openmtc_onem2m.transport import OneM2MErrorResponse + +logger = get_logger(__name__) + + +def log_error(error): + try: + return error.status_code == 500 + except AttributeError: + try: + return error.statusCode == 500 + except AttributeError: + return not isinstance(error, (OpenMTCNetworkError, CSEError, + OneM2MErrorResponse)) + + +aplus.log_error = log_error + + +def 
uri_safe(s): + return quote(s.replace("/", "_")) + + +def is_text_content(mimetype): + try: + maintype, subtype, _ = parse_mime_type(mimetype) + + maintype = maintype.lower() + + if maintype == "text": + return True + + if maintype == "application": + return subtype.rpartition("+")[-1].lower() in ("xml", "json") + except Exception as e: + logger.warn("Failed to parse mimetype '%s': %s", mimetype, e) + + return False + + +def join_url(base, path): + if not base.endswith("/"): + if not path.startswith("/"): + base += "/" + elif path.startswith("/"): + path = path[1:] + return base + path + + +def match_now_cron(cron): + return match_time_cron(datetime.now(), cron) + + +def match_time_cron(time, cron): + cron_parts = cron.split(' ') + + if len(cron_parts) < 5: + return False + + minute, hour, day, month, weekday = cron_parts + + to_check = { + 'minute': minute, + 'hour': hour, + 'day': day, + 'month': month, + 'weekday': weekday + } + + ranges = { + 'minute': '0-59', + 'hour': '0-23', + 'day': '1-31', + 'month': '1-12', + 'weekday': '0-6' + } + + for c in to_check.keys(): + val = to_check[c] + values = [] + + # For patters like 0-23/2 + if val.find('/') >= 0: + # Get the range and step + _range, steps = val.split('/') + steps = int(steps) + + # Now get the start and stop + if _range == '*': + _range = ranges[c] + + start, stop = map(int, _range.split('-')) + + for i in range(start, stop, steps): + values.append(i) + + # For patters like : 2 or 2,5,8 or 2-23 + else: + k = val.split(',') + + for v in k: + if v.find('-') >= 0: + start, stop = map(int, v.split('-')) + + for i in range(start, stop): + values.append(i) + elif v == '*': + values.append('*') + else: + values.append(int(v)) + + if c is 'weekday': + if time.weekday() not in values and val != '*': + return False + else: + if getattr(time, c) not in values and val != '*': + return False + + return True diff --git a/server/openmtc-server/src/openmtc_server/util/__init__.pyc b/server/openmtc-server/src/openmtc_server/util/__init__.pyc new file mode 100644 index 0000000..14aa21e Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/util/__init__.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/util/async.py b/server/openmtc-server/src/openmtc_server/util/async.py new file mode 100644 index 0000000..1ba4b42 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/util/async.py @@ -0,0 +1,130 @@ +from functools import update_wrapper +from decorator import decorator +from futile.logging import get_logger +from aplus import Promise + +logger = get_logger(__name__) + + +class AsyncError(Exception): + pass + + +class RPCError(AsyncError): + pass + + +class MessageKeyError(KeyError): + pass + + +class Message(object): + __slots__ = ("_message", ) + + def __init__(self, message, *args, **kw): + super(Message, self).__init__(*args, **kw) + self._message = message + + def reply(self, message, handler=None): + return self._message.reply(message, handler) + + def __getitem__(self, k): + try: + return self._message.body[k] + except KeyError as e: + raise MessageKeyError("Missing message parameter: %s" % (e, )) + + def get(self, k, default=None): + return self._message.body.get(k, default) + + def copy(self): + return self._message.body.copy() + + def __str__(self): + return str(self._message.body) + + +class AsyncResult(object): + __slots__ = ("_result", "error") + + def __init__(self, result=None, error=None, *args, **kw): + super(AsyncResult, self).__init__(*args, **kw) + + if error is not None and not 
isinstance(error, BaseException): + error = AsyncError(error) + + self._result = result + self.error = error + + @property + def result(self): + if self.error is not None: + raise self.error + return self._result + + +class RPCResult(AsyncResult): + __slots__ = ("reply") + + def __init__(self, replyfunc, result=None, error=None, *args, **kw): + super(RPCResult, self).__init__(result=None, error=None, *args, **kw) + + self.reply = replyfunc + + +def async_result_handler(func): + def _handle_async_result(error, result): + return func(AsyncResult(result, error)) + update_wrapper(_handle_async_result, func) + return _handle_async_result + + +@decorator +def rpc_result_handler(func, result): + if result.body["status"].lower() != "ok": + return func(AsyncResult(replyfunc=result.reply, + error=RPCError(result.body["message"]))) + result = AsyncResult(replyfunc=result.reply, result=result.body["result"]) + return func(result) + + +@decorator +def rpc_handler(func, message): + try: + func(Message(message)) + except Exception as e: + logger.exception("Error in RPC call") + message.reply({"status": "error", "message": str(e)}) + + +def async_all(promises, fulfill_with_none=False): + p = Promise() + num = len(promises) + + if num == 0: + if fulfill_with_none: + p.fulfill(None) + else: + p.fulfill([]) + else: + results = [] + + def _done(result): + if not p.isRejected(): + results.append(result) + if len(results) == num: + if fulfill_with_none: + #logger.debug("Fulfilling with None") + p.fulfill(None) + else: + #logger.debug("Fulfilling with %s", results) + p.fulfill(results) + + def _error(error): + if p.isPending(): + p.reject(error) + + for promise in promises: + promise.then(_done, _error) + + return p diff --git a/server/openmtc-server/src/openmtc_server/util/async.pyc b/server/openmtc-server/src/openmtc_server/util/async.pyc new file mode 100644 index 0000000..1cf3131 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/util/async.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/util/db.py b/server/openmtc-server/src/openmtc_server/util/db.py new file mode 100644 index 0000000..ac63d22 --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/util/db.py @@ -0,0 +1,27 @@ +from openmtc_server.exc import ConfigurationError +from importlib import import_module + + +def load_db_module(config, **override_params): + try: + dbconfig = config["database"] + except KeyError: + raise ConfigurationError("db configuration is missing") + + try: + db_fq_name = dbconfig["driver"] + except KeyError: + raise ConfigurationError("db configuration is missing a 'driver' entry") + + package, _, clsname = db_fq_name.rpartition(".") + + if not package or not clsname: + raise ConfigurationError("Invalid DB driver string") + + module = import_module(package) + + cls = getattr(module, clsname) + + dbconfig.update(override_params) + + return cls(dbconfig) diff --git a/server/openmtc-server/src/openmtc_server/util/db.pyc b/server/openmtc-server/src/openmtc_server/util/db.pyc new file mode 100644 index 0000000..b079500 Binary files /dev/null and b/server/openmtc-server/src/openmtc_server/util/db.pyc differ diff --git a/server/openmtc-server/src/openmtc_server/util/regexer.py b/server/openmtc-server/src/openmtc_server/util/regexer.py new file mode 100644 index 0000000..f8fc72b --- /dev/null +++ b/server/openmtc-server/src/openmtc_server/util/regexer.py @@ -0,0 +1,57 @@ +""" +Created to generate semantically closed regular expressions. 
+""" + + +def capturing_group(name): + return _group(name) + + +def not_capturing_group(name, negated=False): + if not negated: + return _group("?:" + name) + else: + return _group("?!" + name) + + +def _group(name): + return "(" + name + ")" + + +def zero_plus_elem(name): + return name + "*" + + +def one_plus_elem(name): + return name + "+" + + +def zero_or_one_elem(name): + return name + "?" + + +def negate(name): + return "!" + name + + +def slash(name): + return "/" + name + + +def alternative(names): + return "|".join(names) + + +""" +@deprecated: +""" + + +def test_route_regex(route): + result = route.regex.match('/m2m/applications/myApp/test') + print(result.groups()) + result = route.regex.match('/m2m/applications/myApp/') + print(result.groups()) + result = route.regex.match('/m2m/applications/myApp') + print(result.groups()) + exit() diff --git a/setup-gevent-all.py b/setup-gevent-all.py new file mode 100755 index 0000000..01e2f26 --- /dev/null +++ b/setup-gevent-all.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python + +from setuptools import setup +from distutils.core import setup +from glob import glob +import sys +import os + +from utils import (get_packages, OpenMTCSdist, OpenMTCBuildPy, + OpenMTCBuildPyBinary, OpenMTCSdistBinary, + create_openmtc_user, move_config_files, enable_init_files) + + +# name and version +SETUP_NAME = "openmtc-all" +SETUP_VERSION = "4.99.0" +SETUP_DESCRIPTION = "The OpenMTC Backend and Gateway (GEvent version)" + +# meta +SETUP_AUTHOR = "Konrad Campowsky" +SETUP_AUTHOR_EMAIL = "konrad.campowsky@fraunhofer.fokus.de" +SETUP_URL = "http://www.openmtc.org" +SETUP_LICENSE = "Fraunhofer FOKUS proprietary" + +# requirements +SETUP_REQUIRES = [ + "urllib3", "gevent (>=1.0)", "iso8601 (>=0.1.5)", "werkzeug (>=0.9)", + "blist", "simplejson", "ujson", "python_socketio", "gevent_websocket", + "flask", "pyxb (==1.2.3)", "enum34", "dtls", "geventhttpclient", + # server only + "funcy", "netifaces", "decorator", "mimeparse", "coapthon", "rdflib", + "fyzz", "yapps" +] +SETUP_INSTALL_REQUIRES = [ + "urllib3", "gevent >= 1.0", "iso8601 >= 0.1.5", "werkzeug >= 0.9", + "blist", "simplejson", "ujson", "python_socketio", "gevent_websocket", + "flask", "pyxb == 1.2.3", "enum34", "dtls", "geventhttpclient", + # server only + "funcy", "netifaces", "decorator", "mimeparse", "coapthon", "rdflib", + "fyzz", "yapps" +] + +# packages +PACKAGES = ["aplus", "openmtc", "openmtc_onem2m", "futile", "openmtc_app", + "openmtc_gevent", "openmtc_cse", "openmtc_server"] +PACKAGE_DIR = { + "": "common/openmtc/lib", + "openmtc": "common/openmtc/src/openmtc", + "openmtc_onem2m": "common/openmtc-onem2m/src/openmtc_onem2m", + "futile": "futile/src/futile", + "openmtc_app": "openmtc-app/src/openmtc_app", + "openmtc_gevent": "openmtc-gevent/src/openmtc_gevent", + "openmtc_cse": "server/openmtc-cse/src/openmtc_cse", + "openmtc_server": "server/openmtc-server/src/openmtc_server" +} +all_packages = [] +EXCLUDE = [] +for package in PACKAGES: + all_packages.extend(get_packages(package, PACKAGE_DIR, EXCLUDE)) + +# scripts +SETUP_SCRIPTS = glob("openmtc-gevent/bin/*") + +# package data +PACKAGE_DATA = {} + +# data files +DB_DIR = "/var/lib/openmtc" +LOG_DIR = "/var/log/openmtc" +LOG_ROTATE_DIR = "/etc/logrotate.d" +LOG_ROTATE_FILES = ("openmtc-gevent/etc/logrotate.d/openmtc",) +INIT_DIR = "/etc/init.d" +INIT_DIST_FILES = ("openmtc-gevent/etc/init.d/openmtc-gateway", + "openmtc-gevent/etc/init.d/openmtc-backend") +CONFIG_FILES = ("config-backend.json", "config-gateway.json") +CONFIG_DIR = "/etc/openmtc/gevent" 
+CONFIG_DIST_FILES = ("openmtc-gevent/etc/conf/config-backend.json.dist", + "openmtc-gevent/etc/conf/config-gateway.json.dist") +SSL_CERT_DIR = "/etc/openmtc/certs" +SSL_CERT_FILES = tuple(map(lambda x: os.path.join('openmtc-gevent/certs/', x), + os.listdir('openmtc-gevent/certs'))) +DATA_FILES = [ + (DB_DIR, ""), + (LOG_DIR, ""), + (LOG_ROTATE_DIR, LOG_ROTATE_FILES), + (INIT_DIR, INIT_DIST_FILES), + (CONFIG_DIR, CONFIG_DIST_FILES), + (SSL_CERT_DIR, SSL_CERT_FILES), +] + +# handle binary only +binary_only = "--binary-only" in sys.argv +if binary_only: + sys.argv.remove("--binary-only") + CMD_CLASS = {'build_py': OpenMTCBuildPyBinary, 'sdist': OpenMTCSdistBinary} +else: + CMD_CLASS = {'build_py': OpenMTCBuildPy, 'sdist': OpenMTCSdist} + +if __name__ == "__main__": + ############################################################################ + # setup + setup(name=SETUP_NAME, + version=SETUP_VERSION, + description=SETUP_DESCRIPTION, + author=SETUP_AUTHOR, + author_email=SETUP_AUTHOR_EMAIL, + url=SETUP_URL, + license=SETUP_LICENSE, + requires=SETUP_REQUIRES, + install_requires=SETUP_INSTALL_REQUIRES, + package_dir=PACKAGE_DIR, + packages=all_packages, + scripts=SETUP_SCRIPTS, + package_data=PACKAGE_DATA, + data_files=DATA_FILES, + cmdclass=CMD_CLASS, + py_modules=["pyio"] + ) + + ############################################################################ + # install + if "install" in sys.argv: + # only do this during install + enable_init_files(INIT_DIR, INIT_DIST_FILES) + + move_config_files(CONFIG_DIR, CONFIG_FILES) + + create_openmtc_user(DB_DIR, LOG_DIR) diff --git a/setup-sdk.py b/setup-sdk.py new file mode 100755 index 0000000..b74c26c --- /dev/null +++ b/setup-sdk.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python + +from setuptools import setup +from distutils.core import setup +from utils import get_packages, OpenMTCSdist + +# name and version +NAME = "sdk" +SETUP_NAME = "openmtc-" + NAME +SETUP_VERSION = "4.99.0" +SETUP_DESCRIPTION = "The OpenMTC Python SDK" + +# meta +SETUP_AUTHOR = "Konrad Campowsky" +SETUP_AUTHOR_EMAIL = "konrad.campowsky@fraunhofer.fokus.de" +SETUP_URL = "http://www.openmtc.org" +SETUP_LICENSE = "Fraunhofer FOKUS proprietary" + +# requirements +SETUP_REQUIRES = [ + "urllib3", "gevent (>=1.0)", "iso8601 (>=0.1.5)", "werkzeug (>=0.9)", + "blist", "simplejson", "ujson", "python_socketio", "gevent_websocket", + "flask", "pyxb (==1.2.3)", "enum34", "dtls", "geventhttpclient" +] +SETUP_INSTALL_REQUIRES = [ + "urllib3", "gevent >= 1.0", "iso8601 >= 0.1.5", "werkzeug >= 0.9", + "blist", "simplejson", "ujson", "python_socketio", "gevent_websocket", + "flask", "pyxb == 1.2.3", "enum34", "dtls", "geventhttpclient" +] + +# packages +PACKAGES = ["aplus", "openmtc", "openmtc_onem2m", "futile", "openmtc_app"] +PACKAGE_DIR = { + "": "common/openmtc/lib", + "openmtc": "common/openmtc/src/openmtc", + "openmtc_onem2m": "common/openmtc-onem2m/src/openmtc_onem2m", + "openmtc_app": "openmtc-app/src/openmtc_app", + "futile": "futile/src/futile" +} +all_packages = [] +for package in PACKAGES: + all_packages.extend(get_packages(package, PACKAGE_DIR)) + +# scripts +SETUP_SCRIPTS = [] + +# package data +PACKAGE_DATA = {NAME: []} + +# data files +DATA_FILES = [] + +# cmd class +CMD_CLASS = {'sdist': OpenMTCSdist} + +if __name__ == "__main__": + ############################################################################ + # setup + setup(name=SETUP_NAME, + version=SETUP_VERSION, + description=SETUP_DESCRIPTION, + author=SETUP_AUTHOR, + author_email=SETUP_AUTHOR_EMAIL, + url=SETUP_URL, + 
license=SETUP_LICENSE, + requires=SETUP_REQUIRES, + install_requires=SETUP_INSTALL_REQUIRES, + package_dir=PACKAGE_DIR, + packages=all_packages, + scripts=SETUP_SCRIPTS, + package_data=PACKAGE_DATA, + data_files=DATA_FILES, + cmdclass=CMD_CLASS, + py_modules=["pyio"] + ) diff --git a/util/json_benchmark.py b/util/json_benchmark.py new file mode 100644 index 0000000..3315c1f --- /dev/null +++ b/util/json_benchmark.py @@ -0,0 +1,83 @@ +""" +Dependencies: + pip install tabulate bson simplejson python-cjson ujson demjson yajl msgpack-python jsonpickle jsonlib jsonlib2 dill +""" + +from timeit import timeit +from tabulate import tabulate + +setup = '''d = { + 'words': """ + Lorem ipsum dolor sit amet, consectetur adipiscing + elit. Mauris adipiscing adipiscing placerat. + Vestibulum augue augue, + pellentesque quis sollicitudin id, adipiscing. + """, + 'list': range(100), + 'dict': dict((str(i),'a') for i in xrange(100)), + 'int': 100, + 'float': 100.123456 +}''' + +setup_cpickle = '%s ; src = cPickle.dumps(d)' % setup +setup_cpickle_binary = '%s ; src = cPickle.dumps(d, 2)' % setup +setup_json = '%s ; import json; src = json.dumps(d)' % setup +setup_bson = '%s ; src = dumps(d)' % setup +setup_msgpack = '%s ; src = msgpack.dumps(d)' % setup +setup_json_pickle = '%s ; src = jsonpickle.encode(d)' % setup +setup_b64_pickle = '%s ; src = base64.b64encode(cPickle.dumps(d, 2))' % setup +setup_b64_dill = '%s ; src = base64.b64encode(dill.dumps(d, 2))' % setup + +tests = [ + # (title, setup, enc_test, dec_test) + ('pickle (ascii)', 'import pickle; import cPickle; %s' % setup_cpickle, 'pickle.dumps(d, 0)', 'pickle.loads(src)'), + ('pickle (binary)', 'import pickle; import cPickle; %s' % setup_cpickle_binary, 'pickle.dumps(d, 2)', 'pickle.loads(src)'), + ('cPickle (ascii)', 'import cPickle; %s' % setup_cpickle, 'cPickle.dumps(d, 0)', 'cPickle.loads(src)'), + ('cPickle (binary)', 'import cPickle; %s' % setup_cpickle_binary, 'cPickle.dumps(d, 2)', 'cPickle.loads(src)'), + ('json', 'import json; %s' % setup_json, 'json.dumps(d)', 'json.loads(src)'), + ('json-check_circular', 'import json; %s' % setup_json, 'json.dumps(d, check_circular=False)', 'json.loads(src)'), + ('json+indent', 'import json; %s' % setup_json, 'json.dumps(d, indent=2)', 'json.loads(src)'), + ('bson 0.3.3', 'from bson import dumps, loads; %s' % setup_bson, 'dumps(d)', 'loads(src)'), + # if there are problems with bson. 
Change the line on top with this one + # ('bson 0.3.3', 'from bson.json_util import dumps, loads; %s' % setup_bson, 'dumps(d)', 'loads(src)'), + ('simplejson-3.3.1', 'import simplejson; %s' % setup_json, 'simplejson.dumps(d)', 'simplejson.loads(src)'), + ('python-cjson-1.1.0', 'import cjson; %s' % setup_json, 'cjson.encode(d)', 'cjson.decode(src)'), + ('ujson-1.33', 'import ujson; %s' % setup_json, 'ujson.dumps(d)', 'ujson.loads(src)'), + # ('demjson 2.2.2', 'import demjson; %s' % setup_json, 'demjson.encode(d)', 'demjson.decode(src)'), + ('yajl 0.3.5', 'import yajl; %s' % setup_json, 'yajl.dumps(d)', 'yajl.loads(src)'), + ('msgpack-python-0.3.0', 'import msgpack; %s' % setup_msgpack, 'msgpack.dumps(d)', 'msgpack.loads(src)'), + ('jsonpickle 0.9.1', 'import jsonpickle; %s' % setup_json_pickle, 'jsonpickle.encode(d)', 'jsonpickle.decode(src)'), + ('jsonlib 1.6.1', 'import jsonlib; %s' % setup_json, 'jsonlib.write(d)', 'jsonlib.read(src)'), + ('jsonlib2 1.5.2', 'import jsonlib2; %s' % setup_json, 'jsonlib2.write(d)', 'jsonlib2.read(src)'), + ('b64Pickle', 'import cPickle; import base64; %s' % setup_b64_pickle, 'base64.b64encode(cPickle.dumps(d, 2))', 'cPickle.loads(base64.b64decode(src))'), + ('b64Dill', 'import dill; import base64; %s' % setup_b64_dill, 'base64.b64encode(dill.dumps(d, 2))', 'dill.loads(base64.b64decode(src))'), +] + +loops = 1000 +enc_table = [] +dec_table = [] + +print "Running tests (%d loops each)" % loops + +for title, mod, enc, dec in tests: + print title + + print " [Encode]", enc + result = timeit(enc, mod, number=loops) + enc_table.append([title, result]) + + print " [Decode]", dec + result = timeit(dec, mod, number=loops) + dec_table.append([title, result]) + +enc_table.sort(key=lambda x: x[1]) +enc_table.insert(0, ['Package', 'Seconds']) + +dec_table.sort(key=lambda x: x[1]) +dec_table.insert(0, ['Package', 'Seconds']) + +print "\nEncoding Test (%d loops)" % loops +print tabulate(enc_table, headers="firstrow") + +print "\nDecoding Test (%d loops)" % loops +print tabulate(dec_table, headers="firstrow") diff --git a/util/shell-utils.sh b/util/shell-utils.sh new file mode 100755 index 0000000..589dbfb --- /dev/null +++ b/util/shell-utils.sh @@ -0,0 +1,59 @@ +#!/usr/bin/env bash + +#set -x + + +# A (partial) 'realpath' replacement based on Bash functionality, +# using 'readlink' in a bare form (should be compatible with non-GNU +# 'readlink). +# +# For the original idea, see: +# http://stackoverflow.com/questions/3572030/bash-script-absolute-path-with-osx#30267480 +# +get_realpath() +{ + local _READLINK="$(readlink "$1")" + if [ -z ${_READLINK} ]; then + echo "$1" + return + else + # Note, a symbolic link could be either a link to a file or a + # link to a path! 
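+        # Illustrative example (made-up paths): if /usr/local/bin/tool is a
+        # symlink to /opt/tool/bin/tool, "get_realpath /usr/local/bin/tool"
+        # should print "/opt/tool/bin/tool".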
+ + # If is symbolic link to directory + if [ -d ${_READLINK} ]; then + pushd ${_READLINK} + echo "${PWD}" + popd + return + + # If is symbolic link to a file + else + # Save depth of 'dirs' to be able to jump back were you + # came from + local _DIRSDEPTH=$(dirs | wc -l) + + local _LASTBASENAME=$(basename ${_READLINK}) + pushd $(dirname $1) >/dev/null + _READLINK=$(readlink $(basename "$1")) + while [ -n "${_READLINK}" ]; do + pushd $(dirname "${_READLINK}" ) >/dev/null + _READLINK=$(readlink $(basename "${_READLINK}")) + done + _REALPATH=${PWD}/$(basename "${_LASTBASENAME}") + fi + fi + + # Get back to where you originally came from + while [ "${_DIRSDEPTH}" != "$(dirs | wc -l)" ]; do + popd + done + + echo "${_REALPATH}" +} + +# echo "" +# echo "checking: $1" +# echo "get_realpath(): $(get_realpath $1)" +# echo "realpath: $(realpath $1)" + diff --git a/utils.py b/utils.py new file mode 100644 index 0000000..d8a733f --- /dev/null +++ b/utils.py @@ -0,0 +1,148 @@ +import distutils.command.sdist +import distutils.command.build_py +import os +import subprocess +import sys + + +def echo(msg, *args): + if args: + msg = msg % args + sys.stdout.write(msg + "\n") + + +def get_packages(package, package_dir, excluded_list=None, included_list=None): + included_list = included_list or [] + excluded_list = excluded_list or [] + + try: + root = package_dir[package] + except KeyError: + root = package_dir.get("", ".") + "/" + package + + if not os.path.exists(root): + sys.stderr.write( + "Directory for package %s does not exist: %s\n" % (package, root)) + sys.exit(1) + + def on_error(error): + sys.stderr.write( + "Error while collecting packages for %s: %s\n" % (package, error)) + sys.exit(1) + + packages = [package] + + r_prefix = len(root) + 1 + for path, dirs, files in os.walk(root, onerror=on_error): + is_module = "__init__.py" in files and path != root + excluded = any(map(lambda x: x in path, excluded_list)) + included = any(map(lambda x: x in path, included_list)) + if is_module and (not excluded or included): + packages.append(package + "." + path[r_prefix:].replace("/", ".")) + + return packages + + +def get_pkg_files(base_dir, name): + package_files = [] + pkg_dir = os.path.join(base_dir, 'src', name) + pkg_data_dir = os.path.join(pkg_dir, 'static') + for (path, directories, filenames) in os.walk(pkg_data_dir): + for filename in filenames: + package_files.append(os.path.join(os.path.relpath(path, pkg_dir), + filename)) + return package_files + + +def enable_init_files(init_dir, init_dist_files): + for f in init_dist_files: + os.chmod(os.path.join(init_dir, os.path.basename(f)), 0755) + + +def move_config_files(config_dir, config_files): + for f in config_files: + target_file = os.path.join(config_dir, f) + if not os.path.exists(target_file): + echo("Installing config file %s", target_file) + os.rename(target_file + ".dist", target_file) + # os.chmod(target_file, 0644) + else: + echo("Not overwriting config file %s", target_file) + + +def create_openmtc_user(db_dir=None, log_dir=None): + try: + from pwd import getpwnam + except ImportError: + print "Could not import the 'pwd' module. 
Skipping user management" + else: + # assuming DB_DIR was created by setup already + try: + pw = getpwnam('openmtc') + except KeyError as e: + try: + # add system user openmtc:openmtc + # useradd --system -UM openmtc + useradd = "useradd --system -UM openmtc" + retcode = subprocess.call(useradd, shell=True) + if retcode: + raise Exception("Failed to add user 'openmtc'") + pw = getpwnam('openmtc') + except Exception as e: + sys.stderr.write("Error creating user: %s\n" % (e, )) + sys.exit(1) + uid = pw.pw_uid + gid = pw.pw_gid + + # set path permissions + if db_dir: + os.chown(db_dir, uid, gid) + if log_dir: + os.chown(log_dir, uid, gid) + + +class OpenMTCSdist(distutils.command.sdist.sdist): + def make_release_tree(self, base_dir, files): + distutils.command.sdist.sdist.make_release_tree(self, base_dir, files) + + script_name = os.path.basename(sys.argv[0]) + + if script_name != "setup.py": + os.rename(base_dir + "/" + script_name, base_dir + "/setup.py") + self.filelist.files.remove(script_name) + self.filelist.files.append("setup.py") + + +class OpenMTCSdistBinary(OpenMTCSdist, object): + def make_release_tree(self, base_dir, files): + super(OpenMTCSdistBinary, self).make_release_tree(base_dir, files) + + script_name = os.path.basename(sys.argv[0]) + + build_py = self.get_finalized_command('build_py') + build_py.compile = 1 + build_py.optimize = 2 + build_py.retain_init_py = 1 + build_py.build_lib = base_dir + build_py.byte_compile( + [base_dir + "/" + f for f in self.filelist.files if + f != script_name and f.endswith(".py")]) + + +class OpenMTCBuildPy(distutils.command.build_py.build_py): + retain_init_py = 0 + + def byte_compile(self, files): + distutils.command.build_py.build_py.byte_compile(self, files) + + +class OpenMTCBuildPyBinary(OpenMTCBuildPy, object): + retain_init_py = 0 + + def byte_compile(self, files): + super(OpenMTCBuildPyBinary, self).byte_compile(files) + + for f in files: + if (f.endswith('.py') and (os.path.basename(f) != "__init__.py" or + not self.retain_init_py)): + os.unlink(f) diff --git a/utils.pyc b/utils.pyc new file mode 100644 index 0000000..05762b0 Binary files /dev/null and b/utils.pyc differ
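
Illustrative usage sketch (assumes the openmtc_server package from server/openmtc-server/src is on the Python path): how the cron helpers in openmtc_server/util/__init__.py might be called.

    from datetime import datetime
    from openmtc_server.util import match_now_cron, match_time_cron

    # "*/15 * * * *" expands the minute field against the '0-59' range with a
    # step of 15, so it matches minutes 0, 15, 30 and 45 of every hour.
    print(match_time_cron(datetime(2017, 11, 7, 14, 30), "*/15 * * * *"))  # True
    print(match_time_cron(datetime(2017, 11, 7, 14, 31), "*/15 * * * *"))  # False

    # match_now_cron() applies the same check to datetime.now().
    print(match_now_cron("* * * * *"))  # True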