mirror of
https://github.com/OpenMTC/OpenMTC.git
synced 2025-02-21 17:26:36 +00:00
clean up
This commit is contained in:
parent
8aaa943930
commit
30c6fa7b06
@ -1,456 +0,0 @@
|
||||
import sys
|
||||
from logging import DEBUG
|
||||
from threading import Thread
|
||||
from traceback import print_stack
|
||||
|
||||
from futile.logging import LoggerMixin
|
||||
from openmtc.exc import OpenMTCError
|
||||
|
||||
# NOTE(review): sys.subversion is a CPython-2-only attribute (removed in
# Python 3) — on Python 3 this raises AttributeError at import time; confirm
# the intended interpreter. getargspec/ismethod are only needed by the
# non-CPython variant of Promise._invoke below.
if sys.subversion[0] != "CPython":
    from inspect import ismethod, getargspec
|
||||
|
||||
# TODO: kca: can't pass in values for then/error currently
|
||||
|
||||
|
||||
def log_error(error):
    """Return True when *error* warrants verbose logging.

    Expected, domain-level OpenMTCError instances are logged quietly;
    everything else is treated as unexpected and logged in full.
    """
    return not isinstance(error, OpenMTCError)
|
||||
|
||||
|
||||
class Promise(LoggerMixin):
    """
    This is a class that attempts to comply with the
    Promises/A+ specification and test suite:

    http://promises-aplus.github.io/promises-spec/
    """

    __slots__ = ("_state", "value", "reason",
                 "_callbacks", "_errbacks", "name")

    # These are the potential states of a promise
    PENDING = -1
    REJECTED = 0
    FULFILLED = 1

    def __init__(self, name=None):
        """
        Initialize the Promise into a pending state.

        :param name: optional label used in log messages only.
        """
        self._state = self.PENDING
        self.value = None       # set once fulfilled
        self.reason = None      # set once rejected
        self._callbacks = []    # run on fulfillment
        self._errbacks = []     # run on rejection
        self.name = name

    def _fulfill(self, value):
        """
        Fulfill the promise with a given value.

        Must only be called while the promise is still pending
        (enforced via assert, i.e. stripped under -O).
        """

        assert self._state == self.PENDING, "Promise state is not pending"

        self._state = self.FULFILLED
        self.value = value
        for callback in self._callbacks:
            try:
                callback(value)
            except Exception:
                # Ignore errors in callbacks
                self.logger.exception("Error in callback %s", callback)
        # We will never call these callbacks again, so allow
        # them to be garbage collected. This is important since
        # they probably include closures which are binding variables
        # that might otherwise be garbage collected.
        self._callbacks = []
        self._errbacks = []

    def fulfill(self, value):
        # Public fluent variant of _fulfill: returns self for chaining.
        self._fulfill(value)
        return self

    def _reject(self, reason, bubbling=False):
        """
        Reject this promise for a given reason.

        :param bubbling: True when the rejection is merely propagated from
            another promise — suppresses the debug logging below.
        """

        assert self._state == self.PENDING, "Promise state is not pending"

        if not bubbling and log_error(reason):
            # Attach exception info only when we are inside an active
            # exception context (exc_info()[0] is not None).
            exc_info = sys.exc_info()
            self.logger.debug("Promise (%s) rejected: %s", self.name, reason,
                              exc_info=exc_info[0] and exc_info or None)
            self.logger.debug(self._errbacks)
            if self.logger.isEnabledFor(DEBUG):
                print_stack()
        else:
            # Expected error or bubbled rejection: nothing to log.
            pass

        self._state = self.REJECTED
        self.reason = reason
        for errback in self._errbacks:
            try:
                errback(reason)
            except Exception:
                self.logger.exception("Error in errback %s", errback)
                # Ignore errors in callbacks

        # We will never call these errbacks again, so allow
        # them to be garbage collected. This is important since
        # they probably include closures which are binding variables
        # that might otherwise be garbage collected.
        self._errbacks = []
        self._callbacks = []

    def reject(self, reason):
        # Public fluent variant of _reject: returns self for chaining.
        self._reject(reason)
        return self

    def isPending(self):
        """Indicate whether the Promise is still pending."""
        return self._state == self.PENDING

    def isFulfilled(self):
        """Indicate whether the Promise has been fulfilled."""
        return self._state == self.FULFILLED

    def isRejected(self):
        """Indicate whether the Promise has been rejected."""
        return self._state == self.REJECTED

    def get(self, timeout=None):
        """Get the value of the promise, waiting if necessary.

        Raises the rejection reason if the promise was rejected.
        """
        self.wait(timeout)
        if self._state == self.FULFILLED:
            return self.value
        raise self.reason

    def wait(self, timeout=None):
        """
        An implementation of the wait method which doesn't involve
        polling but instead utilizes a "real" synchronization
        scheme.
        """
        import threading

        # Already settled: nothing to wait for.
        if self._state != self.PENDING:
            return

        # Either settling path sets the event.
        e = threading.Event()
        self.addCallback(lambda v: e.set())
        self.addErrback(lambda r: e.set())
        e.wait(timeout)

    def addCallback(self, f):
        """
        Add a callback for when this promise is fulfilled. Note that
        if you intend to use the value of the promise somehow in
        the callback, it is more convenient to use the 'then' method.
        """
        self._callbacks.append(f)

    def addErrback(self, f):
        """
        Add a callback for when this promise is rejected. Note that
        if you intend to use the rejection reason of the promise
        somehow in the callback, it is more convenient to use
        the 'then' method.
        """
        self._errbacks.append(f)

    # NOTE(review): sys.subversion exists only on CPython 2; on Python 3
    # this class-body conditional raises AttributeError — confirm the
    # intended interpreter. Both _invoke variants call a zero-argument
    # handler without a value when value is None, else pass the value.
    if sys.subversion[0] != "CPython":
        def _invoke(self, func, value):
            # Introspection-based variant for non-CPython interpreters.
            try:
                if value is None:
                    args, _, _, _ = getargspec(func)
                    arglen = len(args)
                    # Bound methods carry an implicit self argument.
                    if not arglen or (arglen == 1 and ismethod(func)):
                        return func()

                return func(value)
            except Exception as e:
                if log_error(e):
                    self.logger.exception("Error in handler %s", func)
                else:
                    self.logger.debug("Error in handler %s: %s", func, e)
                raise
    else:
        def _invoke(self, func, value):
            # CPython fast path using the (Python 2) code-object attributes
            # im_func/func_code to count declared arguments.
            try:
                if value is None:
                    try:
                        target = func.im_func
                    except AttributeError:
                        argcount = func.func_code.co_argcount
                    else:
                        # Bound method: discount the implicit self.
                        argcount = target.func_code.co_argcount - 1

                    if argcount == 0:
                        return func()

                return func(value)
            except Exception as e:
                if log_error(e):
                    self.logger.exception("Error in handler %s", func)
                else:
                    self.logger.debug("Error in handler %s: %s", func, repr(e))
                raise

    def __enter__(self):
        # Context-manager support: the promise itself is the target.
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Auto-settle on block exit: reject (and swallow the exception by
        # returning True) if one escaped, otherwise fulfill with None.
        if self.isPending():
            if exc_value is not None:
                if log_error(exc_value):
                    self.logger.exception("Promise automatically rejected")
                self._reject(exc_value, bubbling=True)
                return True
            else:
                self.fulfill(None)

    def then(self, success=None, failure=None, name=None):
        """
        This method takes two optional arguments. The first argument
        is used if the "self promise" is fulfilled and the other is
        used if the "self promise" is rejected. In either case, this
        method returns another promise that effectively represents
        the result of either the first of the second argument (in the
        case that the "self promise" is fulfilled or rejected,
        respectively).

        Each argument can be either:
        * None - Meaning no action is taken
        * A function - which will be called with either the value
        of the "self promise" or the reason for rejection of
        the "self promise". The function may return:
        * A value - which will be used to fulfill the promise
        returned by this method.
        * A promise - which, when fulfilled or rejected, will
        cascade its value or reason to the promise returned
        by this method.
        * A value - which will be assigned as either the value
        or the reason for the promise returned by this method
        when the "self promise" is either fulfilled or rejected,
        respectively.
        """

        # Derive a diagnostic name for the chained promise from the handler.
        if name is None:
            try:
                name = success.__name__
            except AttributeError:
                name = str(success)

        ret = Promise(name=name)

        state = self._state
        if state == self.PENDING:
            """
            If this is still pending, then add callbacks to the
            existing promise that call either the success or
            rejected functions supplied and then fulfill the
            promise being returned by this method
            """

            def callAndFulfill(v):
                """
                A callback to be invoked if the "self promise"
                is fulfilled.
                """
                try:
                    # From 3.2.1, don't call non-functions values
                    if callable(success):
                        newvalue = self._invoke(success, v)
                        if _isPromise(newvalue):
                            # Cascade the inner promise into ret.
                            newvalue.then(ret._fulfill,
                                          ret._reject)
                        else:
                            ret._fulfill(newvalue)
                    else:
                        # From 3.2.6.4
                        ret._fulfill(v)
                except Exception as e:
                    ret._reject(e)

            def callAndReject(r):
                """
                A callback to be invoked if the "self promise"
                is rejected.
                """
                try:
                    if callable(failure):
                        newvalue = failure(r)
                        if _isPromise(newvalue):
                            newvalue.then(ret._fulfill,
                                          ret._reject)
                        else:
                            # A recovered value fulfills the chained promise.
                            ret._fulfill(newvalue)
                    else:
                        # From 3.2.6.5
                        ret._reject(r)
                except Exception as e:
                    ret._reject(e)

            self._callbacks.append(callAndFulfill)
            self._errbacks.append(callAndReject)

        elif state == self.FULFILLED:
            # If this promise was already fulfilled, then
            # we need to use the first argument to this method
            # to determine the value to use in fulfilling the
            # promise that we return from this method.
            try:
                if callable(success):
                    newvalue = self._invoke(success, self.value)
                    if _isPromise(newvalue):
                        newvalue.then(ret._fulfill,
                                      lambda r: ret._reject(r, bubbling=True))
                    else:
                        ret._fulfill(newvalue)
                else:
                    # From 3.2.6.4
                    ret._fulfill(self.value)
            except Exception as e:
                ret._reject(e)
        else:
            # If this promise was already rejected, then
            # we need to use the second argument to this method
            # to determine the value to use in fulfilling the
            # promise that we return from this method.
            try:
                if callable(failure):
                    newvalue = self._invoke(failure, self.reason)
                    if _isPromise(newvalue):
                        newvalue.then(ret._fulfill,
                                      ret._reject)
                    else:
                        ret._fulfill(newvalue)
                else:
                    # From 3.2.6.5
                    ret._reject(self.reason, bubbling=True)
            except Exception as e:
                ret._reject(e)

        return ret
|
||||
|
||||
|
||||
def _isPromise(obj):
    """
    Duck-typed test for promise-like objects: real Promise instances,
    or anything exposing callable fulfill/reject/then attributes.
    """
    if isinstance(obj, Promise):
        return True

    try:
        return (callable(obj.fulfill)
                and callable(obj.reject)
                and callable(obj.then))
    except AttributeError:
        # Missing any of the three attributes -> not a promise.
        return False
|
||||
|
||||
|
||||
def listPromise(*args):
    """
    A special function that takes a bunch of promises
    and turns them into a promise for a vector of values.
    In other words, this turns an list of promises for values
    into a promise for a list of values.
    """
    ret = Promise()

    def handleSuccess(v, ret):
        # Only settle once every input promise has been fulfilled;
        # bail out as soon as one is still pending/rejected.
        for arg in args:
            if not arg.isFulfilled():
                return

        # NOTE(review): on Python 2 map() yields a list; on Python 3 it
        # would be a lazy iterator — confirm the target interpreter.
        value = map(lambda p: p.value, args)
        ret._fulfill(value)

    for arg in args:
        # Any fulfillment re-checks completion; any rejection rejects ret.
        arg.addCallback(lambda v: handleSuccess(v, ret))
        arg.addErrback(lambda r: ret.reject(r))

    # Check to see if all the promises are already fulfilled
    handleSuccess(None, ret)

    return ret
|
||||
|
||||
|
||||
def dictPromise(m):
    """
    A special function that takes a dictionary of promises
    and turns them into a promise for a dictionary of values.
    In other words, this turns an dictionary of promises for values
    into a promise for a dictionary of values.
    """
    ret = Promise()

    def handleSuccess(v, ret):
        # Only settle once every input promise has been fulfilled.
        for p in m.values():
            if not p.isFulfilled():
                return

        # Collect the fulfilled values under their original keys.
        value = {}
        for k in m:
            value[k] = m[k].value
        ret.fulfill(value)

    for p in m.values():
        # Any fulfillment re-checks completion; any rejection rejects ret.
        p.addCallback(lambda v: handleSuccess(v, ret))
        p.addErrback(lambda r: ret.reject(r))

    # Check to see if all the promises are already fulfilled
    handleSuccess(None, ret)

    return ret
|
||||
|
||||
|
||||
class BackgroundThread(Thread):
    """Worker thread that runs a callable and settles a promise with the outcome."""

    def __init__(self, promise, func):
        # The promise to settle and the zero-argument callable to run.
        self.promise = promise
        self.func = func
        Thread.__init__(self)

    def run(self):
        # Fulfill with the callable's result; any exception raised while
        # computing or fulfilling rejects the promise instead.
        try:
            self.promise.fulfill(self.func())
        except Exception as error:
            self.promise.reject(error)
|
||||
|
||||
|
||||
def background(f):
    """Run *f* on a fresh BackgroundThread and return a promise for its result."""
    promise = Promise()
    worker = BackgroundThread(promise, f)
    worker.start()
    return promise
|
||||
|
||||
|
||||
def spawn(f):
    """Run *f* in a gevent greenlet and return a promise settled with its outcome."""
    # Imported lazily so gevent is only required when spawn() is used.
    from gevent import spawn as gevent_spawn

    promise = Promise()

    def process():
        # Fulfill with f()'s result, or reject with whatever it raised.
        try:
            promise.fulfill(f())
        except Exception as error:
            promise.reject(error)

    gevent_spawn(process)
    return promise
|
||||
|
||||
|
||||
def FulfilledPromise(result):
    """Factory: return a promise already fulfilled with *result*."""
    # fulfill() returns the promise itself, so this chains directly.
    return Promise().fulfill(result)
|
||||
|
||||
|
||||
def RejectedPromise(error):
    """Factory: return a promise already rejected with *error*."""
    # reject() returns the promise itself, so this chains directly.
    return Promise().reject(error)
|
@ -1,4 +0,0 @@
|
||||
# Python 2/3 compatibility shim: prefer the Python 2 StringIO module and
# fall back to io.StringIO on Python 3 (order matters — swapping it would
# change which implementation Python 2 picks).
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO
|
@ -1,82 +0,0 @@
|
||||
from futile.basictypes import basestring, BASE_STR
|
||||
from futile.logging import LoggerMixin
|
||||
|
||||
# Historical alias: futile code derives from Base rather than LoggerMixin.
Base = LoggerMixin
|
||||
|
||||
|
||||
class NOT_SET(object):
    """Falsy sentinel used to distinguish "no value supplied" from None."""
    __slots__ = ()

    def __bool__(self):
        # The sentinel is always falsy.
        return False
    # Python 2 spelling of the truthiness hook.
    __nonzero__ = __bool__

    def __str__(self):
        return ""

# Replace the class with its singleton instance; the class itself becomes
# unreachable under this name from here on.
NOT_SET = NOT_SET()

# Module-wide defaults.
DEFAULT_ENCODING = "utf-8"
DEFAULT_CHUNK_SIZE = 128 * 1024  # 128 KiB per buffered read
THREADSAFE = True
|
||||
|
||||
|
||||
def noop(*args, **kw):
    """Accept and ignore any arguments; always return None."""
    return None
|
||||
|
||||
|
||||
def not_implemented(*args, **kw):
    """Placeholder callable that unconditionally raises NotImplementedError."""
    raise NotImplementedError()
|
||||
|
||||
|
||||
def tostr(o):
    """Coerce *o* to the base string type without re-encoding existing strings.

    Existing str/unicode values are returned unchanged; everything else is
    converted with BASE_STR (unicode on Py2, str on Py3).
    """
    if isinstance(o, basestring):
        return o
    return BASE_STR(o)
|
||||
|
||||
|
||||
# On Python 3 (basestring is str) no explicit unicode layer is needed, so
# uc degrades to tostr and encstr is unsupported; on Python 2 provide real
# unicode/bytes converters.
if basestring == str:
    uc = tostr
    encstr = not_implemented
else:
    def uc(s):
        """Return *s* as unicode, decoding byte strings with DEFAULT_ENCODING."""
        if isinstance(s, unicode):
            return s
        if isinstance(s, basestring):
            return s.decode(DEFAULT_ENCODING)
        return unicode(s)

    def encstr(s):
        """Return *s* as a byte string encoded with DEFAULT_ENCODING."""
        if isinstance(s, str):
            return s
        # Coerce non-strings through unicode before encoding.
        if not isinstance(s, unicode):
            s = unicode(s)
        return s.encode(DEFAULT_ENCODING)
|
||||
|
||||
|
||||
def identity(x):
    """Return *x* unchanged (used e.g. as a no-op decorator fallback)."""
    return x
|
||||
|
||||
# Keep a reference to the builtin before shadowing it below.
_isc = issubclass


def issubclass(o, classes):
    """A safer version of __builtin__.issubclass that does not raise TypeError when called with a non-type object."""
    if not isinstance(o, type):
        # Non-types (instances, None, ...) simply aren't subclasses.
        return False
    return _isc(o, classes)
|
||||
|
||||
# Python 3.0/3.1 removed the callable() builtin; provide a fallback that
# checks for a __call__ attribute instead.
try:
    callable
except NameError:
    def callable(x):
        """Fallback callable() test based on the presence of __call__."""
        return hasattr(x, "__call__")
|
||||
|
||||
|
||||
class ObjectProxy(object):
    """Transparent proxy that forwards attribute access to a wrapped object."""

    # Fix: was ("_o") — a bare string, which happens to work for a single
    # slot name but is an error-prone idiom; use a proper 1-tuple.
    __slots__ = ("_o",)

    def __init__(self, proxyobject, *args, **kw):
        super(ObjectProxy, self).__init__(*args, **kw)
        # The wrapped object all unknown attribute lookups fall through to.
        self._o = proxyobject

    def __getattr__(self, k):
        # Only invoked for attributes not found on the proxy itself.
        return getattr(self._o, k)
|
@ -1,12 +0,0 @@
|
||||
'''
Created on 13.11.2012

@author: kca
'''

# Fall back to plain classes when the abc module is unavailable; the
# abstract markers then degrade to no-op decorators.
try:
    from abc import ABCMeta, abstractmethod, abstractproperty
except ImportError:
    from futile import identity
    ABCMeta = type
    abstractmethod = abstractproperty = identity
|
@ -1,20 +0,0 @@
|
||||
'''
Created on 11.05.2013

@author: kca
'''

# ClassType exists only on Python 2 (old-style classes); on Python 3 all
# classes are instances of type.
try:
    from types import ClassType
except ImportError:
    ClassType = type

# basestring: common ancestor of str/unicode on Py2; plain str on Py3.
try:
    basestring = basestring
except NameError:
    basestring = str

# BASE_STR: the "natural" text type (unicode on Py2, str on Py3).
try:
    BASE_STR = unicode
except NameError:
    BASE_STR = str
|
@ -1,63 +0,0 @@
|
||||
'''
|
||||
Created on 17.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from ..collections import OrderedDict
|
||||
import futile
|
||||
|
||||
class LRUCache(OrderedDict):
    """Least-recently-used cache on top of OrderedDict.

    Reads move the accessed entry to the "most recent" end; inserts evict
    the oldest entry once max_items is exceeded. Optionally serialized
    with an RLock.
    """

    # Default capacity; overridable per instance via __init__.
    max_items = 100

    def __init__(self, max_items = None, threadsafe = None, *args, **kw):
        super(LRUCache, self).__init__(*args, **kw)
        if max_items is not None:
            if max_items <= 0:
                raise ValueError(max_items)
            self.max_items = max_items

        # Fall back to the package-wide thread-safety policy.
        if threadsafe is None:
            threadsafe = futile.THREADSAFE

        if threadsafe:
            from threading import RLock
            self.__lock = RLock()
        else:
            self.__lock = None
            # NOTE(review): assigning __getitem__/__setitem__ on the
            # *instance* does not change special-method dispatch (Python
            # looks dunders up on the type), so these two lines look like
            # a dead optimization attempt — confirm before relying on it.
            self.__getitem__ = self._getitem
            self.__setitem__ = self._setitem

    def __getitem__(self, k):
        # Serialize the read-and-reorder when a lock is configured.
        if self.__lock is None:
            return self._getitem(k)
        with self.__lock:
            return self._getitem(k)

    def get(self, k, default = None):
        """dict.get() semantics, but a hit refreshes the entry's LRU position."""
        try:
            return self[k]
        except KeyError:
            return default

    def _getitem(self, k):
        # Refresh: re-insert the key so it becomes the most recent entry.
        v = super(LRUCache, self).__getitem__(k)
        del self[k]
        super(LRUCache, self).__setitem__(k, v)
        return v

    def __iter__(self):
        # Iterate over a snapshot so concurrent reads may reorder entries.
        for k in tuple(super(LRUCache, self).__iter__()):
            yield k

    def __setitem__(self, k, v):
        if self.__lock is None:
            return self._setitem(k, v)
        with self.__lock:
            self._setitem(k, v)

    def _setitem(self, k, v):
        super(LRUCache, self).__setitem__(k, v)
        # Evict the least recently used entry once over capacity.
        if len(self) > self.max_items:
            self.popitem(False)
|
||||
|
@ -1,110 +0,0 @@
|
||||
# Copyright (C) 2009 Raymond Hettinger
|
||||
|
||||
# *** MIT License ***
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
# this software and associated documentation files (the "Software"), to deal in
|
||||
# the Software without restriction, including without limitation the rights to
|
||||
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
# of the Software, and to permit persons to whom the Software is furnished to do
|
||||
# so, subject to the following conditions:
|
||||
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
## {{{ http://code.activestate.com/recipes/576694/ (r7)
|
||||
|
||||
# kca: fixed exception at interpreter shutdown
|
||||
# kca: added list methods
|
||||
|
||||
import collections
|
||||
|
||||
# Field indices of the doubly-linked-list nodes [key, prev, next].
KEY, PREV, NEXT = range(3)
|
||||
|
||||
# NOTE(review): collections.MutableSet moved to collections.abc and was
# removed from the collections namespace in Python 3.10 — confirm the
# target interpreter.
class OrderedSet(collections.MutableSet):
    """Insertion-ordered set backed by a dict plus a circular doubly linked
    list of [key, prev, next] nodes (sentinel node = self.end)."""

    def __init__(self, iterable=None):
        self.end = end = []
        end += [None, end, end]         # sentinel node for doubly linked list
        self.map = {}                   # key --> [key, prev, next]
        if iterable is not None:
            self |= iterable

    def __len__(self):
        return len(self.map)

    def __contains__(self, key):
        return key in self.map

    def add(self, key):
        # Link a new node just before the sentinel (i.e. at the end).
        if key not in self.map:
            end = self.end
            curr = end[PREV]
            curr[NEXT] = end[PREV] = self.map[key] = [key, curr, end]
    # list-style alias added for convenience
    append = add

    def discard(self, key):
        # The leading assignment shadows the module constants locally;
        # values are identical, so behavior is unchanged.
        _KEY, PREV, NEXT = 0, 1, 2
        if key in self.map:
            key, prev, next = self.map.pop(key)
            prev[NEXT] = next
            next[PREV] = prev

    def __iter__(self):
        # Walk the linked list forward from the sentinel.
        end = self.end
        curr = end[NEXT]
        while curr is not end:
            yield curr[KEY]
            curr = curr[NEXT]

    def __reversed__(self):
        KEY, PREV, NEXT = 0, 1, 2
        end = self.end
        curr = end[PREV]
        while curr is not end:
            yield curr[KEY]
            curr = curr[PREV]

    def pop(self, last=True):
        """Remove and return an element: the newest when last is true,
        else the oldest (queue order).

        NOTE(review): an upstream comment claimed the default was changed
        to last=False, but the signature still defaults to last=True.
        """
        if not self:
            raise KeyError('set is empty')
        key = next(reversed(self)) if last else next(iter(self))
        self.discard(key)
        return key

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, list(self))

    def __eq__(self, other):
        # Order-sensitive against other OrderedSets, order-insensitive
        # against any other iterable.
        if isinstance(other, OrderedSet):
            return len(self) == len(other) and list(self) == list(other)
        return set(self) == set(other)

    def __del__(self):
        self.clear()                    # remove circular references

    def __getitem__(self, index):
        # O(n): materializes the whole set to support indexing/slicing.
        return list(self)[index]
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke tests. NOTE(review): the bare print statement below is
    # Python-2-only syntax; this script section cannot run on Python 3.
    print(OrderedSet('abracadaba'))
    print(OrderedSet('simsalabim'))
    ## end of http://code.activestate.com/recipes/576694/ }}}

    ## kca: exercise the added indexing support
    print OrderedSet('simsalabim')[1]

    # Test case for exception at shutdown (yes, really...)
    x = OrderedSet('simsalabim')
|
||||
|
@ -1,44 +0,0 @@
|
||||
'''
|
||||
Created on 17.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
import futile
|
||||
from futile.basictypes import basestring
|
||||
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except ImportError:
|
||||
from ordereddict import OrderedDict
|
||||
|
||||
from abc import ABCMeta
|
||||
from collections import Iterable, Sequence
|
||||
|
||||
|
||||
def is_iterable(o):
    """True for iterables other than strings (strings iterate per character,
    which is almost never what callers want here)."""
    return isinstance(o, Iterable) and not isinstance(o, basestring)
|
||||
|
||||
|
||||
def get_iterable(o):
    """Return *o* as an iterable: () for None, a 1-tuple for scalars and
    strings, and *o* itself when it is already a non-string iterable."""
    if o is None:
        return ()
    # Dated and/or idiom; safe here because (o,) is always truthy.
    return ((not isinstance(o, Iterable) or isinstance(o, basestring))
            and (o,) or o)
|
||||
|
||||
|
||||
def get_list(o):
    """Return *o* as a list: [] for None, [o] for scalars and strings, and
    list(o) when it is already a non-string iterable."""
    if o is None:
        return []
    # Dated and/or idiom; safe here because [o] is always truthy.
    return ((not isinstance(o, Iterable) or isinstance(o, basestring))
            and [o] or list(o))
|
||||
|
||||
|
||||
def yield_buffer(buffer, chunk_size=None):
    """Yield successive chunks read from *buffer* until it is exhausted.

    :param chunk_size: bytes per read; any falsy value selects the
        package-wide futile.DEFAULT_CHUNK_SIZE.
    """
    if not chunk_size:
        chunk_size = futile.DEFAULT_CHUNK_SIZE

    chunk = buffer.read(chunk_size)
    while chunk:
        yield chunk
        chunk = buffer.read(chunk_size)
|
@ -1,127 +0,0 @@
|
||||
# Copyright (c) 2009 Raymond Hettinger
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person
|
||||
# obtaining a copy of this software and associated documentation files
|
||||
# (the "Software"), to deal in the Software without restriction,
|
||||
# including without limitation the rights to use, copy, modify, merge,
|
||||
# publish, distribute, sublicense, and/or sell copies of the Software,
|
||||
# and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
# OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
from UserDict import DictMixin
|
||||
|
||||
|
||||
class OrderedDict(dict, DictMixin):
    """Insertion-ordered dict backport (pre-Python-2.7), backed by a
    circular doubly linked list of [key, prev, next] nodes."""

    def __init__(self, *args, **kwds):
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        # clear() creates the linked-list bookkeeping on first init only
        # (re-running __init__ must not discard existing order).
        try:
            self.__end
        except AttributeError:
            self.clear()
        self.update(*args, **kwds)

    def clear(self):
        self.__end = end = []
        end += [None, end, end]         # sentinel node for doubly linked list
        self.__map = {}                 # key --> [key, prev, next]
        dict.clear(self)

    def __setitem__(self, key, value):
        if key not in self:
            # Link a new node just before the sentinel (i.e. at the end).
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        # Unlink the node from the list.
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev

    def __iter__(self):
        # Walk the list forward from the sentinel.
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    def popitem(self, last=True):
        """Remove and return a (key, value) pair: LIFO when last, else FIFO.

        NOTE(review): .next() is the Python-2-only iterator protocol.
        """
        if not self:
            raise KeyError('dictionary is empty')
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value

    def __reduce__(self):
        # Pickle support: temporarily drop the self-referential linked-list
        # internals, which pickle cannot handle.
        items = [[k, self[k]] for k in self]
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def keys(self):
        # In insertion order, courtesy of __iter__.
        return list(self)

    # Derive the rest of the mapping API from DictMixin.
    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())

    def copy(self):
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        # Order-sensitive against other OrderedDicts, plain dict equality
        # otherwise.
        if isinstance(other, OrderedDict):
            if len(self) != len(other):
                return False
            for p, q in zip(self.items(), other.items()):
                if p != q:
                    return False
            return True
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other
|
@ -1,38 +0,0 @@
|
||||
# Prefer blist.sortedlist; otherwise provide a minimal heap-backed stand-in
# that only supports the operations the surrounding code needs
# (add, remove, pop(0), [0], len).
try:
    from blist import sortedlist
except ImportError:
    from futile.logging import get_logger
    from heapq import heappush, heappop, heapify

    get_logger(__name__).warning("blist.sortedlist is not available. Using a fallback implementation")

    class sortedlist(object):
        """Heap-based fallback: only the smallest element is addressable."""

        def __init__(self, iterable=(), *args, **kw):
            super(sortedlist, self).__init__(*args, **kw)

            l = self._list = list(iterable)

            # NOTE(review): iterable defaults to () so this check is
            # always true; heapify of an empty list is harmless anyway.
            if iterable is not None:
                heapify(l)

        def add(self, v):
            heappush(self._list, v)

        def pop(self, index=-1):
            # NOTE(review): the default index=-1 always raises; callers
            # must explicitly pop(0) to get the smallest element.
            if index != 0:
                raise NotImplementedError()

            return heappop(self._list)

        def remove(self, object):
            self._list.remove(object)
            # Restore the heap invariant after the arbitrary removal.
            heapify(self._list)

        def __getitem__(self, index):
            # Only the heap head (index 0) is meaningfully ordered.
            if index != 0:
                raise NotImplementedError()

            return self._list[index]

        def __len__(self):
            return len(self._list)
|
@ -1,21 +0,0 @@
|
||||
'''
|
||||
Created on 14.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
from futile import ObjectProxy
|
||||
|
||||
class closing(ObjectProxy):
    """Context manager proxy: yields the wrapped object, closes it on exit."""

    def __enter__(self):
        # Hand the wrapped object (not the proxy) to the with-block.
        return self._o

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close unconditionally; exceptions propagate (nothing returned).
        self._o.close()
|
||||
|
||||
|
||||
class exiting(ObjectProxy):
    """Context manager proxy: yields the wrapped object and forwards
    __exit__ to it."""

    def __enter__(self):
        # Hand the wrapped object (not the proxy) to the with-block.
        return self._o

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Delegate cleanup to the wrapped object's own __exit__.
        self._o.__exit__(exc_type, exc_val, exc_tb)
|
@ -1,44 +0,0 @@
|
||||
'''
|
||||
Created on 25.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
import sys
|
||||
from .logging import get_logger
|
||||
|
||||
# Select an ElementTree implementation: prefer lxml (supports pretty
# printing), then the stdlib C accelerator, then the pure-Python version.
try:
    from lxml import etree as impl
    from lxml.etree import tostring as _ts

    # Fix: log message previously read "implementation1." (typo).
    get_logger(__name__).debug("Using lxml etree implementation.")

    def tostring(element, encoding="utf-8", pretty_print=False):
        """Serialize *element*; pretty printing is honoured under lxml."""
        return _ts(element, encoding=encoding, pretty_print=pretty_print)
except ImportError:
    logger = get_logger(__name__)
    logger.warning(
        "lxml library not found, trying builtin ElementTree implementations. Pretty printing will be disabled.")
    try:
        from xml.etree import cElementTree as impl

        # Older cElementTree spells ParseError as XMLParserError.
        try:
            impl.ParseError = impl.XMLParserError
        except AttributeError:
            pass
        logger.debug("Using native xml.etree.cElementTree")
    except ImportError:
        from xml.etree import ElementTree as impl

        logger.debug("Using python xml.etree.ElementTree")

    _ts = impl.tostring

    def tostring(element, encoding="utf-8", pretty_print=False):
        """Serialize *element*; pretty_print is accepted but ignored here."""
        return _ts(element, encoding=encoding)

    impl.tostring = tostring
    # Mirror lxml's exception name onto the stdlib implementation.
    impl.XMLSyntaxError = impl.ParseError

# Expose the chosen implementation under stable module aliases so callers
# can import futile.etree.impl / futile.etree.ElementTree uniformly.
sys.modules[__name__ + ".impl"] = sys.modules[__name__ + ".ElementTree"] = ElementTree = impl
|
||||
|
@ -1,22 +0,0 @@
|
||||
'''
|
||||
Created on 14.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from . import issubclass
|
||||
|
||||
def errorstr(e):
    """Return a human-readable message for exception *e*.

    Prefers a non-empty ``.message`` attribute (Python 2 exceptions);
    falls back to ``str(e)`` when it is missing or empty.
    """
    message = getattr(e, "message", "")
    return message if message else str(e)
|
||||
|
||||
def raise_error(e):
    """Raise *e* directly when it is an exception instance or class;
    otherwise wrap it in a generic Exception and raise that."""
    is_exception = isinstance(e, Exception) or (
        isinstance(e, type) and issubclass(e, Exception))
    if is_exception:
        raise e
    raise Exception(e)
|
@ -1,230 +0,0 @@
|
||||
"""
|
||||
Created on 15.07.2011
|
||||
|
||||
@author: kca
|
||||
"""
|
||||
import logging
|
||||
import logging.handlers
|
||||
from futile.basictypes import ClassType, basestring
|
||||
from futile.threading import current_thread
|
||||
from logging import Filter
|
||||
from futile.collections import get_iterable
|
||||
|
||||
# Module-wide state shared by every logger created through get_logger():
# handlers registered via add_handler(), the default formatter, and the
# default level.
_handlers = []
_formatter = logging.Formatter('%(asctime)s %(levelname)s - %(name)s: %(message)s')
_level = logging.NOTSET

# log level constants for convenience
from logging import CRITICAL, FATAL, ERROR, WARNING, INFO, DEBUG, NOTSET

# Re-bind so the names are real attributes of this module (importable
# directly from futile.logging).
CRITICAL = CRITICAL
FATAL = FATAL
ERROR = ERROR
WARNING = WARNING
INFO = INFO
DEBUG = DEBUG
NOTSET = NOTSET
|
||||
|
||||
|
||||
def get_default_level():
    """Return the module-wide default log level."""
    return _level
|
||||
|
||||
|
||||
def set_default_level(l):
    """Set the module-wide default level and apply it to the root logger
    via logging.basicConfig()."""
    global _level
    _level = l
    logging.basicConfig(level=l)
|
||||
|
||||
|
||||
# try:
|
||||
# from colorlog import ColoredFormatter
|
||||
# formatter = ColoredFormatter(
|
||||
# "%(blue)s%(asctime)s %(log_color)s%(levelname) - 8s%(reset)s%(name)s: %(message)s",
|
||||
# datefmt=None,
|
||||
# reset=True,
|
||||
# log_colors={
|
||||
# 'DEBUG': 'cyan',
|
||||
# 'INFO': 'green',
|
||||
# 'WARNING': 'yellow',
|
||||
# 'ERROR': 'red',
|
||||
# 'CRITICAL': 'red',
|
||||
# }
|
||||
# )
|
||||
# import logging
|
||||
# hand = logging.StreamHandler()
|
||||
# hand.setFormatter(formatter)
|
||||
# futile.logging.add_handler( hand)
|
||||
# except ImportError:
|
||||
# pass
|
||||
def get_default_formatter():
|
||||
return _formatter
|
||||
|
||||
|
||||
def set_default_formatter(frmt):
|
||||
global _formatter
|
||||
if frmt and isinstance(frmt, logging.Formatter):
|
||||
_formatter = frmt
|
||||
else:
|
||||
raise TypeError("Not a logging Formatter: %s" % (frmt, ))
|
||||
|
||||
|
||||
def add_handler(h):
|
||||
if not isinstance(h, logging.Handler):
|
||||
raise TypeError(h)
|
||||
|
||||
_handlers.append(h)
|
||||
|
||||
|
||||
def add_log_file(path, level=None, formatter=None):
|
||||
""" Adds a log file to all future loggers.
|
||||
Files will be rotated depending on max_bytes and backups parameters.
|
||||
|
||||
@param path: path to logfile
|
||||
@param level: minimum log level
|
||||
@param formatter: a logging.Formatter for this log file
|
||||
"""
|
||||
handler = logging.handlers.WatchedFileHandler(path)
|
||||
handler.setFormatter(formatter or _formatter)
|
||||
# TODO(rst): probably try/except is necessary
|
||||
handler.setLevel(level or _level)
|
||||
add_handler(handler)
|
||||
|
||||
|
||||
def get_logger(logger_name=None, level=None):
|
||||
level = level if level is not None else _level
|
||||
# logging.basicConfig(level=level)
|
||||
if logger_name:
|
||||
if not isinstance(logger_name, basestring):
|
||||
if not isinstance(logger_name, (type, ClassType)):
|
||||
l_class = logger_name.__class__
|
||||
else:
|
||||
l_class = logger_name
|
||||
logger_name = l_class.__module__ + "." + l_class.__name__
|
||||
else:
|
||||
logger_name = __name__
|
||||
|
||||
try:
|
||||
logger = logging.getLogger(logger_name)
|
||||
except Exception as e:
|
||||
print ("Failed to get logger '%s': %s" % (logger_name, e))
|
||||
raise
|
||||
|
||||
try:
|
||||
logger.setLevel(level) # raises TypeError: not a valid string or int
|
||||
except TypeError:
|
||||
logger.setLevel(NOTSET) # TODO(rst): set another level if wrong level?
|
||||
for h in _handlers:
|
||||
logger.addHandler(h)
|
||||
return logger
|
||||
|
||||
|
||||
class LoggerMixin(object):
|
||||
|
||||
log_file = None
|
||||
log_level = None
|
||||
|
||||
def __init__(self):
|
||||
self.__logger = None
|
||||
|
||||
@classmethod
|
||||
def _get_logger(cls, logger_name=None):
|
||||
logger = get_logger(logger_name, cls.log_level)
|
||||
if cls.log_file:
|
||||
formatter = get_default_formatter()
|
||||
handler = logging.handlers.WatchedFileHandler(cls.log_file)
|
||||
handler.setFormatter(formatter)
|
||||
logger.addHandler(handler)
|
||||
|
||||
return logger
|
||||
|
||||
def get_logger(self):
|
||||
try:
|
||||
if self.__logger is not None:
|
||||
return self.__logger
|
||||
except AttributeError:
|
||||
pass
|
||||
self.__logger = l = self.get_class_logger()
|
||||
return l
|
||||
|
||||
def set_logger(self, logger):
|
||||
self.__logger = logger
|
||||
logger = property(get_logger, set_logger)
|
||||
|
||||
@classmethod
|
||||
def get_class_logger(cls):
|
||||
try:
|
||||
return cls.__dict__["__logger__"]
|
||||
except KeyError:
|
||||
l = cls.__logger__ = cls._get_logger(cls.__name__)
|
||||
return l
|
||||
|
||||
def __getstate__(self):
|
||||
l = getattr(self, "_LoggerMixin__logger", None)
|
||||
self.__logger = None
|
||||
try:
|
||||
sgs = super(LoggerMixin, self).__getstate__
|
||||
except AttributeError:
|
||||
state = self.__dict__.copy()
|
||||
else:
|
||||
state = sgs()
|
||||
self.__logger = l
|
||||
return state
|
||||
|
||||
|
||||
class ThreadFilter(Filter):
|
||||
def __init__(self, thread=None, name=''):
|
||||
Filter.__init__(self, name=name)
|
||||
self.thread = thread or current_thread()
|
||||
|
||||
def filter(self, record):
|
||||
return current_thread() == self.thread
|
||||
|
||||
|
||||
class ErrorLogger(LoggerMixin):
|
||||
def __init__(self, name="operation", logger=None,
|
||||
level=get_default_level(), *args, **kw):
|
||||
super(ErrorLogger, self).__init__(*args, **kw)
|
||||
if logger is not None:
|
||||
self.logger = logger
|
||||
self.name = name
|
||||
self.log_level = level
|
||||
assert level is not None
|
||||
|
||||
def __enter__(self):
|
||||
self.logger.debug("Entering %s", self.name)
|
||||
return self
|
||||
|
||||
def __exit__(self, type, value, traceback):
|
||||
if type is not None:
|
||||
self.logger.exception("Error in %s", self.name)
|
||||
else:
|
||||
self.logger.log(self.log_level, "%s finished", self.name)
|
||||
|
||||
|
||||
def log_errors(f):
|
||||
def _f(*args, **kw):
|
||||
with ErrorLogger(f.__name__):
|
||||
result = f(*args, **kw)
|
||||
get_logger(f).debug("%s returning: %s", f.__name__, result)
|
||||
return result
|
||||
_f.__name__ = f.__name__
|
||||
return _f
|
||||
|
||||
|
||||
def sanitize_dict(d, keys=("password",), replacement="*", inplace=False):
|
||||
keys = get_iterable(keys)
|
||||
if not inplace:
|
||||
d = dict(d)
|
||||
|
||||
if replacement is None:
|
||||
for k in keys:
|
||||
d.pop(k, None)
|
||||
else:
|
||||
for k in keys:
|
||||
v = d[k]
|
||||
if isinstance(v, basestring):
|
||||
d[k] = replacement * len(v)
|
||||
else:
|
||||
d[k] = replacement
|
||||
return d
|
@ -1,14 +0,0 @@
|
||||
'''
|
||||
Created on 30.08.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from logging.handlers import BufferingHandler as _BufferingHandler
|
||||
|
||||
class BufferingHandler(_BufferingHandler):
|
||||
def __init__(self, capacity = None):
|
||||
_BufferingHandler.__init__(self, capacity = capacity)
|
||||
|
||||
def shouldFlush(self, record):
|
||||
return self.capacity and super(BufferingHandler, self).shouldFlush(record) or False
|
@ -1,9 +0,0 @@
|
||||
'''
|
||||
Created on 30.08.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from collections import namedtuple
|
||||
|
||||
Logbook = namedtuple("Logbook", ("name", "component", "entries"))
|
@ -1,60 +0,0 @@
|
||||
'''
|
||||
Created on 29.08.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
import logging
|
||||
from . import ThreadFilter
|
||||
from ..collections import get_list
|
||||
from futile import NOT_SET
|
||||
from logging import LogRecord, DEBUG
|
||||
from futile.logging import ErrorLogger
|
||||
|
||||
class LogTap(ErrorLogger):
|
||||
def __init__(self, handler, logger = None, name = None, level = DEBUG, *args, **kw):
|
||||
super(LogTap, self).__init__(name = name, logger = logger, level = level, *args, **kw)
|
||||
handler = get_list(handler)
|
||||
self.handlers = handler
|
||||
self.target_logger = logger or logging.root
|
||||
|
||||
def attach(self):
|
||||
map(self.target_logger.addHandler, self.handlers)
|
||||
|
||||
def detach(self):
|
||||
for handler in self.handlers:
|
||||
self.target_logger.removeHandler(handler)
|
||||
handler.close()
|
||||
|
||||
def emit(self, record):
|
||||
for handler in self.handlers:
|
||||
handler.emit(record)
|
||||
|
||||
def __enter__(self):
|
||||
self.attach()
|
||||
return super(LogTap, self).__enter__()
|
||||
|
||||
def __exit__(self, type, value, traceback):
|
||||
super(LogTap, self).__exit__(type, value, traceback)
|
||||
self.detach()
|
||||
|
||||
class BufferingLogTap(LogTap):
|
||||
log = None
|
||||
|
||||
def __init__(self, handler = None, name = None, logger = None, level = DEBUG, capacity = None, memhandler = None, *args, **kw):
|
||||
if not memhandler:
|
||||
from handlers import BufferingHandler
|
||||
memhandler = BufferingHandler(capacity)
|
||||
memhandler.addFilter(ThreadFilter())
|
||||
self.memhandler = memhandler
|
||||
handler = [ memhandler ] + get_list(handler)
|
||||
super(BufferingLogTap, self).__init__(handler = handler, logger = logger, name = name, level = level, *args, **kw)
|
||||
|
||||
def detach(self):
|
||||
self.log = map(lambda r: isinstance(r, LogRecord) and self.memhandler.format(r) or r, self.memhandler.buffer)
|
||||
super(BufferingLogTap, self).detach()
|
||||
|
||||
def emit(self, record, level = NOT_SET):
|
||||
if isinstance(record, LogRecord):
|
||||
return super(BufferingLogTap, self).emit(record)
|
||||
self.memhandler.buffer.append(record)
|
@ -1,82 +0,0 @@
|
||||
'''
|
||||
Created on 30.04.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
import os
|
||||
from fcntl import lockf, LOCK_EX, LOCK_SH, LOCK_UN
|
||||
from contextlib import contextmanager
|
||||
from futile.signal import timeout
|
||||
|
||||
class RWLock(object):
|
||||
def __init__(self, path = None, threadsafe = True, *args, **kw):
|
||||
if not path:
|
||||
raise NotImplementedError()
|
||||
|
||||
if not os.path.exists(path):
|
||||
open(path, "a").close()
|
||||
|
||||
self.__path = path
|
||||
|
||||
if threadsafe:
|
||||
import threading
|
||||
self.__local = threading.local()
|
||||
else:
|
||||
class Local(object):
|
||||
pass
|
||||
self.__local = Local
|
||||
|
||||
self.__local.f = None
|
||||
|
||||
|
||||
@contextmanager
|
||||
def read_transaction(self, timeout = None):
|
||||
self.read_acquire(timeout = timeout)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.read_release()
|
||||
pass
|
||||
pass
|
||||
|
||||
@contextmanager
|
||||
def write_transaction(self, timeout = None):
|
||||
self.write_acquire(timeout = timeout)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.write_release()
|
||||
|
||||
def __acquire(self, fmode, lmode, to):
|
||||
assert getattr(self.__local, "f", None) is None
|
||||
f = open(self.__path, fmode)
|
||||
try:
|
||||
if timeout:
|
||||
with timeout(to):
|
||||
lockf(f, lmode)
|
||||
else:
|
||||
lockf(f, lmode)
|
||||
except:
|
||||
f.close()
|
||||
raise
|
||||
self.__local.f = f
|
||||
return f
|
||||
|
||||
def read_acquire(self, timeout = None):
|
||||
return self.__acquire("r", LOCK_SH, timeout)
|
||||
|
||||
def read_release(self):
|
||||
with self.__local.f as f:
|
||||
self.__local.f = None
|
||||
lockf(f, LOCK_UN)
|
||||
|
||||
write_release = read_release
|
||||
|
||||
def write_acquire(self, timeout = None):
|
||||
return self.__acquire("a", LOCK_EX, timeout)
|
||||
|
||||
__enter__ = write_acquire
|
||||
|
||||
def __exit__(self, *args):
|
||||
self.write_release()
|
@ -1,3 +0,0 @@
|
||||
from RWLock import RWLock
|
||||
|
||||
Lock = RWLock
|
@ -1,83 +0,0 @@
|
||||
'''
|
||||
Created on 15.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from asyncore import dispatcher, loop
|
||||
from socket import AF_INET, SOCK_STREAM, error
|
||||
from sockethelper import socket
|
||||
from futile.exc import errorstr
|
||||
from collections import namedtuple
|
||||
import sys
|
||||
from time import time
|
||||
|
||||
class TestResult(namedtuple("TestResultTuple", ("result", "message"))):
|
||||
def __new__(cls, result, message = ""):
|
||||
return super(TestResult, cls).__new__(cls, result, message)
|
||||
|
||||
def __bool__(self):
|
||||
return self.result
|
||||
__nonzero__ = __bool__
|
||||
|
||||
def __str__(self):
|
||||
if self.message:
|
||||
return "%s - %s" % (self.result, self.message)
|
||||
return str(self.result)
|
||||
|
||||
def __eq__(self, o):
|
||||
try:
|
||||
return self.result == o.result
|
||||
except AttributeError:
|
||||
return False
|
||||
|
||||
def __ne__(self, o):
|
||||
return not (self == o)
|
||||
|
||||
def test_port(host, port, family = AF_INET, type = SOCK_STREAM):
|
||||
try:
|
||||
with socket(family, type) as s:
|
||||
s.connect((host, port))
|
||||
except error, e:
|
||||
return TestResult(False, "%s (%d)" % (e.strerror, e.errno))
|
||||
except Exception, e:
|
||||
return TestResult(False, errorstr(e))
|
||||
return TestResult(True)
|
||||
|
||||
class PortTester(dispatcher):
|
||||
result = TestResult(False, "Test did not run")
|
||||
|
||||
def __init__(self, host, port, family = AF_INET, type = SOCK_STREAM, map = None):
|
||||
dispatcher.__init__(self, map = map)
|
||||
self.create_socket(family, type)
|
||||
self.connect((host, port))
|
||||
self.host = host
|
||||
self.port = port
|
||||
|
||||
def handle_connect(self):
|
||||
self.result = TestResult(True)
|
||||
self.close()
|
||||
|
||||
def handle_error(self):
|
||||
self.result = TestResult(False, errorstr(sys.exc_value))
|
||||
self.close()
|
||||
|
||||
def run_test(map, timeout = 0.0):
|
||||
if timeout and timeout > 0.0:
|
||||
timeout = float(timeout)
|
||||
start = time()
|
||||
while True:
|
||||
loop(map = map, timeout = timeout, count = 1)
|
||||
if map:
|
||||
now = time()
|
||||
timeout -= now - start
|
||||
if timeout <= 0.0:
|
||||
for r in map.itervalues():
|
||||
r.result = TestResult(False, "Timeout")
|
||||
break
|
||||
start = now
|
||||
else:
|
||||
break
|
||||
else:
|
||||
loop(map = map)
|
||||
|
@ -1,4 +0,0 @@
|
||||
|
||||
|
||||
class NetworkError(Exception):
|
||||
pass
|
@ -1,96 +0,0 @@
|
||||
'''
|
||||
Created on 17.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
try:
|
||||
from httplib import HTTPConnection as _HTTPConnection, HTTPSConnection as _HTTPSConnection
|
||||
except ImportError:
|
||||
from http.client import HTTPConnection as _HTTPConnection, HTTPSConnection as _HTTPSConnection
|
||||
|
||||
from futile.contextlib import closing
|
||||
from futile import NOT_SET
|
||||
import socket
|
||||
from . import exc as _exc
|
||||
import sys
|
||||
import types
|
||||
|
||||
try:
|
||||
from urllib import quote, quote_plus, unquote, unquote_plus
|
||||
except ImportError:
|
||||
from urllib.parse import quote, quote_plus, unquote, unquote_plus
|
||||
|
||||
class HTTPResponseWrapper(object):
|
||||
def __init__(self, connection, response, *args, **kw):
|
||||
super(HTTPResponseWrapper, self).__init__(*args, **kw)
|
||||
|
||||
self.__response = response
|
||||
self.__connection = connection
|
||||
|
||||
#def __del__(self):
|
||||
# self.close()
|
||||
|
||||
def __getattr__(self, k):
|
||||
return getattr(self.__response, k)
|
||||
|
||||
def __enter__(self):
|
||||
return self.__response
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.close()
|
||||
|
||||
def close(self):
|
||||
try:
|
||||
self.__response.close()
|
||||
except:
|
||||
pass
|
||||
finally:
|
||||
self.__connection.close()
|
||||
|
||||
class HTTPConnection(_HTTPConnection):
|
||||
response_wrapper = closing
|
||||
|
||||
def __init__(self, host, port=None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, response_wrapper = NOT_SET):
|
||||
_HTTPConnection.__init__(self, host, port, strict, timeout, source_address)
|
||||
if response_wrapper is not NOT_SET:
|
||||
self.response_wrapper = response_wrapper
|
||||
|
||||
def getresponse(self, buffering = False):
|
||||
r = _HTTPConnection.getresponse(self, buffering)
|
||||
if self.response_wrapper:
|
||||
r = self.response_wrapper(r)
|
||||
return r
|
||||
|
||||
class HTTPSConnection(_HTTPSConnection):
|
||||
response_wrapper = closing
|
||||
|
||||
def __init__(self, host, port=None, key_file = None, cert_file = None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, response_wrapper = NOT_SET):
|
||||
_HTTPSConnection.__init__(self, host, port, key_file = key_file, cert_file = cert_file, strict = strict, timeout = timeout, source_address = source_address)
|
||||
if response_wrapper is not NOT_SET:
|
||||
self.response_wrapper = response_wrapper
|
||||
|
||||
def getresponse(self, buffering = False):
|
||||
r = _HTTPSConnection.getresponse(self, buffering)
|
||||
if self.response_wrapper:
|
||||
r = self.response_wrapper(r)
|
||||
return r
|
||||
|
||||
|
||||
class exc(types.ModuleType):
|
||||
def __getattr__(self, k):
|
||||
try:
|
||||
v = getattr(_exc, k)
|
||||
except AttributeError:
|
||||
if not k.startswith("HTTPError"):
|
||||
raise
|
||||
v = _exc.get_error_class(k[9:])
|
||||
setattr(self, k, v)
|
||||
return v
|
||||
|
||||
|
||||
name = __name__ + ".exc"
|
||||
exc = exc(name)
|
||||
sys.modules[name] = exc
|
||||
del name
|
||||
|
||||
|
@ -1,55 +0,0 @@
|
||||
'''
|
||||
Created on 19.03.2013
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from logging import DEBUG, WARNING
|
||||
import futile.logging
|
||||
import urllib3.connectionpool
|
||||
from urllib3.connectionpool import HTTPConnectionPool, HTTPSConnectionPool
|
||||
from futile.logging import LoggerMixin
|
||||
from futile import ObjectProxy
|
||||
|
||||
if not futile.logging.get_logger().isEnabledFor(DEBUG):
|
||||
urllib3.connectionpool.log.setLevel(WARNING)
|
||||
|
||||
class Urllib3ResponseWrapper(ObjectProxy):
|
||||
def getheader(self, header, default=None):
|
||||
return self._o.getheader(header.lower(), default)
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.close()
|
||||
|
||||
def close(self):
|
||||
self._o.release_conn()
|
||||
|
||||
def isclosed(self):
|
||||
return False
|
||||
|
||||
class ConnectionPoolManager(LoggerMixin):
|
||||
def __init__(self, host, port, certfile = None, keyfile = None, cacertfile=None, force_ssl = False, *args, **kw):
|
||||
super(ConnectionPoolManager, self).__init__(*args, **kw)
|
||||
|
||||
self.logger.debug("Creating ConnectionPoolManager for %s:%s", host, port)
|
||||
|
||||
if certfile or keyfile or force_ssl:
|
||||
#https://docs.python.org/2/library/ssl.html#ssl.SSLContext
|
||||
from ssl import SSLContext, PROTOCOL_SSLv23
|
||||
ssl_context=SSLContext(PROTOCOL_SSLv23)
|
||||
ssl_context.load_cert_chain(certfile = certfile, keyfile = keyfile)
|
||||
ssl_context.load_verify_locations(cafile=cacertfile)
|
||||
#https://docs.python.org/2/library/httplib.html
|
||||
self.__pool = HTTPSConnectionPool(host, port, maxsize = 16, context = ssl_context)
|
||||
else:
|
||||
self.__pool = HTTPConnectionPool(host, port, maxsize = 16)
|
||||
|
||||
def request(self, method, path, body, headers, timeout):
|
||||
return Urllib3ResponseWrapper(self.__pool.urlopen(method, path, body,
|
||||
headers, timeout = timeout, pool_timeout = 30, preload_content = False, assert_same_host = False))
|
||||
|
||||
|
||||
|
@ -1,353 +0,0 @@
|
||||
'''
|
||||
Created on 21.05.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from base64 import b64encode
|
||||
from cStringIO import StringIO
|
||||
from datetime import datetime
|
||||
from logging import DEBUG
|
||||
from socket import getservbyname
|
||||
from time import time
|
||||
from urllib import quote_plus
|
||||
from urllib2 import quote
|
||||
from urlparse import urlparse
|
||||
|
||||
from futile import ObjectProxy
|
||||
from futile.logging import LoggerMixin
|
||||
from futile.net.http.exc import NetworkError, HTTPError
|
||||
|
||||
|
||||
def compose_qs(values):
|
||||
return "&".join([ "%s=%s" % (quote(k), quote(v)) for k, v in dict(values).iteritems() ])
|
||||
|
||||
class LoggingResponseWrapper(LoggerMixin, ObjectProxy):
|
||||
def __init__(self, response, *args, **kw):
|
||||
super(LoggingResponseWrapper, self).__init__(proxyobject = response, *args, **kw)
|
||||
self.__buffer = StringIO()
|
||||
self.__finalized = False
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.close()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def read(self, n = None):
|
||||
s = self._o.read(n)
|
||||
self.__buffer.write(s)
|
||||
return s
|
||||
|
||||
def readline(self):
|
||||
s = self._o.readline()
|
||||
self.__buffer.write(s)
|
||||
return s
|
||||
|
||||
def readlines(self, sizehint = None):
|
||||
lines = self._o.readlines(sizehint)
|
||||
self.__buffer.write(''.join(lines))
|
||||
return lines
|
||||
|
||||
def close(self):
|
||||
if self.__finalized:
|
||||
self.logger.debug("%s is already finalized" % (self, ))
|
||||
return
|
||||
|
||||
self.__finalized = True
|
||||
try:
|
||||
if not self._o.isclosed():
|
||||
self.__buffer.write(self._o.read())
|
||||
self.logger.debug("Read data:\n %s", self.__buffer.getvalue())
|
||||
except:
|
||||
self.logger.exception("Finalizing response failed")
|
||||
finally:
|
||||
self._o.close()
|
||||
|
||||
self.__buffer.close()
|
||||
|
||||
|
||||
class CachingHttplibResponseWrapper(ObjectProxy, LoggerMixin):
|
||||
def __init__(self, response, path, tag, last_modified, cache, *args, **kw):
|
||||
super(CachingHttplibResponseWrapper, self).__init__(proxyobject = response, *args, **kw)
|
||||
self.__cache = cache
|
||||
self.__buffer = StringIO()
|
||||
self.__path = path
|
||||
self.__tag = tag
|
||||
self.__last_modified = last_modified
|
||||
self.__finalized = False
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.close()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def read(self, n = None):
|
||||
s = self._o.read(n)
|
||||
self.__buffer.write(s)
|
||||
return s
|
||||
|
||||
def readline(self):
|
||||
s = self._o.readline()
|
||||
self.__buffer.write(s)
|
||||
return s
|
||||
|
||||
def readlines(self, sizehint = None):
|
||||
lines = self._o.readlines(sizehint)
|
||||
self.__buffer.write(''.join(lines))
|
||||
return lines
|
||||
|
||||
def close(self):
|
||||
if self.__finalized:
|
||||
self.logger.debug("%s is already finalized" % (self, ))
|
||||
return
|
||||
|
||||
self.__finalized = True
|
||||
try:
|
||||
if not self._o.isclosed():
|
||||
self.__buffer.write(self._o.read())
|
||||
val = self.__buffer.getvalue()
|
||||
self.logger.debug("Putting to cache: %s -> %s, %s\n %s", self.__path, self.__tag, self.__last_modified, val)
|
||||
self.__cache[self.__path] = (self.__tag, self.__last_modified, val)
|
||||
except:
|
||||
self.logger.exception("Finalizing response failed")
|
||||
finally:
|
||||
self._o.close()
|
||||
|
||||
self.__buffer.close()
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._o, name)
|
||||
|
||||
|
||||
class closing(ObjectProxy):
|
||||
def __getattr__(self, k):
|
||||
return getattr(self._o, k)
|
||||
|
||||
def __enter__(self):
|
||||
return self._o
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self._o.close()
|
||||
|
||||
def close(self):
|
||||
self._o.close()
|
||||
|
||||
|
||||
class RestClient(LoggerMixin):
|
||||
ERROR_RESPONSE_MAX = 320
|
||||
|
||||
get_timeout = timeout = 120.0
|
||||
|
||||
def __init__(self, uri, username=None, password=None, certfile=None,
|
||||
keyfile=None, cacertfile=None, content_type="text/plain",
|
||||
headers=None,
|
||||
cache=True, timeout=None, get_timeout=None,
|
||||
component_name = "server", connection_manager = None,
|
||||
*args, **kw):
|
||||
super(RestClient, self).__init__(*args, **kw)
|
||||
|
||||
self.logger.debug("Creating RestClient for %s", uri)
|
||||
|
||||
self.timeout = timeout or self.timeout
|
||||
self.get_timeout = get_timeout or timeout or self.get_timeout
|
||||
|
||||
if cache:
|
||||
if cache is True:
|
||||
from futile.caching import LRUCache
|
||||
cache = LRUCache()
|
||||
self.__cache = cache
|
||||
|
||||
if "://" not in uri:
|
||||
uri = "http://" + uri
|
||||
|
||||
self.__content_type = content_type
|
||||
self.component_name = component_name
|
||||
|
||||
info = urlparse(uri)
|
||||
|
||||
self.logger.debug("Restclient certfile is %s"%certfile)
|
||||
if info.scheme == "https":
|
||||
if bool(certfile) ^ bool(keyfile):
|
||||
raise ValueError("Must give both certfile and keyfile if any")
|
||||
if certfile:
|
||||
from os.path import exists
|
||||
if not exists(certfile):
|
||||
raise ValueError("Certificate file not found: %s" % (certfile, ))
|
||||
if not exists(keyfile):
|
||||
raise ValueError("Key file not found: %s" % (keyfile, ))
|
||||
elif info.scheme != "http":
|
||||
raise ValueError(info.scheme)
|
||||
else:
|
||||
# In case of http, we do not want any certificates
|
||||
keyfile = certfile = None
|
||||
|
||||
port = info.port and int(info.port) or getservbyname(info.scheme)
|
||||
|
||||
self.__base = info.path or ""
|
||||
#if not self.__base.endswith("/"):
|
||||
# self.__base += "/"
|
||||
|
||||
if not username:
|
||||
username = info.username
|
||||
|
||||
if not headers:
|
||||
headers = {}
|
||||
|
||||
headers.setdefault("Accept", "*/*")
|
||||
headers["Accept-Encoding"] = "identity"
|
||||
|
||||
if username:
|
||||
password = password or info.password or ""
|
||||
headers["Authorization"] = "Basic " + b64encode("%s:%s" % (username, password))
|
||||
|
||||
self.__headers = headers
|
||||
|
||||
if not connection_manager:
|
||||
#from SimpleConnectionManager import SimpleConnectionManager as connection_manager
|
||||
from ConnectionPoolManager import ConnectionPoolManager as connection_manager
|
||||
|
||||
self.__connection_manager = connection_manager(host=info.hostname,
|
||||
port=port,
|
||||
certfile = certfile, keyfile = keyfile, cacertfile = cacertfile, force_ssl = info.scheme == "https")
|
||||
|
||||
def set_authinfo(self, username, password=""):
|
||||
if not username:
|
||||
self.__headers.pop("Authorization")
|
||||
else:
|
||||
self.__headers["Authorization"] = "Basic " + b64encode("%s:%s" % (quote_plus(username), password))
|
||||
|
||||
def request(self, method, path, data = None, headers = {}, args = None):
|
||||
if isinstance(data, unicode):
|
||||
data = data.encode("utf-8")
|
||||
|
||||
fullpath = self.__base + path
|
||||
|
||||
request_headers = self.__headers.copy()
|
||||
|
||||
if args:
|
||||
fullpath += ("?" in fullpath and "&" or "?") + compose_qs(args)
|
||||
|
||||
if headers:
|
||||
request_headers.update(headers)
|
||||
|
||||
if method == "GET":
|
||||
timeout = self.get_timeout
|
||||
if self.__cache:
|
||||
try:
|
||||
etag, modified, cached = self.__cache[fullpath]
|
||||
if etag:
|
||||
request_headers["If-None-Match"] = etag
|
||||
request_headers["If-Modified-Since"] = modified
|
||||
except KeyError:
|
||||
request_headers.pop("If-None-Match", None)
|
||||
request_headers.pop("If-Modified-Since", None)
|
||||
else:
|
||||
timeout = self.timeout
|
||||
|
||||
if data:
|
||||
request_headers.setdefault("Content-Type", self.__content_type)
|
||||
if hasattr(data, "read") and not hasattr(data, "fileno"):
|
||||
data = data.read()
|
||||
|
||||
log_headers = request_headers
|
||||
#if self.logger.isEnabledFor(DEBUG) and "Authorization" in request_headers:
|
||||
#log_headers = request_headers.copy()
|
||||
#log_headers["Authorization"] = "<purged>"
|
||||
|
||||
if method == "GET":
|
||||
self.logger.debug("%s: %s (%s)", method, fullpath, log_headers)
|
||||
else:
|
||||
self.logger.debug("%s: %s (%s)\n%s", method, fullpath, log_headers, repr(data))
|
||||
|
||||
t = time()
|
||||
try:
|
||||
response = self.__connection_manager.request(method, fullpath, data, request_headers, timeout)
|
||||
except Exception as e:
|
||||
if self.logger.isEnabledFor(DEBUG):
|
||||
self.logger.exception("Error during request")
|
||||
if str(e) in ("", "''"):
|
||||
e = repr(e)
|
||||
try:
|
||||
error_msg = "An error occurred while contacting the %s: %s. Request was: %s %s (%.4fs)" % (self.component_name, e, method, fullpath, time() - t)
|
||||
except:
|
||||
self.logger.exception("Failed to format error message.")
|
||||
error_msg = "Error during request."
|
||||
|
||||
raise NetworkError(error_msg)
|
||||
|
||||
self.logger.debug("%s %s result: %s (%.4fs)", method, fullpath, response.status, time() - t)
|
||||
r_status = response.status
|
||||
if r_status == 304:
|
||||
response.close()
|
||||
try:
|
||||
self.logger.debug("Using cached answer for %s (%s, %s):\n %s", fullpath, etag, modified, cached)
|
||||
return closing(StringIO(cached))
|
||||
except NameError:
|
||||
raise NetworkError("Error: The %s returned 304 though no cached version is available. Request was: %s %s" % (self.component_name, method, fullpath))
|
||||
if r_status == 302:
|
||||
raise NotImplementedError("HTTP redirect")
|
||||
if r_status < 200 or r_status >= 300:
|
||||
with response:
|
||||
via = response.getheader("Via")
|
||||
try:
|
||||
data = response.read(self.ERROR_RESPONSE_MAX and self.ERROR_RESPONSE_MAX + 1 or None)
|
||||
if not data or (not self.logger.isEnabledFor(DEBUG) and "<html>" in data):
|
||||
data = "<no further information available>"
|
||||
else:
|
||||
if self.ERROR_RESPONSE_MAX and len(data) > self.ERROR_RESPONSE_MAX:
|
||||
data = data[:self.ERROR_RESPONSE_MAX] + " (truncated)\n"
|
||||
data = data.encode("utf-8")
|
||||
except Exception as e:
|
||||
data = "<failed to read error response: %s>" % (e, )
|
||||
|
||||
if not data.endswith("\n"):
|
||||
data += "\n"
|
||||
|
||||
try:
|
||||
msg = "Error during execution. The %s said: %s %s - %sRequest was: %s %s. " % (self.component_name, response.status, response.reason, data, method, fullpath)
|
||||
except:
|
||||
msg = "Error during execution. The %s said %s. " % (self.component_name, response.status)
|
||||
|
||||
if via:
|
||||
culprit = via.split(",")[0]
|
||||
p = culprit.rfind("(")
|
||||
if p >= 0 and culprit.endswith(")"):
|
||||
culprit = culprit[p + 1:-1]
|
||||
msg += "The error occurred after the request went through %s (Via: %s)." % (culprit, via)
|
||||
else:
|
||||
msg += "The error seems to have occurred at the %s (No Via header found in response)." % (self.component_name, )
|
||||
|
||||
raise HTTPError(msg=msg, status=response.status)
|
||||
|
||||
if method == "DELETE":
|
||||
try:
|
||||
self.__cache.pop(fullpath, None)
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
etag = response.getheader("Etag")
|
||||
modified = response.getheader("Last-Modified")
|
||||
if self.__cache is not False and (etag or modified):
|
||||
if not modified:
|
||||
modified = datetime.utcnow().strftime("%a, %d %b %Y %X GMT")
|
||||
response = CachingHttplibResponseWrapper(response, fullpath, etag, modified, self.__cache)
|
||||
elif self.logger.isEnabledFor(DEBUG):
|
||||
response = LoggingResponseWrapper(response)
|
||||
|
||||
return response
|
||||
|
||||
def get(self, path, headers = None, args = None):
|
||||
return self.request("GET", path, headers = headers, args = args)
|
||||
|
||||
def post(self, path, data, headers = None):
|
||||
return self.request("POST", path, data, headers)
|
||||
add = post
|
||||
|
||||
def put(self, path, data, headers = None):
|
||||
return self.request("PUT", path, data, headers)
|
||||
update = put
|
||||
|
||||
def delete(self, path, headers = None):
|
||||
return self.request("DELETE", path, None, headers)
|
@ -1,327 +0,0 @@
|
||||
'''
|
||||
Created on 21.05.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from base64 import b64encode
|
||||
from cStringIO import StringIO
|
||||
from logging import DEBUG
|
||||
from socket import getservbyname
|
||||
from urllib2 import quote
|
||||
from urlparse import urlparse
|
||||
|
||||
#import vertx
|
||||
|
||||
from aplus import Promise
|
||||
from futile import ObjectProxy
|
||||
from futile.logging import LoggerMixin
|
||||
from futile.net.http.exc import NetworkError, HTTPError
|
||||
|
||||
|
||||
def compose_qs(values):
|
||||
return "&".join([ "%s=%s" % (quote(k), quote(v)) for k, v in dict(values).iteritems() ])
|
||||
|
||||
class LoggingResponseWrapper(LoggerMixin, ObjectProxy):
|
||||
def __init__(self, response, *args, **kw):
|
||||
super(LoggingResponseWrapper, self).__init__(proxyobject = response, *args, **kw)
|
||||
self.__buffer = StringIO()
|
||||
self.__finalized = False
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.close()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def read(self, n = None):
|
||||
s = self._o.read(n)
|
||||
self.__buffer.write(s)
|
||||
return s
|
||||
|
||||
def readline(self):
|
||||
s = self._o.readline()
|
||||
self.__buffer.write(s)
|
||||
return s
|
||||
|
||||
def readlines(self, sizehint = None):
|
||||
lines = self._o.readlines(sizehint)
|
||||
self.__buffer.write(''.join(lines))
|
||||
return lines
|
||||
|
||||
def close(self):
|
||||
if self.__finalized:
|
||||
self.logger.debug("%s is already finalized" % (self, ))
|
||||
return
|
||||
|
||||
self.__finalized = True
|
||||
try:
|
||||
if not self._o.isclosed():
|
||||
self.__buffer.write(self._o.read())
|
||||
self.logger.debug("Read data:\n %s", self.__buffer.getvalue())
|
||||
except:
|
||||
self.logger.exception("Finalizing response failed")
|
||||
finally:
|
||||
self._o.close()
|
||||
|
||||
self.__buffer.close()
|
||||
|
||||
|
||||
class CachingHttplibResponseWrapper(ObjectProxy, LoggerMixin):
|
||||
def __init__(self, response, path, tag, last_modified, cache, *args, **kw):
|
||||
super(CachingHttplibResponseWrapper, self).__init__(proxyobject = response, *args, **kw)
|
||||
self.__cache = cache
|
||||
self.__buffer = StringIO()
|
||||
self.__path = path
|
||||
self.__tag = tag
|
||||
self.__last_modified = last_modified
|
||||
self.__finalized = False
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.close()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def read(self, n = None):
|
||||
s = self._o.read(n)
|
||||
self.__buffer.write(s)
|
||||
return s
|
||||
|
||||
def readline(self):
|
||||
s = self._o.readline()
|
||||
self.__buffer.write(s)
|
||||
return s
|
||||
|
||||
def readlines(self, sizehint = None):
|
||||
lines = self._o.readlines(sizehint)
|
||||
self.__buffer.write(''.join(lines))
|
||||
return lines
|
||||
|
||||
def close(self):
|
||||
if self.__finalized:
|
||||
self.logger.debug("%s is already finalized" % (self, ))
|
||||
return
|
||||
|
||||
self.__finalized = True
|
||||
try:
|
||||
if not self._o.isclosed():
|
||||
self.__buffer.write(self._o.read())
|
||||
val = self.__buffer.getvalue()
|
||||
self.logger.debug("Putting to cache: %s -> %s, %s\n %s", self.__path, self.__tag, self.__last_modified, val)
|
||||
self.__cache[self.__path] = (self.__tag, self.__last_modified, val)
|
||||
except:
|
||||
self.logger.exception("Finalizing response failed")
|
||||
finally:
|
||||
self._o.close()
|
||||
|
||||
self.__buffer.close()
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._o, name)
|
||||
|
||||
|
||||
class closing(ObjectProxy):
|
||||
def __getattr__(self, k):
|
||||
return getattr(self._o, k)
|
||||
|
||||
def __enter__(self):
|
||||
return self._o
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self._o.close()
|
||||
|
||||
def close(self):
|
||||
self._o.close()
|
||||
|
||||
|
||||
class RestClient(LoggerMixin):
|
||||
ERROR_RESPONSE_MAX = 320
|
||||
|
||||
get_timeout = timeout = 120.0
|
||||
|
||||
def __init__(self, uri, username=None, password=None, certfile=None,
|
||||
keyfile=None, content_type="text/plain", headers=None,
|
||||
cache=True, timeout=None, get_timeout=None,
|
||||
component_name = "server", connection_manager = None,
|
||||
*args, **kw):
|
||||
super(RestClient, self).__init__(*args, **kw)
|
||||
|
||||
self.logger.debug("Creating RestClient for %s", uri)
|
||||
|
||||
self.timeout = timeout or self.timeout
|
||||
self.get_timeout = get_timeout or timeout or self.get_timeout
|
||||
|
||||
if cache:
|
||||
if cache is True:
|
||||
from futile.caching import LRUCache
|
||||
cache = LRUCache()
|
||||
self.__cache = cache
|
||||
|
||||
if "://" not in uri:
|
||||
uri = "http://" + uri
|
||||
|
||||
self.__content_type = content_type
|
||||
self.component_name = component_name
|
||||
|
||||
info = urlparse(uri)
|
||||
|
||||
if info.scheme == "https":
|
||||
if bool(certfile) ^ bool(keyfile):
|
||||
raise ValueError("Must give both certfile and keyfile if any")
|
||||
if certfile:
|
||||
from os.path import exists
|
||||
if not exists(certfile):
|
||||
raise ValueError("Certificate file not found: %s" % (certfile, ))
|
||||
if not exists(keyfile):
|
||||
raise ValueError("Key file not found: %s" % (keyfile, ))
|
||||
elif info.scheme != "http":
|
||||
raise ValueError(info.scheme)
|
||||
|
||||
port = info.port and int(info.port) or getservbyname(info.scheme)
|
||||
|
||||
self.__base = info.path or ""
|
||||
#if not self.__base.endswith("/"):
|
||||
# self.__base += "/"
|
||||
|
||||
if not username:
|
||||
username = info.username
|
||||
|
||||
if not headers:
|
||||
headers = {}
|
||||
|
||||
headers.setdefault("Accept", "*/*")
|
||||
headers["Accept-Encoding"] = "identity"
|
||||
|
||||
if username:
|
||||
password = password or info.password or ""
|
||||
headers["Authorization"] = "Basic " + b64encode("%s:%s" % (username, password))
|
||||
|
||||
self.__headers = headers
|
||||
|
||||
#if not connection_manager:
|
||||
# #from SimpleConnectionManager import SimpleConnectionManager as connection_manager
|
||||
# from ConnectionPoolManager import ConnectionPoolManager as connection_manager
|
||||
#
|
||||
# self.__connection_manager = connection_manager(host = info.hostname, port = port,
|
||||
# certfile = certfile, keyfile = keyfile, force_ssl = info.scheme == "https")
|
||||
#
|
||||
|
||||
self.client= vertx.create_http_client()
|
||||
self.client.host = info.netloc.split(":")[0]
|
||||
self.client.port = port
|
||||
|
||||
#temporary test server
|
||||
#import json
|
||||
#self.srv = vertx.create_http_server()
|
||||
#def srv_handle(re):
|
||||
# re.response.put_header("Content-Type","application/json; charset=utf-8")
|
||||
# re.response.put_header("Location","locationlocation.location")
|
||||
# re.response.end(json.dumps({"One":"Two"}))
|
||||
#self.srv.request_handler(srv_handle)
|
||||
#self.srv.listen(5000)
|
||||
|
||||
def request(self, method, path, data = None, headers = {}, args = None):
|
||||
if isinstance(data, unicode):
|
||||
data = data.encode("utf-8")
|
||||
fullpath = self.__base + path
|
||||
request_headers = self.__headers.copy()
|
||||
|
||||
if args:
|
||||
fullpath += ("?" in fullpath and "&" or "?") + compose_qs(args)
|
||||
|
||||
if headers:
|
||||
request_headers.update(headers)
|
||||
|
||||
if method == "GET":
|
||||
timeout = self.get_timeout
|
||||
try:
|
||||
etag, modified, cached = self.__cache[fullpath]
|
||||
if etag:
|
||||
request_headers["If-None-Match"] = etag
|
||||
request_headers["If-Modified-Since"] = modified
|
||||
except KeyError:
|
||||
request_headers.pop("If-None-Match", None)
|
||||
request_headers.pop("If-Modified-Since", None)
|
||||
else:
|
||||
timeout = self.timeout
|
||||
request_headers.setdefault("Content-Type", self.__content_type)
|
||||
|
||||
log_headers = request_headers
|
||||
if self.logger.isEnabledFor(DEBUG) and "Authorization" in request_headers:
|
||||
log_headers = request_headers.copy()
|
||||
log_headers["Authorization"] = "<purged>"
|
||||
|
||||
if method == "GET":
|
||||
self.logger.debug("%s: %s (%s)", method, fullpath, log_headers)
|
||||
else:
|
||||
self.logger.debug("%s: %s (%s)\n%s", method, fullpath, log_headers, repr(data))
|
||||
|
||||
#t = time()
|
||||
promise=Promise()
|
||||
try:
|
||||
#response = self.__connection_manager.request(method, fullpath, data, request_headers, timeout)
|
||||
|
||||
def response_handler(resp):
|
||||
if resp.status_code == 304:
|
||||
try:
|
||||
promise.fulfill(closing(StringIO(cached)))
|
||||
except NameError:
|
||||
promise.reject(NetworkError("Error: The %s returned 304 though no cached version is available. Request was: %s %s" % (self.component_name, method, fullpath)))
|
||||
if resp.status_code < 200 or resp.status_code >= 300:
|
||||
try:
|
||||
promise.reject(HTTPError(msg = resp.status_message, status = resp.status_code))
|
||||
except:
|
||||
promise.reject(HTTPError(msg = "Http error", status = response.status))
|
||||
else:
|
||||
promise.fulfill(resp)
|
||||
|
||||
req=self.client.request(method,fullpath,response_handler)
|
||||
for head,value in request_headers.items():
|
||||
req.put_header(head,value)
|
||||
if data:
|
||||
req.chunked = True
|
||||
req.write_str(data)
|
||||
req.end()
|
||||
|
||||
except Exception as e:
|
||||
print "Exception triggered: %s"%e
|
||||
promise.reject(e)
|
||||
|
||||
return promise
|
||||
|
||||
#if method == "DELETE":
|
||||
# try:
|
||||
# self.__cache.pop(fullpath, None)
|
||||
# except AttributeError:
|
||||
# pass
|
||||
#else:
|
||||
# etag = response.getheader("Etag")
|
||||
# modified = response.getheader("Last-Modified")
|
||||
# if etag or modified:
|
||||
# if not modified:
|
||||
# modified = datetime.utcnow().strftime("%a, %d %b %Y %X GMT")
|
||||
# response = CachingHttplibResponseWrapper(response, fullpath, etag, modified, self.__cache)
|
||||
# elif self.logger.isEnabledFor(DEBUG):
|
||||
# response = LoggingResponseWrapper(response)
|
||||
|
||||
|
||||
|
||||
|
||||
def get(self, path, headers = None, args = None):
|
||||
p = self.request("GET", path, headers = headers, args = args)
|
||||
return p
|
||||
|
||||
def post(self, path, data, headers = None):
|
||||
p = self.request("POST", path, data, headers)
|
||||
return p
|
||||
add = post
|
||||
|
||||
def put(self, path, data, headers = None):
|
||||
p = self.request("PUT", path, data)
|
||||
return p
|
||||
update = put
|
||||
|
||||
def delete(self, path, headers = None):
|
||||
p = self.request("DELETE", path, None, headers)
|
||||
return p
|
@ -1,61 +0,0 @@
|
||||
'''
|
||||
Created on 19.03.2013
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from httplib import HTTPConnection, HTTPSConnection
|
||||
from futile.logging import LoggerMixin
|
||||
|
||||
class HttplibResponseWrapper(LoggerMixin):
|
||||
def __init__(self, connection, *args, **kw):
|
||||
super(HttplibResponseWrapper, self).__init__(*args, **kw)
|
||||
|
||||
self.__response = connection.getresponse()
|
||||
self.__connection = connection
|
||||
|
||||
def __getattr__(self, k):
|
||||
return getattr(self.__response, k)
|
||||
|
||||
def __enter__(self):
|
||||
return self.__response
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.close()
|
||||
|
||||
def close(self):
|
||||
try:
|
||||
self.__response.close()
|
||||
except:
|
||||
self.logger.exception("Error closing response")
|
||||
finally:
|
||||
self.__connection.close()
|
||||
|
||||
class SimpleConnectionManager(LoggerMixin):
|
||||
def __init__(self, host, port, certfile = None, keyfile = None, force_ssl = False, *args, **kw):
|
||||
super(SimpleConnectionManager, self).__init__(*args, **kw)
|
||||
|
||||
self.logger.debug("Creating SimpleConnectionManager for %s:%s", host, port)
|
||||
|
||||
if keyfile or certfile or force_ssl:
|
||||
self.__certfile = certfile
|
||||
self.__keyfile = keyfile
|
||||
self._get_connection = self._get_secure_connection
|
||||
|
||||
self.__host = host
|
||||
self.__port = port
|
||||
|
||||
def request(self, method, path, body, headers, timeout):
|
||||
connection = self._get_connection(timeout)
|
||||
try:
|
||||
connection.request(method, path, body, headers)
|
||||
return HttplibResponseWrapper(connection)
|
||||
except:
|
||||
connection.close()
|
||||
raise
|
||||
|
||||
def _get_connection(self, timeout):
|
||||
return HTTPConnection(self.__host, self.__port, timeout = timeout)
|
||||
|
||||
def _get_secure_connection(self, timeout):
|
||||
return HTTPSConnection(self.__host, self.__port, self.__keyfile, self.__certfile, timeout = timeout)
|
@ -1,134 +0,0 @@
|
||||
'''
|
||||
Created on 21.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
|
||||
from futile.net.exc import NetworkError
|
||||
|
||||
STATUS_STRINGS = {
|
||||
100: "Continue",
|
||||
101: "Switching Protocols",
|
||||
200: "Ok",
|
||||
201: "Created",
|
||||
202: "Accepted",
|
||||
203: "Non-Authoritative Information",
|
||||
204: "No Content",
|
||||
205: "Reset Content",
|
||||
206: "Partial Content",
|
||||
300: "Multiple Choices",
|
||||
301: "Moved Permanently",
|
||||
302: "Found",
|
||||
303: "See Other",
|
||||
304: "Not Modfied",
|
||||
305: "Use Proxy",
|
||||
306: "",
|
||||
307: "Temporary Redirect",
|
||||
400: "Bad Request",
|
||||
401: "Unauthorized",
|
||||
402: "Payment Required",
|
||||
403: "Forbidden",
|
||||
404: "Not Found",
|
||||
405: "Method Not Allowed",
|
||||
406: "Not Acceptable",
|
||||
407: "Proxy Authentication Required",
|
||||
408: "Request Timeout",
|
||||
409: "Conflict",
|
||||
410: "Gone",
|
||||
411: "Length Required",
|
||||
412: "Precondition Failed",
|
||||
413: "Request Entity Too Large",
|
||||
414: "Request-URI Too Long",
|
||||
415: "Unsupported Media Type",
|
||||
416: "Requested Range Not Satisfiable",
|
||||
417: "Expectation Failed",
|
||||
500: "Internal Server Error",
|
||||
501: "Not Implemented",
|
||||
502: "Bad Gateway",
|
||||
503: "Service Unavailable",
|
||||
504: "Gateway Timeout",
|
||||
}
|
||||
|
||||
STATUS_MIN = 100
|
||||
STATUS_MAX = 504
|
||||
ERROR_MIN = 400
|
||||
ERROR_MAX = 504
|
||||
|
||||
|
||||
def get_error_message(statuscode):
|
||||
try:
|
||||
return STATUS_STRINGS[statuscode]
|
||||
except KeyError:
|
||||
raise ValueError(statuscode)
|
||||
|
||||
|
||||
class HTTPErrorType(type):
|
||||
__classes = {}
|
||||
|
||||
@classmethod
|
||||
def get_error_class(cls, status):
|
||||
try:
|
||||
status = int(status)
|
||||
except (TypeError, ValueError):
|
||||
raise ValueError("Not a valid HTTP error code: '%s'" % (status, ))
|
||||
|
||||
try:
|
||||
errorcls = cls.__classes[status]
|
||||
except KeyError:
|
||||
if status < STATUS_MIN or status > STATUS_MAX:
|
||||
raise ValueError("Not a valid HTTP error code: %s" % (status,))
|
||||
name = "HTTPError%s" % (status, )
|
||||
errorcls = cls(name, (HTTPError, ), {"__init__":
|
||||
cls._make_init(status)})
|
||||
cls.__classes[status] = errorcls
|
||||
globals()[name] = errorcls
|
||||
|
||||
return errorcls
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
if self is HTTPError:
|
||||
try:
|
||||
status = kw.pop("status")
|
||||
except KeyError:
|
||||
try:
|
||||
status = args[0]
|
||||
args = args[1:]
|
||||
except IndexError:
|
||||
return super(HTTPErrorType, self).__call__(*args, **kw)
|
||||
|
||||
self = self.get_error_class(status)
|
||||
return super(HTTPErrorType, self).__call__(*args, **kw)
|
||||
|
||||
@classmethod
|
||||
def _make_init(cls, status):
|
||||
def __init__(self, msg=None, reason=None, *args, **kw):
|
||||
super(self.__class__, self).__init__(status=status,
|
||||
reason=reason, msg=msg, *args, **kw)
|
||||
return __init__
|
||||
|
||||
get_error_class = HTTPErrorType.get_error_class
|
||||
|
||||
|
||||
class HTTPError(NetworkError):
|
||||
__metaclass__ = HTTPErrorType
|
||||
|
||||
def __init__(self, status, reason=None, msg=None, *args, **kw):
|
||||
status = int(status)
|
||||
if not reason:
|
||||
reason = STATUS_STRINGS.get(status, "Unknown Error")
|
||||
if not msg:
|
||||
msg = "HTTP Error %s - %s" % (status, reason)
|
||||
super(HTTPError, self).__init__(msg, status, reason, *args, **kw)
|
||||
|
||||
@property
|
||||
def message(self):
|
||||
return self.args[0]
|
||||
|
||||
@property
|
||||
def status(self):
|
||||
return self.args[1]
|
||||
|
||||
@property
|
||||
def reason(self):
|
||||
return self.args[2]
|
@ -1,54 +0,0 @@
|
||||
'''
|
||||
Created on 18.08.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from futile.logging import LoggerMixin
|
||||
from ssl import wrap_socket, SSLSocket, SSLError, CERT_OPTIONAL, CERT_NONE
|
||||
from socket import error
|
||||
from futile import NOT_SET
|
||||
|
||||
class HTTPSMixin(LoggerMixin):
|
||||
certfile = keyfile = ca_certs = None
|
||||
cert_reqs = CERT_NONE
|
||||
|
||||
def init_https(self, certfile, keyfile = None, ca_certs = None, cert_reqs = NOT_SET, secure = True):
|
||||
self.keyfile = keyfile
|
||||
self.certfile = certfile
|
||||
self.ca_certs = ca_certs
|
||||
if cert_reqs is NOT_SET:
|
||||
cert_reqs = ca_certs and CERT_OPTIONAL or CERT_NONE
|
||||
self.cert_reqs = cert_reqs
|
||||
if secure:
|
||||
self.enable_https()
|
||||
|
||||
def enable_https(self):
|
||||
if not self.secure:
|
||||
if not self.certfile:
|
||||
raise SSLError("Certificate info missing.")
|
||||
if self.cert_reqs != CERT_NONE and not self.ca_certs:
|
||||
raise SSLError("Certificate validation requested but no ca certs available.")
|
||||
self.logger.debug("Enabling https with certfile=%s kefile=%s ca_certs=%s cert_reqs=%s", self.certfile, self.keyfile, self.ca_certs, self.cert_reqs)
|
||||
self.socket = wrap_socket(self.socket, server_side = True, keyfile = self.keyfile, certfile = self.certfile, ca_certs = self.ca_certs, cert_reqs = self.cert_reqs)
|
||||
|
||||
def disable_https(self):
|
||||
if self.secure:
|
||||
self.socket = self.socket._sock
|
||||
|
||||
def get_request(self):
|
||||
try:
|
||||
return self.socket.accept()
|
||||
except error, e:
|
||||
self.logger.exception("Error during accept(): %s", e)
|
||||
raise
|
||||
|
||||
def is_secure(self):
|
||||
return isinstance(self.socket, SSLSocket)
|
||||
def set_secure(self, s):
|
||||
if s:
|
||||
self.enable_https()
|
||||
else:
|
||||
self.disable_https()
|
||||
return s
|
||||
secure = property(is_secure)
|
@ -1,19 +0,0 @@
|
||||
'''
|
||||
Created on 17.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from wsgiref.simple_server import WSGIRequestHandler, WSGIServer as _WSGIServer
|
||||
from SocketServer import ThreadingMixIn, ForkingMixIn
|
||||
|
||||
class WSGIServer(_WSGIServer):
|
||||
def __init__(self, server_address, app = None, RequestHandlerClass = WSGIRequestHandler):
|
||||
_WSGIServer.__init__(self, server_address, RequestHandlerClass)
|
||||
self.set_app(app)
|
||||
|
||||
class ThreadingWSGIServer(ThreadingMixIn, WSGIServer):
|
||||
pass
|
||||
|
||||
class ForkingWSGIServer(ForkingMixIn, WSGIServer):
|
||||
pass
|
@ -1,22 +0,0 @@
|
||||
'''
|
||||
Created on 22.08.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from ..ssl import HTTPSMixin
|
||||
from ..wsgi import WSGIServer
|
||||
from SocketServer import ThreadingMixIn, ForkingMixIn
|
||||
from wsgiref.simple_server import WSGIRequestHandler
|
||||
from futile import NOT_SET
|
||||
|
||||
class SecureWSGIServer(HTTPSMixin, WSGIServer):
|
||||
def __init__(self, server_address, certfile, keyfile = None, ca_certs = None, cert_reqs = NOT_SET, app = None, RequestHandlerClass = WSGIRequestHandler):
|
||||
WSGIServer.__init__(self, server_address, app = app, RequestHandlerClass = RequestHandlerClass)
|
||||
self.init_https(certfile, keyfile, ca_certs = ca_certs, cert_reqs = cert_reqs)
|
||||
|
||||
class SecureThreadingWSGIServer(ThreadingMixIn, SecureWSGIServer):
|
||||
pass
|
||||
|
||||
class SecureForkingWSGIServer(ForkingMixIn, SecureWSGIServer):
|
||||
pass
|
@ -1,13 +0,0 @@
|
||||
'''
|
||||
Created on 14.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from socket import socket as _socket, AF_INET, SOCK_STREAM
|
||||
from futile.contextlib import closing
|
||||
|
||||
def socket(family = AF_INET, type = SOCK_STREAM, proto = 0):
|
||||
return closing(_socket(family, type, proto))
|
||||
|
||||
|
@ -1,14 +0,0 @@
|
||||
'''
|
||||
Created on 21.01.2012
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from wsgiref.simple_server import WSGIServer
|
||||
from SocketServer import ThreadingMixIn, ForkingMixIn
|
||||
|
||||
class ThreadingWSGIServer(ThreadingMixIn, WSGIServer):
|
||||
pass
|
||||
|
||||
class ForkingWSGIServer(ForkingMixIn, WSGIServer):
|
||||
pass
|
@ -1,40 +0,0 @@
|
||||
'''
|
||||
Created on 17.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from futile import Base
|
||||
from SimpleXMLRPCServer import SimpleXMLRPCDispatcher
|
||||
|
||||
class WSGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher, Base):
|
||||
def __init__(self, encoding=None):
|
||||
SimpleXMLRPCDispatcher.__init__(self, allow_none = True, encoding = encoding)
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
if environ["REQUEST_METHOD"] != "POST":
|
||||
headers = [("Content-type", "text/html")]
|
||||
|
||||
if environ["REQUEST_METHOD"] == "HEAD":
|
||||
data = ""
|
||||
else:
|
||||
data = "<html><head><title>400 Bad request</title></head><body><h1>400 Bad request</h1></body></html>"
|
||||
headers.append(("Content-length", str(len(data))))
|
||||
start_response("400 Bad request", headers)
|
||||
return (data, )
|
||||
|
||||
l = int(environ["CONTENT_LENGTH"])
|
||||
request = environ["wsgi.input"].read(l)
|
||||
response = self._marshaled_dispatch(request)
|
||||
headers = [("Content-type", "text/xml"), ("Content-length", str(len(response)))]
|
||||
start_response("200 OK", headers)
|
||||
return (response, )
|
||||
|
||||
def _dispatch(self, *args, **kw):
|
||||
try:
|
||||
result = SimpleXMLRPCDispatcher._dispatch(self, *args, **kw)
|
||||
# self.logger.debug("Result: %s" % (result, ))
|
||||
return result
|
||||
except:
|
||||
self.logger.exception("Error while processing request")
|
||||
raise
|
@ -1,9 +0,0 @@
|
||||
from operator import attrgetter
|
||||
|
||||
def attrproperty(name):
|
||||
return property(attrgetter(name))
|
||||
|
||||
def resolve_attr(obj, attr):
|
||||
for name in attr.split("."):
|
||||
obj = getattr(obj, name)
|
||||
return obj
|
@ -1,6 +0,0 @@
|
||||
|
||||
|
||||
def get_fileobj(f):
|
||||
if not hasattr(f, "read"):
|
||||
return open(f)
|
||||
return f
|
@ -1,53 +0,0 @@
|
||||
'''
|
||||
Created on 24.01.2012
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from ..path import Path
|
||||
from ..subprocess import check_output
|
||||
|
||||
def umount(where, force = False):
|
||||
cmd = [ "umount", where ]
|
||||
if force:
|
||||
cmd.append("-f")
|
||||
check_output(cmd)
|
||||
unmount = umount
|
||||
|
||||
def mount(what, where, fstype = None, options = None):
|
||||
return Mount(what, where, fstype, options).mount()
|
||||
|
||||
class Mount(object):
|
||||
def __init__(self, what, where, fstype = None, options = None):
|
||||
self.what = Path(what)
|
||||
self.where = Path(where)
|
||||
self.fstype = fstype
|
||||
options = self.options = options and set(options) or set()
|
||||
if what.isfile():
|
||||
options.add("loop")
|
||||
elif not what.isblockdev():
|
||||
raise ValueError("Mount source must be a file or block device: %s" % (what, ))
|
||||
|
||||
def mount(self, fstype = None, options = None):
|
||||
cmd = [ "mount", self.what, self.where ]
|
||||
|
||||
fstype = fstype or self.fstype
|
||||
if fstype:
|
||||
cmd += [ "-t", self.fstype ]
|
||||
|
||||
opts = self.options
|
||||
if options:
|
||||
opts += set(self.options)
|
||||
if opts:
|
||||
cmd += [ "-o", ','.join(self.options) ]
|
||||
|
||||
check_output(cmd)
|
||||
return self
|
||||
__enter__ = mount
|
||||
|
||||
def umount(self, force = False):
|
||||
umount(self.where, force)
|
||||
unmount = umount
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.umount(True)
|
@ -1,865 +0,0 @@
|
||||
""" path.py - An object representing a path to a file or directory.
|
||||
|
||||
Example:
|
||||
|
||||
from path import path
|
||||
d = path('/home/guido/bin')
|
||||
for f in d.files('*.py'):
|
||||
f.chmod(0755)
|
||||
|
||||
This module requires Python 2.2 or later.
|
||||
|
||||
|
||||
URL: http://www.jorendorff.com/articles/python/path
|
||||
Author: Jason Orendorff <jason@jorendorff.com> (and others - see the url!)
|
||||
Date: 7 Mar 2004
|
||||
"""
|
||||
|
||||
# Note - this is an umodified version of Jason Orendorff's 'path' module.
|
||||
|
||||
# TODO
|
||||
# - Bug in write_text(). It doesn't support Universal newline mode.
|
||||
# - Better error message in listdir() when self isn't a
|
||||
# directory. (On Windows, the error message really sucks.)
|
||||
# - Make sure everything has a good docstring.
|
||||
# - Add methods for regex find and replace.
|
||||
# - guess_content_type() method?
|
||||
# - Perhaps support arguments to touch().
|
||||
# - Could add split() and join() methods that generate warnings.
|
||||
# - Note: __add__() technically has a bug, I think, where
|
||||
# it doesn't play nice with other types that implement
|
||||
# __radd__(). Test this.
|
||||
|
||||
from __future__ import generators
|
||||
|
||||
def quote(p):
|
||||
from urllib2 import quote
|
||||
return quote(p, "")
|
||||
|
||||
|
||||
import sys, os, fnmatch, glob, shutil, codecs
|
||||
|
||||
__version__ = '2.0.4'
|
||||
__all__ = ['path']
|
||||
|
||||
# Pre-2.3 support. Are unicode filenames supported?
|
||||
_base = str
|
||||
try:
|
||||
if os.path.supports_unicode_filenames:
|
||||
_base = unicode
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
# Pre-2.3 workaround for basestring.
|
||||
try:
|
||||
basestring
|
||||
except NameError:
|
||||
basestring = (str, unicode)
|
||||
|
||||
# Universal newline support
|
||||
_textmode = 'r'
|
||||
if hasattr(file, 'newlines'):
|
||||
_textmode = 'U'
|
||||
|
||||
|
||||
class path(_base):
|
||||
""" Represents a filesystem path.
|
||||
|
||||
For documentation on individual methods, consult their
|
||||
counterparts in os.path.
|
||||
"""
|
||||
|
||||
# --- Special Python methods.
|
||||
|
||||
def __repr__(self):
|
||||
return 'path(%s)' % _base.__repr__(self)
|
||||
|
||||
# Adding a path and a string yields a path.
|
||||
def __add__(self, more):
|
||||
return path(_base(self) + more)
|
||||
|
||||
def __radd__(self, other):
|
||||
return path(other + _base(self))
|
||||
|
||||
# The / operator joins paths.
|
||||
def __div__(self, rel):
|
||||
""" fp.__div__(rel) == fp / rel == fp.joinpath(rel)
|
||||
|
||||
Join two path components, adding a separator character if
|
||||
needed.
|
||||
"""
|
||||
return path(os.path.join(self, rel))
|
||||
|
||||
# Make the / operator work even when true division is enabled.
|
||||
__truediv__ = __div__
|
||||
|
||||
def getcwd():
|
||||
""" Return the current working directory as a path object. """
|
||||
return path(os.getcwd())
|
||||
getcwd = staticmethod(getcwd)
|
||||
|
||||
|
||||
# --- Operations on path strings.
|
||||
|
||||
def abspath(self): return path(os.path.abspath(self))
|
||||
def normcase(self): return path(os.path.normcase(self))
|
||||
def normpath(self): return path(os.path.normpath(self))
|
||||
def realpath(self): return path(os.path.realpath(self))
|
||||
def expanduser(self): return path(os.path.expanduser(self))
|
||||
def expandvars(self): return path(os.path.expandvars(self))
|
||||
def dirname(self): return path(os.path.dirname(self))
|
||||
basename = os.path.basename
|
||||
|
||||
def expand(self):
|
||||
""" Clean up a filename by calling expandvars(),
|
||||
expanduser(), and normpath() on it.
|
||||
|
||||
This is commonly everything needed to clean up a filename
|
||||
read from a configuration file, for example.
|
||||
"""
|
||||
return self.expandvars().expanduser().normpath()
|
||||
|
||||
def _get_namebase(self):
|
||||
base, _ext = os.path.splitext(self.name)
|
||||
return base
|
||||
|
||||
def _get_ext(self):
|
||||
_f, ext = os.path.splitext(_base(self))
|
||||
return ext
|
||||
|
||||
def _get_drive(self):
|
||||
drive, _r = os.path.splitdrive(self)
|
||||
return path(drive)
|
||||
|
||||
parent = property(
|
||||
dirname, None, None,
|
||||
""" This path's parent directory, as a new path object.
|
||||
|
||||
For example, path('/usr/local/lib/libpython.so').parent == path('/usr/local/lib')
|
||||
""")
|
||||
|
||||
name = property(
|
||||
basename, None, None,
|
||||
""" The name of this file or directory without the full path.
|
||||
|
||||
For example, path('/usr/local/lib/libpython.so').name == 'libpython.so'
|
||||
""")
|
||||
|
||||
namebase = property(
|
||||
_get_namebase, None, None,
|
||||
""" The same as path.name, but with one file extension stripped off.
|
||||
|
||||
For example, path('/home/guido/python.tar.gz').name == 'python.tar.gz',
|
||||
but path('/home/guido/python.tar.gz').namebase == 'python.tar'
|
||||
""")
|
||||
|
||||
ext = property(
|
||||
_get_ext, None, None,
|
||||
""" The file extension, for example '.py'. """)
|
||||
|
||||
drive = property(
|
||||
_get_drive, None, None,
|
||||
""" The drive specifier, for example 'C:'.
|
||||
This is always empty on systems that don't use drive specifiers.
|
||||
""")
|
||||
|
||||
def splitpath(self):
|
||||
""" p.splitpath() -> Return (p.parent, p.name). """
|
||||
parent, child = os.path.split(self)
|
||||
return path(parent), child
|
||||
|
||||
def splitdrive(self):
|
||||
""" p.splitdrive() -> Return (p.drive, <the rest of p>).
|
||||
|
||||
Split the drive specifier from this path. If there is
|
||||
no drive specifier, p.drive is empty, so the return value
|
||||
is simply (path(''), p). This is always the case on Unix.
|
||||
"""
|
||||
drive, rel = os.path.splitdrive(self)
|
||||
return path(drive), rel
|
||||
|
||||
def splitext(self):
|
||||
""" p.splitext() -> Return (p.stripext(), p.ext).
|
||||
|
||||
Split the filename extension from this path and return
|
||||
the two parts. Either part may be empty.
|
||||
|
||||
The extension is everything from '.' to the end of the
|
||||
last path segment. This has the property that if
|
||||
(a, b) == p.splitext(), then a + b == p.
|
||||
"""
|
||||
filename, ext = os.path.splitext(self)
|
||||
return path(filename), ext
|
||||
|
||||
def stripext(self):
|
||||
""" p.stripext() -> Remove one file extension from the path.
|
||||
|
||||
For example, path('/home/guido/python.tar.gz').stripext()
|
||||
returns path('/home/guido/python.tar').
|
||||
"""
|
||||
return self.splitext()[0]
|
||||
|
||||
if hasattr(os.path, 'splitunc'):
|
||||
def splitunc(self):
|
||||
unc, rest = os.path.splitunc(self)
|
||||
return path(unc), rest
|
||||
|
||||
def _get_uncshare(self):
|
||||
unc, r = os.path.splitunc(self)
|
||||
return path(unc)
|
||||
|
||||
uncshare = property(
|
||||
_get_uncshare, None, None,
|
||||
""" The UNC mount point for this path.
|
||||
This is empty for paths on local drives. """)
|
||||
|
||||
def joinpath(self, *args):
|
||||
""" Join two or more path components, adding a separator
|
||||
character (os.sep) if needed. Returns a new path
|
||||
object.
|
||||
"""
|
||||
return path(os.path.join(self, *args))
|
||||
|
||||
def splitall(self):
|
||||
""" Return a list of the path components in this path.
|
||||
|
||||
The first item in the list will be a path. Its value will be
|
||||
either os.curdir, os.pardir, empty, or the root directory of
|
||||
this path (for example, '/' or 'C:\\'). The other items in
|
||||
the list will be strings.
|
||||
|
||||
path.path.joinpath(*result) will yield the original path.
|
||||
"""
|
||||
parts = []
|
||||
loc = self
|
||||
while loc != os.curdir and loc != os.pardir:
|
||||
prev = loc
|
||||
loc, child = prev.splitpath()
|
||||
if loc == prev:
|
||||
break
|
||||
parts.append(child)
|
||||
parts.append(loc)
|
||||
parts.reverse()
|
||||
return parts
|
||||
|
||||
def relpath(self):
|
||||
""" Return this path as a relative path,
|
||||
based from the current working directory.
|
||||
"""
|
||||
cwd = path(os.getcwd())
|
||||
return cwd.relpathto(self)
|
||||
|
||||
def relpathto(self, dest):
|
||||
""" Return a relative path from self to dest.
|
||||
|
||||
If there is no relative path from self to dest, for example if
|
||||
they reside on different drives in Windows, then this returns
|
||||
dest.abspath().
|
||||
"""
|
||||
origin = self.abspath()
|
||||
dest = path(dest).abspath()
|
||||
|
||||
orig_list = origin.normcase().splitall()
|
||||
# Don't normcase dest! We want to preserve the case.
|
||||
dest_list = dest.splitall()
|
||||
|
||||
if orig_list[0] != os.path.normcase(dest_list[0]):
|
||||
# Can't get here from there.
|
||||
return dest
|
||||
|
||||
# Find the location where the two paths start to differ.
|
||||
i = 0
|
||||
for start_seg, dest_seg in zip(orig_list, dest_list):
|
||||
if start_seg != os.path.normcase(dest_seg):
|
||||
break
|
||||
i += 1
|
||||
|
||||
# Now i is the point where the two paths diverge.
|
||||
# Need a certain number of "os.pardir"s to work up
|
||||
# from the origin to the point of divergence.
|
||||
segments = [os.pardir] * (len(orig_list) - i)
|
||||
# Need to add the diverging part of dest_list.
|
||||
segments += dest_list[i:]
|
||||
if len(segments) == 0:
|
||||
# If they happen to be identical, use os.curdir.
|
||||
return path(os.curdir)
|
||||
else:
|
||||
return path(os.path.join(*segments))
|
||||
|
||||
|
||||
# --- Listing, searching, walking, and matching
|
||||
|
||||
def listdir(self, pattern=None):
|
||||
""" D.listdir() -> List of items in this directory.
|
||||
|
||||
Use D.files() or D.dirs() instead if you want a listing
|
||||
of just files or just subdirectories.
|
||||
|
||||
The elements of the list are path objects.
|
||||
|
||||
With the optional 'pattern' argument, this only lists
|
||||
items whose names match the given pattern.
|
||||
"""
|
||||
names = os.listdir(self)
|
||||
if pattern is not None:
|
||||
names = fnmatch.filter(names, pattern)
|
||||
return [self / child for child in names]
|
||||
|
||||
def dirs(self, pattern=None):
|
||||
""" D.dirs() -> List of this directory's subdirectories.
|
||||
|
||||
The elements of the list are path objects.
|
||||
This does not walk recursively into subdirectories
|
||||
(but see path.walkdirs).
|
||||
|
||||
With the optional 'pattern' argument, this only lists
|
||||
directories whose names match the given pattern. For
|
||||
example, d.dirs('build-*').
|
||||
"""
|
||||
return [p for p in self.listdir(pattern) if p.isdir()]
|
||||
|
||||
def devs(self, pattern = None):
|
||||
return [p for p in self.listdir(pattern) if p.isdev()]
|
||||
|
||||
def blockdevs(self, pattern = None):
|
||||
return [p for p in self.listdir(pattern) if p.isblockdev()]
|
||||
|
||||
def chardevs(self, pattern = None):
|
||||
return [p for p in self.listdir(pattern) if p.ischardev()]
|
||||
|
||||
def files(self, pattern=None):
|
||||
""" D.files() -> List of the files in this directory.
|
||||
|
||||
The elements of the list are path objects.
|
||||
This does not walk into subdirectories (see path.walkfiles).
|
||||
|
||||
With the optional 'pattern' argument, this only lists files
|
||||
whose names match the given pattern. For example,
|
||||
d.files('*.pyc').
|
||||
"""
|
||||
|
||||
return [p for p in self.listdir(pattern) if p.isfile()]
|
||||
|
||||
def walk(self, pattern=None):
|
||||
""" D.walk() -> iterator over files and subdirs, recursively.
|
||||
|
||||
The iterator yields path objects naming each child item of
|
||||
this directory and its descendants. This requires that
|
||||
D.isdir().
|
||||
|
||||
This performs a depth-first traversal of the directory tree.
|
||||
Each directory is returned just before all its children.
|
||||
"""
|
||||
for child in self.listdir():
|
||||
if pattern is None or child.fnmatch(pattern):
|
||||
yield child
|
||||
if child.isdir():
|
||||
for item in child.walk(pattern):
|
||||
yield item
|
||||
|
||||
def walkdirs(self, pattern=None):
|
||||
""" D.walkdirs() -> iterator over subdirs, recursively.
|
||||
|
||||
With the optional 'pattern' argument, this yields only
|
||||
directories whose names match the given pattern. For
|
||||
example, mydir.walkdirs('*test') yields only directories
|
||||
with names ending in 'test'.
|
||||
"""
|
||||
for child in self.dirs():
|
||||
if pattern is None or child.fnmatch(pattern):
|
||||
yield child
|
||||
for subsubdir in child.walkdirs(pattern):
|
||||
yield subsubdir
|
||||
|
||||
def walkfiles(self, pattern=None):
|
||||
""" D.walkfiles() -> iterator over files in D, recursively.
|
||||
|
||||
The optional argument, pattern, limits the results to files
|
||||
with names that match the pattern. For example,
|
||||
mydir.walkfiles('*.tmp') yields only files with the .tmp
|
||||
extension.
|
||||
"""
|
||||
for child in self.listdir():
|
||||
if child.isfile():
|
||||
if pattern is None or child.fnmatch(pattern):
|
||||
yield child
|
||||
elif child.isdir():
|
||||
for f in child.walkfiles(pattern):
|
||||
yield f
|
||||
|
||||
    def fnmatch(self, pattern):
        """ Return True if self.name matches the given pattern.

        pattern - A filename pattern with wildcards,
            for example '*.py'.
        """
        # The method name does not shadow the module here: inside the body,
        # 'fnmatch' still resolves to the imported fnmatch module.
        return fnmatch.fnmatch(self.name, pattern)
|
||||
|
||||
    def glob(self, pattern):
        """ Return a list of path objects that match the pattern.

        pattern - a path relative to this directory, with wildcards.

        For example, path('/users').glob('*/bin/*') returns a list
        of all the files users have in their bin directories.
        """
        # _base is defined elsewhere in this file; presumably it unwraps
        # the path object to a plain string for the glob module -- confirm.
        return map(path, glob.glob(_base(self / pattern)))
|
||||
|
||||
|
||||
# --- Reading or writing an entire file at once.
|
||||
|
||||
    def open(self, mode='r'):
        """ Open this file.  Return a file object. """
        # Python 2: 'file' is the builtin file type.
        return file(self, mode)
|
||||
|
||||
def bytes(self):
|
||||
""" Open this file, read all bytes, return them as a string. """
|
||||
f = self.open('rb')
|
||||
try:
|
||||
return f.read()
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
def write_bytes(self, bytes, append=False):
|
||||
""" Open this file and write the given bytes to it.
|
||||
|
||||
Default behavior is to overwrite any existing file.
|
||||
Call this with write_bytes(bytes, append=True) to append instead.
|
||||
"""
|
||||
if append:
|
||||
mode = 'ab'
|
||||
else:
|
||||
mode = 'wb'
|
||||
f = self.open(mode)
|
||||
try:
|
||||
f.write(bytes)
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
    def text(self, encoding=None, errors='strict'):
        """ Open this file, read it in, return the content as a string.

        This uses 'U' mode in Python 2.3 and later, so '\r\n' and '\r'
        are automatically translated to '\n'.

        Optional arguments:

        encoding - The Unicode encoding (or character set) of
            the file.  If present, the content of the file is
            decoded and returned as a unicode object; otherwise
            it is returned as an 8-bit str.
        errors - How to handle Unicode errors; see help(str.decode)
            for the options.  Default is 'strict'.
        """
        if encoding is None:
            # 8-bit: universal-newline text mode already folds platform
            # line endings into '\n'.
            f = self.open(_textmode)
            try:
                return f.read()
            finally:
                f.close()
        else:
            # Unicode
            f = codecs.open(self, 'r', encoding, errors)
            # (Note - Can't use 'U' mode here, since codecs.open
            # doesn't support 'U' mode, even in Python 2.3.)
            try:
                t = f.read()
            finally:
                f.close()
            # Normalize all Unicode line endings -- including NEL (U+0085)
            # and LINE SEPARATOR (U+2028) -- to '\n' by hand.
            return (t.replace(u'\r\n', u'\n')
                     .replace(u'\r\x85', u'\n')
                     .replace(u'\r', u'\n')
                     .replace(u'\x85', u'\n')
                     .replace(u'\u2028', u'\n'))
|
||||
|
||||
    def write_text(self, text, encoding=None, errors='strict', linesep=os.linesep, append=False):
        """ Write the given text to this file.

        The default behavior is to overwrite any existing file;
        to append instead, use the 'append=True' keyword argument.

        There are two differences between path.write_text() and
        path.write_bytes(): newline handling and Unicode handling.
        See below.

        Parameters:

          - text - str/unicode - The text to be written.

          - encoding - str - The Unicode encoding that will be used.
            This is ignored if 'text' isn't a Unicode string.

          - errors - str - How to handle Unicode encoding errors.
            Default is 'strict'.  See help(unicode.encode) for the
            options.  This is ignored if 'text' isn't a Unicode
            string.

          - linesep - keyword argument - str/unicode - The sequence of
            characters to be used to mark end-of-line.  The default is
            os.linesep.  You can also specify None; this means to
            leave all newlines as they are in 'text'.

          - append - keyword argument - bool - Specifies what to do if
            the file already exists (True: append to the end of it;
            False: overwrite it.)  The default is False.

        --- Newline handling.

        write_text() converts all standard end-of-line sequences
        ('\n', '\r', and '\r\n') to your platform's default end-of-line
        sequence (see os.linesep; on Windows, for example, the
        end-of-line marker is '\r\n').

        If you don't like your platform's default, you can override it
        using the 'linesep=' keyword argument.  If you specifically want
        write_text() to preserve the newlines as-is, use 'linesep=None'.

        This applies to Unicode text the same as to 8-bit text, except
        there are three additional standard Unicode end-of-line sequences:
        u'\x85', u'\r\x85', and u'\u2028'.

        (This is slightly different from when you open a file for
        writing with fopen(filename, "w") in C or file(filename, 'w')
        in Python.)

        --- Unicode

        If 'text' isn't Unicode, then apart from newline handling, the
        bytes are written verbatim to the file.  The 'encoding' and
        'errors' arguments are not used and must be omitted.

        If 'text' is Unicode, it is first converted to bytes using the
        specified 'encoding' (or the default encoding if 'encoding'
        isn't specified).  The 'errors' argument applies only to this
        conversion.
        """
        if isinstance(text, unicode):
            if linesep is not None:
                # Convert all standard end-of-line sequences to
                # ordinary newline characters.
                text = (text.replace(u'\r\n', u'\n')
                            .replace(u'\r\x85', u'\n')
                            .replace(u'\r', u'\n')
                            .replace(u'\x85', u'\n')
                            .replace(u'\u2028', u'\n'))
                text = text.replace(u'\n', linesep)
            if encoding is None:
                encoding = sys.getdefaultencoding()
            bytes = text.encode(encoding, errors)
        else:
            # It is an error to specify an encoding if 'text' is
            # an 8-bit string.
            assert encoding is None

            if linesep is not None:
                text = (text.replace('\r\n', '\n')
                            .replace('\r', '\n'))
                # NOTE(review): when linesep is None on this 8-bit branch,
                # 'bytes' is never assigned and the write below raises
                # NameError -- confirm whether that case can occur.
                bytes = text.replace('\n', linesep)

        self.write_bytes(bytes, append)
|
||||
|
||||
    def lines(self, encoding=None, errors='strict', retain=True):
        """ Open this file, read all lines, return them in a list.

        Optional arguments:
        encoding - The Unicode encoding (or character set) of
            the file.  The default is None, meaning the content
            of the file is read as 8-bit characters and returned
            as a list of (non-Unicode) str objects.
        errors - How to handle Unicode errors; see help(str.decode)
            for the options.  Default is 'strict'
        retain - If true, retain newline characters; but all newline
            character combinations ('\r', '\n', '\r\n') are
            translated to '\n'.  If false, newline characters are
            stripped off.  Default is True.

        This uses 'U' mode in Python 2.3 and later.
        """
        if encoding is None and retain:
            # Fast path: universal-newline text mode already yields '\n'
            # endings, so readlines() is sufficient.
            f = self.open(_textmode)
            try:
                return f.readlines()
            finally:
                f.close()
        else:
            # Decode and/or strip newlines via text() + splitlines().
            return self.text(encoding, errors).splitlines(retain)
|
||||
|
||||
    def write_lines(self, lines, encoding=None, errors='strict',
                    linesep=os.linesep, append=False):
        """ Write the given lines of text to this file.

        By default this overwrites any existing file at this path.

        This puts a platform-specific newline sequence on every line.
        See 'linesep' below.

        lines - A list of strings.

        encoding - A Unicode encoding to use.  This applies only if
            'lines' contains any Unicode strings.

        errors - How to handle errors in Unicode encoding.  This
            also applies only to Unicode strings.

        linesep - The desired line-ending.  This line-ending is
            applied to every line.  If a line already has any
            standard line ending ('\r', '\n', '\r\n', u'\x85',
            u'\r\x85', u'\u2028'), that will be stripped off and
            this will be used instead.  The default is os.linesep,
            which is platform-dependent ('\r\n' on Windows, '\n' on
            Unix, etc.)  Specify None to write the lines as-is,
            like file.writelines().

        Use the keyword argument append=True to append lines to the
        file.  The default is to overwrite the file.  Warning:
        When you use this with Unicode data, if the encoding of the
        existing data in the file is different from the encoding
        you specify with the encoding= parameter, the result is
        mixed-encoding data, which can really confuse someone trying
        to read the file later.
        """
        if append:
            mode = 'ab'
        else:
            mode = 'wb'
        f = self.open(mode)
        try:
            for line in lines:
                isUnicode = isinstance(line, unicode)
                if linesep is not None:
                    # Strip off any existing line-end and add the
                    # specified linesep string.
                    if isUnicode:
                        # u'\x0d\x85' is CR + NEL, the two-char Unicode ending.
                        if line[-2:] in (u'\r\n', u'\x0d\x85'):
                            line = line[:-2]
                        elif line[-1:] in (u'\r', u'\n',
                                           u'\x85', u'\u2028'):
                            line = line[:-1]
                    else:
                        if line[-2:] == '\r\n':
                            line = line[:-2]
                        elif line[-1:] in ('\r', '\n'):
                            line = line[:-1]
                    line += linesep
                if isUnicode:
                    if encoding is None:
                        # Once resolved, the default encoding sticks for the
                        # remaining lines of this call.
                        encoding = sys.getdefaultencoding()
                    line = line.encode(encoding, errors)
                f.write(line)
        finally:
            f.close()
|
||||
|
||||
|
||||
# --- Methods for querying the filesystem.
|
||||
|
||||
    # Thin delegations to os.path predicates; each takes the path object
    # itself (a string subclass) as its single argument.
    exists = os.path.exists
    isabs = os.path.isabs
    isdir = os.path.isdir
    isfile = os.path.isfile
    islink = os.path.islink
    ismount = os.path.ismount

    if hasattr(os.path, 'samefile'):
        # Not available on all platforms.
        samefile = os.path.samefile

    getatime = os.path.getatime
    atime = property(
        getatime, None, None,
        """ Last access time of the file. """)

    getmtime = os.path.getmtime
    mtime = property(
        getmtime, None, None,
        """ Last-modified time of the file. """)

    if hasattr(os.path, 'getctime'):
        # Creation time is platform-dependent and may be absent.
        getctime = os.path.getctime
        ctime = property(
            getctime, None, None,
            """ Creation time of the file. """)

    getsize = os.path.getsize
    size = property(
        getsize, None, None,
        """ Size of the file, in bytes. """)
|
||||
|
||||
    def isdev(self):
        """ Return True if this path is a block or character device node. """
        from stat import S_ISBLK, S_ISCHR
        # __st_mode() returns 0 for nonexistent paths, so this is False then.
        mode = self.__st_mode()
        return S_ISBLK(mode) or S_ISCHR(mode)
|
||||
|
||||
def __st_mode(self):
|
||||
try:
|
||||
return self.stat().st_mode
|
||||
except OSError as e:
|
||||
if e.errno != 2:
|
||||
raise
|
||||
return 0
|
||||
|
||||
    def ischardev(self):
        """ Return True if this path is a character device node. """
        from stat import S_ISCHR
        return S_ISCHR(self.__st_mode())
|
||||
|
||||
    def isblockdev(self):
        """ Return True if this path is a block device node. """
        from stat import S_ISBLK
        return S_ISBLK(self.__st_mode())
|
||||
|
||||
    if hasattr(os, 'access'):
        def access(self, mode):
            """ Return true if current user has access to this path.

            mode - One of the constants os.F_OK, os.R_OK, os.W_OK, os.X_OK
            """
            return os.access(self, mode)
|
||||
|
||||
    def stat(self):
        """ Perform a stat() system call on this path. """
        return os.stat(self)
|
||||
|
||||
    def lstat(self):
        """ Like path.stat(), but do not follow symbolic links. """
        return os.lstat(self)
|
||||
|
||||
    if hasattr(os, 'statvfs'):
        def statvfs(self):
            """ Perform a statvfs() system call on this path. """
            return os.statvfs(self)
|
||||
|
||||
    if hasattr(os, 'pathconf'):
        def pathconf(self, name):
            """ Return the os.pathconf() configuration value 'name'. """
            return os.pathconf(self, name)
|
||||
|
||||
|
||||
# --- Modifying operations on files and directories
|
||||
|
||||
    def utime(self, times):
        """ Set the access and modified times of this file.

        times - (atime, mtime) tuple, or None for the current time.
        """
        os.utime(self, times)
|
||||
|
||||
    def chmod(self, mode):
        """ Change the permission bits of this path to 'mode'. """
        os.chmod(self, mode)
|
||||
|
||||
    if hasattr(os, 'chown'):
        def chown(self, uid, gid):
            """ Change the owner (uid) and group (gid) of this path. """
            os.chown(self, uid, gid)
|
||||
|
||||
    def rename(self, new):
        """ Rename this path to 'new' (see os.rename). """
        os.rename(self, new)
|
||||
|
||||
    def renames(self, new):
        """ Rename, creating/pruning intermediate directories (os.renames). """
        os.renames(self, new)
|
||||
# --- Create/delete operations on directories
|
||||
|
||||
    def mkdir(self, mode=0750):
        """ Create this directory with the given permission bits. """
        os.mkdir(self, mode)
|
||||
|
||||
    def makedirs(self, mode=0750):
        """ Create this directory and any missing parents with 'mode'. """
        os.makedirs(self, mode)
|
||||
|
||||
    def rmdir(self):
        """ Remove this (empty) directory. """
        os.rmdir(self)
|
||||
|
||||
    def removedirs(self):
        """ Remove this directory and any empty parents (os.removedirs). """
        os.removedirs(self)
|
||||
|
||||
|
||||
# --- Modifying operations on files
|
||||
|
||||
    def touch(self, mode = 0640):
        """ Set the access/modified times of this file to the current time.
        Create the file if it does not exist.

        mode - permission bits, applied only when the file is created.
        """
        fd = os.open(self, os.O_WRONLY | os.O_CREAT, mode)
        os.close(fd)
        os.utime(self, None)
|
||||
|
||||
    def remove(self):
        """ Delete this file (os.remove). """
        os.remove(self)
|
||||
|
||||
    def unlink(self):
        """ Delete this file (os.unlink; identical to remove()). """
        os.unlink(self)
|
||||
|
||||
|
||||
# --- Links
|
||||
|
||||
    if hasattr(os, 'link'):
        def link(self, newpath):
            """ Create a hard link at 'newpath', pointing to this file. """
            os.link(self, newpath)
|
||||
|
||||
    if hasattr(os, 'symlink'):
        def symlink(self, newlink):
            """ Create a symbolic link at 'newlink', pointing here. """
            os.symlink(self, newlink)
|
||||
|
||||
    if hasattr(os, 'readlink'):
        def readlink(self):
            """ Return the path to which this symbolic link points.

            The result may be an absolute or a relative path.
            """
            return path(os.readlink(self))
|
||||
|
||||
        def readlinkabs(self):
            """ Return the path to which this symbolic link points.

            The result is always an absolute path.
            """
            p = self.readlink()
            if p.isabs():
                return p
            else:
                # Relative link targets are resolved against this link's
                # containing directory.
                return (self.parent / p).abspath()
|
||||
|
||||
    def checkdir(self):
        """ Raise an Exception unless this path is an existing directory. """
        if not self.isdir():
            raise Exception("Not a directory: '%s'" % (self, ))
|
||||
|
||||
    def checkfile(self):
        """ Raise an Exception unless this path is an existing regular file. """
        if not self.isfile():
            raise Exception("Not a file: '%s'" % (self, ))
|
||||
|
||||
    def forcedir(self, mode = 0750):
        """ Ensure this path is a directory.

        Creates it (and any missing parents) with 'mode' if it does not
        exist; raises if the path exists but is not a directory.
        """
        if not self.isdir():
            if self.exists():
                raise Exception("Not a directory: '%s'" % (self, ))
            self.makedirs(mode)
|
||||
|
||||
def forcefile(self, mode = 0640):
|
||||
if not self.exists():
|
||||
return self.touch(mode = 0640)
|
||||
if not self.isfile():
|
||||
raise Exception("Not a file: %s" % (self ,))
|
||||
|
||||
    # --- High-level functions from shutil

    # Delegations to shutil; each takes this path as its first argument.
    copyfile = shutil.copyfile
    copymode = shutil.copymode
    copystat = shutil.copystat
    copy = shutil.copy
    copy2 = shutil.copy2
    copytree = shutil.copytree
    if hasattr(shutil, 'move'):
        # shutil.move is not present in very old Python versions.
        move = shutil.move
|
||||
|
||||
    def rmtree(self):
        """ Remove this path: recursively if it is a directory,
        by unlink otherwise. """
        if self.isdir():
            return shutil.rmtree(self)
        self.unlink()
|
||||
|
||||
    # Re-export the module-level 'quote' helper as a class attribute.
    quote = quote

    # --- Special stuff from os

    if hasattr(os, 'chroot'):
        def chroot(self):
            """ Change this process's root directory to this path. """
            os.chroot(self)

    if hasattr(os, 'startfile'):
        # Windows only: open the file with its associated application.
        startfile = os.startfile

# Convenience alias for the class defined above.
Path = path
|
@ -1,16 +0,0 @@
|
||||
from time import time
|
||||
|
||||
def timeit(f):
    """Decorator that records call count and accumulated wall-clock time.

    The wrapper exposes three attributes:
      __runs__       - number of calls made (counted even if f raises)
      __total_time__ - total seconds spent inside f
      __last_time__  - duration of the most recent call (None before any)
    """
    def _timeit(*args, **kw):
        _timeit.__runs__ += 1
        started = time()
        try:
            return f(*args, **kw)
        finally:
            elapsed = time() - started
            _timeit.__last_time__ = elapsed
            _timeit.__total_time__ += elapsed

    _timeit.__runs__ = 0
    _timeit.__total_time__ = 0.0
    _timeit.__last_time__ = None
    _timeit.__name__ = f.__name__
    return _timeit
|
@ -1,8 +0,0 @@
|
||||
'''
|
||||
Created on 24.09.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
class ParseError(Exception):
    """Raised when input handed to a serializer cannot be parsed."""
    pass
|
@ -1,51 +0,0 @@
|
||||
'''
|
||||
Created on 28.08.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from ..logging import LoggerMixin
|
||||
from logging import DEBUG
|
||||
from ..etree.impl import ElementTree, XML, ParseError as XMLParseError, XMLSyntaxError, tostring
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from futile.serializer.exc import ParseError
|
||||
|
||||
class AbstractXMLSerializer(LoggerMixin):
    """Base class for XML (de)serializers.

    Subclasses implement _parse_input() to build objects from a parsed
    element tree and _dump_object() to turn objects back into elements.
    """
    __metaclass__ = ABCMeta

    def load(self, input):
        """Parse 'input' and return the deserialized object."""
        if self.logger.isEnabledFor(DEBUG):
            # At debug level, slurp the stream so it can be logged, then
            # re-wrap it for parsing.  NOTE(review): this assumes 'input'
            # is file-like; a plain string would fail on .read() -- confirm.
            from cStringIO import StringIO
            input = input.read()
            self.logger.debug("Parsing input: %s", input)
            input = StringIO(input)
        root = self._load(input)
        return self._parse_input(root)

    def _load(self, input):
        """Parse strings with XML(), anything else as a stream.

        Any parser failure is reported via _handle_parse_error() and then
        re-raised wrapped in a futile ParseError.
        """
        try:
            if isinstance(input, str):
                return XML(input)
            else:
                return ElementTree().parse(input)
        except Exception, e:
            self._handle_parse_error(e)
            raise ParseError(e)

    def _handle_parse_error(self, e):
        # Hook for subclasses; the default just logs the failure.
        self.logger.exception("Error parsing input: %s", e)

    @abstractmethod
    def _parse_input(self, root):
        """Convert the parsed element tree root into an object."""
        raise NotImplementedError()

    def dump(self, o, pretty_print = True):
        # Serializing to a stream is not supported by this base class.
        raise NotImplementedError()

    def dumps(self, o, pretty_print = True):
        """Serialize 'o' and return the XML as a string."""
        xml = self._dump_object(o)
        return tostring(xml, pretty_print = pretty_print)

    @abstractmethod
    def _dump_object(self, o):
        """Convert 'o' into an element tree for serialization."""
        raise NotImplementedError()
|
@ -1 +0,0 @@
|
||||
from timeout import timeout, Timeout
|
@ -1,29 +0,0 @@
|
||||
'''
|
||||
Created on 20.05.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from signal import signal, SIGALRM, alarm
|
||||
from contextlib import contextmanager
|
||||
from futile import noop
|
||||
|
||||
|
||||
@contextmanager
def timeout(seconds):
    """Context manager arming a SIGALRM after 'seconds' seconds.

    A falsy 'seconds' disables the timeout entirely.  The alarm is
    cancelled and the previous SIGALRM handler restored on exit.
    Only usable in the main thread (signal module restriction).

    NOTE(review): the installed handler is futile.noop, so the alarm
    interrupts blocking syscalls (EINTR) rather than raising an
    exception -- confirm this is the intended semantics.
    """
    if not seconds:
        yield
        return

    original_handler = signal(SIGALRM, noop)

    try:
        alarm(seconds)
        yield
    finally:
        alarm(0)
        signal(SIGALRM, original_handler)
|
||||
|
||||
|
||||
def Timeout(seconds):
    """Return a zero-argument factory that produces timeout(seconds)
    context managers."""
    return lambda: timeout(seconds)
|
@ -1,30 +0,0 @@
|
||||
'''
|
||||
Created on 23.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
from futile import Base
|
||||
from futile.logging import LoggerMixin
|
||||
|
||||
class SingletonType(type, LoggerMixin):
    """Metaclass offering one shared instance per class via get_instance()."""

    # Maps class -> its single shared instance.
    __instances = {}

    def get_instance(self):
        """Return the memoized instance of this class, creating it on
        first use (with no constructor arguments)."""
        try:
            i = self.__instances[self]
            self.logger.debug("Reusing singleton instance for %s.%s" % (self.__module__, self.__name__))
        except KeyError:
            self.logger.debug("Creating singleton instance for %s.%s" % (self.__module__, self.__name__))
            # Invoke the normal type.__call__ to construct the instance.
            i = super(SingletonType, self).__call__()
            self.__instances[self] = i
        return i
|
||||
|
||||
class ForcedSingletonType(SingletonType):
    """Metaclass that makes every instantiation return the shared instance.

    Note: constructor arguments are accepted but ignored.
    """
    def __call__(self, *args, **kw):
        return self.get_instance()
|
||||
|
||||
class Singleton(Base):
    # Instances may still be created normally; the shared instance is
    # available via type(obj).get_instance().
    __metaclass__ = SingletonType
|
||||
|
||||
class ForcedSingleton(Base):
    # Every ForcedSingleton() call yields the same shared instance.
    __metaclass__ = ForcedSingletonType
|
@ -1,23 +0,0 @@
|
||||
import string
|
||||
|
||||
# Characters allowed in an identifier after the first position: letters,
# digits and the underscore (Python 2 string.letters is locale-dependent).
letters_digits_underscore = string.letters + string.digits + "_"
|
||||
|
||||
|
||||
class InvalidIdentifier(ValueError):
    """Raised by check_identifier() for strings that are not identifiers."""
    pass
|
||||
|
||||
|
||||
def is_identifier(s):
    """Return True if s is a non-empty identifier: a letter followed by
    letters, digits or underscores.

    NOTE(review): unlike Python's own rules, a leading underscore is
    rejected -- confirm this restriction is intentional.
    """
    if not s or s[0] not in string.letters:
        return False

    for c in s:
        if c not in letters_digits_underscore:
            return False

    return True
|
||||
|
||||
|
||||
def check_identifier(s):
    """Raise InvalidIdentifier if s is not a valid identifier."""
    if not is_identifier(s):
        raise InvalidIdentifier(s)
|
@ -1,46 +0,0 @@
|
||||
'''
|
||||
Created on 17.07.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
import logging, sys
|
||||
from futile.logging import get_logger
|
||||
from subprocess import check_output as _check_output, check_call as _check_call, CalledProcessError, STDOUT, Popen
|
||||
|
||||
# Use the real subprocess exceptions where available (Python 3.3+);
# define compatible stand-ins on older versions.
# Bug fix: the module name was misspelled "subprocces", so the import
# always raised ImportError and the fallback classes were always used.
try:
    from subprocess import SubprocessError, TimeoutExpired
except ImportError:
    class SubprocessError(Exception):
        pass

    class TimeoutExpired(SubprocessError):
        pass
|
||||
|
||||
def _pre_call(args):
    """Log the command about to run and return it joined as one string."""
    # needed for chroot safety: importing here pre-loads the codec so it is
    # still available after chroot() hides the on-disk stdlib.
    import encodings.string_escape

    cmd = ' '.join(args)
    get_logger().debug("running %s" % (cmd, ))
    return cmd
|
||||
|
||||
|
||||
def check_output(args, stdin=None, stderr=STDOUT, shell=False, cwd=None, env=None, *popenargs, **popenkw):
    """subprocess.check_output wrapper that logs the command and, on
    failure, the failing program's output.

    Note: stderr defaults to STDOUT so error output is captured too.
    Re-raises CalledProcessError after logging.
    """
    cmd = _pre_call(args)

    try:
        return _check_output(args, stdin=stdin, stderr=stderr, shell=shell, cwd=cwd, env=env, *popenargs, **popenkw)
    except CalledProcessError as e:
        get_logger().debug("Command %s returned exit code %s. This is the programs output:\n%s<<EOF>>" % (cmd, e.returncode, e.output))
        raise
|
||||
|
||||
def check_call(args, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, env=None, *popenargs, **popenkw):
    """subprocess.check_call wrapper that logs the command and, on
    failure, its exit code.  Re-raises CalledProcessError after logging.
    """
    cmd = _pre_call(args)

    try:
        return _check_call(args, stdin=stdin, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd, env=env, *popenargs, **popenkw)
    except CalledProcessError as e:
        get_logger().debug("Command %s returned exit code %s." % (cmd, e.returncode))
        raise
|
||||
|
@ -1,165 +0,0 @@
|
||||
'''
|
||||
Created on 02.02.2012
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from time import sleep
|
||||
from abc import ABCMeta, abstractproperty, abstractmethod
|
||||
from futile import Base
|
||||
from futile.path import Path
|
||||
from . import check_call, STDOUT
|
||||
|
||||
class DaemonController(Base):
    """Abstract base for objects that start/stop a daemon process.

    Concrete subclasses implement _start(), _stop() and is_running.
    start()/stop() sleep for a configurable grace period afterwards to
    give the daemon time to come up / shut down.  Usable as a context
    manager: starts on enter, stops on exit.
    """
    __metaclass__ = ABCMeta

    def __init__(self, sleep = 5, stop_sleep = 3, *args, **kw):
        # sleep: seconds to wait after starting; stop_sleep: after stopping.
        super(DaemonController, self).__init__(*args, **kw)
        self.__sleep = int(sleep)
        self.__stop_sleep = int(stop_sleep)

    @abstractproperty
    def is_running(self):
        """True if the controlled daemon is currently running."""
        raise NotImplementedError()

    def start(self):
        """Start the daemon, then wait the configured grace period."""
        self._start()
        sleep(self.__sleep)

    @abstractmethod
    def _start(self):
        raise NotImplementedError()

    def stop(self):
        """Stop the daemon, then wait the configured grace period."""
        self._stop()
        sleep(self.__stop_sleep)

    @abstractmethod
    def _stop(self):
        raise NotImplementedError()

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.stop()
|
||||
|
||||
class DummyController(DaemonController):
    """A no-op controller: start/stop do nothing, is_running is always False."""

    def __init__(self, sleep = 0, stop_sleep = 0, *args, **kw):
        # Bug fix: super() was previously called without the instance
        # (super(DummyController).__init__(...)), so DaemonController.__init__
        # never ran and the zero sleep settings were silently dropped.
        super(DummyController, self).__init__(sleep = sleep, stop_sleep = stop_sleep, *args, **kw)

    def _start(self):
        pass
    # Stopping is equally a no-op.
    _stop = _start

    @property
    def is_running(self):
        return False
|
||||
|
||||
import os
|
||||
import errno
|
||||
|
||||
class CheckPIDFileController(DaemonController):
    """DaemonController that detects a running daemon via a pidfile.

    is_running reads the PID from the pidfile and probes the process
    with kill(pid, 0).
    """
    def __init__(self, pidfile, *args, **kw):
        super(CheckPIDFileController, self).__init__(*args, **kw)
        self.__pidfile = Path(pidfile)

    @property
    def pidfile(self):
        # Path of the file holding the daemon's PID.
        return self.__pidfile

    @property
    def is_running(self):
        """True if the pidfile names a live process.

        Raises if the pidfile path exists but is not a regular file, or
        if it cannot be read/parsed.
        """
        if not self.pidfile.exists():
            return False

        if not self.pidfile.isfile():
            raise Exception("pidfile '%s' is not a file" % (self.pidfile, ))

        try:
            # A PID fits comfortably within 16 characters.
            pid = int(self.__pidfile.open().readline(16))
        except:
            self.logger.exception("Error reading pidfile %s" % (self.pidfile))
            raise

        try:
            # Signal 0 checks existence/permission without sending anything.
            os.kill(pid, 0)
            return True
        except OSError, e:
            if e.errno == errno.ESRCH:
                # No such process -> stale pidfile.
                return False
            raise
|
||||
|
||||
class StartStopDaemonController(CheckPIDFileController):
    """Drive a daemon through the start-stop-daemon(8) utility.

    Builds ssd command lines for start (-S) and stop (-K), optionally
    forking (-b), creating the pidfile (-m) and exporting an
    LD_LIBRARY_PATH to the child.  Command output is appended to
    'outfile' (default /dev/null).
    """
    def __init__(self, executable, fork = False, workingdir = None, pidfile = None, makepidfile = False, daemonargs = None, ssd = "/sbin/start-stop-daemon", ldpath = None, outfile = "/dev/null", *args, **kw):
        if not pidfile:
            # Derive a unique default pidfile name from the executable path.
            pidfile = "/tmp/" + executable.replace("/", "_") + ".pid"
        super(StartStopDaemonController, self).__init__(pidfile = pidfile, *args, **kw)

        self.__executable = unicode(executable)
        self.__workingdir = workingdir and unicode(workingdir) or None

        if ldpath is not None:
            # Accept a single entry or any iterable; de-duplicate.
            if not isinstance(ldpath, (list, set, tuple, frozenset)):
                ldpath = [ ldpath ]
            ldpath = tuple(set(ldpath))
        self.__ldpath = ldpath

        self.__makepidfile = makepidfile
        self.__daemonargs = daemonargs
        self.__fork = fork
        self.__ssd = ssd
        self.__outfile = outfile

    def get_daemonargs(self):
        return self.__daemonargs
    def set_daemonargs(self, da):
        self.__daemonargs = da
    # Extra arguments handed to the daemon after '--'.
    daemonargs = property(get_daemonargs, set_daemonargs)

    def __make_cmd(self, cmd, test):
        """Assemble the base ssd argument list ('-S' or '-K') and the
        environment (LD_LIBRARY_PATH) for the call."""
        cmd = [ self.__ssd, cmd, '-x', self.__executable, '-p', self.pidfile, '-o' ]

        if self.__workingdir:
            cmd += [ '-d', self.__workingdir ]

        if test:
            # -t: only test what would be done.
            cmd.append('-t')

        env = None
        if self.__ldpath:
            env = dict(LD_LIBRARY_PATH = ':'.join(self.__ldpath))

        return cmd, env

    def __check_cmd(self, cmd, env):
        """Run the assembled ssd command, appending its output to outfile."""
        self.logger.debug("ssd env: " + str(env))

        outfile = self.__outfile
        if outfile:
            outfile = Path(outfile).open("a")

        try:
            check_call(cmd, stdout = outfile, stderr = STDOUT, close_fds = True, cwd = self.__workingdir, env = env)
        finally:
            # NOTE(review): if outfile was a falsy non-None value (e.g. ""),
            # this would call .close() on the raw value -- confirm outfile
            # is always None or a nonempty path string.
            if outfile is not None:
                outfile.close()

    def _start(self):
        """Start the daemon via 'start-stop-daemon -S'."""
        cmd, env = self.__make_cmd("-S", False)
        if self.__makepidfile:
            cmd.append('-m')

        if self.__fork:
            cmd.append('-b')

        if self.__daemonargs:
            cmd += [ '--' ] + list(self.__daemonargs)

        self.__check_cmd(cmd, env)

    def _stop(self):
        """Stop the daemon via 'start-stop-daemon -K'."""
        cmd, env = self.__make_cmd("-K", False)
        self.__check_cmd(cmd, env)
|
||||
|
@ -1,38 +0,0 @@
|
||||
from tempfile import mkdtemp as _mkdtemp
|
||||
from shutil import rmtree
|
||||
from .. import Base
|
||||
from futile import noop
|
||||
|
||||
class TempDir(Base):
    """A temporary directory that can clean itself up.

    Usable as a context manager.  'delete' controls removal on normal
    exit, 'delete_on_error' removal when the with-block raised; both
    default to True.  The directory is also removed on garbage
    collection (__del__).
    """
    # Class-level defaults; instances may override via __init__.
    delete_on_error = delete = True

    def __init__(self, suffix='', prefix='tmp', dir=None, delete = None, delete_on_error = None, *args, **kw):
        super(TempDir, self).__init__(*args, **kw)
        self.__name = _mkdtemp(suffix, prefix, dir)
        if delete is not None:
            self.delete = delete
        if delete_on_error is not None:
            self.delete_on_error = delete_on_error

    @property
    def name(self):
        # Absolute path of the created directory.
        return self.__name

    def rmtree(self):
        """Remove the directory tree; subsequent calls are no-ops."""
        rmtree(self.__name)
        # Replace with noop so repeated cleanup (e.g. __del__ running
        # after __exit__) does not fail on the missing directory.
        self.rmtree = noop

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.delete or (exc_type and self.delete_on_error):
            self.rmtree()

    def __del__(self):
        # Treat garbage collection like a clean context exit.
        self.__exit__(None, None, None)

    def __str__(self):
        return self.__name
|
||||
|
||||
# Alias mirroring tempfile.mkdtemp's name for the self-cleaning variant.
mkdtemp = TempDir
|
@ -1,106 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
'''
|
||||
Created on 01.04.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
#TODO: proper timeout handling
|
||||
from __future__ import with_statement
|
||||
|
||||
from threading import Lock, Event
|
||||
from contextlib import contextmanager
|
||||
|
||||
class Timeout(Exception):
    """Raised when a lock cannot be acquired within the given timeout."""
    pass
|
||||
|
||||
class ReverseSemaphore(object):
    """Counter that blocks waiters while it is non-zero.

    acquire() increments the counter and clears the internal event;
    release() decrements it and sets the event once the counter drops
    back to zero.  wait() blocks until the counter is zero.  Usable as
    a context manager.
    """

    def __init__(self, *args, **kw):
        super(ReverseSemaphore, self).__init__(*args, **kw)
        self.counter = 0
        self.lock = Lock()
        self.event = Event()
        self.event.set()

    def acquire(self):
        """Increment the counter; waiters block until it drains again."""
        with self.lock:
            self.counter += 1
            self.event.clear()

    def release(self):
        """Decrement the counter; wake waiters when it reaches zero.

        Extra releases are tolerated: the counter never stays below zero.
        """
        with self.lock:
            self.counter -= 1
            if self.counter == 0:
                self.event.set()
            elif self.counter < 0:
                self.counter = 0

    def wait(self):
        """Block until the counter is zero (returns the event's wait() result)."""
        return self.event.wait()

    def __enter__(self):
        self.acquire()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()
|
||||
|
||||
|
||||
class RWLock(object):
    """Reader/writer lock: many concurrent readers, one exclusive writer.

    A writer takes write_lock, blocks new readers by clearing
    write_event, then waits for active readers (tracked by a
    ReverseSemaphore) to drain.
    """

    def __init__(self, *args, **kw):
        super(RWLock, self).__init__(*args, **kw)
        self.write_lock = Lock()
        # Counts active readers; writers wait for it to drain to zero.
        self.read_lock = ReverseSemaphore()
        # Set while no writer is active; cleared to stall new readers.
        self.write_event = Event()
        self.write_event.set()

    @contextmanager
    def read_transaction(self, timeout = None):
        """Context manager wrapping read_acquire()/read_release()."""
        self.read_acquire(timeout = timeout)
        try:
            yield
        finally:
            self.read_release()

    @contextmanager
    def write_transaction(self, timeout = None):
        """Context manager wrapping write_acquire()/write_release()."""
        self.write_acquire(timeout = timeout)
        try:
            yield
        finally:
            self.write_release()

    def read_acquire(self, timeout = None):
        """Take a shared read slot; raise Timeout if a writer holds on
        past 'timeout' seconds."""
        self.write_event.wait(timeout = timeout)
        if not self.write_event.is_set():
            raise Timeout()
        self.read_lock.acquire()
        return True

    def read_release(self):
        """Give back a read slot taken with read_acquire()."""
        self.read_lock.release()

    def write_acquire(self, timeout = None):
        """Take exclusive write access.

        NOTE(review): 'timeout' is accepted but not honoured here; both
        the lock acquisition and the reader drain wait indefinitely.
        """
        self.write_lock.acquire()
        self.write_event.clear()
        self.read_lock.wait()

    def write_release(self):
        """Release exclusive write access and let readers proceed."""
        self.write_event.set()
        self.write_lock.release()
@ -1,18 +0,0 @@
|
||||
import sys
|
||||
|
||||
try:
|
||||
from threading import current_thread
|
||||
except ImportError:
|
||||
from threading import currentThread as current_thread
|
||||
|
||||
|
||||
if sys.version_info < (2, 7):
    from threading import _Event

    class Event(_Event):
        """Backport shim: before Python 2.7, Event.wait() returned None.

        This subclass makes wait() return the flag state, matching the
        2.7+ behaviour callers rely on (e.g. RWLock timeout checks).
        """
        def wait(self, timeout=None):
            # Bug fix: the original called super(_Event, self).wait(),
            # which skips _Event.wait entirely and fails with
            # AttributeError; super() must be bound to this subclass.
            super(Event, self).wait(timeout=timeout)
            return self.is_set()
else:
    from threading import Event
|
||||
|
||||
|
@ -1,28 +0,0 @@
|
||||
'''
|
||||
Created on 08.08.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
from threading import Condition
|
||||
|
||||
def synchronized(f):
    """Decorator that coalesces concurrent invocations of *f*.

    The first caller executes f; any caller arriving while a run is in
    progress blocks until that run finishes and then returns WITHOUT
    executing f itself (its return value is None).

    NOTE(review): done.wait() is not re-checked in a loop, and the
    waiting branch discards f's result -- confirm both are intentional
    for the intended "do it once for everybody" use case.
    """
    done = Condition()
    f.in_progress = False  # shared flag, guarded by `done`

    def sync(*args, **kw):
        done.acquire()
        if not f.in_progress:
            f.in_progress = True
            done.release()  # run f without holding the condition
            try:
                return f(*args, **kw)
            finally:
                # Clear the flag, then wake every coalesced waiter.
                f.in_progress = False
                with done:
                    done.notify_all()
        else:
            done.wait()
            assert(not f.in_progress)
            done.release()
    return sync
|
@ -1,19 +0,0 @@
|
||||
import sys
|
||||
from traceback import format_exception
|
||||
|
||||
def get_traceback(self=None, exc_info=None):
    """Format the active (or given) exception as a traceback string.

    self: unused; kept so the function can be grafted onto objects as a
          method.  Fix: it now defaults to None so the function is also
          callable stand-alone -- get_traceback() -- while existing
          bound/positional callers keep working.
    exc_info: optional (type, value, traceback) triple; defaults to
          sys.exc_info().
    """
    return ''.join(format_exception(*(exc_info or sys.exc_info())))
|
||||
|
||||
|
||||
def current_stack(skip=0):
    """Return the call stack as a list of (frame, lineno) pairs.

    skip: number of extra caller frames to drop; this function's own
    frame and its immediate caller are always dropped.  The outermost
    remaining frame is last.
    """
    # Raise and catch an exception to get a frame reference without
    # relying on the CPython-specific sys._getframe().
    try:
        1 / 0
    except ZeroDivisionError:
        f = sys.exc_info()[2].tb_frame
    # Fix: use range() instead of the Python-2-only xrange() (the
    # iteration count is tiny, so this is free on either version).
    for _ in range(skip + 2):
        f = f.f_back
    lst = []
    while f is not None:
        lst.append((f, f.f_lineno))
        f = f.f_back
    return lst
|
@ -1,9 +0,0 @@
|
||||
'''
|
||||
Created on 01.09.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
from futile.types import AbstractTypeManager
|
||||
|
||||
class TypeManager(AbstractTypeManager):
    """Concrete type manager; all behaviour lives in the base class."""
    pass
|
@ -1,52 +0,0 @@
|
||||
'''
|
||||
Created on 01.09.2011
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
|
||||
import sys
|
||||
from types import ModuleType
|
||||
|
||||
from futile.collections import get_iterable
|
||||
from ..logging import LoggerMixin
|
||||
|
||||
|
||||
class ImmutableType(type):
    """Metaclass adding an identity shortcut to construction.

    Calling the class with an existing instance of itself as the first
    positional argument returns that instance unchanged instead of
    building a new object.
    """

    def __call__(self, *args, **kw):
        if args:
            candidate = args[0]
            # Pass-through: Cls(obj) where obj is already a Cls.
            if isinstance(candidate, self):
                return candidate
        return super(ImmutableType, self).__call__(*args, **kw)
|
||||
|
||||
class TypeManagerType(LoggerMixin, type):
    """Metaclass that registers each manager class as a pseudo-module.

    Every class created with this metaclass is inserted into
    sys.modules under "<defining module>.<class name>" so that types
    later created by the manager carry a resolvable __module__ path.
    """

    def __init__(self, *args, **kw):
        super(TypeManagerType, self).__init__(*args, **kw)
        modname = self.__module__ + "." + self.__name__
        # Skip classes defined in this module itself (the abstract base).
        if self.__module__ != __name__:
            sys.modules[modname] = self
        # Remembered so instances can extend the dotted path further.
        self.__module_name__ = modname
|
||||
|
||||
|
||||
class AbstractTypeManager(LoggerMixin, ModuleType):
    """Manager that dynamically creates types inside a synthetic module.

    Each instance registers itself in sys.modules so the types it
    creates have an importable-looking __module__.
    """

    __metaclass__ = TypeManagerType

    def __init__(self, name=None, *args, **kw):
        # NOTE(review): with no name this evaluates id(name) == id(None),
        # which is the same for every instance -- possibly meant id(self);
        # behaviour preserved.
        name = name or str(id(name))
        self.modulename = self.__module_name__ + "." + getattr(
            self, "__prefix__", self.__class__.__name__) + name
        sys.modules[self.modulename] = self

    def create_type(self, name, base=(), dict=None, metaclass=type):
        """Create (or return an already created) type called *name*.

        base: a base class or iterable of base classes.
        dict: optional class-body mapping.  Bug fix: this used to be a
              mutable default argument ({}) that the method then mutated
              with "__module__", leaking state between calls; it now
              defaults to None and the caller's mapping is copied.
        metaclass: callable used to build the type.

        Raises ValueError if an attribute *name* exists but is not a type.
        """
        try:
            existing = getattr(self, name)
            if not isinstance(existing, type):
                raise ValueError(name)
            return existing
        except AttributeError:
            pass

        # Copy defensively; the parameter shadows the dict builtin, so
        # use .copy() rather than dict(...).
        dict = {} if dict is None else dict.copy()

        base = get_iterable(base)
        self.logger.debug("Creating %s %s(%s) with %s", metaclass.__name__,
                          name, base, dict)
        dict["__module__"] = self.modulename
        type = metaclass(name, base, dict)
        setattr(self, name, type)
        return type
|
@ -1,178 +0,0 @@
|
||||
import logging
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
from enum import Enum
|
||||
|
||||
from futile import NOT_SET, identity
|
||||
from futile.logging import LoggerMixin
|
||||
from openmtc.exc import OpenMTCError
|
||||
|
||||
|
||||
class ConfigurationError(OpenMTCError):
    """Base class for all configuration related errors."""
    pass
|
||||
|
||||
|
||||
class ConfigurationKeyError(KeyError, ConfigurationError):
    """A required configuration key is missing (also a KeyError)."""
    pass
|
||||
|
||||
|
||||
class ConfigurationAttributeError(AttributeError, ConfigurationError):
    """Attribute-style access failed (also an AttributeError)."""
    pass
|
||||
|
||||
|
||||
class ConfigurationValueError(ValueError, ConfigurationError):
    """A configuration value could not be converted (also a ValueError)."""
    pass
|
||||
|
||||
|
||||
class ExtraOptionsStrategy(Enum):
    """How unknown configuration keys are treated.

    ignore -- keep them untouched
    warn   -- keep them, but log a warning
    prune  -- silently remove them
    fatal  -- raise a ConfigurationError
    """

    ignore = "ignore"
    warn = "warn"
    prune = "prune"
    fatal = "fatal"
|
||||
|
||||
|
||||
class ConfigurationOption(LoggerMixin):
    """Describes one configuration entry: target type, default value and
    an optional post-conversion hook."""

    __metaclass__ = ABCMeta

    def __init__(self, type, default=NOT_SET, converter=identity,
                 *args, **kw):
        super(ConfigurationOption, self).__init__(*args, **kw)
        self.type = type
        self.default = default
        self.converter = converter

    def convert(self, v):
        """Coerce *v*; None falls back to the default (if one is set)."""
        if v is None:
            if self.default is NOT_SET:
                raise ConfigurationValueError("Value must not be None")
            return self.default
        converted = self._convert(v)
        return self.converter(converted)

    @abstractmethod
    def _convert(self, v):
        """Type-specific coercion hook implemented by subclasses."""
        return v
|
||||
|
||||
|
||||
class SimpleOption(ConfigurationOption):
    """Option coerced by simply calling its declared type on the value."""

    def __init__(self, type=str, default=NOT_SET, converter=identity,
                 *args, **kw):
        super(SimpleOption, self).__init__(type=type, default=default,
                                           converter=converter)

    def _convert(self, v):
        # Already of the right type: hand back unchanged.
        return v if isinstance(v, self.type) else self.type(v)
|
||||
|
||||
|
||||
class ListOption(SimpleOption):
    """Option holding a sequence whose elements are coerced one by one."""

    def __init__(self, content_type, type=list, default=NOT_SET,
                 converter=identity, *args, **kw):
        super(ListOption, self).__init__(type=type, default=default,
                                         converter=converter)
        self.content_type = content_type

    def _convert(self, v):
        seq = super(ListOption, self)._convert(v)
        # Python 2 semantics: map() returns a list here.
        return map(self._convert_content, seq)

    def _convert_content(self, v):
        """Coerce a single element to the declared content type."""
        if isinstance(v, self.content_type):
            return v
        return self.content_type(v)
|
||||
|
||||
|
||||
class BooleanOption(ConfigurationOption):
    """Option for booleans; accepts bools, ints and common string tokens."""

    def __init__(self, default=NOT_SET, converter=identity, *args, **kw):
        super(BooleanOption, self).__init__(type=bool, default=default,
                                            converter=converter)

    def _convert(self, v):
        if isinstance(v, (bool, int)):
            return bool(v)
        if isinstance(v, basestring):
            # The empty string and the listed tokens count as False.
            return v and v.lower() not in ("0", "no", "n", "f", "false")
        raise ConfigurationValueError("Illegal value for boolean: %s" % (v, ))
|
||||
|
||||
|
||||
class EnumOption(SimpleOption):
    """Option whose value is a member of an Enum type.

    Accepts an actual member (or member value) and falls back to a
    lookup by member name.
    """

    def _convert(self, v):
        try:
            return super(EnumOption, self)._convert(v)
        except Exception as exc:
            try:
                # Fall back to lookup by member name.
                return getattr(self.type, v)
            except Exception:
                # Bug fix: was a bare "except:", which also swallowed
                # SystemExit/KeyboardInterrupt.  Report the original
                # conversion error, not the name-lookup failure.
                raise exc
|
||||
|
||||
|
||||
class LowerCaseEnumOption(EnumOption):
    """EnumOption whose name fallback lower-cases the value first."""

    def _convert(self, v):
        try:
            return super(LowerCaseEnumOption, self)._convert(v)
        except Exception as exc:
            try:
                # Case-insensitive lookup by member name.
                return getattr(self.type, v.lower())
            except Exception:
                # Bug fix: was a bare "except:", which also swallowed
                # SystemExit/KeyboardInterrupt.
                raise exc
|
||||
|
||||
|
||||
class Configuration(dict):
    """Validated configuration dictionary.

    Subclasses declare their schema in __options__ (key name ->
    ConfigurationOption), a display name in __name__, and how unknown
    keys are handled in __extra_options_strategy__.
    """

    __options__ = {}
    __name__ = "configuration"
    __extra_options_strategy__ = ExtraOptionsStrategy.ignore

    def __init__(self, *args, **kw):
        config = dict(*args, **kw)
        options = self.__options__.copy()

        for k, v in config.copy().items():
            try:
                option = options.pop(k)
            except KeyError:
                strategy = self.__extra_options_strategy__
                if strategy == ExtraOptionsStrategy.fatal:
                    raise ConfigurationError("Unknown configuration key in %s:"
                                             " %s" % (self.__name__, k))
                if strategy == ExtraOptionsStrategy.prune:
                    del config[k]
                elif strategy == ExtraOptionsStrategy.warn:
                    # Bug fix: this class subclasses plain dict and has no
                    # self.logger -- the old "self.logger.warn(...)" raised
                    # ConfigurationAttributeError.  Use a module logger.
                    logging.getLogger(__module__ if False else "openmtc.configuration").warning(
                        "Unknown configuration key in %s: %s",
                        self.__name__, k)
            else:
                # Known key: validate/convert through its option.
                config[k] = option.convert(v)

        # Remaining declared options were not supplied: apply defaults.
        for k, v in options.items():
            if v.default is NOT_SET:
                raise ConfigurationKeyError("Missing configuration key in"
                                            " %s: %s" %
                                            (self.__name__, k, ))
            config[k] = v.default

        super(Configuration, self).__init__(config)

    def __getitem__(self, k):
        """dict access that raises ConfigurationKeyError with context."""
        try:
            return dict.__getitem__(self, k)
        except KeyError:
            raise ConfigurationKeyError("Missing configuration key in"
                                        " %s: %s" %
                                        (self.__name__, k, ))

    def __getattr__(self, k, default=NOT_SET):
        """Attribute-style access: cfg.key == cfg["key"]."""
        try:
            return self[k]
        except ConfigurationKeyError as exc:
            if default is not NOT_SET:
                return default
            raise ConfigurationAttributeError(str(exc))
|
||||
|
||||
|
||||
class LogLevel(Enum):
    """Symbolic log levels mapped onto stdlib logging constants.

    NOTE(review): "trace" reuses logging.DEBUG, so LogLevel.debug is an
    alias of LogLevel.trace (same value), and there is no "info"
    member -- confirm both are intentional.
    """
    trace = logging.DEBUG
    debug = logging.DEBUG
    warning = logging.WARNING
    error = logging.ERROR
    fatal = logging.FATAL
|
@ -1,13 +0,0 @@
|
||||
from futile.net.exc import NetworkError
|
||||
|
||||
|
||||
class OpenMTCError(Exception):
    """Root of the OpenMTC exception hierarchy."""
    pass
|
||||
|
||||
|
||||
class OpenMTCNetworkError(OpenMTCError, NetworkError):
    """Network-layer failure inside OpenMTC (also a futile NetworkError)."""
    pass
|
||||
|
||||
|
||||
class ConnectionFailed(OpenMTCNetworkError):
    """A connection to a remote peer could not be established."""
    pass
|
@ -1,97 +0,0 @@
|
||||
from futile.logging import LoggerMixin
|
||||
from futile import ObjectProxy
|
||||
from openmtc.model import Collection
|
||||
from openmtc.mapper.exc import MapperError
|
||||
|
||||
|
||||
class MemberProxy(ObjectProxy):
    """Descriptor proxy around a model Member adding lazy mapper syncing.

    Wraps the original descriptor (self._o).  Reading an attribute of an
    unsynced, mapper-attached instance first loads the resource data via
    the mapper; writes are recorded in the instance's change set so they
    can be sent back selectively.
    """

    def __get__(self, instance, owner=None):
        if instance is None:
            # Class-level access exposes the wrapped descriptor itself.
            return self._o

        if not instance._synced:
            # Fetch only if this attribute was not modified locally.
            if not _is_attached(instance) or self.name not in instance._changes:
                instance._mapper._init_resource(instance)
        return self._o.__get__(instance, owner)

    def __set__(self, instance, value):
        # Record the change before delegating the actual assignment.
        if _is_attached(instance):
            instance._changes.add(self._o.name)
        return self._o.__set__(instance, value)
|
||||
|
||||
|
||||
class MapperCollection(Collection):
    """Collection that records additions so the mapper can persist them."""

    def __init__(self, name, type, parent, collection=(), *args, **kw):
        super(MapperCollection, self).__init__(name=name, type=type,
                                               parent=parent,
                                               collection=collection, *args,
                                               **kw)

    def _handle_newitem(self, item):
        # Only fresh, never-persisted items may be added here.
        if _is_attached(item) or item.path is not None:
            raise NotImplementedError()
        super(MapperCollection, self)._handle_newitem(item)
        # Track the addition and bubble the change up the parent chain.
        self._changes.added.add(item)
        if _is_attached(self.parent):
            self.parent._changes.collection_changes.add(self.name)
            if self.parent.parent is not None:
                self.parent.parent._changes.subresource_changes.add(
                    self.parent.name)
|
||||
|
||||
|
||||
class BasicMapper(LoggerMixin):
    """Abstract CRUD mapper between model resources and a data backend.

    Concrete mappers implement the NotImplementedError stubs; this base
    class provides lazy resource initialization and a (currently
    disabled) hook that patches the model classes for mapper awareness.
    """

    def __init__(self, *args, **kw):
        super(BasicMapper, self).__init__(*args, **kw)
        # self._patch_model()
        # Placeholder transport; concrete mappers replace this.
        self._send_request = lambda x: x

    def create(self, path, instance):
        """Persist *instance* under *path*."""
        raise NotImplementedError()

    def update(self, instance, fields):
        """Write the given attribute *fields* of *instance* back."""
        raise NotImplementedError()

    def _do_update(self, instance, fields):
        raise NotImplementedError()

    def get(self, path):
        """Retrieve and map the resource stored at *path*."""
        raise NotImplementedError()

    def delete(self, instance):
        """Remove *instance* from the backend."""
        raise NotImplementedError()

    def _get_data(self, path):
        # Expected to return a tuple whose second element is the raw data.
        raise NotImplementedError()

    def _map(self, path, typename, data):
        raise NotImplementedError()

    def _init_resource(self, res):
        # Lazily populate a resource from backend data.
        return self._fill_resource(res, self._get_data(res.path)[1])

    def _make_subresource(self, type, path, parent):
        """Instantiate a child resource marked as not yet synced."""
        subresource = type(path=path, parent=parent)
        subresource._synced = False
        # return self._attach_instance(subresource)
        return subresource

    def _fill_resource(self, res, data):
        raise NotImplementedError()

    @classmethod
    def _patch_model(cls):
        """Inject mapper-aware descriptors into the model classes.

        Wraps every model member in a MemberProxy (for lazy syncing)
        and swaps plain Collections for MapperCollections.  Idempotent
        per type via the "_initialized" marker.
        """
        import openmtc.model as model

        model.Resource._synced = True
        model.Resource._mapper = None

        for t in model.get_types():
            if "_initialized" not in t.__dict__:
                setattr(t, "_initialized", True)
                for a in t.__members__:
                    # TODO: deal with name differences
                    setattr(t, a.name, MemberProxy(a))
                for a in t.collections:
                    if a.type is not Collection:
                        raise NotImplementedError()
                    a.type = MapperCollection
|
@ -1,11 +0,0 @@
|
||||
"""
|
||||
Created on 02.06.2013
|
||||
|
||||
@author: kca
|
||||
"""
|
||||
|
||||
from openmtc.exc import OpenMTCError
|
||||
|
||||
|
||||
class MapperError(OpenMTCError):
    """Raised for failures inside the mapper layer."""
    pass
|
@ -1,706 +0,0 @@
|
||||
from abc import ABCMeta
|
||||
from collections import Sequence, OrderedDict, Mapping
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from iso8601 import parse_date, ParseError
|
||||
from operator import attrgetter
|
||||
|
||||
from futile import basestring, issubclass, NOT_SET
|
||||
from futile.logging import LoggerMixin
|
||||
from openmtc.model.exc import ModelError, ModelTypeError
|
||||
|
||||
|
||||
class StrEnum(str, Enum):
    """Enum whose members are also str instances (string-comparable)."""
    pass
|
||||
|
||||
|
||||
class Collection(Sequence, Mapping):
    """Ordered, name-indexed container of model entities.

    Behaves both as a sequence (integer/slice access in insertion
    order) and as a mapping keyed by each item's ``name``.
    NOTE: Python 2 only -- relies on dict .values() being a list
    (indexable) and on .itervalues().
    """

    def __init__(self, name, type, parent, collection=(), *args, **kw):
        super(Collection, self).__init__(*args, **kw)
        self._map = OrderedDict()   # name -> item, insertion-ordered
        self.type = type            # required item type
        self.parent = parent        # owning resource
        self.name = name
        for c in collection:
            self.append(c)

    def __getitem__(self, index):
        # Integers/slices address by position, anything else by name.
        if isinstance(index, (int, slice)):
            return self._map.values()[index]
        return self._map[index]

    def __contains__(self, v):
        # True for both item names and the items themselves.
        return v in self._map or v in self._map.values()

    def append(self, v):
        """Add an entity; it must be an instance of the content type."""
        if not isinstance(v, self.type):
            raise ModelTypeError(v)

        self._handle_newitem(v)

        assert v.name is not None, "name is None: %s %s" % (v, v.path)
        self._map[v.name] = v

    add = append

    def get(self, k, default=None):
        """Mapping-style lookup by item name."""
        return self._map.get(k, default)

    def __iter__(self):
        return self._map.itervalues()

    def __len__(self):
        return len(self._map)

    def __delitem__(self, index):
        # Positional deletes are translated to the item's name first.
        if isinstance(index, int):
            instance = self[index]
            index = instance.name
        del self._map[index]

    discard = __delitem__

    def _handle_newitem(self, item):
        """Adopt parent-less items; items with a foreign parent are
        currently tolerated unchanged (see TODO)."""
        if item.parent and item.parent is not self.parent:
            # TODO !
            return
            # raise NotImplementedError()
        item.parent = self.parent

    def __str__(self):
        try:
            return "openmtc.Collection(%s, %s)" % (
                self.name, self._map)
        except AttributeError:
            # name/_map may be unset during construction.
            return "openmtc.Collection(%s)" % (self.__len__())
|
||||
|
||||
|
||||
class Member(LoggerMixin):
    """Base descriptor for model members (attributes, collections, ...).

    Stores values on the instance under "_<name>" and coerces assigned
    values to the declared type via convert().
    """

    def __init__(self, type=unicode, version="1.0", *args, **kw):
        super(Member, self).__init__(*args, **kw)
        self.type = type
        self.version = version

    def _init(self, name):
        # Called by the metaclass once the attribute name is known.
        self.name = name

    def __set__(self, instance, value):
        needs_conversion = (value is not None
                            and not isinstance(value, self.type))
        if needs_conversion:
            value = self.convert(value, instance)
        self.set_value(instance, value)

    def set_value(self, instance, value):
        setattr(instance, "_" + self.name, value)

    def convert(self, value, instance):
        """Coerce *value* to self.type; raise ModelTypeError on failure."""
        try:
            return self.type(value)
        except (TypeError, ValueError):
            raise ModelTypeError("Illegal value for %s (%s): %r" %
                                 (self.name, self.type, value))

    def __repr__(self):
        cls_name = type(self).__name__
        return '%s(name="%s", type=%s)' % (cls_name, self.name,
                                           self.type.__name__)
|
||||
|
||||
|
||||
class Attribute(Member):
    """Scalar resource attribute with access-type semantics.

    accesstype: RW (read/write), RO (read only) or WO (write once).
    id_attribute / path_attribute: at most one may be true; they mark
    the attribute carrying the resource id resp. the resource path.
    """

    RW = "RW"
    RO = "RO"
    WO = "WO"

    def __init__(self, type=unicode, default=None,
                 accesstype=None, mandatory=None,
                 update_mandatory=None,
                 id_attribute=None, path_attribute=None,
                 id_immutable=None, *args, **kw):
        super(Attribute, self).__init__(type=type, *args, **kw)

        if path_attribute and id_attribute:
            raise ModelError("Attribute can't be id_attribute and "
                             "path_attribute at the same time")

        self.default = default
        self.id_attribute = id_attribute
        self.path_attribute = path_attribute
        self.id_immutable = id_immutable

        # Derived default: path attrs are read-only, id attrs write-once,
        # everything else read/write.
        if accesstype is None:
            if path_attribute:
                accesstype = self.RO
            elif id_attribute:
                accesstype = self.WO
            else:
                accesstype = self.RW
        self.accesstype = accesstype

        # Write-once attributes are mandatory on create by default.
        if mandatory is None:
            if accesstype == self.WO:
                mandatory = True
            else:
                mandatory = False
        self.mandatory = mandatory

        # Only RW attributes inherit their mandatory flag for updates.
        if update_mandatory is None:
            if accesstype == self.RW:
                update_mandatory = mandatory
            else:
                update_mandatory = False
        self.update_mandatory = update_mandatory

    def __get__(self, instance, owner=None):
        if instance is None:
            return self
        try:
            return getattr(instance, "_" + self.name)
        except AttributeError:
            # Value never set: fall back to the declared default.
            return self.default
|
||||
|
||||
|
||||
try:
    # Python 2: "unicode" exists; provide an attribute that decodes
    # raw byte strings as UTF-8.
    unicode

    class UnicodeAttribute(Attribute):
        """Text attribute; transparently decodes UTF-8 byte strings."""

        def __init__(self, default=None, accesstype=None,
                     mandatory=False, *args, **kw):
            super(UnicodeAttribute, self).__init__(type=unicode,
                                                   default=default,
                                                   accesstype=accesstype,
                                                   mandatory=mandatory, *args,
                                                   **kw)

        def convert(self, value, instance):
            # Byte strings are assumed to be UTF-8 encoded.
            if isinstance(value, str):
                return value.decode("utf-8")
            return super(UnicodeAttribute, self).convert(value, instance)
except NameError:
    # Python 3: str is already unicode; a plain Attribute suffices.
    UnicodeAttribute = Attribute
|
||||
|
||||
|
||||
class DatetimeAttribute(Attribute):
    """Attribute holding a datetime; ISO 8601 strings are parsed."""

    def __init__(self, default=None, accesstype=None,
                 mandatory=False, *args, **kw):
        super(DatetimeAttribute, self).__init__(type=datetime,
                                                default=default,
                                                accesstype=accesstype,
                                                mandatory=mandatory, *args,
                                                **kw)

    def convert(self, value, instance):
        if isinstance(value, basestring):
            try:
                return parse_date(value)
            except ParseError as e:
                # Normalize to ValueError so Member.convert reports it
                # uniformly as a ModelTypeError.
                raise ValueError(str(e))
        return super(DatetimeAttribute, self).convert(value, instance)
|
||||
|
||||
|
||||
class ListAttribute(Attribute):
    """Attribute holding a homogeneous sequence.

    content_type: type each element is converted to; Entity subclasses
    may also be supplied as mappings of constructor arguments.
    """

    def __init__(self, content_type=unicode, type=list,
                 default=NOT_SET, *args, **kw):
        super(ListAttribute, self).__init__(type=type,
                                            default=default, *args, **kw)
        self.content_type = content_type

    def __get__(self, instance, owner=None):
        if instance is None:
            return self

        key = "_" + self.name
        try:
            return getattr(instance, key)
        except AttributeError:
            # Lazily materialize (and cache on the instance) an empty
            # container, or the declared default if one was given.
            if self.default is NOT_SET:
                subresource = self.type()
            else:
                subresource = self.default
            setattr(instance, key, subresource)
            return subresource

    def _convert_mapping(self, value, instance):
        # Mapping -> Entity: treat keys as constructor arguments.
        self.logger.debug("Creating %s from %s", self.content_type, value)
        return self.content_type(**value)

    def convert_content(self, value, instance):
        """Coerce a single element to the declared content type."""
        if isinstance(value, self.content_type):
            return value
        if issubclass(self.content_type, Entity):
            if isinstance(value, Mapping):
                return self._convert_mapping(value, instance)
            raise ValueError("Illegal value for sequence '%s' (%s): %s (%s)" %
                             (self.name, self.content_type, value, type(value)))
        return self.content_type(value)

    def set_value(self, instance, value):
        # Convert every element before storing; falsy values (None, [])
        # are stored unchanged.
        if value:
            value = self.type([self.convert_content(v, instance)
                               for v in value])
        super(ListAttribute, self).set_value(instance, value)
|
||||
|
||||
|
||||
class StringListAttribute(Attribute):
    """Attribute holding a list parsed from a space-separated string.

    NOTE(review): apart from convert(), this duplicates ListAttribute
    almost verbatim -- candidate for sharing a base class.  Also,
    value.strip(' ').split(' ') yields empty-string elements for runs
    of multiple spaces; confirm inputs are single-space separated.
    """

    def __init__(self, content_type=unicode, type=list,
                 default=NOT_SET, *args, **kw):
        super(StringListAttribute, self).__init__(type=type, default=default,
                                                  *args, **kw)
        self.content_type = content_type

    def __get__(self, instance, owner=None):
        if instance is None:
            return self

        key = "_" + self.name
        try:
            return getattr(instance, key)
        except AttributeError:
            # Lazily materialize (and cache) the container or default.
            if self.default is NOT_SET:
                subresource = self.type()
            else:
                subresource = self.default
            setattr(instance, key, subresource)
            return subresource

    def convert(self, value, instance):
        # Whole-value conversion: split a space-separated string.
        if isinstance(value, str):
            return value.strip(' ').split(' ')
        return super(StringListAttribute, self).convert(value, instance)

    def _convert_mapping(self, value, instance):
        # Mapping -> Entity: treat keys as constructor arguments.
        self.logger.debug("Creating %s from %s", self.content_type, value)
        return self.content_type(**value)

    def convert_content(self, value, instance):
        """Coerce a single element to the declared content type."""
        if isinstance(value, self.content_type):
            return value
        if issubclass(self.content_type, Entity):
            if isinstance(value, Mapping):
                return self._convert_mapping(value, instance)
            raise ValueError("Illegal value for sequence '%s' (%s): %s (%s)" %
                             (self.name, self.content_type, value, type(value)))
        return self.content_type(value)

    def set_value(self, instance, value):
        # Convert every element before storing.
        if value:
            value = self.type([self.convert_content(v, instance)
                               for v in value])
        super(StringListAttribute, self).set_value(instance, value)
|
||||
|
||||
|
||||
class EntityAttribute(Attribute):
    """Attribute whose value is a nested Entity; mappings are expanded
    into constructor arguments."""

    def __init__(self, type, default=None, accesstype=None, mandatory=None,
                 update_mandatory=None):
        super(EntityAttribute, self).__init__(type=type, default=default,
                                              accesstype=accesstype,
                                              mandatory=mandatory,
                                              update_mandatory=update_mandatory)

    def convert(self, value, instance):
        if not isinstance(value, Mapping):
            # Not a mapping: let the generic coercion handle it.
            return super(EntityAttribute, self).convert(value, instance)
        self.logger.debug("Creating %s from %s", self.type, value)
        return self.type(**value)
|
||||
|
||||
|
||||
class CollectionMember(Member):
    """Member describing a named sub-collection of a resource."""

    def __init__(self, content_type, type=Collection, *args,
                 **kw):  # TODO: kca: use type for content_type
        super(CollectionMember, self).__init__(type=type, *args, **kw)
        self.content_type = content_type

    def convert(self, value, instance):
        """Wrap an arbitrary iterable into the collection type."""
        try:
            return self.type(collection=value, name=self.name,
                             parent=instance, type=self.content_type)
        except Exception:
            # Bug fix: was a bare "except:", which also swallowed
            # SystemExit/KeyboardInterrupt before falling back.
            return super(CollectionMember, self).convert(value, instance)

    def __get__(self, instance, owner=None):
        """Lazily create (and cache) an empty collection on first access."""
        if instance is None:
            return self

        key = "_" + self.name
        try:
            return getattr(instance, key)
        except AttributeError:
            subresource = self.type(name=self.name, parent=instance,
                                    type=self.content_type)
            setattr(instance, key, subresource)
            return subresource
|
||||
|
||||
|
||||
class SubresourceMember(Member):
    """Member describing a nested child resource.

    NOTE(review): the ``virtual`` and ``default`` constructor
    parameters are accepted but never stored -- confirm whether that is
    intentional (virtual-ness is read from the type instead).
    """

    default = None

    def __init__(self, type, virtual=False, default=NOT_SET, *args, **kw):
        # Subresources must be Resource types (None/falsy is tolerated).
        if type and not issubclass(type, Resource):
            raise TypeError(type)

        super(SubresourceMember, self).__init__(type=type, *args, **kw)

    def __get__(self, instance, owner=None):
        if instance is None:
            return self

        key = "_" + self.name
        try:
            v = getattr(instance, key)
            if v is not None:
                return v
        except AttributeError:
            pass

        # Here we automatically create missing subresources
        # Might be a stupid idea to do it here
        path = instance.path and instance.path + "/" + self.name or None
        subresource = self.type(
            path=path,
            parent=instance
        )

        # TODO: needs to go into the appropriate resource type(s)
        if hasattr(subresource, "creationTime"):
            # Inherit timestamps from the owning resource.
            creation_time = instance.creationTime
            subresource.creationTime = creation_time
            subresource.lastModifiedTime = creation_time

        setattr(instance, key, subresource)
        return subresource

    @property
    def virtual(self):
        # Delegated to the subresource type, not the constructor arg.
        return self.type.virtual
|
||||
|
||||
|
||||
class ResourceType(ABCMeta):
    """Metaclass that collects model member metadata at class creation.

    Derives a default ``typename`` (lowerCamel class name), walks all
    descriptors and sorts them into ``attributes``, ``subresources``
    and ``collections``, and records the unique id/path attributes.
    """

    def __init__(self, *args, **kw):
        super(ResourceType, self).__init__(*args, **kw)

        # Default typename: "FooBar" -> "fooBar"; *Collection helper
        # classes keep whatever they inherit.
        if ("typename" not in self.__dict__ and
                not self.__name__.endswith("Collection")):
            self.typename = self.__name__[0].lower() + self.__name__[1:]

        self.id_attribute = self.path_attribute = None
        attributes = self.attributes = []
        subresources = self.subresources = []
        collections = self.collections = []

        for name in dir(self):
            if name[0] != "_":
                attr = getattr(self, name)
                if isinstance(attr, Member):
                    # Pythonic underscore names map to dashed wire names;
                    # the member is re-published under the dashed name.
                    if "_" in name:
                        name = name.replace("_", "-")
                        setattr(self, name, attr)
                    attr._init(name)
                    if isinstance(attr, SubresourceMember):
                        subresources.append(attr)
                    elif isinstance(attr, CollectionMember):
                        collections.append(attr)
                    else:
                        attributes.append(attr)

                        # Only plain attributes can carry id/path markers.
                        if attr.id_attribute and attr.path_attribute:
                            raise ModelTypeError(
                                "Attribute %s of resource %s can only be "
                                "either id_attribute or path_attribute, not "
                                "both." % (name, self.__name__))

                        if attr.id_attribute:
                            if self.id_attribute is not None:
                                raise ModelTypeError(
                                    "Resource %s defines more than one id "
                                    "attribute: %s and %s" %
                                    (self.__name__, self.id_attribute, name))
                            self.id_attribute = attr.name
                            self.id_immutable = attr.id_immutable

                        if attr.path_attribute:
                            if self.path_attribute is not None:
                                raise ModelTypeError(
                                    "Resource %s defines more than one path "
                                    "attribute: %s and %s" %
                                    (self.__name__, self.id_attribute, name))
                            self.path_attribute = attr.name

        self.__members__ = attributes + subresources + collections

    # TODO: caching
    @property
    def attribute_names(self):
        return map(attrgetter("name"), self.attributes)

    @property
    def collection_names(self):
        return map(attrgetter("name"), self.collections)

    @property
    def subresource_names(self):
        return map(attrgetter("name"), self.subresources)

    @property
    def member_names(self):
        return map(attrgetter("name"), self.__members__)
|
||||
|
||||
|
||||
class Entity(LoggerMixin):
    """Base for all model objects; populates members from keyword args."""

    __metaclass__ = ResourceType

    def __init__(self, *args, **kw):
        self.set_values(kw)

    def set_values(self, values):
        """Assign member values from a mapping.

        Unconsumed keys are handed to _set_extra_values(), which raises
        ModelTypeError for unknown attributes.
        """
        self.logger.debug("Setting values for entity of type %s with %s",
                          type(self), values)
        values = values.copy()

        for member in self.__members__:
            try:
                v = values.pop(member.name)
                # Wrap bare scalars destined for list attributes.
                if (v is not None and isinstance(member, ListAttribute) and
                        not isinstance(v, (list, tuple, set))):
                    l = [v]
                    v = l
                setattr(self, member.name, v)
            except KeyError:
                try:
                    # Fall back to "<name>Reference" wire keys.
                    v = values.pop(member.name + "Reference")
                    # TODO: proper solution?
                    if (v is not None and isinstance(member, ListAttribute) and
                            not isinstance(v, (list, tuple, set))):
                        v = v.values()[0]
                    setattr(self, member.name, v)
                except KeyError:
                    pass

        if values:
            self._set_extra_values(values)

    def _set_extra_values(self, values):
        # Disabled diagnostic code kept as the original docstring below.
        """
        names = type(self).subresource_names
        for k in values.keys():
            if k.strip("Reference") in names:
                values.pop(k)
        print names, values
        from traceback import print_stack
        print_stack()
        """
        if values:
            raise ModelTypeError("%s resource has no attribute %s" %
                                 (self.typename, values.keys()[0]))

    @classmethod
    def get_typename(cls):
        """Wire-format type name (set by the ResourceType metaclass)."""
        return cls.typename

    def get_attribute_values(self, filter=False):
        """Return {name: value}; filter=True drops None/''/[] values."""
        vals = {}
        for attr in self.attributes:
            a_name = attr.name
            val = getattr(self, a_name)
            if (val is None or val == '' or val == []) and filter:
                continue
            vals[a_name] = val
        return vals
    # Attribute-style alias for the unfiltered variant.
    attribute_values = property(get_attribute_values)

    def get_values_representation(self, fields=None, internal=False):
        """Build a serializable dict of attribute values.

        fields: optional whitelist of attribute names (the id attribute
                is always included).
        internal: include attributes without an access type as well.
        """
        vals = {}
        id_attribute = self.id_attribute
        for attr in self.attributes:
            a_name = attr.name
            if (fields is None or a_name == id_attribute or a_name in fields) \
                    and (internal or attr.accesstype is not None):
                # Read the raw slot to avoid triggering lazy defaults.
                val = getattr(self, "_" + a_name, None)
                if val is None:
                    continue
                if isinstance(attr, ListAttribute):
                    # TODO: return simple values. No representation
                    if attr.content_type is AnyURI:  # any uri list
                        vals[a_name] = {"reference": val}
                    elif issubclass(attr.content_type, Entity):  # complex list
                        vals[a_name] = {
                            a_name: [x.get_values_representation() for x in val]
                        }
                    else:  # simple list
                        vals[a_name] = {a_name[:-1]: val}
                elif isinstance(attr, EntityAttribute):
                    vals[a_name] = val.values
                else:
                    # Datetimes serialize via isoformat(); other values
                    # pass through unchanged.
                    try:
                        val = val.isoformat()
                    except AttributeError:
                        pass
                    vals[a_name] = val
        return vals

    def get_values(self, filter=False):
        return self.get_attribute_values(filter)

    @property
    def values(self):
        return self.get_values()

    @property
    def subresource_values(self):
        """{name: subresource} for every declared subresource."""
        vals = {}
        for attr in self.subresources:
            vals[attr.name] = getattr(self, attr.name)
        return vals
||||
|
||||
|
||||
class ContentResource(Entity):
    """Virtual resource wrapping a single raw value under key CONTENT."""

    virtual = True
    __model_name__ = None
    __model_version__ = None

    def __init__(self, value, *args, **kw):
        # NOTE(review): any keyword arguments supplied by the caller are
        # discarded here and replaced wholesale by {'CONTENT': value} --
        # confirm this is intentional.
        kw = {'CONTENT': value}
        super(ContentResource, self).__init__(*args, **kw)

    @property
    def values(self):
        # Unwrap: expose just the raw content value.
        return self.get_values().get('CONTENT')
|
||||
|
||||
|
||||
class Resource(Entity):
    """Addressable entity with a path and derived id/name handling."""

    virtual = False
    __model_name__ = None
    __model_version__ = None

    def __init__(self, path=None, parent=None, *args, **kw):
        if path is not None and not isinstance(path, basestring):
            raise TypeError(path)
        self.__path = path
        self.parent = parent
        super(Resource, self).__init__(*args, **kw)

    def get_path(self):
        return self.__path

    def set_path(self, path):
        # Setting the path back-fills the id/path attributes if unset.
        self.__path = path
        if self.id_attribute and getattr(self, self.id_attribute) is None:
            setattr(self, self.id_attribute, path.rpartition("/")[-1])
        if self.path_attribute and getattr(self, self.path_attribute) is None:
            setattr(self, self.path_attribute, path)

    path = property(get_path, set_path)

    @property
    def parent_path(self):
        # Everything before the last "/" (None if no path is set).
        if self.__path is not None:
            return self.__path.rpartition("/")[0]

    # TODO: deprecated
    @property
    def name(self):
        return self.basename

    @property
    def basename(self):
        # Last path segment, falling back to the id attribute value.
        if self.path is not None:
            return self.path.rpartition("/")[-1]
        if self.id_attribute is not None:
            return getattr(self, self.id_attribute)

    def set_values(self, values):
        """Like Entity.set_values, plus underscore/dash key aliasing and
        id/path back-fill from self.path."""
        values = values.copy()

        # Accept pythonic underscore names for dashed wire names.
        keys = [k for k in values.keys() if "_" in k]
        for k in keys:
            values[k.replace("_", "-")] = values.pop(k)

        path = self.path
        if path is not None:
            id_attribute = self.id_attribute
            if (id_attribute is not None and
                    id_attribute not in values):
                values[id_attribute] = path.rpartition("/")[-1]

            path_attribute = self.path_attribute
            if (path_attribute is not None and
                    path_attribute not in values):
                values[path_attribute] = path

        for member in self.__members__:
            try:
                v = values.pop(member.name)
                # FIXME: move into de-serializer and handle dicts
                if (v is not None and isinstance(member, ListAttribute) and
                        not isinstance(v, (list, tuple, set))):
                    v = v.values()[0]
                setattr(self, member.name, v)
            except KeyError:
                try:
                    # Fall back to "<name>Reference" wire keys.
                    v = values.pop(member.name + "Reference")
                    # TODO: proper solution?
                    if (v is not None and isinstance(member, ListAttribute) and
                            not isinstance(v, (list, tuple, set))):
                        v = v.values()[0]
                    setattr(self, member.name, v)
                except KeyError:
                    pass

        if values:
            self._set_extra_values(values)

    def __repr__(self):
        return "%s(path='%s', name='%s')" % (type(self).__name__, self.path,
                                             self.name)

    def __eq__(self, o):
        # Resources compare equal iff their paths match.
        try:
            return self.path == o.path
        except AttributeError:
            return False

    def __ne__(self, o):
        return not self.__eq__(o)
|
||||
|
||||
|
||||
class FlexibleAttributesMixin(object):
    """Mixin that tracks ad-hoc ("flexible") attributes set on a resource.

    Any public attribute assigned that is not already defined on the object
    (and is not "parent") is recorded and included in the value dicts
    produced by get_values()/get_values_representation().
    """

    def __init__(self, path=None, parent=None, *args, **kw):
        # Must be set before super().__init__ so __setattr__ can use it.
        self._flex_attrs = set()

        super(FlexibleAttributesMixin, self).__init__(path=path, parent=parent,
                                                      *args, **kw)

    def __setattr__(self, k, v):
        # Record first-time assignments of public, non-"parent" attributes
        # as flexible attributes; known attributes pass through untouched.
        if not k.startswith("_") and not hasattr(self, k) and k != "parent":
            self._flex_attrs.add(k)

        return super(FlexibleAttributesMixin, self).__setattr__(k, v)

    def __delattr__(self, k):
        # Deleting an attribute also removes it from the flexible set.
        self._flex_attrs.discard(k)

        return super(FlexibleAttributesMixin, self).__delattr__(k)

    @property
    def flex_values(self):
        """Current values of all tracked flexible attributes."""
        return {k: getattr(self, k) for k in self._flex_attrs}

    def get_values(self, filter=False):
        # Regular attribute values plus the flexible ones.
        vals = super(FlexibleAttributesMixin, self).get_values(filter)
        vals.update(self.flex_values)
        return vals

    def get_values_representation(self, fields=None, internal=False):
        # Flexible attributes are only included when no explicit field
        # selection was requested.
        r = super(FlexibleAttributesMixin, self) \
            .get_values_representation(fields=fields, internal=internal)
        if fields is None:
            r.update(self.flex_values)
        return r

    def _set_extra_values(self, values):
        # Unknown keys from set_values() become flexible attributes.
        for k, v in values.items():
            setattr(self, k, v)
|
||||
|
||||
|
||||
class AnyURI(str):
    """String subtype marking a value as a URI (xs:anyURI-style)."""
    pass
|
||||
|
||||
|
||||
class AnyURIList(Entity):
    """Entity holding an optional list of URI references."""
    reference = ListAttribute(mandatory=False)
|
@ -1,14 +0,0 @@
|
||||
'''
|
||||
Created on 26.05.2013
|
||||
|
||||
@author: kca
|
||||
'''
|
||||
from openmtc.exc import OpenMTCError
|
||||
|
||||
|
||||
class ModelError(OpenMTCError):
    """Base class for errors raised by the OpenMTC data model."""
    pass
|
||||
|
||||
|
||||
class ModelTypeError(ModelError, TypeError):
    """Model error caused by a value of the wrong type."""
    pass
|
@ -1,39 +0,0 @@
|
||||
from datetime import datetime, timedelta, tzinfo
|
||||
import time
|
||||
|
||||
# A zero-length timedelta, shared by the UTC tzinfo below.
ZERO = timedelta(0)


class Utc(tzinfo):
    """Fixed-offset tzinfo implementing UTC (zero offset, no DST)."""

    __slots__ = ()

    def utcoffset(self, dt):
        """The offset from UTC is always zero."""
        return ZERO

    def dst(self, dt):
        """UTC observes no daylight saving time."""
        return ZERO

    def tzname(self, dt):
        """Canonical name of this zone."""
        return "UTC"


# Singleton instance used throughout the code base.
UTC = Utc()


def datetime_now():
    """Return the current time as a timezone-aware UTC datetime."""
    return datetime.now(UTC)


def datetime_the_future(offset=0):
    """Return an aware UTC datetime *offset* seconds in the future.

    With no offset (or offset == 0) this is equivalent to datetime_now().

    :param offset: seconds from now
    :return: datetime <offset> seconds in the future
    """
    future_ts = time.time() + offset
    return datetime.fromtimestamp(future_ts, UTC)
|
||||
|
@ -1 +0,0 @@
|
||||
# Package version string.
VERSION = "4.0.0"
|
@ -1,2 +0,0 @@
|
||||
class OpenMTCAppError(Exception):
    """Base exception for OpenMTC application-level errors."""
    pass
|
@ -1,89 +0,0 @@
|
||||
from signal import SIGTERM, SIGINT
|
||||
|
||||
from flask import (Flask, request, abort, redirect, url_for,
|
||||
Response as FlaskResponse)
|
||||
|
||||
from gevent import signal as gevent_signal
|
||||
from gevent.pywsgi import WSGIServer
|
||||
from geventwebsocket.handler import WebSocketHandler
|
||||
from socketio import Server as SioServer, Middleware as SioMiddleware
|
||||
|
||||
from futile.net.http.exc import HTTPError
|
||||
from openmtc_app.runner import AppRunner
|
||||
|
||||
|
||||
class Response(FlaskResponse):
    # NOTE(review): presumably a re-export so application code can use a
    # runner-local Response type without importing Flask directly — confirm.
    pass
|
||||
|
||||
|
||||
class SimpleFlaskRunner(AppRunner):
    """AppRunner that serves an OpenMTC application via a Flask WSGI app."""

    def __init__(self, m2m_app, port=None, listen_on="0.0.0.0", *args, **kw):
        """
        :param m2m_app: the OpenMTC application instance to run
        :param port: (optional) TCP port to listen on; defaults to 5050
        :param listen_on: (optional) interface address to bind to
        """
        super(SimpleFlaskRunner, self).__init__(m2m_app=m2m_app, *args, **kw)

        self.port = port or 5050
        self.listen_on = listen_on
        self.flask_app = Flask(type(self.m2m_app).__module__)

    def _get_server(self):
        # Plain WSGI server; subclasses may wrap flask_app in middleware.
        return WSGIServer((self.listen_on, self.port), self.flask_app)

    def _run(self):
        # Let the app register its routes first.
        # NOTE(review): self.m2m_ep appears to be provided by AppRunner —
        # confirm against its definition.
        self.m2m_app.run(self, self.m2m_ep)

        _server = self._get_server()
        self.logger.debug("Serving on %s:%s", self.listen_on, self.port)
        # Stop the server gracefully on SIGTERM/SIGINT.
        gevent_signal(SIGTERM, _server.stop)
        gevent_signal(SIGINT, _server.stop)
        _server.serve_forever()

    def add_route(self, route, handler, methods=("POST", "GET")):
        """Expose *handler* under *route*; HTTPError maps to an HTTP abort.

        :param route: URL rule to register
        :param handler: callable taking the Flask request object
        :param methods: HTTP methods to accept
        """
        def wrapper():
            try:
                return handler(request)
            except HTTPError as e:
                self.logger.exception("Aborting")
                abort(e.status)

        self.logger.debug("Adding route: %s -> %s" % (route, handler))
        # endpoint must be unique per (route, handler) pair.
        self.flask_app.add_url_rule(route, view_func=wrapper,
                                    endpoint=route + str(handler),
                                    methods=methods)
|
||||
|
||||
|
||||
class FlaskRunner(SimpleFlaskRunner):
    """SimpleFlaskRunner extended with socket.io/websocket support."""

    def __init__(self, m2m_app, port=None, listen_on="0.0.0.0", *args, **kw):
        super(FlaskRunner, self).__init__(m2m_app=m2m_app, port=port,
                                          listen_on=listen_on, *args, **kw)

        # Serve the static index page at the root URL.
        @self.flask_app.route("/")
        def home():
            return redirect(url_for('static', filename='index.html'))

        self.sio_app = SioServer(async_mode='gevent')

        @self.sio_app.on('connect')
        def connect(sid, environ):
            self.logger.debug('client connected: %s' % sid)

    def _get_server(self):
        # Wrap the Flask app with socket.io middleware and enable websockets.
        return WSGIServer((self.listen_on, self.port),
                          SioMiddleware(self.sio_app, self.flask_app),
                          handler_class=WebSocketHandler)

    def emit(self, event, message=None, sid=None):
        """Emit a socket.io *event*, optionally to a single client (*sid*)."""
        self.sio_app.emit(event, message, room=sid)

    def get_handler_decorator(self, name):
        """Return a decorator registering a handler for socket.io *name*."""
        return self.sio_app.on(name)

    def add_message_handler(self, name, handler, client=False, response=False):
        """Register *handler* for socket.io event *name*.

        :param client: if False, drop the leading sid argument before calling
        :param response: if True, the handler's return value is sent back
        """

        def wrapper(*args, **kw):
            if not client:
                # First positional argument is the socket.io sid.
                args = args[1:]
            if response:
                return handler(*args, **kw)
            else:
                handler(*args, **kw)

        self.sio_app.on(name, wrapper)
|
@ -1,276 +0,0 @@
|
||||
from gevent import spawn
|
||||
from gevent.pywsgi import WSGIServer
|
||||
from inspect import getargspec
|
||||
from futile.logging import LoggerMixin
|
||||
from openmtc_onem2m.exc import OneM2MError
|
||||
from openmtc_onem2m.model import (
|
||||
EventNotificationCriteria,
|
||||
NotificationEventTypeE,
|
||||
Subscription,
|
||||
)
|
||||
from openmtc_onem2m.serializer import get_onem2m_decoder
|
||||
from urlparse import urlparse
|
||||
|
||||
from openmtc_onem2m.util import split_onem2m_address
|
||||
|
||||
_handler_map = {}
|
||||
|
||||
|
||||
def register_handler(cls, schemes=()):
|
||||
_handler_map.update({
|
||||
scheme: cls for scheme in map(str.lower, schemes)
|
||||
})
|
||||
|
||||
|
||||
def get_handler(scheme, poa, callback_func, ssl_certs=None):
|
||||
return _handler_map[scheme](poa, callback_func, ssl_certs)
|
||||
|
||||
|
||||
class NotificationManager(LoggerMixin):
    """Manages oneM2M subscriptions and dispatches incoming notifications.

    A protocol handler is created for every configured point of access
    (POA); notifications received by any handler are routed to the callback
    registered for the matching subscription reference.
    """

    def __init__(self, poas, ep, onem2m_mapper, ca_certs=None, cert_file=None, key_file=None):
        """
        :param list poas: points of access (URIs) to listen on
        :param str ep: CSE endpoint, used to resolve 'auto' POA hostnames
        :param openmtc_onem2m.mapper.OneM2MMapper onem2m_mapper:
        :param ca_certs: (optional) CA certificate bundle for TLS
        :param cert_file: (optional) client certificate for TLS
        :param key_file: (optional) client key for TLS
        """
        # BUGFIX: these were class attributes before; as mutable containers
        # they were shared by every NotificationManager instance in the
        # process. They must be per-instance state.
        self.handlers = []
        self.endpoints = []
        self.callbacks = {}

        self.mapper = onem2m_mapper
        self.sp_id, self.cse_id, _ = split_onem2m_address(onem2m_mapper.originator)
        self.ssl_certs = {
            'ca_certs': ca_certs,
            'cert_file': cert_file,
            'key_file': key_file
        }

        for poa in map(urlparse, poas):
            # 'auto' means: derive the host from the route towards the CSE.
            if poa.hostname == 'auto':
                poa = poa._replace(netloc="%s:%s" % (self._get_auto_host(ep), poa.port))

            if not poa.scheme:
                poa = poa._replace(scheme='http')

            try:
                self.handlers.append(get_handler(poa.scheme, poa, self._handle_callback,
                                                 self.ssl_certs))
                self.endpoints.append(poa.geturl())
            except Exception:
                # Best effort: POAs with unsupported schemes are skipped.
                self.logger.debug("No notification handler for POA: %s", poa.geturl())

        self.logger.debug('Available POAs: %s' % ', '.join(self.endpoints))

        super(NotificationManager, self).__init__()

    @staticmethod
    def _get_auto_host(ep):
        """Determine the local address used to reach *ep*; fall back to loopback."""
        try:
            import socket
            from urlparse import urlparse
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                netloc = urlparse(ep).netloc.split(':')
                sock.connect((netloc[0], int(netloc[1])))
                host = sock.getsockname()[0]
            finally:
                # BUGFIX: close the socket even when connect() fails.
                sock.close()
        except Exception:
            host = "127.0.0.1"

        return host

    def _normalize_path(self, path):
        """Strip the SP-ID and CSE-ID prefixes from *path* when present."""
        path = path[len(self.sp_id):] if path.startswith(self.sp_id) and self.sp_id else path
        path = path[len(self.cse_id) + 1:] if path.startswith(self.cse_id) and self.cse_id else path
        return path

    def _init(self):
        """Start all protocol handlers exactly once (self-replacing method)."""
        for handler in self.handlers:
            try:
                handler.start()
            except Exception:
                # Best effort, but make failures visible in the log.
                self.logger.exception("Error starting notification handler")

        def nop():
            pass
        # Replace ourselves so subsequent calls are no-ops.
        self._init = nop

    def register_callback(self, func, sur):
        """Register *func* for notifications on subscription reference *sur*.

        Single-argument callbacks receive only the representation; callbacks
        with more parameters are invoked as func(originator, **notification).
        """
        self.callbacks[sur] = func if len(getargspec(func)[0]) > 1 \
            else lambda _, **notification: func(notification['rep'])

    def _handle_callback(self, originator, **notification):
        """Dispatch an incoming notification to its registered callback."""
        sur = notification.pop('sur')
        sur = self._normalize_path(sur)

        try:
            callback = self.callbacks[sur]
        except KeyError:
            if not sur.startswith('/'):
                # TODO(rst): maybe not the best, check alternatives
                # assumes originator is always in the form //SP-ID/CSE-ID
                sur = originator[originator.rfind('/'):] + '/' + sur
                try:
                    callback = self.callbacks[sur]
                except KeyError:
                    # Unknown subscription; nothing to dispatch.
                    return
            else:
                return
        try:
            spawn(callback, originator, **notification)
        except Exception:
            self.logger.exception("Error dispatching notification callback")

    def get_expiration_time(self):
        """Expiration time for created subscriptions; None means unlimited."""
        return None

    def subscribe(self, path, func, filter_criteria=None, expiration_time=None,
                  notification_types=(NotificationEventTypeE.updateOfResource, )):
        """Create a Subscription under *path* and register *func* for it.

        :param path: resource path to subscribe to
        :param func: callback invoked on matching notifications
        :param filter_criteria: (optional) EventNotificationCriteria
        :param expiration_time: (optional) subscription expiration
        :param notification_types: event types used when the criteria have none
        :return: the created Subscription resource
        """
        self._init()

        event_notification_criteria = filter_criteria or EventNotificationCriteria()
        event_notification_criteria.notificationEventType = (
            event_notification_criteria.notificationEventType or list(notification_types))

        subscription = self.mapper.create(path, Subscription(
            notificationURI=[self.mapper.originator],
            expirationTime=expiration_time or self.get_expiration_time(),
            eventNotificationCriteria=event_notification_criteria,
        ))

        reference = self._normalize_path(subscription.subscriberURI or subscription.path)
        self.register_callback(func, reference)
        return subscription

    def unsubscribe(self, sur):
        """Delete the subscription *sur* and drop its callback."""
        self.mapper.delete(sur)
        del self.callbacks[sur]

    def shutdown(self):
        """Delete all subscriptions and stop every protocol handler."""
        # Iterate over a snapshot: unsubscribe() mutates self.callbacks.
        for subscription in list(self.callbacks):
            try:
                self.unsubscribe(subscription)
            except OneM2MError:
                pass

        for handler in self.handlers:
            try:
                handler.stop()
            except Exception:
                self.logger.exception("Error stopping notification handler")
|
||||
|
||||
|
||||
class BaseNotificationHandler(object):
    """Common base for protocol-specific notification handlers.

    Subclasses must implement start(); stop() defaults to a no-op.
    """

    def __init__(self, poa, callback_func, ssl_certs=None):
        """
        :param poa: parsed point of access this handler listens on
        :param callback_func: callable invoked for each received notification
        :param ssl_certs: (optional) dict of TLS certificate settings
        """
        self._endpoint = poa
        self._callback = callback_func
        self._ssl_certs = ssl_certs

    @classmethod
    def _unpack_notification(cls, notification):
        """Flatten a oneM2M notification resource into a plain dict."""
        event = notification.notificationEvent
        return {
            'sur': notification.subscriptionReference,
            'net': event.notificationEventType,
            'rep': event.representation,
        }

    def start(self):
        """Begin listening; must be provided by the concrete handler."""
        raise NotImplementedError

    def stop(self):
        """Stop listening; optional for subclasses."""
        pass
|
||||
|
||||
|
||||
class MqttNotificationHandler(BaseNotificationHandler):
    """Receives oneM2M notifications over MQTT."""

    _client = None

    def start(self):
        # Imported lazily so the MQTT stack is only required when used.
        from openmtc_onem2m.client.mqtt import get_client
        from openmtc_onem2m.transport import OneM2MResponse
        from openmtc_onem2m.exc import get_response_status

        def wrapper(request):
            # Unpack, dispatch to the manager's callback, then acknowledge
            # with oneM2M status 2002.
            notification = self._unpack_notification(request.content)
            self._callback(request.originator, **notification)
            return OneM2MResponse(status_code=get_response_status(2002), request=request)

        self._client = get_client(self._endpoint.geturl(), handle_request_func=wrapper)

    def stop(self):
        self._client.stop()

register_handler(MqttNotificationHandler, ('mqtt', 'mqtts', 'secure-mqtt'))
|
||||
|
||||
|
||||
class HttpNotificationHandler(BaseNotificationHandler):
    """Receives oneM2M notifications over HTTP(S) via an embedded Flask app."""

    server = None

    def __init__(self, poa, callback_func, ssl_certs=None):
        super(HttpNotificationHandler, self).__init__(poa, callback_func, ssl_certs)

        self.ca_certs = ssl_certs.get('ca_certs')
        self.cert_file = ssl_certs.get('cert_file')
        self.key_file = ssl_certs.get('key_file')

        # TODO(rst): maybe tis needs to be tested when the server is started
        # https requires a full set of certificates up front.
        if poa.scheme == 'https' and not (self.ca_certs and self.cert_file and self.key_file):
            raise Exception()

    def start(self):
        # Imported lazily so the HTTP stack is only required when used.
        from flask import (
            Flask,
            request,
            Response,
        )
        from gevent import signal as gevent_signal
        from signal import (
            SIGTERM,
            SIGINT,
        )

        app = Flask(__name__)

        @app.after_request
        def attach_headers(response):
            # Echo the oneM2M request id back to the sender.
            response.headers['x-m2m-ri'] = request.headers['x-m2m-ri']
            return response

        @app.route('/', methods=['POST'])
        def index():
            assert 'x-m2m-origin' in request.headers, 'No originator set'
            assert 'x-m2m-ri' in request.headers, 'Missing request id'
            assert 'content-type' in request.headers, 'Unspecified content type'

            # Decode the body per its content type and dispatch to the
            # manager's callback.
            notification = self._unpack_notification(get_onem2m_decoder(request.content_type).decode(request.data))
            self._callback(request.headers['x-m2m-origin'], **notification)

            # oneM2M status 2000 signals success.
            return Response(
                headers={
                    'x-m2m-rsc': 2000,
                },
            )

        if self._endpoint.scheme == 'https':
            self.server = WSGIServer(
                (
                    self._endpoint.hostname,
                    self._endpoint.port or 6050
                ),
                application=app,
                keyfile=self.key_file, certfile=self.cert_file, ca_certs=self.ca_certs
            )
        else:
            self.server = WSGIServer(
                (
                    self._endpoint.hostname,
                    self._endpoint.port or 6050
                ),
                application=app,
            )
        # Stop the server gracefully on SIGINT/SIGTERM; serve in a greenlet.
        gevent_signal(SIGINT, self.server.stop)
        gevent_signal(SIGTERM, self.server.stop)
        spawn(self.server.serve_forever)

    def stop(self):
        self.server.stop()

register_handler(HttpNotificationHandler, ('http', 'https'))
|
@ -1,744 +0,0 @@
|
||||
from base64 import (
|
||||
b64decode,
|
||||
b64encode,
|
||||
)
|
||||
from datetime import datetime
|
||||
from gevent import (
|
||||
spawn,
|
||||
spawn_later,
|
||||
)
|
||||
from iso8601 import parse_date
|
||||
from json import (
|
||||
dumps as json_dumps,
|
||||
loads as json_loads,
|
||||
)
|
||||
from futile.logging import LoggerMixin
|
||||
import logging
|
||||
from openmtc.util import (
|
||||
UTC,
|
||||
datetime_now,
|
||||
datetime_the_future,
|
||||
)
|
||||
from openmtc_app.flask_runner import FlaskRunner
|
||||
from openmtc_app.notification import NotificationManager
|
||||
from openmtc_onem2m.exc import (
|
||||
CSENotFound,
|
||||
CSENotImplemented,
|
||||
STATUS_CONFLICT,
|
||||
)
|
||||
from openmtc_onem2m.mapper import OneM2MMapper
|
||||
from openmtc_onem2m.model import (
|
||||
AE,
|
||||
Container,
|
||||
ContentInstance,
|
||||
EncodingTypeE,
|
||||
get_short_member_name,
|
||||
NotificationEventTypeE,
|
||||
EventNotificationCriteria)
|
||||
from openmtc_onem2m.serializer import get_onem2m_decoder
|
||||
from openmtc_onem2m.transport import OneM2MErrorResponse
|
||||
import time
|
||||
import re
|
||||
from urllib import urlencode
|
||||
|
||||
# Silence the chatty iso8601 parser logger.
logging.getLogger("iso8601").setLevel(logging.ERROR)

# fix missing SSLv3
# Newer gevent/OpenSSL builds drop SSLv3; alias it to TLSv1 so code that
# still references PROTOCOL_SSLv3 keeps working.
try:
    from gevent.ssl import PROTOCOL_SSLv3
except ImportError:
    import gevent.ssl

    gevent.ssl.PROTOCOL_SSLv3 = gevent.ssl.PROTOCOL_TLSv1
|
||||
|
||||
|
||||
class XAE(LoggerMixin):
|
||||
""" Generic OpenMTC application class.
|
||||
Implements functionality common to all typical OpenMTC applications.
|
||||
"""
|
||||
|
||||
# TODO(rst): add more features
|
||||
# support several AEs using the same App-ID and appName
|
||||
|
||||
name = None
|
||||
containers = ()
|
||||
labels = ()
|
||||
# default_access_right = True
|
||||
default_lifetime = 3600
|
||||
max_nr_of_instances = 3
|
||||
resume_registration = remove_registration = True
|
||||
notification_handlers = {}
|
||||
mapper = None
|
||||
notification_manager = None
|
||||
__app = None
|
||||
|
||||
def __init__(self, name=None, cse_base=None, expiration_time=None, announce_to=None, poas=None,
|
||||
originator_pre=None, ca_certs=None, cert_file=None, key_file=None, *args, **kw):
|
||||
super(XAE, self).__init__(*args, **kw)
|
||||
|
||||
self.__subscriptions = []
|
||||
|
||||
self.name = name or type(self).__name__
|
||||
self.cse_base = cse_base or "onem2m"
|
||||
|
||||
ae_id = "C" + self.name
|
||||
self.originator = (originator_pre + '/' + ae_id) if originator_pre else ae_id
|
||||
|
||||
self.ca_certs = ca_certs
|
||||
self.cert_file = cert_file
|
||||
self.key_file = key_file
|
||||
|
||||
if expiration_time is not None:
|
||||
if isinstance(expiration_time, (str, unicode)):
|
||||
expiration_time = parse_date(expiration_time)
|
||||
elif isinstance(expiration_time, (int, float)):
|
||||
expiration_time = datetime.fromtimestamp(expiration_time, UTC)
|
||||
|
||||
if not isinstance(expiration_time, datetime):
|
||||
raise ValueError(expiration_time)
|
||||
|
||||
self.default_lifetime = (expiration_time - datetime_now()).total_seconds()
|
||||
|
||||
self.announceTo = announce_to
|
||||
|
||||
self.__resumed_registration = False
|
||||
self.__known_containers = set()
|
||||
self.__shutdown = False
|
||||
|
||||
self.allow_duplicate = None
|
||||
self.runner = None
|
||||
self.poas = poas or []
|
||||
|
||||
self.fmt_json_regex = re.compile(r'^application/(?:[^+]+\+)?json$', re.IGNORECASE)
|
||||
self.fmt_xml_regex = re.compile(r'^application/(?:[^+]+\+)?xml$', re.IGNORECASE)
|
||||
|
||||
def get_expiration_time(self):
|
||||
if self.default_lifetime is None:
|
||||
return None
|
||||
return datetime_the_future(self.default_lifetime)
|
||||
|
||||
@property
|
||||
def application(self):
|
||||
return self.__app
|
||||
|
||||
def run(self, runner, cse, allow_duplicate=True):
|
||||
self.mapper = OneM2MMapper(cse, originator=self.originator, ca_certs=self.ca_certs,
|
||||
cert_file=self.cert_file, key_file=self.key_file)
|
||||
self.notification_manager = NotificationManager(self.poas, cse, self.mapper,
|
||||
ca_certs=self.ca_certs,
|
||||
cert_file=self.cert_file,
|
||||
key_file=self.key_file)
|
||||
|
||||
self.allow_duplicate = allow_duplicate
|
||||
self.runner = runner
|
||||
self.register()
|
||||
|
||||
def shutdown(self):
|
||||
""" Graceful shutdown.
|
||||
Deletes all Applications and Subscriptions.
|
||||
"""
|
||||
try:
|
||||
self._on_shutdown()
|
||||
except:
|
||||
self.logger.exception("Error in shutdown handler")
|
||||
|
||||
self.logger.debug("shutdown handler is finished")
|
||||
|
||||
self.__shutdown = True
|
||||
|
||||
self.notification_manager.shutdown()
|
||||
|
||||
self._remove_apps()
|
||||
|
||||
def _remove_apps(self):
|
||||
if self.remove_registration:
|
||||
try:
|
||||
if self.__app:
|
||||
self.mapper.delete(self.__app)
|
||||
except:
|
||||
pass
|
||||
self.logger.debug("app deleted")
|
||||
|
||||
@staticmethod
|
||||
def run_forever(period=1000, func=None, *args, **kw):
|
||||
""" executes a given function repeatingly at a given interval
|
||||
:param period: (optional) frequency of repeated execution (in Hz)
|
||||
:param func: (optional) function to be executed
|
||||
"""
|
||||
|
||||
if func is None:
|
||||
def func(*_):
|
||||
pass
|
||||
|
||||
def run_periodically():
|
||||
func(*args, **kw)
|
||||
spawn_later(period, run_periodically)
|
||||
|
||||
return spawn(run_periodically)
|
||||
|
||||
def periodic_discover(self, path, fc, interval, cb, err_cb=None):
|
||||
""" starts periodic discovery at a given frequency
|
||||
:param path: start directory inside cse for discovery
|
||||
:param fc: filter criteria (what to discover)
|
||||
:param interval: frequency of repeated discovery (in Hz)
|
||||
:param cb: callback function to return the result of the discovery to
|
||||
:param err_cb: (optional) callback function for errors to return the error of the discovery to
|
||||
"""
|
||||
if not isinstance(fc, dict):
|
||||
fc = {}
|
||||
|
||||
def run_discovery(o):
|
||||
try:
|
||||
cb(self.discover(path, o))
|
||||
except OneM2MErrorResponse as error_response:
|
||||
if err_cb:
|
||||
return err_cb(error_response)
|
||||
else:
|
||||
o['createdAfter'] = datetime_now()
|
||||
|
||||
spawn_later(interval, run_discovery, o)
|
||||
|
||||
return spawn(run_discovery, fc)
|
||||
|
||||
def register(self):
|
||||
""" Registers the Application with the CSE. """
|
||||
self.logger.info("Registering application as %s." % (self.name,))
|
||||
try:
|
||||
poa = self.notification_manager.endpoints
|
||||
except AttributeError:
|
||||
poa = []
|
||||
app = AE(resourceName=self.name, labels=list(self.labels),
|
||||
pointOfAccess=poa)
|
||||
app.announceTo = self.announceTo
|
||||
app.requestReachability = bool(poa)
|
||||
|
||||
try:
|
||||
registration = self.create_application(app)
|
||||
except OneM2MErrorResponse as error_response:
|
||||
if error_response.response_status_code is STATUS_CONFLICT:
|
||||
registration = self._handle_registration_conflict(app)
|
||||
if not registration:
|
||||
raise
|
||||
else:
|
||||
self.logger.error('Error at start up')
|
||||
self.logger.error(error_response.response_status_code)
|
||||
raise SystemExit
|
||||
self.__app = registration
|
||||
|
||||
assert registration.path
|
||||
|
||||
try:
|
||||
self._on_register()
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except:
|
||||
self.logger.exception("Error on initialization")
|
||||
raise
|
||||
|
||||
def _handle_registration_conflict(self, app):
|
||||
if not self.resume_registration:
|
||||
return None
|
||||
# TODO(rst): update app here for expiration_time and poas
|
||||
|
||||
app = self.get_application(app)
|
||||
|
||||
self.__start_refresher(app)
|
||||
|
||||
self.__resumed_registration = True
|
||||
|
||||
return app
|
||||
|
||||
def emit(self, event, message=None):
|
||||
""" Websocket emit. """
|
||||
if not isinstance(self.runner, FlaskRunner):
|
||||
raise RuntimeError('Runner is not supporting emit!')
|
||||
self.runner.emit(event, message)
|
||||
|
||||
def _on_register(self):
|
||||
pass
|
||||
|
||||
def _on_shutdown(self):
|
||||
pass
|
||||
|
||||
def get_application(self, application, path=None):
|
||||
""" Retrieves an Application resource.
|
||||
:param application: old app instance or appId
|
||||
:param path: (optional) path in the resource tree
|
||||
"""
|
||||
if path is None:
|
||||
# FIXME(rst): use app path and not cse base path
|
||||
path = self.cse_base
|
||||
|
||||
if not isinstance(application, AE):
|
||||
application = AE(resourceName=application)
|
||||
|
||||
name = application.resourceName
|
||||
|
||||
path = "%s/%s" % (path, name) if path else name
|
||||
app = self.mapper.get(path)
|
||||
|
||||
self.logger.debug("retrieved app: %s" % app)
|
||||
|
||||
return app
|
||||
|
||||
def create_application(self, application, path=None):
|
||||
""" Creates an Application resource.
|
||||
|
||||
:param application: Application instance or appId as str
|
||||
:param path: (optional) path in the resource tree
|
||||
"""
|
||||
# TODO(rst): set app_id via config
|
||||
# TODO(rst): set requestReachability based on used runner
|
||||
if path is None:
|
||||
path = self.cse_base
|
||||
|
||||
def restore_app(app):
|
||||
self.logger.warn("Restoring app: %s", app.path)
|
||||
app.expirationTime = None
|
||||
self.create_application(app, path=path)
|
||||
|
||||
if not isinstance(application, AE):
|
||||
application = AE(resourceName=application, App_ID='dummy', requestReachability=False)
|
||||
else:
|
||||
if not application.App_ID:
|
||||
application.App_ID = 'dummy'
|
||||
if not application.requestReachability:
|
||||
application.requestReachability = False
|
||||
|
||||
application.expirationTime = application.expirationTime or self.get_expiration_time()
|
||||
app = self.mapper.create(path, application)
|
||||
self.logger.debug("Created application at %s", app.path)
|
||||
app = self.get_application(application, path)
|
||||
assert app.path
|
||||
self.__start_refresher(app, restore=restore_app)
|
||||
self.logger.info("Registration successful: %s." % (app.path,))
|
||||
|
||||
# TODO(rst): handle when ACP is reimplemented
|
||||
# if accessRight:
|
||||
# if not isinstance(accessRight, AccessRight):
|
||||
# accessRight = AccessRight(
|
||||
# id="ar",
|
||||
# selfPermissions={"permission": [{
|
||||
# "id": "perm",
|
||||
# "permissionFlags": {
|
||||
# "flag": ["READ", "WRITE", "CREATE", "DELETE"]
|
||||
# },
|
||||
# "permissionHolders": {
|
||||
# "all": "all"
|
||||
# }
|
||||
# }]},
|
||||
# permissions={"permission": [{
|
||||
# "id": "perm",
|
||||
# "permissionFlags": {
|
||||
# "flag": ["READ", "WRITE", "CREATE", "DELETE"]
|
||||
# },
|
||||
# "permissionHolders": {
|
||||
# "all": "all"
|
||||
# }
|
||||
# }]}
|
||||
# )
|
||||
# accessRight = self.create_accessRight(app, accessRight)
|
||||
#
|
||||
# app.accessRightID = accessRight.path
|
||||
#
|
||||
# self.mapper.update(app, ("accessRightID",))
|
||||
|
||||
return app
|
||||
|
||||
# TODO(rst): use FilterCriteria from model and convert
|
||||
def discover(self, path=None, filter_criteria=None, unstructured=False):
|
||||
""" Discovers Container resources.
|
||||
|
||||
:param path: (optional) the target path to start the discovery
|
||||
:param filter_criteria: (optional) FilterCriteria for the for the discovery
|
||||
:param unstructured: (optional) set discovery_result_type
|
||||
"""
|
||||
if path is None:
|
||||
path = self.cse_base
|
||||
|
||||
# TODO(rst): use filter_criteria from model
|
||||
if not filter_criteria:
|
||||
filter_criteria = {}
|
||||
path += "?fu=1"
|
||||
if filter_criteria:
|
||||
path += "&" + urlencode(
|
||||
{
|
||||
get_short_member_name(k): v for k, v in filter_criteria.iteritems()
|
||||
},
|
||||
True
|
||||
)
|
||||
|
||||
path += '&drt' + str(1 if unstructured else 2)
|
||||
|
||||
discovery = self.mapper.get(path)
|
||||
|
||||
return discovery.CONTENT
|
||||
|
||||
def create_container(self, target, container, labels=None, max_nr_of_instances=None):
|
||||
""" Creates a Container resource.
|
||||
|
||||
:param target: the target resource/path parenting the Container
|
||||
:param container: the Container resource or a valid container ID
|
||||
:param labels: (optional) the container's labels
|
||||
:param max_nr_of_instances: (optional) the container's maximum number
|
||||
of instances (0=unlimited)
|
||||
"""
|
||||
|
||||
def restore_container(c):
|
||||
self.logger.warn("Restoring container: %s", c.path)
|
||||
c.expirationTime = None
|
||||
self.__known_containers.remove(c.path)
|
||||
self.create_container(target, c, labels=labels)
|
||||
|
||||
if target is None:
|
||||
target = self.__app
|
||||
|
||||
if not isinstance(container, Container):
|
||||
container = Container(resourceName=container)
|
||||
|
||||
# if we got max instances..set them
|
||||
if max_nr_of_instances:
|
||||
container.maxNrOfInstances = max_nr_of_instances
|
||||
# if we did not set max instances yet, set them
|
||||
else:
|
||||
container.maxNrOfInstances = self.max_nr_of_instances
|
||||
|
||||
if container.expirationTime is None:
|
||||
container.expirationTime = self.get_expiration_time()
|
||||
|
||||
if labels:
|
||||
container.labels = labels
|
||||
|
||||
path = getattr(target, "path", target)
|
||||
|
||||
try:
|
||||
container = self.mapper.create(path, container)
|
||||
except OneM2MErrorResponse as error_response:
|
||||
if error_response.response_status_code is STATUS_CONFLICT:
|
||||
c_path = path + '/' + container.resourceName
|
||||
container.path = c_path
|
||||
if (self.__resumed_registration and
|
||||
c_path not in self.__known_containers):
|
||||
container = self.mapper.update(container)
|
||||
else:
|
||||
raise error_response
|
||||
else:
|
||||
raise error_response
|
||||
|
||||
self.__known_containers.add(container.path)
|
||||
self.__start_refresher(container, restore=restore_container)
|
||||
self.logger.info("Container created: %s." % (container.path,))
|
||||
return container
|
||||
|
||||
# TODO(rst): handle when ACP is reimplemented
|
||||
# def create_access_right(self, application, accessRight):
|
||||
# """ Creates an AccessRight resource.
|
||||
#
|
||||
# :param application: the Application which will contain the AR
|
||||
# :param accessRight: the AccessRight instance
|
||||
# """
|
||||
# self.logger.debug("Creating accessRight for %s", application)
|
||||
#
|
||||
# if application is None:
|
||||
# application = self.__app
|
||||
# assert application.path
|
||||
#
|
||||
# path = getattr(application, "path", application)
|
||||
#
|
||||
# if not path.endswith("/accessRights"):
|
||||
# path += "/accessRights"
|
||||
#
|
||||
# accessRight = self.mapper.create(path, accessRight)
|
||||
# accessRight = self.mapper.get(accessRight.path)
|
||||
# self.__start_refresher(accessRight, extra_fields=["selfPermissions"])
|
||||
# self.logger.info("accessRight created: %s." % (accessRight.path,))
|
||||
# return accessRight
|
||||
#
|
||||
# create_accessRight = create_access_right
|
||||
|
||||
def get_resource(self, path, app_local=False):
|
||||
if app_local:
|
||||
path = self.__app.path + '/' + path
|
||||
|
||||
if not path:
|
||||
return None
|
||||
|
||||
try:
|
||||
return self.mapper.get(path)
|
||||
except OneM2MErrorResponse:
|
||||
return None
|
||||
|
||||
def push_content(self, container, content, fmt=None, text=None):
|
||||
""" Creates a ContentInstance resource in the given container,
|
||||
wrapping the content.
|
||||
Defaults to serialising the content as JSON and base64 encodes it.
|
||||
NOTE: Will attempt to create the container, if not found.
|
||||
|
||||
:param container: Container object or container path string
|
||||
:param content: the content data
|
||||
:param fmt:
|
||||
:param text:
|
||||
"""
|
||||
path = getattr(container, "path", container)
|
||||
|
||||
if isinstance(content, (str, unicode)):
|
||||
fmt = 'text/plain' if fmt is None else fmt
|
||||
text = True if text is None else text
|
||||
elif isinstance(content, (dict, list)):
|
||||
fmt = 'application/json' if fmt is None else fmt
|
||||
text = False if text is None else text
|
||||
else:
|
||||
raise CSENotImplemented("Only dict, list and str are supported!")
|
||||
|
||||
if re.search(self.fmt_json_regex, fmt):
|
||||
if text:
|
||||
# TODO(rst): check if it should be with masked quotation marks
|
||||
con = json_dumps(content)
|
||||
cnf = fmt + ':' + str(EncodingTypeE.plain.value)
|
||||
# raise CSENotImplemented("Only json as b64 is supported!")
|
||||
else:
|
||||
con = b64encode(json_dumps(content))
|
||||
cnf = fmt + ':' + str(EncodingTypeE.base64String.value)
|
||||
elif fmt == 'text/plain':
|
||||
if text:
|
||||
con = content
|
||||
cnf = fmt + ':' + str(EncodingTypeE.plain.value)
|
||||
else:
|
||||
con = b64encode(content)
|
||||
cnf = fmt + ':' + str(EncodingTypeE.base64String.value)
|
||||
else:
|
||||
# TODO(rst): add handling of other formats or raise not implemented
|
||||
raise CSENotImplemented("Only json and text are supported!")
|
||||
|
||||
return self.mapper.create(path, ContentInstance(
|
||||
content=con,
|
||||
contentInfo=cnf,
|
||||
))
|
||||
|
||||
@staticmethod
|
||||
def _get_content_from_cin(cin):
|
||||
if isinstance(cin, ContentInstance):
|
||||
# TODO(rst): handle contentInfo and decode
|
||||
# resource.contentInfo -> application/json:1
|
||||
# media, encodingType = split(':')
|
||||
# encodingType = 1 -> base64.decodeString(resource.content)
|
||||
# encodingType = 2 -> not supported
|
||||
media_type, encoding_type = cin.contentInfo.split(':')
|
||||
content = cin.content
|
||||
try:
|
||||
if int(encoding_type) == EncodingTypeE.base64String:
|
||||
content = b64decode(content)
|
||||
|
||||
if media_type == 'application/json':
|
||||
content = json_loads(content)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
return content
|
||||
|
||||
return cin
|
||||
|
||||
def get_content(self, container):
|
||||
""" Retrieve the latest ContentInstance of a Container.
|
||||
|
||||
:param container: Container object or container path string
|
||||
"""
|
||||
return self._get_content_from_cin(
|
||||
self.mapper.get(
|
||||
getattr(container, 'path', container) + '/latest'
|
||||
)
|
||||
)
|
||||
|
||||
def _get_notification_data(self, data, content_type):
|
||||
try:
|
||||
return get_onem2m_decoder(content_type).\
|
||||
decode(data).\
|
||||
notificationEvent.\
|
||||
representation
|
||||
# serializer = get_onem2m_decoder(content_type)
|
||||
# notification = serializer.decode(data)
|
||||
# resource = notification.notificationEvent.representation
|
||||
# return resource
|
||||
except (KeyError, TypeError, ValueError, IndexError):
|
||||
self.logger.error("Failed to get notification data from %s" % data)
|
||||
return None
|
||||
|
||||
def _remove_route(self, route):
|
||||
self.logger.debug("removing route: %s", route)
|
||||
self.runner.flask_app.url_map._rules = filter(
|
||||
lambda x: x.rule != route,
|
||||
self.runner.flask_app.url_map._rules
|
||||
)
|
||||
|
||||
def _add_subscription(self, path, _, handler, delete_handler, filter_criteria=None, expiration_time=None):
|
||||
params = {
|
||||
'filter_criteria': filter_criteria,
|
||||
'expiration_time': expiration_time,
|
||||
}
|
||||
self.add_subscription_handler(path, handler, **params)
|
||||
# self.notification_manager.subscribe(path, handler, **params)
|
||||
if delete_handler:
|
||||
params['types'] = (NotificationEventTypeE.deleteOfResource,)
|
||||
self.add_subscription_handler(path, delete_handler, **params)
|
||||
|
||||
def add_subscription(self, path, handler, delete_handler=None):
|
||||
""" Creates a subscription resource at path.
|
||||
And registers handler to receive notification data.
|
||||
|
||||
:param path: path to subscribe to
|
||||
:param handler: notification handler
|
||||
:param delete_handler: reference to delete handling function
|
||||
"""
|
||||
self._add_subscription(path, None, handler, delete_handler)
|
||||
|
||||
def add_subscription_handler(self, path, handler, types=(NotificationEventTypeE.updateOfResource, ),
|
||||
filter_criteria=None, expiration_time=None):
|
||||
"""
|
||||
|
||||
:param path:
|
||||
:param handler:
|
||||
:param types:
|
||||
:param filter_criteria:
|
||||
:param expiration_time:
|
||||
:return:
|
||||
"""
|
||||
def subscribe():
|
||||
return self.notification_manager.subscribe(
|
||||
path,
|
||||
handler,
|
||||
notification_types=types,
|
||||
filter_criteria=filter_criteria,
|
||||
expiration_time=expiration_time
|
||||
)
|
||||
|
||||
subscription = subscribe()
|
||||
|
||||
def restore_subscription():
|
||||
# called to recreate the subscription
|
||||
# for some reason subscription is not assigned here,
|
||||
# so we make it a parameter
|
||||
self.logger.warn("Restoring subscription: %s", subscription.name)
|
||||
self.notification_manager.unsubscribe(subscription.subscriberURI or subscription.path)
|
||||
subscribe()
|
||||
|
||||
# refresh expirationTime regularly
|
||||
# TODO(sho): This should rather be handled through the notification manager itself
|
||||
self.__start_refresher(subscription, restore=restore_subscription)
|
||||
return subscription
|
||||
|
||||
def add_container_subscription(self, container, handler,
|
||||
delete_handler=None, filter_criteria=None):
|
||||
""" Creates a Subscription to the ContentInstances of the given
|
||||
Container.
|
||||
|
||||
:param container: Container object or container path string
|
||||
:param handler: reference of the notification handling function
|
||||
:param delete_handler: reference to delete handling function
|
||||
:param filter_criteria: (optional) FilterCriteria for the subscription
|
||||
"""
|
||||
|
||||
path = getattr(container, "path", container)
|
||||
|
||||
# check if target is container
|
||||
if not isinstance(self.mapper.get(path), Container):
|
||||
raise RuntimeError('Target is not a container.')
|
||||
|
||||
# event notification criteria
|
||||
filter_criteria = filter_criteria or EventNotificationCriteria()
|
||||
filter_criteria.notificationEventType = list([
|
||||
NotificationEventTypeE.createOfDirectChildResource,
|
||||
])
|
||||
|
||||
def content_handler(cin):
|
||||
handler(path, self._get_content_from_cin(cin))
|
||||
|
||||
self._add_subscription(
|
||||
path,
|
||||
None,
|
||||
content_handler,
|
||||
delete_handler,
|
||||
filter_criteria
|
||||
)
|
||||
|
||||
def __start_refresher(self, instance, extra_fields=(), restore=None):
|
||||
""" Starts a threading.Timer chain,
|
||||
to repeatedly update a resource instance's expirationTime.
|
||||
NOTE: instance.expirationTime should already be set and the instance
|
||||
created.
|
||||
|
||||
:param instance: resource instance
|
||||
:param extra_fields: additional fields, needed in the update request
|
||||
:param restore: function that will restore the instance, if it has
|
||||
expired accidentally. Has to restart the refresher.
|
||||
"""
|
||||
if not instance.expirationTime:
|
||||
return
|
||||
interval = time.mktime(instance.expirationTime.timetuple()) - (time.time() + time.timezone)
|
||||
if interval > 120:
|
||||
interval -= 60
|
||||
else:
|
||||
interval = max(1, interval * 0.75)
|
||||
|
||||
self.logger.debug("Will update expiration time of %s in %s seconds", instance, interval)
|
||||
self.runner.set_timer(interval, self.__update_exp_time, instance=instance, extra_fields=extra_fields, restore=restore)
|
||||
|
||||
def start_refresher(self, instance, extra_fields=(), restore=None):
|
||||
self.__start_refresher(instance, extra_fields=extra_fields, restore=restore)
|
||||
|
||||
def __update_exp_time(self, instance=None, the_future=None, extra_fields=(),
|
||||
interval=None, offset=None, restore=None):
|
||||
""" Updates a resource instance's expirationTime to the_future
|
||||
or a default value sometime in the future.
|
||||
|
||||
:note: If instance is not provided or None or False, self.__app is
|
||||
updated.
|
||||
:note: Starts a new Timer.
|
||||
:param instance: resource instance to update
|
||||
:param the_future: new expirationTime value
|
||||
:param extra_fields: additional fields, needed in the update request
|
||||
:param interval: update interval
|
||||
:param offset: expirationTime offset (should be >0)
|
||||
:param restore: function that will restore the instance, if it has
|
||||
expired accidentally. Has to restart the refresher.
|
||||
:raise CSENotFound: If the instance could not be found and no restore
|
||||
was provided.
|
||||
"""
|
||||
self.logger.debug("updating ExpirationTime of %s", instance)
|
||||
if self.__shutdown:
|
||||
# not sure this ever happens.
|
||||
return
|
||||
|
||||
interval = interval or 60 * 10 # TODO make configurable
|
||||
offset = offset or 60 * 10 # 10min default
|
||||
if not the_future:
|
||||
the_future = datetime.utcfromtimestamp(time.time() + interval + offset)
|
||||
fields = ["expirationTime"]
|
||||
fields.extend(extra_fields)
|
||||
if not instance:
|
||||
# does this happen if the instance was deleted?
|
||||
instance = self.__app
|
||||
instance.expirationTime = the_future
|
||||
try:
|
||||
self.mapper.update(instance, fields)
|
||||
except CSENotFound as e:
|
||||
self.logger.warn("ExpirationTime update of %s failed: %s", instance, e)
|
||||
# subscription disappeared?
|
||||
# missed the expirationTime?
|
||||
# mb sync issue?; mb congestion?
|
||||
if restore:
|
||||
restore(instance)
|
||||
return
|
||||
else:
|
||||
raise
|
||||
# NOTE: expirationTime might have been changed by CSE at this point.
|
||||
# update could/should return the updated instance in this case, but
|
||||
# doesnt. => additional GET to confirm expirationTime ?
|
||||
|
||||
self.logger.debug("Will update expiration time in %s seconds", interval)
|
||||
self.runner.set_timer(
|
||||
interval,
|
||||
self.__update_exp_time,
|
||||
instance=instance,
|
||||
extra_fields=extra_fields,
|
||||
restore=restore,
|
||||
)
|
@ -1,51 +0,0 @@
|
||||
from gevent import spawn_later, wait
|
||||
|
||||
from futile.logging import LoggerMixin
|
||||
|
||||
|
||||
class AppRunner(LoggerMixin):
|
||||
def __init__(self, m2m_app, *args, **kw):
|
||||
super(AppRunner, self).__init__(*args, **kw)
|
||||
|
||||
self._timers = set()
|
||||
self.m2m_app = m2m_app
|
||||
self.m2m_ep = None
|
||||
|
||||
def run(self, m2m_ep):
|
||||
self.m2m_ep = m2m_ep
|
||||
|
||||
try:
|
||||
self._run()
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
self.logger.info("Exiting...")
|
||||
except Exception:
|
||||
self.logger.exception("Error")
|
||||
raise
|
||||
finally:
|
||||
self.logger.debug("Shutting down")
|
||||
self._shutdown_app()
|
||||
for timer in self._timers:
|
||||
timer.kill()
|
||||
|
||||
def _run(self):
|
||||
self.m2m_app.run(self, self.m2m_ep)
|
||||
|
||||
wait()
|
||||
|
||||
def _shutdown_app(self):
|
||||
self.m2m_app.shutdown()
|
||||
|
||||
def set_timer(self, t, f, *args, **kw):
|
||||
timer = None
|
||||
|
||||
def wrapper():
|
||||
self._timers.discard(timer)
|
||||
f(*args, **kw)
|
||||
|
||||
timer = spawn_later(t, wrapper)
|
||||
self._timers.add(timer)
|
||||
return timer
|
||||
|
||||
def cancel_timer(self, timer):
|
||||
self._timers.discard(timer)
|
||||
timer.kill()
|
@ -1,75 +0,0 @@
|
||||
import sys
|
||||
from json import load as json_load
|
||||
from operator import getitem
|
||||
|
||||
import futile
|
||||
|
||||
|
||||
def prepare_app(parser, loader, name, default_config_file):
|
||||
parser.add_argument("-v", "--verbose", action="count", default=None,
|
||||
help="Increase verbosity in output. This option can be"
|
||||
" specified multiple times.")
|
||||
args = parser.parse_args()
|
||||
|
||||
module_ = loader.fullname.split("." + name).pop(0)
|
||||
|
||||
futile.logging.set_default_level(futile.logging.DEBUG)
|
||||
logger = futile.logging.get_logger(name)
|
||||
|
||||
config_locations = (".", "/etc/openmtc/" + module_)
|
||||
|
||||
try:
|
||||
import os.path
|
||||
for d in config_locations:
|
||||
config_file = os.path.join(os.path.abspath(d),
|
||||
default_config_file)
|
||||
logger.debug("Trying config file location: %s", config_file)
|
||||
if os.path.isfile(config_file):
|
||||
break
|
||||
else:
|
||||
raise Exception("Configuration file %s not found in any of these "
|
||||
"locations: %s" % default_config_file,
|
||||
config_locations)
|
||||
except Exception as e:
|
||||
sys.stderr.write(str(e) + "\n")
|
||||
sys.exit(2)
|
||||
|
||||
try:
|
||||
with open(config_file) as f:
|
||||
logger.info("Reading configuration file %s.", config_file)
|
||||
config = json_load(f)
|
||||
except IOError as e:
|
||||
logger.warning("Failed to read configuration file %s: %s",
|
||||
config_file, e)
|
||||
config = {}
|
||||
except Exception as e:
|
||||
logger.critical("Error reading configuration file %s: %s",
|
||||
config_file, e)
|
||||
sys.exit(2)
|
||||
|
||||
if "logging" in config: # TODO init logging
|
||||
log_conf = config["logging"]
|
||||
if args.verbose is None:
|
||||
futile.logging.set_default_level(log_conf.get("level") or
|
||||
futile.logging.WARNING)
|
||||
elif args.verbose >= 2:
|
||||
futile.logging.set_default_level(futile.logging.DEBUG)
|
||||
else:
|
||||
futile.logging.set_default_level(futile.logging.INFO)
|
||||
logfile = log_conf.get("file")
|
||||
if logfile:
|
||||
futile.logging.add_log_file(logfile)
|
||||
else:
|
||||
futile.logging.set_default_level(futile.logging.DEBUG)
|
||||
|
||||
return args, config
|
||||
|
||||
|
||||
def get_value(name, value_type, default_value, args, config):
|
||||
try:
|
||||
value = (getattr(args, name.replace(".", "_"), None) or
|
||||
reduce(getitem, name.split("."), config))
|
||||
except KeyError:
|
||||
value = None
|
||||
value = value if isinstance(value, value_type) else default_value
|
||||
return value
|
@ -1,2 +0,0 @@
|
||||
from openmtc_onem2m.transport import AdditionalInformation, MetaInformation, \
|
||||
OneM2MRequest, OneM2MResponse
|
@ -1,23 +0,0 @@
|
||||
from abc import abstractmethod
|
||||
from futile import LoggerMixin
|
||||
|
||||
|
||||
def normalize_path(path):
|
||||
if not path:
|
||||
return ''
|
||||
if path.startswith('//'):
|
||||
# abs CSE
|
||||
return '/_' + path[1:]
|
||||
elif path.startswith('/'):
|
||||
# sp rel CSE
|
||||
return '/~' + path
|
||||
return path
|
||||
|
||||
|
||||
class OneM2MClient(LoggerMixin):
|
||||
def __init__(self):
|
||||
super(OneM2MClient, self).__init__()
|
||||
|
||||
@abstractmethod
|
||||
def send_onem2m_request(self, onem2m_request):
|
||||
pass
|
@ -1,217 +0,0 @@
|
||||
import urllib
|
||||
import ssl
|
||||
from socket import (
|
||||
gaierror,
|
||||
error as socket_error,
|
||||
)
|
||||
from time import time
|
||||
from urlparse import urlparse
|
||||
from aplus import Promise
|
||||
from futile.caching import LRUCache
|
||||
from geventhttpclient.client import HTTPClient
|
||||
from geventhttpclient.response import HTTPResponse
|
||||
from openmtc.exc import (
|
||||
OpenMTCNetworkError,
|
||||
ConnectionFailed,
|
||||
)
|
||||
from openmtc_onem2m.exc import (
|
||||
get_error_class,
|
||||
get_response_status,
|
||||
ERROR_MIN,
|
||||
)
|
||||
from openmtc_onem2m.model import (
|
||||
ResourceTypeE,
|
||||
get_short_attribute_name,
|
||||
get_short_member_name,
|
||||
)
|
||||
from openmtc_onem2m.serializer.util import (
|
||||
decode_onem2m_content,
|
||||
encode_onem2m_content,
|
||||
)
|
||||
from openmtc_onem2m.transport import (
|
||||
OneM2MOperation,
|
||||
OneM2MResponse,
|
||||
OneM2MErrorResponse,
|
||||
)
|
||||
from . import (
|
||||
OneM2MClient,
|
||||
normalize_path,
|
||||
)
|
||||
|
||||
_method_map_to_http = {
|
||||
OneM2MOperation.create: 'POST',
|
||||
OneM2MOperation.retrieve: 'GET',
|
||||
OneM2MOperation.update: 'PUT',
|
||||
OneM2MOperation.delete: 'DELETE',
|
||||
OneM2MOperation.notify: 'POST',
|
||||
}
|
||||
|
||||
_clients = LRUCache(threadsafe=False)
|
||||
|
||||
_query_params = frozenset(['rt', 'rp', 'rcn', 'da', 'drt'])
|
||||
|
||||
_header_to_field_map = {
|
||||
'X-M2M-ORIGIN': 'originator',
|
||||
'X-M2M-RI': 'rqi',
|
||||
'X-M2M-GID': 'gid',
|
||||
'X-M2M-OT': 'ot',
|
||||
'X-M2M-RST': 'rset',
|
||||
'X-M2M-RET': 'rqet',
|
||||
'X-M2M-OET': 'oet',
|
||||
'X-M2M-EC': 'ec',
|
||||
}
|
||||
|
||||
|
||||
def get_client(m2m_ep, use_xml=False, ca_certs=None, cert_file=None, key_file=None,
|
||||
insecure=False):
|
||||
try:
|
||||
return _clients[(m2m_ep, use_xml)]
|
||||
except KeyError:
|
||||
# TODO: make connection_timeout and concurrency configurable
|
||||
client = _clients[(m2m_ep, use_xml)] = OneM2MHTTPClient(
|
||||
m2m_ep, use_xml, ca_certs, cert_file, key_file, insecure)
|
||||
return client
|
||||
|
||||
|
||||
class OneM2MHTTPClient(OneM2MClient):
|
||||
# defaults
|
||||
DEF_SSL_VERSION = ssl.PROTOCOL_TLSv1_2
|
||||
|
||||
def __init__(self, m2m_ep, use_xml, ca_certs=None, cert_file=None, key_file=None,
|
||||
insecure=False):
|
||||
super(OneM2MHTTPClient, self).__init__()
|
||||
|
||||
self.parsed_url = urlparse(m2m_ep)
|
||||
is_https = self.parsed_url.scheme[-1].lower() == "s"
|
||||
port = self.parsed_url.port or (is_https and 443 or 80)
|
||||
host = self.parsed_url.hostname
|
||||
self.path = self.parsed_url.path.rstrip('/')
|
||||
if self.path and not self.path.endswith('/'):
|
||||
self.path += '/'
|
||||
|
||||
# TODO(rst): handle IPv6 host here
|
||||
# geventhttpclient sets incorrect host header
|
||||
# i.e "host: ::1:8000" instead of "host: [::1]:8000
|
||||
if (is_https and ca_certs is not None and cert_file is not None and
|
||||
key_file is not None):
|
||||
ssl_options = {
|
||||
"ca_certs": ca_certs,
|
||||
"certfile": cert_file,
|
||||
"keyfile": key_file,
|
||||
"ssl_version": self.DEF_SSL_VERSION
|
||||
}
|
||||
else:
|
||||
ssl_options = None
|
||||
|
||||
client = HTTPClient(host, port, connection_timeout=120.0,
|
||||
concurrency=50, ssl=is_https,
|
||||
ssl_options=ssl_options, insecure=insecure)
|
||||
self.request = client.request
|
||||
|
||||
self.content_type = 'application/' + ('xml' if use_xml else 'json')
|
||||
|
||||
def _handle_network_error(self, exc, p, http_request, t,
|
||||
exc_class=OpenMTCNetworkError):
|
||||
error_str = str(exc)
|
||||
if error_str in ("", "''"):
|
||||
error_str = repr(exc)
|
||||
method = http_request["method"]
|
||||
path = http_request["request_uri"]
|
||||
log_path = "%s://%s/%s" % (self.parsed_url.scheme, self.parsed_url.netloc, path)
|
||||
error_msg = "Error during HTTP request: %s. " \
|
||||
"Request was: %s %s (%.4fs)" % (error_str, method, log_path, time() - t)
|
||||
p.reject(exc_class(error_msg))
|
||||
|
||||
def map_onem2m_request_to_http_request(self, onem2m_request):
|
||||
"""
|
||||
Maps a OneM2M request to a HTTP request
|
||||
:param onem2m_request: OneM2M request to be mapped
|
||||
:return: request: the resulting HTTP request
|
||||
"""
|
||||
self.logger.debug("Mapping OneM2M request to generic request: %s", onem2m_request)
|
||||
|
||||
params = {
|
||||
param: getattr(onem2m_request, param) for param in _query_params
|
||||
if getattr(onem2m_request, param) is not None
|
||||
}
|
||||
|
||||
if onem2m_request.fc is not None:
|
||||
filter_criteria = onem2m_request.fc
|
||||
params.update({
|
||||
(get_short_attribute_name(name) or get_short_member_name(name)): val
|
||||
for name, val in filter_criteria.get_values(True).iteritems()
|
||||
})
|
||||
|
||||
path = normalize_path(onem2m_request.to)
|
||||
|
||||
if params:
|
||||
path += '?' + urllib.urlencode(params, True)
|
||||
|
||||
content_type, data = encode_onem2m_content(onem2m_request.content, self.content_type, path=path)
|
||||
|
||||
# TODO(rst): check again
|
||||
# set resource type
|
||||
if onem2m_request.operation == OneM2MOperation.create:
|
||||
content_type += '; ty=' + str(ResourceTypeE[onem2m_request.resource_type.typename])
|
||||
|
||||
headers = {
|
||||
header: getattr(onem2m_request, field) for header, field in _header_to_field_map.iteritems()
|
||||
if getattr(onem2m_request, field) is not None
|
||||
}
|
||||
headers['content-type'] = content_type
|
||||
|
||||
self.logger.debug("Added request params: %s", params)
|
||||
|
||||
return {
|
||||
'method': _method_map_to_http[onem2m_request.operation],
|
||||
'request_uri': self.path + path,
|
||||
'body': data,
|
||||
'headers': headers,
|
||||
}
|
||||
|
||||
def map_http_response_to_onem2m_response(self, onem2m_request, response):
|
||||
"""
|
||||
Maps HTTP response to OneM2M response
|
||||
:param onem2m_request: the OneM2M request that created the response
|
||||
:param response: the HTTP response
|
||||
:return: resulting OneM2MResponse or OneM2MErrorResponse
|
||||
"""
|
||||
if not isinstance(response, HTTPResponse):
|
||||
self.logger.error("Not a valid response: %s", response)
|
||||
# return OneM2MErrorResponse(STATUS_INTERNAL_SERVER_ERROR)
|
||||
self.logger.debug("Mapping HTTP response for OneM2M response: %s", response)
|
||||
rsc = response.get("x-m2m-rsc", 5000)
|
||||
if int(rsc) >= ERROR_MIN:
|
||||
return OneM2MErrorResponse(
|
||||
get_error_class(rsc).response_status_code, onem2m_request)
|
||||
|
||||
return OneM2MResponse(
|
||||
get_response_status(rsc),
|
||||
request=onem2m_request,
|
||||
rsc=rsc,
|
||||
pc=decode_onem2m_content(response.read(), response.get("content-type"))
|
||||
)
|
||||
|
||||
def send_onem2m_request(self, onem2m_request):
|
||||
with Promise() as p:
|
||||
http_request = self.map_onem2m_request_to_http_request(onem2m_request)
|
||||
t = time()
|
||||
|
||||
try:
|
||||
response = self.request(**http_request)
|
||||
except (socket_error, gaierror) as exc:
|
||||
self._handle_network_error(exc, p, http_request, t, ConnectionFailed)
|
||||
except Exception as exc:
|
||||
self.logger.exception("Error in HTTP request")
|
||||
self._handle_network_error(exc, p, http_request, t)
|
||||
else:
|
||||
try:
|
||||
onem2m_response = self.map_http_response_to_onem2m_response(onem2m_request, response)
|
||||
if isinstance(onem2m_response, OneM2MErrorResponse):
|
||||
p.reject(onem2m_response)
|
||||
else:
|
||||
p.fulfill(onem2m_response)
|
||||
finally:
|
||||
response.release()
|
||||
|
||||
return p
|
@ -1,431 +0,0 @@
|
||||
from aplus import (
|
||||
Promise,
|
||||
)
|
||||
from collections import deque
|
||||
from futile.caching import LRUCache
|
||||
import gevent
|
||||
from gevent import monkey; monkey.patch_all()
|
||||
from . import OneM2MClient
|
||||
from openmtc.exc import ConnectionFailed, OpenMTCNetworkError
|
||||
from ..exc import (
|
||||
ERROR_MIN,
|
||||
CSEValueError,
|
||||
CSEError,
|
||||
)
|
||||
from ..serializer.util import (
|
||||
decode_onem2m_content,
|
||||
encode_onem2m_content,
|
||||
)
|
||||
from ..transport import (
|
||||
OneM2MRequest,
|
||||
OneM2MResponse,
|
||||
OneM2MErrorResponse,
|
||||
OneM2MOperation,
|
||||
)
|
||||
from ..model import ResourceTypeE
|
||||
import paho.mqtt.client as mqtt
|
||||
from simplejson import (
|
||||
JSONDecoder,
|
||||
JSONEncoder,
|
||||
JSONDecodeError,
|
||||
)
|
||||
from socket import error as SocketError
|
||||
from urlparse import urlparse
|
||||
|
||||
#: Dictionary mapping supported schemes to port numbers
|
||||
portmap = {
|
||||
'mqtt': 1883,
|
||||
'mqtts': 8883,
|
||||
# NB: The correct (i.e. registered with IANA) service-name for SSL/TLS-wrapped MQTT is 'secure-mqtt' in an effort to
|
||||
# prevent confusion with MQTT-S/N. But as the entire world seems to insist on using 'mqtts' (including TS 0010,
|
||||
# sec. 6.6) ... We are supporting both names here for maximum compliance and robustness.
|
||||
'secure-mqtt': 8883,
|
||||
}
|
||||
|
||||
MQTT_QOS_LEVEL = 1
|
||||
|
||||
_clients = LRUCache(threadsafe=False)
|
||||
|
||||
|
||||
def get_client(m2m_ep, use_xml=False, client_id=None, handle_request_func=None):
|
||||
"""
|
||||
|
||||
:param string m2m_ep:
|
||||
:param boolean use_xml:
|
||||
:param string client_id:
|
||||
:param fun handle_request_func:
|
||||
:return OneM2MMQTTClient:
|
||||
"""
|
||||
try:
|
||||
return _clients[(m2m_ep, use_xml)]
|
||||
except KeyError:
|
||||
_clients[(m2m_ep, use_xml)] = OneM2MMQTTClient(m2m_ep, use_xml, client_id, handle_request_func)
|
||||
return _clients[(m2m_ep, use_xml)]
|
||||
|
||||
|
||||
class OneM2MMQTTClient(OneM2MClient):
|
||||
"""
|
||||
This class provides for a transport over the MQTT protocol as described in TS 0010
|
||||
"""
|
||||
|
||||
__request_fields = frozenset([
|
||||
'op',
|
||||
'fr',
|
||||
'rqi',
|
||||
'ty',
|
||||
'pc',
|
||||
'rol',
|
||||
'ot',
|
||||
'rqet',
|
||||
'rset',
|
||||
'oet',
|
||||
'rt',
|
||||
'rp',
|
||||
'rcn',
|
||||
'ec',
|
||||
'da',
|
||||
'gid',
|
||||
'drt',
|
||||
'to',
|
||||
])
|
||||
|
||||
__response_fields = frozenset([
|
||||
'rsc',
|
||||
'rqi',
|
||||
'pc',
|
||||
'fr',
|
||||
'to',
|
||||
])
|
||||
|
||||
@staticmethod
|
||||
def _mqtt_mask(id):
|
||||
return id.lstrip('/').replace('/', ':')
|
||||
|
||||
@staticmethod
|
||||
def _build_topic(originator='+', receiver='+', type='req'):
|
||||
"""
|
||||
Helper function to create topic strings
|
||||
|
||||
:param string originator:
|
||||
:param string receiver:
|
||||
:param string type:
|
||||
:return string:
|
||||
"""
|
||||
return '/'.join([
|
||||
'/oneM2M',
|
||||
type,
|
||||
OneM2MMQTTClient._mqtt_mask(originator),
|
||||
OneM2MMQTTClient._mqtt_mask(receiver),
|
||||
])
|
||||
|
||||
def attach_callback(self):
|
||||
"""
|
||||
Wrapper function to attach callback handlers to the MQTT client. Functions attached in this manner are expected
|
||||
to have the same name as the handler they seek to implement.
|
||||
:return fun:
|
||||
"""
|
||||
def decorator(func):
|
||||
def wrapper(_self, *args, **kwargs):
|
||||
func(_self, *args, **kwargs)
|
||||
setattr(self._client, func.__name__, func)
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
def __init__(self, m2m_ep, _, client_id, handle_request_func=None, subscribe_sys_topics=False):
|
||||
"""
|
||||
:param str m2m_ep:
|
||||
:param bool _:
|
||||
:param str client_id:
|
||||
:param call handle_request_func:
|
||||
:param bool subscribe_sys_topics: Whether to subscribe to $SYS topics or not
|
||||
(cf <https://github.com/mqtt/mqtt.github.io/wiki/SYS-Topics>)
|
||||
"""
|
||||
super(OneM2MMQTTClient, self).__init__()
|
||||
parsed_url = urlparse(m2m_ep)
|
||||
self._target_id = parsed_url.fragment
|
||||
|
||||
self._encode = JSONEncoder().encode
|
||||
self._decode = JSONDecoder().decode
|
||||
|
||||
self._handle_request_func = handle_request_func
|
||||
|
||||
self._processed_request_ids = deque([], maxlen=200)
|
||||
self._request_promises = LRUCache(threadsafe=False, max_items=200)
|
||||
|
||||
if client_id is None:
|
||||
import random
|
||||
import string
|
||||
client_id = ''.join(random.sample(string.letters, 16))
|
||||
|
||||
self._client = mqtt.Client(
|
||||
clean_session=False,
|
||||
client_id='::'.join([
|
||||
'C' if client_id[0].lower() in ['c', 'm'] else 'A',
|
||||
self._mqtt_mask(client_id),
|
||||
]),
|
||||
)
|
||||
|
||||
@self.attach_callback()
|
||||
def on_connect(client, _, rc):
|
||||
"""
|
||||
:param mqtt.Client client:
|
||||
:param All userdata:
|
||||
:param integer rc:
|
||||
:return void:
|
||||
"""
|
||||
if not rc == mqtt.CONNACK_ACCEPTED:
|
||||
raise ConnectionFailed(mqtt.connack_string(rc))
|
||||
|
||||
def request_callback(client, _, message):
|
||||
"""
|
||||
Catch requests and
|
||||
|
||||
:param mqtt.Client client:
|
||||
:param All _:
|
||||
:param mqtt.MQTTMessage message:
|
||||
:return void:
|
||||
"""
|
||||
originator = message.topic.split('/')[3]
|
||||
try:
|
||||
request = self._decode(message.payload)
|
||||
except JSONDecodeError as e:
|
||||
self.logger.warn(
|
||||
'Got rubbish request from client %s: %s'
|
||||
% (originator, e.message, )
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
if request['rqi'] in self._processed_request_ids:
|
||||
self.logger.info('Request %s already processed; discarding duplicate.' % (request['rqi'], ))
|
||||
return
|
||||
else:
|
||||
rqi = request['rqi']
|
||||
except KeyError:
|
||||
self.logger.warn(
|
||||
'Special treatment for special request w/o request id from %s.'
|
||||
% (originator, )
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
request['pc'] = decode_onem2m_content(self._encode(request['pc']), 'application/json')
|
||||
request['ty'] = type(request['pc'])
|
||||
except KeyError:
|
||||
# No content, eh?
|
||||
request['ty'] = None
|
||||
|
||||
self.logger.debug('Decoded JSON request: %s' % (request, ))
|
||||
|
||||
op = OneM2MOperation._member_map_.values()[request['op'] - 1]
|
||||
to = request['to']
|
||||
del request['op'], request['to']
|
||||
|
||||
try:
|
||||
response = self._handle_request_func(
|
||||
OneM2MRequest(op, to, **request)
|
||||
).get()
|
||||
except OneM2MErrorResponse as response:
|
||||
self.logger.error('OneM2MError: %s' % (response.message, ))
|
||||
except CSEError as e:
|
||||
response = OneM2MErrorResponse(status_code=e.response_status_code, rqi=rqi)
|
||||
|
||||
if not response.rqi:
|
||||
# This really should not happen. No, really, it shouldn't.
|
||||
self.logger.debug(
|
||||
'FIXUP! FIXUP! FIXUP! Adding missing request identifier to response: %s'
|
||||
% (rqi, )
|
||||
)
|
||||
response.rqi = rqi
|
||||
|
||||
if response.content:
|
||||
response.content = self._decode(
|
||||
encode_onem2m_content(response.content, 'application/json', path=response.to)[1]
|
||||
)
|
||||
|
||||
self._publish_message(
|
||||
self._encode({
|
||||
k: getattr(response, k) for k in self.__response_fields if getattr(response, k) is not None
|
||||
}),
|
||||
self._build_topic(originator, client_id, type='resp'),
|
||||
)
|
||||
self._processed_request_ids.append(rqi)
|
||||
|
||||
def response_callback(client, _, message):
|
||||
"""
|
||||
|
||||
:param mqtt.Client client:
|
||||
:param All _:
|
||||
:param mqtt.MQTTMessage message:
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
response = self._decode(message.payload)
|
||||
except JSONDecodeError as e:
|
||||
self.logger.error('Discarding response w/ damaged payload: %s', (e.message, ))
|
||||
return
|
||||
|
||||
promise_key = (message.topic.split('/')[4], response['rqi'])
|
||||
try:
|
||||
p = self._request_promises[promise_key]
|
||||
except KeyError:
|
||||
self.logger.debug(
|
||||
'Response %s could not be mapped to a request. Discarding.'
|
||||
% (response['rqi'], )
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
response['pc'] = decode_onem2m_content(self._encode(response['pc']), 'application/json')
|
||||
except KeyError:
|
||||
pass
|
||||
except CSEValueError as e:
|
||||
self.logger.error(
|
||||
'Content of response %s could not be parsed, throwing on the trash heap: %s'
|
||||
% (response['rqi'], e.message)
|
||||
)
|
||||
p.reject(e)
|
||||
|
||||
status_code = response['rsc']
|
||||
del response['rsc']
|
||||
if status_code >= ERROR_MIN:
|
||||
p.reject(OneM2MErrorResponse(status_code, **response))
|
||||
else:
|
||||
p.fulfill(OneM2MResponse(status_code, **response))
|
||||
|
||||
topics = [
|
||||
self._build_topic(originator=client_id, receiver='#', type='resp'),
|
||||
]
|
||||
client.message_callback_add(topics[0], response_callback)
|
||||
|
||||
if self._handle_request_func is not None:
|
||||
topics.append(self._build_topic(receiver=client_id) + '/+')
|
||||
client.message_callback_add(topics[1], request_callback)
|
||||
|
||||
if subscribe_sys_topics:
|
||||
topics.append('$SYS/#')
|
||||
|
||||
self.logger.debug('Subscribing to topic(s) %s ...' % (', '.join(topics), ))
|
||||
client.subscribe([
|
||||
(str(topic), MQTT_QOS_LEVEL) for topic in topics
|
||||
])
|
||||
|
||||
@self.attach_callback()
def on_disconnect(client, userdata, rc):
    """Logs unexpected connection losses; paho handles the reconnect.

    :param mqtt.Client client:
    :param All userdata:
    :param int rc:
    :return void:
    """
    if rc != mqtt.MQTT_ERR_SUCCESS:
        reason = mqtt.error_string(rc)
        self.logger.error(
            'Involuntary connection loss: %s (code %d). Waiting for reconnect ...'
            % (reason, rc)
        )
|
||||
|
||||
@self.attach_callback()
def on_message(client, userdata, message):
    """Catch-all handler: logs publishes that no dedicated callback claimed.

    :param mqtt.Client client:
    :param All userdata:
    :param mqtt.MQTTMessage message:
    :return void:
    """
    topic = message.topic
    self.logger.debug('message received on topic %s' % (topic, ))
|
||||
|
||||
@self.attach_callback()
def on_log(client, userdata, level, buf):
    """Forwards paho-mqtt's internal log lines to our own logger.

    :param mqtt.Client client:
    :param All userdata:
    :param integer level:
    :param string buf:
    :return void:
    """
    line = 'pahomqtt-%d: %s' % (level, buf)
    self.logger.debug(line)
|
||||
|
||||
if parsed_url.username:
|
||||
self._client.username_pw_set(parsed_url.username, parsed_url.password)
|
||||
|
||||
try:
|
||||
self._client.connect(
|
||||
parsed_url.hostname,
|
||||
parsed_url.port or portmap[parsed_url.scheme]
|
||||
)
|
||||
except SocketError as e:
|
||||
raise ConnectionFailed(e.message)
|
||||
|
||||
def loop():
|
||||
try:
|
||||
while self._client.loop(timeout=0.1) != mqtt.mqtt_cs_disconnecting:
|
||||
gevent.sleep()
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
pass
|
||||
|
||||
gevent.spawn(loop)
|
||||
|
||||
def _publish_message(self, payload, topic):
    """Publishes *payload* to *topic* at the configured QoS level.

    Publish failures are only logged (best effort); delivery errors are
    surfaced to callers through the response path, not here.

    :param payload: serialized message body
    :param topic: MQTT topic to publish to
    """
    (rc, mid) = self._client.publish(topic, payload, MQTT_QOS_LEVEL)
    if rc != mqtt.MQTT_ERR_SUCCESS:
        # lazy %-args: formatting only happens when INFO is enabled
        self.logger.info('Code %d while sending message %d: %s',
                         rc, mid, mqtt.error_string(rc))
|
||||
|
||||
def send_onem2m_request(self, request):
    """Serializes *request*, publishes it over MQTT and returns a Promise
    that is settled by the response callback once the peer answers.

    NOTE: the request object is mutated in place (op, pc, to, ty) to match
    the oneM2M MQTT binding's wire representation.

    :param openmtc_onem2m.transport.OneM2MRequest request:
    :return Promise:
    """
    p = Promise()

    # derive the MQTT client id from the last path segment of the originator
    try:
        client_id = request.originator.split('/')[-1]
    except (KeyError, AttributeError):
        # TODO: make this configurable
        client_id = 'ae0'

    # map the symbolic operation onto its 1-based numeric wire code
    request.op = 1 + OneM2MOperation._member_map_.keys().index(OneM2MOperation[request.op].name)
    if request.pc:
        # round-trip through the JSON encoder to get a plain dict payload
        request.pc = self._decode(
            encode_onem2m_content(request.pc, 'application/json', path=request.to)[1]
        )
    # rewrite the target address into oneM2M structured form
    try:
        if request.to.startswith('//'):  # abs CSE
            request.to = '/_' + request.to[1:]
        elif request.to.startswith('/'):  # sp rel CSE
            request.to = '/~' + request.to
    except AttributeError:
        self.logger.error('Could not resolve target id; defaulting to preset')
        request.to = '/' + self._target_id

    # resource type is sent as its numeric enum value
    if request.ty:
        request.ty = ResourceTypeE[request.resource_type.typename].value

    self.logger.debug('Preparing request for transit: %s' % (request, ))

    # key under which the response callback will find this promise
    promises_key = (self._target_id, request.rqi)

    def cleanup(_):
        # drop the bookkeeping entry once the promise is settled either way
        self.logger.debug('Clearing request id %s ...' % (promises_key, ))
        del self._request_promises[promises_key]

    p.addCallback(cleanup)
    p.addErrback(cleanup)

    self._request_promises[promises_key] = p

    # publish only the fields that are actually set
    self._publish_message(
        self._encode({
            str(k): getattr(request, k) for k in self.__request_fields if getattr(request, k) is not None
        }),
        self._build_topic(client_id, self._target_id) + '/json',
    )

    return p
|
||||
|
||||
def stop(self):
    """Disconnects from the broker, leaving behind a clean session."""
    self._client.disconnect()
    # TODO(sho): this is abominable. But for the time being, there seems to be no elegant solution to this.
    self._client._clean_session = True
    # TS 0010, sec. 6.3 mandates a reconnect in order to leave a clean state with the MQTT broker
    self._client.reconnect()
    self._client.disconnect()
|
@ -1,183 +0,0 @@
|
||||
"""
|
||||
Created on 26.05.2013
|
||||
|
||||
@author: kca
|
||||
"""
|
||||
from openmtc.exc import OpenMTCError
|
||||
from collections import namedtuple
|
||||
|
||||
|
||||
STATUS = namedtuple("STATUS", "numeric_code description http_status_code")
|
||||
|
||||
STATUS_ACCEPTED = STATUS(
|
||||
1000, "ACCEPTED", 202)
|
||||
STATUS_OK = STATUS(
|
||||
2000, "OK", 200)
|
||||
STATUS_CREATED = STATUS(
|
||||
2001, "CREATED", 201)
|
||||
STATUS_BAD_REQUEST = STATUS(
|
||||
4000, "BAD_REQUEST", 400)
|
||||
STATUS_NOT_FOUND = STATUS(
|
||||
4004, "NOT_FOUND", 404)
|
||||
STATUS_OPERATION_NOT_ALLOWED = STATUS(
|
||||
4005, "OPERATION_NOT_ALLOWED", 405)
|
||||
STATUS_REQUEST_TIMEOUT = STATUS(
|
||||
4008, "REQUEST_TIMEOUT", 408)
|
||||
STATUS_SUBSCRIPTION_CREATOR_HAS_NO_PRIVILEGE = STATUS(
|
||||
4101, ",_SUBSCRIPTION_CREATOR_HAS_NO_PRIVILEGE", 403)
|
||||
STATUS_CONTENTS_UNACCEPTABLE = STATUS(
|
||||
4102, "CONTENTS_UNACCEPTABLE", 400)
|
||||
STATUS_ORIGINATOR_HAS_NO_PRIVILEGE = STATUS(
|
||||
4103, "ORIGINATOR_HAS_NO_PRIVILEGE", 403)
|
||||
STATUS_GROUP_REQUEST_IDENTIFIER_EXISTS = STATUS(
|
||||
4104, "GROUP_REQUEST_IDENTIFIER_EXISTS", 409)
|
||||
STATUS_CONFLICT = STATUS(
|
||||
4105, "CONFLICT", 409)
|
||||
STATUS_INTERNAL_SERVER_ERROR = STATUS(
|
||||
5000, "INTERNAL_SERVER_ERROR", 500)
|
||||
STATUS_NOT_IMPLEMENTED = STATUS(
|
||||
5001, "NOT_IMPLEMENTED", 501)
|
||||
STATUS_TARGET_NOT_REACHABLE = STATUS(
|
||||
5103, "TARGET_NOT_REACHABLE", 404)
|
||||
STATUS_NO_PRIVILEGE = STATUS(
|
||||
5105, "NO_PRIVILEGE", 403)
|
||||
STATUS_ALREADY_EXISTS = STATUS(
|
||||
5106, "ALREADY_EXISTS", 403)
|
||||
STATUS_TARGET_NOT_SUBSCRIBABLE = STATUS(
|
||||
5203, "TARGET_NOT_SUBSCRIBABLE", 403)
|
||||
STATUS_SUBSCRIPTION_VERIFICATION_INITIATION_FAILED = STATUS(
|
||||
5204, "SUBSCRIPTION_VERIFICATION_INITIATION_FAILED", 500)
|
||||
STATUS_SUBSCRIPTION_HOST_HAS_NO_PRIVILEGE = STATUS(
|
||||
5205, "SUBSCRIPTION_HOST_HAS_NO_PRIVILEGE", 403)
|
||||
STATUS_NON_BLOCKING_REQUEST_NOT_SUPPORTED = STATUS(
|
||||
5206, "NON_BLOCKING_REQUEST_NOT_SUPPORTED", 501)
|
||||
STATUS_EXTERNAL_OBJECT_NOT_REACHABLE = STATUS(
|
||||
6003, "EXTERNAL_OBJECT_NOT_REACHABLE", 404)
|
||||
STATUS_EXTERNAL_OBJECT_NOT_FOUND = STATUS(
|
||||
6005, "EXTERNAL_OBJECT_NOT_FOUND", 404)
|
||||
STATUS_MAX_NUMBER_OF_MEMBER_EXCEEDED = STATUS(
|
||||
6010, "MAX_NUMBER_OF_MEMBER_EXCEEDED", 400)
|
||||
STATUS_MEMBER_TYPE_INCONSISTENT = STATUS(
|
||||
6011, "MEMBER_TYPE_INCONSISTENT", 400)
|
||||
STATUS_MANAGEMENT_SESSION_CANNOT_BE_ESTABLISHED = STATUS(
|
||||
6020, "MANAGEMENT_SESSION_CANNOT_BE_ESTABLISHED", 500)
|
||||
STATUS_MANAGEMENT_SESSION_ESTABLISHMENT_TIMEOUT = STATUS(
|
||||
6021, "MANAGEMENT_SESSION_ESTABLISHMENT_TIMEOUT", 500)
|
||||
STATUS_INVALID_CMDTYPE = STATUS(
|
||||
6022, "INVALID_CMDTYPE", 400)
|
||||
STATUS_INVALID_ARGUMENTS = STATUS(
|
||||
6023, "INVALID_ARGUMENTS", 400)
|
||||
STATUS_INSUFFICIENT_ARGUMENT = STATUS(
|
||||
6024, "INSUFFICIENT_ARGUMENT", 400)
|
||||
STATUS_MGMT_CONVERSION_ERROR = STATUS(
|
||||
6025, "MGMT_CONVERSION_ERROR", 500)
|
||||
STATUS_CANCELLATION_FAILED = STATUS(
|
||||
6026, "CANCELLATION_FAILED", 500)
|
||||
STATUS_ALREADY_COMPLETE = STATUS(
|
||||
6028, "ALREADY_COMPLETE", 400)
|
||||
STATUS_COMMAND_NOT_CANCELLABLE = STATUS(
|
||||
6029, "COMMAND_NOT_CANCELLABLE", 400)
|
||||
STATUS_IMPERSONATION_ERROR = STATUS(
|
||||
6101, "IMPERSONATION_ERROR", 400)
|
||||
|
||||
|
||||
_status_map = {v.numeric_code: v for v in globals().values()
|
||||
if isinstance(v, STATUS)}
|
||||
|
||||
ERROR_MIN = STATUS_BAD_REQUEST.numeric_code
|
||||
|
||||
|
||||
class OneM2MError(OpenMTCError):
|
||||
pass
|
||||
|
||||
|
||||
class CSEError(OneM2MError):
    """Base class for errors that carry a oneM2M response status.

    Subclasses override ``response_status_code`` with the STATUS tuple
    describing their oneM2M and HTTP status codes.
    """
    # default status when a subclass declares nothing more specific
    response_status_code = STATUS_INTERNAL_SERVER_ERROR

    @property
    def rsc(self):
        """Numeric oneM2M response status code."""
        return self.response_status_code.numeric_code

    @property
    def status_code(self):
        """Equivalent HTTP status code."""
        return self.response_status_code.http_status_code
|
||||
|
||||
|
||||
class CSENotFound(CSEError):
|
||||
response_status_code = STATUS_NOT_FOUND
|
||||
|
||||
|
||||
class CSEOperationNotAllowed(CSEError):
|
||||
response_status_code = STATUS_OPERATION_NOT_ALLOWED
|
||||
|
||||
|
||||
class CSENotImplemented(CSEError):
|
||||
response_status_code = STATUS_NOT_IMPLEMENTED
|
||||
|
||||
|
||||
class CSETargetNotReachable(CSEError):
|
||||
response_status_code = STATUS_TARGET_NOT_REACHABLE
|
||||
|
||||
|
||||
class CSEConflict(CSEError):
|
||||
response_status_code = STATUS_CONFLICT
|
||||
|
||||
|
||||
class CSEBadRequest(CSEError):
|
||||
response_status_code = STATUS_BAD_REQUEST
|
||||
|
||||
|
||||
class CSESyntaxError(CSEBadRequest):
|
||||
response_status_code = STATUS_BAD_REQUEST
|
||||
|
||||
|
||||
class CSEPermissionDenied(CSEError):
|
||||
response_status_code = STATUS_ORIGINATOR_HAS_NO_PRIVILEGE
|
||||
|
||||
|
||||
class CSEImpersonationError(CSEBadRequest):
|
||||
response_status_code = STATUS_IMPERSONATION_ERROR
|
||||
|
||||
|
||||
class CSEValueError(CSESyntaxError, ValueError):
|
||||
pass
|
||||
|
||||
|
||||
class CSETypeError(CSESyntaxError, TypeError):
|
||||
pass
|
||||
|
||||
|
||||
class CSEMissingValue(CSESyntaxError):
|
||||
pass
|
||||
|
||||
|
||||
class CSEContentsUnacceptable(CSEError):
|
||||
response_status_code = STATUS_CONTENTS_UNACCEPTABLE
|
||||
|
||||
|
||||
_error_map = {
|
||||
STATUS_INTERNAL_SERVER_ERROR.numeric_code: CSEError
|
||||
}
|
||||
|
||||
|
||||
def get_error_class(rsc):
    """Return the CSEError subclass registered for *rsc* (CSEError if none)."""
    numeric = int(rsc)
    return _error_map.get(numeric, CSEError)
|
||||
|
||||
|
||||
def get_response_status(rsc):
    """Return the STATUS tuple for *rsc*, defaulting to internal server error."""
    numeric = int(rsc)
    return _status_map.get(numeric, STATUS_INTERNAL_SERVER_ERROR)
|
||||
|
||||
|
||||
def all_subclasses(cls):
    """Return all direct and transitive subclasses of *cls*.

    Direct subclasses come first, followed by the recursively collected
    descendants of each direct subclass, in definition order.
    """
    direct = cls.__subclasses__()
    nested = []
    for sub in direct:
        nested.extend(all_subclasses(sub))
    return direct + nested
|
||||
|
||||
|
||||
# Populate _error_map with every CSEError subclass that declares its OWN
# response_status_code. vars() deliberately ignores inherited attributes so
# a subclass without its own code does not shadow its parent's entry.
for c in all_subclasses(CSEError):
    try:
        code = vars(c)["response_status_code"].numeric_code
    except KeyError:
        continue
    _error_map[code] = c

# keep the module namespace clean (assumes the loop matched at least once)
del c, code
|
@ -1,118 +0,0 @@
|
||||
try:
|
||||
from urllib.parse import urlparse
|
||||
except ImportError:
|
||||
from urlparse import urlparse
|
||||
|
||||
from openmtc.mapper import BasicMapper, MapperError
|
||||
from openmtc_onem2m import OneM2MRequest
|
||||
from openmtc_onem2m.transport import OneM2MOperation
|
||||
|
||||
|
||||
def _is_persistent(instance):
|
||||
return bool(instance.path)
|
||||
|
||||
|
||||
class OneM2MMapper(BasicMapper):
    """CRUD mapper that talks to a oneM2M CSE over HTTP(S) or MQTT(S).

    The transport is selected from the URL scheme of *cse*; the resulting
    client's send_onem2m_request is bound as self._send_request.
    """

    def __init__(self, cse, originator=None, ca_certs=None, cert_file=None, key_file=None, *args, **kw):
        super(OneM2MMapper, self).__init__(*args, **kw)

        scheme = urlparse(cse).scheme.lower()
        # imports are local so only the chosen transport's deps are required
        if scheme in ("", "https", "http"):
            from openmtc_onem2m.client.http import get_client
            self._send_request = get_client(cse, use_xml=False, ca_certs=ca_certs, cert_file=cert_file, key_file=key_file).send_onem2m_request
        elif scheme in ("mqtt", "mqtts", "secure-mqtt"):
            from openmtc_onem2m.client.mqtt import get_client
            self._send_request = get_client(cse, use_xml=False, client_id=originator).send_onem2m_request
        elif scheme == "coap":
            raise NotImplementedError
        else:
            raise ValueError(
                "Unsupported URL scheme: %s" % (scheme,)
            )
        self.originator = originator

    def create(self, path, instance):
        """Creates *instance* under *path* on the CSE and returns it with
        server-assigned attributes (and its new path) merged in."""
        # blank out read-only attributes before sending
        instance.__dict__.update({
            attribute.name: None for attribute in type(instance).attributes if attribute.accesstype == attribute.RO
        })

        # TODO(rst): add resource_type
        response = self._send_request(OneM2MRequest(
            OneM2MOperation.create,
            path,
            self.originator,
            ty=type(instance),
            pc=instance
        )).get()

        try:
            instance.__dict__.update(response.content.values)
            instance.path = path + '/' + response.content.resourceName
        except (AttributeError, ):
            # response carried no content; fall back to the request path
            instance.path = path

        self.logger.debug("Set instance path: %s" % (instance.path, ))
        instance._synced = False
        return instance

    def update(self, instance, fields=None):
        """Updates an already-stored *instance*; *fields* limits which
        attributes are sent. Raises MapperError if it was never created."""
        if not _is_persistent(instance):
            raise MapperError("Instance is not yet stored")
        return self._do_update(instance, fields)

    def _do_update(self, instance, fields=None):
        # strip write-once/read-only attributes (and, if *fields* is given,
        # everything not explicitly requested) from the outgoing payload
        attributes = type(instance).attributes
        fields_to_be_cleared = [a.name for a in attributes if a.accesstype in (a.WO, a.RO)]
        if fields:
            fields_to_be_cleared.extend([a.name for a in attributes if a.name not in fields])
        instance.childResource = []

        # remove NP attributes
        instance.__dict__.update({
            a: None for a in fields_to_be_cleared
        })

        response = self._send_request(OneM2MRequest(
            OneM2MOperation.update,
            instance.path,
            self.originator,
            pc=instance
        )).get()

        try:
            response.content.path = instance.path
        except AttributeError:
            # no content in the response -- nothing to annotate
            pass

        return response.content

    def get(self, path):
        """Retrieves the resource at *path* and returns its content."""
        response = self._get_data(path)
        response.content.path = path
        self.logger.debug("Received response: %s", response.content)
        return response.content

    def delete(self, instance):
        """Deletes *instance* (or a raw path string) on the CSE."""
        self._send_request(OneM2MRequest(
            OneM2MOperation.delete,
            getattr(instance, "path", instance),
            self.originator
        ))

    def _get_data(self, path):
        # blocking retrieve; .get() waits on the returned promise
        return self._send_request(OneM2MRequest(
            OneM2MOperation.retrieve,
            path,
            self.originator
        )).get()

    # TODO(rst): check if this can be removed in parent class
    @classmethod
    def _patch_model(cls):
        pass

    def _fill_resource(self, res, data):
        pass

    def _map(self, path, typename, data):
        pass
|
File diff suppressed because it is too large
Load Diff
@ -1,93 +0,0 @@
|
||||
from .json import OneM2MJsonSerializer
|
||||
from openmtc_onem2m.exc import CSEBadRequest, CSEContentsUnacceptable
|
||||
from werkzeug import Accept, parse_accept_header
|
||||
from futile.logging import get_logger
|
||||
from openmtc.exc import OpenMTCError
|
||||
|
||||
_factories = {"application/json": OneM2MJsonSerializer,
|
||||
"application/vnd.onem2m-res+json": OneM2MJsonSerializer,
|
||||
"application/vnd.onem2m-ntfy+json": OneM2MJsonSerializer,
|
||||
"application/vnd.onem2m-attrs+json": OneM2MJsonSerializer,
|
||||
"text/plain": OneM2MJsonSerializer}
|
||||
_serializers = {}
|
||||
|
||||
|
||||
def create_onem2m_serializer(content_type):
|
||||
try:
|
||||
factory = _factories[content_type]
|
||||
except KeyError:
|
||||
raise CSEBadRequest("Unsupported content type: %s. Try one of %s" %
|
||||
(content_type, ', '.join(_factories.keys())))
|
||||
return factory()
|
||||
|
||||
|
||||
def get_onem2m_supported_content_types():
|
||||
return _factories.keys()
|
||||
|
||||
|
||||
def get_onem2m_decoder(content_type):
|
||||
# TODO: Check if this is faster than split
|
||||
content_type, _, _ = content_type.partition(";")
|
||||
|
||||
content_type = content_type.strip().lower()
|
||||
|
||||
try:
|
||||
return _serializers[content_type]
|
||||
except KeyError:
|
||||
serializer = create_onem2m_serializer(content_type)
|
||||
_serializers[content_type] = serializer
|
||||
return serializer
|
||||
get_serializer = get_onem2m_decoder
|
||||
|
||||
|
||||
def get_onem2m_encoder(accept):
|
||||
# TODO: optimize
|
||||
if accept:
|
||||
parsed_accept_header = parse_accept_header(accept, Accept)
|
||||
""":type : Accept"""
|
||||
supported = get_onem2m_supported_content_types()
|
||||
accepted_type = parsed_accept_header.best_match(supported)
|
||||
if not accepted_type:
|
||||
raise CSEContentsUnacceptable("%s is not supported. "
|
||||
"Supported content types are: %s" %
|
||||
(accept, ', '.join(supported)))
|
||||
else:
|
||||
# TODO: use config["default_content_type"]
|
||||
accepted_type = "application/json"
|
||||
|
||||
# TODO: optimize
|
||||
return get_serializer(accepted_type)
|
||||
|
||||
|
||||
def register_onem2m_serializer(content_type, factory):
|
||||
set_value = _factories.setdefault(content_type, factory)
|
||||
|
||||
if set_value is not factory:
|
||||
raise OpenMTCError("Content type is already registered: %s" %
|
||||
(content_type, ))
|
||||
|
||||
################################################################################
|
||||
# import other serializers at serializers
|
||||
################################################################################
|
||||
# import impl
|
||||
# import pkgutil
|
||||
#
|
||||
# logger = get_logger(__name__)
|
||||
#
|
||||
# for _importer, modname, ispkg in pkgutil.iter_modules(impl.__path__):
|
||||
# modname = impl.__name__ + "." + modname
|
||||
# logger.debug("Found onem2m serializer module %s (is a package: %s)" %
|
||||
# (modname, ispkg))
|
||||
# try:
|
||||
# __import__(modname)
|
||||
# except:
|
||||
# logger.error("Failed to import serializer %s", modname)
|
||||
# raise
|
||||
# del _importer
|
||||
# del modname
|
||||
# del ispkg
|
||||
#
|
||||
# del impl
|
||||
# del pkgutil
|
||||
# del logger
|
||||
|
@ -1,203 +0,0 @@
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from datetime import datetime
|
||||
from re import compile as re_compile
|
||||
|
||||
from futile.logging import LoggerMixin
|
||||
from openmtc_onem2m.exc import CSESyntaxError, CSEBadRequest, CSEValueError
|
||||
from openmtc_onem2m.model import (get_onem2m_type, ContentInstance,
|
||||
ResourceTypeE, Notification,
|
||||
get_onem2m_resource_type,
|
||||
get_short_attribute_name,
|
||||
get_short_member_name, get_long_member_name,
|
||||
get_short_resource_name,
|
||||
get_long_attribute_name,
|
||||
OneM2MEntity, OneM2MResource, Container,
|
||||
get_long_resource_name, OneM2MContentResource,
|
||||
URIList, OneM2MIntEnum)
|
||||
|
||||
_typename_matcher = re_compile(r'^m2m:([a-z]+)$')
|
||||
|
||||
|
||||
def get_typename(tn):
|
||||
return _typename_matcher.findall(tn).pop()
|
||||
|
||||
|
||||
class OneM2MSerializer(LoggerMixin):
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
@abstractmethod
|
||||
def encode_resource(self, resource, response, pretty=False,
|
||||
encoding="utf-8", fields=None):
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
def decode_resource_values(self, s):
|
||||
pass
|
||||
|
||||
def decode(self, s):
|
||||
resource_type, data = self.decode_resource_values(s)
|
||||
if issubclass(resource_type, OneM2MContentResource):
|
||||
return resource_type(data)
|
||||
child_resource = data.pop("childResource", None)
|
||||
if child_resource:
|
||||
try:
|
||||
def map_child_resource(v):
|
||||
res_type = ResourceTypeE(v["type"])
|
||||
res_cls = get_onem2m_resource_type(res_type.name)
|
||||
return res_cls(v["name"], resourceID=v["value"], resourceType=res_type)
|
||||
child_resource = map(map_child_resource, child_resource)
|
||||
except (TypeError, AttributeError, KeyError, ValueError):
|
||||
raise CSEValueError("Invalid entry in child resources: %s",
|
||||
child_resource)
|
||||
if resource_type is Notification and "notificationEvent" in data:
|
||||
representation = data["notificationEvent"]["representation"]
|
||||
representation = self.decode(self.dumps(representation))
|
||||
data["notificationEvent"]["representation"] = representation
|
||||
resource = resource_type(**data)
|
||||
if child_resource:
|
||||
resource.childResource = child_resource
|
||||
return resource
|
||||
|
||||
|
||||
class OneM2MDictSerializer(OneM2MSerializer):
|
||||
def encode_resource(self, resource, pretty=False, path=None, encoding="utf-8", fields=None,
|
||||
encapsulated=False):
|
||||
representation = resource.values
|
||||
|
||||
self.logger.debug("Encoding representation: %s", representation)
|
||||
|
||||
if isinstance(resource, Notification):
|
||||
# handle notifications
|
||||
try:
|
||||
event = representation["notificationEvent"]
|
||||
if event:
|
||||
e = event.values
|
||||
e['representation'] = self.encode_resource(
|
||||
event.representation, pretty, path, encoding, fields, True
|
||||
)
|
||||
representation["notificationEvent"] = {
|
||||
get_short_attribute_name(k) or get_short_member_name(k): v
|
||||
for k, v in e.iteritems()
|
||||
}
|
||||
except (AttributeError, KeyError):
|
||||
self.logger.exception("failed to encode notify")
|
||||
|
||||
def make_val(val_path, resource_id):
|
||||
try:
|
||||
if val_path:
|
||||
val_path += '/' if not val_path.endswith('/') else ''
|
||||
except AttributeError:
|
||||
val_path = ''
|
||||
|
||||
if resource_id.startswith(val_path):
|
||||
return resource_id
|
||||
return val_path + resource_id
|
||||
|
||||
if isinstance(resource, OneM2MResource):
|
||||
|
||||
def get_child_rep(c):
|
||||
return {
|
||||
"val": make_val(path, c.resourceID),
|
||||
"nm": c.basename,
|
||||
"typ": c.resourceType
|
||||
}
|
||||
representation["childResource"] = map(get_child_rep, representation["childResource"])
|
||||
|
||||
if isinstance(resource, URIList):
|
||||
representation = [make_val(path, x) for x in representation]
|
||||
|
||||
if isinstance(resource, Container):
|
||||
if isinstance(resource.latest, ContentInstance):
|
||||
representation['latest'] = resource.latest.resourceID
|
||||
if isinstance(resource.oldest, ContentInstance):
|
||||
representation['oldest'] = resource.oldest.resourceID
|
||||
|
||||
# cleans representation
|
||||
def clean_representation(o):
|
||||
try:
|
||||
# removes empty attributes
|
||||
empty_keys = []
|
||||
for k, v in o.items():
|
||||
if v is None:
|
||||
empty_keys.append(k)
|
||||
elif isinstance(v, OneM2MEntity):
|
||||
o[k] = self.encode_resource(v, pretty, path, encoding, fields)
|
||||
elif isinstance(v, list):
|
||||
|
||||
def encode_list_item(item):
|
||||
if isinstance(item, OneM2MEntity):
|
||||
return self.encode_resource(item, pretty, path, encoding, fields)
|
||||
return item
|
||||
o[k] = map(encode_list_item, v)
|
||||
else:
|
||||
try:
|
||||
if len(v) == 0:
|
||||
empty_keys.append(k)
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
for k in empty_keys:
|
||||
del o[k]
|
||||
|
||||
for k, v in o.items():
|
||||
if not isinstance(v, (unicode, str, bool, datetime,
|
||||
OneM2MIntEnum)):
|
||||
clean_representation(v)
|
||||
except AttributeError:
|
||||
if isinstance(o, list):
|
||||
for p in o:
|
||||
clean_representation(p)
|
||||
|
||||
if not isinstance(resource, OneM2MContentResource):
|
||||
representation = {
|
||||
get_short_resource_name(k) or get_short_attribute_name(k) or
|
||||
get_short_member_name(k): v for
|
||||
k, v in representation.items()}
|
||||
|
||||
clean_representation(representation)
|
||||
|
||||
if (not isinstance(resource, OneM2MResource) and
|
||||
not isinstance(resource, Notification) and
|
||||
not isinstance(resource, OneM2MContentResource)):
|
||||
return representation
|
||||
|
||||
typename = 'm2m:' + (get_short_resource_name(resource.typename) or
|
||||
get_short_member_name(resource.typename))
|
||||
|
||||
if encapsulated:
|
||||
return {typename: representation}
|
||||
|
||||
if pretty:
|
||||
return self.pretty_dumps({typename: representation})
|
||||
|
||||
return self.dumps({typename: representation})
|
||||
|
||||
def _handle_partial_addressing(self, resource, pretty):
|
||||
for k, v in resource.iteritems():
|
||||
if k in ('latest', 'oldest') and isinstance(v, ContentInstance):
|
||||
resource[k] = v.resourceID
|
||||
if pretty:
|
||||
return self.pretty_dumps(resource)
|
||||
return self.dumps(resource)
|
||||
|
||||
def decode_resource_values(self, s):
|
||||
|
||||
def convert_to_long_keys(d):
|
||||
return {get_long_resource_name(k) or get_long_attribute_name(k) or
|
||||
get_long_member_name(k) or k: v for k, v in d.iteritems()}
|
||||
|
||||
try:
|
||||
if hasattr(s, "read"):
|
||||
data = self.load(s, object_hook=convert_to_long_keys)
|
||||
else:
|
||||
data = self.loads(s, object_hook=convert_to_long_keys)
|
||||
except (ValueError, TypeError) as exc:
|
||||
raise CSEBadRequest("Failed to parse input: %s" % (exc, ))
|
||||
|
||||
self.logger.debug("Read data: %s", data)
|
||||
|
||||
try:
|
||||
typename, data = data.items()[0]
|
||||
return get_onem2m_type(get_typename(typename)), data
|
||||
except (AttributeError, IndexError, TypeError):
|
||||
raise CSESyntaxError("Not a valid resource representation")
|
@ -1 +0,0 @@
|
||||
__import__('pkg_resources').declare_namespace(__name__)
|
@ -1,62 +0,0 @@
|
||||
from openmtc_onem2m.serializer.base import OneM2MDictSerializer
|
||||
from json import JSONEncoder
|
||||
from futile.logging import get_logger
|
||||
from datetime import datetime
|
||||
from openmtc_onem2m.model import ContentInstance
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
# rst: ujson and yajl are not supporting object_hooks, but conversion is needed
|
||||
# rst: some measurements are necessary what is better
|
||||
# try:
|
||||
# from ujson import load, loads
|
||||
# logger.debug("using ujson for decoding JSON")
|
||||
# except ImportError:
|
||||
# try:
|
||||
# from yajl import load, loads
|
||||
# logger.debug("using yajl for decoding JSON")
|
||||
# except ImportError:
|
||||
try:
|
||||
# simplejson is faster on decoding, tiny bit slower on encoding
|
||||
from simplejson import load, loads
|
||||
logger.debug("using simplejson for decoding JSON")
|
||||
except ImportError:
|
||||
logger.debug("using builtin json for decoding JSON")
|
||||
from json import load, loads
|
||||
|
||||
|
||||
del logger
|
||||
|
||||
|
||||
def _default(x):
|
||||
if isinstance(x, datetime):
|
||||
try:
|
||||
isoformat = x.isoformat
|
||||
except AttributeError:
|
||||
raise TypeError("%s (%s)" % (x, type(x)))
|
||||
|
||||
return isoformat()
|
||||
elif isinstance(x, ContentInstance):
|
||||
return x.resourceID
|
||||
else:
|
||||
try: # handle model classes
|
||||
return x.values
|
||||
except AttributeError:
|
||||
raise TypeError("%s (%s)" % (x, type(x)))
|
||||
|
||||
|
||||
_simple_encoder = JSONEncoder(check_circular=False, separators=(',', ':'),
|
||||
default=_default)
|
||||
|
||||
_pretty_encoder = JSONEncoder(default=_default, indent=2,
|
||||
separators=(',', ':'),
|
||||
check_circular=False)
|
||||
|
||||
|
||||
class OneM2MJsonSerializer(OneM2MDictSerializer):
    """Dict serializer specialised to JSON using the module-level codecs."""
    def __init__(self, *args, **kw):
        # bind the module-level JSON helpers as instance attributes so the
        # base class can call them polymorphically
        self.loads = loads
        self.load = load
        self.dumps = _simple_encoder.encode
        self.pretty_dumps = _pretty_encoder.encode
@ -1,38 +0,0 @@
|
||||
from futile.logging import get_logger
|
||||
from openmtc_onem2m.exc import CSEValueError
|
||||
from openmtc_onem2m.serializer import get_onem2m_encoder, get_onem2m_decoder
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
def decode_onem2m_content(content, content_type):
    """Deserializes *content* according to *content_type*.

    :param content: raw payload, or ""/None for an empty body
    :param content_type: MIME type used to select the serializer
    :return: the decoded resource, or None when there is nothing to decode
    :raises CSEValueError: if the payload cannot be parsed
    """
    if content == "":
        content = None
    if content_type and content is not None:
        serializer = get_onem2m_decoder(content_type)
        try:
            data = serializer.decode(content)
        except CSEValueError:
            logger.exception("Error reading input")
            # bare raise preserves the original traceback ('raise e' loses it)
            raise

        return data
    return None
|
||||
|
||||
|
||||
def encode_onem2m_content(content, content_type, pretty=False, path=None,
                          fields=None):
    """Serializes *content* for transmission.

    :param content: resource/entity to encode, or None for an empty body
    :param content_type: MIME type selecting the serializer
    :param pretty: pretty-print the output when supported
    :param path: resource path used to absolutize references in the output
    :param fields: currently unused field filter
    :return: (full content-type header value, encoded data) or (None, None)
    """
    logger.debug("Encoding result: %s - %s", content, content_type)

    if content is None:
        return None, None

    fields = fields  # TODO(rst): maybe necessary
    #fields = ["resourceID"]

    serializer = get_onem2m_encoder(content_type)

    data = serializer.encode_resource(content, pretty=pretty, path=path,
                                      fields=fields)

    return content_type + "; charset=utf-8", data
@ -1,444 +0,0 @@
|
||||
import random
|
||||
import string
|
||||
|
||||
from enum import Enum, unique
|
||||
|
||||
from futile.logging import get_logger
|
||||
from openmtc.model import StrEnum
|
||||
from openmtc_onem2m.exc import OneM2MError
|
||||
|
||||
|
||||
@unique
class RequestMethod(Enum):
    """Closed set of operations a request may carry (string-valued)."""
    create = "create"
    retrieve = "retrieve"
    update = "update"
    delete = "delete"
    notify = "notify"
    execute = "execute"
    observe = "observe"
|
||||
|
||||
|
||||
_logger = get_logger(__name__)
|
||||
|
||||
|
||||
class MetaInformation(object):
    """Meta information about a request.

    Short attribute names are exposed as properties that map onto the
    descriptive long names actually stored on the instance:

    ri   -- request identifier
    ot   -- optional originating timestamp
    rqet -- optional request expiration timestamp
    rset -- optional result expiration timestamp
    rt   -- optional response type
    rd   -- optional result destination
    rc   -- optional result content
    rp   -- optional response persistence
    oet  -- optional operational execution time
    ls   -- optional lifespan
    ec   -- optional event category
    da   -- optional delivery aggregation
    gid  -- optional group request identifier
    role -- optional role
    """

    def __init__(self, ri=None, ot=None, rqet=None, rset=None, rt=None, rd=None,
                 rc=None, rp=None, oet=None, ls=None, ec=None, da=None,
                 gid=None, role=None):
        # Bug fix: the constructor previously discarded every argument (its
        # body was only a docstring), so the properties below raised
        # AttributeError and __str__ was always empty. Each value is now
        # stored through its property setter, populating the long names.
        self.ri = ri
        self.ot = ot
        self.rqet = rqet
        self.rset = rset
        self.rt = rt
        self.rd = rd
        self.rc = rc
        self.rp = rp
        self.oet = oet
        self.ls = ls
        self.ec = ec
        self.da = da
        self.gid = gid
        self.ro = role

    @property
    def ri(self):
        return self.identifier

    @ri.setter
    def ri(self, ri):
        self.identifier = ri

    @property
    def ot(self):
        return self.originating_timestamp

    @ot.setter
    def ot(self, ot):
        self.originating_timestamp = ot

    @property
    def rqet(self):
        return self.request_expiration_timestamp

    @rqet.setter
    def rqet(self, rqet):
        self.request_expiration_timestamp = rqet

    @property
    def rset(self):
        return self.result_expiration_timestamp

    @rset.setter
    def rset(self, rset):
        self.result_expiration_timestamp = rset

    @property
    def rt(self):
        return self.response_type

    @rt.setter
    def rt(self, rt):
        self.response_type = rt

    @property
    def rd(self):
        return self.result_destination

    @rd.setter
    def rd(self, rd):
        self.result_destination = rd

    @property
    def rc(self):
        return self.result_content

    @rc.setter
    def rc(self, rc):
        self.result_content = rc

    @property
    def rp(self):
        return self.response_persistence

    @rp.setter
    def rp(self, rp):
        self.response_persistence = rp

    @property
    def oet(self):
        return self.operational_execution_time

    @oet.setter
    def oet(self, oet):
        self.operational_execution_time = oet

    @property
    def ec(self):
        return self.event_category

    @ec.setter
    def ec(self, ec):
        self.event_category = ec

    @property
    def ls(self):
        return self.lifespan

    @ls.setter
    def ls(self, ls):
        self.lifespan = ls

    @property
    def da(self):
        return self.delivery_aggregation

    @da.setter
    def da(self, da):
        self.delivery_aggregation = da

    @property
    def gid(self):
        return self.group_request_identifier

    @gid.setter
    def gid(self, gid):
        self.group_request_identifier = gid

    @property
    def ro(self):
        return self.role

    @ro.setter
    def ro(self, ro):
        self.role = ro

    def __str__(self):
        # render only the attributes that are actually set
        s = ''
        for k in self.__dict__:
            if getattr(self, k):
                s = s + ' | mi.' + str(k) + ': ' + str(self.__dict__[k])
        return s
|
||||
|
||||
|
||||
MI = MetaInformation
|
||||
|
||||
|
||||
class AdditionalInformation(object):
    """Optional additional information about a request.

    cs -- optional status codes
    ra -- optional address for temporary storage of end-node responses
    """

    def __init__(self, cs=None, ra=None):
        self.cs = cs
        self.ra = ra

    def __str__(self):
        """Render only the attributes that are actually set."""
        rendered = ''
        for attr in self.__dict__:
            if getattr(self, attr):
                rendered = rendered + ' | ai.' + str(attr) + ': ' + str(self.__dict__[attr])
        return rendered
|
||||
|
||||
|
||||
# Short alias matching the oneM2M abbreviation for additional information.
AI = AdditionalInformation
|
||||
|
||||
|
||||
class OneM2MOperation(StrEnum):
    """Enumeration of the oneM2M request operations (CRUD plus notify)."""
    create = "create"
    retrieve = "retrieve"
    update = "update"
    delete = "delete"
    notify = "notify"
|
||||
|
||||
|
||||
class OneM2MRequest(object):
    """Class representing a OneM2M request primitive.

    The short oneM2M parameter names (``op``, ``fr``, ``rqi``, ...) are
    exposed as properties that alias the long, descriptive attributes.
    """

    internal = False
    cascading = False

    def __init__(self, op, to, fr=None, rqi=None, ty=None, pc=None, rol=None,
                 ot=None, rqet=None, rset=None, oet=None, rt=None, rp=None,
                 rcn=None, ec=None, da=None, gid=None, filter_criteria=None,
                 drt=None):
        # Operation
        self.operation = op
        # Target uri
        self.to = to
        # Originator ID ("from" is a reserved word, hence "fr")
        self.originator = fr
        # Request identifier; generated randomly when not provided.
        # NOTE: string.ascii_letters replaces the Python-2-only,
        # locale-dependent string.letters so generated IDs are portable
        # and always plain ASCII.
        self.request_identifier = rqi or ''.join(
            random.sample(string.ascii_letters + string.digits, 16))
        # Type of a created resource
        self.resource_type = ty
        # Resource content to be transferred.
        self.content = pc
        self.role = rol
        self.originating_timestamp = ot
        self.request_expiration_timestamp = rqet
        self.result_expiration_timestamp = rset
        self.operation_execution_time = oet
        self.response_type = rt
        self.result_persistence = rp
        self.result_content = rcn
        self.event_category = ec
        self.delivery_aggregation = da
        self.group_request_identifier = gid
        self.filter_criteria = filter_criteria
        # Optional Discovery result type
        self.discovery_result_type = drt

    @property
    def op(self):
        """Short alias for :attr:`operation`."""
        return self.operation

    @op.setter
    def op(self, op):
        self.operation = op

    @property
    def fr(self):
        """Short alias for :attr:`originator`."""
        return self.originator

    @fr.setter
    def fr(self, fr):
        self.originator = fr

    @property
    def rqi(self):
        """Short alias for :attr:`request_identifier`."""
        return self.request_identifier

    @rqi.setter
    def rqi(self, rqi):
        self.request_identifier = rqi

    @property
    def ty(self):
        """Short alias for :attr:`resource_type`."""
        return self.resource_type

    @ty.setter
    def ty(self, ty):
        self.resource_type = ty

    @property
    def pc(self):
        """Short alias for :attr:`content`."""
        return self.content

    @pc.setter
    def pc(self, pc):
        self.content = pc

    @property
    def rol(self):
        """Short alias for :attr:`role`."""
        return self.role

    @rol.setter
    def rol(self, rol):
        self.role = rol

    @property
    def ot(self):
        """Short alias for :attr:`originating_timestamp`."""
        return self.originating_timestamp

    @ot.setter
    def ot(self, ot):
        self.originating_timestamp = ot

    @property
    def rqet(self):
        """Short alias for :attr:`request_expiration_timestamp`."""
        return self.request_expiration_timestamp

    @rqet.setter
    def rqet(self, rqet):
        self.request_expiration_timestamp = rqet

    @property
    def rset(self):
        """Short alias for :attr:`result_expiration_timestamp`."""
        return self.result_expiration_timestamp

    @rset.setter
    def rset(self, rset):
        self.result_expiration_timestamp = rset

    @property
    def oet(self):
        """Short alias for :attr:`operation_execution_time`."""
        return self.operation_execution_time

    @oet.setter
    def oet(self, oet):
        self.operation_execution_time = oet

    @property
    def rt(self):
        """Short alias for :attr:`response_type`."""
        return self.response_type

    @rt.setter
    def rt(self, rt):
        self.response_type = rt

    @property
    def rp(self):
        """Short alias for :attr:`result_persistence`."""
        return self.result_persistence

    @rp.setter
    def rp(self, rp):
        self.result_persistence = rp

    @property
    def rcn(self):
        """Short alias for :attr:`result_content`."""
        return self.result_content

    @rcn.setter
    def rcn(self, rcn):
        self.result_content = rcn

    @property
    def ec(self):
        """Short alias for :attr:`event_category`."""
        return self.event_category

    @ec.setter
    def ec(self, ec):
        self.event_category = ec

    @property
    def da(self):
        """Short alias for :attr:`delivery_aggregation`."""
        return self.delivery_aggregation

    @da.setter
    def da(self, da):
        self.delivery_aggregation = da

    @property
    def gid(self):
        """Short alias for :attr:`group_request_identifier`."""
        return self.group_request_identifier

    @gid.setter
    def gid(self, gid):
        self.group_request_identifier = gid

    @property
    def fc(self):
        """Short alias for :attr:`filter_criteria`."""
        return self.filter_criteria

    @fc.setter
    def fc(self, fc):
        self.filter_criteria = fc

    @property
    def drt(self):
        """Short alias for :attr:`discovery_result_type`."""
        return self.discovery_result_type

    @drt.setter
    def drt(self, drt):
        self.discovery_result_type = drt

    def __str__(self):
        # dict.items() (not the Python-2-only iteritems()) works on both
        # Python 2 and 3.
        return '%s: %s' % (self.__class__.__name__, ' | '.join(
            '%s: %s' % (str(k), str(v)) for k, v in self.__dict__.items()
        ))
|
||||
|
||||
|
||||
class OneM2MResponse(object):
    """Class representing a OneM2M response primitive.

    When a ``request`` is given, its identifiers (rqi, to, fr) are
    mirrored into the response; otherwise they are taken from the
    keyword arguments.
    """

    def __init__(self, status_code, request=None, rqi=None, pc=None, to=None,
                 fr=None, rsc=None):
        # Operation result; expected to expose .http_status_code and
        # .numeric_code (see the status_code/rsc properties below).
        # NOTE(review): the rsc argument is accepted but unused here —
        # rsc is derived from status_code; confirm with callers.
        self.response_status_code = status_code
        if request:
            self.request_identifier = request.rqi
            # Target uri
            self.to = request.to
            # Originator ID
            self.originator = request.fr
        else:
            self.request_identifier = rqi
            # Target uri
            self.to = to
            # Originator ID
            self.originator = fr
        # Resource content to be transferred.
        self.content = pc

    @property
    def status_code(self):
        """HTTP status code of the result (read-only)."""
        return self.response_status_code.http_status_code

    @property
    def rsc(self):
        """Numeric oneM2M response status code (read-only)."""
        return self.response_status_code.numeric_code

    @property
    def rqi(self):
        """Short alias for :attr:`request_identifier`."""
        return self.request_identifier

    @rqi.setter
    def rqi(self, rqi):
        self.request_identifier = rqi

    @property
    def pc(self):
        """Short alias for :attr:`content`."""
        return self.content

    @pc.setter
    def pc(self, pc):
        self.content = pc

    @property
    def fr(self):
        """Short alias for :attr:`originator`."""
        return self.originator

    @fr.setter
    def fr(self, fr):
        self.originator = fr

    def __str__(self):
        # dict.items() (not the Python-2-only iteritems()) works on both
        # Python 2 and 3.
        return '%s: %s' % (self.__class__.__name__, ' | '.join(
            '%s: %s' % (str(k), str(v)) for k, v in self.__dict__.items()
        ))
|
||||
|
||||
|
||||
class OneM2MErrorResponse(OneM2MResponse, OneM2MError):
    """Error response combining OneM2MResponse with the OneM2MError type.

    NOTE(review): presumably OneM2MError is an exception type, making this
    response raisable — confirm against the exc module.
    """
    pass
|
@ -1,37 +0,0 @@
|
||||
from re import compile as re_compile
|
||||
|
||||
|
||||
def _get_regex_path_component():
|
||||
# see http://tools.ietf.org/html/rfc3986#section-3.3
|
||||
# path-abempty = *( "/" segment )
|
||||
# segment = *pchar
|
||||
# pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
|
||||
# pct-encoded = "%" HEXDIG HEXDIG
|
||||
# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
|
||||
# sub-delims = "!" / "$" / "&" / """ / "(" / ")" / "*" / "+" / "," / ";" /
|
||||
# "="
|
||||
|
||||
unreserved = r"[\w\.\-~]"
|
||||
pct_encoded = "%[A-Fa-f0-9][A-Fa-f0-9]"
|
||||
sub_delims = r"[!$&'()\*\+,;=]"
|
||||
|
||||
pchar = "(?:" + unreserved + "|" + pct_encoded + "|" + sub_delims + "|:|@)"
|
||||
segment = pchar + "+"
|
||||
|
||||
return segment
|
||||
|
||||
|
||||
# Pre-built pieces of the oneM2M address grammar; every group is optional
# so the splitter matches (possibly emptily) at the start of any address.
_segment = _get_regex_path_component()
_sp_id = r'(//%s)?' % _segment    # service-provider ID: //<segment>
_cse_id = r'(/%s)?' % _segment    # CSE ID: /<segment>
_path_suffix = r'(?:/?(%s(?:/%s)*))?' % (_segment, _segment)

_onem2m_address_splitter = re_compile(r'^%s%s%s' % (_sp_id, _cse_id, _path_suffix))
|
||||
|
||||
|
||||
def split_onem2m_address(onem2m_address):
    """Split a oneM2M address into its three parts.

    :param str onem2m_address: absolute or relative oneM2M address
    :return: tuple of (sp_id, cse_id, cse-relative rest); absent parts
        come back as empty strings
    """
    matches = _onem2m_address_splitter.findall(onem2m_address)
    return matches[-1]
|
@ -1 +0,0 @@
|
||||
from io import *
|
16
bumb-version
16
bumb-version
@ -1,16 +0,0 @@
|
||||
#!/bin/bash
# Bump the SETUP_VERSION constant in every setup-*.py file listed below.

VERSION=${1}

# Accept only "x.y" or "x.y.z" version strings.
if ! [[ "${VERSION}" =~ ^[0-9]+\.[0-9]+(\.[0-9]+)?$ ]]; then
    echo "Wrong version number! Only x.y or x.y.z is allowed."
    exit 1
fi

SETUPS=( gevent-all sdk gevent-all-with-abs-gip arduinogip cul868gip roomui
         testgip zigbeegip )

# Rewrite the SETUP_VERSION assignment in place for every setup file.
for s in "${SETUPS[@]}"; do
    sed -i -re 's/(^\W*SETUP_VERSION\W*=\W*")[0-9]+\.[0-9]+(\.[0-9]+)?"/\1'${VERSION}'"/' setup-${s}.py
done
|
||||
|
@ -1,24 +0,0 @@
|
||||
Metadata-Version: 1.1
|
||||
Name: openmtc-sdk
|
||||
Version: 4.99.0
|
||||
Summary: The OpenMTC Python SDK
|
||||
Home-page: http://www.openmtc.org
|
||||
Author: Konrad Campowsky
|
||||
Author-email: konrad.campowsky@fraunhofer.fokus.de
|
||||
License: Fraunhofer FOKUS proprietary
|
||||
Description: UNKNOWN
|
||||
Platform: UNKNOWN
|
||||
Requires: urllib3
|
||||
Requires: gevent (>=1.0)
|
||||
Requires: iso8601 (>=0.1.5)
|
||||
Requires: werkzeug (>=0.9)
|
||||
Requires: blist
|
||||
Requires: simplejson
|
||||
Requires: ujson
|
||||
Requires: python_socketio
|
||||
Requires: gevent_websocket
|
||||
Requires: flask
|
||||
Requires: pyxb (==1.2.3)
|
||||
Requires: enum34
|
||||
Requires: dtls
|
||||
Requires: geventhttpclient
|
@ -1,96 +0,0 @@
|
||||
MANIFEST.in
|
||||
setup-sdk.py
|
||||
utils.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/__init__.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/exc.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/model.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/transport.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/util.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/client/__init__.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/client/http.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/client/mqtt.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/mapper/__init__.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/serializer/__init__.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/serializer/base.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/serializer/util.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/serializer/impl/__init__.py
|
||||
common/openmtc-onem2m/src/openmtc_onem2m/serializer/json/__init__.py
|
||||
common/openmtc/lib/pyio.py
|
||||
common/openmtc/lib/aplus/__init__.py
|
||||
common/openmtc/lib/openmtc_sdk.egg-info/PKG-INFO
|
||||
common/openmtc/lib/openmtc_sdk.egg-info/SOURCES.txt
|
||||
common/openmtc/lib/openmtc_sdk.egg-info/dependency_links.txt
|
||||
common/openmtc/lib/openmtc_sdk.egg-info/requires.txt
|
||||
common/openmtc/lib/openmtc_sdk.egg-info/top_level.txt
|
||||
common/openmtc/src/openmtc/__init__.py
|
||||
common/openmtc/src/openmtc/configuration.py
|
||||
common/openmtc/src/openmtc/exc.py
|
||||
common/openmtc/src/openmtc/util.py
|
||||
common/openmtc/src/openmtc/version.py
|
||||
common/openmtc/src/openmtc/mapper/__init__.py
|
||||
common/openmtc/src/openmtc/mapper/exc.py
|
||||
common/openmtc/src/openmtc/model/__init__.py
|
||||
common/openmtc/src/openmtc/model/exc.py
|
||||
futile/src/futile/__init__.py
|
||||
futile/src/futile/abchelper.py
|
||||
futile/src/futile/basictypes.py
|
||||
futile/src/futile/contextlib.py
|
||||
futile/src/futile/etree.py
|
||||
futile/src/futile/exc.py
|
||||
futile/src/futile/singleton.py
|
||||
futile/src/futile/StringIO/__init__.py
|
||||
futile/src/futile/caching/__init__.py
|
||||
futile/src/futile/collections/OrderedSet.py
|
||||
futile/src/futile/collections/__init__.py
|
||||
futile/src/futile/collections/ordereddict.py
|
||||
futile/src/futile/collections/sortedlist.py
|
||||
futile/src/futile/logging/__init__.py
|
||||
futile/src/futile/logging/handlers.py
|
||||
futile/src/futile/logging/logbook.py
|
||||
futile/src/futile/logging/logtap.py
|
||||
futile/src/futile/multiprocess/RWLock.py
|
||||
futile/src/futile/multiprocess/__init__.py
|
||||
futile/src/futile/net/PortTester.py
|
||||
futile/src/futile/net/__init__.py
|
||||
futile/src/futile/net/exc.py
|
||||
futile/src/futile/net/sockethelper.py
|
||||
futile/src/futile/net/wsgi.py
|
||||
futile/src/futile/net/xmlrpc.py
|
||||
futile/src/futile/net/http/__init__.py
|
||||
futile/src/futile/net/http/exc.py
|
||||
futile/src/futile/net/http/client/ConnectionPoolManager.py
|
||||
futile/src/futile/net/http/client/RestClient.py
|
||||
futile/src/futile/net/http/client/RestClientAsync.py
|
||||
futile/src/futile/net/http/client/SimpleConnectionManager.py
|
||||
futile/src/futile/net/http/client/__init__.py
|
||||
futile/src/futile/net/http/server/__init__.py
|
||||
futile/src/futile/net/http/server/ssl/__init__.py
|
||||
futile/src/futile/net/http/server/wsgi/__init__.py
|
||||
futile/src/futile/net/http/server/wsgi/ssl.py
|
||||
futile/src/futile/operator/__init__.py
|
||||
futile/src/futile/os/__init__.py
|
||||
futile/src/futile/os/mount.py
|
||||
futile/src/futile/path/__init__.py
|
||||
futile/src/futile/profile/__init__.py
|
||||
futile/src/futile/serializer/__init__.py
|
||||
futile/src/futile/serializer/exc.py
|
||||
futile/src/futile/serializer/xml.py
|
||||
futile/src/futile/signal/__init__.py
|
||||
futile/src/futile/signal/timeout.py
|
||||
futile/src/futile/string/__init__.py
|
||||
futile/src/futile/subprocess/__init__.py
|
||||
futile/src/futile/subprocess/daemon.py
|
||||
futile/src/futile/tempfile/__init__.py
|
||||
futile/src/futile/threading/RWLock.py
|
||||
futile/src/futile/threading/__init__.py
|
||||
futile/src/futile/threading/synchronized.py
|
||||
futile/src/futile/traceback/__init__.py
|
||||
futile/src/futile/types/TypeManager.py
|
||||
futile/src/futile/types/__init__.py
|
||||
openmtc-app/src/openmtc_app/__init__.py
|
||||
openmtc-app/src/openmtc_app/exc.py
|
||||
openmtc-app/src/openmtc_app/onem2m.py
|
||||
openmtc-app/src/openmtc_app/util.py
|
||||
openmtc-app/src/openmtc_app/flask_runner/__init__.py
|
||||
openmtc-app/src/openmtc_app/notification/__init__.py
|
||||
openmtc-app/src/openmtc_app/runner/__init__.py
|
@ -1,14 +0,0 @@
|
||||
urllib3
|
||||
gevent >= 1.0
|
||||
iso8601 >= 0.1.5
|
||||
werkzeug >= 0.9
|
||||
blist
|
||||
simplejson
|
||||
ujson
|
||||
python_socketio
|
||||
gevent_websocket
|
||||
flask
|
||||
pyxb == 1.2.3
|
||||
enum34
|
||||
dtls
|
||||
geventhttpclient
|
@ -1,6 +0,0 @@
|
||||
aplus
|
||||
futile
|
||||
openmtc
|
||||
openmtc_app
|
||||
openmtc_onem2m
|
||||
pyio
|
BIN
dist/openmtc_sdk-4.99.0-py2.7.egg
vendored
BIN
dist/openmtc_sdk-4.99.0-py2.7.egg
vendored
Binary file not shown.
Loading…
x
Reference in New Issue
Block a user