diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..35ea017 --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +revert_changes_to_vmachine +push_to_vmachine* +.vscode +*.pyc +*.pyo +*.bak \ No newline at end of file diff --git a/libs_crutch/__init__.py b/libs_crutch/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/contrib/__init__.py b/libs_crutch/contrib/__init__.py deleted file mode 100644 index 89c78e7..0000000 --- a/libs_crutch/contrib/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -import site -import os -import sys - -if not any("calculate/contrib" in x for x in sys.path): - for sitedir in site.getsitepackages(): - if sitedir.endswith("python3.9/site-packages"): - for i, syspathdir in enumerate(sys.path): - if syspathdir.endswith("python3.9/site-packages"): - contribpath = "%s/calculate/contrib" % syspathdir - if os.path.exists(contribpath): - sys.path.insert(i, contribpath) - break - if not any("calculate/contrib" in x for x in sys.path): - raise ImportError("Failed to install calculate contribution directory") diff --git a/libs_crutch/contrib/cherrypy/LICENSE.txt b/libs_crutch/contrib/cherrypy/LICENSE.txt deleted file mode 100644 index 87c8bd0..0000000 --- a/libs_crutch/contrib/cherrypy/LICENSE.txt +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2004-2015, CherryPy Team (team@cherrypy.org) -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. 
- * Neither the name of the CherryPy Team nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/libs_crutch/contrib/cherrypy/__init__.py b/libs_crutch/contrib/cherrypy/__init__.py deleted file mode 100644 index 6779f17..0000000 --- a/libs_crutch/contrib/cherrypy/__init__.py +++ /dev/null @@ -1,652 +0,0 @@ -"""CherryPy is a pythonic, object-oriented HTTP framework. - - -CherryPy consists of not one, but four separate API layers. - -The APPLICATION LAYER is the simplest. CherryPy applications are written as -a tree of classes and methods, where each branch in the tree corresponds to -a branch in the URL path. Each method is a 'page handler', which receives -GET and POST params as keyword arguments, and returns or yields the (HTML) -body of the response. The special method name 'index' is used for paths -that end in a slash, and the special method name 'default' is used to -handle multiple paths via a single handler. 
This layer also includes: - - * the 'exposed' attribute (and cherrypy.expose) - * cherrypy.quickstart() - * _cp_config attributes - * cherrypy.tools (including cherrypy.session) - * cherrypy.url() - -The ENVIRONMENT LAYER is used by developers at all levels. It provides -information about the current request and response, plus the application -and server environment, via a (default) set of top-level objects: - - * cherrypy.request - * cherrypy.response - * cherrypy.engine - * cherrypy.server - * cherrypy.tree - * cherrypy.config - * cherrypy.thread_data - * cherrypy.log - * cherrypy.HTTPError, NotFound, and HTTPRedirect - * cherrypy.lib - -The EXTENSION LAYER allows advanced users to construct and share their own -plugins. It consists of: - - * Hook API - * Tool API - * Toolbox API - * Dispatch API - * Config Namespace API - -Finally, there is the CORE LAYER, which uses the core API's to construct -the default components which are available at higher layers. You can think -of the default components as the 'reference implementation' for CherryPy. -Megaframeworks (and advanced users) may replace the default components -with customized or extended components. The core API's are: - - * Application API - * Engine API - * Request API - * Server API - * WSGI API - -These API's are described in the `CherryPy specification `_. 
-""" - -__version__ = "3.8.0" - -from cherrypy._cpcompat import urljoin as _urljoin, urlencode as _urlencode -from cherrypy._cpcompat import basestring, unicodestr, set - -from cherrypy._cperror import HTTPError, HTTPRedirect, InternalRedirect -from cherrypy._cperror import NotFound, CherryPyException, TimeoutError - -from cherrypy import _cpdispatch as dispatch - -from cherrypy import _cptools -tools = _cptools.default_toolbox -Tool = _cptools.Tool - -from cherrypy import _cprequest -from cherrypy.lib import httputil as _httputil - -from cherrypy import _cptree -tree = _cptree.Tree() -from cherrypy._cptree import Application -from cherrypy import _cpwsgi as wsgi - -from cherrypy import process -try: - from cherrypy.process import win32 - engine = win32.Win32Bus() - engine.console_control_handler = win32.ConsoleCtrlHandler(engine) - del win32 -except ImportError: - engine = process.bus - - -# Timeout monitor. We add two channels to the engine -# to which cherrypy.Application will publish. -engine.listeners['before_request'] = set() -engine.listeners['after_request'] = set() - - -class _TimeoutMonitor(process.plugins.Monitor): - - def __init__(self, bus): - self.servings = [] - process.plugins.Monitor.__init__(self, bus, self.run) - - def before_request(self): - self.servings.append((serving.request, serving.response)) - - def after_request(self): - try: - self.servings.remove((serving.request, serving.response)) - except ValueError: - pass - - def run(self): - """Check timeout on all responses. 
(Internal)""" - for req, resp in self.servings: - resp.check_timeout() -engine.timeout_monitor = _TimeoutMonitor(engine) -engine.timeout_monitor.subscribe() - -engine.autoreload = process.plugins.Autoreloader(engine) -engine.autoreload.subscribe() - -engine.thread_manager = process.plugins.ThreadManager(engine) -engine.thread_manager.subscribe() - -engine.signal_handler = process.plugins.SignalHandler(engine) - - -class _HandleSignalsPlugin(object): - - """Handle signals from other processes based on the configured - platform handlers above.""" - - def __init__(self, bus): - self.bus = bus - - def subscribe(self): - """Add the handlers based on the platform""" - if hasattr(self.bus, "signal_handler"): - self.bus.signal_handler.subscribe() - if hasattr(self.bus, "console_control_handler"): - self.bus.console_control_handler.subscribe() - -engine.signals = _HandleSignalsPlugin(engine) - - -from cherrypy import _cpserver -server = _cpserver.Server() -server.subscribe() - - -def quickstart(root=None, script_name="", config=None): - """Mount the given root, start the builtin server (and engine), then block. - - root: an instance of a "controller class" (a collection of page handler - methods) which represents the root of the application. - script_name: a string containing the "mount point" of the application. - This should start with a slash, and be the path portion of the URL - at which to mount the given root. For example, if root.index() will - handle requests to "http://www.example.com:8080/dept/app1/", then - the script_name argument would be "/dept/app1". - - It MUST NOT end in a slash. If the script_name refers to the root - of the URI, it MUST be an empty string (not "/"). - config: a file or dict containing application config. If this contains - a [global] section, those entries will be used in the global - (site-wide) config. 
- """ - if config: - _global_conf_alias.update(config) - - tree.mount(root, script_name, config) - - engine.signals.subscribe() - engine.start() - engine.block() - - -from cherrypy._cpcompat import threadlocal as _local - - -class _Serving(_local): - - """An interface for registering request and response objects. - - Rather than have a separate "thread local" object for the request and - the response, this class works as a single threadlocal container for - both objects (and any others which developers wish to define). In this - way, we can easily dump those objects when we stop/start a new HTTP - conversation, yet still refer to them as module-level globals in a - thread-safe way. - """ - - request = _cprequest.Request(_httputil.Host("127.0.0.1", 80), - _httputil.Host("127.0.0.1", 1111)) - """ - The request object for the current thread. In the main thread, - and any threads which are not receiving HTTP requests, this is None.""" - - response = _cprequest.Response() - """ - The response object for the current thread. 
In the main thread, - and any threads which are not receiving HTTP requests, this is None.""" - - def load(self, request, response): - self.request = request - self.response = response - - def clear(self): - """Remove all attributes of self.""" - self.__dict__.clear() - -serving = _Serving() - - -class _ThreadLocalProxy(object): - - __slots__ = ['__attrname__', '__dict__'] - - def __init__(self, attrname): - self.__attrname__ = attrname - - def __getattr__(self, name): - child = getattr(serving, self.__attrname__) - return getattr(child, name) - - def __setattr__(self, name, value): - if name in ("__attrname__", ): - object.__setattr__(self, name, value) - else: - child = getattr(serving, self.__attrname__) - setattr(child, name, value) - - def __delattr__(self, name): - child = getattr(serving, self.__attrname__) - delattr(child, name) - - def _get_dict(self): - child = getattr(serving, self.__attrname__) - d = child.__class__.__dict__.copy() - d.update(child.__dict__) - return d - __dict__ = property(_get_dict) - - def __getitem__(self, key): - child = getattr(serving, self.__attrname__) - return child[key] - - def __setitem__(self, key, value): - child = getattr(serving, self.__attrname__) - child[key] = value - - def __delitem__(self, key): - child = getattr(serving, self.__attrname__) - del child[key] - - def __contains__(self, key): - child = getattr(serving, self.__attrname__) - return key in child - - def __len__(self): - child = getattr(serving, self.__attrname__) - return len(child) - - def __nonzero__(self): - child = getattr(serving, self.__attrname__) - return bool(child) - # Python 3 - __bool__ = __nonzero__ - -# Create request and response object (the same objects will be used -# throughout the entire life of the webserver, but will redirect -# to the "serving" object) -request = _ThreadLocalProxy('request') -response = _ThreadLocalProxy('response') - -# Create thread_data object as a thread-specific all-purpose storage - - -class 
_ThreadData(_local): - - """A container for thread-specific data.""" -thread_data = _ThreadData() - - -# Monkeypatch pydoc to allow help() to go through the threadlocal proxy. -# Jan 2007: no Googleable examples of anyone else replacing pydoc.resolve. -# The only other way would be to change what is returned from type(request) -# and that's not possible in pure Python (you'd have to fake ob_type). -def _cherrypy_pydoc_resolve(thing, forceload=0): - """Given an object or a path to an object, get the object and its name.""" - if isinstance(thing, _ThreadLocalProxy): - thing = getattr(serving, thing.__attrname__) - return _pydoc._builtin_resolve(thing, forceload) - -try: - import pydoc as _pydoc - _pydoc._builtin_resolve = _pydoc.resolve - _pydoc.resolve = _cherrypy_pydoc_resolve -except ImportError: - pass - - -from cherrypy import _cplogging - - -class _GlobalLogManager(_cplogging.LogManager): - - """A site-wide LogManager; routes to app.log or global log as appropriate. - - This :class:`LogManager` implements - cherrypy.log() and cherrypy.log.access(). If either - function is called during a request, the message will be sent to the - logger for the current Application. If they are called outside of a - request, the message will be sent to the site-wide logger. - """ - - def __call__(self, *args, **kwargs): - """Log the given message to the app.log or global log as appropriate. - """ - # Do NOT use try/except here. See - # https://bitbucket.org/cherrypy/cherrypy/issue/945 - if hasattr(request, 'app') and hasattr(request.app, 'log'): - log = request.app.log - else: - log = self - return log.error(*args, **kwargs) - - def access(self): - """Log an access message to the app.log or global log as appropriate. - """ - try: - return request.app.log.access() - except AttributeError: - return _cplogging.LogManager.access(self) - - -log = _GlobalLogManager() -# Set a default screen handler on the global log. 
-log.screen = True -log.error_file = '' -# Using an access file makes CP about 10% slower. Leave off by default. -log.access_file = '' - - -def _buslog(msg, level): - log.error(msg, 'ENGINE', severity=level) -engine.subscribe('log', _buslog) - -# Helper functions for CP apps # - - -def expose(func=None, alias=None): - """Expose the function, optionally providing an alias or set of aliases.""" - def expose_(func): - func.exposed = True - if alias is not None: - if isinstance(alias, basestring): - parents[alias.replace(".", "_")] = func - else: - for a in alias: - parents[a.replace(".", "_")] = func - return func - - import sys - import types - if isinstance(func, (types.FunctionType, types.MethodType)): - if alias is None: - # @expose - func.exposed = True - return func - else: - # func = expose(func, alias) - parents = sys._getframe(1).f_locals - return expose_(func) - elif func is None: - if alias is None: - # @expose() - parents = sys._getframe(1).f_locals - return expose_ - else: - # @expose(alias="alias") or - # @expose(alias=["alias1", "alias2"]) - parents = sys._getframe(1).f_locals - return expose_ - else: - # @expose("alias") or - # @expose(["alias1", "alias2"]) - parents = sys._getframe(1).f_locals - alias = func - return expose_ - - -def popargs(*args, **kwargs): - """A decorator for _cp_dispatch - (cherrypy.dispatch.Dispatcher.dispatch_method_name). - - Optional keyword argument: handler=(Object or Function) - - Provides a _cp_dispatch function that pops off path segments into - cherrypy.request.params under the names specified. The dispatch - is then forwarded on to the next vpath element. - - Note that any existing (and exposed) member function of the class that - popargs is applied to will override that value of the argument. For - instance, if you have a method named "list" on the class decorated with - popargs, then accessing "/list" will call that function instead of popping - it off as the requested parameter. 
This restriction applies to all - _cp_dispatch functions. The only way around this restriction is to create - a "blank class" whose only function is to provide _cp_dispatch. - - If there are path elements after the arguments, or more arguments - are requested than are available in the vpath, then the 'handler' - keyword argument specifies the next object to handle the parameterized - request. If handler is not specified or is None, then self is used. - If handler is a function rather than an instance, then that function - will be called with the args specified and the return value from that - function used as the next object INSTEAD of adding the parameters to - cherrypy.request.args. - - This decorator may be used in one of two ways: - - As a class decorator: - @cherrypy.popargs('year', 'month', 'day') - class Blog: - def index(self, year=None, month=None, day=None): - #Process the parameters here; any url like - #/, /2009, /2009/12, or /2009/12/31 - #will fill in the appropriate parameters. - - def create(self): - #This link will still be available at /create. Defined functions - #take precedence over arguments. - - Or as a member of a class: - class Blog: - _cp_dispatch = cherrypy.popargs('year', 'month', 'day') - #... - - The handler argument may be used to mix arguments with built in functions. - For instance, the following setup allows different activities at the - day, month, and year level: - - class DayHandler: - def index(self, year, month, day): - #Do something with this day; probably list entries - - def delete(self, year, month, day): - #Delete all entries for this day - - @cherrypy.popargs('day', handler=DayHandler()) - class MonthHandler: - def index(self, year, month): - #Do something with this month; probably list entries - - def delete(self, year, month): - #Delete all entries for this month - - @cherrypy.popargs('month', handler=MonthHandler()) - class YearHandler: - def index(self, year): - #Do something with this year - - #... 
- - @cherrypy.popargs('year', handler=YearHandler()) - class Root: - def index(self): - #... - - """ - - # Since keyword arg comes after *args, we have to process it ourselves - # for lower versions of python. - - handler = None - handler_call = False - for k, v in kwargs.items(): - if k == 'handler': - handler = v - else: - raise TypeError( - "cherrypy.popargs() got an unexpected keyword argument '{0}'" - .format(k) - ) - - import inspect - - if handler is not None \ - and (hasattr(handler, '__call__') or inspect.isclass(handler)): - handler_call = True - - def decorated(cls_or_self=None, vpath=None): - if inspect.isclass(cls_or_self): - # cherrypy.popargs is a class decorator - cls = cls_or_self - setattr(cls, dispatch.Dispatcher.dispatch_method_name, decorated) - return cls - - # We're in the actual function - self = cls_or_self - parms = {} - for arg in args: - if not vpath: - break - parms[arg] = vpath.pop(0) - - if handler is not None: - if handler_call: - return handler(**parms) - else: - request.params.update(parms) - return handler - - request.params.update(parms) - - # If we are the ultimate handler, then to prevent our _cp_dispatch - # from being called again, we will resolve remaining elements through - # getattr() directly. - if vpath: - return getattr(self, vpath.pop(0), None) - else: - return self - - return decorated - - -def url(path="", qs="", script_name=None, base=None, relative=None): - """Create an absolute URL for the given path. - - If 'path' starts with a slash ('/'), this will return - (base + script_name + path + qs). - If it does not start with a slash, this returns - (base + script_name [+ request.path_info] + path + qs). - - If script_name is None, cherrypy.request will be used - to find a script_name, if available. - - If base is None, cherrypy.request.base will be used (if available). - Note that you can use cherrypy.tools.proxy to change this. 
- - Finally, note that this function can be used to obtain an absolute URL - for the current request path (minus the querystring) by passing no args. - If you call url(qs=cherrypy.request.query_string), you should get the - original browser URL (assuming no internal redirections). - - If relative is None or not provided, request.app.relative_urls will - be used (if available, else False). If False, the output will be an - absolute URL (including the scheme, host, vhost, and script_name). - If True, the output will instead be a URL that is relative to the - current request path, perhaps including '..' atoms. If relative is - the string 'server', the output will instead be a URL that is - relative to the server root; i.e., it will start with a slash. - """ - if isinstance(qs, (tuple, list, dict)): - qs = _urlencode(qs) - if qs: - qs = '?' + qs - - if request.app: - if not path.startswith("/"): - # Append/remove trailing slash from path_info as needed - # (this is to support mistyped URL's without redirecting; - # if you want to redirect, use tools.trailing_slash). - pi = request.path_info - if request.is_index is True: - if not pi.endswith('/'): - pi = pi + '/' - elif request.is_index is False: - if pi.endswith('/') and pi != '/': - pi = pi[:-1] - - if path == "": - path = pi - else: - path = _urljoin(pi, path) - - if script_name is None: - script_name = request.script_name - if base is None: - base = request.base - - newurl = base + script_name + path + qs - else: - # No request.app (we're being called outside a request). - # We'll have to guess the base from server.* attributes. - # This will produce very different results from the above - # if you're using vhosts or tools.proxy. 
- if base is None: - base = server.base() - - path = (script_name or "") + path - newurl = base + path + qs - - if './' in newurl: - # Normalize the URL by removing ./ and ../ - atoms = [] - for atom in newurl.split('/'): - if atom == '.': - pass - elif atom == '..': - atoms.pop() - else: - atoms.append(atom) - newurl = '/'.join(atoms) - - # At this point, we should have a fully-qualified absolute URL. - - if relative is None: - relative = getattr(request.app, "relative_urls", False) - - # See http://www.ietf.org/rfc/rfc2396.txt - if relative == 'server': - # "A relative reference beginning with a single slash character is - # termed an absolute-path reference, as defined by ..." - # This is also sometimes called "server-relative". - newurl = '/' + '/'.join(newurl.split('/', 3)[3:]) - elif relative: - # "A relative reference that does not begin with a scheme name - # or a slash character is termed a relative-path reference." - old = url(relative=False).split('/')[:-1] - new = newurl.split('/') - while old and new: - a, b = old[0], new[0] - if a != b: - break - old.pop(0) - new.pop(0) - new = (['..'] * len(old)) + new - newurl = '/'.join(new) - - return newurl - - -# import _cpconfig last so it can reference other top-level objects -from cherrypy import _cpconfig -# Use _global_conf_alias so quickstart can use 'config' as an arg -# without shadowing cherrypy.config. -config = _global_conf_alias = _cpconfig.Config() -config.defaults = { - 'tools.log_tracebacks.on': True, - 'tools.log_headers.on': True, - 'tools.trailing_slash.on': True, - 'tools.encode.on': True -} -config.namespaces["log"] = lambda k, v: setattr(log, k, v) -config.namespaces["checker"] = lambda k, v: setattr(checker, k, v) -# Must reset to get our defaults applied. 
-config.reset() - -from cherrypy import _cpchecker -checker = _cpchecker.Checker() -engine.subscribe('start', checker) diff --git a/libs_crutch/contrib/cherrypy/__main__.py b/libs_crutch/contrib/cherrypy/__main__.py deleted file mode 100644 index b1c9c01..0000000 --- a/libs_crutch/contrib/cherrypy/__main__.py +++ /dev/null @@ -1,4 +0,0 @@ -import cherrypy.daemon - -if __name__ == '__main__': - cherrypy.daemon.run() diff --git a/libs_crutch/contrib/cherrypy/_cpchecker.py b/libs_crutch/contrib/cherrypy/_cpchecker.py deleted file mode 100644 index 4ef8259..0000000 --- a/libs_crutch/contrib/cherrypy/_cpchecker.py +++ /dev/null @@ -1,332 +0,0 @@ -import os -import warnings - -import cherrypy -from cherrypy._cpcompat import iteritems, copykeys, builtins - - -class Checker(object): - - """A checker for CherryPy sites and their mounted applications. - - When this object is called at engine startup, it executes each - of its own methods whose names start with ``check_``. If you wish - to disable selected checks, simply add a line in your global - config which sets the appropriate method to False:: - - [global] - checker.check_skipped_app_config = False - - You may also dynamically add or replace ``check_*`` methods in this way. - """ - - on = True - """If True (the default), run all checks; if False, turn off all checks.""" - - def __init__(self): - self._populate_known_types() - - def __call__(self): - """Run all check_* methods.""" - if self.on: - oldformatwarning = warnings.formatwarning - warnings.formatwarning = self.formatwarning - try: - for name in dir(self): - if name.startswith("check_"): - method = getattr(self, name) - if method and hasattr(method, '__call__'): - method() - finally: - warnings.formatwarning = oldformatwarning - - def formatwarning(self, message, category, filename, lineno, line=None): - """Function to format a warning.""" - return "CherryPy Checker:\n%s\n\n" % message - - # This value should be set inside _cpconfig. 
- global_config_contained_paths = False - - def check_app_config_entries_dont_start_with_script_name(self): - """Check for Application config with sections that repeat script_name. - """ - for sn, app in cherrypy.tree.apps.items(): - if not isinstance(app, cherrypy.Application): - continue - if not app.config: - continue - if sn == '': - continue - sn_atoms = sn.strip("/").split("/") - for key in app.config.keys(): - key_atoms = key.strip("/").split("/") - if key_atoms[:len(sn_atoms)] == sn_atoms: - warnings.warn( - "The application mounted at %r has config " - "entries that start with its script name: %r" % (sn, - key)) - - def check_site_config_entries_in_app_config(self): - """Check for mounted Applications that have site-scoped config.""" - for sn, app in iteritems(cherrypy.tree.apps): - if not isinstance(app, cherrypy.Application): - continue - - msg = [] - for section, entries in iteritems(app.config): - if section.startswith('/'): - for key, value in iteritems(entries): - for n in ("engine.", "server.", "tree.", "checker."): - if key.startswith(n): - msg.append("[%s] %s = %s" % - (section, key, value)) - if msg: - msg.insert(0, - "The application mounted at %r contains the " - "following config entries, which are only allowed " - "in site-wide config. Move them to a [global] " - "section and pass them to cherrypy.config.update() " - "instead of tree.mount()." % sn) - warnings.warn(os.linesep.join(msg)) - - def check_skipped_app_config(self): - """Check for mounted Applications that have no config.""" - for sn, app in cherrypy.tree.apps.items(): - if not isinstance(app, cherrypy.Application): - continue - if not app.config: - msg = "The Application mounted at %r has an empty config." % sn - if self.global_config_contained_paths: - msg += (" It looks like the config you passed to " - "cherrypy.config.update() contains application-" - "specific sections. 
You must explicitly pass " - "application config via " - "cherrypy.tree.mount(..., config=app_config)") - warnings.warn(msg) - return - - def check_app_config_brackets(self): - """Check for Application config with extraneous brackets in section - names. - """ - for sn, app in cherrypy.tree.apps.items(): - if not isinstance(app, cherrypy.Application): - continue - if not app.config: - continue - for key in app.config.keys(): - if key.startswith("[") or key.endswith("]"): - warnings.warn( - "The application mounted at %r has config " - "section names with extraneous brackets: %r. " - "Config *files* need brackets; config *dicts* " - "(e.g. passed to tree.mount) do not." % (sn, key)) - - def check_static_paths(self): - """Check Application config for incorrect static paths.""" - # Use the dummy Request object in the main thread. - request = cherrypy.request - for sn, app in cherrypy.tree.apps.items(): - if not isinstance(app, cherrypy.Application): - continue - request.app = app - for section in app.config: - # get_resource will populate request.config - request.get_resource(section + "/dummy.html") - conf = request.config.get - - if conf("tools.staticdir.on", False): - msg = "" - root = conf("tools.staticdir.root") - dir = conf("tools.staticdir.dir") - if dir is None: - msg = "tools.staticdir.dir is not set." - else: - fulldir = "" - if os.path.isabs(dir): - fulldir = dir - if root: - msg = ("dir is an absolute path, even " - "though a root is provided.") - testdir = os.path.join(root, dir[1:]) - if os.path.exists(testdir): - msg += ( - "\nIf you meant to serve the " - "filesystem folder at %r, remove the " - "leading slash from dir." % (testdir,)) - else: - if not root: - msg = ( - "dir is a relative path and " - "no root provided.") - else: - fulldir = os.path.join(root, dir) - if not os.path.isabs(fulldir): - msg = ("%r is not an absolute path." 
% ( - fulldir,)) - - if fulldir and not os.path.exists(fulldir): - if msg: - msg += "\n" - msg += ("%r (root + dir) is not an existing " - "filesystem path." % fulldir) - - if msg: - warnings.warn("%s\nsection: [%s]\nroot: %r\ndir: %r" - % (msg, section, root, dir)) - - # -------------------------- Compatibility -------------------------- # - obsolete = { - 'server.default_content_type': 'tools.response_headers.headers', - 'log_access_file': 'log.access_file', - 'log_config_options': None, - 'log_file': 'log.error_file', - 'log_file_not_found': None, - 'log_request_headers': 'tools.log_headers.on', - 'log_to_screen': 'log.screen', - 'show_tracebacks': 'request.show_tracebacks', - 'throw_errors': 'request.throw_errors', - 'profiler.on': ('cherrypy.tree.mount(profiler.make_app(' - 'cherrypy.Application(Root())))'), - } - - deprecated = {} - - def _compat(self, config): - """Process config and warn on each obsolete or deprecated entry.""" - for section, conf in config.items(): - if isinstance(conf, dict): - for k, v in conf.items(): - if k in self.obsolete: - warnings.warn("%r is obsolete. Use %r instead.\n" - "section: [%s]" % - (k, self.obsolete[k], section)) - elif k in self.deprecated: - warnings.warn("%r is deprecated. Use %r instead.\n" - "section: [%s]" % - (k, self.deprecated[k], section)) - else: - if section in self.obsolete: - warnings.warn("%r is obsolete. Use %r instead." - % (section, self.obsolete[section])) - elif section in self.deprecated: - warnings.warn("%r is deprecated. Use %r instead." 
- % (section, self.deprecated[section])) - - def check_compatibility(self): - """Process config and warn on each obsolete or deprecated entry.""" - self._compat(cherrypy.config) - for sn, app in cherrypy.tree.apps.items(): - if not isinstance(app, cherrypy.Application): - continue - self._compat(app.config) - - # ------------------------ Known Namespaces ------------------------ # - extra_config_namespaces = [] - - def _known_ns(self, app): - ns = ["wsgi"] - ns.extend(copykeys(app.toolboxes)) - ns.extend(copykeys(app.namespaces)) - ns.extend(copykeys(app.request_class.namespaces)) - ns.extend(copykeys(cherrypy.config.namespaces)) - ns += self.extra_config_namespaces - - for section, conf in app.config.items(): - is_path_section = section.startswith("/") - if is_path_section and isinstance(conf, dict): - for k, v in conf.items(): - atoms = k.split(".") - if len(atoms) > 1: - if atoms[0] not in ns: - # Spit out a special warning if a known - # namespace is preceded by "cherrypy." - if atoms[0] == "cherrypy" and atoms[1] in ns: - msg = ( - "The config entry %r is invalid; " - "try %r instead.\nsection: [%s]" - % (k, ".".join(atoms[1:]), section)) - else: - msg = ( - "The config entry %r is invalid, " - "because the %r config namespace " - "is unknown.\n" - "section: [%s]" % (k, atoms[0], section)) - warnings.warn(msg) - elif atoms[0] == "tools": - if atoms[1] not in dir(cherrypy.tools): - msg = ( - "The config entry %r may be invalid, " - "because the %r tool was not found.\n" - "section: [%s]" % (k, atoms[1], section)) - warnings.warn(msg) - - def check_config_namespaces(self): - """Process config and warn on each unknown config namespace.""" - for sn, app in cherrypy.tree.apps.items(): - if not isinstance(app, cherrypy.Application): - continue - self._known_ns(app) - - # -------------------------- Config Types -------------------------- # - known_config_types = {} - - def _populate_known_types(self): - b = [x for x in vars(builtins).values() - if type(x) is 
type(str)] - - def traverse(obj, namespace): - for name in dir(obj): - # Hack for 3.2's warning about body_params - if name == 'body_params': - continue - vtype = type(getattr(obj, name, None)) - if vtype in b: - self.known_config_types[namespace + "." + name] = vtype - - traverse(cherrypy.request, "request") - traverse(cherrypy.response, "response") - traverse(cherrypy.server, "server") - traverse(cherrypy.engine, "engine") - traverse(cherrypy.log, "log") - - def _known_types(self, config): - msg = ("The config entry %r in section %r is of type %r, " - "which does not match the expected type %r.") - - for section, conf in config.items(): - if isinstance(conf, dict): - for k, v in conf.items(): - if v is not None: - expected_type = self.known_config_types.get(k, None) - vtype = type(v) - if expected_type and vtype != expected_type: - warnings.warn(msg % (k, section, vtype.__name__, - expected_type.__name__)) - else: - k, v = section, conf - if v is not None: - expected_type = self.known_config_types.get(k, None) - vtype = type(v) - if expected_type and vtype != expected_type: - warnings.warn(msg % (k, section, vtype.__name__, - expected_type.__name__)) - - def check_config_types(self): - """Assert that config values are of the same type as default values.""" - self._known_types(cherrypy.config) - for sn, app in cherrypy.tree.apps.items(): - if not isinstance(app, cherrypy.Application): - continue - self._known_types(app.config) - - # -------------------- Specific config warnings -------------------- # - def check_localhost(self): - """Warn if any socket_host is 'localhost'. See #711.""" - for k, v in cherrypy.config.items(): - if k == 'server.socket_host' and v == 'localhost': - warnings.warn("The use of 'localhost' as a socket host can " - "cause problems on newer systems, since " - "'localhost' can map to either an IPv4 or an " - "IPv6 address. 
You should use '127.0.0.1' " - "or '[::1]' instead.") diff --git a/libs_crutch/contrib/cherrypy/_cpcompat.py b/libs_crutch/contrib/cherrypy/_cpcompat.py deleted file mode 100644 index 8a98b38..0000000 --- a/libs_crutch/contrib/cherrypy/_cpcompat.py +++ /dev/null @@ -1,383 +0,0 @@ -"""Compatibility code for using CherryPy with various versions of Python. - -CherryPy 3.2 is compatible with Python versions 2.3+. This module provides a -useful abstraction over the differences between Python versions, sometimes by -preferring a newer idiom, sometimes an older one, and sometimes a custom one. - -In particular, Python 2 uses str and '' for byte strings, while Python 3 -uses str and '' for unicode strings. We will call each of these the 'native -string' type for each version. Because of this major difference, this module -provides new 'bytestr', 'unicodestr', and 'nativestr' attributes, as well as -two functions: 'ntob', which translates native strings (of type 'str') into -byte strings regardless of Python version, and 'ntou', which translates native -strings to unicode strings. This also provides a 'BytesIO' name for dealing -specifically with bytes, and a 'StringIO' name for dealing with native strings. -It also provides a 'base64_decode' function with native strings as input and -output. -""" -import os -import re -import sys -import threading - -if sys.version_info >= (3, 0): - py3k = True - bytestr = bytes - unicodestr = str - nativestr = unicodestr - basestring = (bytes, str) - - def ntob(n, encoding='ISO-8859-1'): - """Return the given native string as a byte string in the given - encoding. - """ - assert_native(n) - # In Python 3, the native string type is unicode - return n.encode(encoding) - - def ntou(n, encoding='ISO-8859-1'): - """Return the given native string as a unicode string with the given - encoding. 
- """ - assert_native(n) - # In Python 3, the native string type is unicode - return n - - def tonative(n, encoding='ISO-8859-1'): - """Return the given string as a native string in the given encoding.""" - # In Python 3, the native string type is unicode - if isinstance(n, bytes): - return n.decode(encoding) - return n - # type("") - from io import StringIO - # bytes: - from io import BytesIO as BytesIO -else: - # Python 2 - py3k = False - bytestr = str - unicodestr = unicode - nativestr = bytestr - basestring = basestring - - def ntob(n, encoding='ISO-8859-1'): - """Return the given native string as a byte string in the given - encoding. - """ - assert_native(n) - # In Python 2, the native string type is bytes. Assume it's already - # in the given encoding, which for ISO-8859-1 is almost always what - # was intended. - return n - - def ntou(n, encoding='ISO-8859-1'): - """Return the given native string as a unicode string with the given - encoding. - """ - assert_native(n) - # In Python 2, the native string type is bytes. - # First, check for the special encoding 'escape'. The test suite uses - # this to signal that it wants to pass a string with embedded \uXXXX - # escapes, but without having to prefix it with u'' for Python 2, - # but no prefix for Python 3. - if encoding == 'escape': - return unicode( - re.sub(r'\\u([0-9a-zA-Z]{4})', - lambda m: unichr(int(m.group(1), 16)), - n.decode('ISO-8859-1'))) - # Assume it's already in the given encoding, which for ISO-8859-1 - # is almost always what was intended. - return n.decode(encoding) - - def tonative(n, encoding='ISO-8859-1'): - """Return the given string as a native string in the given encoding.""" - # In Python 2, the native string type is bytes. 
- if isinstance(n, unicode): - return n.encode(encoding) - return n - try: - # type("") - from cStringIO import StringIO - except ImportError: - # type("") - from StringIO import StringIO - # bytes: - BytesIO = StringIO - - -def assert_native(n): - if not isinstance(n, nativestr): - raise TypeError("n must be a native str (got %s)" % type(n).__name__) - -try: - set = set -except NameError: - from sets import Set as set - -try: - # Python 3.1+ - from base64 import decodebytes as _base64_decodebytes -except ImportError: - # Python 3.0- - # since CherryPy claims compability with Python 2.3, we must use - # the legacy API of base64 - from base64 import decodestring as _base64_decodebytes - - -def base64_decode(n, encoding='ISO-8859-1'): - """Return the native string base64-decoded (as a native string).""" - if isinstance(n, unicodestr): - b = n.encode(encoding) - else: - b = n - b = _base64_decodebytes(b) - if nativestr is unicodestr: - return b.decode(encoding) - else: - return b - -try: - # Python 2.5+ - from hashlib import md5 -except ImportError: - from md5 import new as md5 - -try: - # Python 2.5+ - from hashlib import sha1 as sha -except ImportError: - from sha import new as sha - -try: - sorted = sorted -except NameError: - def sorted(i): - i = i[:] - i.sort() - return i - -try: - reversed = reversed -except NameError: - def reversed(x): - i = len(x) - while i > 0: - i -= 1 - yield x[i] - -try: - # Python 3 - from urllib.parse import urljoin, urlencode - from urllib.parse import quote, quote_plus - from urllib.request import unquote, urlopen - from urllib.request import parse_http_list, parse_keqv_list -except ImportError: - # Python 2 - from urlparse import urljoin - from urllib import urlencode, urlopen - from urllib import quote, quote_plus - from urllib import unquote - from urllib2 import parse_http_list, parse_keqv_list - -try: - from threading import local as threadlocal -except ImportError: - from cherrypy._cpthreadinglocal import local as threadlocal - 
-try: - dict.iteritems - # Python 2 - iteritems = lambda d: d.iteritems() - copyitems = lambda d: d.items() -except AttributeError: - # Python 3 - iteritems = lambda d: d.items() - copyitems = lambda d: list(d.items()) - -try: - dict.iterkeys - # Python 2 - iterkeys = lambda d: d.iterkeys() - copykeys = lambda d: d.keys() -except AttributeError: - # Python 3 - iterkeys = lambda d: d.keys() - copykeys = lambda d: list(d.keys()) - -try: - dict.itervalues - # Python 2 - itervalues = lambda d: d.itervalues() - copyvalues = lambda d: d.values() -except AttributeError: - # Python 3 - itervalues = lambda d: d.values() - copyvalues = lambda d: list(d.values()) - -try: - # Python 3 - import builtins -except ImportError: - # Python 2 - import __builtin__ as builtins - -try: - # Python 2. We try Python 2 first clients on Python 2 - # don't try to import the 'http' module from cherrypy.lib - from Cookie import SimpleCookie, CookieError - from httplib import BadStatusLine, HTTPConnection, IncompleteRead - from httplib import NotConnected - from BaseHTTPServer import BaseHTTPRequestHandler -except ImportError: - # Python 3 - from http.cookies import SimpleCookie, CookieError - from http.client import BadStatusLine, HTTPConnection, IncompleteRead - from http.client import NotConnected - from http.server import BaseHTTPRequestHandler - -# Some platforms don't expose HTTPSConnection, so handle it separately -if py3k: - try: - from http.client import HTTPSConnection - except ImportError: - # Some platforms which don't have SSL don't expose HTTPSConnection - HTTPSConnection = None -else: - try: - from httplib import HTTPSConnection - except ImportError: - HTTPSConnection = None - -try: - # Python 2 - xrange = xrange -except NameError: - # Python 3 - xrange = range - -import threading -if hasattr(threading.Thread, "daemon"): - # Python 2.6+ - def get_daemon(t): - return t.daemon - - def set_daemon(t, val): - t.daemon = val -else: - def get_daemon(t): - return t.isDaemon() - - def 
set_daemon(t, val): - t.setDaemon(val) - -try: - from email.utils import formatdate - - def HTTPDate(timeval=None): - return formatdate(timeval, usegmt=True) -except ImportError: - from rfc822 import formatdate as HTTPDate - -try: - # Python 3 - from urllib.parse import unquote as parse_unquote - - def unquote_qs(atom, encoding, errors='strict'): - return parse_unquote( - atom.replace('+', ' '), - encoding=encoding, - errors=errors) -except ImportError: - # Python 2 - from urllib import unquote as parse_unquote - - def unquote_qs(atom, encoding, errors='strict'): - return parse_unquote(atom.replace('+', ' ')).decode(encoding, errors) - -try: - # Prefer simplejson, which is usually more advanced than the builtin - # module. - import simplejson as json - json_decode = json.JSONDecoder().decode - _json_encode = json.JSONEncoder().iterencode -except ImportError: - if sys.version_info >= (2, 6): - # Python >=2.6 : json is part of the standard library - import json - json_decode = json.JSONDecoder().decode - _json_encode = json.JSONEncoder().iterencode - else: - json = None - - def json_decode(s): - raise ValueError('No JSON library is available') - - def _json_encode(s): - raise ValueError('No JSON library is available') -finally: - if json and py3k: - # The two Python 3 implementations (simplejson/json) - # outputs str. We need bytes. - def json_encode(value): - for chunk in _json_encode(value): - yield chunk.encode('utf8') - else: - json_encode = _json_encode - - -try: - import cPickle as pickle -except ImportError: - # In Python 2, pickle is a Python version. - # In Python 3, pickle is the sped-up C version. - import pickle - -try: - os.urandom(20) - import binascii - - def random20(): - return binascii.hexlify(os.urandom(20)).decode('ascii') -except (AttributeError, NotImplementedError): - import random - # os.urandom not available until Python 2.4. Fall back to random.random. 
- - def random20(): - return sha('%s' % random.random()).hexdigest() - -try: - from _thread import get_ident as get_thread_ident -except ImportError: - from thread import get_ident as get_thread_ident - -try: - # Python 3 - next = next -except NameError: - # Python 2 - def next(i): - return i.next() - -if sys.version_info >= (3, 3): - Timer = threading.Timer - Event = threading.Event -else: - # Python 3.2 and earlier - Timer = threading._Timer - Event = threading._Event - -# Prior to Python 2.6, the Thread class did not have a .daemon property. -# This mix-in adds that property. - - -class SetDaemonProperty: - - def __get_daemon(self): - return self.isDaemon() - - def __set_daemon(self, daemon): - self.setDaemon(daemon) - - if sys.version_info < (2, 6): - daemon = property(__get_daemon, __set_daemon) diff --git a/libs_crutch/contrib/cherrypy/_cpcompat_subprocess.py b/libs_crutch/contrib/cherrypy/_cpcompat_subprocess.py deleted file mode 100644 index ce36372..0000000 --- a/libs_crutch/contrib/cherrypy/_cpcompat_subprocess.py +++ /dev/null @@ -1,1544 +0,0 @@ -# subprocess - Subprocesses with accessible I/O streams -# -# For more information about this module, see PEP 324. -# -# This module should remain compatible with Python 2.2, see PEP 291. -# -# Copyright (c) 2003-2005 by Peter Astrand -# -# Licensed to PSF under a Contributor Agreement. -# See http://www.python.org/2.4/license for licensing details. - -r"""subprocess - Subprocesses with accessible I/O streams - -This module allows you to spawn processes, connect to their -input/output/error pipes, and obtain their return codes. This module -intends to replace several other, older modules and functions, like: - -os.system -os.spawn* -os.popen* -popen2.* -commands.* - -Information about how the subprocess module can be used to replace these -modules and functions can be found below. 
- - - -Using the subprocess module -=========================== -This module defines one class called Popen: - -class Popen(args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0): - - -Arguments are: - -args should be a string, or a sequence of program arguments. The -program to execute is normally the first item in the args sequence or -string, but can be explicitly set by using the executable argument. - -On UNIX, with shell=False (default): In this case, the Popen class -uses os.execvp() to execute the child program. args should normally -be a sequence. A string will be treated as a sequence with the string -as the only item (the program to execute). - -On UNIX, with shell=True: If args is a string, it specifies the -command string to execute through the shell. If args is a sequence, -the first item specifies the command string, and any additional items -will be treated as additional shell arguments. - -On Windows: the Popen class uses CreateProcess() to execute the child -program, which operates on strings. If args is a sequence, it will be -converted to a string using the list2cmdline method. Please note that -not all MS Windows applications interpret the command line the same -way: The list2cmdline is designed for applications using the same -rules as the MS C runtime. - -bufsize, if given, has the same meaning as the corresponding argument -to the built-in open() function: 0 means unbuffered, 1 means line -buffered, any other positive value means use a buffer of -(approximately) that size. A negative bufsize means to use the system -default, which usually means fully buffered. The default value for -bufsize is 0 (unbuffered). - -stdin, stdout and stderr specify the executed programs' standard -input, standard output and standard error file handles, respectively. 
-Valid values are PIPE, an existing file descriptor (a positive -integer), an existing file object, and None. PIPE indicates that a -new pipe to the child should be created. With None, no redirection -will occur; the child's file handles will be inherited from the -parent. Additionally, stderr can be STDOUT, which indicates that the -stderr data from the applications should be captured into the same -file handle as for stdout. - -If preexec_fn is set to a callable object, this object will be called -in the child process just before the child is executed. - -If close_fds is true, all file descriptors except 0, 1 and 2 will be -closed before the child process is executed. - -if shell is true, the specified command will be executed through the -shell. - -If cwd is not None, the current directory will be changed to cwd -before the child is executed. - -If env is not None, it defines the environment variables for the new -process. - -If universal_newlines is true, the file objects stdout and stderr are -opened as a text files, but lines may be terminated by any of '\n', -the Unix end-of-line convention, '\r', the Macintosh convention or -'\r\n', the Windows convention. All of these external representations -are seen as '\n' by the Python program. Note: This feature is only -available if Python is built with universal newline support (the -default). Also, the newlines attribute of the file objects stdout, -stdin and stderr are not updated by the communicate() method. - -The startupinfo and creationflags, if given, will be passed to the -underlying CreateProcess() function. They can specify things such as -appearance of the main window and priority for the new process. -(Windows only) - - -This module also defines some shortcut functions: - -call(*popenargs, **kwargs): - Run command with arguments. Wait for command to complete, then - return the returncode attribute. - - The arguments are the same as for the Popen constructor. 
Example: - - retcode = call(["ls", "-l"]) - -check_call(*popenargs, **kwargs): - Run command with arguments. Wait for command to complete. If the - exit code was zero then return, otherwise raise - CalledProcessError. The CalledProcessError object will have the - return code in the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - check_call(["ls", "-l"]) - -check_output(*popenargs, **kwargs): - Run command with arguments and return its output as a byte string. - - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. Example: - - output = check_output(["ls", "-l", "/dev/null"]) - - -Exceptions ----------- -Exceptions raised in the child process, before the new program has -started to execute, will be re-raised in the parent. Additionally, -the exception object will have one extra attribute called -'child_traceback', which is a string containing traceback information -from the childs point of view. - -The most common exception raised is OSError. This occurs, for -example, when trying to execute a non-existent file. Applications -should prepare for OSErrors. - -A ValueError will be raised if Popen is called with invalid arguments. - -check_call() and check_output() will raise CalledProcessError, if the -called process returns a non-zero return code. - - -Security --------- -Unlike some other popen functions, this implementation will never call -/bin/sh implicitly. This means that all characters, including shell -metacharacters, can safely be passed to child processes. - - -Popen objects -============= -Instances of the Popen class have the following methods: - -poll() - Check if child process has terminated. Returns returncode - attribute. - -wait() - Wait for child process to terminate. Returns returncode attribute. 
- -communicate(input=None) - Interact with process: Send data to stdin. Read data from stdout - and stderr, until end-of-file is reached. Wait for process to - terminate. The optional input argument should be a string to be - sent to the child process, or None, if no data should be sent to - the child. - - communicate() returns a tuple (stdout, stderr). - - Note: The data read is buffered in memory, so do not use this - method if the data size is large or unlimited. - -The following attributes are also available: - -stdin - If the stdin argument is PIPE, this attribute is a file object - that provides input to the child process. Otherwise, it is None. - -stdout - If the stdout argument is PIPE, this attribute is a file object - that provides output from the child process. Otherwise, it is - None. - -stderr - If the stderr argument is PIPE, this attribute is file object that - provides error output from the child process. Otherwise, it is - None. - -pid - The process ID of the child process. - -returncode - The child return code. A None value indicates that the process - hasn't terminated yet. A negative value -N indicates that the - child was terminated by signal N (UNIX only). - - -Replacing older functions with the subprocess module -==================================================== -In this section, "a ==> b" means that b can be used as a replacement -for a. - -Note: All functions in this section fail (more or less) silently if -the executed program cannot be found; this module raises an OSError -exception. - -In the following examples, we assume that the subprocess module is -imported with "from subprocess import *". 
- - -Replacing /bin/sh shell backquote ---------------------------------- -output=`mycmd myarg` -==> -output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0] - - -Replacing shell pipe line -------------------------- -output=`dmesg | grep hda` -==> -p1 = Popen(["dmesg"], stdout=PIPE) -p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) -output = p2.communicate()[0] - - -Replacing os.system() ---------------------- -sts = os.system("mycmd" + " myarg") -==> -p = Popen("mycmd" + " myarg", shell=True) -pid, sts = os.waitpid(p.pid, 0) - -Note: - -* Calling the program through the shell is usually not required. - -* It's easier to look at the returncode attribute than the - exitstatus. - -A more real-world example would look like this: - -try: - retcode = call("mycmd" + " myarg", shell=True) - if retcode < 0: - print >>sys.stderr, "Child was terminated by signal", -retcode - else: - print >>sys.stderr, "Child returned", retcode -except OSError, e: - print >>sys.stderr, "Execution failed:", e - - -Replacing os.spawn* -------------------- -P_NOWAIT example: - -pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg") -==> -pid = Popen(["/bin/mycmd", "myarg"]).pid - - -P_WAIT example: - -retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg") -==> -retcode = call(["/bin/mycmd", "myarg"]) - - -Vector example: - -os.spawnvp(os.P_NOWAIT, path, args) -==> -Popen([path] + args[1:]) - - -Environment example: - -os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env) -==> -Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"}) - - -Replacing os.popen* -------------------- -pipe = os.popen("cmd", mode='r', bufsize) -==> -pipe = Popen("cmd", shell=True, bufsize=bufsize, stdout=PIPE).stdout - -pipe = os.popen("cmd", mode='w', bufsize) -==> -pipe = Popen("cmd", shell=True, bufsize=bufsize, stdin=PIPE).stdin - - -(child_stdin, child_stdout) = os.popen2("cmd", mode, bufsize) -==> -p = Popen("cmd", shell=True, bufsize=bufsize, - stdin=PIPE, stdout=PIPE, 
close_fds=True) -(child_stdin, child_stdout) = (p.stdin, p.stdout) - - -(child_stdin, - child_stdout, - child_stderr) = os.popen3("cmd", mode, bufsize) -==> -p = Popen("cmd", shell=True, bufsize=bufsize, - stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True) -(child_stdin, - child_stdout, - child_stderr) = (p.stdin, p.stdout, p.stderr) - - -(child_stdin, child_stdout_and_stderr) = os.popen4("cmd", mode, - bufsize) -==> -p = Popen("cmd", shell=True, bufsize=bufsize, - stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True) -(child_stdin, child_stdout_and_stderr) = (p.stdin, p.stdout) - -On Unix, os.popen2, os.popen3 and os.popen4 also accept a sequence as -the command to execute, in which case arguments will be passed -directly to the program without shell intervention. This usage can be -replaced as follows: - -(child_stdin, child_stdout) = os.popen2(["/bin/ls", "-l"], mode, - bufsize) -==> -p = Popen(["/bin/ls", "-l"], bufsize=bufsize, stdin=PIPE, stdout=PIPE) -(child_stdin, child_stdout) = (p.stdin, p.stdout) - -Return code handling translates as follows: - -pipe = os.popen("cmd", 'w') -... -rc = pipe.close() -if rc is not None and rc % 256: - print "There were some errors" -==> -process = Popen("cmd", 'w', shell=True, stdin=PIPE) -... -process.stdin.close() -if process.wait() != 0: - print "There were some errors" - - -Replacing popen2.* ------------------- -(child_stdout, child_stdin) = popen2.popen2("somestring", bufsize, mode) -==> -p = Popen(["somestring"], shell=True, bufsize=bufsize - stdin=PIPE, stdout=PIPE, close_fds=True) -(child_stdout, child_stdin) = (p.stdout, p.stdin) - -On Unix, popen2 also accepts a sequence as the command to execute, in -which case arguments will be passed directly to the program without -shell intervention. 
This usage can be replaced as follows: - -(child_stdout, child_stdin) = popen2.popen2(["mycmd", "myarg"], bufsize, - mode) -==> -p = Popen(["mycmd", "myarg"], bufsize=bufsize, - stdin=PIPE, stdout=PIPE, close_fds=True) -(child_stdout, child_stdin) = (p.stdout, p.stdin) - -The popen2.Popen3 and popen2.Popen4 basically works as subprocess.Popen, -except that: - -* subprocess.Popen raises an exception if the execution fails -* the capturestderr argument is replaced with the stderr argument. -* stdin=PIPE and stdout=PIPE must be specified. -* popen2 closes all filedescriptors by default, but you have to specify - close_fds=True with subprocess.Popen. -""" - -import sys -mswindows = (sys.platform == "win32") - -import os -import types -import traceback -import gc -import signal -import errno - -try: - set -except NameError: - from sets import Set as set - -# Exception classes used by this module. - - -class CalledProcessError(Exception): - - """This exception is raised when a process run by check_call() or - check_output() returns a non-zero exit status. - The exit status will be stored in the returncode attribute; - check_output() will also store the output in the output attribute. - """ - - def __init__(self, returncode, cmd, output=None): - self.returncode = returncode - self.cmd = cmd - self.output = output - - def __str__(self): - return "Command '%s' returned non-zero exit status %d" % ( - self.cmd, self.returncode) - - -if mswindows: - import threading - import msvcrt - import _subprocess - - class STARTUPINFO: - dwFlags = 0 - hStdInput = None - hStdOutput = None - hStdError = None - wShowWindow = 0 - - class pywintypes: - error = IOError -else: - import select - _has_poll = hasattr(select, 'poll') - import fcntl - import pickle - - # When select or poll has indicated that the file is writable, - # we can write up to _PIPE_BUF bytes without risk of blocking. - # POSIX defines PIPE_BUF as >= 512. 
- _PIPE_BUF = getattr(select, 'PIPE_BUF', 512) - - -__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", - "check_output", "CalledProcessError"] - -if mswindows: - from _subprocess import CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP, \ - STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, \ - STD_ERROR_HANDLE, SW_HIDE, \ - STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW - - __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP", - "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE", - "STD_ERROR_HANDLE", "SW_HIDE", - "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW"]) -try: - MAXFD = os.sysconf("SC_OPEN_MAX") -except: - MAXFD = 256 - -_active = [] - - -def _cleanup(): - for inst in _active[:]: - res = inst._internal_poll(_deadstate=sys.maxint) - if res is not None: - try: - _active.remove(inst) - except ValueError: - # This can happen if two threads create a new Popen instance. - # It's harmless that it was already removed, so ignore. - pass - -PIPE = -1 -STDOUT = -2 - - -def _eintr_retry_call(func, *args): - while True: - try: - return func(*args) - except (OSError, IOError), e: - if e.errno == errno.EINTR: - continue - raise - - -def call(*popenargs, **kwargs): - """Run command with arguments. Wait for command to complete, then - return the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - retcode = call(["ls", "-l"]) - """ - return Popen(*popenargs, **kwargs).wait() - - -def check_call(*popenargs, **kwargs): - """Run command with arguments. Wait for command to complete. If - the exit code was zero then return, otherwise raise - CalledProcessError. The CalledProcessError object will have the - return code in the returncode attribute. - - The arguments are the same as for the Popen constructor. 
Example: - - check_call(["ls", "-l"]) - """ - retcode = call(*popenargs, **kwargs) - if retcode: - cmd = kwargs.get("args") - if cmd is None: - cmd = popenargs[0] - raise CalledProcessError(retcode, cmd) - return 0 - - -def check_output(*popenargs, **kwargs): - r"""Run command with arguments and return its output as a byte string. - - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. Example: - - >>> check_output(["ls", "-l", "/dev/null"]) - 'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n' - - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. - - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - 'ls: non_existent_file: No such file or directory\n' - """ - if 'stdout' in kwargs: - raise ValueError('stdout argument not allowed, it will be overridden.') - process = Popen(stdout=PIPE, *popenargs, **kwargs) - output, unused_err = process.communicate() - retcode = process.poll() - if retcode: - cmd = kwargs.get("args") - if cmd is None: - cmd = popenargs[0] - raise CalledProcessError(retcode, cmd, output=output) - return output - - -def list2cmdline(seq): - """ - Translate a sequence of arguments into a command line - string, using the same rules as the MS C runtime: - - 1) Arguments are delimited by white space, which is either a - space or a tab. - - 2) A string surrounded by double quotation marks is - interpreted as a single argument, regardless of white space - contained within. A quoted string can be embedded in an - argument. - - 3) A double quotation mark preceded by a backslash is - interpreted as a literal double quotation mark. - - 4) Backslashes are interpreted literally, unless they - immediately precede a double quotation mark. 
- - 5) If backslashes immediately precede a double quotation mark, - every pair of backslashes is interpreted as a literal - backslash. If the number of backslashes is odd, the last - backslash escapes the next double quotation mark as - described in rule 3. - """ - - # See - # http://msdn.microsoft.com/en-us/library/17w5ykft.aspx - # or search http://msdn.microsoft.com for - # "Parsing C++ Command-Line Arguments" - result = [] - needquote = False - for arg in seq: - bs_buf = [] - - # Add a space to separate this argument from the others - if result: - result.append(' ') - - needquote = (" " in arg) or ("\t" in arg) or not arg - if needquote: - result.append('"') - - for c in arg: - if c == '\\': - # Don't know if we need to double yet. - bs_buf.append(c) - elif c == '"': - # Double backslashes. - result.append('\\' * len(bs_buf) * 2) - bs_buf = [] - result.append('\\"') - else: - # Normal char - if bs_buf: - result.extend(bs_buf) - bs_buf = [] - result.append(c) - - # Add remaining backslashes, if any. 
- if bs_buf: - result.extend(bs_buf) - - if needquote: - result.extend(bs_buf) - result.append('"') - - return ''.join(result) - - -class Popen(object): - - def __init__(self, args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0): - """Create new Popen instance.""" - _cleanup() - - self._child_created = False - if not isinstance(bufsize, (int, long)): - raise TypeError("bufsize must be an integer") - - if mswindows: - if preexec_fn is not None: - raise ValueError("preexec_fn is not supported on Windows " - "platforms") - if close_fds and (stdin is not None or stdout is not None or - stderr is not None): - raise ValueError("close_fds is not supported on Windows " - "platforms if you redirect " - "stdin/stdout/stderr") - else: - # POSIX - if startupinfo is not None: - raise ValueError("startupinfo is only supported on Windows " - "platforms") - if creationflags != 0: - raise ValueError("creationflags is only supported on Windows " - "platforms") - - self.stdin = None - self.stdout = None - self.stderr = None - self.pid = None - self.returncode = None - self.universal_newlines = universal_newlines - - # Input and output objects. The general principle is like - # this: - # - # Parent Child - # ------ ----- - # p2cwrite ---stdin---> p2cread - # c2pread <--stdout--- c2pwrite - # errread <--stderr--- errwrite - # - # On POSIX, the child objects are file descriptors. On - # Windows, these are Windows file handles. The parent objects - # are file descriptors on both platforms. The parent objects - # are None when not using PIPEs. The child objects are None - # when not redirecting. 
- - (p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) = self._get_handles(stdin, stdout, stderr) - - self._execute_child(args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) - - if mswindows: - if p2cwrite is not None: - p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0) - if c2pread is not None: - c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0) - if errread is not None: - errread = msvcrt.open_osfhandle(errread.Detach(), 0) - - if p2cwrite is not None: - self.stdin = os.fdopen(p2cwrite, 'wb', bufsize) - if c2pread is not None: - if universal_newlines: - self.stdout = os.fdopen(c2pread, 'rU', bufsize) - else: - self.stdout = os.fdopen(c2pread, 'rb', bufsize) - if errread is not None: - if universal_newlines: - self.stderr = os.fdopen(errread, 'rU', bufsize) - else: - self.stderr = os.fdopen(errread, 'rb', bufsize) - - def _translate_newlines(self, data): - data = data.replace("\r\n", "\n") - data = data.replace("\r", "\n") - return data - - def __del__(self, _maxint=sys.maxint, _active=_active): - # If __init__ hasn't had a chance to execute (e.g. if it - # was passed an undeclared keyword argument), we don't - # have a _child_created attribute at all. - if not getattr(self, '_child_created', False): - # We didn't get to successfully create a child process. - return - # In case the child hasn't been waited on, check if it's done. - self._internal_poll(_deadstate=_maxint) - if self.returncode is None and _active is not None: - # Child is still running, keep us alive until we can wait on it. - _active.append(self) - - def communicate(self, input=None): - """Interact with process: Send data to stdin. Read data from - stdout and stderr, until end-of-file is reached. Wait for - process to terminate. The optional input argument should be a - string to be sent to the child process, or None, if no data - should be sent to the child. 
- - communicate() returns a tuple (stdout, stderr).""" - - # Optimization: If we are only using one pipe, or no pipe at - # all, using select() or threads is unnecessary. - if [self.stdin, self.stdout, self.stderr].count(None) >= 2: - stdout = None - stderr = None - if self.stdin: - if input: - try: - self.stdin.write(input) - except IOError, e: - if e.errno != errno.EPIPE and e.errno != errno.EINVAL: - raise - self.stdin.close() - elif self.stdout: - stdout = _eintr_retry_call(self.stdout.read) - self.stdout.close() - elif self.stderr: - stderr = _eintr_retry_call(self.stderr.read) - self.stderr.close() - self.wait() - return (stdout, stderr) - - return self._communicate(input) - - def poll(self): - return self._internal_poll() - - if mswindows: - # - # Windows methods - # - def _get_handles(self, stdin, stdout, stderr): - """Construct and return tuple with IO objects: - p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite - """ - if stdin is None and stdout is None and stderr is None: - return (None, None, None, None, None, None) - - p2cread, p2cwrite = None, None - c2pread, c2pwrite = None, None - errread, errwrite = None, None - - if stdin is None: - p2cread = _subprocess.GetStdHandle( - _subprocess.STD_INPUT_HANDLE) - if p2cread is None: - p2cread, _ = _subprocess.CreatePipe(None, 0) - elif stdin == PIPE: - p2cread, p2cwrite = _subprocess.CreatePipe(None, 0) - elif isinstance(stdin, int): - p2cread = msvcrt.get_osfhandle(stdin) - else: - # Assuming file-like object - p2cread = msvcrt.get_osfhandle(stdin.fileno()) - p2cread = self._make_inheritable(p2cread) - - if stdout is None: - c2pwrite = _subprocess.GetStdHandle( - _subprocess.STD_OUTPUT_HANDLE) - if c2pwrite is None: - _, c2pwrite = _subprocess.CreatePipe(None, 0) - elif stdout == PIPE: - c2pread, c2pwrite = _subprocess.CreatePipe(None, 0) - elif isinstance(stdout, int): - c2pwrite = msvcrt.get_osfhandle(stdout) - else: - # Assuming file-like object - c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) - 
c2pwrite = self._make_inheritable(c2pwrite) - - if stderr is None: - errwrite = _subprocess.GetStdHandle( - _subprocess.STD_ERROR_HANDLE) - if errwrite is None: - _, errwrite = _subprocess.CreatePipe(None, 0) - elif stderr == PIPE: - errread, errwrite = _subprocess.CreatePipe(None, 0) - elif stderr == STDOUT: - errwrite = c2pwrite - elif isinstance(stderr, int): - errwrite = msvcrt.get_osfhandle(stderr) - else: - # Assuming file-like object - errwrite = msvcrt.get_osfhandle(stderr.fileno()) - errwrite = self._make_inheritable(errwrite) - - return (p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite) - - def _make_inheritable(self, handle): - """Return a duplicate of handle, which is inheritable""" - return _subprocess.DuplicateHandle( - _subprocess.GetCurrentProcess(), - handle, - _subprocess.GetCurrentProcess(), - 0, - 1, - _subprocess.DUPLICATE_SAME_ACCESS - ) - - def _find_w9xpopen(self): - """Find and return absolut path to w9xpopen.exe""" - w9xpopen = os.path.join( - os.path.dirname(_subprocess.GetModuleFileName(0)), - "w9xpopen.exe") - if not os.path.exists(w9xpopen): - # Eeek - file-not-found - possibly an embedding - # situation - see if we can locate it in sys.exec_prefix - w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), - "w9xpopen.exe") - if not os.path.exists(w9xpopen): - raise RuntimeError("Cannot locate w9xpopen.exe, which is " - "needed for Popen to work with your " - "shell or platform.") - return w9xpopen - - def _execute_child(self, args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite): - """Execute program (MS Windows version)""" - - if not isinstance(args, types.StringTypes): - args = list2cmdline(args) - - # Process startup details - if startupinfo is None: - startupinfo = STARTUPINFO() - if None not in (p2cread, c2pwrite, errwrite): - startupinfo.dwFlags |= _subprocess.STARTF_USESTDHANDLES - 
startupinfo.hStdInput = p2cread - startupinfo.hStdOutput = c2pwrite - startupinfo.hStdError = errwrite - - if shell: - startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW - startupinfo.wShowWindow = _subprocess.SW_HIDE - comspec = os.environ.get("COMSPEC", "cmd.exe") - args = '{} /c "{}"'.format(comspec, args) - if (_subprocess.GetVersion() >= 0x80000000 or - os.path.basename(comspec).lower() == "command.com"): - # Win9x, or using command.com on NT. We need to - # use the w9xpopen intermediate program. For more - # information, see KB Q150956 - # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp) - w9xpopen = self._find_w9xpopen() - args = '"%s" %s' % (w9xpopen, args) - # Not passing CREATE_NEW_CONSOLE has been known to - # cause random failures on win9x. Specifically a - # dialog: "Your program accessed mem currently in - # use at xxx" and a hopeful warning about the - # stability of your system. Cost is Ctrl+C wont - # kill children. - creationflags |= _subprocess.CREATE_NEW_CONSOLE - - # Start the process - try: - try: - hp, ht, pid, tid = _subprocess.CreateProcess( - executable, args, - # no special - # security - None, None, - int(not close_fds), - creationflags, - env, - cwd, - startupinfo) - except pywintypes.error, e: - # Translate pywintypes.error to WindowsError, which is - # a subclass of OSError. FIXME: We should really - # translate errno using _sys_errlist (or similar), but - # how can this be done from Python? - raise WindowsError(*e.args) - finally: - # Child is launched. Close the parent's copy of those pipe - # handles that only the child should have open. You need - # to make sure that no handles to the write end of the - # output pipe are maintained in this process or else the - # pipe will not close when the child process exits and the - # ReadFile will hang. 
- if p2cread is not None: - p2cread.Close() - if c2pwrite is not None: - c2pwrite.Close() - if errwrite is not None: - errwrite.Close() - - # Retain the process handle, but close the thread handle - self._child_created = True - self._handle = hp - self.pid = pid - ht.Close() - - def _internal_poll( - self, _deadstate=None, - _WaitForSingleObject=_subprocess.WaitForSingleObject, - _WAIT_OBJECT_0=_subprocess.WAIT_OBJECT_0, - _GetExitCodeProcess=_subprocess.GetExitCodeProcess - ): - """Check if child process has terminated. Returns returncode - attribute. - - This method is called by __del__, so it can only refer to objects - in its local scope. - - """ - if self.returncode is None: - if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0: - self.returncode = _GetExitCodeProcess(self._handle) - return self.returncode - - def wait(self): - """Wait for child process to terminate. Returns returncode - attribute.""" - if self.returncode is None: - _subprocess.WaitForSingleObject(self._handle, - _subprocess.INFINITE) - self.returncode = _subprocess.GetExitCodeProcess(self._handle) - return self.returncode - - def _readerthread(self, fh, buffer): - buffer.append(fh.read()) - - def _communicate(self, input): - stdout = None # Return - stderr = None # Return - - if self.stdout: - stdout = [] - stdout_thread = threading.Thread(target=self._readerthread, - args=(self.stdout, stdout)) - stdout_thread.setDaemon(True) - stdout_thread.start() - if self.stderr: - stderr = [] - stderr_thread = threading.Thread(target=self._readerthread, - args=(self.stderr, stderr)) - stderr_thread.setDaemon(True) - stderr_thread.start() - - if self.stdin: - if input is not None: - try: - self.stdin.write(input) - except IOError, e: - if e.errno != errno.EPIPE: - raise - self.stdin.close() - - if self.stdout: - stdout_thread.join() - if self.stderr: - stderr_thread.join() - - # All data exchanged. Translate lists into strings. 
- if stdout is not None: - stdout = stdout[0] - if stderr is not None: - stderr = stderr[0] - - # Translate newlines, if requested. We cannot let the file - # object do the translation: It is based on stdio, which is - # impossible to combine with select (unless forcing no - # buffering). - if self.universal_newlines and hasattr(file, 'newlines'): - if stdout: - stdout = self._translate_newlines(stdout) - if stderr: - stderr = self._translate_newlines(stderr) - - self.wait() - return (stdout, stderr) - - def send_signal(self, sig): - """Send a signal to the process - """ - if sig == signal.SIGTERM: - self.terminate() - elif sig == signal.CTRL_C_EVENT: - os.kill(self.pid, signal.CTRL_C_EVENT) - elif sig == signal.CTRL_BREAK_EVENT: - os.kill(self.pid, signal.CTRL_BREAK_EVENT) - else: - raise ValueError("Unsupported signal: {}".format(sig)) - - def terminate(self): - """Terminates the process - """ - _subprocess.TerminateProcess(self._handle, 1) - - kill = terminate - - else: - # - # POSIX methods - # - def _get_handles(self, stdin, stdout, stderr): - """Construct and return tuple with IO objects: - p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite - """ - p2cread, p2cwrite = None, None - c2pread, c2pwrite = None, None - errread, errwrite = None, None - - if stdin is None: - pass - elif stdin == PIPE: - p2cread, p2cwrite = self.pipe_cloexec() - elif isinstance(stdin, int): - p2cread = stdin - else: - # Assuming file-like object - p2cread = stdin.fileno() - - if stdout is None: - pass - elif stdout == PIPE: - c2pread, c2pwrite = self.pipe_cloexec() - elif isinstance(stdout, int): - c2pwrite = stdout - else: - # Assuming file-like object - c2pwrite = stdout.fileno() - - if stderr is None: - pass - elif stderr == PIPE: - errread, errwrite = self.pipe_cloexec() - elif stderr == STDOUT: - errwrite = c2pwrite - elif isinstance(stderr, int): - errwrite = stderr - else: - # Assuming file-like object - errwrite = stderr.fileno() - - return (p2cread, p2cwrite, - c2pread, 
c2pwrite, - errread, errwrite) - - def _set_cloexec_flag(self, fd, cloexec=True): - try: - cloexec_flag = fcntl.FD_CLOEXEC - except AttributeError: - cloexec_flag = 1 - - old = fcntl.fcntl(fd, fcntl.F_GETFD) - if cloexec: - fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag) - else: - fcntl.fcntl(fd, fcntl.F_SETFD, old & ~cloexec_flag) - - def pipe_cloexec(self): - """Create a pipe with FDs set CLOEXEC.""" - # Pipes' FDs are set CLOEXEC by default because we don't want them - # to be inherited by other subprocesses: the CLOEXEC flag is - # removed from the child's FDs by _dup2(), between fork() and - # exec(). - # This is not atomic: we would need the pipe2() syscall for that. - r, w = os.pipe() - self._set_cloexec_flag(r) - self._set_cloexec_flag(w) - return r, w - - def _close_fds(self, but): - if hasattr(os, 'closerange'): - os.closerange(3, but) - os.closerange(but + 1, MAXFD) - else: - for i in xrange(3, MAXFD): - if i == but: - continue - try: - os.close(i) - except: - pass - - def _execute_child(self, args, executable, preexec_fn, close_fds, - cwd, env, universal_newlines, - startupinfo, creationflags, shell, - p2cread, p2cwrite, - c2pread, c2pwrite, - errread, errwrite): - """Execute program (POSIX version)""" - - if isinstance(args, types.StringTypes): - args = [args] - else: - args = list(args) - - if shell: - args = ["/bin/sh", "-c"] + args - if executable: - args[0] = executable - - if executable is None: - executable = args[0] - - # For transferring possible exec failure from child to parent - # The first char specifies the exception type: 0 means - # OSError, 1 means some other error. - errpipe_read, errpipe_write = self.pipe_cloexec() - try: - try: - gc_was_enabled = gc.isenabled() - # Disable gc to avoid bug where gc -> file_dealloc -> - # write to stderr -> hang. 
- # http://bugs.python.org/issue1336 - gc.disable() - try: - self.pid = os.fork() - except: - if gc_was_enabled: - gc.enable() - raise - self._child_created = True - if self.pid == 0: - # Child - try: - # Close parent's pipe ends - if p2cwrite is not None: - os.close(p2cwrite) - if c2pread is not None: - os.close(c2pread) - if errread is not None: - os.close(errread) - os.close(errpipe_read) - - # When duping fds, if there arises a situation - # where one of the fds is either 0, 1 or 2, it - # is possible that it is overwritten (#12607). - if c2pwrite == 0: - c2pwrite = os.dup(c2pwrite) - if errwrite == 0 or errwrite == 1: - errwrite = os.dup(errwrite) - - # Dup fds for child - def _dup2(a, b): - # dup2() removes the CLOEXEC flag but - # we must do it ourselves if dup2() - # would be a no-op (issue #10806). - if a == b: - self._set_cloexec_flag(a, False) - elif a is not None: - os.dup2(a, b) - _dup2(p2cread, 0) - _dup2(c2pwrite, 1) - _dup2(errwrite, 2) - - # Close pipe fds. Make sure we don't close the - # same fd more than once, or standard fds. - closed = set([None]) - for fd in [p2cread, c2pwrite, errwrite]: - if fd not in closed and fd > 2: - os.close(fd) - closed.add(fd) - - # Close all other fds, if asked for - if close_fds: - self._close_fds(but=errpipe_write) - - if cwd is not None: - os.chdir(cwd) - - if preexec_fn: - preexec_fn() - - if env is None: - os.execvp(executable, args) - else: - os.execvpe(executable, args, env) - - except: - exc_type, exc_value, tb = sys.exc_info() - # Save the traceback and attach it to the exception - # object - exc_lines = traceback.format_exception(exc_type, - exc_value, - tb) - exc_value.child_traceback = ''.join(exc_lines) - os.write(errpipe_write, pickle.dumps(exc_value)) - - # This exitcode won't be reported to applications, - # so it really doesn't matter what we return. 
- os._exit(255) - - # Parent - if gc_was_enabled: - gc.enable() - finally: - # be sure the FD is closed no matter what - os.close(errpipe_write) - - if p2cread is not None and p2cwrite is not None: - os.close(p2cread) - if c2pwrite is not None and c2pread is not None: - os.close(c2pwrite) - if errwrite is not None and errread is not None: - os.close(errwrite) - - # Wait for exec to fail or succeed; possibly raising exception - # Exception limited to 1M - data = _eintr_retry_call(os.read, errpipe_read, 1048576) - finally: - # be sure the FD is closed no matter what - os.close(errpipe_read) - - if data != "": - try: - _eintr_retry_call(os.waitpid, self.pid, 0) - except OSError, e: - if e.errno != errno.ECHILD: - raise - child_exception = pickle.loads(data) - for fd in (p2cwrite, c2pread, errread): - if fd is not None: - os.close(fd) - raise child_exception - - def _handle_exitstatus(self, sts, _WIFSIGNALED=os.WIFSIGNALED, - _WTERMSIG=os.WTERMSIG, _WIFEXITED=os.WIFEXITED, - _WEXITSTATUS=os.WEXITSTATUS): - # This method is called (indirectly) by __del__, so it cannot - # refer to anything outside of its local scope.""" - if _WIFSIGNALED(sts): - self.returncode = -_WTERMSIG(sts) - elif _WIFEXITED(sts): - self.returncode = _WEXITSTATUS(sts) - else: - # Should never happen - raise RuntimeError("Unknown child exit status!") - - def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid, - _WNOHANG=os.WNOHANG, _os_error=os.error): - """Check if child process has terminated. Returns returncode - attribute. - - This method is called by __del__, so it cannot reference anything - outside of the local scope (nor can any methods it calls). - - """ - if self.returncode is None: - try: - pid, sts = _waitpid(self.pid, _WNOHANG) - if pid == self.pid: - self._handle_exitstatus(sts) - except _os_error: - if _deadstate is not None: - self.returncode = _deadstate - return self.returncode - - def wait(self): - """Wait for child process to terminate. 
Returns returncode - attribute.""" - if self.returncode is None: - try: - pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0) - except OSError, e: - if e.errno != errno.ECHILD: - raise - # This happens if SIGCLD is set to be ignored or waiting - # for child processes has otherwise been disabled for our - # process. This child is dead, we can't get the status. - sts = 0 - self._handle_exitstatus(sts) - return self.returncode - - def _communicate(self, input): - if self.stdin: - # Flush stdio buffer. This might block, if the user has - # been writing to .stdin in an uncontrolled fashion. - self.stdin.flush() - if not input: - self.stdin.close() - - if _has_poll: - stdout, stderr = self._communicate_with_poll(input) - else: - stdout, stderr = self._communicate_with_select(input) - - # All data exchanged. Translate lists into strings. - if stdout is not None: - stdout = ''.join(stdout) - if stderr is not None: - stderr = ''.join(stderr) - - # Translate newlines, if requested. We cannot let the file - # object do the translation: It is based on stdio, which is - # impossible to combine with select (unless forcing no - # buffering). 
- if self.universal_newlines and hasattr(file, 'newlines'): - if stdout: - stdout = self._translate_newlines(stdout) - if stderr: - stderr = self._translate_newlines(stderr) - - self.wait() - return (stdout, stderr) - - def _communicate_with_poll(self, input): - stdout = None # Return - stderr = None # Return - fd2file = {} - fd2output = {} - - poller = select.poll() - - def register_and_append(file_obj, eventmask): - poller.register(file_obj.fileno(), eventmask) - fd2file[file_obj.fileno()] = file_obj - - def close_unregister_and_remove(fd): - poller.unregister(fd) - fd2file[fd].close() - fd2file.pop(fd) - - if self.stdin and input: - register_and_append(self.stdin, select.POLLOUT) - - select_POLLIN_POLLPRI = select.POLLIN | select.POLLPRI - if self.stdout: - register_and_append(self.stdout, select_POLLIN_POLLPRI) - fd2output[self.stdout.fileno()] = stdout = [] - if self.stderr: - register_and_append(self.stderr, select_POLLIN_POLLPRI) - fd2output[self.stderr.fileno()] = stderr = [] - - input_offset = 0 - while fd2file: - try: - ready = poller.poll() - except select.error, e: - if e.args[0] == errno.EINTR: - continue - raise - - for fd, mode in ready: - if mode & select.POLLOUT: - chunk = input[input_offset: input_offset + _PIPE_BUF] - try: - input_offset += os.write(fd, chunk) - except OSError, e: - if e.errno == errno.EPIPE: - close_unregister_and_remove(fd) - else: - raise - else: - if input_offset >= len(input): - close_unregister_and_remove(fd) - elif mode & select_POLLIN_POLLPRI: - data = os.read(fd, 4096) - if not data: - close_unregister_and_remove(fd) - fd2output[fd].append(data) - else: - # Ignore hang up or errors. 
- close_unregister_and_remove(fd) - - return (stdout, stderr) - - def _communicate_with_select(self, input): - read_set = [] - write_set = [] - stdout = None # Return - stderr = None # Return - - if self.stdin and input: - write_set.append(self.stdin) - if self.stdout: - read_set.append(self.stdout) - stdout = [] - if self.stderr: - read_set.append(self.stderr) - stderr = [] - - input_offset = 0 - while read_set or write_set: - try: - rlist, wlist, xlist = select.select( - read_set, write_set, []) - except select.error, e: - if e.args[0] == errno.EINTR: - continue - raise - - if self.stdin in wlist: - chunk = input[input_offset: input_offset + _PIPE_BUF] - try: - bytes_written = os.write(self.stdin.fileno(), chunk) - except OSError, e: - if e.errno == errno.EPIPE: - self.stdin.close() - write_set.remove(self.stdin) - else: - raise - else: - input_offset += bytes_written - if input_offset >= len(input): - self.stdin.close() - write_set.remove(self.stdin) - - if self.stdout in rlist: - data = os.read(self.stdout.fileno(), 1024) - if data == "": - self.stdout.close() - read_set.remove(self.stdout) - stdout.append(data) - - if self.stderr in rlist: - data = os.read(self.stderr.fileno(), 1024) - if data == "": - self.stderr.close() - read_set.remove(self.stderr) - stderr.append(data) - - return (stdout, stderr) - - def send_signal(self, sig): - """Send a signal to the process - """ - os.kill(self.pid, sig) - - def terminate(self): - """Terminate the process with SIGTERM - """ - self.send_signal(signal.SIGTERM) - - def kill(self): - """Kill the process with SIGKILL - """ - self.send_signal(signal.SIGKILL) - - -def _demo_posix(): - # - # Example 1: Simple redirection: Get process list - # - plist = Popen(["ps"], stdout=PIPE).communicate()[0] - print "Process list:" - print plist - - # - # Example 2: Change uid before executing child - # - if os.getuid() == 0: - p = Popen(["id"], preexec_fn=lambda: os.setuid(100)) - p.wait() - - # - # Example 3: Connecting several 
subprocesses - # - print "Looking for 'hda'..." - p1 = Popen(["dmesg"], stdout=PIPE) - p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE) - print repr(p2.communicate()[0]) - - # - # Example 4: Catch execution error - # - print - print "Trying a weird file..." - try: - print Popen(["/this/path/does/not/exist"]).communicate() - except OSError, e: - if e.errno == errno.ENOENT: - print "The file didn't exist. I thought so..." - print "Child traceback:" - print e.child_traceback - else: - print "Error", e.errno - else: - print >>sys.stderr, "Gosh. No error." - - -def _demo_windows(): - # - # Example 1: Connecting several subprocesses - # - print "Looking for 'PROMPT' in set output..." - p1 = Popen("set", stdout=PIPE, shell=True) - p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE) - print repr(p2.communicate()[0]) - - # - # Example 2: Simple execution of program - # - print "Executing calc..." - p = Popen("calc") - p.wait() - - -if __name__ == "__main__": - if mswindows: - _demo_windows() - else: - _demo_posix() diff --git a/libs_crutch/contrib/cherrypy/_cpconfig.py b/libs_crutch/contrib/cherrypy/_cpconfig.py deleted file mode 100644 index c11bc1d..0000000 --- a/libs_crutch/contrib/cherrypy/_cpconfig.py +++ /dev/null @@ -1,317 +0,0 @@ -""" -Configuration system for CherryPy. - -Configuration in CherryPy is implemented via dictionaries. Keys are strings -which name the mapped value, which may be of any type. - - -Architecture ------------- - -CherryPy Requests are part of an Application, which runs in a global context, -and configuration data may apply to any of those three scopes: - -Global - Configuration entries which apply everywhere are stored in - cherrypy.config. - -Application - Entries which apply to each mounted application are stored - on the Application object itself, as 'app.config'. This is a two-level - dict where each key is a path, or "relative URL" (for example, "/" or - "/path/to/my/page"), and each value is a config dict. 
Usually, this - data is provided in the call to tree.mount(root(), config=conf), - although you may also use app.merge(conf). - -Request - Each Request object possesses a single 'Request.config' dict. - Early in the request process, this dict is populated by merging global - config entries, Application entries (whose path equals or is a parent - of Request.path_info), and any config acquired while looking up the - page handler (see next). - - -Declaration ------------ - -Configuration data may be supplied as a Python dictionary, as a filename, -or as an open file object. When you supply a filename or file, CherryPy -uses Python's builtin ConfigParser; you declare Application config by -writing each path as a section header:: - - [/path/to/my/page] - request.stream = True - -To declare global configuration entries, place them in a [global] section. - -You may also declare config entries directly on the classes and methods -(page handlers) that make up your CherryPy application via the ``_cp_config`` -attribute. For example:: - - class Demo: - _cp_config = {'tools.gzip.on': True} - - def index(self): - return "Hello world" - index.exposed = True - index._cp_config = {'request.show_tracebacks': False} - -.. note:: - - This behavior is only guaranteed for the default dispatcher. - Other dispatchers may have different restrictions on where - you can attach _cp_config attributes. - - -Namespaces ----------- - -Configuration keys are separated into namespaces by the first "." in the key. -Current namespaces: - -engine - Controls the 'application engine', including autoreload. - These can only be declared in the global config. - -tree - Grafts cherrypy.Application objects onto cherrypy.tree. - These can only be declared in the global config. - -hooks - Declares additional request-processing functions. - -log - Configures the logging for each application. - These can only be declared in the global or / config. - -request - Adds attributes to each Request. 
- -response - Adds attributes to each Response. - -server - Controls the default HTTP server via cherrypy.server. - These can only be declared in the global config. - -tools - Runs and configures additional request-processing packages. - -wsgi - Adds WSGI middleware to an Application's "pipeline". - These can only be declared in the app's root config ("/"). - -checker - Controls the 'checker', which looks for common errors in - app state (including config) when the engine starts. - Global config only. - -The only key that does not exist in a namespace is the "environment" entry. -This special entry 'imports' other config entries from a template stored in -cherrypy._cpconfig.environments[environment]. It only applies to the global -config, and only when you use cherrypy.config.update. - -You can define your own namespaces to be called at the Global, Application, -or Request level, by adding a named handler to cherrypy.config.namespaces, -app.namespaces, or app.request_class.namespaces. The name can -be any string, and the handler must be either a callable or a (Python 2.5 -style) context manager. -""" - -import cherrypy -from cherrypy._cpcompat import set, basestring -from cherrypy.lib import reprconf - -# Deprecated in CherryPy 3.2--remove in 3.3 -NamespaceSet = reprconf.NamespaceSet - - -def merge(base, other): - """Merge one app config (from a dict, file, or filename) into another. - - If the given config is a filename, it will be appended to - the list of files to monitor for "autoreload" changes. - """ - if isinstance(other, basestring): - cherrypy.engine.autoreload.files.add(other) - - # Load other into base - for section, value_map in reprconf.as_dict(other).items(): - if not isinstance(value_map, dict): - raise ValueError( - "Application config must include section headers, but the " - "config you tried to merge doesn't have any sections. 
" - "Wrap your config in another dict with paths as section " - "headers, for example: {'/': config}.") - base.setdefault(section, {}).update(value_map) - - -class Config(reprconf.Config): - - """The 'global' configuration data for the entire CherryPy process.""" - - def update(self, config): - """Update self from a dict, file or filename.""" - if isinstance(config, basestring): - # Filename - cherrypy.engine.autoreload.files.add(config) - reprconf.Config.update(self, config) - - def _apply(self, config): - """Update self from a dict.""" - if isinstance(config.get("global"), dict): - if len(config) > 1: - cherrypy.checker.global_config_contained_paths = True - config = config["global"] - if 'tools.staticdir.dir' in config: - config['tools.staticdir.section'] = "global" - reprconf.Config._apply(self, config) - - def __call__(self, *args, **kwargs): - """Decorator for page handlers to set _cp_config.""" - if args: - raise TypeError( - "The cherrypy.config decorator does not accept positional " - "arguments; you must use keyword arguments.") - - def tool_decorator(f): - if not hasattr(f, "_cp_config"): - f._cp_config = {} - for k, v in kwargs.items(): - f._cp_config[k] = v - return f - return tool_decorator - - -# Sphinx begin config.environments -Config.environments = environments = { - "staging": { - 'engine.autoreload.on': False, - 'checker.on': False, - 'tools.log_headers.on': False, - 'request.show_tracebacks': False, - 'request.show_mismatched_params': False, - }, - "production": { - 'engine.autoreload.on': False, - 'checker.on': False, - 'tools.log_headers.on': False, - 'request.show_tracebacks': False, - 'request.show_mismatched_params': False, - 'log.screen': False, - }, - "embedded": { - # For use with CherryPy embedded in another deployment stack. 
- 'engine.autoreload.on': False, - 'checker.on': False, - 'tools.log_headers.on': False, - 'request.show_tracebacks': False, - 'request.show_mismatched_params': False, - 'log.screen': False, - 'engine.SIGHUP': None, - 'engine.SIGTERM': None, - }, - "test_suite": { - 'engine.autoreload.on': False, - 'checker.on': False, - 'tools.log_headers.on': False, - 'request.show_tracebacks': True, - 'request.show_mismatched_params': True, - 'log.screen': False, - }, -} -# Sphinx end config.environments - - -def _server_namespace_handler(k, v): - """Config handler for the "server" namespace.""" - atoms = k.split(".", 1) - if len(atoms) > 1: - # Special-case config keys of the form 'server.servername.socket_port' - # to configure additional HTTP servers. - if not hasattr(cherrypy, "servers"): - cherrypy.servers = {} - - servername, k = atoms - if servername not in cherrypy.servers: - from cherrypy import _cpserver - cherrypy.servers[servername] = _cpserver.Server() - # On by default, but 'on = False' can unsubscribe it (see below). - cherrypy.servers[servername].subscribe() - - if k == 'on': - if v: - cherrypy.servers[servername].subscribe() - else: - cherrypy.servers[servername].unsubscribe() - else: - setattr(cherrypy.servers[servername], k, v) - else: - setattr(cherrypy.server, k, v) -Config.namespaces["server"] = _server_namespace_handler - - -def _engine_namespace_handler(k, v): - """Backward compatibility handler for the "engine" namespace.""" - engine = cherrypy.engine - - deprecated = { - 'autoreload_on': 'autoreload.on', - 'autoreload_frequency': 'autoreload.frequency', - 'autoreload_match': 'autoreload.match', - 'reload_files': 'autoreload.files', - 'deadlock_poll_freq': 'timeout_monitor.frequency' - } - - if k in deprecated: - engine.log( - 'WARNING: Use of engine.%s is deprecated and will be removed in a ' - 'future version. Use engine.%s instead.' 
% (k, deprecated[k])) - - if k == 'autoreload_on': - if v: - engine.autoreload.subscribe() - else: - engine.autoreload.unsubscribe() - elif k == 'autoreload_frequency': - engine.autoreload.frequency = v - elif k == 'autoreload_match': - engine.autoreload.match = v - elif k == 'reload_files': - engine.autoreload.files = set(v) - elif k == 'deadlock_poll_freq': - engine.timeout_monitor.frequency = v - elif k == 'SIGHUP': - engine.listeners['SIGHUP'] = set([v]) - elif k == 'SIGTERM': - engine.listeners['SIGTERM'] = set([v]) - elif "." in k: - plugin, attrname = k.split(".", 1) - plugin = getattr(engine, plugin) - if attrname == 'on': - if v and hasattr(getattr(plugin, 'subscribe', None), '__call__'): - plugin.subscribe() - return - elif ( - (not v) and - hasattr(getattr(plugin, 'unsubscribe', None), '__call__') - ): - plugin.unsubscribe() - return - setattr(plugin, attrname, v) - else: - setattr(engine, k, v) -Config.namespaces["engine"] = _engine_namespace_handler - - -def _tree_namespace_handler(k, v): - """Namespace handler for the 'tree' config namespace.""" - if isinstance(v, dict): - for script_name, app in v.items(): - cherrypy.tree.graft(app, script_name) - cherrypy.engine.log("Mounted: %s on %s" % - (app, script_name or "/")) - else: - cherrypy.tree.graft(v, v.script_name) - cherrypy.engine.log("Mounted: %s on %s" % (v, v.script_name or "/")) -Config.namespaces["tree"] = _tree_namespace_handler diff --git a/libs_crutch/contrib/cherrypy/_cpdispatch.py b/libs_crutch/contrib/cherrypy/_cpdispatch.py deleted file mode 100644 index 1c2d7df..0000000 --- a/libs_crutch/contrib/cherrypy/_cpdispatch.py +++ /dev/null @@ -1,686 +0,0 @@ -"""CherryPy dispatchers. - -A 'dispatcher' is the object which looks up the 'page handler' callable -and collects config for the current request based on the path_info, other -request attributes, and the application architecture. The core calls the -dispatcher as early as possible, passing it a 'path_info' argument. 
- -The default dispatcher discovers the page handler by matching path_info -to a hierarchical arrangement of objects, starting at request.app.root. -""" - -import string -import sys -import types -try: - classtype = (type, types.ClassType) -except AttributeError: - classtype = type - -import cherrypy -from cherrypy._cpcompat import set - - -class PageHandler(object): - - """Callable which sets response.body.""" - - def __init__(self, callable, *args, **kwargs): - self.callable = callable - self.args = args - self.kwargs = kwargs - - def get_args(self): - return cherrypy.serving.request.args - - def set_args(self, args): - cherrypy.serving.request.args = args - return cherrypy.serving.request.args - - args = property( - get_args, - set_args, - doc="The ordered args should be accessible from post dispatch hooks" - ) - - def get_kwargs(self): - return cherrypy.serving.request.kwargs - - def set_kwargs(self, kwargs): - cherrypy.serving.request.kwargs = kwargs - return cherrypy.serving.request.kwargs - - kwargs = property( - get_kwargs, - set_kwargs, - doc="The named kwargs should be accessible from post dispatch hooks" - ) - - def __call__(self): - try: - return self.callable(*self.args, **self.kwargs) - except TypeError: - x = sys.exc_info()[1] - try: - test_callable_spec(self.callable, self.args, self.kwargs) - except cherrypy.HTTPError: - raise sys.exc_info()[1] - except: - raise x - raise - - -def test_callable_spec(callable, callable_args, callable_kwargs): - """ - Inspect callable and test to see if the given args are suitable for it. - - When an error occurs during the handler's invoking stage there are 2 - erroneous cases: - 1. Too many parameters passed to a function which doesn't define - one of *args or **kwargs. - 2. Too little parameters are passed to the function. - - There are 3 sources of parameters to a cherrypy handler. - 1. query string parameters are passed as keyword parameters to the - handler. - 2. 
body parameters are also passed as keyword parameters. - 3. when partial matching occurs, the final path atoms are passed as - positional args. - Both the query string and path atoms are part of the URI. If they are - incorrect, then a 404 Not Found should be raised. Conversely the body - parameters are part of the request; if they are invalid a 400 Bad Request. - """ - show_mismatched_params = getattr( - cherrypy.serving.request, 'show_mismatched_params', False) - try: - (args, varargs, varkw, defaults) = getargspec(callable) - except TypeError: - if isinstance(callable, object) and hasattr(callable, '__call__'): - (args, varargs, varkw, - defaults) = getargspec(callable.__call__) - else: - # If it wasn't one of our own types, re-raise - # the original error - raise - - if args and args[0] == 'self': - args = args[1:] - - arg_usage = dict([(arg, 0,) for arg in args]) - vararg_usage = 0 - varkw_usage = 0 - extra_kwargs = set() - - for i, value in enumerate(callable_args): - try: - arg_usage[args[i]] += 1 - except IndexError: - vararg_usage += 1 - - for key in callable_kwargs.keys(): - try: - arg_usage[key] += 1 - except KeyError: - varkw_usage += 1 - extra_kwargs.add(key) - - # figure out which args have defaults. - args_with_defaults = args[-len(defaults or []):] - for i, val in enumerate(defaults or []): - # Defaults take effect only when the arg hasn't been used yet. - if arg_usage[args_with_defaults[i]] == 0: - arg_usage[args_with_defaults[i]] += 1 - - missing_args = [] - multiple_args = [] - for key, usage in arg_usage.items(): - if usage == 0: - missing_args.append(key) - elif usage > 1: - multiple_args.append(key) - - if missing_args: - # In the case where the method allows body arguments - # there are 3 potential errors: - # 1. not enough query string parameters -> 404 - # 2. not enough body parameters -> 400 - # 3. 
not enough path parts (partial matches) -> 404 - # - # We can't actually tell which case it is, - # so I'm raising a 404 because that covers 2/3 of the - # possibilities - # - # In the case where the method does not allow body - # arguments it's definitely a 404. - message = None - if show_mismatched_params: - message = "Missing parameters: %s" % ",".join(missing_args) - raise cherrypy.HTTPError(404, message=message) - - # the extra positional arguments come from the path - 404 Not Found - if not varargs and vararg_usage > 0: - raise cherrypy.HTTPError(404) - - body_params = cherrypy.serving.request.body.params or {} - body_params = set(body_params.keys()) - qs_params = set(callable_kwargs.keys()) - body_params - - if multiple_args: - if qs_params.intersection(set(multiple_args)): - # If any of the multiple parameters came from the query string then - # it's a 404 Not Found - error = 404 - else: - # Otherwise it's a 400 Bad Request - error = 400 - - message = None - if show_mismatched_params: - message = "Multiple values for parameters: "\ - "%s" % ",".join(multiple_args) - raise cherrypy.HTTPError(error, message=message) - - if not varkw and varkw_usage > 0: - - # If there were extra query string parameters, it's a 404 Not Found - extra_qs_params = set(qs_params).intersection(extra_kwargs) - if extra_qs_params: - message = None - if show_mismatched_params: - message = "Unexpected query string "\ - "parameters: %s" % ", ".join(extra_qs_params) - raise cherrypy.HTTPError(404, message=message) - - # If there were any extra body parameters, it's a 400 Not Found - extra_body_params = set(body_params).intersection(extra_kwargs) - if extra_body_params: - message = None - if show_mismatched_params: - message = "Unexpected body parameters: "\ - "%s" % ", ".join(extra_body_params) - raise cherrypy.HTTPError(400, message=message) - - -try: - import inspect -except ImportError: - test_callable_spec = lambda callable, args, kwargs: None -else: - getargspec = inspect.getargspec 
- # Python 3 requires using getfullargspec if keyword-only arguments are present - if hasattr(inspect, 'getfullargspec'): - def getargspec(callable): - return inspect.getfullargspec(callable)[:4] - - -class LateParamPageHandler(PageHandler): - - """When passing cherrypy.request.params to the page handler, we do not - want to capture that dict too early; we want to give tools like the - decoding tool a chance to modify the params dict in-between the lookup - of the handler and the actual calling of the handler. This subclass - takes that into account, and allows request.params to be 'bound late' - (it's more complicated than that, but that's the effect). - """ - - def _get_kwargs(self): - kwargs = cherrypy.serving.request.params.copy() - if self._kwargs: - kwargs.update(self._kwargs) - return kwargs - - def _set_kwargs(self, kwargs): - cherrypy.serving.request.kwargs = kwargs - self._kwargs = kwargs - - kwargs = property(_get_kwargs, _set_kwargs, - doc='page handler kwargs (with ' - 'cherrypy.request.params copied in)') - - -if sys.version_info < (3, 0): - punctuation_to_underscores = string.maketrans( - string.punctuation, '_' * len(string.punctuation)) - - def validate_translator(t): - if not isinstance(t, str) or len(t) != 256: - raise ValueError( - "The translate argument must be a str of len 256.") -else: - punctuation_to_underscores = str.maketrans( - string.punctuation, '_' * len(string.punctuation)) - - def validate_translator(t): - if not isinstance(t, dict): - raise ValueError("The translate argument must be a dict.") - - -class Dispatcher(object): - - """CherryPy Dispatcher which walks a tree of objects to find a handler. - - The tree is rooted at cherrypy.request.app.root, and each hierarchical - component in the path_info argument is matched to a corresponding nested - attribute of the root object. Matching handlers must have an 'exposed' - attribute which evaluates to True. The special method name "index" - matches a URI which ends in a slash ("/"). 
The special method name - "default" may match a portion of the path_info (but only when no longer - substring of the path_info matches some other object). - - This is the default, built-in dispatcher for CherryPy. - """ - - dispatch_method_name = '_cp_dispatch' - """ - The name of the dispatch method that nodes may optionally implement - to provide their own dynamic dispatch algorithm. - """ - - def __init__(self, dispatch_method_name=None, - translate=punctuation_to_underscores): - validate_translator(translate) - self.translate = translate - if dispatch_method_name: - self.dispatch_method_name = dispatch_method_name - - def __call__(self, path_info): - """Set handler and config for the current request.""" - request = cherrypy.serving.request - func, vpath = self.find_handler(path_info) - - if func: - # Decode any leftover %2F in the virtual_path atoms. - vpath = [x.replace("%2F", "/") for x in vpath] - request.handler = LateParamPageHandler(func, *vpath) - else: - request.handler = cherrypy.NotFound() - - def find_handler(self, path): - """Return the appropriate page handler, plus any virtual path. - - This will return two objects. The first will be a callable, - which can be used to generate page output. Any parameters from - the query string or request body will be sent to that callable - as keyword arguments. - - The callable is found by traversing the application's tree, - starting from cherrypy.request.app.root, and matching path - components to successive objects in the tree. For example, the - URL "/path/to/handler" might return root.path.to.handler. - - The second object returned will be a list of names which are - 'virtual path' components: parts of the URL which are dynamic, - and were not used when looking up the handler. - These virtual path components are passed to the handler as - positional arguments. 
- """ - request = cherrypy.serving.request - app = request.app - root = app.root - dispatch_name = self.dispatch_method_name - - # Get config for the root object/path. - fullpath = [x for x in path.strip('/').split('/') if x] + ['index'] - fullpath_len = len(fullpath) - segleft = fullpath_len - nodeconf = {} - if hasattr(root, "_cp_config"): - nodeconf.update(root._cp_config) - if "/" in app.config: - nodeconf.update(app.config["/"]) - object_trail = [['root', root, nodeconf, segleft]] - - node = root - iternames = fullpath[:] - while iternames: - name = iternames[0] - # map to legal Python identifiers (e.g. replace '.' with '_') - objname = name.translate(self.translate) - - nodeconf = {} - subnode = getattr(node, objname, None) - pre_len = len(iternames) - if subnode is None: - dispatch = getattr(node, dispatch_name, None) - if dispatch and hasattr(dispatch, '__call__') and not \ - getattr(dispatch, 'exposed', False) and \ - pre_len > 1: - # Don't expose the hidden 'index' token to _cp_dispatch - # We skip this if pre_len == 1 since it makes no sense - # to call a dispatcher when we have no tokens left. - index_name = iternames.pop() - subnode = dispatch(vpath=iternames) - iternames.append(index_name) - else: - # We didn't find a path, but keep processing in case there - # is a default() handler. - iternames.pop(0) - else: - # We found the path, remove the vpath entry - iternames.pop(0) - segleft = len(iternames) - if segleft > pre_len: - # No path segment was removed. Raise an error. - raise cherrypy.CherryPyException( - "A vpath segment was added. Custom dispatchers may only " - + "remove elements. While trying to process " - + "{0} in {1}".format(name, fullpath) - ) - elif segleft == pre_len: - # Assume that the handler used the current path segment, but - # did not pop it. This allows things like - # return getattr(self, vpath[0], None) - iternames.pop(0) - segleft -= 1 - node = subnode - - if node is not None: - # Get _cp_config attached to this node. 
- if hasattr(node, "_cp_config"): - nodeconf.update(node._cp_config) - - # Mix in values from app.config for this path. - existing_len = fullpath_len - pre_len - if existing_len != 0: - curpath = '/' + '/'.join(fullpath[0:existing_len]) - else: - curpath = '' - new_segs = fullpath[fullpath_len - pre_len:fullpath_len - segleft] - for seg in new_segs: - curpath += '/' + seg - if curpath in app.config: - nodeconf.update(app.config[curpath]) - - object_trail.append([name, node, nodeconf, segleft]) - - def set_conf(): - """Collapse all object_trail config into cherrypy.request.config. - """ - base = cherrypy.config.copy() - # Note that we merge the config from each node - # even if that node was None. - for name, obj, conf, segleft in object_trail: - base.update(conf) - if 'tools.staticdir.dir' in conf: - base['tools.staticdir.section'] = '/' + \ - '/'.join(fullpath[0:fullpath_len - segleft]) - return base - - # Try successive objects (reverse order) - num_candidates = len(object_trail) - 1 - for i in range(num_candidates, -1, -1): - - name, candidate, nodeconf, segleft = object_trail[i] - if candidate is None: - continue - - # Try a "default" method on the current leaf. - if hasattr(candidate, "default"): - defhandler = candidate.default - if getattr(defhandler, 'exposed', False): - # Insert any extra _cp_config from the default handler. - conf = getattr(defhandler, "_cp_config", {}) - object_trail.insert( - i + 1, ["default", defhandler, conf, segleft]) - request.config = set_conf() - # See https://bitbucket.org/cherrypy/cherrypy/issue/613 - request.is_index = path.endswith("/") - return defhandler, fullpath[fullpath_len - segleft:-1] - - # Uncomment the next line to restrict positional params to - # "default". - # if i < num_candidates - 2: continue - - # Try the current leaf. - if getattr(candidate, 'exposed', False): - request.config = set_conf() - if i == num_candidates: - # We found the extra ".index". 
Mark request so tools - # can redirect if path_info has no trailing slash. - request.is_index = True - else: - # We're not at an 'index' handler. Mark request so tools - # can redirect if path_info has NO trailing slash. - # Note that this also includes handlers which take - # positional parameters (virtual paths). - request.is_index = False - return candidate, fullpath[fullpath_len - segleft:-1] - - # We didn't find anything - request.config = set_conf() - return None, [] - - -class MethodDispatcher(Dispatcher): - - """Additional dispatch based on cherrypy.request.method.upper(). - - Methods named GET, POST, etc will be called on an exposed class. - The method names must be all caps; the appropriate Allow header - will be output showing all capitalized method names as allowable - HTTP verbs. - - Note that the containing class must be exposed, not the methods. - """ - - def __call__(self, path_info): - """Set handler and config for the current request.""" - request = cherrypy.serving.request - resource, vpath = self.find_handler(path_info) - - if resource: - # Set Allow header - avail = [m for m in dir(resource) if m.isupper()] - if "GET" in avail and "HEAD" not in avail: - avail.append("HEAD") - avail.sort() - cherrypy.serving.response.headers['Allow'] = ", ".join(avail) - - # Find the subhandler - meth = request.method.upper() - func = getattr(resource, meth, None) - if func is None and meth == "HEAD": - func = getattr(resource, "GET", None) - if func: - # Grab any _cp_config on the subhandler. - if hasattr(func, "_cp_config"): - request.config.update(func._cp_config) - - # Decode any leftover %2F in the virtual_path atoms. 
- vpath = [x.replace("%2F", "/") for x in vpath] - request.handler = LateParamPageHandler(func, *vpath) - else: - request.handler = cherrypy.HTTPError(405) - else: - request.handler = cherrypy.NotFound() - - -class RoutesDispatcher(object): - - """A Routes based dispatcher for CherryPy.""" - - def __init__(self, full_result=False, **mapper_options): - """ - Routes dispatcher - - Set full_result to True if you wish the controller - and the action to be passed on to the page handler - parameters. By default they won't be. - """ - import routes - self.full_result = full_result - self.controllers = {} - self.mapper = routes.Mapper(**mapper_options) - self.mapper.controller_scan = self.controllers.keys - - def connect(self, name, route, controller, **kwargs): - self.controllers[name] = controller - self.mapper.connect(name, route, controller=name, **kwargs) - - def redirect(self, url): - raise cherrypy.HTTPRedirect(url) - - def __call__(self, path_info): - """Set handler and config for the current request.""" - func = self.find_handler(path_info) - if func: - cherrypy.serving.request.handler = LateParamPageHandler(func) - else: - cherrypy.serving.request.handler = cherrypy.NotFound() - - def find_handler(self, path_info): - """Find the right page handler, and set request.config.""" - import routes - - request = cherrypy.serving.request - - config = routes.request_config() - config.mapper = self.mapper - if hasattr(request, 'wsgi_environ'): - config.environ = request.wsgi_environ - config.host = request.headers.get('Host', None) - config.protocol = request.scheme - config.redirect = self.redirect - - result = self.mapper.match(path_info) - - config.mapper_dict = result - params = {} - if result: - params = result.copy() - if not self.full_result: - params.pop('controller', None) - params.pop('action', None) - request.params.update(params) - - # Get config for the root object/path. 
- request.config = base = cherrypy.config.copy() - curpath = "" - - def merge(nodeconf): - if 'tools.staticdir.dir' in nodeconf: - nodeconf['tools.staticdir.section'] = curpath or "/" - base.update(nodeconf) - - app = request.app - root = app.root - if hasattr(root, "_cp_config"): - merge(root._cp_config) - if "/" in app.config: - merge(app.config["/"]) - - # Mix in values from app.config. - atoms = [x for x in path_info.split("/") if x] - if atoms: - last = atoms.pop() - else: - last = None - for atom in atoms: - curpath = "/".join((curpath, atom)) - if curpath in app.config: - merge(app.config[curpath]) - - handler = None - if result: - controller = result.get('controller') - controller = self.controllers.get(controller, controller) - if controller: - if isinstance(controller, classtype): - controller = controller() - # Get config from the controller. - if hasattr(controller, "_cp_config"): - merge(controller._cp_config) - - action = result.get('action') - if action is not None: - handler = getattr(controller, action, None) - # Get config from the handler - if hasattr(handler, "_cp_config"): - merge(handler._cp_config) - else: - handler = controller - - # Do the last path atom here so it can - # override the controller's _cp_config. - if last: - curpath = "/".join((curpath, last)) - if curpath in app.config: - merge(app.config[curpath]) - - return handler - - -def XMLRPCDispatcher(next_dispatcher=Dispatcher()): - from cherrypy.lib import xmlrpcutil - - def xmlrpc_dispatch(path_info): - path_info = xmlrpcutil.patched_path(path_info) - return next_dispatcher(path_info) - return xmlrpc_dispatch - - -def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True, - **domains): - """ - Select a different handler based on the Host header. - - This can be useful when running multiple sites within one CP server. - It allows several domains to point to different parts of a single - website structure. 
For example:: - - http://www.domain.example -> root - http://www.domain2.example -> root/domain2/ - http://www.domain2.example:443 -> root/secure - - can be accomplished via the following config:: - - [/] - request.dispatch = cherrypy.dispatch.VirtualHost( - **{'www.domain2.example': '/domain2', - 'www.domain2.example:443': '/secure', - }) - - next_dispatcher - The next dispatcher object in the dispatch chain. - The VirtualHost dispatcher adds a prefix to the URL and calls - another dispatcher. Defaults to cherrypy.dispatch.Dispatcher(). - - use_x_forwarded_host - If True (the default), any "X-Forwarded-Host" - request header will be used instead of the "Host" header. This - is commonly added by HTTP servers (such as Apache) when proxying. - - ``**domains`` - A dict of {host header value: virtual prefix} pairs. - The incoming "Host" request header is looked up in this dict, - and, if a match is found, the corresponding "virtual prefix" - value will be prepended to the URL path before calling the - next dispatcher. Note that you often need separate entries - for "example.com" and "www.example.com". In addition, "Host" - headers may contain the port number. - """ - from cherrypy.lib import httputil - - def vhost_dispatch(path_info): - request = cherrypy.serving.request - header = request.headers.get - - domain = header('Host', '') - if use_x_forwarded_host: - domain = header("X-Forwarded-Host", domain) - - prefix = domains.get(domain, "") - if prefix: - path_info = httputil.urljoin(prefix, path_info) - - result = next_dispatcher(path_info) - - # Touch up staticdir config. See - # https://bitbucket.org/cherrypy/cherrypy/issue/614. 
- section = request.config.get('tools.staticdir.section') - if section: - section = section[len(prefix):] - request.config['tools.staticdir.section'] = section - - return result - return vhost_dispatch diff --git a/libs_crutch/contrib/cherrypy/_cperror.py b/libs_crutch/contrib/cherrypy/_cperror.py deleted file mode 100644 index 6256595..0000000 --- a/libs_crutch/contrib/cherrypy/_cperror.py +++ /dev/null @@ -1,609 +0,0 @@ -"""Exception classes for CherryPy. - -CherryPy provides (and uses) exceptions for declaring that the HTTP response -should be a status other than the default "200 OK". You can ``raise`` them like -normal Python exceptions. You can also call them and they will raise -themselves; this means you can set an -:class:`HTTPError` -or :class:`HTTPRedirect` as the -:attr:`request.handler`. - -.. _redirectingpost: - -Redirecting POST -================ - -When you GET a resource and are redirected by the server to another Location, -there's generally no problem since GET is both a "safe method" (there should -be no side-effects) and an "idempotent method" (multiple calls are no different -than a single call). - -POST, however, is neither safe nor idempotent--if you -charge a credit card, you don't want to be charged twice by a redirect! 
- -For this reason, *none* of the 3xx responses permit a user-agent (browser) to -resubmit a POST on redirection without first confirming the action with the -user: - -===== ================================= =========== -300 Multiple Choices Confirm with the user -301 Moved Permanently Confirm with the user -302 Found (Object moved temporarily) Confirm with the user -303 See Other GET the new URI--no confirmation -304 Not modified (for conditional GET only--POST should not raise this error) -305 Use Proxy Confirm with the user -307 Temporary Redirect Confirm with the user -===== ================================= =========== - -However, browsers have historically implemented these restrictions poorly; -in particular, many browsers do not force the user to confirm 301, 302 -or 307 when redirecting POST. For this reason, CherryPy defaults to 303, -which most user-agents appear to have implemented correctly. Therefore, if -you raise HTTPRedirect for a POST request, the user-agent will most likely -attempt to GET the new URI (without asking for confirmation from the user). -We realize this is confusing for developers, but it's the safest thing we -could do. You are of course free to raise ``HTTPRedirect(uri, status=302)`` -or any other 3xx status if you know what you're doing, but given the -environment, we couldn't let any of those be the default. - -Custom Error Handling -===================== - -.. image:: /refman/cperrors.gif - -Anticipated HTTP responses --------------------------- - -The 'error_page' config namespace can be used to provide custom HTML output for -expected responses (like 404 Not Found). Supply a filename from which the -output will be read. The contents will be interpolated with the values -%(status)s, %(message)s, %(traceback)s, and %(version)s using plain old Python -`string formatting `_. 
- -:: - - _cp_config = { - 'error_page.404': os.path.join(localDir, "static/index.html") - } - - -Beginning in version 3.1, you may also provide a function or other callable as -an error_page entry. It will be passed the same status, message, traceback and -version arguments that are interpolated into templates:: - - def error_page_402(status, message, traceback, version): - return "Error %s - Well, I'm very sorry but you haven't paid!" % status - cherrypy.config.update({'error_page.402': error_page_402}) - -Also in 3.1, in addition to the numbered error codes, you may also supply -"error_page.default" to handle all codes which do not have their own error_page -entry. - - - -Unanticipated errors --------------------- - -CherryPy also has a generic error handling mechanism: whenever an unanticipated -error occurs in your code, it will call -:func:`Request.error_response` to -set the response status, headers, and body. By default, this is the same -output as -:class:`HTTPError(500) `. If you want to provide -some other behavior, you generally replace "request.error_response". - -Here is some sample code that shows how to display a custom error message and -send an e-mail containing the error:: - - from cherrypy import _cperror - - def handle_error(): - cherrypy.response.status = 500 - cherrypy.response.body = [ - "Sorry, an error occured" - ] - sendMail('error@domain.com', - 'Error in your web app', - _cperror.format_exc()) - - class Root: - _cp_config = {'request.error_response': handle_error} - - -Note that you have to explicitly set -:attr:`response.body ` -and not simply return an error message as a result. 
-""" - -from cgi import escape as _escape -from sys import exc_info as _exc_info -from traceback import format_exception as _format_exception -from cherrypy._cpcompat import basestring, bytestr, iteritems, ntob -from cherrypy._cpcompat import tonative, urljoin as _urljoin -from cherrypy.lib import httputil as _httputil - - -class CherryPyException(Exception): - - """A base class for CherryPy exceptions.""" - pass - - -class TimeoutError(CherryPyException): - - """Exception raised when Response.timed_out is detected.""" - pass - - -class InternalRedirect(CherryPyException): - - """Exception raised to switch to the handler for a different URL. - - This exception will redirect processing to another path within the site - (without informing the client). Provide the new path as an argument when - raising the exception. Provide any params in the querystring for the new - URL. - """ - - def __init__(self, path, query_string=""): - import cherrypy - self.request = cherrypy.serving.request - - self.query_string = query_string - if "?" in path: - # Separate any params included in the path - path, self.query_string = path.split("?", 1) - - # Note that urljoin will "do the right thing" whether url is: - # 1. a URL relative to root (e.g. "/dummy") - # 2. a URL relative to the current path - # Note that any query string will be discarded. - path = _urljoin(self.request.path_info, path) - - # Set a 'path' member attribute so that code which traps this - # error can have access to it. - self.path = path - - CherryPyException.__init__(self, path, self.query_string) - - -class HTTPRedirect(CherryPyException): - - """Exception raised when the request should be redirected. - - This exception will force a HTTP redirect to the URL or URL's you give it. - The new URL must be passed as the first argument to the Exception, - e.g., HTTPRedirect(newUrl). Multiple URLs are allowed in a list. - If a URL is absolute, it will be used as-is. 
If it is relative, it is - assumed to be relative to the current cherrypy.request.path_info. - - If one of the provided URL is a unicode object, it will be encoded - using the default encoding or the one passed in parameter. - - There are multiple types of redirect, from which you can select via the - ``status`` argument. If you do not provide a ``status`` arg, it defaults to - 303 (or 302 if responding with HTTP/1.0). - - Examples:: - - raise cherrypy.HTTPRedirect("") - raise cherrypy.HTTPRedirect("/abs/path", 307) - raise cherrypy.HTTPRedirect(["path1", "path2?a=1&b=2"], 301) - - See :ref:`redirectingpost` for additional caveats. - """ - - status = None - """The integer HTTP status code to emit.""" - - urls = None - """The list of URL's to emit.""" - - encoding = 'utf-8' - """The encoding when passed urls are not native strings""" - - def __init__(self, urls, status=None, encoding=None): - import cherrypy - request = cherrypy.serving.request - - if isinstance(urls, basestring): - urls = [urls] - - abs_urls = [] - for url in urls: - url = tonative(url, encoding or self.encoding) - - # Note that urljoin will "do the right thing" whether url is: - # 1. a complete URL with host (e.g. "http://www.example.com/test") - # 2. a URL relative to root (e.g. "/dummy") - # 3. a URL relative to the current path - # Note that any query string in cherrypy.request is discarded. - url = _urljoin(cherrypy.url(), url) - abs_urls.append(url) - self.urls = abs_urls - - # RFC 2616 indicates a 301 response code fits our goal; however, - # browser support for 301 is quite messy. Do 302/303 instead. 
See - # http://www.alanflavell.org.uk/www/post-redirect.html - if status is None: - if request.protocol >= (1, 1): - status = 303 - else: - status = 302 - else: - status = int(status) - if status < 300 or status > 399: - raise ValueError("status must be between 300 and 399.") - - self.status = status - CherryPyException.__init__(self, abs_urls, status) - - def set_response(self): - """Modify cherrypy.response status, headers, and body to represent - self. - - CherryPy uses this internally, but you can also use it to create an - HTTPRedirect object and set its output without *raising* the exception. - """ - import cherrypy - response = cherrypy.serving.response - response.status = status = self.status - - if status in (300, 301, 302, 303, 307): - response.headers['Content-Type'] = "text/html;charset=utf-8" - # "The ... URI SHOULD be given by the Location field - # in the response." - response.headers['Location'] = self.urls[0] - - # "Unless the request method was HEAD, the entity of the response - # SHOULD contain a short hypertext note with a hyperlink to the - # new URI(s)." - msg = { - 300: "This resource can be found at ", - 301: "This resource has permanently moved to ", - 302: "This resource resides temporarily at ", - 303: "This resource can be found at ", - 307: "This resource has moved temporarily to ", - }[status] - msg += '%s.' - from xml.sax import saxutils - msgs = [msg % (saxutils.quoteattr(u), u) for u in self.urls] - response.body = ntob("
\n".join(msgs), 'utf-8') - # Previous code may have set C-L, so we have to reset it - # (allow finalize to set it). - response.headers.pop('Content-Length', None) - elif status == 304: - # Not Modified. - # "The response MUST include the following header fields: - # Date, unless its omission is required by section 14.18.1" - # The "Date" header should have been set in Response.__init__ - - # "...the response SHOULD NOT include other entity-headers." - for key in ('Allow', 'Content-Encoding', 'Content-Language', - 'Content-Length', 'Content-Location', 'Content-MD5', - 'Content-Range', 'Content-Type', 'Expires', - 'Last-Modified'): - if key in response.headers: - del response.headers[key] - - # "The 304 response MUST NOT contain a message-body." - response.body = None - # Previous code may have set C-L, so we have to reset it. - response.headers.pop('Content-Length', None) - elif status == 305: - # Use Proxy. - # self.urls[0] should be the URI of the proxy. - response.headers['Location'] = self.urls[0] - response.body = None - # Previous code may have set C-L, so we have to reset it. - response.headers.pop('Content-Length', None) - else: - raise ValueError("The %s status code is unknown." % status) - - def __call__(self): - """Use this exception as a request.handler (raise self).""" - raise self - - -def clean_headers(status): - """Remove any headers which should not apply to an error response.""" - import cherrypy - - response = cherrypy.serving.response - - # Remove headers which applied to the original content, - # but do not apply to the error page. 
- respheaders = response.headers - for key in ["Accept-Ranges", "Age", "ETag", "Location", "Retry-After", - "Vary", "Content-Encoding", "Content-Length", "Expires", - "Content-Location", "Content-MD5", "Last-Modified"]: - if key in respheaders: - del respheaders[key] - - if status != 416: - # A server sending a response with status code 416 (Requested - # range not satisfiable) SHOULD include a Content-Range field - # with a byte-range-resp-spec of "*". The instance-length - # specifies the current length of the selected resource. - # A response with status code 206 (Partial Content) MUST NOT - # include a Content-Range field with a byte-range- resp-spec of "*". - if "Content-Range" in respheaders: - del respheaders["Content-Range"] - - -class HTTPError(CherryPyException): - - """Exception used to return an HTTP error code (4xx-5xx) to the client. - - This exception can be used to automatically send a response using a - http status code, with an appropriate error page. It takes an optional - ``status`` argument (which must be between 400 and 599); it defaults to 500 - ("Internal Server Error"). It also takes an optional ``message`` argument, - which will be returned in the response body. See - `RFC2616 `_ - for a complete list of available error codes and when to use them. - - Examples:: - - raise cherrypy.HTTPError(403) - raise cherrypy.HTTPError( - "403 Forbidden", "You are not allowed to access this resource.") - """ - - status = None - """The HTTP status code. May be of type int or str (with a Reason-Phrase). 
- """ - - code = None - """The integer HTTP status code.""" - - reason = None - """The HTTP Reason-Phrase string.""" - - def __init__(self, status=500, message=None): - self.status = status - try: - self.code, self.reason, defaultmsg = _httputil.valid_status(status) - except ValueError: - raise self.__class__(500, _exc_info()[1].args[0]) - - if self.code < 400 or self.code > 599: - raise ValueError("status must be between 400 and 599.") - - # See http://www.python.org/dev/peps/pep-0352/ - # self.message = message - self._message = message or defaultmsg - CherryPyException.__init__(self, status, message) - - def set_response(self): - """Modify cherrypy.response status, headers, and body to represent - self. - - CherryPy uses this internally, but you can also use it to create an - HTTPError object and set its output without *raising* the exception. - """ - import cherrypy - - response = cherrypy.serving.response - - clean_headers(self.code) - - # In all cases, finalize will be called after this method, - # so don't bother cleaning up response values here. - response.status = self.status - tb = None - if cherrypy.serving.request.show_tracebacks: - tb = format_exc() - - response.headers.pop('Content-Length', None) - - content = self.get_error_page(self.status, traceback=tb, - message=self._message) - response.body = content - - _be_ie_unfriendly(self.code) - - def get_error_page(self, *args, **kwargs): - return get_error_page(*args, **kwargs) - - def __call__(self): - """Use this exception as a request.handler (raise self).""" - raise self - - -class NotFound(HTTPError): - - """Exception raised when a URL could not be mapped to any handler (404). - - This is equivalent to raising - :class:`HTTPError("404 Not Found") `. - """ - - def __init__(self, path=None): - if path is None: - import cherrypy - request = cherrypy.serving.request - path = request.script_name + request.path_info - self.args = (path,) - HTTPError.__init__(self, 404, "The path '%s' was not found." 
% path) - - -_HTTPErrorTemplate = ''' - - - - %(status)s - - - -

%(status)s

-

%(message)s

-
%(traceback)s
-
- - Powered by CherryPy %(version)s - -
- - -''' - - -def get_error_page(status, **kwargs): - """Return an HTML page, containing a pretty error response. - - status should be an int or a str. - kwargs will be interpolated into the page template. - """ - import cherrypy - - try: - code, reason, message = _httputil.valid_status(status) - except ValueError: - raise cherrypy.HTTPError(500, _exc_info()[1].args[0]) - - # We can't use setdefault here, because some - # callers send None for kwarg values. - if kwargs.get('status') is None: - kwargs['status'] = "%s %s" % (code, reason) - if kwargs.get('message') is None: - kwargs['message'] = message - if kwargs.get('traceback') is None: - kwargs['traceback'] = '' - if kwargs.get('version') is None: - kwargs['version'] = cherrypy.__version__ - - for k, v in iteritems(kwargs): - if v is None: - kwargs[k] = "" - else: - kwargs[k] = _escape(kwargs[k]) - - # Use a custom template or callable for the error page? - pages = cherrypy.serving.request.error_page - error_page = pages.get(code) or pages.get('default') - - # Default template, can be overridden below. - template = _HTTPErrorTemplate - if error_page: - try: - if hasattr(error_page, '__call__'): - # The caller function may be setting headers manually, - # so we delegate to it completely. We may be returning - # an iterator as well as a string here. - # - # We *must* make sure any content is not unicode. - result = error_page(**kwargs) - if cherrypy.lib.is_iterator(result): - from cherrypy.lib.encoding import UTF8StreamEncoder - return UTF8StreamEncoder(result) - elif isinstance(result, cherrypy._cpcompat.unicodestr): - return result.encode('utf-8') - else: - if not isinstance(result, cherrypy._cpcompat.bytestr): - raise ValueError('error page function did not ' - 'return a bytestring, unicodestring or an ' - 'iterator - returned object of type %s.' - % (type(result).__name__)) - return result - else: - # Load the template from this path. 
- template = tonative(open(error_page, 'rb').read()) - except: - e = _format_exception(*_exc_info())[-1] - m = kwargs['message'] - if m: - m += "
" - m += "In addition, the custom error page failed:\n
%s" % e - kwargs['message'] = m - - response = cherrypy.serving.response - response.headers['Content-Type'] = "text/html;charset=utf-8" - result = template % kwargs - return result.encode('utf-8') - - - -_ie_friendly_error_sizes = { - 400: 512, 403: 256, 404: 512, 405: 256, - 406: 512, 408: 512, 409: 512, 410: 256, - 500: 512, 501: 512, 505: 512, -} - - -def _be_ie_unfriendly(status): - import cherrypy - response = cherrypy.serving.response - - # For some statuses, Internet Explorer 5+ shows "friendly error - # messages" instead of our response.body if the body is smaller - # than a given size. Fix this by returning a body over that size - # (by adding whitespace). - # See http://support.microsoft.com/kb/q218155/ - s = _ie_friendly_error_sizes.get(status, 0) - if s: - s += 1 - # Since we are issuing an HTTP error status, we assume that - # the entity is short, and we should just collapse it. - content = response.collapse_body() - l = len(content) - if l and l < s: - # IN ADDITION: the response must be written to IE - # in one chunk or it will still get replaced! Bah. - content = content + (ntob(" ") * (s - l)) - response.body = content - response.headers['Content-Length'] = str(len(content)) - - -def format_exc(exc=None): - """Return exc (or sys.exc_info if None), formatted.""" - try: - if exc is None: - exc = _exc_info() - if exc == (None, None, None): - return "" - import traceback - return "".join(traceback.format_exception(*exc)) - finally: - del exc - - -def bare_error(extrabody=None): - """Produce status, headers, body for a critical error. - - Returns a triple without calling any other questionable functions, - so it should be as error-free as possible. Call it from an HTTP server - if you get errors outside of the request. - - If extrabody is None, a friendly but rather unhelpful error message - is set in the body. If extrabody is a string, it will be appended - as-is to the body. 
- """ - - # The whole point of this function is to be a last line-of-defense - # in handling errors. That is, it must not raise any errors itself; - # it cannot be allowed to fail. Therefore, don't add to it! - # In particular, don't call any other CP functions. - - body = ntob("Unrecoverable error in the server.") - if extrabody is not None: - if not isinstance(extrabody, bytestr): - extrabody = extrabody.encode('utf-8') - body += ntob("\n") + extrabody - - return (ntob("500 Internal Server Error"), - [(ntob('Content-Type'), ntob('text/plain')), - (ntob('Content-Length'), ntob(str(len(body)), 'ISO-8859-1'))], - [body]) diff --git a/libs_crutch/contrib/cherrypy/_cplogging.py b/libs_crutch/contrib/cherrypy/_cplogging.py deleted file mode 100644 index 19d1d91..0000000 --- a/libs_crutch/contrib/cherrypy/_cplogging.py +++ /dev/null @@ -1,461 +0,0 @@ -""" -Simple config -============= - -Although CherryPy uses the :mod:`Python logging module `, it does so -behind the scenes so that simple logging is simple, but complicated logging -is still possible. "Simple" logging means that you can log to the screen -(i.e. console/stdout) or to a file, and that you can easily have separate -error and access log files. - -Here are the simplified logging settings. You use these by adding lines to -your config file or dict. You should set these at either the global level or -per application (see next), but generally not both. - - * ``log.screen``: Set this to True to have both "error" and "access" messages - printed to stdout. - * ``log.access_file``: Set this to an absolute filename where you want - "access" messages written. - * ``log.error_file``: Set this to an absolute filename where you want "error" - messages written. - -Many events are automatically logged; to log your own application events, call -:func:`cherrypy.log`. - -Architecture -============ - -Separate scopes ---------------- - -CherryPy provides log managers at both the global and application layers. 
-This means you can have one set of logging rules for your entire site, -and another set of rules specific to each application. The global log -manager is found at :func:`cherrypy.log`, and the log manager for each -application is found at :attr:`app.log`. -If you're inside a request, the latter is reachable from -``cherrypy.request.app.log``; if you're outside a request, you'll have to -obtain a reference to the ``app``: either the return value of -:func:`tree.mount()` or, if you used -:func:`quickstart()` instead, via -``cherrypy.tree.apps['/']``. - -By default, the global logs are named "cherrypy.error" and "cherrypy.access", -and the application logs are named "cherrypy.error.2378745" and -"cherrypy.access.2378745" (the number is the id of the Application object). -This means that the application logs "bubble up" to the site logs, so if your -application has no log handlers, the site-level handlers will still log the -messages. - -Errors vs. Access ------------------ - -Each log manager handles both "access" messages (one per HTTP request) and -"error" messages (everything else). Note that the "error" log is not just for -errors! The format of access messages is highly formalized, but the error log -isn't--it receives messages from a variety of sources (including full error -tracebacks, if enabled). - -If you are logging the access log and error log to the same source, then there -is a possibility that a specially crafted error message may replicate an access -log message as described in CWE-117. In this case it is the application -developer's responsibility to manually escape data before using CherryPy's log() -functionality, or they may create an application that is vulnerable to CWE-117. -This would be achieved by using a custom handler escape any special characters, -and attached as described below. - -Custom Handlers -=============== - -The simple settings above work by manipulating Python's standard :mod:`logging` -module. 
So when you need something more complex, the full power of the standard -module is yours to exploit. You can borrow or create custom handlers, formats, -filters, and much more. Here's an example that skips the standard FileHandler -and uses a RotatingFileHandler instead: - -:: - - #python - log = app.log - - # Remove the default FileHandlers if present. - log.error_file = "" - log.access_file = "" - - maxBytes = getattr(log, "rot_maxBytes", 10000000) - backupCount = getattr(log, "rot_backupCount", 1000) - - # Make a new RotatingFileHandler for the error log. - fname = getattr(log, "rot_error_file", "error.log") - h = handlers.RotatingFileHandler(fname, 'a', maxBytes, backupCount) - h.setLevel(DEBUG) - h.setFormatter(_cplogging.logfmt) - log.error_log.addHandler(h) - - # Make a new RotatingFileHandler for the access log. - fname = getattr(log, "rot_access_file", "access.log") - h = handlers.RotatingFileHandler(fname, 'a', maxBytes, backupCount) - h.setLevel(DEBUG) - h.setFormatter(_cplogging.logfmt) - log.access_log.addHandler(h) - - -The ``rot_*`` attributes are pulled straight from the application log object. -Since "log.*" config entries simply set attributes on the log object, you can -add custom attributes to your heart's content. Note that these handlers are -used ''instead'' of the default, simple handlers outlined above (so don't set -the "log.error_file" config entry, for example). -""" - -import datetime -import logging -# Silence the no-handlers "warning" (stderr write!) 
in stdlib logging -logging.Logger.manager.emittedNoHandlerWarning = 1 -logfmt = logging.Formatter("%(message)s") -import os -import sys - -import cherrypy -from cherrypy import _cperror -from cherrypy._cpcompat import ntob, py3k - - -class NullHandler(logging.Handler): - - """A no-op logging handler to silence the logging.lastResort handler.""" - - def handle(self, record): - pass - - def emit(self, record): - pass - - def createLock(self): - self.lock = None - - -class LogManager(object): - - """An object to assist both simple and advanced logging. - - ``cherrypy.log`` is an instance of this class. - """ - - appid = None - """The id() of the Application object which owns this log manager. If this - is a global log manager, appid is None.""" - - error_log = None - """The actual :class:`logging.Logger` instance for error messages.""" - - access_log = None - """The actual :class:`logging.Logger` instance for access messages.""" - - if py3k: - access_log_format = \ - '{h} {l} {u} {t} "{r}" {s} {b} "{f}" "{a}"' - else: - access_log_format = \ - '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"' - - logger_root = None - """The "top-level" logger name. - - This string will be used as the first segment in the Logger names. - The default is "cherrypy", for example, in which case the Logger names - will be of the form:: - - cherrypy.error. - cherrypy.access. - """ - - def __init__(self, appid=None, logger_root="cherrypy"): - self.logger_root = logger_root - self.appid = appid - if appid is None: - self.error_log = logging.getLogger("%s.error" % logger_root) - self.access_log = logging.getLogger("%s.access" % logger_root) - else: - self.error_log = logging.getLogger( - "%s.error.%s" % (logger_root, appid)) - self.access_log = logging.getLogger( - "%s.access.%s" % (logger_root, appid)) - self.error_log.setLevel(logging.INFO) - self.access_log.setLevel(logging.INFO) - - # Silence the no-handlers "warning" (stderr write!) 
in stdlib logging - self.error_log.addHandler(NullHandler()) - self.access_log.addHandler(NullHandler()) - - cherrypy.engine.subscribe('graceful', self.reopen_files) - - def reopen_files(self): - """Close and reopen all file handlers.""" - for log in (self.error_log, self.access_log): - for h in log.handlers: - if isinstance(h, logging.FileHandler): - h.acquire() - h.stream.close() - h.stream = open(h.baseFilename, h.mode) - h.release() - - def error(self, msg='', context='', severity=logging.INFO, - traceback=False): - """Write the given ``msg`` to the error log. - - This is not just for errors! Applications may call this at any time - to log application-specific information. - - If ``traceback`` is True, the traceback of the current exception - (if any) will be appended to ``msg``. - """ - exc_info = None - if traceback: - exc_info = _cperror._exc_info() - - self.error_log.log(severity, ' '.join((self.time(), context, msg)), exc_info=exc_info) - - def __call__(self, *args, **kwargs): - """An alias for ``error``.""" - return self.error(*args, **kwargs) - - def access(self): - """Write to the access log (in Apache/NCSA Combined Log format). - - See the - `apache documentation `_ - for format details. - - CherryPy calls this automatically for you. Note there are no arguments; - it collects the data itself from - :class:`cherrypy.request`. - - Like Apache started doing in 2.0.46, non-printable and other special - characters in %r (and we expand that to all parts) are escaped using - \\xhh sequences, where hh stands for the hexadecimal representation - of the raw byte. Exceptions from this rule are " and \\, which are - escaped by prepending a backslash, and all whitespace characters, - which are written in their C-style notation (\\n, \\t, etc). 
- """ - request = cherrypy.serving.request - remote = request.remote - response = cherrypy.serving.response - outheaders = response.headers - inheaders = request.headers - if response.output_status is None: - status = "-" - else: - status = response.output_status.split(ntob(" "), 1)[0] - if py3k: - status = status.decode('ISO-8859-1') - - atoms = {'h': remote.name or remote.ip, - 'l': '-', - 'u': getattr(request, "login", None) or "-", - 't': self.time(), - 'r': request.request_line, - 's': status, - 'b': dict.get(outheaders, 'Content-Length', '') or "-", - 'f': dict.get(inheaders, 'Referer', ''), - 'a': dict.get(inheaders, 'User-Agent', ''), - 'o': dict.get(inheaders, 'Host', '-'), - } - if py3k: - for k, v in atoms.items(): - if not isinstance(v, str): - v = str(v) - v = v.replace('"', '\\"').encode('utf8') - # Fortunately, repr(str) escapes unprintable chars, \n, \t, etc - # and backslash for us. All we have to do is strip the quotes. - v = repr(v)[2:-1] - - # in python 3.0 the repr of bytes (as returned by encode) - # uses double \'s. But then the logger escapes them yet, again - # resulting in quadruple slashes. Remove the extra one here. - v = v.replace('\\\\', '\\') - - # Escape double-quote. - atoms[k] = v - - try: - self.access_log.log( - logging.INFO, self.access_log_format.format(**atoms)) - except: - self(traceback=True) - else: - for k, v in atoms.items(): - if isinstance(v, unicode): - v = v.encode('utf8') - elif not isinstance(v, str): - v = str(v) - # Fortunately, repr(str) escapes unprintable chars, \n, \t, etc - # and backslash for us. All we have to do is strip the quotes. - v = repr(v)[1:-1] - # Escape double-quote. 
- atoms[k] = v.replace('"', '\\"') - - try: - self.access_log.log( - logging.INFO, self.access_log_format % atoms) - except: - self(traceback=True) - - def time(self): - """Return now() in Apache Common Log Format (no timezone).""" - now = datetime.datetime.now() - monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', - 'jul', 'aug', 'sep', 'oct', 'nov', 'dec'] - month = monthnames[now.month - 1].capitalize() - return ('[%02d/%s/%04d:%02d:%02d:%02d]' % - (now.day, month, now.year, now.hour, now.minute, now.second)) - - def _get_builtin_handler(self, log, key): - for h in log.handlers: - if getattr(h, "_cpbuiltin", None) == key: - return h - - # ------------------------- Screen handlers ------------------------- # - def _set_screen_handler(self, log, enable, stream=None): - h = self._get_builtin_handler(log, "screen") - if enable: - if not h: - if stream is None: - stream = sys.stderr - h = logging.StreamHandler(stream) - h.setFormatter(logfmt) - h._cpbuiltin = "screen" - log.addHandler(h) - elif h: - log.handlers.remove(h) - - def _get_screen(self): - h = self._get_builtin_handler - has_h = h(self.error_log, "screen") or h(self.access_log, "screen") - return bool(has_h) - - def _set_screen(self, newvalue): - self._set_screen_handler(self.error_log, newvalue, stream=sys.stderr) - self._set_screen_handler(self.access_log, newvalue, stream=sys.stdout) - screen = property(_get_screen, _set_screen, - doc="""Turn stderr/stdout logging on or off. - - If you set this to True, it'll add the appropriate StreamHandler for - you. If you set it to False, it will remove the handler. 
- """) - - # -------------------------- File handlers -------------------------- # - - def _add_builtin_file_handler(self, log, fname): - h = logging.FileHandler(fname) - h.setFormatter(logfmt) - h._cpbuiltin = "file" - log.addHandler(h) - - def _set_file_handler(self, log, filename): - h = self._get_builtin_handler(log, "file") - if filename: - if h: - if h.baseFilename != os.path.abspath(filename): - h.close() - log.handlers.remove(h) - self._add_builtin_file_handler(log, filename) - else: - self._add_builtin_file_handler(log, filename) - else: - if h: - h.close() - log.handlers.remove(h) - - def _get_error_file(self): - h = self._get_builtin_handler(self.error_log, "file") - if h: - return h.baseFilename - return '' - - def _set_error_file(self, newvalue): - self._set_file_handler(self.error_log, newvalue) - error_file = property(_get_error_file, _set_error_file, - doc="""The filename for self.error_log. - - If you set this to a string, it'll add the appropriate FileHandler for - you. If you set it to ``None`` or ``''``, it will remove the handler. - """) - - def _get_access_file(self): - h = self._get_builtin_handler(self.access_log, "file") - if h: - return h.baseFilename - return '' - - def _set_access_file(self, newvalue): - self._set_file_handler(self.access_log, newvalue) - access_file = property(_get_access_file, _set_access_file, - doc="""The filename for self.access_log. - - If you set this to a string, it'll add the appropriate FileHandler for - you. If you set it to ``None`` or ``''``, it will remove the handler. 
- """) - - # ------------------------- WSGI handlers ------------------------- # - - def _set_wsgi_handler(self, log, enable): - h = self._get_builtin_handler(log, "wsgi") - if enable: - if not h: - h = WSGIErrorHandler() - h.setFormatter(logfmt) - h._cpbuiltin = "wsgi" - log.addHandler(h) - elif h: - log.handlers.remove(h) - - def _get_wsgi(self): - return bool(self._get_builtin_handler(self.error_log, "wsgi")) - - def _set_wsgi(self, newvalue): - self._set_wsgi_handler(self.error_log, newvalue) - wsgi = property(_get_wsgi, _set_wsgi, - doc="""Write errors to wsgi.errors. - - If you set this to True, it'll add the appropriate - :class:`WSGIErrorHandler` for you - (which writes errors to ``wsgi.errors``). - If you set it to False, it will remove the handler. - """) - - -class WSGIErrorHandler(logging.Handler): - - "A handler class which writes logging records to environ['wsgi.errors']." - - def flush(self): - """Flushes the stream.""" - try: - stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors') - except (AttributeError, KeyError): - pass - else: - stream.flush() - - def emit(self, record): - """Emit a record.""" - try: - stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors') - except (AttributeError, KeyError): - pass - else: - try: - msg = self.format(record) - fs = "%s\n" - import types - # if no unicode support... 
- if not hasattr(types, "UnicodeType"): - stream.write(fs % msg) - else: - try: - stream.write(fs % msg) - except UnicodeError: - stream.write(fs % msg.encode("UTF-8")) - self.flush() - except: - self.handleError(record) diff --git a/libs_crutch/contrib/cherrypy/_cpmodpy.py b/libs_crutch/contrib/cherrypy/_cpmodpy.py deleted file mode 100644 index 02154d6..0000000 --- a/libs_crutch/contrib/cherrypy/_cpmodpy.py +++ /dev/null @@ -1,353 +0,0 @@ -"""Native adapter for serving CherryPy via mod_python - -Basic usage: - -########################################## -# Application in a module called myapp.py -########################################## - -import cherrypy - -class Root: - @cherrypy.expose - def index(self): - return 'Hi there, Ho there, Hey there' - - -# We will use this method from the mod_python configuration -# as the entry point to our application -def setup_server(): - cherrypy.tree.mount(Root()) - cherrypy.config.update({'environment': 'production', - 'log.screen': False, - 'show_tracebacks': False}) - -########################################## -# mod_python settings for apache2 -# This should reside in your httpd.conf -# or a file that will be loaded at -# apache startup -########################################## - -# Start -DocumentRoot "/" -Listen 8080 -LoadModule python_module /usr/lib/apache2/modules/mod_python.so - - - PythonPath "sys.path+['/path/to/my/application']" - SetHandler python-program - PythonHandler cherrypy._cpmodpy::handler - PythonOption cherrypy.setup myapp::setup_server - PythonDebug On - -# End - -The actual path to your mod_python.so is dependent on your -environment. In this case we suppose a global mod_python -installation on a Linux distribution such as Ubuntu. - -We do set the PythonPath configuration setting so that -your application can be found by from the user running -the apache2 instance. Of course if your application -resides in the global site-package this won't be needed. 
- -Then restart apache2 and access http://127.0.0.1:8080 -""" - -import logging -import sys - -import cherrypy -from cherrypy._cpcompat import BytesIO, copyitems, ntob -from cherrypy._cperror import format_exc, bare_error -from cherrypy.lib import httputil - - -# ------------------------------ Request-handling - - -def setup(req): - from mod_python import apache - - # Run any setup functions defined by a "PythonOption cherrypy.setup" - # directive. - options = req.get_options() - if 'cherrypy.setup' in options: - for function in options['cherrypy.setup'].split(): - atoms = function.split('::', 1) - if len(atoms) == 1: - mod = __import__(atoms[0], globals(), locals()) - else: - modname, fname = atoms - mod = __import__(modname, globals(), locals(), [fname]) - func = getattr(mod, fname) - func() - - cherrypy.config.update({'log.screen': False, - "tools.ignore_headers.on": True, - "tools.ignore_headers.headers": ['Range'], - }) - - engine = cherrypy.engine - if hasattr(engine, "signal_handler"): - engine.signal_handler.unsubscribe() - if hasattr(engine, "console_control_handler"): - engine.console_control_handler.unsubscribe() - engine.autoreload.unsubscribe() - cherrypy.server.unsubscribe() - - def _log(msg, level): - newlevel = apache.APLOG_ERR - if logging.DEBUG >= level: - newlevel = apache.APLOG_DEBUG - elif logging.INFO >= level: - newlevel = apache.APLOG_INFO - elif logging.WARNING >= level: - newlevel = apache.APLOG_WARNING - # On Windows, req.server is required or the msg will vanish. See - # http://www.modpython.org/pipermail/mod_python/2003-October/014291.html - # Also, "When server is not specified...LogLevel does not apply..." - apache.log_error(msg, newlevel, req.server) - engine.subscribe('log', _log) - - engine.start() - - def cherrypy_cleanup(data): - engine.exit() - try: - # apache.register_cleanup wasn't available until 3.1.4. 
- apache.register_cleanup(cherrypy_cleanup) - except AttributeError: - req.server.register_cleanup(req, cherrypy_cleanup) - - -class _ReadOnlyRequest: - expose = ('read', 'readline', 'readlines') - - def __init__(self, req): - for method in self.expose: - self.__dict__[method] = getattr(req, method) - - -recursive = False - -_isSetUp = False - - -def handler(req): - from mod_python import apache - try: - global _isSetUp - if not _isSetUp: - setup(req) - _isSetUp = True - - # Obtain a Request object from CherryPy - local = req.connection.local_addr - local = httputil.Host( - local[0], local[1], req.connection.local_host or "") - remote = req.connection.remote_addr - remote = httputil.Host( - remote[0], remote[1], req.connection.remote_host or "") - - scheme = req.parsed_uri[0] or 'http' - req.get_basic_auth_pw() - - try: - # apache.mpm_query only became available in mod_python 3.1 - q = apache.mpm_query - threaded = q(apache.AP_MPMQ_IS_THREADED) - forked = q(apache.AP_MPMQ_IS_FORKED) - except AttributeError: - bad_value = ("You must provide a PythonOption '%s', " - "either 'on' or 'off', when running a version " - "of mod_python < 3.1") - - threaded = options.get('multithread', '').lower() - if threaded == 'on': - threaded = True - elif threaded == 'off': - threaded = False - else: - raise ValueError(bad_value % "multithread") - - forked = options.get('multiprocess', '').lower() - if forked == 'on': - forked = True - elif forked == 'off': - forked = False - else: - raise ValueError(bad_value % "multiprocess") - - sn = cherrypy.tree.script_name(req.uri or "/") - if sn is None: - send_response(req, '404 Not Found', [], '') - else: - app = cherrypy.tree.apps[sn] - method = req.method - path = req.uri - qs = req.args or "" - reqproto = req.protocol - headers = copyitems(req.headers_in) - rfile = _ReadOnlyRequest(req) - prev = None - - try: - redirections = [] - while True: - request, response = app.get_serving(local, remote, scheme, - "HTTP/1.1") - request.login = 
req.user - request.multithread = bool(threaded) - request.multiprocess = bool(forked) - request.app = app - request.prev = prev - - # Run the CherryPy Request object and obtain the response - try: - request.run(method, path, qs, reqproto, headers, rfile) - break - except cherrypy.InternalRedirect: - ir = sys.exc_info()[1] - app.release_serving() - prev = request - - if not recursive: - if ir.path in redirections: - raise RuntimeError( - "InternalRedirector visited the same URL " - "twice: %r" % ir.path) - else: - # Add the *previous* path_info + qs to - # redirections. - if qs: - qs = "?" + qs - redirections.append(sn + path + qs) - - # Munge environment and try again. - method = "GET" - path = ir.path - qs = ir.query_string - rfile = BytesIO() - - send_response( - req, response.output_status, response.header_list, - response.body, response.stream) - finally: - app.release_serving() - except: - tb = format_exc() - cherrypy.log(tb, 'MOD_PYTHON', severity=logging.ERROR) - s, h, b = bare_error() - send_response(req, s, h, b) - return apache.OK - - -def send_response(req, status, headers, body, stream=False): - # Set response status - req.status = int(status[:3]) - - # Set response headers - req.content_type = "text/plain" - for header, value in headers: - if header.lower() == 'content-type': - req.content_type = value - continue - req.headers_out.add(header, value) - - if stream: - # Flush now so the status and headers are sent immediately. 
- req.flush() - - # Set response body - if isinstance(body, basestring): - req.write(body) - else: - for seg in body: - req.write(seg) - - -# --------------- Startup tools for CherryPy + mod_python --------------- # -import os -import re -try: - import subprocess - - def popen(fullcmd): - p = subprocess.Popen(fullcmd, shell=True, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - close_fds=True) - return p.stdout -except ImportError: - def popen(fullcmd): - pipein, pipeout = os.popen4(fullcmd) - return pipeout - - -def read_process(cmd, args=""): - fullcmd = "%s %s" % (cmd, args) - pipeout = popen(fullcmd) - try: - firstline = pipeout.readline() - cmd_not_found = re.search( - ntob("(not recognized|No such file|not found)"), - firstline, - re.IGNORECASE - ) - if cmd_not_found: - raise IOError('%s must be on your system path.' % cmd) - output = firstline + pipeout.read() - finally: - pipeout.close() - return output - - -class ModPythonServer(object): - - template = """ -# Apache2 server configuration file for running CherryPy with mod_python. 
- -DocumentRoot "/" -Listen %(port)s -LoadModule python_module modules/mod_python.so - - - SetHandler python-program - PythonHandler %(handler)s - PythonDebug On -%(opts)s - -""" - - def __init__(self, loc="/", port=80, opts=None, apache_path="apache", - handler="cherrypy._cpmodpy::handler"): - self.loc = loc - self.port = port - self.opts = opts - self.apache_path = apache_path - self.handler = handler - - def start(self): - opts = "".join([" PythonOption %s %s\n" % (k, v) - for k, v in self.opts]) - conf_data = self.template % {"port": self.port, - "loc": self.loc, - "opts": opts, - "handler": self.handler, - } - - mpconf = os.path.join(os.path.dirname(__file__), "cpmodpy.conf") - f = open(mpconf, 'wb') - try: - f.write(conf_data) - finally: - f.close() - - response = read_process(self.apache_path, "-k start -f %s" % mpconf) - self.ready = True - return response - - def stop(self): - os.popen("apache -k stop") - self.ready = False diff --git a/libs_crutch/contrib/cherrypy/_cpnative_server.py b/libs_crutch/contrib/cherrypy/_cpnative_server.py deleted file mode 100644 index e303573..0000000 --- a/libs_crutch/contrib/cherrypy/_cpnative_server.py +++ /dev/null @@ -1,154 +0,0 @@ -"""Native adapter for serving CherryPy via its builtin server.""" - -import logging -import sys - -import cherrypy -from cherrypy._cpcompat import BytesIO -from cherrypy._cperror import format_exc, bare_error -from cherrypy.lib import httputil -from cherrypy import wsgiserver - - -class NativeGateway(wsgiserver.Gateway): - - recursive = False - - def respond(self): - req = self.req - try: - # Obtain a Request object from CherryPy - local = req.server.bind_addr - local = httputil.Host(local[0], local[1], "") - remote = req.conn.remote_addr, req.conn.remote_port - remote = httputil.Host(remote[0], remote[1], "") - - scheme = req.scheme - sn = cherrypy.tree.script_name(req.uri or "/") - if sn is None: - self.send_response('404 Not Found', [], ['']) - else: - app = cherrypy.tree.apps[sn] - method 
= req.method - path = req.path - qs = req.qs or "" - headers = req.inheaders.items() - rfile = req.rfile - prev = None - - try: - redirections = [] - while True: - request, response = app.get_serving( - local, remote, scheme, "HTTP/1.1") - request.multithread = True - request.multiprocess = False - request.app = app - request.prev = prev - - # Run the CherryPy Request object and obtain the - # response - try: - request.run(method, path, qs, - req.request_protocol, headers, rfile) - break - except cherrypy.InternalRedirect: - ir = sys.exc_info()[1] - app.release_serving() - prev = request - - if not self.recursive: - if ir.path in redirections: - raise RuntimeError( - "InternalRedirector visited the same " - "URL twice: %r" % ir.path) - else: - # Add the *previous* path_info + qs to - # redirections. - if qs: - qs = "?" + qs - redirections.append(sn + path + qs) - - # Munge environment and try again. - method = "GET" - path = ir.path - qs = ir.query_string - rfile = BytesIO() - - self.send_response( - response.output_status, response.header_list, - response.body) - finally: - app.release_serving() - except: - tb = format_exc() - # print tb - cherrypy.log(tb, 'NATIVE_ADAPTER', severity=logging.ERROR) - s, h, b = bare_error() - self.send_response(s, h, b) - - def send_response(self, status, headers, body): - req = self.req - - # Set response status - req.status = str(status or "500 Server Error") - - # Set response headers - for header, value in headers: - req.outheaders.append((header, value)) - if (req.ready and not req.sent_headers): - req.sent_headers = True - req.send_headers() - - # Set response body - for seg in body: - req.write(seg) - - -class CPHTTPServer(wsgiserver.HTTPServer): - - """Wrapper for wsgiserver.HTTPServer. - - wsgiserver has been designed to not reference CherryPy in any way, - so that it can be used in other frameworks and applications. 
- Therefore, we wrap it here, so we can apply some attributes - from config -> cherrypy.server -> HTTPServer. - """ - - def __init__(self, server_adapter=cherrypy.server): - self.server_adapter = server_adapter - - server_name = (self.server_adapter.socket_host or - self.server_adapter.socket_file or - None) - - wsgiserver.HTTPServer.__init__( - self, server_adapter.bind_addr, NativeGateway, - minthreads=server_adapter.thread_pool, - maxthreads=server_adapter.thread_pool_max, - server_name=server_name) - - self.max_request_header_size = ( - self.server_adapter.max_request_header_size or 0) - self.max_request_body_size = ( - self.server_adapter.max_request_body_size or 0) - self.request_queue_size = self.server_adapter.socket_queue_size - self.timeout = self.server_adapter.socket_timeout - self.shutdown_timeout = self.server_adapter.shutdown_timeout - self.protocol = self.server_adapter.protocol_version - self.nodelay = self.server_adapter.nodelay - - ssl_module = self.server_adapter.ssl_module or 'pyopenssl' - if self.server_adapter.ssl_context: - adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module) - self.ssl_adapter = adapter_class( - self.server_adapter.ssl_certificate, - self.server_adapter.ssl_private_key, - self.server_adapter.ssl_certificate_chain) - self.ssl_adapter.context = self.server_adapter.ssl_context - elif self.server_adapter.ssl_certificate: - adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module) - self.ssl_adapter = adapter_class( - self.server_adapter.ssl_certificate, - self.server_adapter.ssl_private_key, - self.server_adapter.ssl_certificate_chain) diff --git a/libs_crutch/contrib/cherrypy/_cpreqbody.py b/libs_crutch/contrib/cherrypy/_cpreqbody.py deleted file mode 100644 index d2dbbc9..0000000 --- a/libs_crutch/contrib/cherrypy/_cpreqbody.py +++ /dev/null @@ -1,1013 +0,0 @@ -"""Request body processing for CherryPy. - -.. versionadded:: 3.2 - -Application authors have complete control over the parsing of HTTP request -entities. 
In short, -:attr:`cherrypy.request.body` -is now always set to an instance of -:class:`RequestBody`, -and *that* class is a subclass of :class:`Entity`. - -When an HTTP request includes an entity body, it is often desirable to -provide that information to applications in a form other than the raw bytes. -Different content types demand different approaches. Examples: - - * For a GIF file, we want the raw bytes in a stream. - * An HTML form is better parsed into its component fields, and each text field - decoded from bytes to unicode. - * A JSON body should be deserialized into a Python dict or list. - -When the request contains a Content-Type header, the media type is used as a -key to look up a value in the -:attr:`request.body.processors` dict. -If the full media -type is not found, then the major type is tried; for example, if no processor -is found for the 'image/jpeg' type, then we look for a processor for the -'image' types altogether. If neither the full type nor the major type has a -matching processor, then a default processor is used -(:func:`default_proc`). For most -types, this means no processing is done, and the body is left unread as a -raw byte stream. Processors are configurable in an 'on_start_resource' hook. - -Some processors, especially those for the 'text' types, attempt to decode bytes -to unicode. If the Content-Type request header includes a 'charset' parameter, -this is used to decode the entity. Otherwise, one or more default charsets may -be attempted, although this decision is up to each processor. If a processor -successfully decodes an Entity or Part, it should set the -:attr:`charset` attribute -on the Entity or Part to the name of the successful charset, so that -applications can easily re-encode or transcode the value if they wish. - -If the Content-Type of the request entity is of major type 'multipart', then -the above parsing process, and possibly a decoding process, is performed for -each part. 
- -For both the full entity and multipart parts, a Content-Disposition header may -be used to fill :attr:`name` and -:attr:`filename` attributes on the -request.body or the Part. - -.. _custombodyprocessors: - -Custom Processors -================= - -You can add your own processors for any specific or major MIME type. Simply add -it to the :attr:`processors` dict in a -hook/tool that runs at ``on_start_resource`` or ``before_request_body``. -Here's the built-in JSON tool for an example:: - - def json_in(force=True, debug=False): - request = cherrypy.serving.request - def json_processor(entity): - \"""Read application/json data into request.json.\""" - if not entity.headers.get("Content-Length", ""): - raise cherrypy.HTTPError(411) - - body = entity.fp.read() - try: - request.json = json_decode(body) - except ValueError: - raise cherrypy.HTTPError(400, 'Invalid JSON document') - if force: - request.body.processors.clear() - request.body.default_proc = cherrypy.HTTPError( - 415, 'Expected an application/json content type') - request.body.processors['application/json'] = json_processor - -We begin by defining a new ``json_processor`` function to stick in the -``processors`` dictionary. All processor functions take a single argument, -the ``Entity`` instance they are to process. It will be called whenever a -request is received (for those URI's where the tool is turned on) which -has a ``Content-Type`` of "application/json". - -First, it checks for a valid ``Content-Length`` (raising 411 if not valid), -then reads the remaining bytes on the socket. The ``fp`` object knows its -own length, so it won't hang waiting for data that never arrives. It will -return when all data has been read. Then, we decode those bytes using -Python's built-in ``json`` module, and stick the decoded result onto -``request.json`` . If it cannot be decoded, we raise 400. 
- -If the "force" argument is True (the default), the ``Tool`` clears the -``processors`` dict so that request entities of other ``Content-Types`` -aren't parsed at all. Since there's no entry for those invalid MIME -types, the ``default_proc`` method of ``cherrypy.request.body`` is -called. But this does nothing by default (usually to provide the page -handler an opportunity to handle it.) -But in our case, we want to raise 415, so we replace -``request.body.default_proc`` -with the error (``HTTPError`` instances, when called, raise themselves). - -If we were defining a custom processor, we can do so without making a ``Tool``. -Just add the config entry:: - - request.body.processors = {'application/json': json_processor} - -Note that you can only replace the ``processors`` dict wholesale this way, -not update the existing one. -""" - -try: - from io import DEFAULT_BUFFER_SIZE -except ImportError: - DEFAULT_BUFFER_SIZE = 8192 -import re -import sys -import tempfile -try: - from urllib import unquote_plus -except ImportError: - def unquote_plus(bs): - """Bytes version of urllib.parse.unquote_plus.""" - bs = bs.replace(ntob('+'), ntob(' ')) - atoms = bs.split(ntob('%')) - for i in range(1, len(atoms)): - item = atoms[i] - try: - pct = int(item[:2], 16) - atoms[i] = bytes([pct]) + item[2:] - except ValueError: - pass - return ntob('').join(atoms) - -import cherrypy -from cherrypy._cpcompat import basestring, ntob, ntou -from cherrypy.lib import httputil - - -# ------------------------------- Processors -------------------------------- # - -def process_urlencoded(entity): - """Read application/x-www-form-urlencoded data into entity.params.""" - qs = entity.fp.read() - for charset in entity.attempt_charsets: - try: - params = {} - for aparam in qs.split(ntob('&')): - for pair in aparam.split(ntob(';')): - if not pair: - continue - - atoms = pair.split(ntob('='), 1) - if len(atoms) == 1: - atoms.append(ntob('')) - - key = unquote_plus(atoms[0]).decode(charset) - value = 
unquote_plus(atoms[1]).decode(charset) - - if key in params: - if not isinstance(params[key], list): - params[key] = [params[key]] - params[key].append(value) - else: - params[key] = value - except UnicodeDecodeError: - pass - else: - entity.charset = charset - break - else: - raise cherrypy.HTTPError( - 400, "The request entity could not be decoded. The following " - "charsets were attempted: %s" % repr(entity.attempt_charsets)) - - # Now that all values have been successfully parsed and decoded, - # apply them to the entity.params dict. - for key, value in params.items(): - if key in entity.params: - if not isinstance(entity.params[key], list): - entity.params[key] = [entity.params[key]] - entity.params[key].append(value) - else: - entity.params[key] = value - - -def process_multipart(entity): - """Read all multipart parts into entity.parts.""" - ib = "" - if 'boundary' in entity.content_type.params: - # http://tools.ietf.org/html/rfc2046#section-5.1.1 - # "The grammar for parameters on the Content-type field is such that it - # is often necessary to enclose the boundary parameter values in quotes - # on the Content-type line" - ib = entity.content_type.params['boundary'].strip('"') - - if not re.match("^[ -~]{0,200}[!-~]$", ib): - raise ValueError('Invalid boundary in multipart form: %r' % (ib,)) - - ib = ('--' + ib).encode('ascii') - - # Find the first marker - while True: - b = entity.readline() - if not b: - return - - b = b.strip() - if b == ib: - break - - # Read all parts - while True: - part = entity.part_class.from_fp(entity.fp, ib) - entity.parts.append(part) - part.process() - if part.fp.done: - break - - -def process_multipart_form_data(entity): - """Read all multipart/form-data parts into entity.parts or entity.params. 
- """ - process_multipart(entity) - - kept_parts = [] - for part in entity.parts: - if part.name is None: - kept_parts.append(part) - else: - if part.filename is None: - # It's a regular field - value = part.fullvalue() - else: - # It's a file upload. Retain the whole part so consumer code - # has access to its .file and .filename attributes. - value = part - - if part.name in entity.params: - if not isinstance(entity.params[part.name], list): - entity.params[part.name] = [entity.params[part.name]] - entity.params[part.name].append(value) - else: - entity.params[part.name] = value - - entity.parts = kept_parts - - -def _old_process_multipart(entity): - """The behavior of 3.2 and lower. Deprecated and will be changed in 3.3.""" - process_multipart(entity) - - params = entity.params - - for part in entity.parts: - if part.name is None: - key = ntou('parts') - else: - key = part.name - - if part.filename is None: - # It's a regular field - value = part.fullvalue() - else: - # It's a file upload. Retain the whole part so consumer code - # has access to its .file and .filename attributes. - value = part - - if key in params: - if not isinstance(params[key], list): - params[key] = [params[key]] - params[key].append(value) - else: - params[key] = value - - -# -------------------------------- Entities --------------------------------- # -class Entity(object): - - """An HTTP request body, or MIME multipart body. - - This class collects information about the HTTP request entity. When a - given entity is of MIME type "multipart", each part is parsed into its own - Entity instance, and the set of parts stored in - :attr:`entity.parts`. - - Between the ``before_request_body`` and ``before_handler`` tools, CherryPy - tries to process the request body (if any) by calling - :func:`request.body.process`. - This uses the ``content_type`` of the Entity to look up a suitable - processor in - :attr:`Entity.processors`, - a dict. 
- If a matching processor cannot be found for the complete Content-Type, - it tries again using the major type. For example, if a request with an - entity of type "image/jpeg" arrives, but no processor can be found for - that complete type, then one is sought for the major type "image". If a - processor is still not found, then the - :func:`default_proc` method - of the Entity is called (which does nothing by default; you can - override this too). - - CherryPy includes processors for the "application/x-www-form-urlencoded" - type, the "multipart/form-data" type, and the "multipart" major type. - CherryPy 3.2 processes these types almost exactly as older versions. - Parts are passed as arguments to the page handler using their - ``Content-Disposition.name`` if given, otherwise in a generic "parts" - argument. Each such part is either a string, or the - :class:`Part` itself if it's a file. (In this - case it will have ``file`` and ``filename`` attributes, or possibly a - ``value`` attribute). Each Part is itself a subclass of - Entity, and has its own ``process`` method and ``processors`` dict. - - There is a separate processor for the "multipart" major type which is more - flexible, and simply stores all multipart parts in - :attr:`request.body.parts`. You can - enable it with:: - - cherrypy.request.body.processors['multipart'] = _cpreqbody.process_multipart - - in an ``on_start_resource`` tool. - """ - - # http://tools.ietf.org/html/rfc2046#section-4.1.2: - # "The default character set, which must be assumed in the - # absence of a charset parameter, is US-ASCII." - # However, many browsers send data in utf-8 with no charset. - attempt_charsets = ['utf-8'] - """A list of strings, each of which should be a known encoding. - - When the Content-Type of the request body warrants it, each of the given - encodings will be tried in order. The first one to successfully decode the - entity without raising an error is stored as - :attr:`entity.charset`. 
This defaults - to ``['utf-8']`` (plus 'ISO-8859-1' for "text/\*" types, as required by - `HTTP/1.1 `_), - but ``['us-ascii', 'utf-8']`` for multipart parts. - """ - - charset = None - """The successful decoding; see "attempt_charsets" above.""" - - content_type = None - """The value of the Content-Type request header. - - If the Entity is part of a multipart payload, this will be the Content-Type - given in the MIME headers for this part. - """ - - default_content_type = 'application/x-www-form-urlencoded' - """This defines a default ``Content-Type`` to use if no Content-Type header - is given. The empty string is used for RequestBody, which results in the - request body not being read or parsed at all. This is by design; a missing - ``Content-Type`` header in the HTTP request entity is an error at best, - and a security hole at worst. For multipart parts, however, the MIME spec - declares that a part with no Content-Type defaults to "text/plain" - (see :class:`Part`). - """ - - filename = None - """The ``Content-Disposition.filename`` header, if available.""" - - fp = None - """The readable socket file object.""" - - headers = None - """A dict of request/multipart header names and values. - - This is a copy of the ``request.headers`` for the ``request.body``; - for multipart parts, it is the set of headers for that part. - """ - - length = None - """The value of the ``Content-Length`` header, if provided.""" - - name = None - """The "name" parameter of the ``Content-Disposition`` header, if any.""" - - params = None - """ - If the request Content-Type is 'application/x-www-form-urlencoded' or - multipart, this will be a dict of the params pulled from the entity - body; that is, it will be the portion of request.params that come - from the message body (sometimes called "POST params", although they - can be sent with various HTTP method verbs). 
This value is set between - the 'before_request_body' and 'before_handler' hooks (assuming that - process_request_body is True).""" - - processors = {'application/x-www-form-urlencoded': process_urlencoded, - 'multipart/form-data': process_multipart_form_data, - 'multipart': process_multipart, - } - """A dict of Content-Type names to processor methods.""" - - parts = None - """A list of Part instances if ``Content-Type`` is of major type - "multipart".""" - - part_class = None - """The class used for multipart parts. - - You can replace this with custom subclasses to alter the processing of - multipart parts. - """ - - def __init__(self, fp, headers, params=None, parts=None): - # Make an instance-specific copy of the class processors - # so Tools, etc. can replace them per-request. - self.processors = self.processors.copy() - - self.fp = fp - self.headers = headers - - if params is None: - params = {} - self.params = params - - if parts is None: - parts = [] - self.parts = parts - - # Content-Type - self.content_type = headers.elements('Content-Type') - if self.content_type: - self.content_type = self.content_type[0] - else: - self.content_type = httputil.HeaderElement.from_str( - self.default_content_type) - - # Copy the class 'attempt_charsets', prepending any Content-Type - # charset - dec = self.content_type.params.get("charset", None) - if dec: - self.attempt_charsets = [dec] + [c for c in self.attempt_charsets - if c != dec] - else: - self.attempt_charsets = self.attempt_charsets[:] - - # Length - self.length = None - clen = headers.get('Content-Length', None) - # If Transfer-Encoding is 'chunked', ignore any Content-Length. 
- if ( - clen is not None and - 'chunked' not in headers.get('Transfer-Encoding', '') - ): - try: - self.length = int(clen) - except ValueError: - pass - - # Content-Disposition - self.name = None - self.filename = None - disp = headers.elements('Content-Disposition') - if disp: - disp = disp[0] - if 'name' in disp.params: - self.name = disp.params['name'] - if self.name.startswith('"') and self.name.endswith('"'): - self.name = self.name[1:-1] - if 'filename' in disp.params: - self.filename = disp.params['filename'] - if ( - self.filename.startswith('"') and - self.filename.endswith('"') - ): - self.filename = self.filename[1:-1] - - # The 'type' attribute is deprecated in 3.2; remove it in 3.3. - type = property( - lambda self: self.content_type, - doc="A deprecated alias for " - ":attr:`content_type`." - ) - - def read(self, size=None, fp_out=None): - return self.fp.read(size, fp_out) - - def readline(self, size=None): - return self.fp.readline(size) - - def readlines(self, sizehint=None): - return self.fp.readlines(sizehint) - - def __iter__(self): - return self - - def __next__(self): - line = self.readline() - if not line: - raise StopIteration - return line - - def next(self): - return self.__next__() - - def read_into_file(self, fp_out=None): - """Read the request body into fp_out (or make_file() if None). - - Return fp_out. - """ - if fp_out is None: - fp_out = self.make_file() - self.read(fp_out=fp_out) - return fp_out - - def make_file(self): - """Return a file-like object into which the request body will be read. - - By default, this will return a TemporaryFile. Override as needed. - See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`.""" - return tempfile.TemporaryFile() - - def fullvalue(self): - """Return this entity as a string, whether stored in a file or not.""" - if self.file: - # It was stored in a tempfile. Read it. 
- self.file.seek(0) - value = self.file.read() - self.file.seek(0) - else: - value = self.value - return value - - def process(self): - """Execute the best-match processor for the given media type.""" - proc = None - ct = self.content_type.value - try: - proc = self.processors[ct] - except KeyError: - toptype = ct.split('/', 1)[0] - try: - proc = self.processors[toptype] - except KeyError: - pass - if proc is None: - self.default_proc() - else: - proc(self) - - def default_proc(self): - """Called if a more-specific processor is not found for the - ``Content-Type``. - """ - # Leave the fp alone for someone else to read. This works fine - # for request.body, but the Part subclasses need to override this - # so they can move on to the next part. - pass - - -class Part(Entity): - - """A MIME part entity, part of a multipart entity.""" - - # "The default character set, which must be assumed in the absence of a - # charset parameter, is US-ASCII." - attempt_charsets = ['us-ascii', 'utf-8'] - """A list of strings, each of which should be a known encoding. - - When the Content-Type of the request body warrants it, each of the given - encodings will be tried in order. The first one to successfully decode the - entity without raising an error is stored as - :attr:`entity.charset`. This defaults - to ``['utf-8']`` (plus 'ISO-8859-1' for "text/\*" types, as required by - `HTTP/1.1 `_), - but ``['us-ascii', 'utf-8']`` for multipart parts. - """ - - boundary = None - """The MIME multipart boundary.""" - - default_content_type = 'text/plain' - """This defines a default ``Content-Type`` to use if no Content-Type header - is given. The empty string is used for RequestBody, which results in the - request body not being read or parsed at all. This is by design; a missing - ``Content-Type`` header in the HTTP request entity is an error at best, - and a security hole at worst. 
For multipart parts, however (this class), - the MIME spec declares that a part with no Content-Type defaults to - "text/plain". - """ - - # This is the default in stdlib cgi. We may want to increase it. - maxrambytes = 1000 - """The threshold of bytes after which point the ``Part`` will store - its data in a file (generated by - :func:`make_file`) - instead of a string. Defaults to 1000, just like the :mod:`cgi` - module in Python's standard library. - """ - - def __init__(self, fp, headers, boundary): - Entity.__init__(self, fp, headers) - self.boundary = boundary - self.file = None - self.value = None - - def from_fp(cls, fp, boundary): - headers = cls.read_headers(fp) - return cls(fp, headers, boundary) - from_fp = classmethod(from_fp) - - def read_headers(cls, fp): - headers = httputil.HeaderMap() - while True: - line = fp.readline() - if not line: - # No more data--illegal end of headers - raise EOFError("Illegal end of headers.") - - if line == ntob('\r\n'): - # Normal end of headers - break - if not line.endswith(ntob('\r\n')): - raise ValueError("MIME requires CRLF terminators: %r" % line) - - if line[0] in ntob(' \t'): - # It's a continuation line. - v = line.strip().decode('ISO-8859-1') - else: - k, v = line.split(ntob(":"), 1) - k = k.strip().decode('ISO-8859-1') - v = v.strip().decode('ISO-8859-1') - - existing = headers.get(k) - if existing: - v = ", ".join((existing, v)) - headers[k] = v - - return headers - read_headers = classmethod(read_headers) - - def read_lines_to_boundary(self, fp_out=None): - """Read bytes from self.fp and return or write them to a file. - - If the 'fp_out' argument is None (the default), all bytes read are - returned in a single byte string. - - If the 'fp_out' argument is not None, it must be a file-like - object that supports the 'write' method; all bytes read will be - written to the fp, and that fp is returned. 
- """ - endmarker = self.boundary + ntob("--") - delim = ntob("") - prev_lf = True - lines = [] - seen = 0 - while True: - line = self.fp.readline(1 << 16) - if not line: - raise EOFError("Illegal end of multipart body.") - if line.startswith(ntob("--")) and prev_lf: - strippedline = line.strip() - if strippedline == self.boundary: - break - if strippedline == endmarker: - self.fp.finish() - break - - line = delim + line - - if line.endswith(ntob("\r\n")): - delim = ntob("\r\n") - line = line[:-2] - prev_lf = True - elif line.endswith(ntob("\n")): - delim = ntob("\n") - line = line[:-1] - prev_lf = True - else: - delim = ntob("") - prev_lf = False - - if fp_out is None: - lines.append(line) - seen += len(line) - if seen > self.maxrambytes: - fp_out = self.make_file() - for line in lines: - fp_out.write(line) - else: - fp_out.write(line) - - if fp_out is None: - result = ntob('').join(lines) - for charset in self.attempt_charsets: - try: - result = result.decode(charset) - except UnicodeDecodeError: - pass - else: - self.charset = charset - return result - else: - raise cherrypy.HTTPError( - 400, - "The request entity could not be decoded. The following " - "charsets were attempted: %s" % repr(self.attempt_charsets) - ) - else: - fp_out.seek(0) - return fp_out - - def default_proc(self): - """Called if a more-specific processor is not found for the - ``Content-Type``. - """ - if self.filename: - # Always read into a file if a .filename was given. - self.file = self.read_into_file() - else: - result = self.read_lines_to_boundary() - if isinstance(result, basestring): - self.value = result - else: - self.file = result - - def read_into_file(self, fp_out=None): - """Read the request body into fp_out (or make_file() if None). - - Return fp_out. 
- """ - if fp_out is None: - fp_out = self.make_file() - self.read_lines_to_boundary(fp_out=fp_out) - return fp_out - -Entity.part_class = Part - -try: - inf = float('inf') -except ValueError: - # Python 2.4 and lower - class Infinity(object): - - def __cmp__(self, other): - return 1 - - def __sub__(self, other): - return self - inf = Infinity() - - -comma_separated_headers = [ - 'Accept', 'Accept-Charset', 'Accept-Encoding', - 'Accept-Language', 'Accept-Ranges', 'Allow', - 'Cache-Control', 'Connection', 'Content-Encoding', - 'Content-Language', 'Expect', 'If-Match', - 'If-None-Match', 'Pragma', 'Proxy-Authenticate', - 'Te', 'Trailer', 'Transfer-Encoding', 'Upgrade', - 'Vary', 'Via', 'Warning', 'Www-Authenticate' -] - - -class SizedReader: - - def __init__(self, fp, length, maxbytes, bufsize=DEFAULT_BUFFER_SIZE, - has_trailers=False): - # Wrap our fp in a buffer so peek() works - self.fp = fp - self.length = length - self.maxbytes = maxbytes - self.buffer = ntob('') - self.bufsize = bufsize - self.bytes_read = 0 - self.done = False - self.has_trailers = has_trailers - - def read(self, size=None, fp_out=None): - """Read bytes from the request body and return or write them to a file. - - A number of bytes less than or equal to the 'size' argument are read - off the socket. The actual number of bytes read are tracked in - self.bytes_read. The number may be smaller than 'size' when 1) the - client sends fewer bytes, 2) the 'Content-Length' request header - specifies fewer bytes than requested, or 3) the number of bytes read - exceeds self.maxbytes (in which case, 413 is raised). - - If the 'fp_out' argument is None (the default), all bytes read are - returned in a single byte string. - - If the 'fp_out' argument is not None, it must be a file-like - object that supports the 'write' method; all bytes read will be - written to the fp, and None is returned. 
- """ - - if self.length is None: - if size is None: - remaining = inf - else: - remaining = size - else: - remaining = self.length - self.bytes_read - if size and size < remaining: - remaining = size - if remaining == 0: - self.finish() - if fp_out is None: - return ntob('') - else: - return None - - chunks = [] - - # Read bytes from the buffer. - if self.buffer: - if remaining is inf: - data = self.buffer - self.buffer = ntob('') - else: - data = self.buffer[:remaining] - self.buffer = self.buffer[remaining:] - datalen = len(data) - remaining -= datalen - - # Check lengths. - self.bytes_read += datalen - if self.maxbytes and self.bytes_read > self.maxbytes: - raise cherrypy.HTTPError(413) - - # Store the data. - if fp_out is None: - chunks.append(data) - else: - fp_out.write(data) - - # Read bytes from the socket. - while remaining > 0: - chunksize = min(remaining, self.bufsize) - try: - data = self.fp.read(chunksize) - except Exception: - e = sys.exc_info()[1] - if e.__class__.__name__ == 'MaxSizeExceeded': - # Post data is too big - raise cherrypy.HTTPError( - 413, "Maximum request length: %r" % e.args[1]) - else: - raise - if not data: - self.finish() - break - datalen = len(data) - remaining -= datalen - - # Check lengths. - self.bytes_read += datalen - if self.maxbytes and self.bytes_read > self.maxbytes: - raise cherrypy.HTTPError(413) - - # Store the data. 
- if fp_out is None: - chunks.append(data) - else: - fp_out.write(data) - - if fp_out is None: - return ntob('').join(chunks) - - def readline(self, size=None): - """Read a line from the request body and return it.""" - chunks = [] - while size is None or size > 0: - chunksize = self.bufsize - if size is not None and size < self.bufsize: - chunksize = size - data = self.read(chunksize) - if not data: - break - pos = data.find(ntob('\n')) + 1 - if pos: - chunks.append(data[:pos]) - remainder = data[pos:] - self.buffer += remainder - self.bytes_read -= len(remainder) - break - else: - chunks.append(data) - return ntob('').join(chunks) - - def readlines(self, sizehint=None): - """Read lines from the request body and return them.""" - if self.length is not None: - if sizehint is None: - sizehint = self.length - self.bytes_read - else: - sizehint = min(sizehint, self.length - self.bytes_read) - - lines = [] - seen = 0 - while True: - line = self.readline() - if not line: - break - lines.append(line) - seen += len(line) - if seen >= sizehint: - break - return lines - - def finish(self): - self.done = True - if self.has_trailers and hasattr(self.fp, 'read_trailer_lines'): - self.trailers = {} - - try: - for line in self.fp.read_trailer_lines(): - if line[0] in ntob(' \t'): - # It's a continuation line. 
- v = line.strip() - else: - try: - k, v = line.split(ntob(":"), 1) - except ValueError: - raise ValueError("Illegal header line.") - k = k.strip().title() - v = v.strip() - - if k in comma_separated_headers: - existing = self.trailers.get(envname) - if existing: - v = ntob(", ").join((existing, v)) - self.trailers[k] = v - except Exception: - e = sys.exc_info()[1] - if e.__class__.__name__ == 'MaxSizeExceeded': - # Post data is too big - raise cherrypy.HTTPError( - 413, "Maximum request length: %r" % e.args[1]) - else: - raise - - -class RequestBody(Entity): - - """The entity of the HTTP request.""" - - bufsize = 8 * 1024 - """The buffer size used when reading the socket.""" - - # Don't parse the request body at all if the client didn't provide - # a Content-Type header. See - # https://bitbucket.org/cherrypy/cherrypy/issue/790 - default_content_type = '' - """This defines a default ``Content-Type`` to use if no Content-Type header - is given. The empty string is used for RequestBody, which results in the - request body not being read or parsed at all. This is by design; a missing - ``Content-Type`` header in the HTTP request entity is an error at best, - and a security hole at worst. For multipart parts, however, the MIME spec - declares that a part with no Content-Type defaults to "text/plain" - (see :class:`Part`). - """ - - maxbytes = None - """Raise ``MaxSizeExceeded`` if more bytes than this are read from - the socket. - """ - - def __init__(self, fp, headers, params=None, request_params=None): - Entity.__init__(self, fp, headers, params) - - # http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1 - # When no explicit charset parameter is provided by the - # sender, media subtypes of the "text" type are defined - # to have a default charset value of "ISO-8859-1" when - # received via HTTP. 
- if self.content_type.value.startswith('text/'): - for c in ('ISO-8859-1', 'iso-8859-1', 'Latin-1', 'latin-1'): - if c in self.attempt_charsets: - break - else: - self.attempt_charsets.append('ISO-8859-1') - - # Temporary fix while deprecating passing .parts as .params. - self.processors['multipart'] = _old_process_multipart - - if request_params is None: - request_params = {} - self.request_params = request_params - - def process(self): - """Process the request entity based on its Content-Type.""" - # "The presence of a message-body in a request is signaled by the - # inclusion of a Content-Length or Transfer-Encoding header field in - # the request's message-headers." - # It is possible to send a POST request with no body, for example; - # however, app developers are responsible in that case to set - # cherrypy.request.process_body to False so this method isn't called. - h = cherrypy.serving.request.headers - if 'Content-Length' not in h and 'Transfer-Encoding' not in h: - raise cherrypy.HTTPError(411) - - self.fp = SizedReader(self.fp, self.length, - self.maxbytes, bufsize=self.bufsize, - has_trailers='Trailer' in h) - super(RequestBody, self).process() - - # Body params should also be a part of the request_params - # add them in here. - request_params = self.request_params - for key, value in self.params.items(): - # Python 2 only: keyword arguments must be byte strings (type - # 'str'). 
- if sys.version_info < (3, 0): - if isinstance(key, unicode): - key = key.encode('ISO-8859-1') - - if key in request_params: - if not isinstance(request_params[key], list): - request_params[key] = [request_params[key]] - request_params[key].append(value) - else: - request_params[key] = value diff --git a/libs_crutch/contrib/cherrypy/_cprequest.py b/libs_crutch/contrib/cherrypy/_cprequest.py deleted file mode 100644 index 290bd2e..0000000 --- a/libs_crutch/contrib/cherrypy/_cprequest.py +++ /dev/null @@ -1,973 +0,0 @@ - -import os -import sys -import time -import warnings - -import cherrypy -from cherrypy._cpcompat import basestring, copykeys, ntob, unicodestr -from cherrypy._cpcompat import SimpleCookie, CookieError, py3k -from cherrypy import _cpreqbody, _cpconfig -from cherrypy._cperror import format_exc, bare_error -from cherrypy.lib import httputil, file_generator - - -class Hook(object): - - """A callback and its metadata: failsafe, priority, and kwargs.""" - - callback = None - """ - The bare callable that this Hook object is wrapping, which will - be called when the Hook is called.""" - - failsafe = False - """ - If True, the callback is guaranteed to run even if other callbacks - from the same call point raise exceptions.""" - - priority = 50 - """ - Defines the order of execution for a list of Hooks. 
Priority numbers - should be limited to the closed interval [0, 100], but values outside - this range are acceptable, as are fractional values.""" - - kwargs = {} - """ - A set of keyword arguments that will be passed to the - callable on each call.""" - - def __init__(self, callback, failsafe=None, priority=None, **kwargs): - self.callback = callback - - if failsafe is None: - failsafe = getattr(callback, "failsafe", False) - self.failsafe = failsafe - - if priority is None: - priority = getattr(callback, "priority", 50) - self.priority = priority - - self.kwargs = kwargs - - def __lt__(self, other): - # Python 3 - return self.priority < other.priority - - def __cmp__(self, other): - # Python 2 - return cmp(self.priority, other.priority) - - def __call__(self): - """Run self.callback(**self.kwargs).""" - return self.callback(**self.kwargs) - - def __repr__(self): - cls = self.__class__ - return ("%s.%s(callback=%r, failsafe=%r, priority=%r, %s)" - % (cls.__module__, cls.__name__, self.callback, - self.failsafe, self.priority, - ", ".join(['%s=%r' % (k, v) - for k, v in self.kwargs.items()]))) - - -class HookMap(dict): - - """A map of call points to lists of callbacks (Hook objects).""" - - def __new__(cls, points=None): - d = dict.__new__(cls) - for p in points or []: - d[p] = [] - return d - - def __init__(self, *a, **kw): - pass - - def attach(self, point, callback, failsafe=None, priority=None, **kwargs): - """Append a new Hook made from the supplied arguments.""" - self[point].append(Hook(callback, failsafe, priority, **kwargs)) - - def run(self, point): - """Execute all registered Hooks (callbacks) for the given point.""" - exc = None - hooks = self[point] - hooks.sort() - for hook in hooks: - # Some hooks are guaranteed to run even if others at - # the same hookpoint fail. We will still log the failure, - # but proceed on to the next hook. The only way - # to stop all processing from one of these hooks is - # to raise SystemExit and stop the whole server. 
- if exc is None or hook.failsafe: - try: - hook() - except (KeyboardInterrupt, SystemExit): - raise - except (cherrypy.HTTPError, cherrypy.HTTPRedirect, - cherrypy.InternalRedirect): - exc = sys.exc_info()[1] - except: - exc = sys.exc_info()[1] - cherrypy.log(traceback=True, severity=40) - if exc: - raise exc - - def __copy__(self): - newmap = self.__class__() - # We can't just use 'update' because we want copies of the - # mutable values (each is a list) as well. - for k, v in self.items(): - newmap[k] = v[:] - return newmap - copy = __copy__ - - def __repr__(self): - cls = self.__class__ - return "%s.%s(points=%r)" % ( - cls.__module__, - cls.__name__, - copykeys(self) - ) - - -# Config namespace handlers - -def hooks_namespace(k, v): - """Attach bare hooks declared in config.""" - # Use split again to allow multiple hooks for a single - # hookpoint per path (e.g. "hooks.before_handler.1"). - # Little-known fact you only get from reading source ;) - hookpoint = k.split(".", 1)[0] - if isinstance(v, basestring): - v = cherrypy.lib.attributes(v) - if not isinstance(v, Hook): - v = Hook(v) - cherrypy.serving.request.hooks[hookpoint].append(v) - - -def request_namespace(k, v): - """Attach request attributes declared in config.""" - # Provides config entries to set request.body attrs (like - # attempt_charsets). 
- if k[:5] == 'body.': - setattr(cherrypy.serving.request.body, k[5:], v) - else: - setattr(cherrypy.serving.request, k, v) - - -def response_namespace(k, v): - """Attach response attributes declared in config.""" - # Provides config entries to set default response headers - # http://cherrypy.org/ticket/889 - if k[:8] == 'headers.': - cherrypy.serving.response.headers[k.split('.', 1)[1]] = v - else: - setattr(cherrypy.serving.response, k, v) - - -def error_page_namespace(k, v): - """Attach error pages declared in config.""" - if k != 'default': - k = int(k) - cherrypy.serving.request.error_page[k] = v - - -hookpoints = ['on_start_resource', 'before_request_body', - 'before_handler', 'before_finalize', - 'on_end_resource', 'on_end_request', - 'before_error_response', 'after_error_response'] - - -class Request(object): - - """An HTTP request. - - This object represents the metadata of an HTTP request message; - that is, it contains attributes which describe the environment - in which the request URL, headers, and body were sent (if you - want tools to interpret the headers and body, those are elsewhere, - mostly in Tools). This 'metadata' consists of socket data, - transport characteristics, and the Request-Line. This object - also contains data regarding the configuration in effect for - the given URL, and the execution plan for generating a response. - """ - - prev = None - """ - The previous Request object (if any). This should be None - unless we are processing an InternalRedirect.""" - - # Conversation/connection attributes - local = httputil.Host("127.0.0.1", 80) - "An httputil.Host(ip, port, hostname) object for the server socket." - - remote = httputil.Host("127.0.0.1", 1111) - "An httputil.Host(ip, port, hostname) object for the client socket." - - scheme = "http" - """ - The protocol used between client and server. 
In most cases, - this will be either 'http' or 'https'.""" - - server_protocol = "HTTP/1.1" - """ - The HTTP version for which the HTTP server is at least - conditionally compliant.""" - - base = "" - """The (scheme://host) portion of the requested URL. - In some cases (e.g. when proxying via mod_rewrite), this may contain - path segments which cherrypy.url uses when constructing url's, but - which otherwise are ignored by CherryPy. Regardless, this value - MUST NOT end in a slash.""" - - # Request-Line attributes - request_line = "" - """ - The complete Request-Line received from the client. This is a - single string consisting of the request method, URI, and protocol - version (joined by spaces). Any final CRLF is removed.""" - - method = "GET" - """ - Indicates the HTTP method to be performed on the resource identified - by the Request-URI. Common methods include GET, HEAD, POST, PUT, and - DELETE. CherryPy allows any extension method; however, various HTTP - servers and gateways may restrict the set of allowable methods. - CherryPy applications SHOULD restrict the set (on a per-URI basis).""" - - query_string = "" - """ - The query component of the Request-URI, a string of information to be - interpreted by the resource. The query portion of a URI follows the - path component, and is separated by a '?'. For example, the URI - 'http://www.cherrypy.org/wiki?a=3&b=4' has the query component, - 'a=3&b=4'.""" - - query_string_encoding = 'utf8' - """ - The encoding expected for query string arguments after % HEX HEX decoding). - If a query string is provided that cannot be decoded with this encoding, - 404 is raised (since technically it's a different URI). If you want - arbitrary encodings to not error, set this to 'Latin-1'; you can then - encode back to bytes and re-decode to whatever encoding you like later. - """ - - protocol = (1, 1) - """The HTTP protocol version corresponding to the set - of features which should be allowed in the response. 
If BOTH - the client's request message AND the server's level of HTTP - compliance is HTTP/1.1, this attribute will be the tuple (1, 1). - If either is 1.0, this attribute will be the tuple (1, 0). - Lower HTTP protocol versions are not explicitly supported.""" - - params = {} - """ - A dict which combines query string (GET) and request entity (POST) - variables. This is populated in two stages: GET params are added - before the 'on_start_resource' hook, and POST params are added - between the 'before_request_body' and 'before_handler' hooks.""" - - # Message attributes - header_list = [] - """ - A list of the HTTP request headers as (name, value) tuples. - In general, you should use request.headers (a dict) instead.""" - - headers = httputil.HeaderMap() - """ - A dict-like object containing the request headers. Keys are header - names (in Title-Case format); however, you may get and set them in - a case-insensitive manner. That is, headers['Content-Type'] and - headers['content-type'] refer to the same value. Values are header - values (decoded according to :rfc:`2047` if necessary). See also: - httputil.HeaderMap, httputil.HeaderElement.""" - - cookie = SimpleCookie() - """See help(Cookie).""" - - rfile = None - """ - If the request included an entity (body), it will be available - as a stream in this attribute. However, the rfile will normally - be read for you between the 'before_request_body' hook and the - 'before_handler' hook, and the resulting string is placed into - either request.params or the request.body attribute. - - You may disable the automatic consumption of the rfile by setting - request.process_request_body to False, either in config for the desired - path, or in an 'on_start_resource' or 'before_request_body' hook. - - WARNING: In almost every case, you should not attempt to read from the - rfile stream after CherryPy's automatic mechanism has read it. 
If you - turn off the automatic parsing of rfile, you should read exactly the - number of bytes specified in request.headers['Content-Length']. - Ignoring either of these warnings may result in a hung request thread - or in corruption of the next (pipelined) request. - """ - - process_request_body = True - """ - If True, the rfile (if any) is automatically read and parsed, - and the result placed into request.params or request.body.""" - - methods_with_bodies = ("POST", "PUT") - """ - A sequence of HTTP methods for which CherryPy will automatically - attempt to read a body from the rfile. If you are going to change - this property, modify it on the configuration (recommended) - or on the "hook point" `on_start_resource`. - """ - - body = None - """ - If the request Content-Type is 'application/x-www-form-urlencoded' - or multipart, this will be None. Otherwise, this will be an instance - of :class:`RequestBody` (which you - can .read()); this value is set between the 'before_request_body' and - 'before_handler' hooks (assuming that process_request_body is True).""" - - # Dispatch attributes - dispatch = cherrypy.dispatch.Dispatcher() - """ - The object which looks up the 'page handler' callable and collects - config for the current request based on the path_info, other - request attributes, and the application architecture. The core - calls the dispatcher as early as possible, passing it a 'path_info' - argument. - - The default dispatcher discovers the page handler by matching path_info - to a hierarchical arrangement of objects, starting at request.app.root. - See help(cherrypy.dispatch) for more information.""" - - script_name = "" - """ - The 'mount point' of the application which is handling this request. - - This attribute MUST NOT end in a slash. If the script_name refers to - the root of the URI, it MUST be an empty string (not "/"). - """ - - path_info = "/" - """ - The 'relative path' portion of the Request-URI. 
This is relative - to the script_name ('mount point') of the application which is - handling this request.""" - - login = None - """ - When authentication is used during the request processing this is - set to 'False' if it failed and to the 'username' value if it succeeded. - The default 'None' implies that no authentication happened.""" - - # Note that cherrypy.url uses "if request.app:" to determine whether - # the call is during a real HTTP request or not. So leave this None. - app = None - """The cherrypy.Application object which is handling this request.""" - - handler = None - """ - The function, method, or other callable which CherryPy will call to - produce the response. The discovery of the handler and the arguments - it will receive are determined by the request.dispatch object. - By default, the handler is discovered by walking a tree of objects - starting at request.app.root, and is then passed all HTTP params - (from the query string and POST body) as keyword arguments.""" - - toolmaps = {} - """ - A nested dict of all Toolboxes and Tools in effect for this request, - of the form: {Toolbox.namespace: {Tool.name: config dict}}.""" - - config = None - """ - A flat dict of all configuration entries which apply to the - current request. These entries are collected from global config, - application config (based on request.path_info), and from handler - config (exactly how is governed by the request.dispatch object in - effect for this request; by default, handler config can be attached - anywhere in the tree between request.app.root and the final handler, - and inherits downward).""" - - is_index = None - """ - This will be True if the current request is mapped to an 'index' - resource handler (also, a 'default' handler if path_info ends with - a slash). The value may be used to automatically redirect the - user-agent to a 'more canonical' URL which either adds or removes - the trailing slash. 
See cherrypy.tools.trailing_slash.""" - - hooks = HookMap(hookpoints) - """ - A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}. - Each key is a str naming the hook point, and each value is a list - of hooks which will be called at that hook point during this request. - The list of hooks is generally populated as early as possible (mostly - from Tools specified in config), but may be extended at any time. - See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools.""" - - error_response = cherrypy.HTTPError(500).set_response - """ - The no-arg callable which will handle unexpected, untrapped errors - during request processing. This is not used for expected exceptions - (like NotFound, HTTPError, or HTTPRedirect) which are raised in - response to expected conditions (those should be customized either - via request.error_page or by overriding HTTPError.set_response). - By default, error_response uses HTTPError(500) to return a generic - error response to the user-agent.""" - - error_page = {} - """ - A dict of {error code: response filename or callable} pairs. - - The error code must be an int representing a given HTTP error code, - or the string 'default', which will be used if no matching entry - is found for a given numeric code. - - If a filename is provided, the file should contain a Python string- - formatting template, and can expect by default to receive format - values with the mapping keys %(status)s, %(message)s, %(traceback)s, - and %(version)s. The set of format mappings can be extended by - overriding HTTPError.set_response. - - If a callable is provided, it will be called by default with keyword - arguments 'status', 'message', 'traceback', and 'version', as for a - string-formatting template. The callable must return a string or - iterable of strings which will be set to response.body. It may also - override headers or perform any other processing. 
- - If no entry is given for an error code, and no 'default' entry exists, - a default template will be used. - """ - - show_tracebacks = True - """ - If True, unexpected errors encountered during request processing will - include a traceback in the response body.""" - - show_mismatched_params = True - """ - If True, mismatched parameters encountered during PageHandler invocation - processing will be included in the response body.""" - - throws = (KeyboardInterrupt, SystemExit, cherrypy.InternalRedirect) - """The sequence of exceptions which Request.run does not trap.""" - - throw_errors = False - """ - If True, Request.run will not trap any errors (except HTTPRedirect and - HTTPError, which are more properly called 'exceptions', not errors).""" - - closed = False - """True once the close method has been called, False otherwise.""" - - stage = None - """ - A string containing the stage reached in the request-handling process. - This is useful when debugging a live server with hung requests.""" - - namespaces = _cpconfig.NamespaceSet( - **{"hooks": hooks_namespace, - "request": request_namespace, - "response": response_namespace, - "error_page": error_page_namespace, - "tools": cherrypy.tools, - }) - - def __init__(self, local_host, remote_host, scheme="http", - server_protocol="HTTP/1.1"): - """Populate a new Request object. - - local_host should be an httputil.Host object with the server info. - remote_host should be an httputil.Host object with the client info. - scheme should be a string, either "http" or "https". - """ - self.local = local_host - self.remote = remote_host - self.scheme = scheme - self.server_protocol = server_protocol - - self.closed = False - - # Put a *copy* of the class error_page into self. - self.error_page = self.error_page.copy() - - # Put a *copy* of the class namespaces into self. - self.namespaces = self.namespaces.copy() - - self.stage = None - - def close(self): - """Run cleanup code. 
(Core)""" - if not self.closed: - self.closed = True - self.stage = 'on_end_request' - self.hooks.run('on_end_request') - self.stage = 'close' - - def run(self, method, path, query_string, req_protocol, headers, rfile): - r"""Process the Request. (Core) - - method, path, query_string, and req_protocol should be pulled directly - from the Request-Line (e.g. "GET /path?key=val HTTP/1.0"). - - path - This should be %XX-unquoted, but query_string should not be. - - When using Python 2, they both MUST be byte strings, - not unicode strings. - - When using Python 3, they both MUST be unicode strings, - not byte strings, and preferably not bytes \x00-\xFF - disguised as unicode. - - headers - A list of (name, value) tuples. - - rfile - A file-like object containing the HTTP request entity. - - When run() is done, the returned object should have 3 attributes: - - * status, e.g. "200 OK" - * header_list, a list of (name, value) tuples - * body, an iterable yielding strings - - Consumer code (HTTP servers) should then access these response - attributes to build the outbound stream. - - """ - response = cherrypy.serving.response - self.stage = 'run' - try: - self.error_response = cherrypy.HTTPError(500).set_response - - self.method = method - path = path or "/" - self.query_string = query_string or '' - self.params = {} - - # Compare request and server HTTP protocol versions, in case our - # server does not support the requested protocol. Limit our output - # to min(req, server). We want the following output: - # request server actual written supported response - # protocol protocol response protocol feature set - # a 1.0 1.0 1.0 1.0 - # b 1.0 1.1 1.1 1.0 - # c 1.1 1.0 1.0 1.0 - # d 1.1 1.1 1.1 1.1 - # Notice that, in (b), the response will be "HTTP/1.1" even though - # the client only understands 1.0. RFC 2616 10.5.6 says we should - # only return 505 if the _major_ version is different. 
- rp = int(req_protocol[5]), int(req_protocol[7]) - sp = int(self.server_protocol[5]), int(self.server_protocol[7]) - self.protocol = min(rp, sp) - response.headers.protocol = self.protocol - - # Rebuild first line of the request (e.g. "GET /path HTTP/1.0"). - url = path - if query_string: - url += '?' + query_string - self.request_line = '%s %s %s' % (method, url, req_protocol) - - self.header_list = list(headers) - self.headers = httputil.HeaderMap() - - self.rfile = rfile - self.body = None - - self.cookie = SimpleCookie() - self.handler = None - - # path_info should be the path from the - # app root (script_name) to the handler. - self.script_name = self.app.script_name - self.path_info = pi = path[len(self.script_name):] - - self.stage = 'respond' - self.respond(pi) - - except self.throws: - raise - except: - if self.throw_errors: - raise - else: - # Failure in setup, error handler or finalize. Bypass them. - # Can't use handle_error because we may not have hooks yet. - cherrypy.log(traceback=True, severity=40) - if self.show_tracebacks: - body = format_exc() - else: - body = "" - r = bare_error(body) - response.output_status, response.header_list, response.body = r - - if self.method == "HEAD": - # HEAD requests MUST NOT return a message-body in the response. - response.body = [] - - try: - cherrypy.log.access() - except: - cherrypy.log.error(traceback=True) - - if response.timed_out: - raise cherrypy.TimeoutError() - - return response - - # Uncomment for stage debugging - # stage = property(lambda self: self._stage, lambda self, v: print(v)) - - def respond(self, path_info): - """Generate a response for the resource at self.path_info. (Core)""" - response = cherrypy.serving.response - try: - try: - try: - if self.app is None: - raise cherrypy.NotFound() - - # Get the 'Host' header, so we can HTTPRedirect properly. 
- self.stage = 'process_headers' - self.process_headers() - - # Make a copy of the class hooks - self.hooks = self.__class__.hooks.copy() - self.toolmaps = {} - - self.stage = 'get_resource' - self.get_resource(path_info) - - self.body = _cpreqbody.RequestBody( - self.rfile, self.headers, request_params=self.params) - - self.namespaces(self.config) - - self.stage = 'on_start_resource' - self.hooks.run('on_start_resource') - - # Parse the querystring - self.stage = 'process_query_string' - self.process_query_string() - - # Process the body - if self.process_request_body: - if self.method not in self.methods_with_bodies: - self.process_request_body = False - self.stage = 'before_request_body' - self.hooks.run('before_request_body') - if self.process_request_body: - self.body.process() - - # Run the handler - self.stage = 'before_handler' - self.hooks.run('before_handler') - if self.handler: - self.stage = 'handler' - response.body = self.handler() - - # Finalize - self.stage = 'before_finalize' - self.hooks.run('before_finalize') - response.finalize() - except (cherrypy.HTTPRedirect, cherrypy.HTTPError): - inst = sys.exc_info()[1] - inst.set_response() - self.stage = 'before_finalize (HTTPError)' - self.hooks.run('before_finalize') - response.finalize() - finally: - self.stage = 'on_end_resource' - self.hooks.run('on_end_resource') - except self.throws: - raise - except: - if self.throw_errors: - raise - self.handle_error() - - def process_query_string(self): - """Parse the query string into Python structures. (Core)""" - try: - p = httputil.parse_query_string( - self.query_string, encoding=self.query_string_encoding) - except UnicodeDecodeError: - raise cherrypy.HTTPError( - 404, "The given query string could not be processed. Query " - "strings for this resource must be encoded with %r." % - self.query_string_encoding) - - # Python 2 only: keyword arguments must be byte strings (type 'str'). 
- if not py3k: - for key, value in p.items(): - if isinstance(key, unicode): - del p[key] - p[key.encode(self.query_string_encoding)] = value - self.params.update(p) - - def process_headers(self): - """Parse HTTP header data into Python structures. (Core)""" - # Process the headers into self.headers - headers = self.headers - for name, value in self.header_list: - # Call title() now (and use dict.__method__(headers)) - # so title doesn't have to be called twice. - name = name.title() - value = value.strip() - - # Warning: if there is more than one header entry for cookies - # (AFAIK, only Konqueror does that), only the last one will - # remain in headers (but they will be correctly stored in - # request.cookie). - if "=?" in value: - dict.__setitem__(headers, name, httputil.decode_TEXT(value)) - else: - dict.__setitem__(headers, name, value) - - # Handle cookies differently because on Konqueror, multiple - # cookies come on different lines with the same key - if name == 'Cookie': - try: - self.cookie.load(value) - except CookieError: - msg = "Illegal cookie name %s" % value.split('=')[0] - raise cherrypy.HTTPError(400, msg) - - if not dict.__contains__(headers, 'Host'): - # All Internet-based HTTP/1.1 servers MUST respond with a 400 - # (Bad Request) status code to any HTTP/1.1 request message - # which lacks a Host header field. - if self.protocol >= (1, 1): - msg = "HTTP/1.1 requires a 'Host' request header." - raise cherrypy.HTTPError(400, msg) - host = dict.get(headers, 'Host') - if not host: - host = self.local.name or self.local.ip - self.base = "%s://%s" % (self.scheme, host) - - def get_resource(self, path): - """Call a dispatcher (which sets self.handler and .config). (Core)""" - # First, see if there is a custom dispatch at this URI. Custom - # dispatchers can only be specified in app.config, not in _cp_config - # (since custom dispatchers may not even have an app.root). 
- dispatch = self.app.find_config( - path, "request.dispatch", self.dispatch) - - # dispatch() should set self.handler and self.config - dispatch(path) - - def handle_error(self): - """Handle the last unanticipated exception. (Core)""" - try: - self.hooks.run("before_error_response") - if self.error_response: - self.error_response() - self.hooks.run("after_error_response") - cherrypy.serving.response.finalize() - except cherrypy.HTTPRedirect: - inst = sys.exc_info()[1] - inst.set_response() - cherrypy.serving.response.finalize() - - # ------------------------- Properties ------------------------- # - - def _get_body_params(self): - warnings.warn( - "body_params is deprecated in CherryPy 3.2, will be removed in " - "CherryPy 3.3.", - DeprecationWarning - ) - return self.body.params - body_params = property(_get_body_params, - doc=""" - If the request Content-Type is 'application/x-www-form-urlencoded' or - multipart, this will be a dict of the params pulled from the entity - body; that is, it will be the portion of request.params that come - from the message body (sometimes called "POST params", although they - can be sent with various HTTP method verbs). This value is set between - the 'before_request_body' and 'before_handler' hooks (assuming that - process_request_body is True). - - Deprecated in 3.2, will be removed for 3.3 in favor of - :attr:`request.body.params`.""") - - -class ResponseBody(object): - - """The body of the HTTP response (the response entity).""" - - if py3k: - unicode_err = ("Page handlers MUST return bytes. Use tools.encode " - "if you wish to return unicode.") - - def __get__(self, obj, objclass=None): - if obj is None: - # When calling on the class instead of an instance... - return self - else: - return obj._body - - def __set__(self, obj, value): - # Convert the given value to an iterable object. 
- if py3k and isinstance(value, str): - raise ValueError(self.unicode_err) - - if isinstance(value, basestring): - # strings get wrapped in a list because iterating over a single - # item list is much faster than iterating over every character - # in a long string. - if value: - value = [value] - else: - # [''] doesn't evaluate to False, so replace it with []. - value = [] - elif py3k and isinstance(value, list): - # every item in a list must be bytes... - for i, item in enumerate(value): - if isinstance(item, str): - raise ValueError(self.unicode_err) - # Don't use isinstance here; io.IOBase which has an ABC takes - # 1000 times as long as, say, isinstance(value, str) - elif hasattr(value, 'read'): - value = file_generator(value) - elif value is None: - value = [] - obj._body = value - - -class Response(object): - - """An HTTP Response, including status, headers, and body.""" - - status = "" - """The HTTP Status-Code and Reason-Phrase.""" - - header_list = [] - """ - A list of the HTTP response headers as (name, value) tuples. - In general, you should use response.headers (a dict) instead. This - attribute is generated from response.headers and is not valid until - after the finalize phase.""" - - headers = httputil.HeaderMap() - """ - A dict-like object containing the response headers. Keys are header - names (in Title-Case format); however, you may get and set them in - a case-insensitive manner. That is, headers['Content-Type'] and - headers['content-type'] refer to the same value. Values are header - values (decoded according to :rfc:`2047` if necessary). - - .. seealso:: classes :class:`HeaderMap`, :class:`HeaderElement` - """ - - cookie = SimpleCookie() - """See help(Cookie).""" - - body = ResponseBody() - """The body (entity) of the HTTP response.""" - - time = None - """The value of time.time() when created. 
Use in HTTP dates.""" - - timeout = 300 - """Seconds after which the response will be aborted.""" - - timed_out = False - """ - Flag to indicate the response should be aborted, because it has - exceeded its timeout.""" - - stream = False - """If False, buffer the response body.""" - - def __init__(self): - self.status = None - self.header_list = None - self._body = [] - self.time = time.time() - - self.headers = httputil.HeaderMap() - # Since we know all our keys are titled strings, we can - # bypass HeaderMap.update and get a big speed boost. - dict.update(self.headers, { - "Content-Type": 'text/html', - "Server": "CherryPy/" + cherrypy.__version__, - "Date": httputil.HTTPDate(self.time), - }) - self.cookie = SimpleCookie() - - def collapse_body(self): - """Collapse self.body to a single string; replace it and return it.""" - if isinstance(self.body, basestring): - return self.body - - newbody = [] - for chunk in self.body: - if py3k and not isinstance(chunk, bytes): - raise TypeError("Chunk %s is not of type 'bytes'." % - repr(chunk)) - newbody.append(chunk) - newbody = ntob('').join(newbody) - - self.body = newbody - return newbody - - def finalize(self): - """Transform headers (and cookies) into self.header_list. (Core)""" - try: - code, reason, _ = httputil.valid_status(self.status) - except ValueError: - raise cherrypy.HTTPError(500, sys.exc_info()[1].args[0]) - - headers = self.headers - - self.status = "%s %s" % (code, reason) - self.output_status = ntob(str(code), 'ascii') + \ - ntob(" ") + headers.encode(reason) - - if self.stream: - # The upshot: wsgiserver will chunk the response if - # you pop Content-Length (or set it explicitly to None). - # Note that lib.static sets C-L to the file's st_size. 
- if dict.get(headers, 'Content-Length') is None: - dict.pop(headers, 'Content-Length', None) - elif code < 200 or code in (204, 205, 304): - # "All 1xx (informational), 204 (no content), - # and 304 (not modified) responses MUST NOT - # include a message-body." - dict.pop(headers, 'Content-Length', None) - self.body = ntob("") - else: - # Responses which are not streamed should have a Content-Length, - # but allow user code to set Content-Length if desired. - if dict.get(headers, 'Content-Length') is None: - content = self.collapse_body() - dict.__setitem__(headers, 'Content-Length', len(content)) - - # Transform our header dict into a list of tuples. - self.header_list = h = headers.output() - - cookie = self.cookie.output() - if cookie: - for line in cookie.split("\n"): - if line.endswith("\r"): - # Python 2.4 emits cookies joined by LF but 2.5+ by CRLF. - line = line[:-1] - name, value = line.split(": ", 1) - if isinstance(name, unicodestr): - name = name.encode("ISO-8859-1") - if isinstance(value, unicodestr): - value = headers.encode(value) - h.append((name, value)) - - def check_timeout(self): - """If now > self.time + self.timeout, set self.timed_out. - - This purposefully sets a flag, rather than raising an error, - so that a monitor thread can interrupt the Response thread. - """ - if time.time() > self.time + self.timeout: - self.timed_out = True diff --git a/libs_crutch/contrib/cherrypy/_cpserver.py b/libs_crutch/contrib/cherrypy/_cpserver.py deleted file mode 100644 index a31e742..0000000 --- a/libs_crutch/contrib/cherrypy/_cpserver.py +++ /dev/null @@ -1,226 +0,0 @@ -"""Manage HTTP servers with CherryPy.""" - -import warnings - -import cherrypy -from cherrypy.lib import attributes -from cherrypy._cpcompat import basestring, py3k - -# We import * because we want to export check_port -# et al as attributes of this module. -from cherrypy.process.servers import * - - -class Server(ServerAdapter): - - """An adapter for an HTTP server. 
- - You can set attributes (like socket_host and socket_port) - on *this* object (which is probably cherrypy.server), and call - quickstart. For example:: - - cherrypy.server.socket_port = 80 - cherrypy.quickstart() - """ - - socket_port = 8080 - """The TCP port on which to listen for connections.""" - - _socket_host = '127.0.0.1' - - def _get_socket_host(self): - return self._socket_host - - def _set_socket_host(self, value): - if value == '': - raise ValueError("The empty string ('') is not an allowed value. " - "Use '0.0.0.0' instead to listen on all active " - "interfaces (INADDR_ANY).") - self._socket_host = value - socket_host = property( - _get_socket_host, - _set_socket_host, - doc="""The hostname or IP address on which to listen for connections. - - Host values may be any IPv4 or IPv6 address, or any valid hostname. - The string 'localhost' is a synonym for '127.0.0.1' (or '::1', if - your hosts file prefers IPv6). The string '0.0.0.0' is a special - IPv4 entry meaning "any active interface" (INADDR_ANY), and '::' - is the similar IN6ADDR_ANY for IPv6. The empty string or None are - not allowed.""") - - socket_file = None - """If given, the name of the UNIX socket to use instead of TCP/IP. 
- - When this option is not None, the `socket_host` and `socket_port` options - are ignored.""" - - socket_queue_size = 5 - """The 'backlog' argument to socket.listen(); specifies the maximum number - of queued connections (default 5).""" - - socket_timeout = 10 - """The timeout in seconds for accepted connections (default 10).""" - - accepted_queue_size = -1 - """The maximum number of requests which will be queued up before - the server refuses to accept it (default -1, meaning no limit).""" - - accepted_queue_timeout = 10 - """The timeout in seconds for attempting to add a request to the - queue when the queue is full (default 10).""" - - shutdown_timeout = 5 - """The time to wait for HTTP worker threads to clean up.""" - - protocol_version = 'HTTP/1.1' - """The version string to write in the Status-Line of all HTTP responses, - for example, "HTTP/1.1" (the default). Depending on the HTTP server used, - this should also limit the supported features used in the response.""" - - thread_pool = 10 - """The number of worker threads to start up in the pool.""" - - thread_pool_max = -1 - """The maximum size of the worker-thread pool. Use -1 to indicate no limit. - """ - - max_request_header_size = 500 * 1024 - """The maximum number of bytes allowable in the request headers. - If exceeded, the HTTP server should return "413 Request Entity Too Large". - """ - - max_request_body_size = 100 * 1024 * 1024 - """The maximum number of bytes allowable in the request body. If exceeded, - the HTTP server should return "413 Request Entity Too Large".""" - - instance = None - """If not None, this should be an HTTP server instance (such as - CPWSGIServer) which cherrypy.server will control. 
Use this when you need - more control over object instantiation than is available in the various - configuration options.""" - - ssl_context = None - """When using PyOpenSSL, an instance of SSL.Context.""" - - ssl_certificate = None - """The filename of the SSL certificate to use.""" - - ssl_certificate_chain = None - """When using PyOpenSSL, the certificate chain to pass to - Context.load_verify_locations.""" - - ssl_private_key = None - """The filename of the private key to use with SSL.""" - - if py3k: - ssl_module = 'builtin' - """The name of a registered SSL adaptation module to use with - the builtin WSGI server. Builtin options are: 'builtin' (to - use the SSL library built into recent versions of Python). - You may also register your own classes in the - wsgiserver.ssl_adapters dict.""" - else: - ssl_module = 'pyopenssl' - """The name of a registered SSL adaptation module to use with the - builtin WSGI server. Builtin options are 'builtin' (to use the SSL - library built into recent versions of Python) and 'pyopenssl' (to - use the PyOpenSSL project, which you must install separately). You - may also register your own classes in the wsgiserver.ssl_adapters - dict.""" - - statistics = False - """Turns statistics-gathering on or off for aware HTTP servers.""" - - nodelay = True - """If True (the default since 3.1), sets the TCP_NODELAY socket option.""" - - wsgi_version = (1, 0) - """The WSGI version tuple to use with the builtin WSGI server. - The provided options are (1, 0) [which includes support for PEP 3333, - which declares it covers WSGI version 1.0.1 but still mandates the - wsgi.version (1, 0)] and ('u', 0), an experimental unicode version. 
- You may create and register your own experimental versions of the WSGI - protocol by adding custom classes to the wsgiserver.wsgi_gateways dict.""" - - def __init__(self): - self.bus = cherrypy.engine - self.httpserver = None - self.interrupt = None - self.running = False - - def httpserver_from_self(self, httpserver=None): - """Return a (httpserver, bind_addr) pair based on self attributes.""" - if httpserver is None: - httpserver = self.instance - if httpserver is None: - from cherrypy import _cpwsgi_server - httpserver = _cpwsgi_server.CPWSGIServer(self) - if isinstance(httpserver, basestring): - # Is anyone using this? Can I add an arg? - httpserver = attributes(httpserver)(self) - return httpserver, self.bind_addr - - def start(self): - """Start the HTTP server.""" - if not self.httpserver: - self.httpserver, self.bind_addr = self.httpserver_from_self() - ServerAdapter.start(self) - start.priority = 75 - - def _get_bind_addr(self): - if self.socket_file: - return self.socket_file - if self.socket_host is None and self.socket_port is None: - return None - return (self.socket_host, self.socket_port) - - def _set_bind_addr(self, value): - if value is None: - self.socket_file = None - self.socket_host = None - self.socket_port = None - elif isinstance(value, basestring): - self.socket_file = value - self.socket_host = None - self.socket_port = None - else: - try: - self.socket_host, self.socket_port = value - self.socket_file = None - except ValueError: - raise ValueError("bind_addr must be a (host, port) tuple " - "(for TCP sockets) or a string (for Unix " - "domain sockets), not %r" % value) - bind_addr = property( - _get_bind_addr, - _set_bind_addr, - doc='A (host, port) tuple for TCP sockets or ' - 'a str for Unix domain sockets.') - - def base(self): - """Return the base (scheme://host[:port] or sock file) for this server. 
- """ - if self.socket_file: - return self.socket_file - - host = self.socket_host - if host in ('0.0.0.0', '::'): - # 0.0.0.0 is INADDR_ANY and :: is IN6ADDR_ANY. - # Look up the host name, which should be the - # safest thing to spit out in a URL. - import socket - host = socket.gethostname() - - port = self.socket_port - - if self.ssl_certificate: - scheme = "https" - if port != 443: - host += ":%s" % port - else: - scheme = "http" - if port != 80: - host += ":%s" % port - - return "%s://%s" % (scheme, host) diff --git a/libs_crutch/contrib/cherrypy/_cpthreadinglocal.py b/libs_crutch/contrib/cherrypy/_cpthreadinglocal.py deleted file mode 100644 index 238c322..0000000 --- a/libs_crutch/contrib/cherrypy/_cpthreadinglocal.py +++ /dev/null @@ -1,241 +0,0 @@ -# This is a backport of Python-2.4's threading.local() implementation - -"""Thread-local objects - -(Note that this module provides a Python version of thread - threading.local class. Depending on the version of Python you're - using, there may be a faster one available. You should always import - the local class from threading.) - -Thread-local objects support the management of thread-local data. -If you have data that you want to be local to a thread, simply create -a thread-local object and use its attributes: - - >>> mydata = local() - >>> mydata.number = 42 - >>> mydata.number - 42 - -You can also access the local-object's dictionary: - - >>> mydata.__dict__ - {'number': 42} - >>> mydata.__dict__.setdefault('widgets', []) - [] - >>> mydata.widgets - [] - -What's important about thread-local objects is that their data are -local to a thread. If we access the data in a different thread: - - >>> log = [] - >>> def f(): - ... items = mydata.__dict__.items() - ... items.sort() - ... log.append(items) - ... mydata.number = 11 - ... log.append(mydata.number) - - >>> import threading - >>> thread = threading.Thread(target=f) - >>> thread.start() - >>> thread.join() - >>> log - [[], 11] - -we get different data. 
Furthermore, changes made in the other thread -don't affect data seen in this thread: - - >>> mydata.number - 42 - -Of course, values you get from a local object, including a __dict__ -attribute, are for whatever thread was current at the time the -attribute was read. For that reason, you generally don't want to save -these values across threads, as they apply only to the thread they -came from. - -You can create custom local objects by subclassing the local class: - - >>> class MyLocal(local): - ... number = 2 - ... initialized = False - ... def __init__(self, **kw): - ... if self.initialized: - ... raise SystemError('__init__ called too many times') - ... self.initialized = True - ... self.__dict__.update(kw) - ... def squared(self): - ... return self.number ** 2 - -This can be useful to support default values, methods and -initialization. Note that if you define an __init__ method, it will be -called each time the local object is used in a separate thread. This -is necessary to initialize each thread's dictionary. - -Now if we create a local object: - - >>> mydata = MyLocal(color='red') - -Now we have a default number: - - >>> mydata.number - 2 - -an initial color: - - >>> mydata.color - 'red' - >>> del mydata.color - -And a method that operates on the data: - - >>> mydata.squared() - 4 - -As before, we can access the data in a separate thread: - - >>> log = [] - >>> thread = threading.Thread(target=f) - >>> thread.start() - >>> thread.join() - >>> log - [[('color', 'red'), ('initialized', True)], 11] - -without affecting this thread's data: - - >>> mydata.number - 2 - >>> mydata.color - Traceback (most recent call last): - ... - AttributeError: 'MyLocal' object has no attribute 'color' - -Note that subclasses can define slots, but they are not thread -local. They are shared across threads: - - >>> class MyLocal(local): - ... 
__slots__ = 'number' - - >>> mydata = MyLocal() - >>> mydata.number = 42 - >>> mydata.color = 'red' - -So, the separate thread: - - >>> thread = threading.Thread(target=f) - >>> thread.start() - >>> thread.join() - -affects what we see: - - >>> mydata.number - 11 - ->>> del mydata -""" - -# Threading import is at end - - -class _localbase(object): - __slots__ = '_local__key', '_local__args', '_local__lock' - - def __new__(cls, *args, **kw): - self = object.__new__(cls) - key = 'thread.local.' + str(id(self)) - object.__setattr__(self, '_local__key', key) - object.__setattr__(self, '_local__args', (args, kw)) - object.__setattr__(self, '_local__lock', RLock()) - - if args or kw and (cls.__init__ is object.__init__): - raise TypeError("Initialization arguments are not supported") - - # We need to create the thread dict in anticipation of - # __init__ being called, to make sure we don't call it - # again ourselves. - dict = object.__getattribute__(self, '__dict__') - currentThread().__dict__[key] = dict - - return self - - -def _patch(self): - key = object.__getattribute__(self, '_local__key') - d = currentThread().__dict__.get(key) - if d is None: - d = {} - currentThread().__dict__[key] = d - object.__setattr__(self, '__dict__', d) - - # we have a new instance dict, so call out __init__ if we have - # one - cls = type(self) - if cls.__init__ is not object.__init__: - args, kw = object.__getattribute__(self, '_local__args') - cls.__init__(self, *args, **kw) - else: - object.__setattr__(self, '__dict__', d) - - -class local(_localbase): - - def __getattribute__(self, name): - lock = object.__getattribute__(self, '_local__lock') - lock.acquire() - try: - _patch(self) - return object.__getattribute__(self, name) - finally: - lock.release() - - def __setattr__(self, name, value): - lock = object.__getattribute__(self, '_local__lock') - lock.acquire() - try: - _patch(self) - return object.__setattr__(self, name, value) - finally: - lock.release() - - def __delattr__(self, 
name): - lock = object.__getattribute__(self, '_local__lock') - lock.acquire() - try: - _patch(self) - return object.__delattr__(self, name) - finally: - lock.release() - - def __del__(): - threading_enumerate = enumerate - __getattribute__ = object.__getattribute__ - - def __del__(self): - key = __getattribute__(self, '_local__key') - - try: - threads = list(threading_enumerate()) - except: - # if enumerate fails, as it seems to do during - # shutdown, we'll skip cleanup under the assumption - # that there is nothing to clean up - return - - for thread in threads: - try: - __dict__ = thread.__dict__ - except AttributeError: - # Thread is dying, rest in peace - continue - - if key in __dict__: - try: - del __dict__[key] - except KeyError: - pass # didn't have anything in this thread - - return __del__ - __del__ = __del__() - -from threading import currentThread, enumerate, RLock diff --git a/libs_crutch/contrib/cherrypy/_cptools.py b/libs_crutch/contrib/cherrypy/_cptools.py deleted file mode 100644 index 06a56e8..0000000 --- a/libs_crutch/contrib/cherrypy/_cptools.py +++ /dev/null @@ -1,529 +0,0 @@ -"""CherryPy tools. A "tool" is any helper, adapted to CP. - -Tools are usually designed to be used in a variety of ways (although some -may only offer one if they choose): - - Library calls - All tools are callables that can be used wherever needed. - The arguments are straightforward and should be detailed within the - docstring. - - Function decorators - All tools, when called, may be used as decorators which configure - individual CherryPy page handlers (methods on the CherryPy tree). - That is, "@tools.anytool()" should "turn on" the tool via the - decorated function's _cp_config attribute. - - CherryPy config - If a tool exposes a "_setup" callable, it will be called - once per Request (if the feature is "turned on" via config). - -Tools may be implemented as any object with a namespace. 
The builtins -are generally either modules or instances of the tools.Tool class. -""" - -import sys -import warnings - -import cherrypy - - -def _getargs(func): - """Return the names of all static arguments to the given function.""" - # Use this instead of importing inspect for less mem overhead. - import types - if sys.version_info >= (3, 0): - if isinstance(func, types.MethodType): - func = func.__func__ - co = func.__code__ - else: - if isinstance(func, types.MethodType): - func = func.im_func - co = func.func_code - return co.co_varnames[:co.co_argcount] - - -_attr_error = ( - "CherryPy Tools cannot be turned on directly. Instead, turn them " - "on via config, or use them as decorators on your page handlers." -) - - -class Tool(object): - - """A registered function for use with CherryPy request-processing hooks. - - help(tool.callable) should give you more information about this Tool. - """ - - namespace = "tools" - - def __init__(self, point, callable, name=None, priority=50): - self._point = point - self.callable = callable - self._name = name - self._priority = priority - self.__doc__ = self.callable.__doc__ - self._setargs() - - def _get_on(self): - raise AttributeError(_attr_error) - - def _set_on(self, value): - raise AttributeError(_attr_error) - on = property(_get_on, _set_on) - - def _setargs(self): - """Copy func parameter names to obj attributes.""" - try: - for arg in _getargs(self.callable): - setattr(self, arg, None) - except (TypeError, AttributeError): - if hasattr(self.callable, "__call__"): - for arg in _getargs(self.callable.__call__): - setattr(self, arg, None) - # IronPython 1.0 raises NotImplementedError because - # inspect.getargspec tries to access Python bytecode - # in co_code attribute. - except NotImplementedError: - pass - # IronPython 1B1 may raise IndexError in some cases, - # but if we trap it here it doesn't prevent CP from working. 
- except IndexError: - pass - - def _merged_args(self, d=None): - """Return a dict of configuration entries for this Tool.""" - if d: - conf = d.copy() - else: - conf = {} - - tm = cherrypy.serving.request.toolmaps[self.namespace] - if self._name in tm: - conf.update(tm[self._name]) - - if "on" in conf: - del conf["on"] - - return conf - - def __call__(self, *args, **kwargs): - """Compile-time decorator (turn on the tool in config). - - For example:: - - @tools.proxy() - def whats_my_base(self): - return cherrypy.request.base - whats_my_base.exposed = True - """ - if args: - raise TypeError("The %r Tool does not accept positional " - "arguments; you must use keyword arguments." - % self._name) - - def tool_decorator(f): - if not hasattr(f, "_cp_config"): - f._cp_config = {} - subspace = self.namespace + "." + self._name + "." - f._cp_config[subspace + "on"] = True - for k, v in kwargs.items(): - f._cp_config[subspace + k] = v - return f - return tool_decorator - - def _setup(self): - """Hook this tool into cherrypy.request. - - The standard CherryPy request object will automatically call this - method when the tool is "turned on" in config. - """ - conf = self._merged_args() - p = conf.pop("priority", None) - if p is None: - p = getattr(self.callable, "priority", self._priority) - cherrypy.serving.request.hooks.attach(self._point, self.callable, - priority=p, **conf) - - -class HandlerTool(Tool): - - """Tool which is called 'before main', that may skip normal handlers. - - If the tool successfully handles the request (by setting response.body), - if should return True. This will cause CherryPy to skip any 'normal' page - handler. If the tool did not handle the request, it should return False - to tell CherryPy to continue on and call the normal page handler. If the - tool is declared AS a page handler (see the 'handler' method), returning - False will raise NotFound. 
- """ - - def __init__(self, callable, name=None): - Tool.__init__(self, 'before_handler', callable, name) - - def handler(self, *args, **kwargs): - """Use this tool as a CherryPy page handler. - - For example:: - - class Root: - nav = tools.staticdir.handler(section="/nav", dir="nav", - root=absDir) - """ - def handle_func(*a, **kw): - handled = self.callable(*args, **self._merged_args(kwargs)) - if not handled: - raise cherrypy.NotFound() - return cherrypy.serving.response.body - handle_func.exposed = True - return handle_func - - def _wrapper(self, **kwargs): - if self.callable(**kwargs): - cherrypy.serving.request.handler = None - - def _setup(self): - """Hook this tool into cherrypy.request. - - The standard CherryPy request object will automatically call this - method when the tool is "turned on" in config. - """ - conf = self._merged_args() - p = conf.pop("priority", None) - if p is None: - p = getattr(self.callable, "priority", self._priority) - cherrypy.serving.request.hooks.attach(self._point, self._wrapper, - priority=p, **conf) - - -class HandlerWrapperTool(Tool): - - """Tool which wraps request.handler in a provided wrapper function. - - The 'newhandler' arg must be a handler wrapper function that takes a - 'next_handler' argument, plus ``*args`` and ``**kwargs``. Like all - page handler - functions, it must return an iterable for use as cherrypy.response.body. 
- - For example, to allow your 'inner' page handlers to return dicts - which then get interpolated into a template:: - - def interpolator(next_handler, *args, **kwargs): - filename = cherrypy.request.config.get('template') - cherrypy.response.template = env.get_template(filename) - response_dict = next_handler(*args, **kwargs) - return cherrypy.response.template.render(**response_dict) - cherrypy.tools.jinja = HandlerWrapperTool(interpolator) - """ - - def __init__(self, newhandler, point='before_handler', name=None, - priority=50): - self.newhandler = newhandler - self._point = point - self._name = name - self._priority = priority - - def callable(self, *args, **kwargs): - innerfunc = cherrypy.serving.request.handler - - def wrap(*args, **kwargs): - return self.newhandler(innerfunc, *args, **kwargs) - cherrypy.serving.request.handler = wrap - - -class ErrorTool(Tool): - - """Tool which is used to replace the default request.error_response.""" - - def __init__(self, callable, name=None): - Tool.__init__(self, None, callable, name) - - def _wrapper(self): - self.callable(**self._merged_args()) - - def _setup(self): - """Hook this tool into cherrypy.request. - - The standard CherryPy request object will automatically call this - method when the tool is "turned on" in config. - """ - cherrypy.serving.request.error_response = self._wrapper - - -# Builtin tools # - -from cherrypy.lib import cptools, encoding, auth, static, jsontools -from cherrypy.lib import sessions as _sessions, xmlrpcutil as _xmlrpc -from cherrypy.lib import caching as _caching -from cherrypy.lib import auth_basic, auth_digest - - -class SessionTool(Tool): - - """Session Tool for CherryPy. - - sessions.locking - When 'implicit' (the default), the session will be locked for you, - just before running the page handler. - - When 'early', the session will be locked before reading the request - body. 
This is off by default for safety reasons; for example, - a large upload would block the session, denying an AJAX - progress meter - (`issue `_). - - When 'explicit' (or any other value), you need to call - cherrypy.session.acquire_lock() yourself before using - session data. - """ - - def __init__(self): - # _sessions.init must be bound after headers are read - Tool.__init__(self, 'before_request_body', _sessions.init) - - def _lock_session(self): - cherrypy.serving.session.acquire_lock() - - def _setup(self): - """Hook this tool into cherrypy.request. - - The standard CherryPy request object will automatically call this - method when the tool is "turned on" in config. - """ - hooks = cherrypy.serving.request.hooks - - conf = self._merged_args() - - p = conf.pop("priority", None) - if p is None: - p = getattr(self.callable, "priority", self._priority) - - hooks.attach(self._point, self.callable, priority=p, **conf) - - locking = conf.pop('locking', 'implicit') - if locking == 'implicit': - hooks.attach('before_handler', self._lock_session) - elif locking == 'early': - # Lock before the request body (but after _sessions.init runs!) - hooks.attach('before_request_body', self._lock_session, - priority=60) - else: - # Don't lock - pass - - hooks.attach('before_finalize', _sessions.save) - hooks.attach('on_end_request', _sessions.close) - - def regenerate(self): - """Drop the current session and make a new one (with a new id).""" - sess = cherrypy.serving.session - sess.regenerate() - - # Grab cookie-relevant tool args - conf = dict([(k, v) for k, v in self._merged_args().items() - if k in ('path', 'path_header', 'name', 'timeout', - 'domain', 'secure')]) - _sessions.set_response_cookie(**conf) - - -class XMLRPCController(object): - - """A Controller (page handler collection) for XML-RPC. - - To use it, have your controllers subclass this base class (it will - turn on the tool for you). 
- - You can also supply the following optional config entries:: - - tools.xmlrpc.encoding: 'utf-8' - tools.xmlrpc.allow_none: 0 - - XML-RPC is a rather discontinuous layer over HTTP; dispatching to the - appropriate handler must first be performed according to the URL, and - then a second dispatch step must take place according to the RPC method - specified in the request body. It also allows a superfluous "/RPC2" - prefix in the URL, supplies its own handler args in the body, and - requires a 200 OK "Fault" response instead of 404 when the desired - method is not found. - - Therefore, XML-RPC cannot be implemented for CherryPy via a Tool alone. - This Controller acts as the dispatch target for the first half (based - on the URL); it then reads the RPC method from the request body and - does its own second dispatch step based on that method. It also reads - body params, and returns a Fault on error. - - The XMLRPCDispatcher strips any /RPC2 prefix; if you aren't using /RPC2 - in your URL's, you can safely skip turning on the XMLRPCDispatcher. - Otherwise, you need to use declare it in config:: - - request.dispatch: cherrypy.dispatch.XMLRPCDispatcher() - """ - - # Note we're hard-coding this into the 'tools' namespace. We could do - # a huge amount of work to make it relocatable, but the only reason why - # would be if someone actually disabled the default_toolbox. Meh. 
- _cp_config = {'tools.xmlrpc.on': True} - - def default(self, *vpath, **params): - rpcparams, rpcmethod = _xmlrpc.process_body() - - subhandler = self - for attr in str(rpcmethod).split('.'): - subhandler = getattr(subhandler, attr, None) - - if subhandler and getattr(subhandler, "exposed", False): - body = subhandler(*(vpath + rpcparams), **params) - - else: - # https://bitbucket.org/cherrypy/cherrypy/issue/533 - # if a method is not found, an xmlrpclib.Fault should be returned - # raising an exception here will do that; see - # cherrypy.lib.xmlrpcutil.on_error - raise Exception('method "%s" is not supported' % attr) - - conf = cherrypy.serving.request.toolmaps['tools'].get("xmlrpc", {}) - _xmlrpc.respond(body, - conf.get('encoding', 'utf-8'), - conf.get('allow_none', 0)) - return cherrypy.serving.response.body - default.exposed = True - - -class SessionAuthTool(HandlerTool): - - def _setargs(self): - for name in dir(cptools.SessionAuth): - if not name.startswith("__"): - setattr(self, name, None) - - -class CachingTool(Tool): - - """Caching Tool for CherryPy.""" - - def _wrapper(self, **kwargs): - request = cherrypy.serving.request - if _caching.get(**kwargs): - request.handler = None - else: - if request.cacheable: - # Note the devious technique here of adding hooks on the fly - request.hooks.attach('before_finalize', _caching.tee_output, - priority=90) - _wrapper.priority = 20 - - def _setup(self): - """Hook caching into cherrypy.request.""" - conf = self._merged_args() - - p = conf.pop("priority", None) - cherrypy.serving.request.hooks.attach('before_handler', self._wrapper, - priority=p, **conf) - - -class Toolbox(object): - - """A collection of Tools. - - This object also functions as a config namespace handler for itself. - Custom toolboxes should be added to each Application's toolboxes dict. 
- """ - - def __init__(self, namespace): - self.namespace = namespace - - def __setattr__(self, name, value): - # If the Tool._name is None, supply it from the attribute name. - if isinstance(value, Tool): - if value._name is None: - value._name = name - value.namespace = self.namespace - object.__setattr__(self, name, value) - - def __enter__(self): - """Populate request.toolmaps from tools specified in config.""" - cherrypy.serving.request.toolmaps[self.namespace] = map = {} - - def populate(k, v): - toolname, arg = k.split(".", 1) - bucket = map.setdefault(toolname, {}) - bucket[arg] = v - return populate - - def __exit__(self, exc_type, exc_val, exc_tb): - """Run tool._setup() for each tool in our toolmap.""" - map = cherrypy.serving.request.toolmaps.get(self.namespace) - if map: - for name, settings in map.items(): - if settings.get("on", False): - tool = getattr(self, name) - tool._setup() - - -class DeprecatedTool(Tool): - - _name = None - warnmsg = "This Tool is deprecated." - - def __init__(self, point, warnmsg=None): - self.point = point - if warnmsg is not None: - self.warnmsg = warnmsg - - def __call__(self, *args, **kwargs): - warnings.warn(self.warnmsg) - - def tool_decorator(f): - return f - return tool_decorator - - def _setup(self): - warnings.warn(self.warnmsg) - - -default_toolbox = _d = Toolbox("tools") -_d.session_auth = SessionAuthTool(cptools.session_auth) -_d.allow = Tool('on_start_resource', cptools.allow) -_d.proxy = Tool('before_request_body', cptools.proxy, priority=30) -_d.response_headers = Tool('on_start_resource', cptools.response_headers) -_d.log_tracebacks = Tool('before_error_response', cptools.log_traceback) -_d.log_headers = Tool('before_error_response', cptools.log_request_headers) -_d.log_hooks = Tool('on_end_request', cptools.log_hooks, priority=100) -_d.err_redirect = ErrorTool(cptools.redirect) -_d.etags = Tool('before_finalize', cptools.validate_etags, priority=75) -_d.decode = Tool('before_request_body', encoding.decode) 
-# the order of encoding, gzip, caching is important -_d.encode = Tool('before_handler', encoding.ResponseEncoder, priority=70) -_d.gzip = Tool('before_finalize', encoding.gzip, priority=80) -_d.staticdir = HandlerTool(static.staticdir) -_d.staticfile = HandlerTool(static.staticfile) -_d.sessions = SessionTool() -_d.xmlrpc = ErrorTool(_xmlrpc.on_error) -_d.caching = CachingTool('before_handler', _caching.get, 'caching') -_d.expires = Tool('before_finalize', _caching.expires) -_d.tidy = DeprecatedTool( - 'before_finalize', - "The tidy tool has been removed from the standard distribution of " - "CherryPy. The most recent version can be found at " - "http://tools.cherrypy.org/browser.") -_d.nsgmls = DeprecatedTool( - 'before_finalize', - "The nsgmls tool has been removed from the standard distribution of " - "CherryPy. The most recent version can be found at " - "http://tools.cherrypy.org/browser.") -_d.ignore_headers = Tool('before_request_body', cptools.ignore_headers) -_d.referer = Tool('before_request_body', cptools.referer) -_d.basic_auth = Tool('on_start_resource', auth.basic_auth) -_d.digest_auth = Tool('on_start_resource', auth.digest_auth) -_d.trailing_slash = Tool('before_handler', cptools.trailing_slash, priority=60) -_d.flatten = Tool('before_finalize', cptools.flatten) -_d.accept = Tool('on_start_resource', cptools.accept) -_d.redirect = Tool('on_start_resource', cptools.redirect) -_d.autovary = Tool('on_start_resource', cptools.autovary, priority=0) -_d.json_in = Tool('before_request_body', jsontools.json_in, priority=30) -_d.json_out = Tool('before_handler', jsontools.json_out, priority=30) -_d.auth_basic = Tool('before_handler', auth_basic.basic_auth, priority=1) -_d.auth_digest = Tool('before_handler', auth_digest.digest_auth, priority=1) - -del _d, cptools, encoding, auth, static diff --git a/libs_crutch/contrib/cherrypy/_cptree.py b/libs_crutch/contrib/cherrypy/_cptree.py deleted file mode 100644 index a31b279..0000000 --- 
a/libs_crutch/contrib/cherrypy/_cptree.py +++ /dev/null @@ -1,299 +0,0 @@ -"""CherryPy Application and Tree objects.""" - -import os - -import cherrypy -from cherrypy._cpcompat import ntou, py3k -from cherrypy import _cpconfig, _cplogging, _cprequest, _cpwsgi, tools -from cherrypy.lib import httputil - - -class Application(object): - - """A CherryPy Application. - - Servers and gateways should not instantiate Request objects directly. - Instead, they should ask an Application object for a request object. - - An instance of this class may also be used as a WSGI callable - (WSGI application object) for itself. - """ - - root = None - """The top-most container of page handlers for this app. Handlers should - be arranged in a hierarchy of attributes, matching the expected URI - hierarchy; the default dispatcher then searches this hierarchy for a - matching handler. When using a dispatcher other than the default, - this value may be None.""" - - config = {} - """A dict of {path: pathconf} pairs, where 'pathconf' is itself a dict - of {key: value} pairs.""" - - namespaces = _cpconfig.NamespaceSet() - toolboxes = {'tools': cherrypy.tools} - - log = None - """A LogManager instance. See _cplogging.""" - - wsgiapp = None - """A CPWSGIApp instance. 
See _cpwsgi.""" - - request_class = _cprequest.Request - response_class = _cprequest.Response - - relative_urls = False - - def __init__(self, root, script_name="", config=None): - self.log = _cplogging.LogManager(id(self), cherrypy.log.logger_root) - self.root = root - self.script_name = script_name - self.wsgiapp = _cpwsgi.CPWSGIApp(self) - - self.namespaces = self.namespaces.copy() - self.namespaces["log"] = lambda k, v: setattr(self.log, k, v) - self.namespaces["wsgi"] = self.wsgiapp.namespace_handler - - self.config = self.__class__.config.copy() - if config: - self.merge(config) - - def __repr__(self): - return "%s.%s(%r, %r)" % (self.__module__, self.__class__.__name__, - self.root, self.script_name) - - script_name_doc = """The URI "mount point" for this app. A mount point - is that portion of the URI which is constant for all URIs that are - serviced by this application; it does not include scheme, host, or proxy - ("virtual host") portions of the URI. - - For example, if script_name is "/my/cool/app", then the URL - "http://www.example.com/my/cool/app/page1" might be handled by a - "page1" method on the root object. - - The value of script_name MUST NOT end in a slash. If the script_name - refers to the root of the URI, it MUST be an empty string (not "/"). - - If script_name is explicitly set to None, then the script_name will be - provided for each call from request.wsgi_environ['SCRIPT_NAME']. - """ - - def _get_script_name(self): - if self._script_name is not None: - return self._script_name - - # A `_script_name` with a value of None signals that the script name - # should be pulled from WSGI environ. 
- return cherrypy.serving.request.wsgi_environ['SCRIPT_NAME'].rstrip("/") - - def _set_script_name(self, value): - if value: - value = value.rstrip("/") - self._script_name = value - script_name = property(fget=_get_script_name, fset=_set_script_name, - doc=script_name_doc) - - def merge(self, config): - """Merge the given config into self.config.""" - _cpconfig.merge(self.config, config) - - # Handle namespaces specified in config. - self.namespaces(self.config.get("/", {})) - - def find_config(self, path, key, default=None): - """Return the most-specific value for key along path, or default.""" - trail = path or "/" - while trail: - nodeconf = self.config.get(trail, {}) - - if key in nodeconf: - return nodeconf[key] - - lastslash = trail.rfind("/") - if lastslash == -1: - break - elif lastslash == 0 and trail != "/": - trail = "/" - else: - trail = trail[:lastslash] - - return default - - def get_serving(self, local, remote, scheme, sproto): - """Create and return a Request and Response object.""" - req = self.request_class(local, remote, scheme, sproto) - req.app = self - - for name, toolbox in self.toolboxes.items(): - req.namespaces[name] = toolbox - - resp = self.response_class() - cherrypy.serving.load(req, resp) - cherrypy.engine.publish('acquire_thread') - cherrypy.engine.publish('before_request') - - return req, resp - - def release_serving(self): - """Release the current serving (request and response).""" - req = cherrypy.serving.request - - cherrypy.engine.publish('after_request') - - try: - req.close() - except: - cherrypy.log(traceback=True, severity=40) - - cherrypy.serving.clear() - - def __call__(self, environ, start_response): - return self.wsgiapp(environ, start_response) - - -class Tree(object): - - """A registry of CherryPy applications, mounted at diverse points. - - An instance of this class may also be used as a WSGI callable - (WSGI application object), in which case it dispatches to all - mounted apps. 
- """ - - apps = {} - """ - A dict of the form {script name: application}, where "script name" - is a string declaring the URI mount point (no trailing slash), and - "application" is an instance of cherrypy.Application (or an arbitrary - WSGI callable if you happen to be using a WSGI server).""" - - def __init__(self): - self.apps = {} - - def mount(self, root, script_name="", config=None): - """Mount a new app from a root object, script_name, and config. - - root - An instance of a "controller class" (a collection of page - handler methods) which represents the root of the application. - This may also be an Application instance, or None if using - a dispatcher other than the default. - - script_name - A string containing the "mount point" of the application. - This should start with a slash, and be the path portion of the - URL at which to mount the given root. For example, if root.index() - will handle requests to "http://www.example.com:8080/dept/app1/", - then the script_name argument would be "/dept/app1". - - It MUST NOT end in a slash. If the script_name refers to the - root of the URI, it MUST be an empty string (not "/"). - - config - A file or dict containing application config. - """ - if script_name is None: - raise TypeError( - "The 'script_name' argument may not be None. Application " - "objects may, however, possess a script_name of None (in " - "order to inpect the WSGI environ for SCRIPT_NAME upon each " - "request). You cannot mount such Applications on this Tree; " - "you must pass them to a WSGI server interface directly.") - - # Next line both 1) strips trailing slash and 2) maps "/" -> "". 
- script_name = script_name.rstrip("/") - - if isinstance(root, Application): - app = root - if script_name != "" and script_name != app.script_name: - raise ValueError( - "Cannot specify a different script name and pass an " - "Application instance to cherrypy.mount") - script_name = app.script_name - else: - app = Application(root, script_name) - - # If mounted at "", add favicon.ico - if (script_name == "" and root is not None - and not hasattr(root, "favicon_ico")): - favicon = os.path.join(os.getcwd(), os.path.dirname(__file__), - "favicon.ico") - root.favicon_ico = tools.staticfile.handler(favicon) - - if config: - app.merge(config) - - self.apps[script_name] = app - - return app - - def graft(self, wsgi_callable, script_name=""): - """Mount a wsgi callable at the given script_name.""" - # Next line both 1) strips trailing slash and 2) maps "/" -> "". - script_name = script_name.rstrip("/") - self.apps[script_name] = wsgi_callable - - def script_name(self, path=None): - """The script_name of the app at the given path, or None. - - If path is None, cherrypy.request is used. - """ - if path is None: - try: - request = cherrypy.serving.request - path = httputil.urljoin(request.script_name, - request.path_info) - except AttributeError: - return None - - while True: - if path in self.apps: - return path - - if path == "": - return None - - # Move one node up the tree and try again. - path = path[:path.rfind("/")] - - def __call__(self, environ, start_response): - # If you're calling this, then you're probably setting SCRIPT_NAME - # to '' (some WSGI servers always set SCRIPT_NAME to ''). - # Try to look up the app using the full path. 
- env1x = environ - if environ.get(ntou('wsgi.version')) == (ntou('u'), 0): - env1x = _cpwsgi.downgrade_wsgi_ux_to_1x(environ) - path = httputil.urljoin(env1x.get('SCRIPT_NAME', ''), - env1x.get('PATH_INFO', '')) - sn = self.script_name(path or "/") - if sn is None: - start_response('404 Not Found', []) - return [] - - app = self.apps[sn] - - # Correct the SCRIPT_NAME and PATH_INFO environ entries. - environ = environ.copy() - if not py3k: - if environ.get(ntou('wsgi.version')) == (ntou('u'), 0): - # Python 2/WSGI u.0: all strings MUST be of type unicode - enc = environ[ntou('wsgi.url_encoding')] - environ[ntou('SCRIPT_NAME')] = sn.decode(enc) - environ[ntou('PATH_INFO')] = path[ - len(sn.rstrip("/")):].decode(enc) - else: - # Python 2/WSGI 1.x: all strings MUST be of type str - environ['SCRIPT_NAME'] = sn - environ['PATH_INFO'] = path[len(sn.rstrip("/")):] - else: - if environ.get(ntou('wsgi.version')) == (ntou('u'), 0): - # Python 3/WSGI u.0: all strings MUST be full unicode - environ['SCRIPT_NAME'] = sn - environ['PATH_INFO'] = path[len(sn.rstrip("/")):] - else: - # Python 3/WSGI 1.x: all strings MUST be ISO-8859-1 str - environ['SCRIPT_NAME'] = sn.encode( - 'utf-8').decode('ISO-8859-1') - environ['PATH_INFO'] = path[ - len(sn.rstrip("/")):].encode('utf-8').decode('ISO-8859-1') - return app(environ, start_response) diff --git a/libs_crutch/contrib/cherrypy/_cpwsgi.py b/libs_crutch/contrib/cherrypy/_cpwsgi.py deleted file mode 100644 index f6db68b..0000000 --- a/libs_crutch/contrib/cherrypy/_cpwsgi.py +++ /dev/null @@ -1,438 +0,0 @@ -"""WSGI interface (see PEP 333 and 3333). - -Note that WSGI environ keys and values are 'native strings'; that is, -whatever the type of "" is. For Python 2, that's a byte string; for Python 3, -it's a unicode string. But PEP 3333 says: "even if Python's str type is -actually Unicode "under the hood", the content of native strings must -still be translatable to bytes via the Latin-1 encoding!" 
-""" - -import sys as _sys - -import cherrypy as _cherrypy -from cherrypy._cpcompat import BytesIO, bytestr, ntob, ntou, py3k, unicodestr -from cherrypy import _cperror -from cherrypy.lib import httputil -from cherrypy.lib import is_closable_iterator - -def downgrade_wsgi_ux_to_1x(environ): - """Return a new environ dict for WSGI 1.x from the given WSGI u.x environ. - """ - env1x = {} - - url_encoding = environ[ntou('wsgi.url_encoding')] - for k, v in list(environ.items()): - if k in [ntou('PATH_INFO'), ntou('SCRIPT_NAME'), ntou('QUERY_STRING')]: - v = v.encode(url_encoding) - elif isinstance(v, unicodestr): - v = v.encode('ISO-8859-1') - env1x[k.encode('ISO-8859-1')] = v - - return env1x - - -class VirtualHost(object): - - """Select a different WSGI application based on the Host header. - - This can be useful when running multiple sites within one CP server. - It allows several domains to point to different applications. For example:: - - root = Root() - RootApp = cherrypy.Application(root) - Domain2App = cherrypy.Application(root) - SecureApp = cherrypy.Application(Secure()) - - vhost = cherrypy._cpwsgi.VirtualHost(RootApp, - domains={'www.domain2.example': Domain2App, - 'www.domain2.example:443': SecureApp, - }) - - cherrypy.tree.graft(vhost) - """ - default = None - """Required. The default WSGI application.""" - - use_x_forwarded_host = True - """If True (the default), any "X-Forwarded-Host" - request header will be used instead of the "Host" header. This - is commonly added by HTTP servers (such as Apache) when proxying.""" - - domains = {} - """A dict of {host header value: application} pairs. - The incoming "Host" request header is looked up in this dict, - and, if a match is found, the corresponding WSGI application - will be called instead of the default. Note that you often need - separate entries for "example.com" and "www.example.com". - In addition, "Host" headers may contain the port number. 
- """ - - def __init__(self, default, domains=None, use_x_forwarded_host=True): - self.default = default - self.domains = domains or {} - self.use_x_forwarded_host = use_x_forwarded_host - - def __call__(self, environ, start_response): - domain = environ.get('HTTP_HOST', '') - if self.use_x_forwarded_host: - domain = environ.get("HTTP_X_FORWARDED_HOST", domain) - - nextapp = self.domains.get(domain) - if nextapp is None: - nextapp = self.default - return nextapp(environ, start_response) - - -class InternalRedirector(object): - - """WSGI middleware that handles raised cherrypy.InternalRedirect.""" - - def __init__(self, nextapp, recursive=False): - self.nextapp = nextapp - self.recursive = recursive - - def __call__(self, environ, start_response): - redirections = [] - while True: - environ = environ.copy() - try: - return self.nextapp(environ, start_response) - except _cherrypy.InternalRedirect: - ir = _sys.exc_info()[1] - sn = environ.get('SCRIPT_NAME', '') - path = environ.get('PATH_INFO', '') - qs = environ.get('QUERY_STRING', '') - - # Add the *previous* path_info + qs to redirections. - old_uri = sn + path - if qs: - old_uri += "?" + qs - redirections.append(old_uri) - - if not self.recursive: - # Check to see if the new URI has been redirected to - # already - new_uri = sn + ir.path - if ir.query_string: - new_uri += "?" + ir.query_string - if new_uri in redirections: - ir.request.close() - raise RuntimeError("InternalRedirector visited the " - "same URL twice: %r" % new_uri) - - # Munge the environment and try again. 
- environ['REQUEST_METHOD'] = "GET" - environ['PATH_INFO'] = ir.path - environ['QUERY_STRING'] = ir.query_string - environ['wsgi.input'] = BytesIO() - environ['CONTENT_LENGTH'] = "0" - environ['cherrypy.previous_request'] = ir.request - - -class ExceptionTrapper(object): - - """WSGI middleware that traps exceptions.""" - - def __init__(self, nextapp, throws=(KeyboardInterrupt, SystemExit)): - self.nextapp = nextapp - self.throws = throws - - def __call__(self, environ, start_response): - return _TrappedResponse( - self.nextapp, - environ, - start_response, - self.throws - ) - - -class _TrappedResponse(object): - - response = iter([]) - - def __init__(self, nextapp, environ, start_response, throws): - self.nextapp = nextapp - self.environ = environ - self.start_response = start_response - self.throws = throws - self.started_response = False - self.response = self.trap( - self.nextapp, self.environ, self.start_response) - self.iter_response = iter(self.response) - - def __iter__(self): - self.started_response = True - return self - - if py3k: - def __next__(self): - return self.trap(next, self.iter_response) - else: - def next(self): - return self.trap(self.iter_response.next) - - def close(self): - if hasattr(self.response, 'close'): - self.response.close() - - def trap(self, func, *args, **kwargs): - try: - return func(*args, **kwargs) - except self.throws: - raise - except StopIteration: - raise - except: - tb = _cperror.format_exc() - #print('trapped (started %s):' % self.started_response, tb) - _cherrypy.log(tb, severity=40) - if not _cherrypy.request.show_tracebacks: - tb = "" - s, h, b = _cperror.bare_error(tb) - if py3k: - # What fun. 
- s = s.decode('ISO-8859-1') - h = [(k.decode('ISO-8859-1'), v.decode('ISO-8859-1')) - for k, v in h] - if self.started_response: - # Empty our iterable (so future calls raise StopIteration) - self.iter_response = iter([]) - else: - self.iter_response = iter(b) - - try: - self.start_response(s, h, _sys.exc_info()) - except: - # "The application must not trap any exceptions raised by - # start_response, if it called start_response with exc_info. - # Instead, it should allow such exceptions to propagate - # back to the server or gateway." - # But we still log and call close() to clean up ourselves. - _cherrypy.log(traceback=True, severity=40) - raise - - if self.started_response: - return ntob("").join(b) - else: - return b - - -# WSGI-to-CP Adapter # - - -class AppResponse(object): - - """WSGI response iterable for CherryPy applications.""" - - def __init__(self, environ, start_response, cpapp): - self.cpapp = cpapp - try: - if not py3k: - if environ.get(ntou('wsgi.version')) == (ntou('u'), 0): - environ = downgrade_wsgi_ux_to_1x(environ) - self.environ = environ - self.run() - - r = _cherrypy.serving.response - - outstatus = r.output_status - if not isinstance(outstatus, bytestr): - raise TypeError("response.output_status is not a byte string.") - - outheaders = [] - for k, v in r.header_list: - if not isinstance(k, bytestr): - raise TypeError( - "response.header_list key %r is not a byte string." % - k) - if not isinstance(v, bytestr): - raise TypeError( - "response.header_list value %r is not a byte string." % - v) - outheaders.append((k, v)) - - if py3k: - # According to PEP 3333, when using Python 3, the response - # status and headers must be bytes masquerading as unicode; - # that is, they must be of type "str" but are restricted to - # code points in the "latin-1" set. 
- outstatus = outstatus.decode('ISO-8859-1') - outheaders = [(k.decode('ISO-8859-1'), v.decode('ISO-8859-1')) - for k, v in outheaders] - - self.iter_response = iter(r.body) - self.write = start_response(outstatus, outheaders) - except: - self.close() - raise - - def __iter__(self): - return self - - if py3k: - def __next__(self): - return next(self.iter_response) - else: - def next(self): - return self.iter_response.next() - - def close(self): - """Close and de-reference the current request and response. (Core)""" - streaming = _cherrypy.serving.response.stream - self.cpapp.release_serving() - - # We avoid the expense of examining the iterator to see if it's - # closable unless we are streaming the response, as that's the - # only situation where we are going to have an iterator which - # may not have been exhausted yet. - if streaming and is_closable_iterator(self.iter_response): - iter_close = self.iter_response.close - try: - iter_close() - except Exception: - _cherrypy.log(traceback=True, severity=40) - - def run(self): - """Create a Request object using environ.""" - env = self.environ.get - - local = httputil.Host('', int(env('SERVER_PORT', 80)), - env('SERVER_NAME', '')) - remote = httputil.Host(env('REMOTE_ADDR', ''), - int(env('REMOTE_PORT', -1) or -1), - env('REMOTE_HOST', '')) - scheme = env('wsgi.url_scheme') - sproto = env('ACTUAL_SERVER_PROTOCOL', "HTTP/1.1") - request, resp = self.cpapp.get_serving(local, remote, scheme, sproto) - - # LOGON_USER is served by IIS, and is the name of the - # user after having been mapped to a local account. - # Both IIS and Apache set REMOTE_USER, when possible. 
- request.login = env('LOGON_USER') or env('REMOTE_USER') or None - request.multithread = self.environ['wsgi.multithread'] - request.multiprocess = self.environ['wsgi.multiprocess'] - request.wsgi_environ = self.environ - request.prev = env('cherrypy.previous_request', None) - - meth = self.environ['REQUEST_METHOD'] - - path = httputil.urljoin(self.environ.get('SCRIPT_NAME', ''), - self.environ.get('PATH_INFO', '')) - qs = self.environ.get('QUERY_STRING', '') - - if py3k: - # This isn't perfect; if the given PATH_INFO is in the - # wrong encoding, it may fail to match the appropriate config - # section URI. But meh. - old_enc = self.environ.get('wsgi.url_encoding', 'ISO-8859-1') - new_enc = self.cpapp.find_config(self.environ.get('PATH_INFO', ''), - "request.uri_encoding", 'utf-8') - if new_enc.lower() != old_enc.lower(): - # Even though the path and qs are unicode, the WSGI server - # is required by PEP 3333 to coerce them to ISO-8859-1 - # masquerading as unicode. So we have to encode back to - # bytes and then decode again using the "correct" encoding. - try: - u_path = path.encode(old_enc).decode(new_enc) - u_qs = qs.encode(old_enc).decode(new_enc) - except (UnicodeEncodeError, UnicodeDecodeError): - # Just pass them through without transcoding and hope. - pass - else: - # Only set transcoded values if they both succeed. - path = u_path - qs = u_qs - - rproto = self.environ.get('SERVER_PROTOCOL') - headers = self.translate_headers(self.environ) - rfile = self.environ['wsgi.input'] - request.run(meth, path, qs, rproto, headers, rfile) - - headerNames = {'HTTP_CGI_AUTHORIZATION': 'Authorization', - 'CONTENT_LENGTH': 'Content-Length', - 'CONTENT_TYPE': 'Content-Type', - 'REMOTE_HOST': 'Remote-Host', - 'REMOTE_ADDR': 'Remote-Addr', - } - - def translate_headers(self, environ): - """Translate CGI-environ header names to HTTP header names.""" - for cgiName in environ: - # We assume all incoming header keys are uppercase already. 
- if cgiName in self.headerNames: - yield self.headerNames[cgiName], environ[cgiName] - elif cgiName[:5] == "HTTP_": - # Hackish attempt at recovering original header names. - translatedHeader = cgiName[5:].replace("_", "-") - yield translatedHeader, environ[cgiName] - - -class CPWSGIApp(object): - - """A WSGI application object for a CherryPy Application.""" - - pipeline = [('ExceptionTrapper', ExceptionTrapper), - ('InternalRedirector', InternalRedirector), - ] - """A list of (name, wsgiapp) pairs. Each 'wsgiapp' MUST be a - constructor that takes an initial, positional 'nextapp' argument, - plus optional keyword arguments, and returns a WSGI application - (that takes environ and start_response arguments). The 'name' can - be any you choose, and will correspond to keys in self.config.""" - - head = None - """Rather than nest all apps in the pipeline on each call, it's only - done the first time, and the result is memoized into self.head. Set - this to None again if you change self.pipeline after calling self.""" - - config = {} - """A dict whose keys match names listed in the pipeline. Each - value is a further dict which will be passed to the corresponding - named WSGI callable (from the pipeline) as keyword arguments.""" - - response_class = AppResponse - """The class to instantiate and return as the next app in the WSGI chain. - """ - - def __init__(self, cpapp, pipeline=None): - self.cpapp = cpapp - self.pipeline = self.pipeline[:] - if pipeline: - self.pipeline.extend(pipeline) - self.config = self.config.copy() - - def tail(self, environ, start_response): - """WSGI application callable for the actual CherryPy application. - - You probably shouldn't call this; call self.__call__ instead, - so that any WSGI middleware in self.pipeline can run first. 
- """ - return self.response_class(environ, start_response, self.cpapp) - - def __call__(self, environ, start_response): - head = self.head - if head is None: - # Create and nest the WSGI apps in our pipeline (in reverse order). - # Then memoize the result in self.head. - head = self.tail - for name, callable in self.pipeline[::-1]: - conf = self.config.get(name, {}) - head = callable(head, **conf) - self.head = head - return head(environ, start_response) - - def namespace_handler(self, k, v): - """Config handler for the 'wsgi' namespace.""" - if k == "pipeline": - # Note this allows multiple 'wsgi.pipeline' config entries - # (but each entry will be processed in a 'random' order). - # It should also allow developers to set default middleware - # in code (passed to self.__init__) that deployers can add to - # (but not remove) via config. - self.pipeline.extend(v) - elif k == "response_class": - self.response_class = v - else: - name, arg = k.split(".", 1) - bucket = self.config.setdefault(name, {}) - bucket[arg] = v diff --git a/libs_crutch/contrib/cherrypy/_cpwsgi_server.py b/libs_crutch/contrib/cherrypy/_cpwsgi_server.py deleted file mode 100644 index 874e2e9..0000000 --- a/libs_crutch/contrib/cherrypy/_cpwsgi_server.py +++ /dev/null @@ -1,70 +0,0 @@ -"""WSGI server interface (see PEP 333). This adds some CP-specific bits to -the framework-agnostic wsgiserver package. -""" -import sys - -import cherrypy -from cherrypy import wsgiserver - - -class CPWSGIServer(wsgiserver.CherryPyWSGIServer): - - """Wrapper for wsgiserver.CherryPyWSGIServer. - - wsgiserver has been designed to not reference CherryPy in any way, - so that it can be used in other frameworks and applications. Therefore, - we wrap it here, so we can set our own mount points from cherrypy.tree - and apply some attributes from config -> cherrypy.server -> wsgiserver. 
- """ - - def __init__(self, server_adapter=cherrypy.server): - self.server_adapter = server_adapter - self.max_request_header_size = ( - self.server_adapter.max_request_header_size or 0 - ) - self.max_request_body_size = ( - self.server_adapter.max_request_body_size or 0 - ) - - server_name = (self.server_adapter.socket_host or - self.server_adapter.socket_file or - None) - - self.wsgi_version = self.server_adapter.wsgi_version - s = wsgiserver.CherryPyWSGIServer - s.__init__(self, server_adapter.bind_addr, cherrypy.tree, - self.server_adapter.thread_pool, - server_name, - max=self.server_adapter.thread_pool_max, - request_queue_size=self.server_adapter.socket_queue_size, - timeout=self.server_adapter.socket_timeout, - shutdown_timeout=self.server_adapter.shutdown_timeout, - accepted_queue_size=self.server_adapter.accepted_queue_size, - accepted_queue_timeout=self.server_adapter.accepted_queue_timeout, - ) - self.protocol = self.server_adapter.protocol_version - self.nodelay = self.server_adapter.nodelay - - if sys.version_info >= (3, 0): - ssl_module = self.server_adapter.ssl_module or 'builtin' - else: - ssl_module = self.server_adapter.ssl_module or 'pyopenssl' - if self.server_adapter.ssl_context: - adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module) - self.ssl_adapter = adapter_class( - self.server_adapter.ssl_certificate, - self.server_adapter.ssl_private_key, - self.server_adapter.ssl_certificate_chain) - self.ssl_adapter.context = self.server_adapter.ssl_context - elif self.server_adapter.ssl_certificate: - adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module) - self.ssl_adapter = adapter_class( - self.server_adapter.ssl_certificate, - self.server_adapter.ssl_private_key, - self.server_adapter.ssl_certificate_chain) - - self.stats['Enabled'] = getattr( - self.server_adapter, 'statistics', False) - - def error_log(self, msg="", level=20, traceback=False): - cherrypy.engine.log(msg, level, traceback) diff --git 
a/libs_crutch/contrib/cherrypy/daemon.py b/libs_crutch/contrib/cherrypy/daemon.py deleted file mode 100644 index d71e632..0000000 --- a/libs_crutch/contrib/cherrypy/daemon.py +++ /dev/null @@ -1,109 +0,0 @@ -"""The CherryPy daemon.""" - -import sys - -import cherrypy -from cherrypy.process import plugins, servers -from cherrypy import Application - - -def start(configfiles=None, daemonize=False, environment=None, - fastcgi=False, scgi=False, pidfile=None, imports=None, - cgi=False): - """Subscribe all engine plugins and start the engine.""" - sys.path = [''] + sys.path - for i in imports or []: - exec("import %s" % i) - - for c in configfiles or []: - cherrypy.config.update(c) - # If there's only one app mounted, merge config into it. - if len(cherrypy.tree.apps) == 1: - for app in cherrypy.tree.apps.values(): - if isinstance(app, Application): - app.merge(c) - - engine = cherrypy.engine - - if environment is not None: - cherrypy.config.update({'environment': environment}) - - # Only daemonize if asked to. - if daemonize: - # Don't print anything to stdout/sterr. - cherrypy.config.update({'log.screen': False}) - plugins.Daemonizer(engine).subscribe() - - if pidfile: - plugins.PIDFile(engine, pidfile).subscribe() - - if hasattr(engine, "signal_handler"): - engine.signal_handler.subscribe() - if hasattr(engine, "console_control_handler"): - engine.console_control_handler.subscribe() - - if (fastcgi and (scgi or cgi)) or (scgi and cgi): - cherrypy.log.error("You may only specify one of the cgi, fastcgi, and " - "scgi options.", 'ENGINE') - sys.exit(1) - elif fastcgi or scgi or cgi: - # Turn off autoreload when using *cgi. - cherrypy.config.update({'engine.autoreload_on': False}) - # Turn off the default HTTP server (which is subscribed by default). 
- cherrypy.server.unsubscribe() - - addr = cherrypy.server.bind_addr - if fastcgi: - f = servers.FlupFCGIServer(application=cherrypy.tree, - bindAddress=addr) - elif scgi: - f = servers.FlupSCGIServer(application=cherrypy.tree, - bindAddress=addr) - else: - f = servers.FlupCGIServer(application=cherrypy.tree, - bindAddress=addr) - s = servers.ServerAdapter(engine, httpserver=f, bind_addr=addr) - s.subscribe() - - # Always start the engine; this will start all other services - try: - engine.start() - except: - # Assume the error has been logged already via bus.log. - sys.exit(1) - else: - engine.block() - - -def run(): - from optparse import OptionParser - - p = OptionParser() - p.add_option('-c', '--config', action="append", dest='config', - help="specify config file(s)") - p.add_option('-d', action="store_true", dest='daemonize', - help="run the server as a daemon") - p.add_option('-e', '--environment', dest='environment', default=None, - help="apply the given config environment") - p.add_option('-f', action="store_true", dest='fastcgi', - help="start a fastcgi server instead of the default HTTP " - "server") - p.add_option('-s', action="store_true", dest='scgi', - help="start a scgi server instead of the default HTTP server") - p.add_option('-x', action="store_true", dest='cgi', - help="start a cgi server instead of the default HTTP server") - p.add_option('-i', '--import', action="append", dest='imports', - help="specify modules to import") - p.add_option('-p', '--pidfile', dest='pidfile', default=None, - help="store the process id in the given file") - p.add_option('-P', '--Path', action="append", dest='Path', - help="add the given paths to sys.path") - options, args = p.parse_args() - - if options.Path: - for p in options.Path: - sys.path.insert(0, p) - - start(options.config, options.daemonize, - options.environment, options.fastcgi, options.scgi, - options.pidfile, options.imports, options.cgi) diff --git a/libs_crutch/contrib/cherrypy/lib/__init__.py 
b/libs_crutch/contrib/cherrypy/lib/__init__.py deleted file mode 100644 index a75a53d..0000000 --- a/libs_crutch/contrib/cherrypy/lib/__init__.py +++ /dev/null @@ -1,85 +0,0 @@ -"""CherryPy Library""" - -# Deprecated in CherryPy 3.2 -- remove in CherryPy 3.3 -from cherrypy.lib.reprconf import unrepr, modules, attributes - -def is_iterator(obj): - '''Returns a boolean indicating if the object provided implements - the iterator protocol (i.e. like a generator). This will return - false for objects which iterable, but not iterators themselves.''' - from types import GeneratorType - if isinstance(obj, GeneratorType): - return True - elif not hasattr(obj, '__iter__'): - return False - else: - # Types which implement the protocol must return themselves when - # invoking 'iter' upon them. - return iter(obj) is obj - -def is_closable_iterator(obj): - - # Not an iterator. - if not is_iterator(obj): - return False - - # A generator - the easiest thing to deal with. - import inspect - if inspect.isgenerator(obj): - return True - - # A custom iterator. Look for a close method... - if not (hasattr(obj, 'close') and callable(obj.close)): - return False - - # ... which doesn't require any arguments. - try: - inspect.getcallargs(obj.close) - except TypeError: - return False - else: - return True - -class file_generator(object): - - """Yield the given input (a file object) in chunks (default 64k). (Core)""" - - def __init__(self, input, chunkSize=65536): - self.input = input - self.chunkSize = chunkSize - - def __iter__(self): - return self - - def __next__(self): - chunk = self.input.read(self.chunkSize) - if chunk: - return chunk - else: - if hasattr(self.input, 'close'): - self.input.close() - raise StopIteration() - next = __next__ - - -def file_generator_limited(fileobj, count, chunk_size=65536): - """Yield the given file object in chunks, stopping after `count` - bytes has been emitted. Default chunk size is 64kB. 
(Core) - """ - remaining = count - while remaining > 0: - chunk = fileobj.read(min(chunk_size, remaining)) - chunklen = len(chunk) - if chunklen == 0: - return - remaining -= chunklen - yield chunk - - -def set_vary_header(response, header_name): - "Add a Vary header to a response" - varies = response.headers.get("Vary", "") - varies = [x.strip() for x in varies.split(",") if x.strip()] - if header_name not in varies: - varies.append(header_name) - response.headers['Vary'] = ", ".join(varies) diff --git a/libs_crutch/contrib/cherrypy/lib/auth.py b/libs_crutch/contrib/cherrypy/lib/auth.py deleted file mode 100644 index 71591aa..0000000 --- a/libs_crutch/contrib/cherrypy/lib/auth.py +++ /dev/null @@ -1,97 +0,0 @@ -import cherrypy -from cherrypy.lib import httpauth - - -def check_auth(users, encrypt=None, realm=None): - """If an authorization header contains credentials, return True or False. - """ - request = cherrypy.serving.request - if 'authorization' in request.headers: - # make sure the provided credentials are correctly set - ah = httpauth.parseAuthorization(request.headers['authorization']) - if ah is None: - raise cherrypy.HTTPError(400, 'Bad Request') - - if not encrypt: - encrypt = httpauth.DIGEST_AUTH_ENCODERS[httpauth.MD5] - - if hasattr(users, '__call__'): - try: - # backward compatibility - users = users() # expect it to return a dictionary - - if not isinstance(users, dict): - raise ValueError( - "Authentication users must be a dictionary") - - # fetch the user password - password = users.get(ah["username"], None) - except TypeError: - # returns a password (encrypted or clear text) - password = users(ah["username"]) - else: - if not isinstance(users, dict): - raise ValueError("Authentication users must be a dictionary") - - # fetch the user password - password = users.get(ah["username"], None) - - # validate the authorization by re-computing it here - # and compare it with what the user-agent provided - if httpauth.checkResponse(ah, password, 
method=request.method, - encrypt=encrypt, realm=realm): - request.login = ah["username"] - return True - - request.login = False - return False - - -def basic_auth(realm, users, encrypt=None, debug=False): - """If auth fails, raise 401 with a basic authentication header. - - realm - A string containing the authentication realm. - - users - A dict of the form: {username: password} or a callable returning - a dict. - - encrypt - callable used to encrypt the password returned from the user-agent. - if None it defaults to a md5 encryption. - - """ - if check_auth(users, encrypt): - if debug: - cherrypy.log('Auth successful', 'TOOLS.BASIC_AUTH') - return - - # inform the user-agent this path is protected - cherrypy.serving.response.headers[ - 'www-authenticate'] = httpauth.basicAuth(realm) - - raise cherrypy.HTTPError( - 401, "You are not authorized to access that resource") - - -def digest_auth(realm, users, debug=False): - """If auth fails, raise 401 with a digest authentication header. - - realm - A string containing the authentication realm. - users - A dict of the form: {username: password} or a callable returning - a dict. - """ - if check_auth(users, realm=realm): - if debug: - cherrypy.log('Auth successful', 'TOOLS.DIGEST_AUTH') - return - - # inform the user-agent this path is protected - cherrypy.serving.response.headers[ - 'www-authenticate'] = httpauth.digestAuth(realm) - - raise cherrypy.HTTPError( - 401, "You are not authorized to access that resource") diff --git a/libs_crutch/contrib/cherrypy/lib/auth_basic.py b/libs_crutch/contrib/cherrypy/lib/auth_basic.py deleted file mode 100644 index 5ba16f7..0000000 --- a/libs_crutch/contrib/cherrypy/lib/auth_basic.py +++ /dev/null @@ -1,90 +0,0 @@ -# This file is part of CherryPy -# -*- coding: utf-8 -*- -# vim:ts=4:sw=4:expandtab:fileencoding=utf-8 - -__doc__ = """This module provides a CherryPy 3.x tool which implements -the server-side of HTTP Basic Access Authentication, as described in -:rfc:`2617`. 
- -Example usage, using the built-in checkpassword_dict function which uses a dict -as the credentials store:: - - userpassdict = {'bird' : 'bebop', 'ornette' : 'wayout'} - checkpassword = cherrypy.lib.auth_basic.checkpassword_dict(userpassdict) - basic_auth = {'tools.auth_basic.on': True, - 'tools.auth_basic.realm': 'earth', - 'tools.auth_basic.checkpassword': checkpassword, - } - app_config = { '/' : basic_auth } - -""" - -__author__ = 'visteya' -__date__ = 'April 2009' - -import binascii -from cherrypy._cpcompat import base64_decode -import cherrypy - - -def checkpassword_dict(user_password_dict): - """Returns a checkpassword function which checks credentials - against a dictionary of the form: {username : password}. - - If you want a simple dictionary-based authentication scheme, use - checkpassword_dict(my_credentials_dict) as the value for the - checkpassword argument to basic_auth(). - """ - def checkpassword(realm, user, password): - p = user_password_dict.get(user) - return p and p == password or False - - return checkpassword - - -def basic_auth(realm, checkpassword, debug=False): - """A CherryPy tool which hooks at before_handler to perform - HTTP Basic Access Authentication, as specified in :rfc:`2617`. - - If the request has an 'authorization' header with a 'Basic' scheme, this - tool attempts to authenticate the credentials supplied in that header. If - the request has no 'authorization' header, or if it does but the scheme is - not 'Basic', or if authentication fails, the tool sends a 401 response with - a 'WWW-Authenticate' Basic header. - - realm - A string containing the authentication realm. - - checkpassword - A callable which checks the authentication credentials. - Its signature is checkpassword(realm, username, password). where - username and password are the values obtained from the request's - 'authorization' header. If authentication succeeds, checkpassword - returns True, else it returns False. 
- - """ - - if '"' in realm: - raise ValueError('Realm cannot contain the " (quote) character.') - request = cherrypy.serving.request - - auth_header = request.headers.get('authorization') - if auth_header is not None: - try: - scheme, params = auth_header.split(' ', 1) - if scheme.lower() == 'basic': - username, password = base64_decode(params).split(':', 1) - if checkpassword(realm, username, password): - if debug: - cherrypy.log('Auth succeeded', 'TOOLS.AUTH_BASIC') - request.login = username - return # successful authentication - # split() error, base64.decodestring() error - except (ValueError, binascii.Error): - raise cherrypy.HTTPError(400, 'Bad Request') - - # Respond with 401 status and a WWW-Authenticate header - cherrypy.serving.response.headers[ - 'www-authenticate'] = 'Basic realm="%s"' % realm - raise cherrypy.HTTPError( - 401, "You are not authorized to access that resource") diff --git a/libs_crutch/contrib/cherrypy/lib/auth_digest.py b/libs_crutch/contrib/cherrypy/lib/auth_digest.py deleted file mode 100644 index e06535d..0000000 --- a/libs_crutch/contrib/cherrypy/lib/auth_digest.py +++ /dev/null @@ -1,390 +0,0 @@ -# This file is part of CherryPy -# -*- coding: utf-8 -*- -# vim:ts=4:sw=4:expandtab:fileencoding=utf-8 - -__doc__ = """An implementation of the server-side of HTTP Digest Access -Authentication, which is described in :rfc:`2617`. 
- -Example usage, using the built-in get_ha1_dict_plain function which uses a dict -of plaintext passwords as the credentials store:: - - userpassdict = {'alice' : '4x5istwelve'} - get_ha1 = cherrypy.lib.auth_digest.get_ha1_dict_plain(userpassdict) - digest_auth = {'tools.auth_digest.on': True, - 'tools.auth_digest.realm': 'wonderland', - 'tools.auth_digest.get_ha1': get_ha1, - 'tools.auth_digest.key': 'a565c27146791cfb', - } - app_config = { '/' : digest_auth } -""" - -__author__ = 'visteya' -__date__ = 'April 2009' - - -import time -from cherrypy._cpcompat import parse_http_list, parse_keqv_list - -import cherrypy -from cherrypy._cpcompat import md5, ntob -md5_hex = lambda s: md5(ntob(s)).hexdigest() - -qop_auth = 'auth' -qop_auth_int = 'auth-int' -valid_qops = (qop_auth, qop_auth_int) - -valid_algorithms = ('MD5', 'MD5-sess') - - -def TRACE(msg): - cherrypy.log(msg, context='TOOLS.AUTH_DIGEST') - -# Three helper functions for users of the tool, providing three variants -# of get_ha1() functions for three different kinds of credential stores. - - -def get_ha1_dict_plain(user_password_dict): - """Returns a get_ha1 function which obtains a plaintext password from a - dictionary of the form: {username : password}. - - If you want a simple dictionary-based authentication scheme, with plaintext - passwords, use get_ha1_dict_plain(my_userpass_dict) as the value for the - get_ha1 argument to digest_auth(). - """ - def get_ha1(realm, username): - password = user_password_dict.get(username) - if password: - return md5_hex('%s:%s:%s' % (username, realm, password)) - return None - - return get_ha1 - - -def get_ha1_dict(user_ha1_dict): - """Returns a get_ha1 function which obtains a HA1 password hash from a - dictionary of the form: {username : HA1}. - - If you want a dictionary-based authentication scheme, but with - pre-computed HA1 hashes instead of plain-text passwords, use - get_ha1_dict(my_userha1_dict) as the value for the get_ha1 - argument to digest_auth(). 
- """ - def get_ha1(realm, username): - return user_ha1_dict.get(username) - - return get_ha1 - - -def get_ha1_file_htdigest(filename): - """Returns a get_ha1 function which obtains a HA1 password hash from a - flat file with lines of the same format as that produced by the Apache - htdigest utility. For example, for realm 'wonderland', username 'alice', - and password '4x5istwelve', the htdigest line would be:: - - alice:wonderland:3238cdfe91a8b2ed8e39646921a02d4c - - If you want to use an Apache htdigest file as the credentials store, - then use get_ha1_file_htdigest(my_htdigest_file) as the value for the - get_ha1 argument to digest_auth(). It is recommended that the filename - argument be an absolute path, to avoid problems. - """ - def get_ha1(realm, username): - result = None - f = open(filename, 'r') - for line in f: - u, r, ha1 = line.rstrip().split(':') - if u == username and r == realm: - result = ha1 - break - f.close() - return result - - return get_ha1 - - -def synthesize_nonce(s, key, timestamp=None): - """Synthesize a nonce value which resists spoofing and can be checked - for staleness. Returns a string suitable as the value for 'nonce' in - the www-authenticate header. - - s - A string related to the resource, such as the hostname of the server. - - key - A secret string known only to the server. - - timestamp - An integer seconds-since-the-epoch timestamp - - """ - if timestamp is None: - timestamp = int(time.time()) - h = md5_hex('%s:%s:%s' % (timestamp, s, key)) - nonce = '%s:%s' % (timestamp, h) - return nonce - - -def H(s): - """The hash function H""" - return md5_hex(s) - - -class HttpDigestAuthorization (object): - - """Class to parse a Digest Authorization header and perform re-calculation - of the digest. 
- """ - - def errmsg(self, s): - return 'Digest Authorization header: %s' % s - - def __init__(self, auth_header, http_method, debug=False): - self.http_method = http_method - self.debug = debug - scheme, params = auth_header.split(" ", 1) - self.scheme = scheme.lower() - if self.scheme != 'digest': - raise ValueError('Authorization scheme is not "Digest"') - - self.auth_header = auth_header - - # make a dict of the params - items = parse_http_list(params) - paramsd = parse_keqv_list(items) - - self.realm = paramsd.get('realm') - self.username = paramsd.get('username') - self.nonce = paramsd.get('nonce') - self.uri = paramsd.get('uri') - self.method = paramsd.get('method') - self.response = paramsd.get('response') # the response digest - self.algorithm = paramsd.get('algorithm', 'MD5').upper() - self.cnonce = paramsd.get('cnonce') - self.opaque = paramsd.get('opaque') - self.qop = paramsd.get('qop') # qop - self.nc = paramsd.get('nc') # nonce count - - # perform some correctness checks - if self.algorithm not in valid_algorithms: - raise ValueError( - self.errmsg("Unsupported value for algorithm: '%s'" % - self.algorithm)) - - has_reqd = ( - self.username and - self.realm and - self.nonce and - self.uri and - self.response - ) - if not has_reqd: - raise ValueError( - self.errmsg("Not all required parameters are present.")) - - if self.qop: - if self.qop not in valid_qops: - raise ValueError( - self.errmsg("Unsupported value for qop: '%s'" % self.qop)) - if not (self.cnonce and self.nc): - raise ValueError( - self.errmsg("If qop is sent then " - "cnonce and nc MUST be present")) - else: - if self.cnonce or self.nc: - raise ValueError( - self.errmsg("If qop is not sent, " - "neither cnonce nor nc can be present")) - - def __str__(self): - return 'authorization : %s' % self.auth_header - - def validate_nonce(self, s, key): - """Validate the nonce. - Returns True if nonce was generated by synthesize_nonce() and the - timestamp is not spoofed, else returns False. 
- - s - A string related to the resource, such as the hostname of - the server. - - key - A secret string known only to the server. - - Both s and key must be the same values which were used to synthesize - the nonce we are trying to validate. - """ - try: - timestamp, hashpart = self.nonce.split(':', 1) - s_timestamp, s_hashpart = synthesize_nonce( - s, key, timestamp).split(':', 1) - is_valid = s_hashpart == hashpart - if self.debug: - TRACE('validate_nonce: %s' % is_valid) - return is_valid - except ValueError: # split() error - pass - return False - - def is_nonce_stale(self, max_age_seconds=600): - """Returns True if a validated nonce is stale. The nonce contains a - timestamp in plaintext and also a secure hash of the timestamp. - You should first validate the nonce to ensure the plaintext - timestamp is not spoofed. - """ - try: - timestamp, hashpart = self.nonce.split(':', 1) - if int(timestamp) + max_age_seconds > int(time.time()): - return False - except ValueError: # int() error - pass - if self.debug: - TRACE("nonce is stale") - return True - - def HA2(self, entity_body=''): - """Returns the H(A2) string. See :rfc:`2617` section 3.2.2.3.""" - # RFC 2617 3.2.2.3 - # If the "qop" directive's value is "auth" or is unspecified, - # then A2 is: - # A2 = method ":" digest-uri-value - # - # If the "qop" value is "auth-int", then A2 is: - # A2 = method ":" digest-uri-value ":" H(entity-body) - if self.qop is None or self.qop == "auth": - a2 = '%s:%s' % (self.http_method, self.uri) - elif self.qop == "auth-int": - a2 = "%s:%s:%s" % (self.http_method, self.uri, H(entity_body)) - else: - # in theory, this should never happen, since I validate qop in - # __init__() - raise ValueError(self.errmsg("Unrecognized value for qop!")) - return H(a2) - - def request_digest(self, ha1, entity_body=''): - """Calculates the Request-Digest. See :rfc:`2617` section 3.2.2.1. - - ha1 - The HA1 string obtained from the credentials store. 
- - entity_body - If 'qop' is set to 'auth-int', then A2 includes a hash - of the "entity body". The entity body is the part of the - message which follows the HTTP headers. See :rfc:`2617` section - 4.3. This refers to the entity the user agent sent in the - request which has the Authorization header. Typically GET - requests don't have an entity, and POST requests do. - - """ - ha2 = self.HA2(entity_body) - # Request-Digest -- RFC 2617 3.2.2.1 - if self.qop: - req = "%s:%s:%s:%s:%s" % ( - self.nonce, self.nc, self.cnonce, self.qop, ha2) - else: - req = "%s:%s" % (self.nonce, ha2) - - # RFC 2617 3.2.2.2 - # - # If the "algorithm" directive's value is "MD5" or is unspecified, - # then A1 is: - # A1 = unq(username-value) ":" unq(realm-value) ":" passwd - # - # If the "algorithm" directive's value is "MD5-sess", then A1 is - # calculated only once - on the first request by the client following - # receipt of a WWW-Authenticate challenge from the server. - # A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd ) - # ":" unq(nonce-value) ":" unq(cnonce-value) - if self.algorithm == 'MD5-sess': - ha1 = H('%s:%s:%s' % (ha1, self.nonce, self.cnonce)) - - digest = H('%s:%s' % (ha1, req)) - return digest - - -def www_authenticate(realm, key, algorithm='MD5', nonce=None, qop=qop_auth, - stale=False): - """Constructs a WWW-Authenticate header for Digest authentication.""" - if qop not in valid_qops: - raise ValueError("Unsupported value for qop: '%s'" % qop) - if algorithm not in valid_algorithms: - raise ValueError("Unsupported value for algorithm: '%s'" % algorithm) - - if nonce is None: - nonce = synthesize_nonce(realm, key) - s = 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"' % ( - realm, nonce, algorithm, qop) - if stale: - s += ', stale="true"' - return s - - -def digest_auth(realm, get_ha1, key, debug=False): - """A CherryPy tool which hooks at before_handler to perform - HTTP Digest Access Authentication, as specified in :rfc:`2617`. 
- - If the request has an 'authorization' header with a 'Digest' scheme, - this tool authenticates the credentials supplied in that header. - If the request has no 'authorization' header, or if it does but the - scheme is not "Digest", or if authentication fails, the tool sends - a 401 response with a 'WWW-Authenticate' Digest header. - - realm - A string containing the authentication realm. - - get_ha1 - A callable which looks up a username in a credentials store - and returns the HA1 string, which is defined in the RFC to be - MD5(username : realm : password). The function's signature is: - ``get_ha1(realm, username)`` - where username is obtained from the request's 'authorization' header. - If username is not found in the credentials store, get_ha1() returns - None. - - key - A secret string known only to the server, used in the synthesis - of nonces. - - """ - request = cherrypy.serving.request - - auth_header = request.headers.get('authorization') - nonce_is_stale = False - if auth_header is not None: - try: - auth = HttpDigestAuthorization( - auth_header, request.method, debug=debug) - except ValueError: - raise cherrypy.HTTPError( - 400, "The Authorization header could not be parsed.") - - if debug: - TRACE(str(auth)) - - if auth.validate_nonce(realm, key): - ha1 = get_ha1(realm, auth.username) - if ha1 is not None: - # note that for request.body to be available we need to - # hook in at before_handler, not on_start_resource like - # 3.1.x digest_auth does. - digest = auth.request_digest(ha1, entity_body=request.body) - if digest == auth.response: # authenticated - if debug: - TRACE("digest matches auth.response") - # Now check if nonce is stale. 
- # The choice of ten minutes' lifetime for nonce is somewhat - # arbitrary - nonce_is_stale = auth.is_nonce_stale(max_age_seconds=600) - if not nonce_is_stale: - request.login = auth.username - if debug: - TRACE("authentication of %s successful" % - auth.username) - return - - # Respond with 401 status and a WWW-Authenticate header - header = www_authenticate(realm, key, stale=nonce_is_stale) - if debug: - TRACE(header) - cherrypy.serving.response.headers['WWW-Authenticate'] = header - raise cherrypy.HTTPError( - 401, "You are not authorized to access that resource") diff --git a/libs_crutch/contrib/cherrypy/lib/caching.py b/libs_crutch/contrib/cherrypy/lib/caching.py deleted file mode 100644 index fab6b56..0000000 --- a/libs_crutch/contrib/cherrypy/lib/caching.py +++ /dev/null @@ -1,470 +0,0 @@ -""" -CherryPy implements a simple caching system as a pluggable Tool. This tool -tries to be an (in-process) HTTP/1.1-compliant cache. It's not quite there -yet, but it's probably good enough for most sites. - -In general, GET responses are cached (along with selecting headers) and, if -another request arrives for the same resource, the caching Tool will return 304 -Not Modified if possible, or serve the cached response otherwise. It also sets -request.cached to True if serving a cached representation, and sets -request.cacheable to False (so it doesn't get cached again). - -If POST, PUT, or DELETE requests are made for a cached resource, they -invalidate (delete) any cached response. - -Usage -===== - -Configuration file example:: - - [/] - tools.caching.on = True - tools.caching.delay = 3600 - -You may use a class other than the default -:class:`MemoryCache` by supplying the config -entry ``cache_class``; supply the full dotted name of the replacement class -as the config value. It must implement the basic methods ``get``, ``put``, -``delete``, and ``clear``. - -You may set any attribute, including overriding methods, on the cache -instance by providing them in config. 
The above sets the -:attr:`delay` attribute, for example. -""" - -import datetime -import sys -import threading -import time - -import cherrypy -from cherrypy.lib import cptools, httputil -from cherrypy._cpcompat import copyitems, ntob, set_daemon, sorted, Event - - -class Cache(object): - - """Base class for Cache implementations.""" - - def get(self): - """Return the current variant if in the cache, else None.""" - raise NotImplemented - - def put(self, obj, size): - """Store the current variant in the cache.""" - raise NotImplemented - - def delete(self): - """Remove ALL cached variants of the current resource.""" - raise NotImplemented - - def clear(self): - """Reset the cache to its initial, empty state.""" - raise NotImplemented - - -# ------------------------------ Memory Cache ------------------------------- # -class AntiStampedeCache(dict): - - """A storage system for cached items which reduces stampede collisions.""" - - def wait(self, key, timeout=5, debug=False): - """Return the cached value for the given key, or None. - - If timeout is not None, and the value is already - being calculated by another thread, wait until the given timeout has - elapsed. If the value is available before the timeout expires, it is - returned. If not, None is returned, and a sentinel placed in the cache - to signal other threads to wait. - - If timeout is None, no waiting is performed nor sentinels used. - """ - value = self.get(key) - if isinstance(value, Event): - if timeout is None: - # Ignore the other thread and recalc it ourselves. - if debug: - cherrypy.log('No timeout', 'TOOLS.CACHING') - return None - - # Wait until it's done or times out. - if debug: - cherrypy.log('Waiting up to %s seconds' % - timeout, 'TOOLS.CACHING') - value.wait(timeout) - if value.result is not None: - # The other thread finished its calculation. Use it. - if debug: - cherrypy.log('Result!', 'TOOLS.CACHING') - return value.result - # Timed out. 
Stick an Event in the slot so other threads wait - # on this one to finish calculating the value. - if debug: - cherrypy.log('Timed out', 'TOOLS.CACHING') - e = threading.Event() - e.result = None - dict.__setitem__(self, key, e) - - return None - elif value is None: - # Stick an Event in the slot so other threads wait - # on this one to finish calculating the value. - if debug: - cherrypy.log('Timed out', 'TOOLS.CACHING') - e = threading.Event() - e.result = None - dict.__setitem__(self, key, e) - return value - - def __setitem__(self, key, value): - """Set the cached value for the given key.""" - existing = self.get(key) - dict.__setitem__(self, key, value) - if isinstance(existing, Event): - # Set Event.result so other threads waiting on it have - # immediate access without needing to poll the cache again. - existing.result = value - existing.set() - - -class MemoryCache(Cache): - - """An in-memory cache for varying response content. - - Each key in self.store is a URI, and each value is an AntiStampedeCache. - The response for any given URI may vary based on the values of - "selecting request headers"; that is, those named in the Vary - response header. We assume the list of header names to be constant - for each URI throughout the lifetime of the application, and store - that list in ``self.store[uri].selecting_headers``. - - The items contained in ``self.store[uri]`` have keys which are tuples of - request header values (in the same order as the names in its - selecting_headers), and values which are the actual responses. - """ - - maxobjects = 1000 - """The maximum number of cached objects; defaults to 1000.""" - - maxobj_size = 100000 - """The maximum size of each cached object in bytes; defaults to 100 KB.""" - - maxsize = 10000000 - """The maximum size of the entire cache in bytes; defaults to 10 MB.""" - - delay = 600 - """Seconds until the cached content expires; defaults to 600 (10 minutes). 
- """ - - antistampede_timeout = 5 - """Seconds to wait for other threads to release a cache lock.""" - - expire_freq = 0.1 - """Seconds to sleep between cache expiration sweeps.""" - - debug = False - - def __init__(self): - self.clear() - - # Run self.expire_cache in a separate daemon thread. - t = threading.Thread(target=self.expire_cache, name='expire_cache') - self.expiration_thread = t - set_daemon(t, True) - t.start() - - def clear(self): - """Reset the cache to its initial, empty state.""" - self.store = {} - self.expirations = {} - self.tot_puts = 0 - self.tot_gets = 0 - self.tot_hist = 0 - self.tot_expires = 0 - self.tot_non_modified = 0 - self.cursize = 0 - - def expire_cache(self): - """Continuously examine cached objects, expiring stale ones. - - This function is designed to be run in its own daemon thread, - referenced at ``self.expiration_thread``. - """ - # It's possible that "time" will be set to None - # arbitrarily, so we check "while time" to avoid exceptions. - # See tickets #99 and #180 for more information. 
- while time: - now = time.time() - # Must make a copy of expirations so it doesn't change size - # during iteration - for expiration_time, objects in copyitems(self.expirations): - if expiration_time <= now: - for obj_size, uri, sel_header_values in objects: - try: - del self.store[uri][tuple(sel_header_values)] - self.tot_expires += 1 - self.cursize -= obj_size - except KeyError: - # the key may have been deleted elsewhere - pass - del self.expirations[expiration_time] - time.sleep(self.expire_freq) - - def get(self): - """Return the current variant if in the cache, else None.""" - request = cherrypy.serving.request - self.tot_gets += 1 - - uri = cherrypy.url(qs=request.query_string) - uricache = self.store.get(uri) - if uricache is None: - return None - - header_values = [request.headers.get(h, '') - for h in uricache.selecting_headers] - variant = uricache.wait(key=tuple(sorted(header_values)), - timeout=self.antistampede_timeout, - debug=self.debug) - if variant is not None: - self.tot_hist += 1 - return variant - - def put(self, variant, size): - """Store the current variant in the cache.""" - request = cherrypy.serving.request - response = cherrypy.serving.response - - uri = cherrypy.url(qs=request.query_string) - uricache = self.store.get(uri) - if uricache is None: - uricache = AntiStampedeCache() - uricache.selecting_headers = [ - e.value for e in response.headers.elements('Vary')] - self.store[uri] = uricache - - if len(self.store) < self.maxobjects: - total_size = self.cursize + size - - # checks if there's space for the object - if (size < self.maxobj_size and total_size < self.maxsize): - # add to the expirations list - expiration_time = response.time + self.delay - bucket = self.expirations.setdefault(expiration_time, []) - bucket.append((size, uri, uricache.selecting_headers)) - - # add to the cache - header_values = [request.headers.get(h, '') - for h in uricache.selecting_headers] - uricache[tuple(sorted(header_values))] = variant - self.tot_puts 
+= 1 - self.cursize = total_size - - def delete(self): - """Remove ALL cached variants of the current resource.""" - uri = cherrypy.url(qs=cherrypy.serving.request.query_string) - self.store.pop(uri, None) - - -def get(invalid_methods=("POST", "PUT", "DELETE"), debug=False, **kwargs): - """Try to obtain cached output. If fresh enough, raise HTTPError(304). - - If POST, PUT, or DELETE: - * invalidates (deletes) any cached response for this resource - * sets request.cached = False - * sets request.cacheable = False - - else if a cached copy exists: - * sets request.cached = True - * sets request.cacheable = False - * sets response.headers to the cached values - * checks the cached Last-Modified response header against the - current If-(Un)Modified-Since request headers; raises 304 - if necessary. - * sets response.status and response.body to the cached values - * returns True - - otherwise: - * sets request.cached = False - * sets request.cacheable = True - * returns False - """ - request = cherrypy.serving.request - response = cherrypy.serving.response - - if not hasattr(cherrypy, "_cache"): - # Make a process-wide Cache object. - cherrypy._cache = kwargs.pop("cache_class", MemoryCache)() - - # Take all remaining kwargs and set them on the Cache object. - for k, v in kwargs.items(): - setattr(cherrypy._cache, k, v) - cherrypy._cache.debug = debug - - # POST, PUT, DELETE should invalidate (delete) the cached copy. - # See http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.10. 
- if request.method in invalid_methods: - if debug: - cherrypy.log('request.method %r in invalid_methods %r' % - (request.method, invalid_methods), 'TOOLS.CACHING') - cherrypy._cache.delete() - request.cached = False - request.cacheable = False - return False - - if 'no-cache' in [e.value for e in request.headers.elements('Pragma')]: - request.cached = False - request.cacheable = True - return False - - cache_data = cherrypy._cache.get() - request.cached = bool(cache_data) - request.cacheable = not request.cached - if request.cached: - # Serve the cached copy. - max_age = cherrypy._cache.delay - for v in [e.value for e in request.headers.elements('Cache-Control')]: - atoms = v.split('=', 1) - directive = atoms.pop(0) - if directive == 'max-age': - if len(atoms) != 1 or not atoms[0].isdigit(): - raise cherrypy.HTTPError( - 400, "Invalid Cache-Control header") - max_age = int(atoms[0]) - break - elif directive == 'no-cache': - if debug: - cherrypy.log( - 'Ignoring cache due to Cache-Control: no-cache', - 'TOOLS.CACHING') - request.cached = False - request.cacheable = True - return False - - if debug: - cherrypy.log('Reading response from cache', 'TOOLS.CACHING') - s, h, b, create_time = cache_data - age = int(response.time - create_time) - if (age > max_age): - if debug: - cherrypy.log('Ignoring cache due to age > %d' % max_age, - 'TOOLS.CACHING') - request.cached = False - request.cacheable = True - return False - - # Copy the response headers. See - # https://bitbucket.org/cherrypy/cherrypy/issue/721. - response.headers = rh = httputil.HeaderMap() - for k in h: - dict.__setitem__(rh, k, dict.__getitem__(h, k)) - - # Add the required Age header - response.headers["Age"] = str(age) - - try: - # Note that validate_since depends on a Last-Modified header; - # this was put into the cached copy, and should have been - # resurrected just above (response.headers = cache_data[1]). 
- cptools.validate_since() - except cherrypy.HTTPRedirect: - x = sys.exc_info()[1] - if x.status == 304: - cherrypy._cache.tot_non_modified += 1 - raise - - # serve it & get out from the request - response.status = s - response.body = b - else: - if debug: - cherrypy.log('request is not cached', 'TOOLS.CACHING') - return request.cached - - -def tee_output(): - """Tee response output to cache storage. Internal.""" - # Used by CachingTool by attaching to request.hooks - - request = cherrypy.serving.request - if 'no-store' in request.headers.values('Cache-Control'): - return - - def tee(body): - """Tee response.body into a list.""" - if ('no-cache' in response.headers.values('Pragma') or - 'no-store' in response.headers.values('Cache-Control')): - for chunk in body: - yield chunk - return - - output = [] - for chunk in body: - output.append(chunk) - yield chunk - - # save the cache data - body = ntob('').join(output) - cherrypy._cache.put((response.status, response.headers or {}, - body, response.time), len(body)) - - response = cherrypy.serving.response - response.body = tee(response.body) - - -def expires(secs=0, force=False, debug=False): - """Tool for influencing cache mechanisms using the 'Expires' header. - - secs - Must be either an int or a datetime.timedelta, and indicates the - number of seconds between response.time and when the response should - expire. The 'Expires' header will be set to response.time + secs. - If secs is zero, the 'Expires' header is set one year in the past, and - the following "cache prevention" headers are also set: - - * Pragma: no-cache - * Cache-Control': no-cache, must-revalidate - - force - If False, the following headers are checked: - - * Etag - * Last-Modified - * Age - * Expires - - If any are already present, none of the above response headers are set. 
- - """ - - response = cherrypy.serving.response - headers = response.headers - - cacheable = False - if not force: - # some header names that indicate that the response can be cached - for indicator in ('Etag', 'Last-Modified', 'Age', 'Expires'): - if indicator in headers: - cacheable = True - break - - if not cacheable and not force: - if debug: - cherrypy.log('request is not cacheable', 'TOOLS.EXPIRES') - else: - if debug: - cherrypy.log('request is cacheable', 'TOOLS.EXPIRES') - if isinstance(secs, datetime.timedelta): - secs = (86400 * secs.days) + secs.seconds - - if secs == 0: - if force or ("Pragma" not in headers): - headers["Pragma"] = "no-cache" - if cherrypy.serving.request.protocol >= (1, 1): - if force or "Cache-Control" not in headers: - headers["Cache-Control"] = "no-cache, must-revalidate" - # Set an explicit Expires date in the past. - expiry = httputil.HTTPDate(1169942400.0) - else: - expiry = httputil.HTTPDate(response.time + secs) - if force or "Expires" not in headers: - headers["Expires"] = expiry diff --git a/libs_crutch/contrib/cherrypy/lib/covercp.py b/libs_crutch/contrib/cherrypy/lib/covercp.py deleted file mode 100644 index a74ec34..0000000 --- a/libs_crutch/contrib/cherrypy/lib/covercp.py +++ /dev/null @@ -1,387 +0,0 @@ -"""Code-coverage tools for CherryPy. - -To use this module, or the coverage tools in the test suite, -you need to download 'coverage.py', either Gareth Rees' `original -implementation `_ -or Ned Batchelder's `enhanced version: -`_ - -To turn on coverage tracing, use the following code:: - - cherrypy.engine.subscribe('start', covercp.start) - -DO NOT subscribe anything on the 'start_thread' channel, as previously -recommended. Calling start once in the main thread should be sufficient -to start coverage on all threads. Calling start again in each thread -effectively clears any coverage data gathered up to that point. - -Run your code, then use the ``covercp.serve()`` function to browse the -results in a web browser. 
If you run this module from the command line, -it will call ``serve()`` for you. -""" - -import re -import sys -import cgi -from cherrypy._cpcompat import quote_plus -import os -import os.path -localFile = os.path.join(os.path.dirname(__file__), "coverage.cache") - -the_coverage = None -try: - from coverage import coverage - the_coverage = coverage(data_file=localFile) - - def start(): - the_coverage.start() -except ImportError: - # Setting the_coverage to None will raise errors - # that need to be trapped downstream. - the_coverage = None - - import warnings - warnings.warn( - "No code coverage will be performed; " - "coverage.py could not be imported.") - - def start(): - pass -start.priority = 20 - -TEMPLATE_MENU = """ - - CherryPy Coverage Menu - - - -

CherryPy Coverage

""" - -TEMPLATE_FORM = """ -
-
- - Show percentages -
- Hide files over - %%
- Exclude files matching
- -
- - -
-
""" - -TEMPLATE_FRAMESET = """ -CherryPy coverage data - - - - - -""" - -TEMPLATE_COVERAGE = """ - - Coverage for %(name)s - - - -

%(name)s

-

%(fullpath)s

-

Coverage: %(pc)s%%

""" - -TEMPLATE_LOC_COVERED = """ - %s  - %s -\n""" -TEMPLATE_LOC_NOT_COVERED = """ - %s  - %s -\n""" -TEMPLATE_LOC_EXCLUDED = """ - %s  - %s -\n""" - -TEMPLATE_ITEM = ( - "%s%s%s\n" -) - - -def _percent(statements, missing): - s = len(statements) - e = s - len(missing) - if s > 0: - return int(round(100.0 * e / s)) - return 0 - - -def _show_branch(root, base, path, pct=0, showpct=False, exclude="", - coverage=the_coverage): - - # Show the directory name and any of our children - dirs = [k for k, v in root.items() if v] - dirs.sort() - for name in dirs: - newpath = os.path.join(path, name) - - if newpath.lower().startswith(base): - relpath = newpath[len(base):] - yield "| " * relpath.count(os.sep) - yield ( - "%s\n" % - (newpath, quote_plus(exclude), name) - ) - - for chunk in _show_branch( - root[name], base, newpath, pct, showpct, - exclude, coverage=coverage - ): - yield chunk - - # Now list the files - if path.lower().startswith(base): - relpath = path[len(base):] - files = [k for k, v in root.items() if not v] - files.sort() - for name in files: - newpath = os.path.join(path, name) - - pc_str = "" - if showpct: - try: - _, statements, _, missing, _ = coverage.analysis2(newpath) - except: - # Yes, we really want to pass on all errors. 
- pass - else: - pc = _percent(statements, missing) - pc_str = ("%3d%% " % pc).replace(' ', ' ') - if pc < float(pct) or pc == -1: - pc_str = "%s" % pc_str - else: - pc_str = "%s" % pc_str - - yield TEMPLATE_ITEM % ("| " * (relpath.count(os.sep) + 1), - pc_str, newpath, name) - - -def _skip_file(path, exclude): - if exclude: - return bool(re.search(exclude, path)) - - -def _graft(path, tree): - d = tree - - p = path - atoms = [] - while True: - p, tail = os.path.split(p) - if not tail: - break - atoms.append(tail) - atoms.append(p) - if p != "/": - atoms.append("/") - - atoms.reverse() - for node in atoms: - if node: - d = d.setdefault(node, {}) - - -def get_tree(base, exclude, coverage=the_coverage): - """Return covered module names as a nested dict.""" - tree = {} - runs = coverage.data.executed_files() - for path in runs: - if not _skip_file(path, exclude) and not os.path.isdir(path): - _graft(path, tree) - return tree - - -class CoverStats(object): - - def __init__(self, coverage, root=None): - self.coverage = coverage - if root is None: - # Guess initial depth. Files outside this path will not be - # reachable from the web interface. - import cherrypy - root = os.path.dirname(cherrypy.__file__) - self.root = root - - def index(self): - return TEMPLATE_FRAMESET % self.root.lower() - index.exposed = True - - def menu(self, base="/", pct="50", showpct="", - exclude=r'python\d\.\d|test|tut\d|tutorial'): - - # The coverage module uses all-lower-case names. - base = base.lower().rstrip(os.sep) - - yield TEMPLATE_MENU - yield TEMPLATE_FORM % locals() - - # Start by showing links for parent paths - yield "
" - path = "" - atoms = base.split(os.sep) - atoms.pop() - for atom in atoms: - path += atom + os.sep - yield ("%s %s" - % (path, quote_plus(exclude), atom, os.sep)) - yield "
" - - yield "
" - - # Then display the tree - tree = get_tree(base, exclude, self.coverage) - if not tree: - yield "

No modules covered.

" - else: - for chunk in _show_branch(tree, base, "/", pct, - showpct == 'checked', exclude, - coverage=self.coverage): - yield chunk - - yield "
" - yield "" - menu.exposed = True - - def annotated_file(self, filename, statements, excluded, missing): - source = open(filename, 'r') - buffer = [] - for lineno, line in enumerate(source.readlines()): - lineno += 1 - line = line.strip("\n\r") - empty_the_buffer = True - if lineno in excluded: - template = TEMPLATE_LOC_EXCLUDED - elif lineno in missing: - template = TEMPLATE_LOC_NOT_COVERED - elif lineno in statements: - template = TEMPLATE_LOC_COVERED - else: - empty_the_buffer = False - buffer.append((lineno, line)) - if empty_the_buffer: - for lno, pastline in buffer: - yield template % (lno, cgi.escape(pastline)) - buffer = [] - yield template % (lineno, cgi.escape(line)) - - def report(self, name): - filename, statements, excluded, missing, _ = self.coverage.analysis2( - name) - pc = _percent(statements, missing) - yield TEMPLATE_COVERAGE % dict(name=os.path.basename(name), - fullpath=name, - pc=pc) - yield '\n' - for line in self.annotated_file(filename, statements, excluded, - missing): - yield line - yield '
' - yield '' - yield '' - report.exposed = True - - -def serve(path=localFile, port=8080, root=None): - if coverage is None: - raise ImportError("The coverage module could not be imported.") - from coverage import coverage - cov = coverage(data_file=path) - cov.load() - - import cherrypy - cherrypy.config.update({'server.socket_port': int(port), - 'server.thread_pool': 10, - 'environment': "production", - }) - cherrypy.quickstart(CoverStats(cov, root)) - -if __name__ == "__main__": - serve(*tuple(sys.argv[1:])) diff --git a/libs_crutch/contrib/cherrypy/lib/cpstats.py b/libs_crutch/contrib/cherrypy/lib/cpstats.py deleted file mode 100644 index a8661a1..0000000 --- a/libs_crutch/contrib/cherrypy/lib/cpstats.py +++ /dev/null @@ -1,687 +0,0 @@ -"""CPStats, a package for collecting and reporting on program statistics. - -Overview -======== - -Statistics about program operation are an invaluable monitoring and debugging -tool. Unfortunately, the gathering and reporting of these critical values is -usually ad-hoc. This package aims to add a centralized place for gathering -statistical performance data, a structure for recording that data which -provides for extrapolation of that data into more useful information, -and a method of serving that data to both human investigators and -monitoring software. Let's examine each of those in more detail. - -Data Gathering --------------- - -Just as Python's `logging` module provides a common importable for gathering -and sending messages, performance statistics would benefit from a similar -common mechanism, and one that does *not* require each package which wishes -to collect stats to import a third-party module. Therefore, we choose to -re-use the `logging` module by adding a `statistics` object to it. - -That `logging.statistics` object is a nested dict. It is not a custom class, -because that would: - - 1. require libraries and applications to import a third-party module in - order to participate - 2. 
inhibit innovation in extrapolation approaches and in reporting tools, and - 3. be slow. - -There are, however, some specifications regarding the structure of the dict.:: - - { - +----"SQLAlchemy": { - | "Inserts": 4389745, - | "Inserts per Second": - | lambda s: s["Inserts"] / (time() - s["Start"]), - | C +---"Table Statistics": { - | o | "widgets": {-----------+ - N | l | "Rows": 1.3M, | Record - a | l | "Inserts": 400, | - m | e | },---------------------+ - e | c | "froobles": { - s | t | "Rows": 7845, - p | i | "Inserts": 0, - a | o | }, - c | n +---}, - e | "Slow Queries": - | [{"Query": "SELECT * FROM widgets;", - | "Processing Time": 47.840923343, - | }, - | ], - +----}, - } - -The `logging.statistics` dict has four levels. The topmost level is nothing -more than a set of names to introduce modularity, usually along the lines of -package names. If the SQLAlchemy project wanted to participate, for example, -it might populate the item `logging.statistics['SQLAlchemy']`, whose value -would be a second-layer dict we call a "namespace". Namespaces help multiple -packages to avoid collisions over key names, and make reports easier to read, -to boot. The maintainers of SQLAlchemy should feel free to use more than one -namespace if needed (such as 'SQLAlchemy ORM'). Note that there are no case -or other syntax constraints on the namespace names; they should be chosen -to be maximally readable by humans (neither too short nor too long). - -Each namespace, then, is a dict of named statistical values, such as -'Requests/sec' or 'Uptime'. You should choose names which will look -good on a report: spaces and capitalization are just fine. - -In addition to scalars, values in a namespace MAY be a (third-layer) -dict, or a list, called a "collection". 
For example, the CherryPy -:class:`StatsTool` keeps track of what each request is doing (or has most -recently done) in a 'Requests' collection, where each key is a thread ID; each -value in the subdict MUST be a fourth dict (whew!) of statistical data about -each thread. We call each subdict in the collection a "record". Similarly, -the :class:`StatsTool` also keeps a list of slow queries, where each record -contains data about each slow query, in order. - -Values in a namespace or record may also be functions, which brings us to: - -Extrapolation -------------- - -The collection of statistical data needs to be fast, as close to unnoticeable -as possible to the host program. That requires us to minimize I/O, for example, -but in Python it also means we need to minimize function calls. So when you -are designing your namespace and record values, try to insert the most basic -scalar values you already have on hand. - -When it comes time to report on the gathered data, however, we usually have -much more freedom in what we can calculate. Therefore, whenever reporting -tools (like the provided :class:`StatsPage` CherryPy class) fetch the contents -of `logging.statistics` for reporting, they first call -`extrapolate_statistics` (passing the whole `statistics` dict as the only -argument). This makes a deep copy of the statistics dict so that the -reporting tool can both iterate over it and even change it without harming -the original. But it also expands any functions in the dict by calling them. -For example, you might have a 'Current Time' entry in the namespace with the -value "lambda scope: time.time()". The "scope" parameter is the current -namespace dict (or record, if we're currently expanding one of those -instead), allowing you access to existing static entries. If you're truly -evil, you can even modify more than one entry at a time. 
- -However, don't try to calculate an entry and then use its value in further -extrapolations; the order in which the functions are called is not guaranteed. -This can lead to a certain amount of duplicated work (or a redesign of your -schema), but that's better than complicating the spec. - -After the whole thing has been extrapolated, it's time for: - -Reporting ---------- - -The :class:`StatsPage` class grabs the `logging.statistics` dict, extrapolates -it all, and then transforms it to HTML for easy viewing. Each namespace gets -its own header and attribute table, plus an extra table for each collection. -This is NOT part of the statistics specification; other tools can format how -they like. - -You can control which columns are output and how they are formatted by updating -StatsPage.formatting, which is a dict that mirrors the keys and nesting of -`logging.statistics`. The difference is that, instead of data values, it has -formatting values. Use None for a given key to indicate to the StatsPage that a -given column should not be output. Use a string with formatting -(such as '%.3f') to interpolate the value(s), or use a callable (such as -lambda v: v.isoformat()) for more advanced formatting. Any entry which is not -mentioned in the formatting dict is output unchanged. - -Monitoring ----------- - -Although the HTML output takes pains to assign unique id's to each with -statistical data, you're probably better off fetching /cpstats/data, which -outputs the whole (extrapolated) `logging.statistics` dict in JSON format. -That is probably easier to parse, and doesn't have any formatting controls, -so you get the "original" data in a consistently-serialized format. -Note: there's no treatment yet for datetime objects. Try time.time() instead -for now if you can. Nagios will probably thank you. 
- -Turning Collection Off ----------------------- - -It is recommended each namespace have an "Enabled" item which, if False, -stops collection (but not reporting) of statistical data. Applications -SHOULD provide controls to pause and resume collection by setting these -entries to False or True, if present. - - -Usage -===== - -To collect statistics on CherryPy applications:: - - from cherrypy.lib import cpstats - appconfig['/']['tools.cpstats.on'] = True - -To collect statistics on your own code:: - - import logging - # Initialize the repository - if not hasattr(logging, 'statistics'): logging.statistics = {} - # Initialize my namespace - mystats = logging.statistics.setdefault('My Stuff', {}) - # Initialize my namespace's scalars and collections - mystats.update({ - 'Enabled': True, - 'Start Time': time.time(), - 'Important Events': 0, - 'Events/Second': lambda s: ( - (s['Important Events'] / (time.time() - s['Start Time']))), - }) - ... - for event in events: - ... - # Collect stats - if mystats.get('Enabled', False): - mystats['Important Events'] += 1 - -To report statistics:: - - root.cpstats = cpstats.StatsPage() - -To format statistics reports:: - - See 'Reporting', above. 
- -""" - -# ------------------------------- Statistics -------------------------------- # - -import logging -if not hasattr(logging, 'statistics'): - logging.statistics = {} - - -def extrapolate_statistics(scope): - """Return an extrapolated copy of the given scope.""" - c = {} - for k, v in list(scope.items()): - if isinstance(v, dict): - v = extrapolate_statistics(v) - elif isinstance(v, (list, tuple)): - v = [extrapolate_statistics(record) for record in v] - elif hasattr(v, '__call__'): - v = v(scope) - c[k] = v - return c - - -# -------------------- CherryPy Applications Statistics --------------------- # - -import threading -import time - -import cherrypy - -appstats = logging.statistics.setdefault('CherryPy Applications', {}) -appstats.update({ - 'Enabled': True, - 'Bytes Read/Request': lambda s: ( - s['Total Requests'] and - (s['Total Bytes Read'] / float(s['Total Requests'])) or - 0.0 - ), - 'Bytes Read/Second': lambda s: s['Total Bytes Read'] / s['Uptime'](s), - 'Bytes Written/Request': lambda s: ( - s['Total Requests'] and - (s['Total Bytes Written'] / float(s['Total Requests'])) or - 0.0 - ), - 'Bytes Written/Second': lambda s: ( - s['Total Bytes Written'] / s['Uptime'](s) - ), - 'Current Time': lambda s: time.time(), - 'Current Requests': 0, - 'Requests/Second': lambda s: float(s['Total Requests']) / s['Uptime'](s), - 'Server Version': cherrypy.__version__, - 'Start Time': time.time(), - 'Total Bytes Read': 0, - 'Total Bytes Written': 0, - 'Total Requests': 0, - 'Total Time': 0, - 'Uptime': lambda s: time.time() - s['Start Time'], - 'Requests': {}, -}) - -proc_time = lambda s: time.time() - s['Start Time'] - - -class ByteCountWrapper(object): - - """Wraps a file-like object, counting the number of bytes read.""" - - def __init__(self, rfile): - self.rfile = rfile - self.bytes_read = 0 - - def read(self, size=-1): - data = self.rfile.read(size) - self.bytes_read += len(data) - return data - - def readline(self, size=-1): - data = 
self.rfile.readline(size) - self.bytes_read += len(data) - return data - - def readlines(self, sizehint=0): - # Shamelessly stolen from StringIO - total = 0 - lines = [] - line = self.readline() - while line: - lines.append(line) - total += len(line) - if 0 < sizehint <= total: - break - line = self.readline() - return lines - - def close(self): - self.rfile.close() - - def __iter__(self): - return self - - def next(self): - data = self.rfile.next() - self.bytes_read += len(data) - return data - - -average_uriset_time = lambda s: s['Count'] and (s['Sum'] / s['Count']) or 0 - - -class StatsTool(cherrypy.Tool): - - """Record various information about the current request.""" - - def __init__(self): - cherrypy.Tool.__init__(self, 'on_end_request', self.record_stop) - - def _setup(self): - """Hook this tool into cherrypy.request. - - The standard CherryPy request object will automatically call this - method when the tool is "turned on" in config. - """ - if appstats.get('Enabled', False): - cherrypy.Tool._setup(self) - self.record_start() - - def record_start(self): - """Record the beginning of a request.""" - request = cherrypy.serving.request - if not hasattr(request.rfile, 'bytes_read'): - request.rfile = ByteCountWrapper(request.rfile) - request.body.fp = request.rfile - - r = request.remote - - appstats['Current Requests'] += 1 - appstats['Total Requests'] += 1 - appstats['Requests'][threading._get_ident()] = { - 'Bytes Read': None, - 'Bytes Written': None, - # Use a lambda so the ip gets updated by tools.proxy later - 'Client': lambda s: '%s:%s' % (r.ip, r.port), - 'End Time': None, - 'Processing Time': proc_time, - 'Request-Line': request.request_line, - 'Response Status': None, - 'Start Time': time.time(), - } - - def record_stop( - self, uriset=None, slow_queries=1.0, slow_queries_count=100, - debug=False, **kwargs): - """Record the end of a request.""" - resp = cherrypy.serving.response - w = appstats['Requests'][threading._get_ident()] - - r = 
cherrypy.request.rfile.bytes_read - w['Bytes Read'] = r - appstats['Total Bytes Read'] += r - - if resp.stream: - w['Bytes Written'] = 'chunked' - else: - cl = int(resp.headers.get('Content-Length', 0)) - w['Bytes Written'] = cl - appstats['Total Bytes Written'] += cl - - w['Response Status'] = getattr( - resp, 'output_status', None) or resp.status - - w['End Time'] = time.time() - p = w['End Time'] - w['Start Time'] - w['Processing Time'] = p - appstats['Total Time'] += p - - appstats['Current Requests'] -= 1 - - if debug: - cherrypy.log('Stats recorded: %s' % repr(w), 'TOOLS.CPSTATS') - - if uriset: - rs = appstats.setdefault('URI Set Tracking', {}) - r = rs.setdefault(uriset, { - 'Min': None, 'Max': None, 'Count': 0, 'Sum': 0, - 'Avg': average_uriset_time}) - if r['Min'] is None or p < r['Min']: - r['Min'] = p - if r['Max'] is None or p > r['Max']: - r['Max'] = p - r['Count'] += 1 - r['Sum'] += p - - if slow_queries and p > slow_queries: - sq = appstats.setdefault('Slow Queries', []) - sq.append(w.copy()) - if len(sq) > slow_queries_count: - sq.pop(0) - - -import cherrypy -cherrypy.tools.cpstats = StatsTool() - - -# ---------------------- CherryPy Statistics Reporting ---------------------- # - -import os -thisdir = os.path.abspath(os.path.dirname(__file__)) - -try: - import json -except ImportError: - try: - import simplejson as json - except ImportError: - json = None - - -missing = object() - -locale_date = lambda v: time.strftime('%c', time.gmtime(v)) -iso_format = lambda v: time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(v)) - - -def pause_resume(ns): - def _pause_resume(enabled): - pause_disabled = '' - resume_disabled = '' - if enabled: - resume_disabled = 'disabled="disabled" ' - else: - pause_disabled = 'disabled="disabled" ' - return """ -
- - -
-
- - -
- """ % (ns, pause_disabled, ns, resume_disabled) - return _pause_resume - - -class StatsPage(object): - - formatting = { - 'CherryPy Applications': { - 'Enabled': pause_resume('CherryPy Applications'), - 'Bytes Read/Request': '%.3f', - 'Bytes Read/Second': '%.3f', - 'Bytes Written/Request': '%.3f', - 'Bytes Written/Second': '%.3f', - 'Current Time': iso_format, - 'Requests/Second': '%.3f', - 'Start Time': iso_format, - 'Total Time': '%.3f', - 'Uptime': '%.3f', - 'Slow Queries': { - 'End Time': None, - 'Processing Time': '%.3f', - 'Start Time': iso_format, - }, - 'URI Set Tracking': { - 'Avg': '%.3f', - 'Max': '%.3f', - 'Min': '%.3f', - 'Sum': '%.3f', - }, - 'Requests': { - 'Bytes Read': '%s', - 'Bytes Written': '%s', - 'End Time': None, - 'Processing Time': '%.3f', - 'Start Time': None, - }, - }, - 'CherryPy WSGIServer': { - 'Enabled': pause_resume('CherryPy WSGIServer'), - 'Connections/second': '%.3f', - 'Start time': iso_format, - }, - } - - def index(self): - # Transform the raw data into pretty output for HTML - yield """ - - - Statistics - - - -""" - for title, scalars, collections in self.get_namespaces(): - yield """ -

%s

- - - -""" % title - for i, (key, value) in enumerate(scalars): - colnum = i % 3 - if colnum == 0: - yield """ - """ - yield ( - """ - """ % - vars() - ) - if colnum == 2: - yield """ - """ - - if colnum == 0: - yield """ - - - """ - elif colnum == 1: - yield """ - - """ - yield """ - -
%(key)s%(value)s
""" - - for subtitle, headers, subrows in collections: - yield """ -

%s

- - - """ % subtitle - for key in headers: - yield """ - """ % key - yield """ - - - """ - for subrow in subrows: - yield """ - """ - for value in subrow: - yield """ - """ % value - yield """ - """ - yield """ - -
%s
%s
""" - yield """ - - -""" - index.exposed = True - - def get_namespaces(self): - """Yield (title, scalars, collections) for each namespace.""" - s = extrapolate_statistics(logging.statistics) - for title, ns in sorted(s.items()): - scalars = [] - collections = [] - ns_fmt = self.formatting.get(title, {}) - for k, v in sorted(ns.items()): - fmt = ns_fmt.get(k, {}) - if isinstance(v, dict): - headers, subrows = self.get_dict_collection(v, fmt) - collections.append((k, ['ID'] + headers, subrows)) - elif isinstance(v, (list, tuple)): - headers, subrows = self.get_list_collection(v, fmt) - collections.append((k, headers, subrows)) - else: - format = ns_fmt.get(k, missing) - if format is None: - # Don't output this column. - continue - if hasattr(format, '__call__'): - v = format(v) - elif format is not missing: - v = format % v - scalars.append((k, v)) - yield title, scalars, collections - - def get_dict_collection(self, v, formatting): - """Return ([headers], [rows]) for the given collection.""" - # E.g., the 'Requests' dict. - headers = [] - for record in v.itervalues(): - for k3 in record: - format = formatting.get(k3, missing) - if format is None: - # Don't output this column. - continue - if k3 not in headers: - headers.append(k3) - headers.sort() - - subrows = [] - for k2, record in sorted(v.items()): - subrow = [k2] - for k3 in headers: - v3 = record.get(k3, '') - format = formatting.get(k3, missing) - if format is None: - # Don't output this column. - continue - if hasattr(format, '__call__'): - v3 = format(v3) - elif format is not missing: - v3 = format % v3 - subrow.append(v3) - subrows.append(subrow) - - return headers, subrows - - def get_list_collection(self, v, formatting): - """Return ([headers], [subrows]) for the given collection.""" - # E.g., the 'Slow Queries' list. - headers = [] - for record in v: - for k3 in record: - format = formatting.get(k3, missing) - if format is None: - # Don't output this column. 
- continue - if k3 not in headers: - headers.append(k3) - headers.sort() - - subrows = [] - for record in v: - subrow = [] - for k3 in headers: - v3 = record.get(k3, '') - format = formatting.get(k3, missing) - if format is None: - # Don't output this column. - continue - if hasattr(format, '__call__'): - v3 = format(v3) - elif format is not missing: - v3 = format % v3 - subrow.append(v3) - subrows.append(subrow) - - return headers, subrows - - if json is not None: - def data(self): - s = extrapolate_statistics(logging.statistics) - cherrypy.response.headers['Content-Type'] = 'application/json' - return json.dumps(s, sort_keys=True, indent=4) - data.exposed = True - - def pause(self, namespace): - logging.statistics.get(namespace, {})['Enabled'] = False - raise cherrypy.HTTPRedirect('./') - pause.exposed = True - pause.cp_config = {'tools.allow.on': True, - 'tools.allow.methods': ['POST']} - - def resume(self, namespace): - logging.statistics.get(namespace, {})['Enabled'] = True - raise cherrypy.HTTPRedirect('./') - resume.exposed = True - resume.cp_config = {'tools.allow.on': True, - 'tools.allow.methods': ['POST']} diff --git a/libs_crutch/contrib/cherrypy/lib/cptools.py b/libs_crutch/contrib/cherrypy/lib/cptools.py deleted file mode 100644 index f376282..0000000 --- a/libs_crutch/contrib/cherrypy/lib/cptools.py +++ /dev/null @@ -1,630 +0,0 @@ -"""Functions for builtin CherryPy tools.""" - -import logging -import re - -import cherrypy -from cherrypy._cpcompat import basestring, md5, set, unicodestr -from cherrypy.lib import httputil as _httputil -from cherrypy.lib import is_iterator - - -# Conditional HTTP request support # - -def validate_etags(autotags=False, debug=False): - """Validate the current ETag against If-Match, If-None-Match headers. - - If autotags is True, an ETag response-header value will be provided - from an MD5 hash of the response body (unless some other code has - already provided an ETag header). 
If False (the default), the ETag - will not be automatic. - - WARNING: the autotags feature is not designed for URL's which allow - methods other than GET. For example, if a POST to the same URL returns - no content, the automatic ETag will be incorrect, breaking a fundamental - use for entity tags in a possibly destructive fashion. Likewise, if you - raise 304 Not Modified, the response body will be empty, the ETag hash - will be incorrect, and your application will break. - See :rfc:`2616` Section 14.24. - """ - response = cherrypy.serving.response - - # Guard against being run twice. - if hasattr(response, "ETag"): - return - - status, reason, msg = _httputil.valid_status(response.status) - - etag = response.headers.get('ETag') - - # Automatic ETag generation. See warning in docstring. - if etag: - if debug: - cherrypy.log('ETag already set: %s' % etag, 'TOOLS.ETAGS') - elif not autotags: - if debug: - cherrypy.log('Autotags off', 'TOOLS.ETAGS') - elif status != 200: - if debug: - cherrypy.log('Status not 200', 'TOOLS.ETAGS') - else: - etag = response.collapse_body() - etag = '"%s"' % md5(etag).hexdigest() - if debug: - cherrypy.log('Setting ETag: %s' % etag, 'TOOLS.ETAGS') - response.headers['ETag'] = etag - - response.ETag = etag - - # "If the request would, without the If-Match header field, result in - # anything other than a 2xx or 412 status, then the If-Match header - # MUST be ignored." 
- if debug: - cherrypy.log('Status: %s' % status, 'TOOLS.ETAGS') - if status >= 200 and status <= 299: - request = cherrypy.serving.request - - conditions = request.headers.elements('If-Match') or [] - conditions = [str(x) for x in conditions] - if debug: - cherrypy.log('If-Match conditions: %s' % repr(conditions), - 'TOOLS.ETAGS') - if conditions and not (conditions == ["*"] or etag in conditions): - raise cherrypy.HTTPError(412, "If-Match failed: ETag %r did " - "not match %r" % (etag, conditions)) - - conditions = request.headers.elements('If-None-Match') or [] - conditions = [str(x) for x in conditions] - if debug: - cherrypy.log('If-None-Match conditions: %s' % repr(conditions), - 'TOOLS.ETAGS') - if conditions == ["*"] or etag in conditions: - if debug: - cherrypy.log('request.method: %s' % - request.method, 'TOOLS.ETAGS') - if request.method in ("GET", "HEAD"): - raise cherrypy.HTTPRedirect([], 304) - else: - raise cherrypy.HTTPError(412, "If-None-Match failed: ETag %r " - "matched %r" % (etag, conditions)) - - -def validate_since(): - """Validate the current Last-Modified against If-Modified-Since headers. - - If no code has set the Last-Modified response header, then no validation - will be performed. 
- """ - response = cherrypy.serving.response - lastmod = response.headers.get('Last-Modified') - if lastmod: - status, reason, msg = _httputil.valid_status(response.status) - - request = cherrypy.serving.request - - since = request.headers.get('If-Unmodified-Since') - if since and since != lastmod: - if (status >= 200 and status <= 299) or status == 412: - raise cherrypy.HTTPError(412) - - since = request.headers.get('If-Modified-Since') - if since and since == lastmod: - if (status >= 200 and status <= 299) or status == 304: - if request.method in ("GET", "HEAD"): - raise cherrypy.HTTPRedirect([], 304) - else: - raise cherrypy.HTTPError(412) - - -# Tool code # - -def allow(methods=None, debug=False): - """Raise 405 if request.method not in methods (default ['GET', 'HEAD']). - - The given methods are case-insensitive, and may be in any order. - If only one method is allowed, you may supply a single string; - if more than one, supply a list of strings. - - Regardless of whether the current method is allowed or not, this - also emits an 'Allow' response header, containing the given methods. - """ - if not isinstance(methods, (tuple, list)): - methods = [methods] - methods = [m.upper() for m in methods if m] - if not methods: - methods = ['GET', 'HEAD'] - elif 'GET' in methods and 'HEAD' not in methods: - methods.append('HEAD') - - cherrypy.response.headers['Allow'] = ', '.join(methods) - if cherrypy.request.method not in methods: - if debug: - cherrypy.log('request.method %r not in methods %r' % - (cherrypy.request.method, methods), 'TOOLS.ALLOW') - raise cherrypy.HTTPError(405) - else: - if debug: - cherrypy.log('request.method %r in methods %r' % - (cherrypy.request.method, methods), 'TOOLS.ALLOW') - - -def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For', - scheme='X-Forwarded-Proto', debug=False): - """Change the base URL (scheme://host[:port][/path]). - - For running a CP server behind Apache, lighttpd, or other HTTP server. 
- - For Apache and lighttpd, you should leave the 'local' argument at the - default value of 'X-Forwarded-Host'. For Squid, you probably want to set - tools.proxy.local = 'Origin'. - - If you want the new request.base to include path info (not just the host), - you must explicitly set base to the full base path, and ALSO set 'local' - to '', so that the X-Forwarded-Host request header (which never includes - path info) does not override it. Regardless, the value for 'base' MUST - NOT end in a slash. - - cherrypy.request.remote.ip (the IP address of the client) will be - rewritten if the header specified by the 'remote' arg is valid. - By default, 'remote' is set to 'X-Forwarded-For'. If you do not - want to rewrite remote.ip, set the 'remote' arg to an empty string. - """ - - request = cherrypy.serving.request - - if scheme: - s = request.headers.get(scheme, None) - if debug: - cherrypy.log('Testing scheme %r:%r' % (scheme, s), 'TOOLS.PROXY') - if s == 'on' and 'ssl' in scheme.lower(): - # This handles e.g. 
webfaction's 'X-Forwarded-Ssl: on' header - scheme = 'https' - else: - # This is for lighttpd/pound/Mongrel's 'X-Forwarded-Proto: https' - scheme = s - if not scheme: - scheme = request.base[:request.base.find("://")] - - if local: - lbase = request.headers.get(local, None) - if debug: - cherrypy.log('Testing local %r:%r' % (local, lbase), 'TOOLS.PROXY') - if lbase is not None: - base = lbase.split(',')[0] - if not base: - port = request.local.port - if port == 80: - base = '127.0.0.1' - else: - base = '127.0.0.1:%s' % port - - if base.find("://") == -1: - # add http:// or https:// if needed - base = scheme + "://" + base - - request.base = base - - if remote: - xff = request.headers.get(remote) - if debug: - cherrypy.log('Testing remote %r:%r' % (remote, xff), 'TOOLS.PROXY') - if xff: - if remote == 'X-Forwarded-For': - #Bug #1268 - xff = xff.split(',')[0].strip() - request.remote.ip = xff - - -def ignore_headers(headers=('Range',), debug=False): - """Delete request headers whose field names are included in 'headers'. - - This is a useful tool for working behind certain HTTP servers; - for example, Apache duplicates the work that CP does for 'Range' - headers, and will doubly-truncate the response. - """ - request = cherrypy.serving.request - for name in headers: - if name in request.headers: - if debug: - cherrypy.log('Ignoring request header %r' % name, - 'TOOLS.IGNORE_HEADERS') - del request.headers[name] - - -def response_headers(headers=None, debug=False): - """Set headers on the response.""" - if debug: - cherrypy.log('Setting response headers: %s' % repr(headers), - 'TOOLS.RESPONSE_HEADERS') - for name, value in (headers or []): - cherrypy.serving.response.headers[name] = value -response_headers.failsafe = True - - -def referer(pattern, accept=True, accept_missing=False, error=403, - message='Forbidden Referer header.', debug=False): - """Raise HTTPError if Referer header does/does not match the given pattern. 
- - pattern - A regular expression pattern to test against the Referer. - - accept - If True, the Referer must match the pattern; if False, - the Referer must NOT match the pattern. - - accept_missing - If True, permit requests with no Referer header. - - error - The HTTP error code to return to the client on failure. - - message - A string to include in the response body on failure. - - """ - try: - ref = cherrypy.serving.request.headers['Referer'] - match = bool(re.match(pattern, ref)) - if debug: - cherrypy.log('Referer %r matches %r' % (ref, pattern), - 'TOOLS.REFERER') - if accept == match: - return - except KeyError: - if debug: - cherrypy.log('No Referer header', 'TOOLS.REFERER') - if accept_missing: - return - - raise cherrypy.HTTPError(error, message) - - -class SessionAuth(object): - - """Assert that the user is logged in.""" - - session_key = "username" - debug = False - - def check_username_and_password(self, username, password): - pass - - def anonymous(self): - """Provide a temporary user name for anonymous users.""" - pass - - def on_login(self, username): - pass - - def on_logout(self, username): - pass - - def on_check(self, username): - pass - - def login_screen(self, from_page='..', username='', error_msg='', - **kwargs): - return (unicodestr(""" -Message: %(error_msg)s -
- Login: -
- Password: -
- -
- -
-""") % vars()).encode("utf-8") - - def do_login(self, username, password, from_page='..', **kwargs): - """Login. May raise redirect, or return True if request handled.""" - response = cherrypy.serving.response - error_msg = self.check_username_and_password(username, password) - if error_msg: - body = self.login_screen(from_page, username, error_msg) - response.body = body - if "Content-Length" in response.headers: - # Delete Content-Length header so finalize() recalcs it. - del response.headers["Content-Length"] - return True - else: - cherrypy.serving.request.login = username - cherrypy.session[self.session_key] = username - self.on_login(username) - raise cherrypy.HTTPRedirect(from_page or "/") - - def do_logout(self, from_page='..', **kwargs): - """Logout. May raise redirect, or return True if request handled.""" - sess = cherrypy.session - username = sess.get(self.session_key) - sess[self.session_key] = None - if username: - cherrypy.serving.request.login = None - self.on_logout(username) - raise cherrypy.HTTPRedirect(from_page) - - def do_check(self): - """Assert username. Raise redirect, or return True if request handled. - """ - sess = cherrypy.session - request = cherrypy.serving.request - response = cherrypy.serving.response - - username = sess.get(self.session_key) - if not username: - sess[self.session_key] = username = self.anonymous() - self._debug_message('No session[username], trying anonymous') - if not username: - url = cherrypy.url(qs=request.query_string) - self._debug_message( - 'No username, routing to login_screen with from_page %(url)r', - locals(), - ) - response.body = self.login_screen(url) - if "Content-Length" in response.headers: - # Delete Content-Length header so finalize() recalcs it. 
- del response.headers["Content-Length"] - return True - self._debug_message('Setting request.login to %(username)r', locals()) - request.login = username - self.on_check(username) - - def _debug_message(self, template, context={}): - if not self.debug: - return - cherrypy.log(template % context, 'TOOLS.SESSAUTH') - - def run(self): - request = cherrypy.serving.request - response = cherrypy.serving.response - - path = request.path_info - if path.endswith('login_screen'): - self._debug_message('routing %(path)r to login_screen', locals()) - response.body = self.login_screen() - return True - elif path.endswith('do_login'): - if request.method != 'POST': - response.headers['Allow'] = "POST" - self._debug_message('do_login requires POST') - raise cherrypy.HTTPError(405) - self._debug_message('routing %(path)r to do_login', locals()) - return self.do_login(**request.params) - elif path.endswith('do_logout'): - if request.method != 'POST': - response.headers['Allow'] = "POST" - raise cherrypy.HTTPError(405) - self._debug_message('routing %(path)r to do_logout', locals()) - return self.do_logout(**request.params) - else: - self._debug_message('No special path, running do_check') - return self.do_check() - - -def session_auth(**kwargs): - sa = SessionAuth() - for k, v in kwargs.items(): - setattr(sa, k, v) - return sa.run() -session_auth.__doc__ = """Session authentication hook. 
- -Any attribute of the SessionAuth class may be overridden via a keyword arg -to this function: - -""" + "\n".join(["%s: %s" % (k, type(getattr(SessionAuth, k)).__name__) - for k in dir(SessionAuth) if not k.startswith("__")]) - - -def log_traceback(severity=logging.ERROR, debug=False): - """Write the last error's traceback to the cherrypy error log.""" - cherrypy.log("", "HTTP", severity=severity, traceback=True) - - -def log_request_headers(debug=False): - """Write request headers to the cherrypy error log.""" - h = [" %s: %s" % (k, v) for k, v in cherrypy.serving.request.header_list] - cherrypy.log('\nRequest Headers:\n' + '\n'.join(h), "HTTP") - - -def log_hooks(debug=False): - """Write request.hooks to the cherrypy error log.""" - request = cherrypy.serving.request - - msg = [] - # Sort by the standard points if possible. - from cherrypy import _cprequest - points = _cprequest.hookpoints - for k in request.hooks.keys(): - if k not in points: - points.append(k) - - for k in points: - msg.append(" %s:" % k) - v = request.hooks.get(k, []) - v.sort() - for h in v: - msg.append(" %r" % h) - cherrypy.log('\nRequest Hooks for ' + cherrypy.url() + - ':\n' + '\n'.join(msg), "HTTP") - - -def redirect(url='', internal=True, debug=False): - """Raise InternalRedirect or HTTPRedirect to the given url.""" - if debug: - cherrypy.log('Redirecting %sto: %s' % - ({True: 'internal ', False: ''}[internal], url), - 'TOOLS.REDIRECT') - if internal: - raise cherrypy.InternalRedirect(url) - else: - raise cherrypy.HTTPRedirect(url) - - -def trailing_slash(missing=True, extra=False, status=None, debug=False): - """Redirect if path_info has (missing|extra) trailing slash.""" - request = cherrypy.serving.request - pi = request.path_info - - if debug: - cherrypy.log('is_index: %r, missing: %r, extra: %r, path_info: %r' % - (request.is_index, missing, extra, pi), - 'TOOLS.TRAILING_SLASH') - if request.is_index is True: - if missing: - if not pi.endswith('/'): - new_url = cherrypy.url(pi + 
'/', request.query_string) - raise cherrypy.HTTPRedirect(new_url, status=status or 301) - elif request.is_index is False: - if extra: - # If pi == '/', don't redirect to ''! - if pi.endswith('/') and pi != '/': - new_url = cherrypy.url(pi[:-1], request.query_string) - raise cherrypy.HTTPRedirect(new_url, status=status or 301) - - -def flatten(debug=False): - """Wrap response.body in a generator that recursively iterates over body. - - This allows cherrypy.response.body to consist of 'nested generators'; - that is, a set of generators that yield generators. - """ - def flattener(input): - numchunks = 0 - for x in input: - if not is_iterator(x): - numchunks += 1 - yield x - else: - for y in flattener(x): - numchunks += 1 - yield y - if debug: - cherrypy.log('Flattened %d chunks' % numchunks, 'TOOLS.FLATTEN') - response = cherrypy.serving.response - response.body = flattener(response.body) - - -def accept(media=None, debug=False): - """Return the client's preferred media-type (from the given Content-Types). - - If 'media' is None (the default), no test will be performed. - - If 'media' is provided, it should be the Content-Type value (as a string) - or values (as a list or tuple of strings) which the current resource - can emit. The client's acceptable media ranges (as declared in the - Accept request header) will be matched in order to these Content-Type - values; the first such string is returned. That is, the return value - will always be one of the strings provided in the 'media' arg (or None - if 'media' is None). - - If no match is found, then HTTPError 406 (Not Acceptable) is raised. - Note that most web browsers send */* as a (low-quality) acceptable - media range, which should match any Content-Type. In addition, "...if - no Accept header field is present, then it is assumed that the client - accepts all media types." - - Matching types are checked in order of client preference first, - and then in the order of the given 'media' values. 
- - Note that this function does not honor accept-params (other than "q"). - """ - if not media: - return - if isinstance(media, basestring): - media = [media] - request = cherrypy.serving.request - - # Parse the Accept request header, and try to match one - # of the requested media-ranges (in order of preference). - ranges = request.headers.elements('Accept') - if not ranges: - # Any media type is acceptable. - if debug: - cherrypy.log('No Accept header elements', 'TOOLS.ACCEPT') - return media[0] - else: - # Note that 'ranges' is sorted in order of preference - for element in ranges: - if element.qvalue > 0: - if element.value == "*/*": - # Matches any type or subtype - if debug: - cherrypy.log('Match due to */*', 'TOOLS.ACCEPT') - return media[0] - elif element.value.endswith("/*"): - # Matches any subtype - mtype = element.value[:-1] # Keep the slash - for m in media: - if m.startswith(mtype): - if debug: - cherrypy.log('Match due to %s' % element.value, - 'TOOLS.ACCEPT') - return m - else: - # Matches exact value - if element.value in media: - if debug: - cherrypy.log('Match due to %s' % element.value, - 'TOOLS.ACCEPT') - return element.value - - # No suitable media-range found. - ah = request.headers.get('Accept') - if ah is None: - msg = "Your client did not send an Accept header." - else: - msg = "Your client sent this Accept header: %s." % ah - msg += (" But this resource only emits these media types: %s." 
% - ", ".join(media)) - raise cherrypy.HTTPError(406, msg) - - -class MonitoredHeaderMap(_httputil.HeaderMap): - - def __init__(self): - self.accessed_headers = set() - - def __getitem__(self, key): - self.accessed_headers.add(key) - return _httputil.HeaderMap.__getitem__(self, key) - - def __contains__(self, key): - self.accessed_headers.add(key) - return _httputil.HeaderMap.__contains__(self, key) - - def get(self, key, default=None): - self.accessed_headers.add(key) - return _httputil.HeaderMap.get(self, key, default=default) - - if hasattr({}, 'has_key'): - # Python 2 - def has_key(self, key): - self.accessed_headers.add(key) - return _httputil.HeaderMap.has_key(self, key) - - -def autovary(ignore=None, debug=False): - """Auto-populate the Vary response header based on request.header access. - """ - request = cherrypy.serving.request - - req_h = request.headers - request.headers = MonitoredHeaderMap() - request.headers.update(req_h) - if ignore is None: - ignore = set(['Content-Disposition', 'Content-Length', 'Content-Type']) - - def set_response_header(): - resp_h = cherrypy.serving.response.headers - v = set([e.value for e in resp_h.elements('Vary')]) - if debug: - cherrypy.log( - 'Accessed headers: %s' % request.headers.accessed_headers, - 'TOOLS.AUTOVARY') - v = v.union(request.headers.accessed_headers) - v = v.difference(ignore) - v = list(v) - v.sort() - resp_h['Vary'] = ', '.join(v) - request.hooks.attach('before_finalize', set_response_header, 95) diff --git a/libs_crutch/contrib/cherrypy/lib/encoding.py b/libs_crutch/contrib/cherrypy/lib/encoding.py deleted file mode 100644 index a4c2cbd..0000000 --- a/libs_crutch/contrib/cherrypy/lib/encoding.py +++ /dev/null @@ -1,421 +0,0 @@ -import struct -import time - -import cherrypy -from cherrypy._cpcompat import basestring, BytesIO, ntob, set, unicodestr -from cherrypy.lib import file_generator -from cherrypy.lib import is_closable_iterator -from cherrypy.lib import set_vary_header - - -def 
decode(encoding=None, default_encoding='utf-8'): - """Replace or extend the list of charsets used to decode a request entity. - - Either argument may be a single string or a list of strings. - - encoding - If not None, restricts the set of charsets attempted while decoding - a request entity to the given set (even if a different charset is - given in the Content-Type request header). - - default_encoding - Only in effect if the 'encoding' argument is not given. - If given, the set of charsets attempted while decoding a request - entity is *extended* with the given value(s). - - """ - body = cherrypy.request.body - if encoding is not None: - if not isinstance(encoding, list): - encoding = [encoding] - body.attempt_charsets = encoding - elif default_encoding: - if not isinstance(default_encoding, list): - default_encoding = [default_encoding] - body.attempt_charsets = body.attempt_charsets + default_encoding - -class UTF8StreamEncoder: - def __init__(self, iterator): - self._iterator = iterator - - def __iter__(self): - return self - - def next(self): - return self.__next__() - - def __next__(self): - res = next(self._iterator) - if isinstance(res, unicodestr): - res = res.encode('utf-8') - return res - - def close(self): - if is_closable_iterator(self._iterator): - self._iterator.close() - - def __getattr__(self, attr): - if attr.startswith('__'): - raise AttributeError(self, attr) - return getattr(self._iterator, attr) - - -class ResponseEncoder: - - default_encoding = 'utf-8' - failmsg = "Response body could not be encoded with %r." 
- encoding = None - errors = 'strict' - text_only = True - add_charset = True - debug = False - - def __init__(self, **kwargs): - for k, v in kwargs.items(): - setattr(self, k, v) - - self.attempted_charsets = set() - request = cherrypy.serving.request - if request.handler is not None: - # Replace request.handler with self - if self.debug: - cherrypy.log('Replacing request.handler', 'TOOLS.ENCODE') - self.oldhandler = request.handler - request.handler = self - - def encode_stream(self, encoding): - """Encode a streaming response body. - - Use a generator wrapper, and just pray it works as the stream is - being written out. - """ - if encoding in self.attempted_charsets: - return False - self.attempted_charsets.add(encoding) - - def encoder(body): - for chunk in body: - if isinstance(chunk, unicodestr): - chunk = chunk.encode(encoding, self.errors) - yield chunk - self.body = encoder(self.body) - return True - - def encode_string(self, encoding): - """Encode a buffered response body.""" - if encoding in self.attempted_charsets: - return False - self.attempted_charsets.add(encoding) - body = [] - for chunk in self.body: - if isinstance(chunk, unicodestr): - try: - chunk = chunk.encode(encoding, self.errors) - except (LookupError, UnicodeError): - return False - body.append(chunk) - self.body = body - return True - - def find_acceptable_charset(self): - request = cherrypy.serving.request - response = cherrypy.serving.response - - if self.debug: - cherrypy.log('response.stream %r' % - response.stream, 'TOOLS.ENCODE') - if response.stream: - encoder = self.encode_stream - else: - encoder = self.encode_string - if "Content-Length" in response.headers: - # Delete Content-Length header so finalize() recalcs it. - # Encoded strings may be of different lengths from their - # unicode equivalents, and even from each other. 
For example: - # >>> t = u"\u7007\u3040" - # >>> len(t) - # 2 - # >>> len(t.encode("UTF-8")) - # 6 - # >>> len(t.encode("utf7")) - # 8 - del response.headers["Content-Length"] - - # Parse the Accept-Charset request header, and try to provide one - # of the requested charsets (in order of user preference). - encs = request.headers.elements('Accept-Charset') - charsets = [enc.value.lower() for enc in encs] - if self.debug: - cherrypy.log('charsets %s' % repr(charsets), 'TOOLS.ENCODE') - - if self.encoding is not None: - # If specified, force this encoding to be used, or fail. - encoding = self.encoding.lower() - if self.debug: - cherrypy.log('Specified encoding %r' % - encoding, 'TOOLS.ENCODE') - if (not charsets) or "*" in charsets or encoding in charsets: - if self.debug: - cherrypy.log('Attempting encoding %r' % - encoding, 'TOOLS.ENCODE') - if encoder(encoding): - return encoding - else: - if not encs: - if self.debug: - cherrypy.log('Attempting default encoding %r' % - self.default_encoding, 'TOOLS.ENCODE') - # Any character-set is acceptable. - if encoder(self.default_encoding): - return self.default_encoding - else: - raise cherrypy.HTTPError(500, self.failmsg % - self.default_encoding) - else: - for element in encs: - if element.qvalue > 0: - if element.value == "*": - # Matches any charset. Try our default. - if self.debug: - cherrypy.log('Attempting default encoding due ' - 'to %r' % element, 'TOOLS.ENCODE') - if encoder(self.default_encoding): - return self.default_encoding - else: - encoding = element.value - if self.debug: - cherrypy.log('Attempting encoding %s (qvalue >' - '0)' % element, 'TOOLS.ENCODE') - if encoder(encoding): - return encoding - - if "*" not in charsets: - # If no "*" is present in an Accept-Charset field, then all - # character sets not explicitly mentioned get a quality - # value of 0, except for ISO-8859-1, which gets a quality - # value of 1 if not explicitly mentioned. 
- iso = 'iso-8859-1' - if iso not in charsets: - if self.debug: - cherrypy.log('Attempting ISO-8859-1 encoding', - 'TOOLS.ENCODE') - if encoder(iso): - return iso - - # No suitable encoding found. - ac = request.headers.get('Accept-Charset') - if ac is None: - msg = "Your client did not send an Accept-Charset header." - else: - msg = "Your client sent this Accept-Charset header: %s." % ac - _charsets = ", ".join(sorted(self.attempted_charsets)) - msg += " We tried these charsets: %s." % (_charsets,) - raise cherrypy.HTTPError(406, msg) - - def __call__(self, *args, **kwargs): - response = cherrypy.serving.response - self.body = self.oldhandler(*args, **kwargs) - - if isinstance(self.body, basestring): - # strings get wrapped in a list because iterating over a single - # item list is much faster than iterating over every character - # in a long string. - if self.body: - self.body = [self.body] - else: - # [''] doesn't evaluate to False, so replace it with []. - self.body = [] - elif hasattr(self.body, 'read'): - self.body = file_generator(self.body) - elif self.body is None: - self.body = [] - - ct = response.headers.elements("Content-Type") - if self.debug: - cherrypy.log('Content-Type: %r' % [str(h) - for h in ct], 'TOOLS.ENCODE') - if ct and self.add_charset: - ct = ct[0] - if self.text_only: - if ct.value.lower().startswith("text/"): - if self.debug: - cherrypy.log( - 'Content-Type %s starts with "text/"' % ct, - 'TOOLS.ENCODE') - do_find = True - else: - if self.debug: - cherrypy.log('Not finding because Content-Type %s ' - 'does not start with "text/"' % ct, - 'TOOLS.ENCODE') - do_find = False - else: - if self.debug: - cherrypy.log('Finding because not text_only', - 'TOOLS.ENCODE') - do_find = True - - if do_find: - # Set "charset=..." 
param on response Content-Type header - ct.params['charset'] = self.find_acceptable_charset() - if self.debug: - cherrypy.log('Setting Content-Type %s' % ct, - 'TOOLS.ENCODE') - response.headers["Content-Type"] = str(ct) - - return self.body - -# GZIP - - -def compress(body, compress_level): - """Compress 'body' at the given compress_level.""" - import zlib - - # See http://www.gzip.org/zlib/rfc-gzip.html - yield ntob('\x1f\x8b') # ID1 and ID2: gzip marker - yield ntob('\x08') # CM: compression method - yield ntob('\x00') # FLG: none set - # MTIME: 4 bytes - yield struct.pack(" 0 is present - * The 'identity' value is given with a qvalue > 0. - - """ - request = cherrypy.serving.request - response = cherrypy.serving.response - - set_vary_header(response, "Accept-Encoding") - - if not response.body: - # Response body is empty (might be a 304 for instance) - if debug: - cherrypy.log('No response body', context='TOOLS.GZIP') - return - - # If returning cached content (which should already have been gzipped), - # don't re-zip. - if getattr(request, "cached", False): - if debug: - cherrypy.log('Not gzipping cached response', context='TOOLS.GZIP') - return - - acceptable = request.headers.elements('Accept-Encoding') - if not acceptable: - # If no Accept-Encoding field is present in a request, - # the server MAY assume that the client will accept any - # content coding. In this case, if "identity" is one of - # the available content-codings, then the server SHOULD use - # the "identity" content-coding, unless it has additional - # information that a different content-coding is meaningful - # to the client. 
- if debug: - cherrypy.log('No Accept-Encoding', context='TOOLS.GZIP') - return - - ct = response.headers.get('Content-Type', '').split(';')[0] - for coding in acceptable: - if coding.value == 'identity' and coding.qvalue != 0: - if debug: - cherrypy.log('Non-zero identity qvalue: %s' % coding, - context='TOOLS.GZIP') - return - if coding.value in ('gzip', 'x-gzip'): - if coding.qvalue == 0: - if debug: - cherrypy.log('Zero gzip qvalue: %s' % coding, - context='TOOLS.GZIP') - return - - if ct not in mime_types: - # If the list of provided mime-types contains tokens - # such as 'text/*' or 'application/*+xml', - # we go through them and find the most appropriate one - # based on the given content-type. - # The pattern matching is only caring about the most - # common cases, as stated above, and doesn't support - # for extra parameters. - found = False - if '/' in ct: - ct_media_type, ct_sub_type = ct.split('/') - for mime_type in mime_types: - if '/' in mime_type: - media_type, sub_type = mime_type.split('/') - if ct_media_type == media_type: - if sub_type == '*': - found = True - break - elif '+' in sub_type and '+' in ct_sub_type: - ct_left, ct_right = ct_sub_type.split('+') - left, right = sub_type.split('+') - if left == '*' and ct_right == right: - found = True - break - - if not found: - if debug: - cherrypy.log('Content-Type %s not in mime_types %r' % - (ct, mime_types), context='TOOLS.GZIP') - return - - if debug: - cherrypy.log('Gzipping', context='TOOLS.GZIP') - # Return a generator that compresses the page - response.headers['Content-Encoding'] = 'gzip' - response.body = compress(response.body, compress_level) - if "Content-Length" in response.headers: - # Delete Content-Length header so finalize() recalcs it. 
- del response.headers["Content-Length"] - - return - - if debug: - cherrypy.log('No acceptable encoding found.', context='GZIP') - cherrypy.HTTPError(406, "identity, gzip").set_response() diff --git a/libs_crutch/contrib/cherrypy/lib/gctools.py b/libs_crutch/contrib/cherrypy/lib/gctools.py deleted file mode 100644 index 4b616c5..0000000 --- a/libs_crutch/contrib/cherrypy/lib/gctools.py +++ /dev/null @@ -1,217 +0,0 @@ -import gc -import inspect -import os -import sys -import time - -try: - import objgraph -except ImportError: - objgraph = None - -import cherrypy -from cherrypy import _cprequest, _cpwsgi -from cherrypy.process.plugins import SimplePlugin - - -class ReferrerTree(object): - - """An object which gathers all referrers of an object to a given depth.""" - - peek_length = 40 - - def __init__(self, ignore=None, maxdepth=2, maxparents=10): - self.ignore = ignore or [] - self.ignore.append(inspect.currentframe().f_back) - self.maxdepth = maxdepth - self.maxparents = maxparents - - def ascend(self, obj, depth=1): - """Return a nested list containing referrers of the given object.""" - depth += 1 - parents = [] - - # Gather all referrers in one step to minimize - # cascading references due to repr() logic. - refs = gc.get_referrers(obj) - self.ignore.append(refs) - if len(refs) > self.maxparents: - return [("[%s referrers]" % len(refs), [])] - - try: - ascendcode = self.ascend.__code__ - except AttributeError: - ascendcode = self.ascend.im_func.func_code - for parent in refs: - if inspect.isframe(parent) and parent.f_code is ascendcode: - continue - if parent in self.ignore: - continue - if depth <= self.maxdepth: - parents.append((parent, self.ascend(parent, depth))) - else: - parents.append((parent, [])) - - return parents - - def peek(self, s): - """Return s, restricted to a sane length.""" - if len(s) > (self.peek_length + 3): - half = self.peek_length // 2 - return s[:half] + '...' 
+ s[-half:] - else: - return s - - def _format(self, obj, descend=True): - """Return a string representation of a single object.""" - if inspect.isframe(obj): - filename, lineno, func, context, index = inspect.getframeinfo(obj) - return "" % func - - if not descend: - return self.peek(repr(obj)) - - if isinstance(obj, dict): - return "{" + ", ".join(["%s: %s" % (self._format(k, descend=False), - self._format(v, descend=False)) - for k, v in obj.items()]) + "}" - elif isinstance(obj, list): - return "[" + ", ".join([self._format(item, descend=False) - for item in obj]) + "]" - elif isinstance(obj, tuple): - return "(" + ", ".join([self._format(item, descend=False) - for item in obj]) + ")" - - r = self.peek(repr(obj)) - if isinstance(obj, (str, int, float)): - return r - return "%s: %s" % (type(obj), r) - - def format(self, tree): - """Return a list of string reprs from a nested list of referrers.""" - output = [] - - def ascend(branch, depth=1): - for parent, grandparents in branch: - output.append((" " * depth) + self._format(parent)) - if grandparents: - ascend(grandparents, depth + 1) - ascend(tree) - return output - - -def get_instances(cls): - return [x for x in gc.get_objects() if isinstance(x, cls)] - - -class RequestCounter(SimplePlugin): - - def start(self): - self.count = 0 - - def before_request(self): - self.count += 1 - - def after_request(self): - self.count -= 1 -request_counter = RequestCounter(cherrypy.engine) -request_counter.subscribe() - - -def get_context(obj): - if isinstance(obj, _cprequest.Request): - return "path=%s;stage=%s" % (obj.path_info, obj.stage) - elif isinstance(obj, _cprequest.Response): - return "status=%s" % obj.status - elif isinstance(obj, _cpwsgi.AppResponse): - return "PATH_INFO=%s" % obj.environ.get('PATH_INFO', '') - elif hasattr(obj, "tb_lineno"): - return "tb_lineno=%s" % obj.tb_lineno - return "" - - -class GCRoot(object): - - """A CherryPy page handler for testing reference leaks.""" - - classes = [ - 
(_cprequest.Request, 2, 2, - "Should be 1 in this request thread and 1 in the main thread."), - (_cprequest.Response, 2, 2, - "Should be 1 in this request thread and 1 in the main thread."), - (_cpwsgi.AppResponse, 1, 1, - "Should be 1 in this request thread only."), - ] - - def index(self): - return "Hello, world!" - index.exposed = True - - def stats(self): - output = ["Statistics:"] - - for trial in range(10): - if request_counter.count > 0: - break - time.sleep(0.5) - else: - output.append("\nNot all requests closed properly.") - - # gc_collect isn't perfectly synchronous, because it may - # break reference cycles that then take time to fully - # finalize. Call it thrice and hope for the best. - gc.collect() - gc.collect() - unreachable = gc.collect() - if unreachable: - if objgraph is not None: - final = objgraph.by_type('Nondestructible') - if final: - objgraph.show_backrefs(final, filename='finalizers.png') - - trash = {} - for x in gc.garbage: - trash[type(x)] = trash.get(type(x), 0) + 1 - if trash: - output.insert(0, "\n%s unreachable objects:" % unreachable) - trash = [(v, k) for k, v in trash.items()] - trash.sort() - for pair in trash: - output.append(" " + repr(pair)) - - # Check declared classes to verify uncollected instances. - # These don't have to be part of a cycle; they can be - # any objects that have unanticipated referrers that keep - # them from being collected. - allobjs = {} - for cls, minobj, maxobj, msg in self.classes: - allobjs[cls] = get_instances(cls) - - for cls, minobj, maxobj, msg in self.classes: - objs = allobjs[cls] - lenobj = len(objs) - if lenobj < minobj or lenobj > maxobj: - if minobj == maxobj: - output.append( - "\nExpected %s %r references, got %s." % - (minobj, cls, lenobj)) - else: - output.append( - "\nExpected %s to %s %r references, got %s." 
% - (minobj, maxobj, cls, lenobj)) - - for obj in objs: - if objgraph is not None: - ig = [id(objs), id(inspect.currentframe())] - fname = "graph_%s_%s.png" % (cls.__name__, id(obj)) - objgraph.show_backrefs( - obj, extra_ignore=ig, max_depth=4, too_many=20, - filename=fname, extra_info=get_context) - output.append("\nReferrers for %s (refcount=%s):" % - (repr(obj), sys.getrefcount(obj))) - t = ReferrerTree(ignore=[objs], maxdepth=3) - tree = t.ascend(obj) - output.extend(t.format(tree)) - - return "\n".join(output) - stats.exposed = True diff --git a/libs_crutch/contrib/cherrypy/lib/http.py b/libs_crutch/contrib/cherrypy/lib/http.py deleted file mode 100644 index 12043ad..0000000 --- a/libs_crutch/contrib/cherrypy/lib/http.py +++ /dev/null @@ -1,6 +0,0 @@ -import warnings -warnings.warn('cherrypy.lib.http has been deprecated and will be removed ' - 'in CherryPy 3.3 use cherrypy.lib.httputil instead.', - DeprecationWarning) - -from cherrypy.lib.httputil import * diff --git a/libs_crutch/contrib/cherrypy/lib/httpauth.py b/libs_crutch/contrib/cherrypy/lib/httpauth.py deleted file mode 100644 index 0897ea2..0000000 --- a/libs_crutch/contrib/cherrypy/lib/httpauth.py +++ /dev/null @@ -1,371 +0,0 @@ -""" -This module defines functions to implement HTTP Digest Authentication -(:rfc:`2617`). -This has full compliance with 'Digest' and 'Basic' authentication methods. In -'Digest' it supports both MD5 and MD5-sess algorithms. - -Usage: - First use 'doAuth' to request the client authentication for a - certain resource. You should send an httplib.UNAUTHORIZED response to the - client so he knows he has to authenticate itself. - - Then use 'parseAuthorization' to retrieve the 'auth_map' used in - 'checkResponse'. - - To use 'checkResponse' you must have already verified the password - associated with the 'username' key in 'auth_map' dict. Then you use the - 'checkResponse' function to verify if the password matches the one sent - by the client. 
- -SUPPORTED_ALGORITHM - list of supported 'Digest' algorithms -SUPPORTED_QOP - list of supported 'Digest' 'qop'. -""" -__version__ = 1, 0, 1 -__author__ = "Tiago Cogumbreiro " -__credits__ = """ - Peter van Kampen for its recipe which implement most of Digest - authentication: - http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/302378 -""" - -__license__ = """ -Copyright (c) 2005, Tiago Cogumbreiro -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of Sylvain Hellegouarch nor the names of his - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-""" - -__all__ = ("digestAuth", "basicAuth", "doAuth", "checkResponse", - "parseAuthorization", "SUPPORTED_ALGORITHM", "md5SessionKey", - "calculateNonce", "SUPPORTED_QOP") - -########################################################################## -import time -from cherrypy._cpcompat import base64_decode, ntob, md5 -from cherrypy._cpcompat import parse_http_list, parse_keqv_list - -MD5 = "MD5" -MD5_SESS = "MD5-sess" -AUTH = "auth" -AUTH_INT = "auth-int" - -SUPPORTED_ALGORITHM = (MD5, MD5_SESS) -SUPPORTED_QOP = (AUTH, AUTH_INT) - -########################################################################## -# doAuth -# -DIGEST_AUTH_ENCODERS = { - MD5: lambda val: md5(ntob(val)).hexdigest(), - MD5_SESS: lambda val: md5(ntob(val)).hexdigest(), - # SHA: lambda val: sha.new(ntob(val)).hexdigest (), -} - - -def calculateNonce(realm, algorithm=MD5): - """This is an auxaliary function that calculates 'nonce' value. It is used - to handle sessions.""" - - global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS - assert algorithm in SUPPORTED_ALGORITHM - - try: - encoder = DIGEST_AUTH_ENCODERS[algorithm] - except KeyError: - raise NotImplementedError("The chosen algorithm (%s) does not have " - "an implementation yet" % algorithm) - - return encoder("%d:%s" % (time.time(), realm)) - - -def digestAuth(realm, algorithm=MD5, nonce=None, qop=AUTH): - """Challenges the client for a Digest authentication.""" - global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS, SUPPORTED_QOP - assert algorithm in SUPPORTED_ALGORITHM - assert qop in SUPPORTED_QOP - - if nonce is None: - nonce = calculateNonce(realm, algorithm) - - return 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"' % ( - realm, nonce, algorithm, qop - ) - - -def basicAuth(realm): - """Challengenes the client for a Basic authentication.""" - assert '"' not in realm, "Realms cannot contain the \" (quote) character." 
- - return 'Basic realm="%s"' % realm - - -def doAuth(realm): - """'doAuth' function returns the challenge string b giving priority over - Digest and fallback to Basic authentication when the browser doesn't - support the first one. - - This should be set in the HTTP header under the key 'WWW-Authenticate'.""" - - return digestAuth(realm) + " " + basicAuth(realm) - - -########################################################################## -# Parse authorization parameters -# -def _parseDigestAuthorization(auth_params): - # Convert the auth params to a dict - items = parse_http_list(auth_params) - params = parse_keqv_list(items) - - # Now validate the params - - # Check for required parameters - required = ["username", "realm", "nonce", "uri", "response"] - for k in required: - if k not in params: - return None - - # If qop is sent then cnonce and nc MUST be present - if "qop" in params and not ("cnonce" in params - and "nc" in params): - return None - - # If qop is not sent, neither cnonce nor nc can be present - if ("cnonce" in params or "nc" in params) and \ - "qop" not in params: - return None - - return params - - -def _parseBasicAuthorization(auth_params): - username, password = base64_decode(auth_params).split(":", 1) - return {"username": username, "password": password} - -AUTH_SCHEMES = { - "basic": _parseBasicAuthorization, - "digest": _parseDigestAuthorization, -} - - -def parseAuthorization(credentials): - """parseAuthorization will convert the value of the 'Authorization' key in - the HTTP header to a map itself. If the parsing fails 'None' is returned. 
- """ - - global AUTH_SCHEMES - - auth_scheme, auth_params = credentials.split(" ", 1) - auth_scheme = auth_scheme.lower() - - parser = AUTH_SCHEMES[auth_scheme] - params = parser(auth_params) - - if params is None: - return - - assert "auth_scheme" not in params - params["auth_scheme"] = auth_scheme - return params - - -########################################################################## -# Check provided response for a valid password -# -def md5SessionKey(params, password): - """ - If the "algorithm" directive's value is "MD5-sess", then A1 - [the session key] is calculated only once - on the first request by the - client following receipt of a WWW-Authenticate challenge from the server. - - This creates a 'session key' for the authentication of subsequent - requests and responses which is different for each "authentication - session", thus limiting the amount of material hashed with any one - key. - - Because the server need only use the hash of the user - credentials in order to create the A1 value, this construction could - be used in conjunction with a third party authentication service so - that the web server would not need the actual password value. The - specification of such a protocol is beyond the scope of this - specification. 
-""" - - keys = ("username", "realm", "nonce", "cnonce") - params_copy = {} - for key in keys: - params_copy[key] = params[key] - - params_copy["algorithm"] = MD5_SESS - return _A1(params_copy, password) - - -def _A1(params, password): - algorithm = params.get("algorithm", MD5) - H = DIGEST_AUTH_ENCODERS[algorithm] - - if algorithm == MD5: - # If the "algorithm" directive's value is "MD5" or is - # unspecified, then A1 is: - # A1 = unq(username-value) ":" unq(realm-value) ":" passwd - return "%s:%s:%s" % (params["username"], params["realm"], password) - - elif algorithm == MD5_SESS: - - # This is A1 if qop is set - # A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd ) - # ":" unq(nonce-value) ":" unq(cnonce-value) - h_a1 = H("%s:%s:%s" % (params["username"], params["realm"], password)) - return "%s:%s:%s" % (h_a1, params["nonce"], params["cnonce"]) - - -def _A2(params, method, kwargs): - # If the "qop" directive's value is "auth" or is unspecified, then A2 is: - # A2 = Method ":" digest-uri-value - - qop = params.get("qop", "auth") - if qop == "auth": - return method + ":" + params["uri"] - elif qop == "auth-int": - # If the "qop" value is "auth-int", then A2 is: - # A2 = Method ":" digest-uri-value ":" H(entity-body) - entity_body = kwargs.get("entity_body", "") - H = kwargs["H"] - - return "%s:%s:%s" % ( - method, - params["uri"], - H(entity_body) - ) - - else: - raise NotImplementedError("The 'qop' method is unknown: %s" % qop) - - -def _computeDigestResponse(auth_map, password, method="GET", A1=None, - **kwargs): - """ - Generates a response respecting the algorithm defined in RFC 2617 - """ - params = auth_map - - algorithm = params.get("algorithm", MD5) - - H = DIGEST_AUTH_ENCODERS[algorithm] - KD = lambda secret, data: H(secret + ":" + data) - - qop = params.get("qop", None) - - H_A2 = H(_A2(params, method, kwargs)) - - if algorithm == MD5_SESS and A1 is not None: - H_A1 = H(A1) - else: - H_A1 = H(_A1(params, password)) - - if qop in ("auth", 
"auth-int"): - # If the "qop" value is "auth" or "auth-int": - # request-digest = <"> < KD ( H(A1), unq(nonce-value) - # ":" nc-value - # ":" unq(cnonce-value) - # ":" unq(qop-value) - # ":" H(A2) - # ) <"> - request = "%s:%s:%s:%s:%s" % ( - params["nonce"], - params["nc"], - params["cnonce"], - params["qop"], - H_A2, - ) - elif qop is None: - # If the "qop" directive is not present (this construction is - # for compatibility with RFC 2069): - # request-digest = - # <"> < KD ( H(A1), unq(nonce-value) ":" H(A2) ) > <"> - request = "%s:%s" % (params["nonce"], H_A2) - - return KD(H_A1, request) - - -def _checkDigestResponse(auth_map, password, method="GET", A1=None, **kwargs): - """This function is used to verify the response given by the client when - he tries to authenticate. - Optional arguments: - entity_body - when 'qop' is set to 'auth-int' you MUST provide the - raw data you are going to send to the client (usually the - HTML page. - request_uri - the uri from the request line compared with the 'uri' - directive of the authorization map. They must represent - the same resource (unused at this time). 
- """ - - if auth_map['realm'] != kwargs.get('realm', None): - return False - - response = _computeDigestResponse( - auth_map, password, method, A1, **kwargs) - - return response == auth_map["response"] - - -def _checkBasicResponse(auth_map, password, method='GET', encrypt=None, - **kwargs): - # Note that the Basic response doesn't provide the realm value so we cannot - # test it - pass_through = lambda password, username=None: password - encrypt = encrypt or pass_through - try: - candidate = encrypt(auth_map["password"], auth_map["username"]) - except TypeError: - # if encrypt only takes one parameter, it's the password - candidate = encrypt(auth_map["password"]) - return candidate == password - -AUTH_RESPONSES = { - "basic": _checkBasicResponse, - "digest": _checkDigestResponse, -} - - -def checkResponse(auth_map, password, method="GET", encrypt=None, **kwargs): - """'checkResponse' compares the auth_map with the password and optionally - other arguments that each implementation might need. - - If the response is of type 'Basic' then the function has the following - signature:: - - checkBasicResponse(auth_map, password) -> bool - - If the response is of type 'Digest' then the function has the following - signature:: - - checkDigestResponse(auth_map, password, method='GET', A1=None) -> bool - - The 'A1' argument is only used in MD5_SESS algorithm based responses. - Check md5SessionKey() for more info. - """ - checker = AUTH_RESPONSES[auth_map["auth_scheme"]] - return checker(auth_map, password, method=method, encrypt=encrypt, - **kwargs) diff --git a/libs_crutch/contrib/cherrypy/lib/httputil.py b/libs_crutch/contrib/cherrypy/lib/httputil.py deleted file mode 100644 index 69a18d4..0000000 --- a/libs_crutch/contrib/cherrypy/lib/httputil.py +++ /dev/null @@ -1,536 +0,0 @@ -"""HTTP library functions. - -This module contains functions for building an HTTP application -framework: any one, not just one whose name starts with "Ch". 
;) If you -reference any modules from some popular framework inside *this* module, -FuManChu will personally hang you up by your thumbs and submit you -to a public caning. -""" - -from binascii import b2a_base64 -from cherrypy._cpcompat import BaseHTTPRequestHandler, HTTPDate, ntob, ntou -from cherrypy._cpcompat import basestring, bytestr, iteritems, nativestr -from cherrypy._cpcompat import reversed, sorted, unicodestr, unquote_qs -response_codes = BaseHTTPRequestHandler.responses.copy() - -# From https://bitbucket.org/cherrypy/cherrypy/issue/361 -response_codes[500] = ('Internal Server Error', - 'The server encountered an unexpected condition ' - 'which prevented it from fulfilling the request.') -response_codes[503] = ('Service Unavailable', - 'The server is currently unable to handle the ' - 'request due to a temporary overloading or ' - 'maintenance of the server.') - -import re -import urllib - - -def urljoin(*atoms): - """Return the given path \*atoms, joined into a single URL. - - This will correctly join a SCRIPT_NAME and PATH_INFO into the - original URL, even if either atom is blank. - """ - url = "/".join([x for x in atoms if x]) - while "//" in url: - url = url.replace("//", "/") - # Special-case the final url of "", and return "/" instead. - return url or "/" - - -def urljoin_bytes(*atoms): - """Return the given path *atoms, joined into a single URL. - - This will correctly join a SCRIPT_NAME and PATH_INFO into the - original URL, even if either atom is blank. - """ - url = ntob("/").join([x for x in atoms if x]) - while ntob("//") in url: - url = url.replace(ntob("//"), ntob("/")) - # Special-case the final url of "", and return "/" instead. - return url or ntob("/") - - -def protocol_from_http(protocol_str): - """Return a protocol tuple from the given 'HTTP/x.y' string.""" - return int(protocol_str[5]), int(protocol_str[7]) - - -def get_ranges(headervalue, content_length): - """Return a list of (start, stop) indices from a Range header, or None. 
- - Each (start, stop) tuple will be composed of two ints, which are suitable - for use in a slicing operation. That is, the header "Range: bytes=3-6", - if applied against a Python string, is requesting resource[3:7]. This - function will return the list [(3, 7)]. - - If this function returns an empty list, you should return HTTP 416. - """ - - if not headervalue: - return None - - result = [] - bytesunit, byteranges = headervalue.split("=", 1) - for brange in byteranges.split(","): - start, stop = [x.strip() for x in brange.split("-", 1)] - if start: - if not stop: - stop = content_length - 1 - start, stop = int(start), int(stop) - if start >= content_length: - # From rfc 2616 sec 14.16: - # "If the server receives a request (other than one - # including an If-Range request-header field) with an - # unsatisfiable Range request-header field (that is, - # all of whose byte-range-spec values have a first-byte-pos - # value greater than the current length of the selected - # resource), it SHOULD return a response code of 416 - # (Requested range not satisfiable)." - continue - if stop < start: - # From rfc 2616 sec 14.16: - # "If the server ignores a byte-range-spec because it - # is syntactically invalid, the server SHOULD treat - # the request as if the invalid Range header field - # did not exist. (Normally, this means return a 200 - # response containing the full entity)." - return None - result.append((start, stop + 1)) - else: - if not stop: - # See rfc quote above. - return None - # Negative subscript (last N bytes) - # - # RFC 2616 Section 14.35.1: - # If the entity is shorter than the specified suffix-length, - # the entire entity-body is used. 
- if int(stop) > content_length: - result.append((0, content_length)) - else: - result.append((content_length - int(stop), content_length)) - - return result - - -class HeaderElement(object): - - """An element (with parameters) from an HTTP header's element list.""" - - def __init__(self, value, params=None): - self.value = value - if params is None: - params = {} - self.params = params - - def __cmp__(self, other): - return cmp(self.value, other.value) - - def __lt__(self, other): - return self.value < other.value - - def __str__(self): - p = [";%s=%s" % (k, v) for k, v in iteritems(self.params)] - return str("%s%s" % (self.value, "".join(p))) - - def __bytes__(self): - return ntob(self.__str__()) - - def __unicode__(self): - return ntou(self.__str__()) - - def parse(elementstr): - """Transform 'token;key=val' to ('token', {'key': 'val'}).""" - # Split the element into a value and parameters. The 'value' may - # be of the form, "token=token", but we don't split that here. - atoms = [x.strip() for x in elementstr.split(";") if x.strip()] - if not atoms: - initial_value = '' - else: - initial_value = atoms.pop(0).strip() - params = {} - for atom in atoms: - atom = [x.strip() for x in atom.split("=", 1) if x.strip()] - key = atom.pop(0) - if atom: - val = atom[0] - else: - val = "" - params[key] = val - return initial_value, params - parse = staticmethod(parse) - - def from_str(cls, elementstr): - """Construct an instance from a string of the form 'token;key=val'.""" - ival, params = cls.parse(elementstr) - return cls(ival, params) - from_str = classmethod(from_str) - - -q_separator = re.compile(r'; *q *=') - - -class AcceptElement(HeaderElement): - - """An element (with parameters) from an Accept* header's element list. - - AcceptElement objects are comparable; the more-preferred object will be - "less than" the less-preferred object. 
They are also therefore sortable; - if you sort a list of AcceptElement objects, they will be listed in - priority order; the most preferred value will be first. Yes, it should - have been the other way around, but it's too late to fix now. - """ - - def from_str(cls, elementstr): - qvalue = None - # The first "q" parameter (if any) separates the initial - # media-range parameter(s) (if any) from the accept-params. - atoms = q_separator.split(elementstr, 1) - media_range = atoms.pop(0).strip() - if atoms: - # The qvalue for an Accept header can have extensions. The other - # headers cannot, but it's easier to parse them as if they did. - qvalue = HeaderElement.from_str(atoms[0].strip()) - - media_type, params = cls.parse(media_range) - if qvalue is not None: - params["q"] = qvalue - return cls(media_type, params) - from_str = classmethod(from_str) - - def qvalue(self): - val = self.params.get("q", "1") - if isinstance(val, HeaderElement): - val = val.value - return float(val) - qvalue = property(qvalue, doc="The qvalue, or priority, of this value.") - - def __cmp__(self, other): - diff = cmp(self.qvalue, other.qvalue) - if diff == 0: - diff = cmp(str(self), str(other)) - return diff - - def __lt__(self, other): - if self.qvalue == other.qvalue: - return str(self) < str(other) - else: - return self.qvalue < other.qvalue - -RE_HEADER_SPLIT = re.compile(',(?=(?:[^"]*"[^"]*")*[^"]*$)') -def header_elements(fieldname, fieldvalue): - """Return a sorted HeaderElement list from a comma-separated header string. - """ - if not fieldvalue: - return [] - - result = [] - for element in RE_HEADER_SPLIT.split(fieldvalue): - if fieldname.startswith("Accept") or fieldname == 'TE': - hv = AcceptElement.from_str(element) - else: - hv = HeaderElement.from_str(element) - result.append(hv) - - return list(reversed(sorted(result))) - - -def decode_TEXT(value): - r"""Decode :rfc:`2047` TEXT (e.g. 
"=?utf-8?q?f=C3=BCr?=" -> "f\xfcr").""" - try: - # Python 3 - from email.header import decode_header - except ImportError: - from email.Header import decode_header - atoms = decode_header(value) - decodedvalue = "" - for atom, charset in atoms: - if charset is not None: - atom = atom.decode(charset) - decodedvalue += atom - return decodedvalue - - -def valid_status(status): - """Return legal HTTP status Code, Reason-phrase and Message. - - The status arg must be an int, or a str that begins with an int. - - If status is an int, or a str and no reason-phrase is supplied, - a default reason-phrase will be provided. - """ - - if not status: - status = 200 - - status = str(status) - parts = status.split(" ", 1) - if len(parts) == 1: - # No reason supplied. - code, = parts - reason = None - else: - code, reason = parts - reason = reason.strip() - - try: - code = int(code) - except ValueError: - raise ValueError("Illegal response status from server " - "(%s is non-numeric)." % repr(code)) - - if code < 100 or code > 599: - raise ValueError("Illegal response status from server " - "(%s is out of range)." % repr(code)) - - if code not in response_codes: - # code is unknown but not illegal - default_reason, message = "", "" - else: - default_reason, message = response_codes[code] - - if reason is None: - reason = default_reason - - return code, reason, message - - -# NOTE: the parse_qs functions that follow are modified version of those -# in the python3.0 source - we need to pass through an encoding to the unquote -# method, but the default parse_qs function doesn't allow us to. These do. - -def _parse_qs(qs, keep_blank_values=0, strict_parsing=0, encoding='utf-8'): - """Parse a query given as a string argument. - - Arguments: - - qs: URL-encoded query string to be parsed - - keep_blank_values: flag indicating whether blank values in - URL encoded queries should be treated as blank strings. A - true value indicates that blanks should be retained as blank - strings. 
The default false value indicates that blank values - are to be ignored and treated as if they were not included. - - strict_parsing: flag indicating what to do with parsing errors. If - false (the default), errors are silently ignored. If true, - errors raise a ValueError exception. - - Returns a dict, as G-d intended. - """ - pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')] - d = {} - for name_value in pairs: - if not name_value and not strict_parsing: - continue - nv = name_value.split('=', 1) - if len(nv) != 2: - if strict_parsing: - raise ValueError("bad query field: %r" % (name_value,)) - # Handle case of a control-name with no equal sign - if keep_blank_values: - nv.append('') - else: - continue - if len(nv[1]) or keep_blank_values: - name = unquote_qs(nv[0], encoding) - value = unquote_qs(nv[1], encoding) - if name in d: - if not isinstance(d[name], list): - d[name] = [d[name]] - d[name].append(value) - else: - d[name] = value - return d - - -image_map_pattern = re.compile(r"[0-9]+,[0-9]+") - - -def parse_query_string(query_string, keep_blank_values=True, encoding='utf-8'): - """Build a params dictionary from a query_string. - - Duplicate key/value pairs in the provided query_string will be - returned as {'key': [val1, val2, ...]}. Single key/values will - be returned as strings: {'key': 'value'}. - """ - if image_map_pattern.match(query_string): - # Server-side image map. Map the coords to 'x' and 'y' - # (like CGI::Request does). - pm = query_string.split(",") - pm = {'x': int(pm[0]), 'y': int(pm[1])} - else: - pm = _parse_qs(query_string, keep_blank_values, encoding=encoding) - return pm - - -class CaseInsensitiveDict(dict): - - """A case-insensitive dict subclass. - - Each key is changed on entry to str(key).title(). 
- """ - - def __getitem__(self, key): - return dict.__getitem__(self, str(key).title()) - - def __setitem__(self, key, value): - dict.__setitem__(self, str(key).title(), value) - - def __delitem__(self, key): - dict.__delitem__(self, str(key).title()) - - def __contains__(self, key): - return dict.__contains__(self, str(key).title()) - - def get(self, key, default=None): - return dict.get(self, str(key).title(), default) - - if hasattr({}, 'has_key'): - def has_key(self, key): - return str(key).title() in self - - def update(self, E): - for k in E.keys(): - self[str(k).title()] = E[k] - - def fromkeys(cls, seq, value=None): - newdict = cls() - for k in seq: - newdict[str(k).title()] = value - return newdict - fromkeys = classmethod(fromkeys) - - def setdefault(self, key, x=None): - key = str(key).title() - try: - return self[key] - except KeyError: - self[key] = x - return x - - def pop(self, key, default): - return dict.pop(self, str(key).title(), default) - - -# TEXT = -# -# A CRLF is allowed in the definition of TEXT only as part of a header -# field continuation. It is expected that the folding LWS will be -# replaced with a single SP before interpretation of the TEXT value." -if nativestr == bytestr: - header_translate_table = ''.join([chr(i) for i in xrange(256)]) - header_translate_deletechars = ''.join( - [chr(i) for i in xrange(32)]) + chr(127) -else: - header_translate_table = None - header_translate_deletechars = bytes(range(32)) + bytes([127]) - - -class HeaderMap(CaseInsensitiveDict): - - """A dict subclass for HTTP request and response headers. - - Each key is changed on entry to str(key).title(). This allows headers - to be case-insensitive and avoid duplicates. - - Values are header values (decoded according to :rfc:`2047` if necessary). - """ - - protocol = (1, 1) - encodings = ["ISO-8859-1"] - - # Someday, when http-bis is done, this will probably get dropped - # since few servers, clients, or intermediaries do it. 
But until then, - # we're going to obey the spec as is. - # "Words of *TEXT MAY contain characters from character sets other than - # ISO-8859-1 only when encoded according to the rules of RFC 2047." - use_rfc_2047 = True - - def elements(self, key): - """Return a sorted list of HeaderElements for the given header.""" - key = str(key).title() - value = self.get(key) - return header_elements(key, value) - - def values(self, key): - """Return a sorted list of HeaderElement.value for the given header.""" - return [e.value for e in self.elements(key)] - - def output(self): - """Transform self into a list of (name, value) tuples.""" - return list(self.encode_header_items(self.items())) - - def encode_header_items(cls, header_items): - """ - Prepare the sequence of name, value tuples into a form suitable for - transmitting on the wire for HTTP. - """ - for k, v in header_items: - if isinstance(k, unicodestr): - k = cls.encode(k) - - if not isinstance(v, basestring): - v = str(v) - - if isinstance(v, unicodestr): - v = cls.encode(v) - - # See header_translate_* constants above. - # Replace only if you really know what you're doing. - k = k.translate(header_translate_table, - header_translate_deletechars) - v = v.translate(header_translate_table, - header_translate_deletechars) - - yield (k, v) - encode_header_items = classmethod(encode_header_items) - - def encode(cls, v): - """Return the given header name or value, encoded for HTTP output.""" - for enc in cls.encodings: - try: - return v.encode(enc) - except UnicodeEncodeError: - continue - - if cls.protocol == (1, 1) and cls.use_rfc_2047: - # Encode RFC-2047 TEXT - # (e.g. u"\u8200" -> "=?utf-8?b?6IiA?="). - # We do our own here instead of using the email module - # because we never want to fold lines--folding has - # been deprecated by the HTTP working group. 
- v = b2a_base64(v.encode('utf-8')) - return (ntob('=?utf-8?b?') + v.strip(ntob('\n')) + ntob('?=')) - - raise ValueError("Could not encode header part %r using " - "any of the encodings %r." % - (v, cls.encodings)) - encode = classmethod(encode) - - -class Host(object): - - """An internet address. - - name - Should be the client's host name. If not available (because no DNS - lookup is performed), the IP address should be used instead. - - """ - - ip = "0.0.0.0" - port = 80 - name = "unknown.tld" - - def __init__(self, ip, port, name=None): - self.ip = ip - self.port = port - if name is None: - name = ip - self.name = name - - def __repr__(self): - return "httputil.Host(%r, %r, %r)" % (self.ip, self.port, self.name) diff --git a/libs_crutch/contrib/cherrypy/lib/jsontools.py b/libs_crutch/contrib/cherrypy/lib/jsontools.py deleted file mode 100644 index 90b3ff8..0000000 --- a/libs_crutch/contrib/cherrypy/lib/jsontools.py +++ /dev/null @@ -1,96 +0,0 @@ -import cherrypy -from cherrypy._cpcompat import basestring, ntou, json_encode, json_decode - - -def json_processor(entity): - """Read application/json data into request.json.""" - if not entity.headers.get(ntou("Content-Length"), ntou("")): - raise cherrypy.HTTPError(411) - - body = entity.fp.read() - try: - cherrypy.serving.request.json = json_decode(body.decode('utf-8')) - except ValueError: - raise cherrypy.HTTPError(400, 'Invalid JSON document') - - -def json_in(content_type=[ntou('application/json'), ntou('text/javascript')], - force=True, debug=False, processor=json_processor): - """Add a processor to parse JSON request entities: - The default processor places the parsed data into request.json. - - Incoming request entities which match the given content_type(s) will - be deserialized from JSON to the Python equivalent, and the result - stored at cherrypy.request.json. The 'content_type' argument may - be a Content-Type string or a list of allowable Content-Type strings. 
- - If the 'force' argument is True (the default), then entities of other - content types will not be allowed; "415 Unsupported Media Type" is - raised instead. - - Supply your own processor to use a custom decoder, or to handle the parsed - data differently. The processor can be configured via - tools.json_in.processor or via the decorator method. - - Note that the deserializer requires the client send a Content-Length - request header, or it will raise "411 Length Required". If for any - other reason the request entity cannot be deserialized from JSON, - it will raise "400 Bad Request: Invalid JSON document". - - You must be using Python 2.6 or greater, or have the 'simplejson' - package importable; otherwise, ValueError is raised during processing. - """ - request = cherrypy.serving.request - if isinstance(content_type, basestring): - content_type = [content_type] - - if force: - if debug: - cherrypy.log('Removing body processors %s' % - repr(request.body.processors.keys()), 'TOOLS.JSON_IN') - request.body.processors.clear() - request.body.default_proc = cherrypy.HTTPError( - 415, 'Expected an entity of content type %s' % - ', '.join(content_type)) - - for ct in content_type: - if debug: - cherrypy.log('Adding body processor for %s' % ct, 'TOOLS.JSON_IN') - request.body.processors[ct] = processor - - -def json_handler(*args, **kwargs): - value = cherrypy.serving.request._json_inner_handler(*args, **kwargs) - return json_encode(value) - - -def json_out(content_type='application/json', debug=False, - handler=json_handler): - """Wrap request.handler to serialize its output to JSON. Sets Content-Type. - - If the given content_type is None, the Content-Type response header - is not set. - - Provide your own handler to use a custom encoder. For example - cherrypy.config['tools.json_out.handler'] = , or - @json_out(handler=function). 
- - You must be using Python 2.6 or greater, or have the 'simplejson' - package importable; otherwise, ValueError is raised during processing. - """ - request = cherrypy.serving.request - # request.handler may be set to None by e.g. the caching tool - # to signal to all components that a response body has already - # been attached, in which case we don't need to wrap anything. - if request.handler is None: - return - if debug: - cherrypy.log('Replacing %s with JSON handler' % request.handler, - 'TOOLS.JSON_OUT') - request._json_inner_handler = request.handler - request.handler = handler - if content_type is not None: - if debug: - cherrypy.log('Setting Content-Type to %s' % - content_type, 'TOOLS.JSON_OUT') - cherrypy.serving.response.headers['Content-Type'] = content_type diff --git a/libs_crutch/contrib/cherrypy/lib/lockfile.py b/libs_crutch/contrib/cherrypy/lib/lockfile.py deleted file mode 100644 index b9d8e02..0000000 --- a/libs_crutch/contrib/cherrypy/lib/lockfile.py +++ /dev/null @@ -1,147 +0,0 @@ -""" -Platform-independent file locking. Inspired by and modeled after zc.lockfile. -""" - -import os - -try: - import msvcrt -except ImportError: - pass - -try: - import fcntl -except ImportError: - pass - - -class LockError(Exception): - - "Could not obtain a lock" - - msg = "Unable to lock %r" - - def __init__(self, path): - super(LockError, self).__init__(self.msg % path) - - -class UnlockError(LockError): - - "Could not release a lock" - - msg = "Unable to unlock %r" - - -# first, a default, naive locking implementation -class LockFile(object): - - """ - A default, naive locking implementation. Always fails if the file - already exists. 
- """ - - def __init__(self, path): - self.path = path - try: - fd = os.open(path, os.O_CREAT | os.O_WRONLY | os.O_EXCL) - except OSError: - raise LockError(self.path) - os.close(fd) - - def release(self): - os.remove(self.path) - - def remove(self): - pass - - -class SystemLockFile(object): - - """ - An abstract base class for platform-specific locking. - """ - - def __init__(self, path): - self.path = path - - try: - # Open lockfile for writing without truncation: - self.fp = open(path, 'r+') - except IOError: - # If the file doesn't exist, IOError is raised; Use a+ instead. - # Note that there may be a race here. Multiple processes - # could fail on the r+ open and open the file a+, but only - # one will get the the lock and write a pid. - self.fp = open(path, 'a+') - - try: - self._lock_file() - except: - self.fp.seek(1) - self.fp.close() - del self.fp - raise - - self.fp.write(" %s\n" % os.getpid()) - self.fp.truncate() - self.fp.flush() - - def release(self): - if not hasattr(self, 'fp'): - return - self._unlock_file() - self.fp.close() - del self.fp - - def remove(self): - """ - Attempt to remove the file - """ - try: - os.remove(self.path) - except: - pass - - #@abc.abstract_method - # def _lock_file(self): - # """Attempt to obtain the lock on self.fp. Raise LockError if not - # acquired.""" - - def _unlock_file(self): - """Attempt to obtain the lock on self.fp. 
Raise UnlockError if not - released.""" - - -class WindowsLockFile(SystemLockFile): - - def _lock_file(self): - # Lock just the first byte - try: - msvcrt.locking(self.fp.fileno(), msvcrt.LK_NBLCK, 1) - except IOError: - raise LockError(self.fp.name) - - def _unlock_file(self): - try: - self.fp.seek(0) - msvcrt.locking(self.fp.fileno(), msvcrt.LK_UNLCK, 1) - except IOError: - raise UnlockError(self.fp.name) - -if 'msvcrt' in globals(): - LockFile = WindowsLockFile - - -class UnixLockFile(SystemLockFile): - - def _lock_file(self): - flags = fcntl.LOCK_EX | fcntl.LOCK_NB - try: - fcntl.flock(self.fp.fileno(), flags) - except IOError: - raise LockError(self.fp.name) - - # no need to implement _unlock_file, it will be unlocked on close() - -if 'fcntl' in globals(): - LockFile = UnixLockFile diff --git a/libs_crutch/contrib/cherrypy/lib/locking.py b/libs_crutch/contrib/cherrypy/lib/locking.py deleted file mode 100644 index 72dda9b..0000000 --- a/libs_crutch/contrib/cherrypy/lib/locking.py +++ /dev/null @@ -1,47 +0,0 @@ -import datetime - - -class NeverExpires(object): - def expired(self): - return False - - -class Timer(object): - """ - A simple timer that will indicate when an expiration time has passed. - """ - def __init__(self, expiration): - "Create a timer that expires at `expiration` (UTC datetime)" - self.expiration = expiration - - @classmethod - def after(cls, elapsed): - """ - Return a timer that will expire after `elapsed` passes. 
- """ - return cls(datetime.datetime.utcnow() + elapsed) - - def expired(self): - return datetime.datetime.utcnow() >= self.expiration - - -class LockTimeout(Exception): - "An exception when a lock could not be acquired before a timeout period" - - -class LockChecker(object): - """ - Keep track of the time and detect if a timeout has expired - """ - def __init__(self, session_id, timeout): - self.session_id = session_id - if timeout: - self.timer = Timer.after(timeout) - else: - self.timer = NeverExpires() - - def expired(self): - if self.timer.expired(): - raise LockTimeout( - "Timeout acquiring lock for %(session_id)s" % vars(self)) - return False diff --git a/libs_crutch/contrib/cherrypy/lib/profiler.py b/libs_crutch/contrib/cherrypy/lib/profiler.py deleted file mode 100644 index 5dac386..0000000 --- a/libs_crutch/contrib/cherrypy/lib/profiler.py +++ /dev/null @@ -1,216 +0,0 @@ -"""Profiler tools for CherryPy. - -CherryPy users -============== - -You can profile any of your pages as follows:: - - from cherrypy.lib import profiler - - class Root: - p = profile.Profiler("/path/to/profile/dir") - - def index(self): - self.p.run(self._index) - index.exposed = True - - def _index(self): - return "Hello, world!" - - cherrypy.tree.mount(Root()) - -You can also turn on profiling for all requests -using the ``make_app`` function as WSGI middleware. - -CherryPy developers -=================== - -This module can be used whenever you make changes to CherryPy, -to get a quick sanity-check on overall CP performance. Use the -``--profile`` flag when running the test suite. Then, use the ``serve()`` -function to browse the results in a web browser. If you run this -module from the command line, it will call ``serve()`` for you. 
- -""" - - -def new_func_strip_path(func_name): - """Make profiler output more readable by adding `__init__` modules' parents - """ - filename, line, name = func_name - if filename.endswith("__init__.py"): - return os.path.basename(filename[:-12]) + filename[-12:], line, name - return os.path.basename(filename), line, name - -try: - import profile - import pstats - pstats.func_strip_path = new_func_strip_path -except ImportError: - profile = None - pstats = None - -import os -import os.path -import sys -import warnings - -from cherrypy._cpcompat import StringIO - -_count = 0 - - -class Profiler(object): - - def __init__(self, path=None): - if not path: - path = os.path.join(os.path.dirname(__file__), "profile") - self.path = path - if not os.path.exists(path): - os.makedirs(path) - - def run(self, func, *args, **params): - """Dump profile data into self.path.""" - global _count - c = _count = _count + 1 - path = os.path.join(self.path, "cp_%04d.prof" % c) - prof = profile.Profile() - result = prof.runcall(func, *args, **params) - prof.dump_stats(path) - return result - - def statfiles(self): - """:rtype: list of available profiles. - """ - return [f for f in os.listdir(self.path) - if f.startswith("cp_") and f.endswith(".prof")] - - def stats(self, filename, sortby='cumulative'): - """:rtype stats(index): output of print_stats() for the given profile. - """ - sio = StringIO() - if sys.version_info >= (2, 5): - s = pstats.Stats(os.path.join(self.path, filename), stream=sio) - s.strip_dirs() - s.sort_stats(sortby) - s.print_stats() - else: - # pstats.Stats before Python 2.5 didn't take a 'stream' arg, - # but just printed to stdout. So re-route stdout. 
- s = pstats.Stats(os.path.join(self.path, filename)) - s.strip_dirs() - s.sort_stats(sortby) - oldout = sys.stdout - try: - sys.stdout = sio - s.print_stats() - finally: - sys.stdout = oldout - response = sio.getvalue() - sio.close() - return response - - def index(self): - return """ - CherryPy profile data - - - - - - """ - index.exposed = True - - def menu(self): - yield "

Profiling runs

" - yield "

Click on one of the runs below to see profiling data.

" - runs = self.statfiles() - runs.sort() - for i in runs: - yield "%s
" % ( - i, i) - menu.exposed = True - - def report(self, filename): - import cherrypy - cherrypy.response.headers['Content-Type'] = 'text/plain' - return self.stats(filename) - report.exposed = True - - -class ProfileAggregator(Profiler): - - def __init__(self, path=None): - Profiler.__init__(self, path) - global _count - self.count = _count = _count + 1 - self.profiler = profile.Profile() - - def run(self, func, *args, **params): - path = os.path.join(self.path, "cp_%04d.prof" % self.count) - result = self.profiler.runcall(func, *args, **params) - self.profiler.dump_stats(path) - return result - - -class make_app: - - def __init__(self, nextapp, path=None, aggregate=False): - """Make a WSGI middleware app which wraps 'nextapp' with profiling. - - nextapp - the WSGI application to wrap, usually an instance of - cherrypy.Application. - - path - where to dump the profiling output. - - aggregate - if True, profile data for all HTTP requests will go in - a single file. If False (the default), each HTTP request will - dump its profile data into a separate file. - - """ - if profile is None or pstats is None: - msg = ("Your installation of Python does not have a profile " - "module. If you're on Debian, try " - "`sudo apt-get install python-profiler`. " - "See http://www.cherrypy.org/wiki/ProfilingOnDebian " - "for details.") - warnings.warn(msg) - - self.nextapp = nextapp - self.aggregate = aggregate - if aggregate: - self.profiler = ProfileAggregator(path) - else: - self.profiler = Profiler(path) - - def __call__(self, environ, start_response): - def gather(): - result = [] - for line in self.nextapp(environ, start_response): - result.append(line) - return result - return self.profiler.run(gather) - - -def serve(path=None, port=8080): - if profile is None or pstats is None: - msg = ("Your installation of Python does not have a profile module. " - "If you're on Debian, try " - "`sudo apt-get install python-profiler`. 
" - "See http://www.cherrypy.org/wiki/ProfilingOnDebian " - "for details.") - warnings.warn(msg) - - import cherrypy - cherrypy.config.update({'server.socket_port': int(port), - 'server.thread_pool': 10, - 'environment': "production", - }) - cherrypy.quickstart(Profiler(path)) - - -if __name__ == "__main__": - serve(*tuple(sys.argv[1:])) diff --git a/libs_crutch/contrib/cherrypy/lib/reprconf.py b/libs_crutch/contrib/cherrypy/lib/reprconf.py deleted file mode 100644 index 6e70b5e..0000000 --- a/libs_crutch/contrib/cherrypy/lib/reprconf.py +++ /dev/null @@ -1,503 +0,0 @@ -"""Generic configuration system using unrepr. - -Configuration data may be supplied as a Python dictionary, as a filename, -or as an open file object. When you supply a filename or file, Python's -builtin ConfigParser is used (with some extensions). - -Namespaces ----------- - -Configuration keys are separated into namespaces by the first "." in the key. - -The only key that cannot exist in a namespace is the "environment" entry. -This special entry 'imports' other config entries from a template stored in -the Config.environments dict. - -You can define your own namespaces to be called when new config is merged -by adding a named handler to Config.namespaces. The name can be any string, -and the handler must be either a callable or a context manager. 
-""" - -try: - # Python 3.0+ - from configparser import ConfigParser -except ImportError: - from ConfigParser import ConfigParser - -try: - set -except NameError: - from sets import Set as set - -try: - basestring -except NameError: - basestring = str - -try: - # Python 3 - import builtins -except ImportError: - # Python 2 - import __builtin__ as builtins - -import operator as _operator -import sys - - -def as_dict(config): - """Return a dict from 'config' whether it is a dict, file, or filename.""" - if isinstance(config, basestring): - config = Parser().dict_from_file(config) - elif hasattr(config, 'read'): - config = Parser().dict_from_file(config) - return config - - -class NamespaceSet(dict): - - """A dict of config namespace names and handlers. - - Each config entry should begin with a namespace name; the corresponding - namespace handler will be called once for each config entry in that - namespace, and will be passed two arguments: the config key (with the - namespace removed) and the config value. - - Namespace handlers may be any Python callable; they may also be - Python 2.5-style 'context managers', in which case their __enter__ - method should return a callable to be used as the handler. - See cherrypy.tools (the Toolbox class) for an example. - """ - - def __call__(self, config): - """Iterate through config and pass it to each namespace handler. - - config - A flat dict, where keys use dots to separate - namespaces, and values are arbitrary. - - The first name in each config key is used to look up the corresponding - namespace handler. For example, a config entry of {'tools.gzip.on': v} - will call the 'tools' namespace handler with the args: ('gzip.on', v) - """ - # Separate the given config into namespaces - ns_confs = {} - for k in config: - if "." 
in k: - ns, name = k.split(".", 1) - bucket = ns_confs.setdefault(ns, {}) - bucket[name] = config[k] - - # I chose __enter__ and __exit__ so someday this could be - # rewritten using Python 2.5's 'with' statement: - # for ns, handler in self.iteritems(): - # with handler as callable: - # for k, v in ns_confs.get(ns, {}).iteritems(): - # callable(k, v) - for ns, handler in self.items(): - exit = getattr(handler, "__exit__", None) - if exit: - callable = handler.__enter__() - no_exc = True - try: - try: - for k, v in ns_confs.get(ns, {}).items(): - callable(k, v) - except: - # The exceptional case is handled here - no_exc = False - if exit is None: - raise - if not exit(*sys.exc_info()): - raise - # The exception is swallowed if exit() returns true - finally: - # The normal and non-local-goto cases are handled here - if no_exc and exit: - exit(None, None, None) - else: - for k, v in ns_confs.get(ns, {}).items(): - handler(k, v) - - def __repr__(self): - return "%s.%s(%s)" % (self.__module__, self.__class__.__name__, - dict.__repr__(self)) - - def __copy__(self): - newobj = self.__class__() - newobj.update(self) - return newobj - copy = __copy__ - - -class Config(dict): - - """A dict-like set of configuration data, with defaults and namespaces. - - May take a file, filename, or dict. 
- """ - - defaults = {} - environments = {} - namespaces = NamespaceSet() - - def __init__(self, file=None, **kwargs): - self.reset() - if file is not None: - self.update(file) - if kwargs: - self.update(kwargs) - - def reset(self): - """Reset self to default values.""" - self.clear() - dict.update(self, self.defaults) - - def update(self, config): - """Update self from a dict, file or filename.""" - if isinstance(config, basestring): - # Filename - config = Parser().dict_from_file(config) - elif hasattr(config, 'read'): - # Open file object - config = Parser().dict_from_file(config) - else: - config = config.copy() - self._apply(config) - - def _apply(self, config): - """Update self from a dict.""" - which_env = config.get('environment') - if which_env: - env = self.environments[which_env] - for k in env: - if k not in config: - config[k] = env[k] - - dict.update(self, config) - self.namespaces(config) - - def __setitem__(self, k, v): - dict.__setitem__(self, k, v) - self.namespaces({k: v}) - - -class Parser(ConfigParser): - - """Sub-class of ConfigParser that keeps the case of options and that - raises an exception if the file cannot be read. - """ - - def optionxform(self, optionstr): - return optionstr - - def read(self, filenames): - if isinstance(filenames, basestring): - filenames = [filenames] - for filename in filenames: - # try: - # fp = open(filename) - # except IOError: - # continue - fp = open(filename) - try: - self._read(fp, filename) - finally: - fp.close() - - def as_dict(self, raw=False, vars=None): - """Convert an INI file to a dictionary""" - # Load INI file into a dict - result = {} - for section in self.sections(): - if section not in result: - result[section] = {} - for option in self.options(section): - value = self.get(section, option, raw=raw, vars=vars) - try: - value = unrepr(value) - except Exception: - x = sys.exc_info()[1] - msg = ("Config error in section: %r, option: %r, " - "value: %r. Config values must be valid Python." 
% - (section, option, value)) - raise ValueError(msg, x.__class__.__name__, x.args) - result[section][option] = value - return result - - def dict_from_file(self, file): - if hasattr(file, 'read'): - self.readfp(file) - else: - self.read(file) - return self.as_dict() - - -# public domain "unrepr" implementation, found on the web and then improved. - - -class _Builder2: - - def build(self, o): - m = getattr(self, 'build_' + o.__class__.__name__, None) - if m is None: - raise TypeError("unrepr does not recognize %s" % - repr(o.__class__.__name__)) - return m(o) - - def astnode(self, s): - """Return a Python2 ast Node compiled from a string.""" - try: - import compiler - except ImportError: - # Fallback to eval when compiler package is not available, - # e.g. IronPython 1.0. - return eval(s) - - p = compiler.parse("__tempvalue__ = " + s) - return p.getChildren()[1].getChildren()[0].getChildren()[1] - - def build_Subscript(self, o): - expr, flags, subs = o.getChildren() - expr = self.build(expr) - subs = self.build(subs) - return expr[subs] - - def build_CallFunc(self, o): - children = o.getChildren() - # Build callee from first child - callee = self.build(children[0]) - # Build args and kwargs from remaining children - args = [] - kwargs = {} - for child in children[1:]: - class_name = child.__class__.__name__ - # None is ignored - if class_name == 'NoneType': - continue - # Keywords become kwargs - if class_name == 'Keyword': - kwargs.update(self.build(child)) - # Everything else becomes args - else : - args.append(self.build(child)) - return callee(*args, **kwargs) - - def build_Keyword(self, o): - key, value_obj = o.getChildren() - value = self.build(value_obj) - kw_dict = {key: value} - return kw_dict - - def build_List(self, o): - return map(self.build, o.getChildren()) - - def build_Const(self, o): - return o.value - - def build_Dict(self, o): - d = {} - i = iter(map(self.build, o.getChildren())) - for el in i: - d[el] = i.next() - return d - - def 
build_Tuple(self, o): - return tuple(self.build_List(o)) - - def build_Name(self, o): - name = o.name - if name == 'None': - return None - if name == 'True': - return True - if name == 'False': - return False - - # See if the Name is a package or module. If it is, import it. - try: - return modules(name) - except ImportError: - pass - - # See if the Name is in builtins. - try: - return getattr(builtins, name) - except AttributeError: - pass - - raise TypeError("unrepr could not resolve the name %s" % repr(name)) - - def build_Add(self, o): - left, right = map(self.build, o.getChildren()) - return left + right - - def build_Mul(self, o): - left, right = map(self.build, o.getChildren()) - return left * right - - def build_Getattr(self, o): - parent = self.build(o.expr) - return getattr(parent, o.attrname) - - def build_NoneType(self, o): - return None - - def build_UnarySub(self, o): - return -self.build(o.getChildren()[0]) - - def build_UnaryAdd(self, o): - return self.build(o.getChildren()[0]) - - -class _Builder3: - - def build(self, o): - m = getattr(self, 'build_' + o.__class__.__name__, None) - if m is None: - raise TypeError("unrepr does not recognize %s" % - repr(o.__class__.__name__)) - return m(o) - - def astnode(self, s): - """Return a Python3 ast Node compiled from a string.""" - try: - import ast - except ImportError: - # Fallback to eval when ast package is not available, - # e.g. IronPython 1.0. 
- return eval(s) - - p = ast.parse("__tempvalue__ = " + s) - return p.body[0].value - - def build_Subscript(self, o): - return self.build(o.value)[self.build(o.slice)] - - def build_Index(self, o): - return self.build(o.value) - - def build_Call(self, o): - callee = self.build(o.func) - - if o.args is None: - args = () - else: - args = tuple([self.build(a) for a in o.args]) - - if o.starargs is None: - starargs = () - else: - starargs = self.build(o.starargs) - - if o.kwargs is None: - kwargs = {} - else: - kwargs = self.build(o.kwargs) - - return callee(*(args + starargs), **kwargs) - - def build_List(self, o): - return list(map(self.build, o.elts)) - - def build_Str(self, o): - return o.s - - def build_Num(self, o): - return o.n - - def build_Dict(self, o): - return dict([(self.build(k), self.build(v)) - for k, v in zip(o.keys, o.values)]) - - def build_Tuple(self, o): - return tuple(self.build_List(o)) - - def build_Name(self, o): - name = o.id - if name == 'None': - return None - if name == 'True': - return True - if name == 'False': - return False - - # See if the Name is a package or module. If it is, import it. - try: - return modules(name) - except ImportError: - pass - - # See if the Name is in builtins. 
- try: - import builtins - return getattr(builtins, name) - except AttributeError: - pass - - raise TypeError("unrepr could not resolve the name %s" % repr(name)) - - def build_NameConstant(self, o): - return o.value - - def build_UnaryOp(self, o): - op, operand = map(self.build, [o.op, o.operand]) - return op(operand) - - def build_BinOp(self, o): - left, op, right = map(self.build, [o.left, o.op, o.right]) - return op(left, right) - - def build_Add(self, o): - return _operator.add - - def build_Mult(self, o): - return _operator.mul - - def build_USub(self, o): - return _operator.neg - - def build_Attribute(self, o): - parent = self.build(o.value) - return getattr(parent, o.attr) - - def build_NoneType(self, o): - return None - - -def unrepr(s): - """Return a Python object compiled from a string.""" - if not s: - return s - if sys.version_info < (3, 0): - b = _Builder2() - else: - b = _Builder3() - obj = b.astnode(s) - return b.build(obj) - - -def modules(modulePath): - """Load a module and retrieve a reference to that module.""" - __import__(modulePath) - return sys.modules[modulePath] - - -def attributes(full_attribute_name): - """Load a module and retrieve an attribute of that module.""" - - # Parse out the path, module, and attribute - last_dot = full_attribute_name.rfind(".") - attr_name = full_attribute_name[last_dot + 1:] - mod_path = full_attribute_name[:last_dot] - - mod = modules(mod_path) - # Let an AttributeError propagate outward. - try: - attr = getattr(mod, attr_name) - except AttributeError: - raise AttributeError("'%s' object has no attribute '%s'" - % (mod_path, attr_name)) - - # Return a reference to the attribute. - return attr diff --git a/libs_crutch/contrib/cherrypy/lib/sessions.py b/libs_crutch/contrib/cherrypy/lib/sessions.py deleted file mode 100644 index 3755636..0000000 --- a/libs_crutch/contrib/cherrypy/lib/sessions.py +++ /dev/null @@ -1,968 +0,0 @@ -"""Session implementation for CherryPy. 
- -You need to edit your config file to use sessions. Here's an example:: - - [/] - tools.sessions.on = True - tools.sessions.storage_type = "file" - tools.sessions.storage_path = "/home/site/sessions" - tools.sessions.timeout = 60 - -This sets the session to be stored in files in the directory -/home/site/sessions, and the session timeout to 60 minutes. If you omit -``storage_type`` the sessions will be saved in RAM. -``tools.sessions.on`` is the only required line for working sessions, -the rest are optional. - -By default, the session ID is passed in a cookie, so the client's browser must -have cookies enabled for your site. - -To set data for the current session, use -``cherrypy.session['fieldname'] = 'fieldvalue'``; -to get data use ``cherrypy.session.get('fieldname')``. - -================ -Locking sessions -================ - -By default, the ``'locking'`` mode of sessions is ``'implicit'``, which means -the session is locked early and unlocked late. Be mindful of this default mode -for any requests that take a long time to process (streaming responses, -expensive calculations, database lookups, API calls, etc), as other concurrent -requests that also utilize sessions will hang until the session is unlocked. - -If you want to control when the session data is locked and unlocked, -set ``tools.sessions.locking = 'explicit'``. Then call -``cherrypy.session.acquire_lock()`` and ``cherrypy.session.release_lock()``. -Regardless of which mode you use, the session is guaranteed to be unlocked when -the request is complete. - -================= -Expiring Sessions -================= - -You can force a session to expire with :func:`cherrypy.lib.sessions.expire`. -Simply call that function at the point you want the session to expire, and it -will cause the session cookie to expire client-side. 
- -=========================== -Session Fixation Protection -=========================== - -If CherryPy receives, via a request cookie, a session id that it does not -recognize, it will reject that id and create a new one to return in the -response cookie. This `helps prevent session fixation attacks -`_. -However, CherryPy "recognizes" a session id by looking up the saved session -data for that id. Therefore, if you never save any session data, -**you will get a new session id for every request**. - -================ -Sharing Sessions -================ - -If you run multiple instances of CherryPy (for example via mod_python behind -Apache prefork), you most likely cannot use the RAM session backend, since each -instance of CherryPy will have its own memory space. Use a different backend -instead, and verify that all instances are pointing at the same file or db -location. Alternately, you might try a load balancer which makes sessions -"sticky". Google is your friend, there. - -================ -Expiration Dates -================ - -The response cookie will possess an expiration date to inform the client at -which point to stop sending the cookie back in requests. If the server time -and client time differ, expect sessions to be unreliable. **Make sure the -system time of your server is accurate**. - -CherryPy defaults to a 60-minute session timeout, which also applies to the -cookie which is sent to the client. Unfortunately, some versions of Safari -("4 public beta" on Windows XP at least) appear to have a bug in their parsing -of the GMT expiration date--they appear to interpret the date as one hour in -the past. Sixty minutes minus one hour is pretty close to zero, so you may -experience this bug as a new session id for every request, unless the requests -are less than one second apart. To fix, try increasing the session.timeout. 
- -On the other extreme, some users report Firefox sending cookies after their -expiration date, although this was on a system with an inaccurate system time. -Maybe FF doesn't trust system time. -""" -import sys -import datetime -import os -import time -import threading -import types - -import cherrypy -from cherrypy._cpcompat import copyitems, pickle, random20, unicodestr -from cherrypy.lib import httputil -from cherrypy.lib import lockfile -from cherrypy.lib import locking -from cherrypy.lib import is_iterator - -missing = object() - - -class Session(object): - - """A CherryPy dict-like Session object (one per request).""" - - _id = None - - id_observers = None - "A list of callbacks to which to pass new id's." - - def _get_id(self): - return self._id - - def _set_id(self, value): - self._id = value - for o in self.id_observers: - o(value) - id = property(_get_id, _set_id, doc="The current session ID.") - - timeout = 60 - "Number of minutes after which to delete session data." - - locked = False - """ - If True, this session instance has exclusive read/write access - to session data.""" - - loaded = False - """ - If True, data has been retrieved from storage. This should happen - automatically on the first attempt to access session data.""" - - clean_thread = None - "Class-level Monitor which calls self.clean_up." - - clean_freq = 5 - "The poll rate for expired session cleanup in minutes." - - originalid = None - "The session id passed by the client. May be missing or unsafe." - - missing = False - "True if the session requested by the client did not exist." - - regenerated = False - """ - True if the application called session.regenerate(). This is not set by - internal calls to regenerate the session id.""" - - debug = False - "If True, log debug information." 
- - # --------------------- Session management methods --------------------- # - - def __init__(self, id=None, **kwargs): - self.id_observers = [] - self._data = {} - - for k, v in kwargs.items(): - setattr(self, k, v) - - self.originalid = id - self.missing = False - if id is None: - if self.debug: - cherrypy.log('No id given; making a new one', 'TOOLS.SESSIONS') - self._regenerate() - else: - self.id = id - if self._exists(): - if self.debug: - cherrypy.log('Set id to %s.' % id, 'TOOLS.SESSIONS') - else: - if self.debug: - cherrypy.log('Expired or malicious session %r; ' - 'making a new one' % id, 'TOOLS.SESSIONS') - # Expired or malicious session. Make a new one. - # See https://bitbucket.org/cherrypy/cherrypy/issue/709. - self.id = None - self.missing = True - self._regenerate() - - def now(self): - """Generate the session specific concept of 'now'. - - Other session providers can override this to use alternative, - possibly timezone aware, versions of 'now'. - """ - return datetime.datetime.now() - - def regenerate(self): - """Replace the current session (with a new id).""" - self.regenerated = True - self._regenerate() - - def _regenerate(self): - if self.id is not None: - if self.debug: - cherrypy.log( - 'Deleting the existing session %r before ' - 'regeneration.' % self.id, - 'TOOLS.SESSIONS') - self.delete() - - old_session_was_locked = self.locked - if old_session_was_locked: - self.release_lock() - if self.debug: - cherrypy.log('Old lock released.', 'TOOLS.SESSIONS') - - self.id = None - while self.id is None: - self.id = self.generate_id() - # Assert that the generated id is not already stored. - if self._exists(): - self.id = None - if self.debug: - cherrypy.log('Set id to generated %s.' 
% self.id, - 'TOOLS.SESSIONS') - - if old_session_was_locked: - self.acquire_lock() - if self.debug: - cherrypy.log('Regenerated lock acquired.', 'TOOLS.SESSIONS') - - def clean_up(self): - """Clean up expired sessions.""" - pass - - def generate_id(self): - """Return a new session id.""" - return random20() - - def save(self): - """Save session data.""" - try: - # If session data has never been loaded then it's never been - # accessed: no need to save it - if self.loaded: - t = datetime.timedelta(seconds=self.timeout * 60) - expiration_time = self.now() + t - if self.debug: - cherrypy.log('Saving session %r with expiry %s' % - (self.id, expiration_time), - 'TOOLS.SESSIONS') - self._save(expiration_time) - else: - if self.debug: - cherrypy.log( - 'Skipping save of session %r (no session loaded).' % - self.id, 'TOOLS.SESSIONS') - finally: - if self.locked: - # Always release the lock if the user didn't release it - self.release_lock() - if self.debug: - cherrypy.log('Lock released after save.', 'TOOLS.SESSIONS') - - def load(self): - """Copy stored session data into this session instance.""" - data = self._load() - # data is either None or a tuple (session_data, expiration_time) - if data is None or data[1] < self.now(): - if self.debug: - cherrypy.log('Expired session %r, flushing data.' % self.id, - 'TOOLS.SESSIONS') - self._data = {} - else: - if self.debug: - cherrypy.log('Data loaded for session %r.' % self.id, - 'TOOLS.SESSIONS') - self._data = data[0] - self.loaded = True - - # Stick the clean_thread in the class, not the instance. - # The instances are created and destroyed per-request. - cls = self.__class__ - if self.clean_freq and not cls.clean_thread: - # clean_up is an instancemethod and not a classmethod, - # so that tool config can be accessed inside the method. 
- t = cherrypy.process.plugins.Monitor( - cherrypy.engine, self.clean_up, self.clean_freq * 60, - name='Session cleanup') - t.subscribe() - cls.clean_thread = t - t.start() - if self.debug: - cherrypy.log('Started cleanup thread.', 'TOOLS.SESSIONS') - - def delete(self): - """Delete stored session data.""" - self._delete() - if self.debug: - cherrypy.log('Deleted session %s.' % self.id, - 'TOOLS.SESSIONS') - - # -------------------- Application accessor methods -------------------- # - - def __getitem__(self, key): - if not self.loaded: - self.load() - return self._data[key] - - def __setitem__(self, key, value): - if not self.loaded: - self.load() - self._data[key] = value - - def __delitem__(self, key): - if not self.loaded: - self.load() - del self._data[key] - - def pop(self, key, default=missing): - """Remove the specified key and return the corresponding value. - If key is not found, default is returned if given, - otherwise KeyError is raised. - """ - if not self.loaded: - self.load() - if default is missing: - return self._data.pop(key) - else: - return self._data.pop(key, default) - - def __contains__(self, key): - if not self.loaded: - self.load() - return key in self._data - - if hasattr({}, 'has_key'): - def has_key(self, key): - """D.has_key(k) -> True if D has a key k, else False.""" - if not self.loaded: - self.load() - return key in self._data - - def get(self, key, default=None): - """D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.""" - if not self.loaded: - self.load() - return self._data.get(key, default) - - def update(self, d): - """D.update(E) -> None. Update D from E: for k in E: D[k] = E[k].""" - if not self.loaded: - self.load() - self._data.update(d) - - def setdefault(self, key, default=None): - """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D.""" - if not self.loaded: - self.load() - return self._data.setdefault(key, default) - - def clear(self): - """D.clear() -> None. 
Remove all items from D.""" - if not self.loaded: - self.load() - self._data.clear() - - def keys(self): - """D.keys() -> list of D's keys.""" - if not self.loaded: - self.load() - return self._data.keys() - - def items(self): - """D.items() -> list of D's (key, value) pairs, as 2-tuples.""" - if not self.loaded: - self.load() - return self._data.items() - - def values(self): - """D.values() -> list of D's values.""" - if not self.loaded: - self.load() - return self._data.values() - - -class RamSession(Session): - - # Class-level objects. Don't rebind these! - cache = {} - locks = {} - - def clean_up(self): - """Clean up expired sessions.""" - - now = self.now() - for _id, (data, expiration_time) in copyitems(self.cache): - if expiration_time <= now: - try: - del self.cache[_id] - except KeyError: - pass - try: - if self.locks[_id].acquire(blocking=False): - lock = self.locks.pop(_id) - lock.release() - except KeyError: - pass - - # added to remove obsolete lock objects - for _id in list(self.locks): - if _id not in self.cache and self.locks[_id].acquire(blocking=False): - lock = self.locks.pop(_id) - lock.release() - - def _exists(self): - return self.id in self.cache - - def _load(self): - return self.cache.get(self.id) - - def _save(self, expiration_time): - self.cache[self.id] = (self._data, expiration_time) - - def _delete(self): - self.cache.pop(self.id, None) - - def acquire_lock(self): - """Acquire an exclusive lock on the currently-loaded session data.""" - self.locked = True - self.locks.setdefault(self.id, threading.RLock()).acquire() - - def release_lock(self): - """Release the lock on the currently-loaded session data.""" - self.locks[self.id].release() - self.locked = False - - def __len__(self): - """Return the number of active sessions.""" - return len(self.cache) - - -class FileSession(Session): - - """Implementation of the File backend for sessions - - storage_path - The folder where session data will be saved. 
Each session - will be saved as pickle.dump(data, expiration_time) in its own file; - the filename will be self.SESSION_PREFIX + self.id. - - lock_timeout - A timedelta or numeric seconds indicating how long - to block acquiring a lock. If None (default), acquiring a lock - will block indefinitely. - """ - - SESSION_PREFIX = 'session-' - LOCK_SUFFIX = '.lock' - pickle_protocol = pickle.HIGHEST_PROTOCOL - - def __init__(self, id=None, **kwargs): - # The 'storage_path' arg is required for file-based sessions. - kwargs['storage_path'] = os.path.abspath(kwargs['storage_path']) - kwargs.setdefault('lock_timeout', None) - - Session.__init__(self, id=id, **kwargs) - - # validate self.lock_timeout - if isinstance(self.lock_timeout, (int, float)): - self.lock_timeout = datetime.timedelta(seconds=self.lock_timeout) - if not isinstance(self.lock_timeout, (datetime.timedelta, type(None))): - raise ValueError("Lock timeout must be numeric seconds or " - "a timedelta instance.") - - def setup(cls, **kwargs): - """Set up the storage system for file-based sessions. - - This should only be called once per process; this will be done - automatically when using sessions.init (as the built-in Tool does). - """ - # The 'storage_path' arg is required for file-based sessions. - kwargs['storage_path'] = os.path.abspath(kwargs['storage_path']) - - for k, v in kwargs.items(): - setattr(cls, k, v) - setup = classmethod(setup) - - def _get_file_path(self): - f = os.path.join(self.storage_path, self.SESSION_PREFIX + self.id) - if not os.path.abspath(f).startswith(self.storage_path): - raise cherrypy.HTTPError(400, "Invalid session id in cookie.") - return f - - def _exists(self): - path = self._get_file_path() - return os.path.exists(path) - - def _load(self, path=None): - assert self.locked, ("The session load without being locked. 
" - "Check your tools' priority levels.") - if path is None: - path = self._get_file_path() - try: - f = open(path, "rb") - try: - return pickle.load(f) - finally: - f.close() - except (IOError, EOFError): - e = sys.exc_info()[1] - if self.debug: - cherrypy.log("Error loading the session pickle: %s" % - e, 'TOOLS.SESSIONS') - return None - - def _save(self, expiration_time): - assert self.locked, ("The session was saved without being locked. " - "Check your tools' priority levels.") - f = open(self._get_file_path(), "wb") - try: - pickle.dump((self._data, expiration_time), f, self.pickle_protocol) - finally: - f.close() - - def _delete(self): - assert self.locked, ("The session deletion without being locked. " - "Check your tools' priority levels.") - try: - os.unlink(self._get_file_path()) - except OSError: - pass - - def acquire_lock(self, path=None): - """Acquire an exclusive lock on the currently-loaded session data.""" - if path is None: - path = self._get_file_path() - path += self.LOCK_SUFFIX - checker = locking.LockChecker(self.id, self.lock_timeout) - while not checker.expired(): - try: - self.lock = lockfile.LockFile(path) - except lockfile.LockError: - time.sleep(0.1) - else: - break - self.locked = True - if self.debug: - cherrypy.log('Lock acquired.', 'TOOLS.SESSIONS') - - def release_lock(self, path=None): - """Release the lock on the currently-loaded session data.""" - self.lock.release() - self.lock.remove() - self.locked = False - - def clean_up(self): - """Clean up expired sessions.""" - now = self.now() - # Iterate over all session files in self.storage_path - for fname in os.listdir(self.storage_path): - if (fname.startswith(self.SESSION_PREFIX) - and not fname.endswith(self.LOCK_SUFFIX)): - # We have a session file: lock and load it and check - # if it's expired. If it fails, nevermind. 
- path = os.path.join(self.storage_path, fname) - self.acquire_lock(path) - if self.debug: - # This is a bit of a hack, since we're calling clean_up - # on the first instance rather than the entire class, - # so depending on whether you have "debug" set on the - # path of the first session called, this may not run. - cherrypy.log('Cleanup lock acquired.', 'TOOLS.SESSIONS') - - try: - contents = self._load(path) - # _load returns None on IOError - if contents is not None: - data, expiration_time = contents - if expiration_time < now: - # Session expired: deleting it - os.unlink(path) - finally: - self.release_lock(path) - - def __len__(self): - """Return the number of active sessions.""" - return len([fname for fname in os.listdir(self.storage_path) - if (fname.startswith(self.SESSION_PREFIX) - and not fname.endswith(self.LOCK_SUFFIX))]) - - -class PostgresqlSession(Session): - - """ Implementation of the PostgreSQL backend for sessions. It assumes - a table like this:: - - create table session ( - id varchar(40), - data text, - expiration_time timestamp - ) - - You must provide your own get_db function. - """ - - pickle_protocol = pickle.HIGHEST_PROTOCOL - - def __init__(self, id=None, **kwargs): - Session.__init__(self, id, **kwargs) - self.cursor = self.db.cursor() - - def setup(cls, **kwargs): - """Set up the storage system for Postgres-based sessions. - - This should only be called once per process; this will be done - automatically when using sessions.init (as the built-in Tool does). 
- """ - for k, v in kwargs.items(): - setattr(cls, k, v) - - self.db = self.get_db() - setup = classmethod(setup) - - def __del__(self): - if self.cursor: - self.cursor.close() - self.db.commit() - - def _exists(self): - # Select session data from table - self.cursor.execute('select data, expiration_time from session ' - 'where id=%s', (self.id,)) - rows = self.cursor.fetchall() - return bool(rows) - - def _load(self): - # Select session data from table - self.cursor.execute('select data, expiration_time from session ' - 'where id=%s', (self.id,)) - rows = self.cursor.fetchall() - if not rows: - return None - - pickled_data, expiration_time = rows[0] - data = pickle.loads(pickled_data) - return data, expiration_time - - def _save(self, expiration_time): - pickled_data = pickle.dumps(self._data, self.pickle_protocol) - self.cursor.execute('update session set data = %s, ' - 'expiration_time = %s where id = %s', - (pickled_data, expiration_time, self.id)) - - def _delete(self): - self.cursor.execute('delete from session where id=%s', (self.id,)) - - def acquire_lock(self): - """Acquire an exclusive lock on the currently-loaded session data.""" - # We use the "for update" clause to lock the row - self.locked = True - self.cursor.execute('select id from session where id=%s for update', - (self.id,)) - if self.debug: - cherrypy.log('Lock acquired.', 'TOOLS.SESSIONS') - - def release_lock(self): - """Release the lock on the currently-loaded session data.""" - # We just close the cursor and that will remove the lock - # introduced by the "for update" clause - self.cursor.close() - self.locked = False - - def clean_up(self): - """Clean up expired sessions.""" - self.cursor.execute('delete from session where expiration_time < %s', - (self.now(),)) - - -class MemcachedSession(Session): - - # The most popular memcached client for Python isn't thread-safe. - # Wrap all .get and .set operations in a single lock. 
- mc_lock = threading.RLock() - - # This is a seperate set of locks per session id. - locks = {} - - servers = ['127.0.0.1:11211'] - - def setup(cls, **kwargs): - """Set up the storage system for memcached-based sessions. - - This should only be called once per process; this will be done - automatically when using sessions.init (as the built-in Tool does). - """ - for k, v in kwargs.items(): - setattr(cls, k, v) - - import memcache - cls.cache = memcache.Client(cls.servers) - setup = classmethod(setup) - - def _get_id(self): - return self._id - - def _set_id(self, value): - # This encode() call is where we differ from the superclass. - # Memcache keys MUST be byte strings, not unicode. - if isinstance(value, unicodestr): - value = value.encode('utf-8') - - self._id = value - for o in self.id_observers: - o(value) - id = property(_get_id, _set_id, doc="The current session ID.") - - def _exists(self): - self.mc_lock.acquire() - try: - return bool(self.cache.get(self.id)) - finally: - self.mc_lock.release() - - def _load(self): - self.mc_lock.acquire() - try: - return self.cache.get(self.id) - finally: - self.mc_lock.release() - - def _save(self, expiration_time): - # Send the expiration time as "Unix time" (seconds since 1/1/1970) - td = int(time.mktime(expiration_time.timetuple())) - self.mc_lock.acquire() - try: - if not self.cache.set(self.id, (self._data, expiration_time), td): - raise AssertionError( - "Session data for id %r not set." 
% self.id) - finally: - self.mc_lock.release() - - def _delete(self): - self.cache.delete(self.id) - - def acquire_lock(self): - """Acquire an exclusive lock on the currently-loaded session data.""" - self.locked = True - self.locks.setdefault(self.id, threading.RLock()).acquire() - if self.debug: - cherrypy.log('Lock acquired.', 'TOOLS.SESSIONS') - - def release_lock(self): - """Release the lock on the currently-loaded session data.""" - self.locks[self.id].release() - self.locked = False - - def __len__(self): - """Return the number of active sessions.""" - raise NotImplementedError - - -# Hook functions (for CherryPy tools) - -def save(): - """Save any changed session data.""" - - if not hasattr(cherrypy.serving, "session"): - return - request = cherrypy.serving.request - response = cherrypy.serving.response - - # Guard against running twice - if hasattr(request, "_sessionsaved"): - return - request._sessionsaved = True - - if response.stream: - # If the body is being streamed, we have to save the data - # *after* the response has been written out - request.hooks.attach('on_end_request', cherrypy.session.save) - else: - # If the body is not being streamed, we save the data now - # (so we can release the lock). - if is_iterator(response.body): - response.collapse_body() - cherrypy.session.save() -save.failsafe = True - - -def close(): - """Close the session object for this request.""" - sess = getattr(cherrypy.serving, "session", None) - if getattr(sess, "locked", False): - # If the session is still locked we release the lock - sess.release_lock() - if sess.debug: - cherrypy.log('Lock released on close.', 'TOOLS.SESSIONS') -close.failsafe = True -close.priority = 90 - - -def init(storage_type='ram', path=None, path_header=None, name='session_id', - timeout=60, domain=None, secure=False, clean_freq=5, - persistent=True, httponly=False, debug=False, **kwargs): - """Initialize session object (using cookies). 
- - storage_type - One of 'ram', 'file', 'postgresql', 'memcached'. This will be - used to look up the corresponding class in cherrypy.lib.sessions - globals. For example, 'file' will use the FileSession class. - - path - The 'path' value to stick in the response cookie metadata. - - path_header - If 'path' is None (the default), then the response - cookie 'path' will be pulled from request.headers[path_header]. - - name - The name of the cookie. - - timeout - The expiration timeout (in minutes) for the stored session data. - If 'persistent' is True (the default), this is also the timeout - for the cookie. - - domain - The cookie domain. - - secure - If False (the default) the cookie 'secure' value will not - be set. If True, the cookie 'secure' value will be set (to 1). - - clean_freq (minutes) - The poll rate for expired session cleanup. - - persistent - If True (the default), the 'timeout' argument will be used - to expire the cookie. If False, the cookie will not have an expiry, - and the cookie will be a "session cookie" which expires when the - browser is closed. - - httponly - If False (the default) the cookie 'httponly' value will not be set. - If True, the cookie 'httponly' value will be set (to 1). - - Any additional kwargs will be bound to the new Session instance, - and may be specific to the storage type. See the subclass of Session - you're using for more information. - """ - - request = cherrypy.serving.request - - # Guard against running twice - if hasattr(request, "_session_init_flag"): - return - request._session_init_flag = True - - # Check if request came with a session ID - id = None - if name in request.cookie: - id = request.cookie[name].value - if debug: - cherrypy.log('ID obtained from request.cookie: %r' % id, - 'TOOLS.SESSIONS') - - # Find the storage class and call setup (first time only). 
- storage_class = storage_type.title() + 'Session' - storage_class = globals()[storage_class] - if not hasattr(cherrypy, "session"): - if hasattr(storage_class, "setup"): - storage_class.setup(**kwargs) - - # Create and attach a new Session instance to cherrypy.serving. - # It will possess a reference to (and lock, and lazily load) - # the requested session data. - kwargs['timeout'] = timeout - kwargs['clean_freq'] = clean_freq - cherrypy.serving.session = sess = storage_class(id, **kwargs) - sess.debug = debug - - def update_cookie(id): - """Update the cookie every time the session id changes.""" - cherrypy.serving.response.cookie[name] = id - sess.id_observers.append(update_cookie) - - # Create cherrypy.session which will proxy to cherrypy.serving.session - if not hasattr(cherrypy, "session"): - cherrypy.session = cherrypy._ThreadLocalProxy('session') - - if persistent: - cookie_timeout = timeout - else: - # See http://support.microsoft.com/kb/223799/EN-US/ - # and http://support.mozilla.com/en-US/kb/Cookies - cookie_timeout = None - set_response_cookie(path=path, path_header=path_header, name=name, - timeout=cookie_timeout, domain=domain, secure=secure, - httponly=httponly) - - -def set_response_cookie(path=None, path_header=None, name='session_id', - timeout=60, domain=None, secure=False, httponly=False): - """Set a response cookie for the client. - - path - the 'path' value to stick in the response cookie metadata. - - path_header - if 'path' is None (the default), then the response - cookie 'path' will be pulled from request.headers[path_header]. - - name - the name of the cookie. - - timeout - the expiration timeout for the cookie. If 0 or other boolean - False, no 'expires' param will be set, and the cookie will be a - "session cookie" which expires when the browser is closed. - - domain - the cookie domain. - - secure - if False (the default) the cookie 'secure' value will not - be set. If True, the cookie 'secure' value will be set (to 1). 
- - httponly - If False (the default) the cookie 'httponly' value will not be set. - If True, the cookie 'httponly' value will be set (to 1). - - """ - # Set response cookie - cookie = cherrypy.serving.response.cookie - cookie[name] = cherrypy.serving.session.id - cookie[name]['path'] = ( - path or - cherrypy.serving.request.headers.get(path_header) or - '/' - ) - - # We'd like to use the "max-age" param as indicated in - # http://www.faqs.org/rfcs/rfc2109.html but IE doesn't - # save it to disk and the session is lost if people close - # the browser. So we have to use the old "expires" ... sigh ... -## cookie[name]['max-age'] = timeout * 60 - if timeout: - e = time.time() + (timeout * 60) - cookie[name]['expires'] = httputil.HTTPDate(e) - if domain is not None: - cookie[name]['domain'] = domain - if secure: - cookie[name]['secure'] = 1 - if httponly: - if not cookie[name].isReservedKey('httponly'): - raise ValueError("The httponly cookie token is not supported.") - cookie[name]['httponly'] = 1 - - -def expire(): - """Expire the current session cookie.""" - name = cherrypy.serving.request.config.get( - 'tools.sessions.name', 'session_id') - one_year = 60 * 60 * 24 * 365 - e = time.time() - one_year - cherrypy.serving.response.cookie[name]['expires'] = httputil.HTTPDate(e) diff --git a/libs_crutch/contrib/cherrypy/lib/static.py b/libs_crutch/contrib/cherrypy/lib/static.py deleted file mode 100644 index a630dae..0000000 --- a/libs_crutch/contrib/cherrypy/lib/static.py +++ /dev/null @@ -1,378 +0,0 @@ -import os -import re -import stat -import mimetypes - -try: - from io import UnsupportedOperation -except ImportError: - UnsupportedOperation = object() - -import cherrypy -from cherrypy._cpcompat import ntob, unquote -from cherrypy.lib import cptools, httputil, file_generator_limited - - -mimetypes.init() -mimetypes.types_map['.dwg'] = 'image/x-dwg' -mimetypes.types_map['.ico'] = 'image/x-icon' -mimetypes.types_map['.bz2'] = 'application/x-bzip2' 
-mimetypes.types_map['.gz'] = 'application/x-gzip' - - -def serve_file(path, content_type=None, disposition=None, name=None, - debug=False): - """Set status, headers, and body in order to serve the given path. - - The Content-Type header will be set to the content_type arg, if provided. - If not provided, the Content-Type will be guessed by the file extension - of the 'path' argument. - - If disposition is not None, the Content-Disposition header will be set - to "; filename=". If name is None, it will be set - to the basename of path. If disposition is None, no Content-Disposition - header will be written. - """ - - response = cherrypy.serving.response - - # If path is relative, users should fix it by making path absolute. - # That is, CherryPy should not guess where the application root is. - # It certainly should *not* use cwd (since CP may be invoked from a - # variety of paths). If using tools.staticdir, you can make your relative - # paths become absolute by supplying a value for "tools.staticdir.root". - if not os.path.isabs(path): - msg = "'%s' is not an absolute path." % path - if debug: - cherrypy.log(msg, 'TOOLS.STATICFILE') - raise ValueError(msg) - - try: - st = os.stat(path) - except OSError: - if debug: - cherrypy.log('os.stat(%r) failed' % path, 'TOOLS.STATIC') - raise cherrypy.NotFound() - - # Check if path is a directory. - if stat.S_ISDIR(st.st_mode): - # Let the caller deal with it as they like. - if debug: - cherrypy.log('%r is a directory' % path, 'TOOLS.STATIC') - raise cherrypy.NotFound() - - # Set the Last-Modified response header, so that - # modified-since validation code can work. 
- response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime) - cptools.validate_since() - - if content_type is None: - # Set content-type based on filename extension - ext = "" - i = path.rfind('.') - if i != -1: - ext = path[i:].lower() - content_type = mimetypes.types_map.get(ext, None) - if content_type is not None: - response.headers['Content-Type'] = content_type - if debug: - cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC') - - cd = None - if disposition is not None: - if name is None: - name = os.path.basename(path) - cd = '%s; filename="%s"' % (disposition, name) - response.headers["Content-Disposition"] = cd - if debug: - cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC') - - # Set Content-Length and use an iterable (file object) - # this way CP won't load the whole file in memory - content_length = st.st_size - fileobj = open(path, 'rb') - return _serve_fileobj(fileobj, content_type, content_length, debug=debug) - - -def serve_fileobj(fileobj, content_type=None, disposition=None, name=None, - debug=False): - """Set status, headers, and body in order to serve the given file object. - - The Content-Type header will be set to the content_type arg, if provided. - - If disposition is not None, the Content-Disposition header will be set - to "; filename=". If name is None, 'filename' will - not be set. If disposition is None, no Content-Disposition header will - be written. - - CAUTION: If the request contains a 'Range' header, one or more seek()s will - be performed on the file object. This may cause undesired behavior if - the file object is not seekable. It could also produce undesired results - if the caller set the read position of the file object prior to calling - serve_fileobj(), expecting that the data would be served starting from that - position. 
- """ - - response = cherrypy.serving.response - - try: - st = os.fstat(fileobj.fileno()) - except AttributeError: - if debug: - cherrypy.log('os has no fstat attribute', 'TOOLS.STATIC') - content_length = None - except UnsupportedOperation: - content_length = None - else: - # Set the Last-Modified response header, so that - # modified-since validation code can work. - response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime) - cptools.validate_since() - content_length = st.st_size - - if content_type is not None: - response.headers['Content-Type'] = content_type - if debug: - cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC') - - cd = None - if disposition is not None: - if name is None: - cd = disposition - else: - cd = '%s; filename="%s"' % (disposition, name) - response.headers["Content-Disposition"] = cd - if debug: - cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC') - - return _serve_fileobj(fileobj, content_type, content_length, debug=debug) - - -def _serve_fileobj(fileobj, content_type, content_length, debug=False): - """Internal. Set response.body to the given file object, perhaps ranged.""" - response = cherrypy.serving.response - - # HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code - request = cherrypy.serving.request - if request.protocol >= (1, 1): - response.headers["Accept-Ranges"] = "bytes" - r = httputil.get_ranges(request.headers.get('Range'), content_length) - if r == []: - response.headers['Content-Range'] = "bytes */%s" % content_length - message = ("Invalid Range (first-byte-pos greater than " - "Content-Length)") - if debug: - cherrypy.log(message, 'TOOLS.STATIC') - raise cherrypy.HTTPError(416, message) - - if r: - if len(r) == 1: - # Return a single-part response. 
- start, stop = r[0] - if stop > content_length: - stop = content_length - r_len = stop - start - if debug: - cherrypy.log( - 'Single part; start: %r, stop: %r' % (start, stop), - 'TOOLS.STATIC') - response.status = "206 Partial Content" - response.headers['Content-Range'] = ( - "bytes %s-%s/%s" % (start, stop - 1, content_length)) - response.headers['Content-Length'] = r_len - fileobj.seek(start) - response.body = file_generator_limited(fileobj, r_len) - else: - # Return a multipart/byteranges response. - response.status = "206 Partial Content" - try: - # Python 3 - from email.generator import _make_boundary as make_boundary - except ImportError: - # Python 2 - from mimetools import choose_boundary as make_boundary - boundary = make_boundary() - ct = "multipart/byteranges; boundary=%s" % boundary - response.headers['Content-Type'] = ct - if "Content-Length" in response.headers: - # Delete Content-Length header so finalize() recalcs it. - del response.headers["Content-Length"] - - def file_ranges(): - # Apache compatibility: - yield ntob("\r\n") - - for start, stop in r: - if debug: - cherrypy.log( - 'Multipart; start: %r, stop: %r' % ( - start, stop), - 'TOOLS.STATIC') - yield ntob("--" + boundary, 'ascii') - yield ntob("\r\nContent-type: %s" % content_type, - 'ascii') - yield ntob( - "\r\nContent-range: bytes %s-%s/%s\r\n\r\n" % ( - start, stop - 1, content_length), - 'ascii') - fileobj.seek(start) - gen = file_generator_limited(fileobj, stop - start) - for chunk in gen: - yield chunk - yield ntob("\r\n") - # Final boundary - yield ntob("--" + boundary + "--", 'ascii') - - # Apache compatibility: - yield ntob("\r\n") - response.body = file_ranges() - return response.body - else: - if debug: - cherrypy.log('No byteranges requested', 'TOOLS.STATIC') - - # Set Content-Length and use an iterable (file object) - # this way CP won't load the whole file in memory - response.headers['Content-Length'] = content_length - response.body = fileobj - return response.body - - 
-def serve_download(path, name=None): - """Serve 'path' as an application/x-download attachment.""" - # This is such a common idiom I felt it deserved its own wrapper. - return serve_file(path, "application/x-download", "attachment", name) - - -def _attempt(filename, content_types, debug=False): - if debug: - cherrypy.log('Attempting %r (content_types %r)' % - (filename, content_types), 'TOOLS.STATICDIR') - try: - # you can set the content types for a - # complete directory per extension - content_type = None - if content_types: - r, ext = os.path.splitext(filename) - content_type = content_types.get(ext[1:], None) - serve_file(filename, content_type=content_type, debug=debug) - return True - except cherrypy.NotFound: - # If we didn't find the static file, continue handling the - # request. We might find a dynamic handler instead. - if debug: - cherrypy.log('NotFound', 'TOOLS.STATICFILE') - return False - - -def staticdir(section, dir, root="", match="", content_types=None, index="", - debug=False): - """Serve a static resource from the given (root +) dir. - - match - If given, request.path_info will be searched for the given - regular expression before attempting to serve static content. - - content_types - If given, it should be a Python dictionary of - {file-extension: content-type} pairs, where 'file-extension' is - a string (e.g. "gif") and 'content-type' is the value to write - out in the Content-Type response header (e.g. "image/gif"). - - index - If provided, it should be the (relative) name of a file to - serve for directory requests. For example, if the dir argument is - '/home/me', the Request-URI is 'myapp', and the index arg is - 'index.html', the file '/home/me/myapp/index.html' will be sought. 
- """ - request = cherrypy.serving.request - if request.method not in ('GET', 'HEAD'): - if debug: - cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICDIR') - return False - - if match and not re.search(match, request.path_info): - if debug: - cherrypy.log('request.path_info %r does not match pattern %r' % - (request.path_info, match), 'TOOLS.STATICDIR') - return False - - # Allow the use of '~' to refer to a user's home directory. - dir = os.path.expanduser(dir) - - # If dir is relative, make absolute using "root". - if not os.path.isabs(dir): - if not root: - msg = "Static dir requires an absolute dir (or root)." - if debug: - cherrypy.log(msg, 'TOOLS.STATICDIR') - raise ValueError(msg) - dir = os.path.join(root, dir) - - # Determine where we are in the object tree relative to 'section' - # (where the static tool was defined). - if section == 'global': - section = "/" - section = section.rstrip(r"\/") - branch = request.path_info[len(section) + 1:] - branch = unquote(branch.lstrip(r"\/")) - - # If branch is "", filename will end in a slash - filename = os.path.join(dir, branch) - if debug: - cherrypy.log('Checking file %r to fulfill %r' % - (filename, request.path_info), 'TOOLS.STATICDIR') - - # There's a chance that the branch pulled from the URL might - # have ".." or similar uplevel attacks in it. Check that the final - # filename is a child of dir. - if not os.path.normpath(filename).startswith(os.path.normpath(dir)): - raise cherrypy.HTTPError(403) # Forbidden - - handled = _attempt(filename, content_types) - if not handled: - # Check for an index file if a folder was requested. - if index: - handled = _attempt(os.path.join(filename, index), content_types) - if handled: - request.is_index = filename[-1] in (r"\/") - return handled - - -def staticfile(filename, root=None, match="", content_types=None, debug=False): - """Serve a static resource from the given (root +) filename. 
- - match - If given, request.path_info will be searched for the given - regular expression before attempting to serve static content. - - content_types - If given, it should be a Python dictionary of - {file-extension: content-type} pairs, where 'file-extension' is - a string (e.g. "gif") and 'content-type' is the value to write - out in the Content-Type response header (e.g. "image/gif"). - - """ - request = cherrypy.serving.request - if request.method not in ('GET', 'HEAD'): - if debug: - cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICFILE') - return False - - if match and not re.search(match, request.path_info): - if debug: - cherrypy.log('request.path_info %r does not match pattern %r' % - (request.path_info, match), 'TOOLS.STATICFILE') - return False - - # If filename is relative, make absolute using "root". - if not os.path.isabs(filename): - if not root: - msg = "Static tool requires an absolute filename (got '%s')." % ( - filename,) - if debug: - cherrypy.log(msg, 'TOOLS.STATICFILE') - raise ValueError(msg) - filename = os.path.join(root, filename) - - return _attempt(filename, content_types, debug=debug) diff --git a/libs_crutch/contrib/cherrypy/lib/xmlrpcutil.py b/libs_crutch/contrib/cherrypy/lib/xmlrpcutil.py deleted file mode 100644 index 9fc9564..0000000 --- a/libs_crutch/contrib/cherrypy/lib/xmlrpcutil.py +++ /dev/null @@ -1,57 +0,0 @@ -import sys - -import cherrypy -from cherrypy._cpcompat import ntob - - -def get_xmlrpclib(): - try: - import xmlrpc.client as x - except ImportError: - import xmlrpclib as x - return x - - -def process_body(): - """Return (params, method) from request body.""" - try: - return get_xmlrpclib().loads(cherrypy.request.body.read()) - except Exception: - return ('ERROR PARAMS', ), 'ERRORMETHOD' - - -def patched_path(path): - """Return 'path', doctored for RPC.""" - if not path.endswith('/'): - path += '/' - if path.startswith('/RPC2/'): - # strip the first /rpc2 - path = path[5:] - return path - - -def 
_set_response(body): - # The XML-RPC spec (http://www.xmlrpc.com/spec) says: - # "Unless there's a lower-level error, always return 200 OK." - # Since Python's xmlrpclib interprets a non-200 response - # as a "Protocol Error", we'll just return 200 every time. - response = cherrypy.response - response.status = '200 OK' - response.body = ntob(body, 'utf-8') - response.headers['Content-Type'] = 'text/xml' - response.headers['Content-Length'] = len(body) - - -def respond(body, encoding='utf-8', allow_none=0): - xmlrpclib = get_xmlrpclib() - if not isinstance(body, xmlrpclib.Fault): - body = (body,) - _set_response(xmlrpclib.dumps(body, methodresponse=1, - encoding=encoding, - allow_none=allow_none)) - - -def on_error(*args, **kwargs): - body = str(sys.exc_info()[1]) - xmlrpclib = get_xmlrpclib() - _set_response(xmlrpclib.dumps(xmlrpclib.Fault(1, body))) diff --git a/libs_crutch/contrib/cherrypy/process/__init__.py b/libs_crutch/contrib/cherrypy/process/__init__.py deleted file mode 100644 index f15b123..0000000 --- a/libs_crutch/contrib/cherrypy/process/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Site container for an HTTP server. - -A Web Site Process Bus object is used to connect applications, servers, -and frameworks with site-wide services such as daemonization, process -reload, signal handling, drop privileges, PID file management, logging -for all of these, and many more. - -The 'plugins' module defines a few abstract and concrete services for -use with the bus. Some use tool-specific channels; see the documentation -for each class. 
-""" - -from cherrypy.process.wspbus import bus -from cherrypy.process import plugins, servers diff --git a/libs_crutch/contrib/cherrypy/process/plugins.py b/libs_crutch/contrib/cherrypy/process/plugins.py deleted file mode 100644 index c787ba9..0000000 --- a/libs_crutch/contrib/cherrypy/process/plugins.py +++ /dev/null @@ -1,717 +0,0 @@ -"""Site services for use with a Web Site Process Bus.""" - -import os -import re -import signal as _signal -import sys -import time -import threading - -from cherrypy._cpcompat import basestring, get_daemon, get_thread_ident -from cherrypy._cpcompat import ntob, set, Timer, SetDaemonProperty - -# _module__file__base is used by Autoreload to make -# absolute any filenames retrieved from sys.modules which are not -# already absolute paths. This is to work around Python's quirk -# of importing the startup script and using a relative filename -# for it in sys.modules. -# -# Autoreload examines sys.modules afresh every time it runs. If an application -# changes the current directory by executing os.chdir(), then the next time -# Autoreload runs, it will not be able to find any filenames which are -# not absolute paths, because the current directory is not the same as when the -# module was first imported. Autoreload will then wrongly conclude the file -# has "changed", and initiate the shutdown/re-exec sequence. -# See ticket #917. -# For this workaround to have a decent probability of success, this module -# needs to be imported as early as possible, before the app has much chance -# to change the working directory. -_module__file__base = os.getcwd() - - -class SimplePlugin(object): - - """Plugin base class which auto-subscribes methods for known channels.""" - - bus = None - """A :class:`Bus `, usually cherrypy.engine. 
- """ - - def __init__(self, bus): - self.bus = bus - - def subscribe(self): - """Register this object as a (multi-channel) listener on the bus.""" - for channel in self.bus.listeners: - # Subscribe self.start, self.exit, etc. if present. - method = getattr(self, channel, None) - if method is not None: - self.bus.subscribe(channel, method) - - def unsubscribe(self): - """Unregister this object as a listener on the bus.""" - for channel in self.bus.listeners: - # Unsubscribe self.start, self.exit, etc. if present. - method = getattr(self, channel, None) - if method is not None: - self.bus.unsubscribe(channel, method) - - -class SignalHandler(object): - - """Register bus channels (and listeners) for system signals. - - You can modify what signals your application listens for, and what it does - when it receives signals, by modifying :attr:`SignalHandler.handlers`, - a dict of {signal name: callback} pairs. The default set is:: - - handlers = {'SIGTERM': self.bus.exit, - 'SIGHUP': self.handle_SIGHUP, - 'SIGUSR1': self.bus.graceful, - } - - The :func:`SignalHandler.handle_SIGHUP`` method calls - :func:`bus.restart()` - if the process is daemonized, but - :func:`bus.exit()` - if the process is attached to a TTY. This is because Unix window - managers tend to send SIGHUP to terminal windows when the user closes them. - - Feel free to add signals which are not available on every platform. - The :class:`SignalHandler` will ignore errors raised from attempting - to register handlers for unknown signals. - """ - - handlers = {} - """A map from signal names (e.g. 'SIGTERM') to handlers (e.g. 
bus.exit).""" - - signals = {} - """A map from signal numbers to names.""" - - for k, v in vars(_signal).items(): - if k.startswith('SIG') and not k.startswith('SIG_'): - signals[v] = k - del k, v - - def __init__(self, bus): - self.bus = bus - # Set default handlers - self.handlers = {'SIGTERM': self.bus.exit, - 'SIGHUP': self.handle_SIGHUP, - 'SIGUSR1': self.bus.graceful, - } - - if sys.platform[:4] == 'java': - del self.handlers['SIGUSR1'] - self.handlers['SIGUSR2'] = self.bus.graceful - self.bus.log("SIGUSR1 cannot be set on the JVM platform. " - "Using SIGUSR2 instead.") - self.handlers['SIGINT'] = self._jython_SIGINT_handler - - self._previous_handlers = {} - - def _jython_SIGINT_handler(self, signum=None, frame=None): - # See http://bugs.jython.org/issue1313 - self.bus.log('Keyboard Interrupt: shutting down bus') - self.bus.exit() - - def subscribe(self): - """Subscribe self.handlers to signals.""" - for sig, func in self.handlers.items(): - try: - self.set_handler(sig, func) - except ValueError: - pass - - def unsubscribe(self): - """Unsubscribe self.handlers from signals.""" - for signum, handler in self._previous_handlers.items(): - signame = self.signals[signum] - - if handler is None: - self.bus.log("Restoring %s handler to SIG_DFL." % signame) - handler = _signal.SIG_DFL - else: - self.bus.log("Restoring %s handler %r." % (signame, handler)) - - try: - our_handler = _signal.signal(signum, handler) - if our_handler is None: - self.bus.log("Restored old %s handler %r, but our " - "handler was not registered." % - (signame, handler), level=30) - except ValueError: - self.bus.log("Unable to restore %s handler %r." % - (signame, handler), level=40, traceback=True) - - def set_handler(self, signal, listener=None): - """Subscribe a handler for the given signal (number or name). - - If the optional 'listener' argument is provided, it will be - subscribed as a listener for the given signal's channel. 
- - If the given signal name or number is not available on the current - platform, ValueError is raised. - """ - if isinstance(signal, basestring): - signum = getattr(_signal, signal, None) - if signum is None: - raise ValueError("No such signal: %r" % signal) - signame = signal - else: - try: - signame = self.signals[signal] - except KeyError: - raise ValueError("No such signal: %r" % signal) - signum = signal - - prev = _signal.signal(signum, self._handle_signal) - self._previous_handlers[signum] = prev - - if listener is not None: - self.bus.log("Listening for %s." % signame) - self.bus.subscribe(signame, listener) - - def _handle_signal(self, signum=None, frame=None): - """Python signal handler (self.set_handler subscribes it for you).""" - signame = self.signals[signum] - self.bus.log("Caught signal %s." % signame) - self.bus.publish(signame) - - def handle_SIGHUP(self): - """Restart if daemonized, else exit.""" - if os.isatty(sys.stdin.fileno()): - # not daemonized (may be foreground or background) - self.bus.log("SIGHUP caught but not daemonized. Exiting.") - self.bus.exit() - else: - self.bus.log("SIGHUP caught while daemonized. Restarting.") - self.bus.restart() - - -try: - import pwd - import grp -except ImportError: - pwd, grp = None, None - - -class DropPrivileges(SimplePlugin): - - """Drop privileges. uid/gid arguments not available on Windows. - - Special thanks to `Gavin Baker `_ - """ - - def __init__(self, bus, umask=None, uid=None, gid=None): - SimplePlugin.__init__(self, bus) - self.finalized = False - self.uid = uid - self.gid = gid - self.umask = umask - - def _get_uid(self): - return self._uid - - def _set_uid(self, val): - if val is not None: - if pwd is None: - self.bus.log("pwd module not available; ignoring uid.", - level=30) - val = None - elif isinstance(val, basestring): - val = pwd.getpwnam(val)[2] - self._uid = val - uid = property(_get_uid, _set_uid, - doc="The uid under which to run. 
Availability: Unix.") - - def _get_gid(self): - return self._gid - - def _set_gid(self, val): - if val is not None: - if grp is None: - self.bus.log("grp module not available; ignoring gid.", - level=30) - val = None - elif isinstance(val, basestring): - val = grp.getgrnam(val)[2] - self._gid = val - gid = property(_get_gid, _set_gid, - doc="The gid under which to run. Availability: Unix.") - - def _get_umask(self): - return self._umask - - def _set_umask(self, val): - if val is not None: - try: - os.umask - except AttributeError: - self.bus.log("umask function not available; ignoring umask.", - level=30) - val = None - self._umask = val - umask = property( - _get_umask, - _set_umask, - doc="""The default permission mode for newly created files and - directories. - - Usually expressed in octal format, for example, ``0644``. - Availability: Unix, Windows. - """) - - def start(self): - # uid/gid - def current_ids(): - """Return the current (uid, gid) if available.""" - name, group = None, None - if pwd: - name = pwd.getpwuid(os.getuid())[0] - if grp: - group = grp.getgrgid(os.getgid())[0] - return name, group - - if self.finalized: - if not (self.uid is None and self.gid is None): - self.bus.log('Already running as uid: %r gid: %r' % - current_ids()) - else: - if self.uid is None and self.gid is None: - if pwd or grp: - self.bus.log('uid/gid not set', level=30) - else: - self.bus.log('Started as uid: %r gid: %r' % current_ids()) - if self.gid is not None: - os.setgid(self.gid) - os.setgroups([]) - if self.uid is not None: - os.setuid(self.uid) - self.bus.log('Running as uid: %r gid: %r' % current_ids()) - - # umask - if self.finalized: - if self.umask is not None: - self.bus.log('umask already set to: %03o' % self.umask) - else: - if self.umask is None: - self.bus.log('umask not set', level=30) - else: - old_umask = os.umask(self.umask) - self.bus.log('umask old: %03o, new: %03o' % - (old_umask, self.umask)) - - self.finalized = True - # This is slightly higher than 
the priority for server.start - # in order to facilitate the most common use: starting on a low - # port (which requires root) and then dropping to another user. - start.priority = 77 - - -class Daemonizer(SimplePlugin): - - """Daemonize the running script. - - Use this with a Web Site Process Bus via:: - - Daemonizer(bus).subscribe() - - When this component finishes, the process is completely decoupled from - the parent environment. Please note that when this component is used, - the return code from the parent process will still be 0 if a startup - error occurs in the forked children. Errors in the initial daemonizing - process still return proper exit codes. Therefore, if you use this - plugin to daemonize, don't use the return code as an accurate indicator - of whether the process fully started. In fact, that return code only - indicates if the process succesfully finished the first fork. - """ - - def __init__(self, bus, stdin='/dev/null', stdout='/dev/null', - stderr='/dev/null'): - SimplePlugin.__init__(self, bus) - self.stdin = stdin - self.stdout = stdout - self.stderr = stderr - self.finalized = False - - def start(self): - if self.finalized: - self.bus.log('Already deamonized.') - - # forking has issues with threads: - # http://www.opengroup.org/onlinepubs/000095399/functions/fork.html - # "The general problem with making fork() work in a multi-threaded - # world is what to do with all of the threads..." - # So we check for active threads: - if threading.activeCount() != 1: - self.bus.log('There are %r active threads. ' - 'Daemonizing now may cause strange failures.' % - threading.enumerate(), level=30) - - # See http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16 - # (or http://www.faqs.org/faqs/unix-faq/programmer/faq/ section 1.7) - # and http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66012 - - # Finish up with the current stdout/stderr - sys.stdout.flush() - sys.stderr.flush() - - # Do first fork. 
- try: - pid = os.fork() - if pid == 0: - # This is the child process. Continue. - pass - else: - # This is the first parent. Exit, now that we've forked. - self.bus.log('Forking once.') - os._exit(0) - except OSError: - # Python raises OSError rather than returning negative numbers. - exc = sys.exc_info()[1] - sys.exit("%s: fork #1 failed: (%d) %s\n" - % (sys.argv[0], exc.errno, exc.strerror)) - - os.setsid() - - # Do second fork - try: - pid = os.fork() - if pid > 0: - self.bus.log('Forking twice.') - os._exit(0) # Exit second parent - except OSError: - exc = sys.exc_info()[1] - sys.exit("%s: fork #2 failed: (%d) %s\n" - % (sys.argv[0], exc.errno, exc.strerror)) - - os.chdir("/") - os.umask(0) - - si = open(self.stdin, "r") - so = open(self.stdout, "a+") - se = open(self.stderr, "a+") - - # os.dup2(fd, fd2) will close fd2 if necessary, - # so we don't explicitly close stdin/out/err. - # See http://docs.python.org/lib/os-fd-ops.html - os.dup2(si.fileno(), sys.stdin.fileno()) - os.dup2(so.fileno(), sys.stdout.fileno()) - os.dup2(se.fileno(), sys.stderr.fileno()) - - self.bus.log('Daemonized to PID: %s' % os.getpid()) - self.finalized = True - start.priority = 65 - - -class PIDFile(SimplePlugin): - - """Maintain a PID file via a WSPBus.""" - - def __init__(self, bus, pidfile): - SimplePlugin.__init__(self, bus) - self.pidfile = pidfile - self.finalized = False - - def start(self): - pid = os.getpid() - if self.finalized: - self.bus.log('PID %r already written to %r.' % (pid, self.pidfile)) - else: - open(self.pidfile, "wb").write(ntob("%s\n" % pid, 'utf8')) - self.bus.log('PID %r written to %r.' % (pid, self.pidfile)) - self.finalized = True - start.priority = 70 - - def exit(self): - try: - os.remove(self.pidfile) - self.bus.log('PID file removed: %r.' % self.pidfile) - except (KeyboardInterrupt, SystemExit): - raise - except: - pass - - -class PerpetualTimer(Timer): - - """A responsive subclass of threading.Timer whose run() method repeats. 
- - Use this timer only when you really need a very interruptible timer; - this checks its 'finished' condition up to 20 times a second, which can - results in pretty high CPU usage - """ - - def __init__(self, *args, **kwargs): - "Override parent constructor to allow 'bus' to be provided." - self.bus = kwargs.pop('bus', None) - super(PerpetualTimer, self).__init__(*args, **kwargs) - - def run(self): - while True: - self.finished.wait(self.interval) - if self.finished.isSet(): - return - try: - self.function(*self.args, **self.kwargs) - except Exception: - if self.bus: - self.bus.log( - "Error in perpetual timer thread function %r." % - self.function, level=40, traceback=True) - # Quit on first error to avoid massive logs. - raise - - -class BackgroundTask(SetDaemonProperty, threading.Thread): - - """A subclass of threading.Thread whose run() method repeats. - - Use this class for most repeating tasks. It uses time.sleep() to wait - for each interval, which isn't very responsive; that is, even if you call - self.cancel(), you'll have to wait until the sleep() call finishes before - the thread stops. To compensate, it defaults to being daemonic, which means - it won't delay stopping the whole process. - """ - - def __init__(self, interval, function, args=[], kwargs={}, bus=None): - threading.Thread.__init__(self) - self.interval = interval - self.function = function - self.args = args - self.kwargs = kwargs - self.running = False - self.bus = bus - - # default to daemonic - self.daemon = True - - def cancel(self): - self.running = False - - def run(self): - self.running = True - while self.running: - time.sleep(self.interval) - if not self.running: - return - try: - self.function(*self.args, **self.kwargs) - except Exception: - if self.bus: - self.bus.log("Error in background task thread function %r." - % self.function, level=40, traceback=True) - # Quit on first error to avoid massive logs. 
- raise - - -class Monitor(SimplePlugin): - - """WSPBus listener to periodically run a callback in its own thread.""" - - callback = None - """The function to call at intervals.""" - - frequency = 60 - """The time in seconds between callback runs.""" - - thread = None - """A :class:`BackgroundTask` - thread. - """ - - def __init__(self, bus, callback, frequency=60, name=None): - SimplePlugin.__init__(self, bus) - self.callback = callback - self.frequency = frequency - self.thread = None - self.name = name - - def start(self): - """Start our callback in its own background thread.""" - if self.frequency > 0: - threadname = self.name or self.__class__.__name__ - if self.thread is None: - self.thread = BackgroundTask(self.frequency, self.callback, - bus=self.bus) - self.thread.setName(threadname) - self.thread.start() - self.bus.log("Started monitor thread %r." % threadname) - else: - self.bus.log("Monitor thread %r already started." % threadname) - start.priority = 70 - - def stop(self): - """Stop our callback's background task thread.""" - if self.thread is None: - self.bus.log("No thread running for %s." % - self.name or self.__class__.__name__) - else: - if self.thread is not threading.currentThread(): - name = self.thread.getName() - self.thread.cancel() - if not get_daemon(self.thread): - self.bus.log("Joining %r" % name) - self.thread.join() - self.bus.log("Stopped thread %r." % name) - self.thread = None - - def graceful(self): - """Stop the callback's background task thread and restart it.""" - self.stop() - self.start() - - -class Autoreloader(Monitor): - - """Monitor which re-executes the process when files change. - - This :ref:`plugin` restarts the process (via :func:`os.execv`) - if any of the files it monitors change (or is deleted). 
By default, the - autoreloader monitors all imported modules; you can add to the - set by adding to ``autoreload.files``:: - - cherrypy.engine.autoreload.files.add(myFile) - - If there are imported files you do *not* wish to monitor, you can - adjust the ``match`` attribute, a regular expression. For example, - to stop monitoring cherrypy itself:: - - cherrypy.engine.autoreload.match = r'^(?!cherrypy).+' - - Like all :class:`Monitor` plugins, - the autoreload plugin takes a ``frequency`` argument. The default is - 1 second; that is, the autoreloader will examine files once each second. - """ - - files = None - """The set of files to poll for modifications.""" - - frequency = 1 - """The interval in seconds at which to poll for modified files.""" - - match = '.*' - """A regular expression by which to match filenames.""" - - def __init__(self, bus, frequency=1, match='.*'): - self.mtimes = {} - self.files = set() - self.match = match - Monitor.__init__(self, bus, self.run, frequency) - - def start(self): - """Start our own background task thread for self.run.""" - if self.thread is None: - self.mtimes = {} - Monitor.start(self) - start.priority = 70 - - def sysfiles(self): - """Return a Set of sys.modules filenames to monitor.""" - files = set() - for k, m in list(sys.modules.items()): - if re.match(self.match, k): - if ( - hasattr(m, '__loader__') and - hasattr(m.__loader__, 'archive') - ): - f = m.__loader__.archive - else: - f = getattr(m, '__file__', None) - if f is not None and not os.path.isabs(f): - # ensure absolute paths so a os.chdir() in the app - # doesn't break me - f = os.path.normpath( - os.path.join(_module__file__base, f)) - files.add(f) - return files - - def run(self): - """Reload the process if registered files have been modified.""" - for filename in self.sysfiles() | self.files: - if filename: - if filename.endswith('.pyc'): - filename = filename[:-1] - - oldtime = self.mtimes.get(filename, 0) - if oldtime is None: - # Module with no .py file. 
Skip it. - continue - - try: - mtime = os.stat(filename).st_mtime - except OSError: - # Either a module with no .py file, or it's been deleted. - mtime = None - - if filename not in self.mtimes: - # If a module has no .py file, this will be None. - self.mtimes[filename] = mtime - else: - if mtime is None or mtime > oldtime: - # The file has been deleted or modified. - self.bus.log("Restarting because %s changed." % - filename) - self.thread.cancel() - self.bus.log("Stopped thread %r." % - self.thread.getName()) - self.bus.restart() - return - - -class ThreadManager(SimplePlugin): - - """Manager for HTTP request threads. - - If you have control over thread creation and destruction, publish to - the 'acquire_thread' and 'release_thread' channels (for each thread). - This will register/unregister the current thread and publish to - 'start_thread' and 'stop_thread' listeners in the bus as needed. - - If threads are created and destroyed by code you do not control - (e.g., Apache), then, at the beginning of every HTTP request, - publish to 'acquire_thread' only. You should not publish to - 'release_thread' in this case, since you do not know whether - the thread will be re-used or not. The bus will call - 'stop_thread' listeners for you when it stops. - """ - - threads = None - """A map of {thread ident: index number} pairs.""" - - def __init__(self, bus): - self.threads = {} - SimplePlugin.__init__(self, bus) - self.bus.listeners.setdefault('acquire_thread', set()) - self.bus.listeners.setdefault('start_thread', set()) - self.bus.listeners.setdefault('release_thread', set()) - self.bus.listeners.setdefault('stop_thread', set()) - - def acquire_thread(self): - """Run 'start_thread' listeners for the current thread. - - If the current thread has already been seen, any 'start_thread' - listeners will not be run again. 
- """ - thread_ident = get_thread_ident() - if thread_ident not in self.threads: - # We can't just use get_ident as the thread ID - # because some platforms reuse thread ID's. - i = len(self.threads) + 1 - self.threads[thread_ident] = i - self.bus.publish('start_thread', i) - - def release_thread(self): - """Release the current thread and run 'stop_thread' listeners.""" - thread_ident = get_thread_ident() - i = self.threads.pop(thread_ident, None) - if i is not None: - self.bus.publish('stop_thread', i) - - def stop(self): - """Release all threads and run all 'stop_thread' listeners.""" - for thread_ident, i in self.threads.items(): - self.bus.publish('stop_thread', i) - self.threads.clear() - graceful = stop diff --git a/libs_crutch/contrib/cherrypy/process/servers.py b/libs_crutch/contrib/cherrypy/process/servers.py deleted file mode 100644 index 6f8088b..0000000 --- a/libs_crutch/contrib/cherrypy/process/servers.py +++ /dev/null @@ -1,466 +0,0 @@ -""" -Starting in CherryPy 3.1, cherrypy.server is implemented as an -:ref:`Engine Plugin`. It's an instance of -:class:`cherrypy._cpserver.Server`, which is a subclass of -:class:`cherrypy.process.servers.ServerAdapter`. The ``ServerAdapter`` class -is designed to control other servers, as well. - -Multiple servers/ports -====================== - -If you need to start more than one HTTP server (to serve on multiple ports, or -protocols, etc.), you can manually register each one and then start them all -with engine.start:: - - s1 = ServerAdapter(cherrypy.engine, MyWSGIServer(host='0.0.0.0', port=80)) - s2 = ServerAdapter(cherrypy.engine, - another.HTTPServer(host='127.0.0.1', - SSL=True)) - s1.subscribe() - s2.subscribe() - cherrypy.engine.start() - -.. index:: SCGI - -FastCGI/SCGI -============ - -There are also Flup\ **F**\ CGIServer and Flup\ **S**\ CGIServer classes in -:mod:`cherrypy.process.servers`. 
To start an fcgi server, for example, -wrap an instance of it in a ServerAdapter:: - - addr = ('0.0.0.0', 4000) - f = servers.FlupFCGIServer(application=cherrypy.tree, bindAddress=addr) - s = servers.ServerAdapter(cherrypy.engine, httpserver=f, bind_addr=addr) - s.subscribe() - -The :doc:`cherryd` startup script will do the above for -you via its `-f` flag. -Note that you need to download and install `flup `_ -yourself, whether you use ``cherryd`` or not. - -.. _fastcgi: -.. index:: FastCGI - -FastCGI -------- - -A very simple setup lets your cherry run with FastCGI. -You just need the flup library, -plus a running Apache server (with ``mod_fastcgi``) or lighttpd server. - -CherryPy code -^^^^^^^^^^^^^ - -hello.py:: - - #!/usr/bin/python - import cherrypy - - class HelloWorld: - \"""Sample request handler class.\""" - def index(self): - return "Hello world!" - index.exposed = True - - cherrypy.tree.mount(HelloWorld()) - # CherryPy autoreload must be disabled for the flup server to work - cherrypy.config.update({'engine.autoreload.on':False}) - -Then run :doc:`/deployguide/cherryd` with the '-f' arg:: - - cherryd -c -d -f -i hello.py - -Apache -^^^^^^ - -At the top level in httpd.conf:: - - FastCgiIpcDir /tmp - FastCgiServer /path/to/cherry.fcgi -idle-timeout 120 -processes 4 - -And inside the relevant VirtualHost section:: - - # FastCGI config - AddHandler fastcgi-script .fcgi - ScriptAliasMatch (.*$) /path/to/cherry.fcgi$1 - -Lighttpd -^^^^^^^^ - -For `Lighttpd `_ you can follow these -instructions. Within ``lighttpd.conf`` make sure ``mod_fastcgi`` is -active within ``server.modules``. 
Then, within your ``$HTTP["host"]`` -directive, configure your fastcgi script like the following:: - - $HTTP["url"] =~ "" { - fastcgi.server = ( - "/" => ( - "script.fcgi" => ( - "bin-path" => "/path/to/your/script.fcgi", - "socket" => "/tmp/script.sock", - "check-local" => "disable", - "disable-time" => 1, - "min-procs" => 1, - "max-procs" => 1, # adjust as needed - ), - ), - ) - } # end of $HTTP["url"] =~ "^/" - -Please see `Lighttpd FastCGI Docs -`_ for -an explanation of the possible configuration options. -""" - -import sys -import time -import warnings - - -class ServerAdapter(object): - - """Adapter for an HTTP server. - - If you need to start more than one HTTP server (to serve on multiple - ports, or protocols, etc.), you can manually register each one and then - start them all with bus.start:: - - s1 = ServerAdapter(bus, MyWSGIServer(host='0.0.0.0', port=80)) - s2 = ServerAdapter(bus, another.HTTPServer(host='127.0.0.1', SSL=True)) - s1.subscribe() - s2.subscribe() - bus.start() - """ - - def __init__(self, bus, httpserver=None, bind_addr=None): - self.bus = bus - self.httpserver = httpserver - self.bind_addr = bind_addr - self.interrupt = None - self.running = False - - def subscribe(self): - self.bus.subscribe('start', self.start) - self.bus.subscribe('stop', self.stop) - - def unsubscribe(self): - self.bus.unsubscribe('start', self.start) - self.bus.unsubscribe('stop', self.stop) - - def start(self): - """Start the HTTP server.""" - if self.bind_addr is None: - on_what = "unknown interface (dynamic?)" - elif isinstance(self.bind_addr, tuple): - on_what = self._get_base() - else: - on_what = "socket file: %s" % self.bind_addr - - if self.running: - self.bus.log("Already serving on %s" % on_what) - return - - self.interrupt = None - if not self.httpserver: - raise ValueError("No HTTP server has been created.") - - # Start the httpserver in a new thread. 
- if isinstance(self.bind_addr, tuple): - wait_for_free_port(*self.bind_addr) - - import threading - t = threading.Thread(target=self._start_http_thread) - t.setName("HTTPServer " + t.getName()) - t.start() - - self.wait() - self.running = True - self.bus.log("Serving on %s" % on_what) - start.priority = 75 - - def _get_base(self): - if not self.httpserver: - return '' - host, port = self.bind_addr - if getattr(self.httpserver, 'ssl_certificate', None) or \ - getattr(self.httpserver, 'ssl_adapter', None): - scheme = "https" - if port != 443: - host += ":%s" % port - else: - scheme = "http" - if port != 80: - host += ":%s" % port - - return "%s://%s" % (scheme, host) - - def _start_http_thread(self): - """HTTP servers MUST be running in new threads, so that the - main thread persists to receive KeyboardInterrupt's. If an - exception is raised in the httpserver's thread then it's - trapped here, and the bus (and therefore our httpserver) - are shut down. - """ - try: - self.httpserver.start() - except KeyboardInterrupt: - self.bus.log(" hit: shutting down HTTP server") - self.interrupt = sys.exc_info()[1] - self.bus.exit() - except SystemExit: - self.bus.log("SystemExit raised: shutting down HTTP server") - self.interrupt = sys.exc_info()[1] - self.bus.exit() - raise - except: - self.interrupt = sys.exc_info()[1] - self.bus.log("Error in HTTP server: shutting down", - traceback=True, level=40) - self.bus.exit() - raise - - def wait(self): - """Wait until the HTTP server is ready to receive requests.""" - while not getattr(self.httpserver, "ready", False): - if self.interrupt: - raise self.interrupt - time.sleep(.1) - - # Wait for port to be occupied - if isinstance(self.bind_addr, tuple): - host, port = self.bind_addr - wait_for_occupied_port(host, port) - - def stop(self): - """Stop the HTTP server.""" - if self.running: - # stop() MUST block until the server is *truly* stopped. - self.httpserver.stop() - # Wait for the socket to be truly freed. 
- if isinstance(self.bind_addr, tuple): - wait_for_free_port(*self.bind_addr) - self.running = False - self.bus.log("HTTP Server %s shut down" % self.httpserver) - else: - self.bus.log("HTTP Server %s already shut down" % self.httpserver) - stop.priority = 25 - - def restart(self): - """Restart the HTTP server.""" - self.stop() - self.start() - - -class FlupCGIServer(object): - - """Adapter for a flup.server.cgi.WSGIServer.""" - - def __init__(self, *args, **kwargs): - self.args = args - self.kwargs = kwargs - self.ready = False - - def start(self): - """Start the CGI server.""" - # We have to instantiate the server class here because its __init__ - # starts a threadpool. If we do it too early, daemonize won't work. - from flup.server.cgi import WSGIServer - - self.cgiserver = WSGIServer(*self.args, **self.kwargs) - self.ready = True - self.cgiserver.run() - - def stop(self): - """Stop the HTTP server.""" - self.ready = False - - -class FlupFCGIServer(object): - - """Adapter for a flup.server.fcgi.WSGIServer.""" - - def __init__(self, *args, **kwargs): - if kwargs.get('bindAddress', None) is None: - import socket - if not hasattr(socket, 'fromfd'): - raise ValueError( - 'Dynamic FCGI server not available on this platform. ' - 'You must use a static or external one by providing a ' - 'legal bindAddress.') - self.args = args - self.kwargs = kwargs - self.ready = False - - def start(self): - """Start the FCGI server.""" - # We have to instantiate the server class here because its __init__ - # starts a threadpool. If we do it too early, daemonize won't work. - from flup.server.fcgi import WSGIServer - self.fcgiserver = WSGIServer(*self.args, **self.kwargs) - # TODO: report this bug upstream to flup. 
- # If we don't set _oldSIGs on Windows, we get: - # File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py", - # line 108, in run - # self._restoreSignalHandlers() - # File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py", - # line 156, in _restoreSignalHandlers - # for signum,handler in self._oldSIGs: - # AttributeError: 'WSGIServer' object has no attribute '_oldSIGs' - self.fcgiserver._installSignalHandlers = lambda: None - self.fcgiserver._oldSIGs = [] - self.ready = True - self.fcgiserver.run() - - def stop(self): - """Stop the HTTP server.""" - # Forcibly stop the fcgi server main event loop. - self.fcgiserver._keepGoing = False - # Force all worker threads to die off. - self.fcgiserver._threadPool.maxSpare = ( - self.fcgiserver._threadPool._idleCount) - self.ready = False - - -class FlupSCGIServer(object): - - """Adapter for a flup.server.scgi.WSGIServer.""" - - def __init__(self, *args, **kwargs): - self.args = args - self.kwargs = kwargs - self.ready = False - - def start(self): - """Start the SCGI server.""" - # We have to instantiate the server class here because its __init__ - # starts a threadpool. If we do it too early, daemonize won't work. - from flup.server.scgi import WSGIServer - self.scgiserver = WSGIServer(*self.args, **self.kwargs) - # TODO: report this bug upstream to flup. - # If we don't set _oldSIGs on Windows, we get: - # File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py", - # line 108, in run - # self._restoreSignalHandlers() - # File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py", - # line 156, in _restoreSignalHandlers - # for signum,handler in self._oldSIGs: - # AttributeError: 'WSGIServer' object has no attribute '_oldSIGs' - self.scgiserver._installSignalHandlers = lambda: None - self.scgiserver._oldSIGs = [] - self.ready = True - self.scgiserver.run() - - def stop(self): - """Stop the HTTP server.""" - self.ready = False - # Forcibly stop the scgi server main event loop. 
- self.scgiserver._keepGoing = False - # Force all worker threads to die off. - self.scgiserver._threadPool.maxSpare = 0 - - -def client_host(server_host): - """Return the host on which a client can connect to the given listener.""" - if server_host == '0.0.0.0': - # 0.0.0.0 is INADDR_ANY, which should answer on localhost. - return '127.0.0.1' - if server_host in ('::', '::0', '::0.0.0.0'): - # :: is IN6ADDR_ANY, which should answer on localhost. - # ::0 and ::0.0.0.0 are non-canonical but common - # ways to write IN6ADDR_ANY. - return '::1' - return server_host - - -def check_port(host, port, timeout=1.0): - """Raise an error if the given port is not free on the given host.""" - if not host: - raise ValueError("Host values of '' or None are not allowed.") - host = client_host(host) - port = int(port) - - import socket - - # AF_INET or AF_INET6 socket - # Get the correct address family for our host (allows IPv6 addresses) - try: - info = socket.getaddrinfo(host, port, socket.AF_UNSPEC, - socket.SOCK_STREAM) - except socket.gaierror: - if ':' in host: - info = [( - socket.AF_INET6, socket.SOCK_STREAM, 0, "", (host, port, 0, 0) - )] - else: - info = [(socket.AF_INET, socket.SOCK_STREAM, 0, "", (host, port))] - - for res in info: - af, socktype, proto, canonname, sa = res - s = None - try: - s = socket.socket(af, socktype, proto) - # See http://groups.google.com/group/cherrypy-users/ - # browse_frm/thread/bbfe5eb39c904fe0 - s.settimeout(timeout) - s.connect((host, port)) - s.close() - except socket.error: - if s: - s.close() - else: - raise IOError("Port %s is in use on %s; perhaps the previous " - "httpserver did not shut down properly." 
% - (repr(port), repr(host))) - - -# Feel free to increase these defaults on slow systems: -free_port_timeout = 0.1 -occupied_port_timeout = 1.0 - - -def wait_for_free_port(host, port, timeout=None): - """Wait for the specified port to become free (drop requests).""" - if not host: - raise ValueError("Host values of '' or None are not allowed.") - if timeout is None: - timeout = free_port_timeout - - for trial in range(50): - try: - # we are expecting a free port, so reduce the timeout - check_port(host, port, timeout=timeout) - except IOError: - # Give the old server thread time to free the port. - time.sleep(timeout) - else: - return - - raise IOError("Port %r not free on %r" % (port, host)) - - -def wait_for_occupied_port(host, port, timeout=None): - """Wait for the specified port to become active (receive requests).""" - if not host: - raise ValueError("Host values of '' or None are not allowed.") - if timeout is None: - timeout = occupied_port_timeout - - for trial in range(50): - try: - check_port(host, port, timeout=timeout) - except IOError: - # port is occupied - return - else: - time.sleep(timeout) - - if host == client_host(host): - raise IOError("Port %r not bound on %r" % (port, host)) - - # On systems where a loopback interface is not available and the - # server is bound to all interfaces, it's difficult to determine - # whether the server is in fact occupying the port. In this case, - # just issue a warning and move on. See issue #1100. - msg = "Unable to verify that the server is bound on %r" % port - warnings.warn(msg) diff --git a/libs_crutch/contrib/cherrypy/process/win32.py b/libs_crutch/contrib/cherrypy/process/win32.py deleted file mode 100644 index 4afd3f1..0000000 --- a/libs_crutch/contrib/cherrypy/process/win32.py +++ /dev/null @@ -1,180 +0,0 @@ -"""Windows service. 
Requires pywin32.""" - -import os -import win32api -import win32con -import win32event -import win32service -import win32serviceutil - -from cherrypy.process import wspbus, plugins - - -class ConsoleCtrlHandler(plugins.SimplePlugin): - - """A WSPBus plugin for handling Win32 console events (like Ctrl-C).""" - - def __init__(self, bus): - self.is_set = False - plugins.SimplePlugin.__init__(self, bus) - - def start(self): - if self.is_set: - self.bus.log('Handler for console events already set.', level=40) - return - - result = win32api.SetConsoleCtrlHandler(self.handle, 1) - if result == 0: - self.bus.log('Could not SetConsoleCtrlHandler (error %r)' % - win32api.GetLastError(), level=40) - else: - self.bus.log('Set handler for console events.', level=40) - self.is_set = True - - def stop(self): - if not self.is_set: - self.bus.log('Handler for console events already off.', level=40) - return - - try: - result = win32api.SetConsoleCtrlHandler(self.handle, 0) - except ValueError: - # "ValueError: The object has not been registered" - result = 1 - - if result == 0: - self.bus.log('Could not remove SetConsoleCtrlHandler (error %r)' % - win32api.GetLastError(), level=40) - else: - self.bus.log('Removed handler for console events.', level=40) - self.is_set = False - - def handle(self, event): - """Handle console control events (like Ctrl-C).""" - if event in (win32con.CTRL_C_EVENT, win32con.CTRL_LOGOFF_EVENT, - win32con.CTRL_BREAK_EVENT, win32con.CTRL_SHUTDOWN_EVENT, - win32con.CTRL_CLOSE_EVENT): - self.bus.log('Console event %s: shutting down bus' % event) - - # Remove self immediately so repeated Ctrl-C doesn't re-call it. - try: - self.stop() - except ValueError: - pass - - self.bus.exit() - # 'First to return True stops the calls' - return 1 - return 0 - - -class Win32Bus(wspbus.Bus): - - """A Web Site Process Bus implementation for Win32. - - Instead of time.sleep, this bus blocks using native win32event objects. 
- """ - - def __init__(self): - self.events = {} - wspbus.Bus.__init__(self) - - def _get_state_event(self, state): - """Return a win32event for the given state (creating it if needed).""" - try: - return self.events[state] - except KeyError: - event = win32event.CreateEvent(None, 0, 0, - "WSPBus %s Event (pid=%r)" % - (state.name, os.getpid())) - self.events[state] = event - return event - - def _get_state(self): - return self._state - - def _set_state(self, value): - self._state = value - event = self._get_state_event(value) - win32event.PulseEvent(event) - state = property(_get_state, _set_state) - - def wait(self, state, interval=0.1, channel=None): - """Wait for the given state(s), KeyboardInterrupt or SystemExit. - - Since this class uses native win32event objects, the interval - argument is ignored. - """ - if isinstance(state, (tuple, list)): - # Don't wait for an event that beat us to the punch ;) - if self.state not in state: - events = tuple([self._get_state_event(s) for s in state]) - win32event.WaitForMultipleObjects( - events, 0, win32event.INFINITE) - else: - # Don't wait for an event that beat us to the punch ;) - if self.state != state: - event = self._get_state_event(state) - win32event.WaitForSingleObject(event, win32event.INFINITE) - - -class _ControlCodes(dict): - - """Control codes used to "signal" a service via ControlService. - - User-defined control codes are in the range 128-255. We generally use - the standard Python value for the Linux signal and add 128. 
Example: - - >>> signal.SIGUSR1 - 10 - control_codes['graceful'] = 128 + 10 - """ - - def key_for(self, obj): - """For the given value, return its corresponding key.""" - for key, val in self.items(): - if val is obj: - return key - raise ValueError("The given object could not be found: %r" % obj) - -control_codes = _ControlCodes({'graceful': 138}) - - -def signal_child(service, command): - if command == 'stop': - win32serviceutil.StopService(service) - elif command == 'restart': - win32serviceutil.RestartService(service) - else: - win32serviceutil.ControlService(service, control_codes[command]) - - -class PyWebService(win32serviceutil.ServiceFramework): - - """Python Web Service.""" - - _svc_name_ = "Python Web Service" - _svc_display_name_ = "Python Web Service" - _svc_deps_ = None # sequence of service names on which this depends - _exe_name_ = "pywebsvc" - _exe_args_ = None # Default to no arguments - - # Only exists on Windows 2000 or later, ignored on windows NT - _svc_description_ = "Python Web Service" - - def SvcDoRun(self): - from cherrypy import process - process.bus.start() - process.bus.block() - - def SvcStop(self): - from cherrypy import process - self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) - process.bus.exit() - - def SvcOther(self, control): - process.bus.publish(control_codes.key_for(control)) - - -if __name__ == '__main__': - win32serviceutil.HandleCommandLine(PyWebService) diff --git a/libs_crutch/contrib/cherrypy/process/wspbus.py b/libs_crutch/contrib/cherrypy/process/wspbus.py deleted file mode 100644 index 5409d03..0000000 --- a/libs_crutch/contrib/cherrypy/process/wspbus.py +++ /dev/null @@ -1,448 +0,0 @@ -"""An implementation of the Web Site Process Bus. - -This module is completely standalone, depending only on the stdlib. 
- -Web Site Process Bus --------------------- - -A Bus object is used to contain and manage site-wide behavior: -daemonization, HTTP server start/stop, process reload, signal handling, -drop privileges, PID file management, logging for all of these, -and many more. - -In addition, a Bus object provides a place for each web framework -to register code that runs in response to site-wide events (like -process start and stop), or which controls or otherwise interacts with -the site-wide components mentioned above. For example, a framework which -uses file-based templates would add known template filenames to an -autoreload component. - -Ideally, a Bus object will be flexible enough to be useful in a variety -of invocation scenarios: - - 1. The deployer starts a site from the command line via a - framework-neutral deployment script; applications from multiple frameworks - are mixed in a single site. Command-line arguments and configuration - files are used to define site-wide components such as the HTTP server, - WSGI component graph, autoreload behavior, signal handling, etc. - 2. The deployer starts a site via some other process, such as Apache; - applications from multiple frameworks are mixed in a single site. - Autoreload and signal handling (from Python at least) are disabled. - 3. The deployer starts a site via a framework-specific mechanism; - for example, when running tests, exploring tutorials, or deploying - single applications from a single framework. The framework controls - which site-wide components are enabled as it sees fit. - -The Bus object in this package uses topic-based publish-subscribe -messaging to accomplish all this. A few topic channels are built in -('start', 'stop', 'exit', 'graceful', 'log', and 'main'). Frameworks and -site containers are free to define their own. If a message is sent to a -channel that has not been defined or has no listeners, there is no effect. - -In general, there should only ever be a single Bus object per process. 
-Frameworks and site containers share a single Bus object by publishing -messages and subscribing listeners. - -The Bus object works as a finite state machine which models the current -state of the process. Bus methods move it from one state to another; -those methods then publish to subscribed listeners on the channel for -the new state.:: - - O - | - V - STOPPING --> STOPPED --> EXITING -> X - A A | - | \___ | - | \ | - | V V - STARTED <-- STARTING - -""" - -import atexit -import os -import sys -import threading -import time -import traceback as _traceback -import warnings - -from cherrypy._cpcompat import set - -# Here I save the value of os.getcwd(), which, if I am imported early enough, -# will be the directory from which the startup script was run. This is needed -# by _do_execv(), to change back to the original directory before execv()ing a -# new process. This is a defense against the application having changed the -# current working directory (which could make sys.executable "not found" if -# sys.executable is a relative-path, and/or cause other problems). -_startup_cwd = os.getcwd() - - -class ChannelFailures(Exception): - - """Exception raised when errors occur in a listener during Bus.publish(). - """ - delimiter = '\n' - - def __init__(self, *args, **kwargs): - # Don't use 'super' here; Exceptions are old-style in Py2.4 - # See https://bitbucket.org/cherrypy/cherrypy/issue/959 - Exception.__init__(self, *args, **kwargs) - self._exceptions = list() - - def handle_exception(self): - """Append the current exception to self.""" - self._exceptions.append(sys.exc_info()[1]) - - def get_instances(self): - """Return a list of seen exception instances.""" - return self._exceptions[:] - - def __str__(self): - exception_strings = map(repr, self.get_instances()) - return self.delimiter.join(exception_strings) - - __repr__ = __str__ - - def __bool__(self): - return bool(self._exceptions) - __nonzero__ = __bool__ - -# Use a flag to indicate the state of the bus. 
- - -class _StateEnum(object): - - class State(object): - name = None - - def __repr__(self): - return "states.%s" % self.name - - def __setattr__(self, key, value): - if isinstance(value, self.State): - value.name = key - object.__setattr__(self, key, value) -states = _StateEnum() -states.STOPPED = states.State() -states.STARTING = states.State() -states.STARTED = states.State() -states.STOPPING = states.State() -states.EXITING = states.State() - - -try: - import fcntl -except ImportError: - max_files = 0 -else: - try: - max_files = os.sysconf('SC_OPEN_MAX') - except AttributeError: - max_files = 1024 - - -class Bus(object): - - """Process state-machine and messenger for HTTP site deployment. - - All listeners for a given channel are guaranteed to be called even - if others at the same channel fail. Each failure is logged, but - execution proceeds on to the next listener. The only way to stop all - processing from inside a listener is to raise SystemExit and stop the - whole server. - """ - - states = states - state = states.STOPPED - execv = False - max_cloexec_files = max_files - - def __init__(self): - self.execv = False - self.state = states.STOPPED - self.listeners = dict( - [(channel, set()) for channel - in ('start', 'stop', 'exit', 'graceful', 'log', 'main')]) - self._priorities = {} - - def subscribe(self, channel, callback, priority=None): - """Add the given callback at the given channel (if not present).""" - if channel not in self.listeners: - self.listeners[channel] = set() - self.listeners[channel].add(callback) - - if priority is None: - priority = getattr(callback, 'priority', 50) - self._priorities[(channel, callback)] = priority - - def unsubscribe(self, channel, callback): - """Discard the given callback (if present).""" - listeners = self.listeners.get(channel) - if listeners and callback in listeners: - listeners.discard(callback) - del self._priorities[(channel, callback)] - - def publish(self, channel, *args, **kwargs): - """Return output of 
all subscribers for the given channel.""" - if channel not in self.listeners: - return [] - - exc = ChannelFailures() - output = [] - - items = [(self._priorities[(channel, listener)], listener) - for listener in self.listeners[channel]] - try: - items.sort(key=lambda item: item[0]) - except TypeError: - # Python 2.3 had no 'key' arg, but that doesn't matter - # since it could sort dissimilar types just fine. - items.sort() - for priority, listener in items: - try: - output.append(listener(*args, **kwargs)) - except KeyboardInterrupt: - raise - except SystemExit: - e = sys.exc_info()[1] - # If we have previous errors ensure the exit code is non-zero - if exc and e.code == 0: - e.code = 1 - raise - except: - exc.handle_exception() - if channel == 'log': - # Assume any further messages to 'log' will fail. - pass - else: - self.log("Error in %r listener %r" % (channel, listener), - level=40, traceback=True) - if exc: - raise exc - return output - - def _clean_exit(self): - """An atexit handler which asserts the Bus is not running.""" - if self.state != states.EXITING: - warnings.warn( - "The main thread is exiting, but the Bus is in the %r state; " - "shutting it down automatically now. You must either call " - "bus.block() after start(), or call bus.exit() before the " - "main thread exits." % self.state, RuntimeWarning) - self.exit() - - def start(self): - """Start all services.""" - atexit.register(self._clean_exit) - - self.state = states.STARTING - self.log('Bus STARTING') - try: - self.publish('start') - self.state = states.STARTED - self.log('Bus STARTED') - except (KeyboardInterrupt, SystemExit): - raise - except: - self.log("Shutting down due to error in start listener:", - level=40, traceback=True) - e_info = sys.exc_info()[1] - try: - self.exit() - except: - # Any stop/exit errors will be logged inside publish(). 
- pass - # Re-raise the original error - raise e_info - - def exit(self): - """Stop all services and prepare to exit the process.""" - exitstate = self.state - try: - self.stop() - - self.state = states.EXITING - self.log('Bus EXITING') - self.publish('exit') - # This isn't strictly necessary, but it's better than seeing - # "Waiting for child threads to terminate..." and then nothing. - self.log('Bus EXITED') - except: - # This method is often called asynchronously (whether thread, - # signal handler, console handler, or atexit handler), so we - # can't just let exceptions propagate out unhandled. - # Assume it's been logged and just die. - os._exit(70) # EX_SOFTWARE - - if exitstate == states.STARTING: - # exit() was called before start() finished, possibly due to - # Ctrl-C because a start listener got stuck. In this case, - # we could get stuck in a loop where Ctrl-C never exits the - # process, so we just call os.exit here. - os._exit(70) # EX_SOFTWARE - - def restart(self): - """Restart the process (may close connections). - - This method does not restart the process from the calling thread; - instead, it stops the bus and asks the main thread to call execv. - """ - self.execv = True - self.exit() - - def graceful(self): - """Advise all services to reload.""" - self.log('Bus graceful') - self.publish('graceful') - - def block(self, interval=0.1): - """Wait for the EXITING state, KeyboardInterrupt or SystemExit. - - This function is intended to be called only by the main thread. - After waiting for the EXITING state, it also waits for all threads - to terminate, and then calls os.execv if self.execv is True. This - design allows another thread to call bus.restart, yet have the main - thread perform the actual execv call (required on some platforms). - """ - try: - self.wait(states.EXITING, interval=interval, channel='main') - except (KeyboardInterrupt, IOError): - # The time.sleep call might raise - # "IOError: [Errno 4] Interrupted function call" on KBInt. 
- self.log('Keyboard Interrupt: shutting down bus') - self.exit() - except SystemExit: - self.log('SystemExit raised: shutting down bus') - self.exit() - raise - - # Waiting for ALL child threads to finish is necessary on OS X. - # See https://bitbucket.org/cherrypy/cherrypy/issue/581. - # It's also good to let them all shut down before allowing - # the main thread to call atexit handlers. - # See https://bitbucket.org/cherrypy/cherrypy/issue/751. - self.log("Waiting for child threads to terminate...") - for t in threading.enumerate(): - # Validate the we're not trying to join the MainThread - # that will cause a deadlock and the case exist when - # implemented as a windows service and in any other case - # that another thread executes cherrypy.engine.exit() - if ( - t != threading.currentThread() and - t.isAlive() and - not isinstance(t, threading._MainThread) - ): - # Note that any dummy (external) threads are always daemonic. - if hasattr(threading.Thread, "daemon"): - # Python 2.6+ - d = t.daemon - else: - d = t.isDaemon() - if not d: - self.log("Waiting for thread %s." % t.getName()) - t.join() - - if self.execv: - self._do_execv() - - def wait(self, state, interval=0.1, channel=None): - """Poll for the given state(s) at intervals; publish to channel.""" - if isinstance(state, (tuple, list)): - states = state - else: - states = [state] - - def _wait(): - while self.state not in states: - time.sleep(interval) - self.publish(channel) - - # From http://psyco.sourceforge.net/psycoguide/bugs.html: - # "The compiled machine code does not include the regular polling - # done by Python, meaning that a KeyboardInterrupt will not be - # detected before execution comes back to the regular Python - # interpreter. Your program cannot be interrupted if caught - # into an infinite Psyco-compiled loop." - try: - sys.modules['psyco'].cannotcompile(_wait) - except (KeyError, AttributeError): - pass - - _wait() - - def _do_execv(self): - """Re-execute the current process. 
- - This must be called from the main thread, because certain platforms - (OS X) don't allow execv to be called in a child thread very well. - """ - args = sys.argv[:] - self.log('Re-spawning %s' % ' '.join(args)) - - if sys.platform[:4] == 'java': - from _systemrestart import SystemRestart - raise SystemRestart - else: - args.insert(0, sys.executable) - if sys.platform == 'win32': - args = ['"%s"' % arg for arg in args] - - os.chdir(_startup_cwd) - if self.max_cloexec_files: - self._set_cloexec() - os.execv(sys.executable, args) - - def _set_cloexec(self): - """Set the CLOEXEC flag on all open files (except stdin/out/err). - - If self.max_cloexec_files is an integer (the default), then on - platforms which support it, it represents the max open files setting - for the operating system. This function will be called just before - the process is restarted via os.execv() to prevent open files - from persisting into the new process. - - Set self.max_cloexec_files to 0 to disable this behavior. - """ - for fd in range(3, self.max_cloexec_files): # skip stdin/out/err - try: - flags = fcntl.fcntl(fd, fcntl.F_GETFD) - except IOError: - continue - fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) - - def stop(self): - """Stop all services.""" - self.state = states.STOPPING - self.log('Bus STOPPING') - self.publish('stop') - self.state = states.STOPPED - self.log('Bus STOPPED') - - def start_with_callback(self, func, args=None, kwargs=None): - """Start 'func' in a new thread T, then start self (and return T).""" - if args is None: - args = () - if kwargs is None: - kwargs = {} - args = (func,) + args - - def _callback(func, *a, **kw): - self.wait(states.STARTED) - func(*a, **kw) - t = threading.Thread(target=_callback, args=args, kwargs=kwargs) - t.setName('Bus Callback ' + t.getName()) - t.start() - - self.start() - - return t - - def log(self, msg="", level=20, traceback=False): - """Log the given message. 
Append the last traceback if requested.""" - if traceback: - msg += "\n" + "".join(_traceback.format_exception(*sys.exc_info())) - self.publish('log', msg, level) - -bus = Bus() diff --git a/libs_crutch/contrib/cherrypy/scaffold/__init__.py b/libs_crutch/contrib/cherrypy/scaffold/__init__.py deleted file mode 100644 index 50de34b..0000000 --- a/libs_crutch/contrib/cherrypy/scaffold/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -""", a CherryPy application. - -Use this as a base for creating new CherryPy applications. When you want -to make a new app, copy and paste this folder to some other location -(maybe site-packages) and rename it to the name of your project, -then tweak as desired. - -Even before any tweaking, this should serve a few demonstration pages. -Change to this directory and run: - - ../cherryd -c site.conf - -""" - -import cherrypy -from cherrypy import tools, url - -import os -local_dir = os.path.join(os.getcwd(), os.path.dirname(__file__)) - - -class Root: - - _cp_config = {'tools.log_tracebacks.on': True, - } - - def index(self): - return """ -Try some other path, -or a default path.
-Or, just look at the pretty picture:
- -""" % (url("other"), url("else"), - url("files/made_with_cherrypy_small.png")) - index.exposed = True - - def default(self, *args, **kwargs): - return "args: %s kwargs: %s" % (args, kwargs) - default.exposed = True - - def other(self, a=2, b='bananas', c=None): - cherrypy.response.headers['Content-Type'] = 'text/plain' - if c is None: - return "Have %d %s." % (int(a), b) - else: - return "Have %d %s, %s." % (int(a), b, c) - other.exposed = True - - files = cherrypy.tools.staticdir.handler( - section="/files", - dir=os.path.join(local_dir, "static"), - # Ignore .php files, etc. - match=r'\.(css|gif|html?|ico|jpe?g|js|png|swf|xml)$', - ) - - -root = Root() - -# Uncomment the following to use your own favicon instead of CP's default. -#favicon_path = os.path.join(local_dir, "favicon.ico") -#root.favicon_ico = tools.staticfile.handler(filename=favicon_path) diff --git a/libs_crutch/contrib/cherrypy/scaffold/example.conf b/libs_crutch/contrib/cherrypy/scaffold/example.conf deleted file mode 100644 index 93a6e53..0000000 --- a/libs_crutch/contrib/cherrypy/scaffold/example.conf +++ /dev/null @@ -1,3 +0,0 @@ -[/] -log.error_file: "error.log" -log.access_file: "access.log" \ No newline at end of file diff --git a/libs_crutch/contrib/cherrypy/scaffold/site.conf b/libs_crutch/contrib/cherrypy/scaffold/site.conf deleted file mode 100644 index 6ed3898..0000000 --- a/libs_crutch/contrib/cherrypy/scaffold/site.conf +++ /dev/null @@ -1,14 +0,0 @@ -[global] -# Uncomment this when you're done developing -#environment: "production" - -server.socket_host: "0.0.0.0" -server.socket_port: 8088 - -# Uncomment the following lines to run on HTTPS at the same time -#server.2.socket_host: "0.0.0.0" -#server.2.socket_port: 8433 -#server.2.ssl_certificate: '../test/test.pem' -#server.2.ssl_private_key: '../test/test.pem' - -tree.myapp: cherrypy.Application(scaffold.root, "/", "example.conf") diff --git a/libs_crutch/contrib/cherrypy/scaffold/static/made_with_cherrypy_small.png 
b/libs_crutch/contrib/cherrypy/scaffold/static/made_with_cherrypy_small.png deleted file mode 100644 index c3aafee..0000000 Binary files a/libs_crutch/contrib/cherrypy/scaffold/static/made_with_cherrypy_small.png and /dev/null differ diff --git a/libs_crutch/contrib/cherrypy/wsgiserver/__init__.py b/libs_crutch/contrib/cherrypy/wsgiserver/__init__.py deleted file mode 100644 index ee6190f..0000000 --- a/libs_crutch/contrib/cherrypy/wsgiserver/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -__all__ = ['HTTPRequest', 'HTTPConnection', 'HTTPServer', - 'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile', - 'MaxSizeExceeded', 'NoSSLError', 'FatalSSLAlert', - 'WorkerThread', 'ThreadPool', 'SSLAdapter', - 'CherryPyWSGIServer', - 'Gateway', 'WSGIGateway', 'WSGIGateway_10', 'WSGIGateway_u0', - 'WSGIPathInfoDispatcher', 'get_ssl_adapter_class'] - -import sys -if sys.version_info < (3, 0): - from wsgiserver2 import * -else: - # Le sigh. Boo for backward-incompatible syntax. - exec('from .wsgiserver3 import *') diff --git a/libs_crutch/contrib/cherrypy/wsgiserver/ssl_builtin.py b/libs_crutch/contrib/cherrypy/wsgiserver/ssl_builtin.py deleted file mode 100644 index 2c74ad8..0000000 --- a/libs_crutch/contrib/cherrypy/wsgiserver/ssl_builtin.py +++ /dev/null @@ -1,92 +0,0 @@ -"""A library for integrating Python's builtin ``ssl`` library with CherryPy. - -The ssl module must be importable for SSL functionality. - -To use this module, set ``CherryPyWSGIServer.ssl_adapter`` to an instance of -``BuiltinSSLAdapter``. 
-""" - -try: - import ssl -except ImportError: - ssl = None - -try: - from _pyio import DEFAULT_BUFFER_SIZE -except ImportError: - try: - from io import DEFAULT_BUFFER_SIZE - except ImportError: - DEFAULT_BUFFER_SIZE = -1 - -import sys - -from cherrypy import wsgiserver - - -class BuiltinSSLAdapter(wsgiserver.SSLAdapter): - - """A wrapper for integrating Python's builtin ssl module with CherryPy.""" - - certificate = None - """The filename of the server SSL certificate.""" - - private_key = None - """The filename of the server's private key file.""" - - def __init__(self, certificate, private_key, certificate_chain=None): - if ssl is None: - raise ImportError("You must install the ssl module to use HTTPS.") - self.certificate = certificate - self.private_key = private_key - self.certificate_chain = certificate_chain - - def bind(self, sock): - """Wrap and return the given socket.""" - return sock - - def wrap(self, sock): - """Wrap and return the given socket, plus WSGI environ entries.""" - try: - s = ssl.wrap_socket(sock, do_handshake_on_connect=True, - server_side=True, certfile=self.certificate, - keyfile=self.private_key, - ssl_version=ssl.PROTOCOL_SSLv23) - except ssl.SSLError: - e = sys.exc_info()[1] - if e.errno == ssl.SSL_ERROR_EOF: - # This is almost certainly due to the cherrypy engine - # 'pinging' the socket to assert it's connectable; - # the 'ping' isn't SSL. - return None, {} - elif e.errno == ssl.SSL_ERROR_SSL: - if e.args[1].endswith('http request'): - # The client is speaking HTTP to an HTTPS server. - raise wsgiserver.NoSSLError - elif e.args[1].endswith('unknown protocol'): - # The client is speaking some non-HTTP protocol. - # Drop the conn. 
- return None, {} - raise - return s, self.get_environ(s) - - # TODO: fill this out more with mod ssl env - def get_environ(self, sock): - """Create WSGI environ entries to be merged into each request.""" - cipher = sock.cipher() - ssl_environ = { - "wsgi.url_scheme": "https", - "HTTPS": "on", - 'SSL_PROTOCOL': cipher[1], - 'SSL_CIPHER': cipher[0] - # SSL_VERSION_INTERFACE string The mod_ssl program version - # SSL_VERSION_LIBRARY string The OpenSSL program version - } - return ssl_environ - - if sys.version_info >= (3, 0): - def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE): - return wsgiserver.CP_makefile(sock, mode, bufsize) - else: - def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE): - return wsgiserver.CP_fileobject(sock, mode, bufsize) diff --git a/libs_crutch/contrib/cherrypy/wsgiserver/ssl_pyopenssl.py b/libs_crutch/contrib/cherrypy/wsgiserver/ssl_pyopenssl.py deleted file mode 100644 index 1f6d8c6..0000000 --- a/libs_crutch/contrib/cherrypy/wsgiserver/ssl_pyopenssl.py +++ /dev/null @@ -1,253 +0,0 @@ -"""A library for integrating pyOpenSSL with CherryPy. - -The OpenSSL module must be importable for SSL functionality. -You can obtain it from `here `_. - -To use this module, set CherryPyWSGIServer.ssl_adapter to an instance of -SSLAdapter. There are two ways to use SSL: - -Method One ----------- - - * ``ssl_adapter.context``: an instance of SSL.Context. - -If this is not None, it is assumed to be an SSL.Context instance, -and will be passed to SSL.Connection on bind(). The developer is -responsible for forming a valid Context object. This approach is -to be preferred for more flexibility, e.g. if the cert and key are -streams instead of files, or need decryption, or SSL.SSLv3_METHOD -is desired instead of the default SSL.SSLv23_METHOD, etc. Consult -the pyOpenSSL documentation for complete options. - -Method Two (shortcut) ---------------------- - - * ``ssl_adapter.certificate``: the filename of the server SSL certificate. 
- * ``ssl_adapter.private_key``: the filename of the server's private key file. - -Both are None by default. If ssl_adapter.context is None, but .private_key -and .certificate are both given and valid, they will be read, and the -context will be automatically created from them. -""" - -import socket -import threading -import time - -from cherrypy import wsgiserver - -try: - from OpenSSL import SSL - from OpenSSL import crypto -except ImportError: - SSL = None - - -class SSL_fileobject(wsgiserver.CP_fileobject): - - """SSL file object attached to a socket object.""" - - ssl_timeout = 3 - ssl_retry = .01 - - def _safe_call(self, is_reader, call, *args, **kwargs): - """Wrap the given call with SSL error-trapping. - - is_reader: if False EOF errors will be raised. If True, EOF errors - will return "" (to emulate normal sockets). - """ - start = time.time() - while True: - try: - return call(*args, **kwargs) - except SSL.WantReadError: - # Sleep and try again. This is dangerous, because it means - # the rest of the stack has no way of differentiating - # between a "new handshake" error and "client dropped". - # Note this isn't an endless loop: there's a timeout below. - time.sleep(self.ssl_retry) - except SSL.WantWriteError: - time.sleep(self.ssl_retry) - except SSL.SysCallError, e: - if is_reader and e.args == (-1, 'Unexpected EOF'): - return "" - - errnum = e.args[0] - if is_reader and errnum in wsgiserver.socket_errors_to_ignore: - return "" - raise socket.error(errnum) - except SSL.Error, e: - if is_reader and e.args == (-1, 'Unexpected EOF'): - return "" - - thirdarg = None - try: - thirdarg = e.args[0][0][2] - except IndexError: - pass - - if thirdarg == 'http request': - # The client is talking HTTP to an HTTPS server. 
- raise wsgiserver.NoSSLError() - - raise wsgiserver.FatalSSLAlert(*e.args) - except: - raise - - if time.time() - start > self.ssl_timeout: - raise socket.timeout("timed out") - - def recv(self, size): - return self._safe_call(True, super(SSL_fileobject, self).recv, size) - - def sendall(self, *args, **kwargs): - return self._safe_call(False, super(SSL_fileobject, self).sendall, - *args, **kwargs) - - def send(self, *args, **kwargs): - return self._safe_call(False, super(SSL_fileobject, self).send, - *args, **kwargs) - - -class SSLConnection: - - """A thread-safe wrapper for an SSL.Connection. - - ``*args``: the arguments to create the wrapped ``SSL.Connection(*args)``. - """ - - def __init__(self, *args): - self._ssl_conn = SSL.Connection(*args) - self._lock = threading.RLock() - - for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read', - 'renegotiate', 'bind', 'listen', 'connect', 'accept', - 'setblocking', 'fileno', 'close', 'get_cipher_list', - 'getpeername', 'getsockname', 'getsockopt', 'setsockopt', - 'makefile', 'get_app_data', 'set_app_data', 'state_string', - 'sock_shutdown', 'get_peer_certificate', 'want_read', - 'want_write', 'set_connect_state', 'set_accept_state', - 'connect_ex', 'sendall', 'settimeout', 'gettimeout'): - exec("""def %s(self, *args): - self._lock.acquire() - try: - return self._ssl_conn.%s(*args) - finally: - self._lock.release() -""" % (f, f)) - - def shutdown(self, *args): - self._lock.acquire() - try: - # pyOpenSSL.socket.shutdown takes no args - return self._ssl_conn.shutdown() - finally: - self._lock.release() - - -class pyOpenSSLAdapter(wsgiserver.SSLAdapter): - - """A wrapper for integrating pyOpenSSL with CherryPy.""" - - context = None - """An instance of SSL.Context.""" - - certificate = None - """The filename of the server SSL certificate.""" - - private_key = None - """The filename of the server's private key file.""" - - certificate_chain = None - """Optional. 
The filename of CA's intermediate certificate bundle. - - This is needed for cheaper "chained root" SSL certificates, and should be - left as None if not required.""" - - def __init__(self, certificate, private_key, certificate_chain=None): - if SSL is None: - raise ImportError("You must install pyOpenSSL to use HTTPS.") - - self.context = None - self.certificate = certificate - self.private_key = private_key - self.certificate_chain = certificate_chain - self._environ = None - - def bind(self, sock): - """Wrap and return the given socket.""" - if self.context is None: - self.context = self.get_context() - conn = SSLConnection(self.context, sock) - self._environ = self.get_environ() - return conn - - def wrap(self, sock): - """Wrap and return the given socket, plus WSGI environ entries.""" - return sock, self._environ.copy() - - def get_context(self): - """Return an SSL.Context from self attributes.""" - # See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473 - c = SSL.Context(SSL.SSLv23_METHOD) - c.use_privatekey_file(self.private_key) - if self.certificate_chain: - c.load_verify_locations(self.certificate_chain) - c.use_certificate_file(self.certificate) - return c - - def get_environ(self): - """Return WSGI environ entries to be merged into each request.""" - ssl_environ = { - "HTTPS": "on", - # pyOpenSSL doesn't provide access to any of these AFAICT - # 'SSL_PROTOCOL': 'SSLv2', - # SSL_CIPHER string The cipher specification name - # SSL_VERSION_INTERFACE string The mod_ssl program version - # SSL_VERSION_LIBRARY string The OpenSSL program version - } - - if self.certificate: - # Server certificate attributes - cert = open(self.certificate, 'rb').read() - cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert) - ssl_environ.update({ - 'SSL_SERVER_M_VERSION': cert.get_version(), - 'SSL_SERVER_M_SERIAL': cert.get_serial_number(), - # 'SSL_SERVER_V_START': - # Validity of server's certificate (start time), - # 'SSL_SERVER_V_END': - # Validity of 
server's certificate (end time), - }) - - for prefix, dn in [("I", cert.get_issuer()), - ("S", cert.get_subject())]: - # X509Name objects don't seem to have a way to get the - # complete DN string. Use str() and slice it instead, - # because str(dn) == "" - dnstr = str(dn)[18:-2] - - wsgikey = 'SSL_SERVER_%s_DN' % prefix - ssl_environ[wsgikey] = dnstr - - # The DN should be of the form: /k1=v1/k2=v2, but we must allow - # for any value to contain slashes itself (in a URL). - while dnstr: - pos = dnstr.rfind("=") - dnstr, value = dnstr[:pos], dnstr[pos + 1:] - pos = dnstr.rfind("/") - dnstr, key = dnstr[:pos], dnstr[pos + 1:] - if key and value: - wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key) - ssl_environ[wsgikey] = value - - return ssl_environ - - def makefile(self, sock, mode='r', bufsize=-1): - if SSL and isinstance(sock, SSL.Connection): - timeout = sock.gettimeout() - f = SSL_fileobject(sock, mode, bufsize) - f.ssl_timeout = timeout - return f - else: - return wsgiserver.CP_fileobject(sock, mode, bufsize) diff --git a/libs_crutch/contrib/cherrypy/wsgiserver/wsgiserver2.py b/libs_crutch/contrib/cherrypy/wsgiserver/wsgiserver2.py deleted file mode 100644 index c0896d3..0000000 --- a/libs_crutch/contrib/cherrypy/wsgiserver/wsgiserver2.py +++ /dev/null @@ -1,2481 +0,0 @@ -"""A high-speed, production ready, thread pooled, generic HTTP server. 
- -Simplest example on how to use this module directly -(without using CherryPy's application machinery):: - - from cherrypy import wsgiserver - - def my_crazy_app(environ, start_response): - status = '200 OK' - response_headers = [('Content-type','text/plain')] - start_response(status, response_headers) - return ['Hello world!'] - - server = wsgiserver.CherryPyWSGIServer( - ('0.0.0.0', 8070), my_crazy_app, - server_name='www.cherrypy.example') - server.start() - -The CherryPy WSGI server can serve as many WSGI applications -as you want in one instance by using a WSGIPathInfoDispatcher:: - - d = WSGIPathInfoDispatcher({'/': my_crazy_app, '/blog': my_blog_app}) - server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 80), d) - -Want SSL support? Just set server.ssl_adapter to an SSLAdapter instance. - -This won't call the CherryPy engine (application side) at all, only the -HTTP server, which is independent from the rest of CherryPy. Don't -let the name "CherryPyWSGIServer" throw you; the name merely reflects -its origin, not its coupling. - -For those of you wanting to understand internals of this module, here's the -basic call flow. The server's listening thread runs a very tight loop, -sticking incoming connections onto a Queue:: - - server = CherryPyWSGIServer(...) - server.start() - while True: - tick() - # This blocks until a request comes in: - child = socket.accept() - conn = HTTPConnection(child, ...) - server.requests.put(conn) - -Worker threads are kept in a pool and poll the Queue, popping off and then -handling each connection in turn. Each connection can consist of an arbitrary -number of requests and their responses, so we run a nested loop:: - - while True: - conn = server.requests.get() - conn.communicate() - -> while True: - req = HTTPRequest(...) - req.parse_request() - -> # Read the Request-Line, e.g. "GET /page HTTP/1.1" - req.rfile.readline() - read_headers(req.rfile, req.inheaders) - req.respond() - -> response = app(...) 
- try: - for chunk in response: - if chunk: - req.write(chunk) - finally: - if hasattr(response, "close"): - response.close() - if req.close_connection: - return -""" - -__all__ = ['HTTPRequest', 'HTTPConnection', 'HTTPServer', - 'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile', - 'CP_fileobject', - 'MaxSizeExceeded', 'NoSSLError', 'FatalSSLAlert', - 'WorkerThread', 'ThreadPool', 'SSLAdapter', - 'CherryPyWSGIServer', - 'Gateway', 'WSGIGateway', 'WSGIGateway_10', 'WSGIGateway_u0', - 'WSGIPathInfoDispatcher', 'get_ssl_adapter_class'] - -import os -try: - import queue -except: - import Queue as queue -import re -import rfc822 -import socket -import sys -if 'win' in sys.platform and hasattr(socket, "AF_INET6"): - if not hasattr(socket, 'IPPROTO_IPV6'): - socket.IPPROTO_IPV6 = 41 - if not hasattr(socket, 'IPV6_V6ONLY'): - socket.IPV6_V6ONLY = 27 -try: - import cStringIO as StringIO -except ImportError: - import StringIO -DEFAULT_BUFFER_SIZE = -1 - - -class FauxSocket(object): - - """Faux socket with the minimal interface required by pypy""" - - def _reuse(self): - pass - -_fileobject_uses_str_type = isinstance( - socket._fileobject(FauxSocket())._rbuf, basestring) -del FauxSocket # this class is not longer required for anything. - -import threading -import time -import traceback - - -def format_exc(limit=None): - """Like print_exc() but return a string. Backport for Python 2.3.""" - try: - etype, value, tb = sys.exc_info() - return ''.join(traceback.format_exception(etype, value, tb, limit)) - finally: - etype = value = tb = None - -import operator - -from urllib import unquote -import warnings - -if sys.version_info >= (3, 0): - bytestr = bytes - unicodestr = str - basestring = (bytes, str) - - def ntob(n, encoding='ISO-8859-1'): - """Return the given native string as a byte string in the given - encoding. 
- """ - # In Python 3, the native string type is unicode - return n.encode(encoding) -else: - bytestr = str - unicodestr = unicode - basestring = basestring - - def ntob(n, encoding='ISO-8859-1'): - """Return the given native string as a byte string in the given - encoding. - """ - # In Python 2, the native string type is bytes. Assume it's already - # in the given encoding, which for ISO-8859-1 is almost always what - # was intended. - return n - -LF = ntob('\n') -CRLF = ntob('\r\n') -TAB = ntob('\t') -SPACE = ntob(' ') -COLON = ntob(':') -SEMICOLON = ntob(';') -EMPTY = ntob('') -NUMBER_SIGN = ntob('#') -QUESTION_MARK = ntob('?') -ASTERISK = ntob('*') -FORWARD_SLASH = ntob('/') -quoted_slash = re.compile(ntob("(?i)%2F")) - -import errno - - -def plat_specific_errors(*errnames): - """Return error numbers for all errors in errnames on this platform. - - The 'errno' module contains different global constants depending on - the specific platform (OS). This function will return the list of - numeric values for a given list of potential names. 
- """ - errno_names = dir(errno) - nums = [getattr(errno, k) for k in errnames if k in errno_names] - # de-dupe the list - return list(dict.fromkeys(nums).keys()) - -socket_error_eintr = plat_specific_errors("EINTR", "WSAEINTR") - -socket_errors_to_ignore = plat_specific_errors( - "EPIPE", - "EBADF", "WSAEBADF", - "ENOTSOCK", "WSAENOTSOCK", - "ETIMEDOUT", "WSAETIMEDOUT", - "ECONNREFUSED", "WSAECONNREFUSED", - "ECONNRESET", "WSAECONNRESET", - "ECONNABORTED", "WSAECONNABORTED", - "ENETRESET", "WSAENETRESET", - "EHOSTDOWN", "EHOSTUNREACH", -) -socket_errors_to_ignore.append("timed out") -socket_errors_to_ignore.append("The read operation timed out") - -socket_errors_nonblocking = plat_specific_errors( - 'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK') - -comma_separated_headers = [ - ntob(h) for h in - ['Accept', 'Accept-Charset', 'Accept-Encoding', - 'Accept-Language', 'Accept-Ranges', 'Allow', 'Cache-Control', - 'Connection', 'Content-Encoding', 'Content-Language', 'Expect', - 'If-Match', 'If-None-Match', 'Pragma', 'Proxy-Authenticate', 'TE', - 'Trailer', 'Transfer-Encoding', 'Upgrade', 'Vary', 'Via', 'Warning', - 'WWW-Authenticate'] -] - - -import logging -if not hasattr(logging, 'statistics'): - logging.statistics = {} - - -def read_headers(rfile, hdict=None): - """Read headers from the given stream into the given header dict. - - If hdict is None, a new header dict is created. Returns the populated - header dict. - - Headers which are repeated are folded together using a comma if their - specification so dictates. - - This function raises ValueError when the read bytes violate the HTTP spec. - You should probably return "400 Bad Request" if this happens. 
- """ - if hdict is None: - hdict = {} - - while True: - line = rfile.readline() - if not line: - # No more data--illegal end of headers - raise ValueError("Illegal end of headers.") - - if line == CRLF: - # Normal end of headers - break - if not line.endswith(CRLF): - raise ValueError("HTTP requires CRLF terminators") - - if line[0] in (SPACE, TAB): - # It's a continuation line. - v = line.strip() - else: - try: - k, v = line.split(COLON, 1) - except ValueError: - raise ValueError("Illegal header line.") - # TODO: what about TE and WWW-Authenticate? - k = k.strip().title() - v = v.strip() - hname = k - - if k in comma_separated_headers: - existing = hdict.get(hname) - if existing: - v = ", ".join((existing, v)) - hdict[hname] = v - - return hdict - - -class MaxSizeExceeded(Exception): - pass - - -class SizeCheckWrapper(object): - - """Wraps a file-like object, raising MaxSizeExceeded if too large.""" - - def __init__(self, rfile, maxlen): - self.rfile = rfile - self.maxlen = maxlen - self.bytes_read = 0 - - def _check_length(self): - if self.maxlen and self.bytes_read > self.maxlen: - raise MaxSizeExceeded() - - def read(self, size=None): - data = self.rfile.read(size) - self.bytes_read += len(data) - self._check_length() - return data - - def readline(self, size=None): - if size is not None: - data = self.rfile.readline(size) - self.bytes_read += len(data) - self._check_length() - return data - - # User didn't specify a size ... - # We read the line in chunks to make sure it's not a 100MB line ! 
- res = [] - while True: - data = self.rfile.readline(256) - self.bytes_read += len(data) - self._check_length() - res.append(data) - # See https://bitbucket.org/cherrypy/cherrypy/issue/421 - if len(data) < 256 or data[-1:] == LF: - return EMPTY.join(res) - - def readlines(self, sizehint=0): - # Shamelessly stolen from StringIO - total = 0 - lines = [] - line = self.readline() - while line: - lines.append(line) - total += len(line) - if 0 < sizehint <= total: - break - line = self.readline() - return lines - - def close(self): - self.rfile.close() - - def __iter__(self): - return self - - def __next__(self): - data = next(self.rfile) - self.bytes_read += len(data) - self._check_length() - return data - - def next(self): - data = self.rfile.next() - self.bytes_read += len(data) - self._check_length() - return data - - -class KnownLengthRFile(object): - - """Wraps a file-like object, returning an empty string when exhausted.""" - - def __init__(self, rfile, content_length): - self.rfile = rfile - self.remaining = content_length - - def read(self, size=None): - if self.remaining == 0: - return '' - if size is None: - size = self.remaining - else: - size = min(size, self.remaining) - - data = self.rfile.read(size) - self.remaining -= len(data) - return data - - def readline(self, size=None): - if self.remaining == 0: - return '' - if size is None: - size = self.remaining - else: - size = min(size, self.remaining) - - data = self.rfile.readline(size) - self.remaining -= len(data) - return data - - def readlines(self, sizehint=0): - # Shamelessly stolen from StringIO - total = 0 - lines = [] - line = self.readline(sizehint) - while line: - lines.append(line) - total += len(line) - if 0 < sizehint <= total: - break - line = self.readline(sizehint) - return lines - - def close(self): - self.rfile.close() - - def __iter__(self): - return self - - def __next__(self): - data = next(self.rfile) - self.remaining -= len(data) - return data - - -class ChunkedRFile(object): - - 
"""Wraps a file-like object, returning an empty string when exhausted. - - This class is intended to provide a conforming wsgi.input value for - request entities that have been encoded with the 'chunked' transfer - encoding. - """ - - def __init__(self, rfile, maxlen, bufsize=8192): - self.rfile = rfile - self.maxlen = maxlen - self.bytes_read = 0 - self.buffer = EMPTY - self.bufsize = bufsize - self.closed = False - - def _fetch(self): - if self.closed: - return - - line = self.rfile.readline() - self.bytes_read += len(line) - - if self.maxlen and self.bytes_read > self.maxlen: - raise MaxSizeExceeded("Request Entity Too Large", self.maxlen) - - line = line.strip().split(SEMICOLON, 1) - - try: - chunk_size = line.pop(0) - chunk_size = int(chunk_size, 16) - except ValueError: - raise ValueError("Bad chunked transfer size: " + repr(chunk_size)) - - if chunk_size <= 0: - self.closed = True - return - -## if line: chunk_extension = line[0] - - if self.maxlen and self.bytes_read + chunk_size > self.maxlen: - raise IOError("Request Entity Too Large") - - chunk = self.rfile.read(chunk_size) - self.bytes_read += len(chunk) - self.buffer += chunk - - crlf = self.rfile.read(2) - if crlf != CRLF: - raise ValueError( - "Bad chunked transfer coding (expected '\\r\\n', " - "got " + repr(crlf) + ")") - - def read(self, size=None): - data = EMPTY - while True: - if size and len(data) >= size: - return data - - if not self.buffer: - self._fetch() - if not self.buffer: - # EOF - return data - - if size: - remaining = size - len(data) - data += self.buffer[:remaining] - self.buffer = self.buffer[remaining:] - else: - data += self.buffer - - def readline(self, size=None): - data = EMPTY - while True: - if size and len(data) >= size: - return data - - if not self.buffer: - self._fetch() - if not self.buffer: - # EOF - return data - - newline_pos = self.buffer.find(LF) - if size: - if newline_pos == -1: - remaining = size - len(data) - data += self.buffer[:remaining] - self.buffer = 
self.buffer[remaining:] - else: - remaining = min(size - len(data), newline_pos) - data += self.buffer[:remaining] - self.buffer = self.buffer[remaining:] - else: - if newline_pos == -1: - data += self.buffer - else: - data += self.buffer[:newline_pos] - self.buffer = self.buffer[newline_pos:] - - def readlines(self, sizehint=0): - # Shamelessly stolen from StringIO - total = 0 - lines = [] - line = self.readline(sizehint) - while line: - lines.append(line) - total += len(line) - if 0 < sizehint <= total: - break - line = self.readline(sizehint) - return lines - - def read_trailer_lines(self): - if not self.closed: - raise ValueError( - "Cannot read trailers until the request body has been read.") - - while True: - line = self.rfile.readline() - if not line: - # No more data--illegal end of headers - raise ValueError("Illegal end of headers.") - - self.bytes_read += len(line) - if self.maxlen and self.bytes_read > self.maxlen: - raise IOError("Request Entity Too Large") - - if line == CRLF: - # Normal end of headers - break - if not line.endswith(CRLF): - raise ValueError("HTTP requires CRLF terminators") - - yield line - - def close(self): - self.rfile.close() - - def __iter__(self): - # Shamelessly stolen from StringIO - total = 0 - line = self.readline(sizehint) - while line: - yield line - total += len(line) - if 0 < sizehint <= total: - break - line = self.readline(sizehint) - - -class HTTPRequest(object): - - """An HTTP Request (and response). - - A single HTTP connection may consist of multiple request/response pairs. - """ - - server = None - """The HTTPServer object which is receiving this request.""" - - conn = None - """The HTTPConnection object on which this request connected.""" - - inheaders = {} - """A dict of request headers.""" - - outheaders = [] - """A list of header tuples to write in the response.""" - - ready = False - """When True, the request has been parsed and is ready to begin generating - the response. 
When False, signals the calling Connection that the response - should not be generated and the connection should close.""" - - close_connection = False - """Signals the calling Connection that the request should close. This does - not imply an error! The client and/or server may each request that the - connection be closed.""" - - chunked_write = False - """If True, output will be encoded with the "chunked" transfer-coding. - - This value is set automatically inside send_headers.""" - - def __init__(self, server, conn): - self.server = server - self.conn = conn - - self.ready = False - self.started_request = False - self.scheme = ntob("http") - if self.server.ssl_adapter is not None: - self.scheme = ntob("https") - # Use the lowest-common protocol in case read_request_line errors. - self.response_protocol = 'HTTP/1.0' - self.inheaders = {} - - self.status = "" - self.outheaders = [] - self.sent_headers = False - self.close_connection = self.__class__.close_connection - self.chunked_read = False - self.chunked_write = self.__class__.chunked_write - - def parse_request(self): - """Parse the next HTTP request start-line and message-headers.""" - self.rfile = SizeCheckWrapper(self.conn.rfile, - self.server.max_request_header_size) - try: - success = self.read_request_line() - except MaxSizeExceeded: - self.simple_response( - "414 Request-URI Too Long", - "The Request-URI sent with the request exceeds the maximum " - "allowed bytes.") - return - else: - if not success: - return - - try: - success = self.read_request_headers() - except MaxSizeExceeded: - self.simple_response( - "413 Request Entity Too Large", - "The headers sent with the request exceed the maximum " - "allowed bytes.") - return - else: - if not success: - return - - self.ready = True - - def read_request_line(self): - # HTTP/1.1 connections are persistent by default. If a client - # requests a page, then idles (leaves the connection open), - # then rfile.readline() will raise socket.error("timed out"). 
- # Note that it does this based on the value given to settimeout(), - # and doesn't need the client to request or acknowledge the close - # (although your TCP stack might suffer for it: cf Apache's history - # with FIN_WAIT_2). - request_line = self.rfile.readline() - - # Set started_request to True so communicate() knows to send 408 - # from here on out. - self.started_request = True - if not request_line: - return False - - if request_line == CRLF: - # RFC 2616 sec 4.1: "...if the server is reading the protocol - # stream at the beginning of a message and receives a CRLF - # first, it should ignore the CRLF." - # But only ignore one leading line! else we enable a DoS. - request_line = self.rfile.readline() - if not request_line: - return False - - if not request_line.endswith(CRLF): - self.simple_response( - "400 Bad Request", "HTTP requires CRLF terminators") - return False - - try: - method, uri, req_protocol = request_line.strip().split(SPACE, 2) - rp = int(req_protocol[5]), int(req_protocol[7]) - except (ValueError, IndexError): - self.simple_response("400 Bad Request", "Malformed Request-Line") - return False - - self.uri = uri - self.method = method - - # uri may be an abs_path (including "http://host.domain.tld"); - scheme, authority, path = self.parse_request_uri(uri) - if NUMBER_SIGN in path: - self.simple_response("400 Bad Request", - "Illegal #fragment in Request-URI.") - return False - - if scheme: - self.scheme = scheme - - qs = EMPTY - if QUESTION_MARK in path: - path, qs = path.split(QUESTION_MARK, 1) - - # Unquote the path+params (e.g. "/this%20path" -> "/this path"). - # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2 - # - # But note that "...a URI must be separated into its components - # before the escaped characters within those components can be - # safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2 - # Therefore, "/this%2Fpath" becomes "/this%2Fpath", not "/this/path". 
- try: - atoms = [unquote(x) for x in quoted_slash.split(path)] - except ValueError: - ex = sys.exc_info()[1] - self.simple_response("400 Bad Request", ex.args[0]) - return False - path = "%2F".join(atoms) - self.path = path - - # Note that, like wsgiref and most other HTTP servers, - # we "% HEX HEX"-unquote the path but not the query string. - self.qs = qs - - # Compare request and server HTTP protocol versions, in case our - # server does not support the requested protocol. Limit our output - # to min(req, server). We want the following output: - # request server actual written supported response - # protocol protocol response protocol feature set - # a 1.0 1.0 1.0 1.0 - # b 1.0 1.1 1.1 1.0 - # c 1.1 1.0 1.0 1.0 - # d 1.1 1.1 1.1 1.1 - # Notice that, in (b), the response will be "HTTP/1.1" even though - # the client only understands 1.0. RFC 2616 10.5.6 says we should - # only return 505 if the _major_ version is different. - sp = int(self.server.protocol[5]), int(self.server.protocol[7]) - - if sp[0] != rp[0]: - self.simple_response("505 HTTP Version Not Supported") - return False - - self.request_protocol = req_protocol - self.response_protocol = "HTTP/%s.%s" % min(rp, sp) - - return True - - def read_request_headers(self): - """Read self.rfile into self.inheaders. 
Return success.""" - - # then all the http headers - try: - read_headers(self.rfile, self.inheaders) - except ValueError: - ex = sys.exc_info()[1] - self.simple_response("400 Bad Request", ex.args[0]) - return False - - mrbs = self.server.max_request_body_size - if mrbs and int(self.inheaders.get("Content-Length", 0)) > mrbs: - self.simple_response( - "413 Request Entity Too Large", - "The entity sent with the request exceeds the maximum " - "allowed bytes.") - return False - - # Persistent connection support - if self.response_protocol == "HTTP/1.1": - # Both server and client are HTTP/1.1 - if self.inheaders.get("Connection", "") == "close": - self.close_connection = True - else: - # Either the server or client (or both) are HTTP/1.0 - if self.inheaders.get("Connection", "") != "Keep-Alive": - self.close_connection = True - - # Transfer-Encoding support - te = None - if self.response_protocol == "HTTP/1.1": - te = self.inheaders.get("Transfer-Encoding") - if te: - te = [x.strip().lower() for x in te.split(",") if x.strip()] - - self.chunked_read = False - - if te: - for enc in te: - if enc == "chunked": - self.chunked_read = True - else: - # Note that, even if we see "chunked", we must reject - # if there is an extension we don't recognize. - self.simple_response("501 Unimplemented") - self.close_connection = True - return False - - # From PEP 333: - # "Servers and gateways that implement HTTP 1.1 must provide - # transparent support for HTTP 1.1's "expect/continue" mechanism. - # This may be done in any of several ways: - # 1. Respond to requests containing an Expect: 100-continue request - # with an immediate "100 Continue" response, and proceed normally. - # 2. Proceed with the request normally, but provide the application - # with a wsgi.input stream that will send the "100 Continue" - # response if/when the application first attempts to read from - # the input stream. The read request must then remain blocked - # until the client responds. - # 3. 
Wait until the client decides that the server does not support - # expect/continue, and sends the request body on its own. - # (This is suboptimal, and is not recommended.) - # - # We used to do 3, but are now doing 1. Maybe we'll do 2 someday, - # but it seems like it would be a big slowdown for such a rare case. - if self.inheaders.get("Expect", "") == "100-continue": - # Don't use simple_response here, because it emits headers - # we don't want. See - # https://bitbucket.org/cherrypy/cherrypy/issue/951 - msg = self.server.protocol + " 100 Continue\r\n\r\n" - try: - self.conn.wfile.sendall(msg) - except socket.error: - x = sys.exc_info()[1] - if x.args[0] not in socket_errors_to_ignore: - raise - return True - - def parse_request_uri(self, uri): - """Parse a Request-URI into (scheme, authority, path). - - Note that Request-URI's must be one of:: - - Request-URI = "*" | absoluteURI | abs_path | authority - - Therefore, a Request-URI which starts with a double forward-slash - cannot be a "net_path":: - - net_path = "//" authority [ abs_path ] - - Instead, it must be interpreted as an "abs_path" with an empty first - path segment:: - - abs_path = "/" path_segments - path_segments = segment *( "/" segment ) - segment = *pchar *( ";" param ) - param = *pchar - """ - if uri == ASTERISK: - return None, None, uri - - i = uri.find('://') - if i > 0 and QUESTION_MARK not in uri[:i]: - # An absoluteURI. - # If there's a scheme (and it must be http or https), then: - # http_URL = "http:" "//" host [ ":" port ] [ abs_path [ "?" query - # ]] - scheme, remainder = uri[:i].lower(), uri[i + 3:] - authority, path = remainder.split(FORWARD_SLASH, 1) - path = FORWARD_SLASH + path - return scheme, authority, path - - if uri.startswith(FORWARD_SLASH): - # An abs_path. - return None, None, uri - else: - # An authority. 
- return None, uri, None - - def respond(self): - """Call the gateway and write its iterable output.""" - mrbs = self.server.max_request_body_size - if self.chunked_read: - self.rfile = ChunkedRFile(self.conn.rfile, mrbs) - else: - cl = int(self.inheaders.get("Content-Length", 0)) - if mrbs and mrbs < cl: - if not self.sent_headers: - self.simple_response( - "413 Request Entity Too Large", - "The entity sent with the request exceeds the maximum " - "allowed bytes.") - return - self.rfile = KnownLengthRFile(self.conn.rfile, cl) - - self.server.gateway(self).respond() - - if (self.ready and not self.sent_headers): - self.sent_headers = True - self.send_headers() - if self.chunked_write: - self.conn.wfile.sendall("0\r\n\r\n") - - def simple_response(self, status, msg=""): - """Write a simple response back to the client.""" - status = str(status) - buf = [self.server.protocol + SPACE + - status + CRLF, - "Content-Length: %s\r\n" % len(msg), - "Content-Type: text/plain\r\n"] - - if status[:3] in ("413", "414"): - # Request Entity Too Large / Request-URI Too Long - self.close_connection = True - if self.response_protocol == 'HTTP/1.1': - # This will not be true for 414, since read_request_line - # usually raises 414 before reading the whole line, and we - # therefore cannot know the proper response_protocol. - buf.append("Connection: close\r\n") - else: - # HTTP/1.0 had no 413/414 status nor Connection header. - # Emit 400 instead and trust the message body is enough. 
- status = "400 Bad Request" - - buf.append(CRLF) - if msg: - if isinstance(msg, unicodestr): - msg = msg.encode("ISO-8859-1") - buf.append(msg) - - try: - self.conn.wfile.sendall("".join(buf)) - except socket.error: - x = sys.exc_info()[1] - if x.args[0] not in socket_errors_to_ignore: - raise - - def write(self, chunk): - """Write unbuffered data to the client.""" - if self.chunked_write and chunk: - buf = [hex(len(chunk))[2:], CRLF, chunk, CRLF] - self.conn.wfile.sendall(EMPTY.join(buf)) - else: - self.conn.wfile.sendall(chunk) - - def send_headers(self): - """Assert, process, and send the HTTP response message-headers. - - You must set self.status, and self.outheaders before calling this. - """ - hkeys = [key.lower() for key, value in self.outheaders] - status = int(self.status[:3]) - - if status == 413: - # Request Entity Too Large. Close conn to avoid garbage. - self.close_connection = True - elif "content-length" not in hkeys: - # "All 1xx (informational), 204 (no content), - # and 304 (not modified) responses MUST NOT - # include a message-body." So no point chunking. - if status < 200 or status in (204, 205, 304): - pass - else: - if (self.response_protocol == 'HTTP/1.1' - and self.method != 'HEAD'): - # Use the chunked transfer-coding - self.chunked_write = True - self.outheaders.append(("Transfer-Encoding", "chunked")) - else: - # Closing the conn is the only way to determine len. - self.close_connection = True - - if "connection" not in hkeys: - if self.response_protocol == 'HTTP/1.1': - # Both server and client are HTTP/1.1 or better - if self.close_connection: - self.outheaders.append(("Connection", "close")) - else: - # Server and/or client are HTTP/1.0 - if not self.close_connection: - self.outheaders.append(("Connection", "Keep-Alive")) - - if (not self.close_connection) and (not self.chunked_read): - # Read any remaining request body data on the socket. 
- # "If an origin server receives a request that does not include an - # Expect request-header field with the "100-continue" expectation, - # the request includes a request body, and the server responds - # with a final status code before reading the entire request body - # from the transport connection, then the server SHOULD NOT close - # the transport connection until it has read the entire request, - # or until the client closes the connection. Otherwise, the client - # might not reliably receive the response message. However, this - # requirement is not be construed as preventing a server from - # defending itself against denial-of-service attacks, or from - # badly broken client implementations." - remaining = getattr(self.rfile, 'remaining', 0) - if remaining > 0: - self.rfile.read(remaining) - - if "date" not in hkeys: - self.outheaders.append(("Date", rfc822.formatdate())) - - if "server" not in hkeys: - self.outheaders.append(("Server", self.server.server_name)) - - buf = [self.server.protocol + SPACE + self.status + CRLF] - for k, v in self.outheaders: - buf.append(k + COLON + SPACE + v + CRLF) - buf.append(CRLF) - self.conn.wfile.sendall(EMPTY.join(buf)) - - -class NoSSLError(Exception): - - """Exception raised when a client speaks HTTP to an HTTPS socket.""" - pass - - -class FatalSSLAlert(Exception): - - """Exception raised when the SSL implementation signals a fatal alert.""" - pass - - -class CP_fileobject(socket._fileobject): - - """Faux file object attached to a socket object.""" - - def __init__(self, *args, **kwargs): - self.bytes_read = 0 - self.bytes_written = 0 - socket._fileobject.__init__(self, *args, **kwargs) - - def sendall(self, data): - """Sendall for non-blocking sockets.""" - while data: - try: - bytes_sent = self.send(data) - data = data[bytes_sent:] - except socket.error, e: - if e.args[0] not in socket_errors_nonblocking: - raise - - def send(self, data): - bytes_sent = self._sock.send(data) - self.bytes_written += bytes_sent - 
return bytes_sent - - def flush(self): - if self._wbuf: - buffer = "".join(self._wbuf) - self._wbuf = [] - self.sendall(buffer) - - def recv(self, size): - while True: - try: - data = self._sock.recv(size) - self.bytes_read += len(data) - return data - except socket.error, e: - if (e.args[0] not in socket_errors_nonblocking - and e.args[0] not in socket_error_eintr): - raise - - if not _fileobject_uses_str_type: - def read(self, size=-1): - # Use max, disallow tiny reads in a loop as they are very - # inefficient. - # We never leave read() with any leftover data from a new recv() - # call in our internal buffer. - rbufsize = max(self._rbufsize, self.default_bufsize) - # Our use of StringIO rather than lists of string objects returned - # by recv() minimizes memory usage and fragmentation that occurs - # when rbufsize is large compared to the typical return value of - # recv(). - buf = self._rbuf - buf.seek(0, 2) # seek end - if size < 0: - # Read until EOF - # reset _rbuf. we consume it via buf. - self._rbuf = StringIO.StringIO() - while True: - data = self.recv(rbufsize) - if not data: - break - buf.write(data) - return buf.getvalue() - else: - # Read until size bytes or EOF seen, whichever comes first - buf_len = buf.tell() - if buf_len >= size: - # Already have size bytes in our buffer? Extract and - # return. - buf.seek(0) - rv = buf.read(size) - self._rbuf = StringIO.StringIO() - self._rbuf.write(buf.read()) - return rv - - # reset _rbuf. we consume it via buf. - self._rbuf = StringIO.StringIO() - while True: - left = size - buf_len - # recv() will malloc the amount of memory given as its - # parameter even though it often returns much less data - # than that. The returned data string is short lived - # as we copy it into a StringIO and free it. This avoids - # fragmentation issues on many platforms. - data = self.recv(left) - if not data: - break - n = len(data) - if n == size and not buf_len: - # Shortcut. 
Avoid buffer data copies when: - # - We have no data in our buffer. - # AND - # - Our call to recv returned exactly the - # number of bytes we were asked to read. - return data - if n == left: - buf.write(data) - del data # explicit free - break - assert n <= left, "recv(%d) returned %d bytes" % (left, n) - buf.write(data) - buf_len += n - del data # explicit free - #assert buf_len == buf.tell() - return buf.getvalue() - - def readline(self, size=-1): - buf = self._rbuf - buf.seek(0, 2) # seek end - if buf.tell() > 0: - # check if we already have it in our buffer - buf.seek(0) - bline = buf.readline(size) - if bline.endswith('\n') or len(bline) == size: - self._rbuf = StringIO.StringIO() - self._rbuf.write(buf.read()) - return bline - del bline - if size < 0: - # Read until \n or EOF, whichever comes first - if self._rbufsize <= 1: - # Speed up unbuffered case - buf.seek(0) - buffers = [buf.read()] - # reset _rbuf. we consume it via buf. - self._rbuf = StringIO.StringIO() - data = None - recv = self.recv - while data != "\n": - data = recv(1) - if not data: - break - buffers.append(data) - return "".join(buffers) - - buf.seek(0, 2) # seek end - # reset _rbuf. we consume it via buf. - self._rbuf = StringIO.StringIO() - while True: - data = self.recv(self._rbufsize) - if not data: - break - nl = data.find('\n') - if nl >= 0: - nl += 1 - buf.write(data[:nl]) - self._rbuf.write(data[nl:]) - del data - break - buf.write(data) - return buf.getvalue() - else: - # Read until size bytes or \n or EOF seen, whichever comes - # first - buf.seek(0, 2) # seek end - buf_len = buf.tell() - if buf_len >= size: - buf.seek(0) - rv = buf.read(size) - self._rbuf = StringIO.StringIO() - self._rbuf.write(buf.read()) - return rv - # reset _rbuf. we consume it via buf. - self._rbuf = StringIO.StringIO() - while True: - data = self.recv(self._rbufsize) - if not data: - break - left = size - buf_len - # did we just receive a newline? 
- nl = data.find('\n', 0, left) - if nl >= 0: - nl += 1 - # save the excess data to _rbuf - self._rbuf.write(data[nl:]) - if buf_len: - buf.write(data[:nl]) - break - else: - # Shortcut. Avoid data copy through buf when - # returning a substring of our first recv(). - return data[:nl] - n = len(data) - if n == size and not buf_len: - # Shortcut. Avoid data copy through buf when - # returning exactly all of our first recv(). - return data - if n >= left: - buf.write(data[:left]) - self._rbuf.write(data[left:]) - break - buf.write(data) - buf_len += n - #assert buf_len == buf.tell() - return buf.getvalue() - else: - def read(self, size=-1): - if size < 0: - # Read until EOF - buffers = [self._rbuf] - self._rbuf = "" - if self._rbufsize <= 1: - recv_size = self.default_bufsize - else: - recv_size = self._rbufsize - - while True: - data = self.recv(recv_size) - if not data: - break - buffers.append(data) - return "".join(buffers) - else: - # Read until size bytes or EOF seen, whichever comes first - data = self._rbuf - buf_len = len(data) - if buf_len >= size: - self._rbuf = data[size:] - return data[:size] - buffers = [] - if data: - buffers.append(data) - self._rbuf = "" - while True: - left = size - buf_len - recv_size = max(self._rbufsize, left) - data = self.recv(recv_size) - if not data: - break - buffers.append(data) - n = len(data) - if n >= left: - self._rbuf = data[left:] - buffers[-1] = data[:left] - break - buf_len += n - return "".join(buffers) - - def readline(self, size=-1): - data = self._rbuf - if size < 0: - # Read until \n or EOF, whichever comes first - if self._rbufsize <= 1: - # Speed up unbuffered case - assert data == "" - buffers = [] - while data != "\n": - data = self.recv(1) - if not data: - break - buffers.append(data) - return "".join(buffers) - nl = data.find('\n') - if nl >= 0: - nl += 1 - self._rbuf = data[nl:] - return data[:nl] - buffers = [] - if data: - buffers.append(data) - self._rbuf = "" - while True: - data = 
self.recv(self._rbufsize) - if not data: - break - buffers.append(data) - nl = data.find('\n') - if nl >= 0: - nl += 1 - self._rbuf = data[nl:] - buffers[-1] = data[:nl] - break - return "".join(buffers) - else: - # Read until size bytes or \n or EOF seen, whichever comes - # first - nl = data.find('\n', 0, size) - if nl >= 0: - nl += 1 - self._rbuf = data[nl:] - return data[:nl] - buf_len = len(data) - if buf_len >= size: - self._rbuf = data[size:] - return data[:size] - buffers = [] - if data: - buffers.append(data) - self._rbuf = "" - while True: - data = self.recv(self._rbufsize) - if not data: - break - buffers.append(data) - left = size - buf_len - nl = data.find('\n', 0, left) - if nl >= 0: - nl += 1 - self._rbuf = data[nl:] - buffers[-1] = data[:nl] - break - n = len(data) - if n >= left: - self._rbuf = data[left:] - buffers[-1] = data[:left] - break - buf_len += n - return "".join(buffers) - - -class HTTPConnection(object): - - """An HTTP connection (active socket). - - server: the Server object which received this connection. - socket: the raw socket object (usually TCP) for this connection. - makefile: a fileobject class for reading from the socket. - """ - - remote_addr = None - remote_port = None - ssl_env = None - rbufsize = DEFAULT_BUFFER_SIZE - wbufsize = DEFAULT_BUFFER_SIZE - RequestHandlerClass = HTTPRequest - - def __init__(self, server, sock, makefile=CP_fileobject): - self.server = server - self.socket = sock - self.rfile = makefile(sock, "rb", self.rbufsize) - self.wfile = makefile(sock, "wb", self.wbufsize) - self.requests_seen = 0 - - def communicate(self): - """Read each request and respond appropriately.""" - request_seen = False - try: - while True: - # (re)set req to None so that if something goes wrong in - # the RequestHandlerClass constructor, the error doesn't - # get written to the previous request. - req = None - req = self.RequestHandlerClass(self.server, self) - - # This order of operations should guarantee correct pipelining. 
- req.parse_request() - if self.server.stats['Enabled']: - self.requests_seen += 1 - if not req.ready: - # Something went wrong in the parsing (and the server has - # probably already made a simple_response). Return and - # let the conn close. - return - - request_seen = True - req.respond() - if req.close_connection: - return - except socket.error: - e = sys.exc_info()[1] - errnum = e.args[0] - # sadly SSL sockets return a different (longer) time out string - if ( - errnum == 'timed out' or - errnum == 'The read operation timed out' - ): - # Don't error if we're between requests; only error - # if 1) no request has been started at all, or 2) we're - # in the middle of a request. - # See https://bitbucket.org/cherrypy/cherrypy/issue/853 - if (not request_seen) or (req and req.started_request): - # Don't bother writing the 408 if the response - # has already started being written. - if req and not req.sent_headers: - try: - req.simple_response("408 Request Timeout") - except FatalSSLAlert: - # Close the connection. - return - elif errnum not in socket_errors_to_ignore: - self.server.error_log("socket.error %s" % repr(errnum), - level=logging.WARNING, traceback=True) - if req and not req.sent_headers: - try: - req.simple_response("500 Internal Server Error") - except FatalSSLAlert: - # Close the connection. - return - return - except (KeyboardInterrupt, SystemExit): - raise - except FatalSSLAlert: - # Close the connection. 
- return - except NoSSLError: - if req and not req.sent_headers: - # Unwrap our wfile - self.wfile = CP_fileobject( - self.socket._sock, "wb", self.wbufsize) - req.simple_response( - "400 Bad Request", - "The client sent a plain HTTP request, but " - "this server only speaks HTTPS on this port.") - self.linger = True - except Exception: - e = sys.exc_info()[1] - self.server.error_log(repr(e), level=logging.ERROR, traceback=True) - if req and not req.sent_headers: - try: - req.simple_response("500 Internal Server Error") - except FatalSSLAlert: - # Close the connection. - return - - linger = False - - def close(self): - """Close the socket underlying this connection.""" - self.rfile.close() - - if not self.linger: - # Python's socket module does NOT call close on the kernel - # socket when you call socket.close(). We do so manually here - # because we want this server to send a FIN TCP segment - # immediately. Note this must be called *before* calling - # socket.close(), because the latter drops its reference to - # the kernel socket. - if hasattr(self.socket, '_sock'): - self.socket._sock.close() - self.socket.close() - else: - # On the other hand, sometimes we want to hang around for a bit - # to make sure the client has a chance to read our entire - # response. Skipping the close() calls here delays the FIN - # packet until the socket object is garbage-collected later. - # Someday, perhaps, we'll do the full lingering_close that - # Apache does, but not today. - pass - - -class TrueyZero(object): - - """An object which equals and does math like the integer 0 but evals True. - """ - - def __add__(self, other): - return other - - def __radd__(self, other): - return other -trueyzero = TrueyZero() - - -_SHUTDOWNREQUEST = None - - -class WorkerThread(threading.Thread): - - """Thread which continuously polls a Queue for Connection objects. - - Due to the timing issues of polling a Queue, a WorkerThread does not - check its own 'ready' flag after it has started. 
To stop the thread, - it is necessary to stick a _SHUTDOWNREQUEST object onto the Queue - (one for each running WorkerThread). - """ - - conn = None - """The current connection pulled off the Queue, or None.""" - - server = None - """The HTTP Server which spawned this thread, and which owns the - Queue and is placing active connections into it.""" - - ready = False - """A simple flag for the calling server to know when this thread - has begun polling the Queue.""" - - def __init__(self, server): - self.ready = False - self.server = server - - self.requests_seen = 0 - self.bytes_read = 0 - self.bytes_written = 0 - self.start_time = None - self.work_time = 0 - self.stats = { - 'Requests': lambda s: self.requests_seen + ( - (self.start_time is None) and - trueyzero or - self.conn.requests_seen - ), - 'Bytes Read': lambda s: self.bytes_read + ( - (self.start_time is None) and - trueyzero or - self.conn.rfile.bytes_read - ), - 'Bytes Written': lambda s: self.bytes_written + ( - (self.start_time is None) and - trueyzero or - self.conn.wfile.bytes_written - ), - 'Work Time': lambda s: self.work_time + ( - (self.start_time is None) and - trueyzero or - time.time() - self.start_time - ), - 'Read Throughput': lambda s: s['Bytes Read'](s) / ( - s['Work Time'](s) or 1e-6), - 'Write Throughput': lambda s: s['Bytes Written'](s) / ( - s['Work Time'](s) or 1e-6), - } - threading.Thread.__init__(self) - - def run(self): - self.server.stats['Worker Threads'][self.getName()] = self.stats - try: - self.ready = True - while True: - conn = self.server.requests.get() - if conn is _SHUTDOWNREQUEST: - return - - self.conn = conn - if self.server.stats['Enabled']: - self.start_time = time.time() - try: - conn.communicate() - finally: - conn.close() - if self.server.stats['Enabled']: - self.requests_seen += self.conn.requests_seen - self.bytes_read += self.conn.rfile.bytes_read - self.bytes_written += self.conn.wfile.bytes_written - self.work_time += time.time() - self.start_time - 
self.start_time = None - self.conn = None - except (KeyboardInterrupt, SystemExit): - exc = sys.exc_info()[1] - self.server.interrupt = exc - - -class ThreadPool(object): - - """A Request Queue for an HTTPServer which pools threads. - - ThreadPool objects must provide min, get(), put(obj), start() - and stop(timeout) attributes. - """ - - def __init__(self, server, min=10, max=-1, - accepted_queue_size=-1, accepted_queue_timeout=10): - self.server = server - self.min = min - self.max = max - self._threads = [] - self._queue = queue.Queue(maxsize=accepted_queue_size) - self._queue_put_timeout = accepted_queue_timeout - self.get = self._queue.get - - def start(self): - """Start the pool of threads.""" - for i in range(self.min): - self._threads.append(WorkerThread(self.server)) - for worker in self._threads: - worker.setName("CP Server " + worker.getName()) - worker.start() - for worker in self._threads: - while not worker.ready: - time.sleep(.1) - - def _get_idle(self): - """Number of worker threads which are idle. 
Read-only.""" - return len([t for t in self._threads if t.conn is None]) - idle = property(_get_idle, doc=_get_idle.__doc__) - - def put(self, obj): - self._queue.put(obj, block=True, timeout=self._queue_put_timeout) - if obj is _SHUTDOWNREQUEST: - return - - def grow(self, amount): - """Spawn new worker threads (not above self.max).""" - if self.max > 0: - budget = max(self.max - len(self._threads), 0) - else: - # self.max <= 0 indicates no maximum - budget = float('inf') - - n_new = min(amount, budget) - - workers = [self._spawn_worker() for i in range(n_new)] - while not self._all(operator.attrgetter('ready'), workers): - time.sleep(.1) - self._threads.extend(workers) - - def _spawn_worker(self): - worker = WorkerThread(self.server) - worker.setName("CP Server " + worker.getName()) - worker.start() - return worker - - def _all(func, items): - results = [func(item) for item in items] - return reduce(operator.and_, results, True) - _all = staticmethod(_all) - - def shrink(self, amount): - """Kill off worker threads (not below self.min).""" - # Grow/shrink the pool if necessary. - # Remove any dead threads from our list - for t in self._threads: - if not t.isAlive(): - self._threads.remove(t) - amount -= 1 - - # calculate the number of threads above the minimum - n_extra = max(len(self._threads) - self.min, 0) - - # don't remove more than amount - n_to_remove = min(amount, n_extra) - - # put shutdown requests on the queue equal to the number of threads - # to remove. As each request is processed by a worker, that worker - # will terminate and be culled from the list. - for n in range(n_to_remove): - self._queue.put(_SHUTDOWNREQUEST) - - def stop(self, timeout=5): - # Must shut down threads here so the code that calls - # this method can know when all threads are stopped. - for worker in self._threads: - self._queue.put(_SHUTDOWNREQUEST) - - # Don't join currentThread (when stop is called inside a request). 
- current = threading.currentThread() - if timeout and timeout >= 0: - endtime = time.time() + timeout - while self._threads: - worker = self._threads.pop() - if worker is not current and worker.isAlive(): - try: - if timeout is None or timeout < 0: - worker.join() - else: - remaining_time = endtime - time.time() - if remaining_time > 0: - worker.join(remaining_time) - if worker.isAlive(): - # We exhausted the timeout. - # Forcibly shut down the socket. - c = worker.conn - if c and not c.rfile.closed: - try: - c.socket.shutdown(socket.SHUT_RD) - except TypeError: - # pyOpenSSL sockets don't take an arg - c.socket.shutdown() - worker.join() - except (AssertionError, - # Ignore repeated Ctrl-C. - # See - # https://bitbucket.org/cherrypy/cherrypy/issue/691. - KeyboardInterrupt): - pass - - def _get_qsize(self): - return self._queue.qsize() - qsize = property(_get_qsize) - - -try: - import fcntl -except ImportError: - try: - from ctypes import windll, WinError - import ctypes.wintypes - _SetHandleInformation = windll.kernel32.SetHandleInformation - _SetHandleInformation.argtypes = [ - ctypes.wintypes.HANDLE, - ctypes.wintypes.DWORD, - ctypes.wintypes.DWORD, - ] - _SetHandleInformation.restype = ctypes.wintypes.BOOL - except ImportError: - def prevent_socket_inheritance(sock): - """Dummy function, since neither fcntl nor ctypes are available.""" - pass - else: - def prevent_socket_inheritance(sock): - """Mark the given socket fd as non-inheritable (Windows).""" - if not _SetHandleInformation(sock.fileno(), 1, 0): - raise WinError() -else: - def prevent_socket_inheritance(sock): - """Mark the given socket fd as non-inheritable (POSIX).""" - fd = sock.fileno() - old_flags = fcntl.fcntl(fd, fcntl.F_GETFD) - fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC) - - -class SSLAdapter(object): - - """Base class for SSL driver library adapters. 
- - Required methods: - - * ``wrap(sock) -> (wrapped socket, ssl environ dict)`` - * ``makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE) -> - socket file object`` - """ - - def __init__(self, certificate, private_key, certificate_chain=None): - self.certificate = certificate - self.private_key = private_key - self.certificate_chain = certificate_chain - - def wrap(self, sock): - raise NotImplemented - - def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE): - raise NotImplemented - - -class HTTPServer(object): - - """An HTTP server.""" - - _bind_addr = "127.0.0.1" - _interrupt = None - - gateway = None - """A Gateway instance.""" - - minthreads = None - """The minimum number of worker threads to create (default 10).""" - - maxthreads = None - """The maximum number of worker threads to create (default -1 = no limit). - """ - - server_name = None - """The name of the server; defaults to socket.gethostname().""" - - protocol = "HTTP/1.1" - """The version string to write in the Status-Line of all HTTP responses. - - For example, "HTTP/1.1" is the default. This also limits the supported - features used in the response.""" - - request_queue_size = 5 - """The 'backlog' arg to socket.listen(); max queued connections - (default 5). - """ - - shutdown_timeout = 5 - """The total time, in seconds, to wait for worker threads to cleanly exit. - """ - - timeout = 10 - """The timeout in seconds for accepted connections (default 10).""" - - version = "CherryPy/3.8.0" - """A version string for the HTTPServer.""" - - software = None - """The value to set for the SERVER_SOFTWARE entry in the WSGI environ. 
- - If None, this defaults to ``'%s Server' % self.version``.""" - - ready = False - """An internal flag which marks whether the socket is accepting connections - """ - - max_request_header_size = 0 - """The maximum size, in bytes, for request headers, or 0 for no limit.""" - - max_request_body_size = 0 - """The maximum size, in bytes, for request bodies, or 0 for no limit.""" - - nodelay = True - """If True (the default since 3.1), sets the TCP_NODELAY socket option.""" - - ConnectionClass = HTTPConnection - """The class to use for handling HTTP connections.""" - - ssl_adapter = None - """An instance of SSLAdapter (or a subclass). - - You must have the corresponding SSL driver library installed.""" - - def __init__(self, bind_addr, gateway, minthreads=10, maxthreads=-1, - server_name=None): - self.bind_addr = bind_addr - self.gateway = gateway - - self.requests = ThreadPool(self, min=minthreads or 1, max=maxthreads) - - if not server_name: - server_name = socket.gethostname() - self.server_name = server_name - self.clear_stats() - - def clear_stats(self): - self._start_time = None - self._run_time = 0 - self.stats = { - 'Enabled': False, - 'Bind Address': lambda s: repr(self.bind_addr), - 'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(), - 'Accepts': 0, - 'Accepts/sec': lambda s: s['Accepts'] / self.runtime(), - 'Queue': lambda s: getattr(self.requests, "qsize", None), - 'Threads': lambda s: len(getattr(self.requests, "_threads", [])), - 'Threads Idle': lambda s: getattr(self.requests, "idle", None), - 'Socket Errors': 0, - 'Requests': lambda s: (not s['Enabled']) and -1 or sum( - [w['Requests'](w) for w in s['Worker Threads'].values()], 0), - 'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum( - [w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0), - 'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum( - [w['Bytes Written'](w) for w in s['Worker Threads'].values()], - 0), - 'Work Time': lambda s: (not s['Enabled']) and -1 
or sum( - [w['Work Time'](w) for w in s['Worker Threads'].values()], 0), - 'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum( - [w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6) - for w in s['Worker Threads'].values()], 0), - 'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum( - [w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6) - for w in s['Worker Threads'].values()], 0), - 'Worker Threads': {}, - } - logging.statistics["CherryPy HTTPServer %d" % id(self)] = self.stats - - def runtime(self): - if self._start_time is None: - return self._run_time - else: - return self._run_time + (time.time() - self._start_time) - - def __str__(self): - return "%s.%s(%r)" % (self.__module__, self.__class__.__name__, - self.bind_addr) - - def _get_bind_addr(self): - return self._bind_addr - - def _set_bind_addr(self, value): - if isinstance(value, tuple) and value[0] in ('', None): - # Despite the socket module docs, using '' does not - # allow AI_PASSIVE to work. Passing None instead - # returns '0.0.0.0' like we want. In other words: - # host AI_PASSIVE result - # '' Y 192.168.x.y - # '' N 192.168.x.y - # None Y 0.0.0.0 - # None N 127.0.0.1 - # But since you can get the same effect with an explicit - # '0.0.0.0', we deny both the empty string and None as values. - raise ValueError("Host values of '' or None are not allowed. " - "Use '0.0.0.0' (IPv4) or '::' (IPv6) instead " - "to listen on all active interfaces.") - self._bind_addr = value - bind_addr = property( - _get_bind_addr, - _set_bind_addr, - doc="""The interface on which to listen for connections. - - For TCP sockets, a (host, port) tuple. Host values may be any IPv4 - or IPv6 address, or any valid hostname. The string 'localhost' is a - synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6). - The string '0.0.0.0' is a special IPv4 entry meaning "any active - interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for - IPv6. The empty string or None are not allowed. 
- - For UNIX sockets, supply the filename as a string.""") - - def start(self): - """Run the server forever.""" - # We don't have to trap KeyboardInterrupt or SystemExit here, - # because cherrpy.server already does so, calling self.stop() for us. - # If you're using this server with another framework, you should - # trap those exceptions in whatever code block calls start(). - self._interrupt = None - - if self.software is None: - self.software = "%s Server" % self.version - - # SSL backward compatibility - if (self.ssl_adapter is None and - getattr(self, 'ssl_certificate', None) and - getattr(self, 'ssl_private_key', None)): - warnings.warn( - "SSL attributes are deprecated in CherryPy 3.2, and will " - "be removed in CherryPy 3.3. Use an ssl_adapter attribute " - "instead.", - DeprecationWarning - ) - try: - from cherrypy.wsgiserver.ssl_pyopenssl import pyOpenSSLAdapter - except ImportError: - pass - else: - self.ssl_adapter = pyOpenSSLAdapter( - self.ssl_certificate, self.ssl_private_key, - getattr(self, 'ssl_certificate_chain', None)) - - # Select the appropriate socket - if isinstance(self.bind_addr, basestring): - # AF_UNIX socket - - # So we can reuse the socket... - try: - os.unlink(self.bind_addr) - except: - pass - - # So everyone can access the socket... 
- try: - os.chmod(self.bind_addr, 511) # 0777 - except: - pass - - info = [ - (socket.AF_UNIX, socket.SOCK_STREAM, 0, "", self.bind_addr)] - else: - # AF_INET or AF_INET6 socket - # Get the correct address family for our host (allows IPv6 - # addresses) - host, port = self.bind_addr - try: - info = socket.getaddrinfo( - host, port, socket.AF_UNSPEC, - socket.SOCK_STREAM, 0, socket.AI_PASSIVE) - except socket.gaierror: - if ':' in self.bind_addr[0]: - info = [(socket.AF_INET6, socket.SOCK_STREAM, - 0, "", self.bind_addr + (0, 0))] - else: - info = [(socket.AF_INET, socket.SOCK_STREAM, - 0, "", self.bind_addr)] - - self.socket = None - msg = "No socket could be created" - for res in info: - af, socktype, proto, canonname, sa = res - try: - self.bind(af, socktype, proto) - except socket.error, serr: - msg = "%s -- (%s: %s)" % (msg, sa, serr) - if self.socket: - self.socket.close() - self.socket = None - continue - break - if not self.socket: - raise socket.error(msg) - - # Timeout so KeyboardInterrupt can be caught on Win32 - self.socket.settimeout(1) - self.socket.listen(self.request_queue_size) - - # Create worker threads - self.requests.start() - - self.ready = True - self._start_time = time.time() - while self.ready: - try: - self.tick() - except (KeyboardInterrupt, SystemExit): - raise - except: - self.error_log("Error in HTTPServer.tick", level=logging.ERROR, - traceback=True) - - if self.interrupt: - while self.interrupt is True: - # Wait for self.stop() to complete. See _set_interrupt. 
- time.sleep(0.1) - if self.interrupt: - raise self.interrupt - - def error_log(self, msg="", level=20, traceback=False): - # Override this in subclasses as desired - sys.stderr.write(msg + '\n') - sys.stderr.flush() - if traceback: - tblines = format_exc() - sys.stderr.write(tblines) - sys.stderr.flush() - - def bind(self, family, type, proto=0): - """Create (or recreate) the actual socket object.""" - self.socket = socket.socket(family, type, proto) - prevent_socket_inheritance(self.socket) - self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if self.nodelay and not isinstance(self.bind_addr, str): - self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - if self.ssl_adapter is not None: - self.socket = self.ssl_adapter.bind(self.socket) - - # If listening on the IPV6 any address ('::' = IN6ADDR_ANY), - # activate dual-stack. See - # https://bitbucket.org/cherrypy/cherrypy/issue/871. - if (hasattr(socket, 'AF_INET6') and family == socket.AF_INET6 - and self.bind_addr[0] in ('::', '::0', '::0.0.0.0')): - try: - self.socket.setsockopt( - socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0) - except (AttributeError, socket.error): - # Apparently, the socket option is not available in - # this machine's TCP stack - pass - - self.socket.bind(self.bind_addr) - - def tick(self): - """Accept a new connection and put it on the Queue.""" - try: - s, addr = self.socket.accept() - if self.stats['Enabled']: - self.stats['Accepts'] += 1 - if not self.ready: - return - - prevent_socket_inheritance(s) - if hasattr(s, 'settimeout'): - s.settimeout(self.timeout) - - makefile = CP_fileobject - ssl_env = {} - # if ssl cert and key are set, we try to be a secure HTTP server - if self.ssl_adapter is not None: - try: - s, ssl_env = self.ssl_adapter.wrap(s) - except NoSSLError: - msg = ("The client sent a plain HTTP request, but " - "this server only speaks HTTPS on this port.") - buf = ["%s 400 Bad Request\r\n" % self.protocol, - "Content-Length: %s\r\n" % 
len(msg), - "Content-Type: text/plain\r\n\r\n", - msg] - - wfile = makefile(s._sock, "wb", DEFAULT_BUFFER_SIZE) - try: - wfile.sendall("".join(buf)) - except socket.error: - x = sys.exc_info()[1] - if x.args[0] not in socket_errors_to_ignore: - raise - return - if not s: - return - makefile = self.ssl_adapter.makefile - # Re-apply our timeout since we may have a new socket object - if hasattr(s, 'settimeout'): - s.settimeout(self.timeout) - - conn = self.ConnectionClass(self, s, makefile) - - if not isinstance(self.bind_addr, basestring): - # optional values - # Until we do DNS lookups, omit REMOTE_HOST - if addr is None: # sometimes this can happen - # figure out if AF_INET or AF_INET6. - if len(s.getsockname()) == 2: - # AF_INET - addr = ('0.0.0.0', 0) - else: - # AF_INET6 - addr = ('::', 0) - conn.remote_addr = addr[0] - conn.remote_port = addr[1] - - conn.ssl_env = ssl_env - - try: - self.requests.put(conn) - except queue.Full: - # Just drop the conn. TODO: write 503 back? - conn.close() - return - except socket.timeout: - # The only reason for the timeout in start() is so we can - # notice keyboard interrupts on Win32, which don't interrupt - # accept() by default - return - except socket.error: - x = sys.exc_info()[1] - if self.stats['Enabled']: - self.stats['Socket Errors'] += 1 - if x.args[0] in socket_error_eintr: - # I *think* this is right. EINTR should occur when a signal - # is received during the accept() call; all docs say retry - # the call, and I *think* I'm reading it right that Python - # will then go ahead and poll for and handle the signal - # elsewhere. See - # https://bitbucket.org/cherrypy/cherrypy/issue/707. - return - if x.args[0] in socket_errors_nonblocking: - # Just try again. See - # https://bitbucket.org/cherrypy/cherrypy/issue/479. - return - if x.args[0] in socket_errors_to_ignore: - # Our socket was closed. - # See https://bitbucket.org/cherrypy/cherrypy/issue/686. 
- return - raise - - def _get_interrupt(self): - return self._interrupt - - def _set_interrupt(self, interrupt): - self._interrupt = True - self.stop() - self._interrupt = interrupt - interrupt = property(_get_interrupt, _set_interrupt, - doc="Set this to an Exception instance to " - "interrupt the server.") - - def stop(self): - """Gracefully shutdown a server that is serving forever.""" - self.ready = False - if self._start_time is not None: - self._run_time += (time.time() - self._start_time) - self._start_time = None - - sock = getattr(self, "socket", None) - if sock: - if not isinstance(self.bind_addr, basestring): - # Touch our own socket to make accept() return immediately. - try: - host, port = sock.getsockname()[:2] - except socket.error: - x = sys.exc_info()[1] - if x.args[0] not in socket_errors_to_ignore: - # Changed to use error code and not message - # See - # https://bitbucket.org/cherrypy/cherrypy/issue/860. - raise - else: - # Note that we're explicitly NOT using AI_PASSIVE, - # here, because we want an actual IP to touch. - # localhost won't work if we've bound to a public IP, - # but it will if we bound to '0.0.0.0' (INADDR_ANY). - for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC, - socket.SOCK_STREAM): - af, socktype, proto, canonname, sa = res - s = None - try: - s = socket.socket(af, socktype, proto) - # See - # http://groups.google.com/group/cherrypy-users/ - # browse_frm/thread/bbfe5eb39c904fe0 - s.settimeout(1.0) - s.connect((host, port)) - s.close() - except socket.error: - if s: - s.close() - if hasattr(sock, "close"): - sock.close() - self.socket = None - - self.requests.stop(self.shutdown_timeout) - - -class Gateway(object): - - """A base class to interface HTTPServer with other systems, such as WSGI. - """ - - def __init__(self, req): - self.req = req - - def respond(self): - """Process the current request. 
Must be overridden in a subclass.""" - raise NotImplemented - - -# These may either be wsgiserver.SSLAdapter subclasses or the string names -# of such classes (in which case they will be lazily loaded). -ssl_adapters = { - 'builtin': 'cherrypy.wsgiserver.ssl_builtin.BuiltinSSLAdapter', - 'pyopenssl': 'cherrypy.wsgiserver.ssl_pyopenssl.pyOpenSSLAdapter', -} - - -def get_ssl_adapter_class(name='pyopenssl'): - """Return an SSL adapter class for the given name.""" - adapter = ssl_adapters[name.lower()] - if isinstance(adapter, basestring): - last_dot = adapter.rfind(".") - attr_name = adapter[last_dot + 1:] - mod_path = adapter[:last_dot] - - try: - mod = sys.modules[mod_path] - if mod is None: - raise KeyError() - except KeyError: - # The last [''] is important. - mod = __import__(mod_path, globals(), locals(), ['']) - - # Let an AttributeError propagate outward. - try: - adapter = getattr(mod, attr_name) - except AttributeError: - raise AttributeError("'%s' object has no attribute '%s'" - % (mod_path, attr_name)) - - return adapter - -# ------------------------------- WSGI Stuff -------------------------------- # - - -class CherryPyWSGIServer(HTTPServer): - - """A subclass of HTTPServer which calls a WSGI application.""" - - wsgi_version = (1, 0) - """The version of WSGI to produce.""" - - def __init__(self, bind_addr, wsgi_app, numthreads=10, server_name=None, - max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5, - accepted_queue_size=-1, accepted_queue_timeout=10): - self.requests = ThreadPool(self, min=numthreads or 1, max=max, - accepted_queue_size=accepted_queue_size, - accepted_queue_timeout=accepted_queue_timeout) - self.wsgi_app = wsgi_app - self.gateway = wsgi_gateways[self.wsgi_version] - - self.bind_addr = bind_addr - if not server_name: - server_name = socket.gethostname() - self.server_name = server_name - self.request_queue_size = request_queue_size - - self.timeout = timeout - self.shutdown_timeout = shutdown_timeout - self.clear_stats() - - 
def _get_numthreads(self): - return self.requests.min - - def _set_numthreads(self, value): - self.requests.min = value - numthreads = property(_get_numthreads, _set_numthreads) - - -class WSGIGateway(Gateway): - - """A base class to interface HTTPServer with WSGI.""" - - def __init__(self, req): - self.req = req - self.started_response = False - self.env = self.get_environ() - self.remaining_bytes_out = None - - def get_environ(self): - """Return a new environ dict targeting the given wsgi.version""" - raise NotImplemented - - def respond(self): - """Process the current request.""" - response = self.req.server.wsgi_app(self.env, self.start_response) - try: - for chunk in response: - # "The start_response callable must not actually transmit - # the response headers. Instead, it must store them for the - # server or gateway to transmit only after the first - # iteration of the application return value that yields - # a NON-EMPTY string, or upon the application's first - # invocation of the write() callable." (PEP 333) - if chunk: - if isinstance(chunk, unicodestr): - chunk = chunk.encode('ISO-8859-1') - self.write(chunk) - finally: - if hasattr(response, "close"): - response.close() - - def start_response(self, status, headers, exc_info=None): - """WSGI callable to begin the HTTP response.""" - # "The application may call start_response more than once, - # if and only if the exc_info argument is provided." - if self.started_response and not exc_info: - raise AssertionError("WSGI start_response called a second " - "time with no exc_info.") - self.started_response = True - - # "if exc_info is provided, and the HTTP headers have already been - # sent, start_response must raise an error, and should raise the - # exc_info tuple." 
- if self.req.sent_headers: - try: - raise exc_info[0], exc_info[1], exc_info[2] - finally: - exc_info = None - - self.req.status = status - for k, v in headers: - if not isinstance(k, str): - raise TypeError( - "WSGI response header key %r is not of type str." % k) - if not isinstance(v, str): - raise TypeError( - "WSGI response header value %r is not of type str." % v) - if k.lower() == 'content-length': - self.remaining_bytes_out = int(v) - self.req.outheaders.extend(headers) - - return self.write - - def write(self, chunk): - """WSGI callable to write unbuffered data to the client. - - This method is also used internally by start_response (to write - data from the iterable returned by the WSGI application). - """ - if not self.started_response: - raise AssertionError("WSGI write called before start_response.") - - chunklen = len(chunk) - rbo = self.remaining_bytes_out - if rbo is not None and chunklen > rbo: - if not self.req.sent_headers: - # Whew. We can send a 500 to the client. - self.req.simple_response( - "500 Internal Server Error", - "The requested resource returned more bytes than the " - "declared Content-Length.") - else: - # Dang. We have probably already sent data. Truncate the chunk - # to fit (so the client doesn't hang) and raise an error later. - chunk = chunk[:rbo] - - if not self.req.sent_headers: - self.req.sent_headers = True - self.req.send_headers() - - self.req.write(chunk) - - if rbo is not None: - rbo -= chunklen - if rbo < 0: - raise ValueError( - "Response body exceeds the declared Content-Length.") - - -class WSGIGateway_10(WSGIGateway): - - """A Gateway class to interface HTTPServer with WSGI 1.0.x.""" - - def get_environ(self): - """Return a new environ dict targeting the given wsgi.version""" - req = self.req - env = { - # set a non-standard environ entry so the WSGI app can know what - # the *real* server protocol is (and what features to support). - # See http://www.faqs.org/rfcs/rfc2145.html. 
- 'ACTUAL_SERVER_PROTOCOL': req.server.protocol, - 'PATH_INFO': req.path, - 'QUERY_STRING': req.qs, - 'REMOTE_ADDR': req.conn.remote_addr or '', - 'REMOTE_PORT': str(req.conn.remote_port or ''), - 'REQUEST_METHOD': req.method, - 'REQUEST_URI': req.uri, - 'SCRIPT_NAME': '', - 'SERVER_NAME': req.server.server_name, - # Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol. - 'SERVER_PROTOCOL': req.request_protocol, - 'SERVER_SOFTWARE': req.server.software, - 'wsgi.errors': sys.stderr, - 'wsgi.input': req.rfile, - 'wsgi.multiprocess': False, - 'wsgi.multithread': True, - 'wsgi.run_once': False, - 'wsgi.url_scheme': req.scheme, - 'wsgi.version': (1, 0), - } - - if isinstance(req.server.bind_addr, basestring): - # AF_UNIX. This isn't really allowed by WSGI, which doesn't - # address unix domain sockets. But it's better than nothing. - env["SERVER_PORT"] = "" - else: - env["SERVER_PORT"] = str(req.server.bind_addr[1]) - - # Request headers - for k, v in req.inheaders.iteritems(): - env["HTTP_" + k.upper().replace("-", "_")] = v - - # CONTENT_TYPE/CONTENT_LENGTH - ct = env.pop("HTTP_CONTENT_TYPE", None) - if ct is not None: - env["CONTENT_TYPE"] = ct - cl = env.pop("HTTP_CONTENT_LENGTH", None) - if cl is not None: - env["CONTENT_LENGTH"] = cl - - if req.conn.ssl_env: - env.update(req.conn.ssl_env) - - return env - - -class WSGIGateway_u0(WSGIGateway_10): - - """A Gateway class to interface HTTPServer with WSGI u.0. - - WSGI u.0 is an experimental protocol, which uses unicode for keys and - values in both Python 2 and Python 3. 
- """ - - def get_environ(self): - """Return a new environ dict targeting the given wsgi.version""" - req = self.req - env_10 = WSGIGateway_10.get_environ(self) - env = dict([(k.decode('ISO-8859-1'), v) - for k, v in env_10.iteritems()]) - env[u'wsgi.version'] = ('u', 0) - - # Request-URI - env.setdefault(u'wsgi.url_encoding', u'utf-8') - try: - for key in [u"PATH_INFO", u"SCRIPT_NAME", u"QUERY_STRING"]: - env[key] = env_10[str(key)].decode(env[u'wsgi.url_encoding']) - except UnicodeDecodeError: - # Fall back to latin 1 so apps can transcode if needed. - env[u'wsgi.url_encoding'] = u'ISO-8859-1' - for key in [u"PATH_INFO", u"SCRIPT_NAME", u"QUERY_STRING"]: - env[key] = env_10[str(key)].decode(env[u'wsgi.url_encoding']) - - for k, v in sorted(env.items()): - if isinstance(v, str) and k not in ('REQUEST_URI', 'wsgi.input'): - env[k] = v.decode('ISO-8859-1') - - return env - -wsgi_gateways = { - (1, 0): WSGIGateway_10, - ('u', 0): WSGIGateway_u0, -} - - -class WSGIPathInfoDispatcher(object): - - """A WSGI dispatcher for dispatch based on the PATH_INFO. - - apps: a dict or list of (path_prefix, app) pairs. - """ - - def __init__(self, apps): - try: - apps = list(apps.items()) - except AttributeError: - pass - - # Sort the apps by len(path), descending - apps.sort(cmp=lambda x, y: cmp(len(x[0]), len(y[0]))) - apps.reverse() - - # The path_prefix strings must start, but not end, with a slash. - # Use "" instead of "/". - self.apps = [(p.rstrip("/"), a) for p, a in apps] - - def __call__(self, environ, start_response): - path = environ["PATH_INFO"] or "/" - for p, app in self.apps: - # The apps list should be sorted by length, descending. 
- if path.startswith(p + "/") or path == p: - environ = environ.copy() - environ["SCRIPT_NAME"] = environ["SCRIPT_NAME"] + p - environ["PATH_INFO"] = path[len(p):] - return app(environ, start_response) - - start_response('404 Not Found', [('Content-Type', 'text/plain'), - ('Content-Length', '0')]) - return [''] diff --git a/libs_crutch/contrib/passlib/__init__.py b/libs_crutch/contrib/passlib/__init__.py deleted file mode 100644 index 2a93599..0000000 --- a/libs_crutch/contrib/passlib/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -"""passlib - suite of password hashing & generation routines""" - -__version__ = '1.7.2' diff --git a/libs_crutch/contrib/passlib/_data/wordsets/bip39.txt b/libs_crutch/contrib/passlib/_data/wordsets/bip39.txt deleted file mode 100644 index e29842e..0000000 --- a/libs_crutch/contrib/passlib/_data/wordsets/bip39.txt +++ /dev/null @@ -1,2049 +0,0 @@ -abandon -ability -able -about -above -absent -absorb -abstract -absurd -abuse -access -accident -account -accuse -achieve -acid -acoustic -acquire -across -act -action -actor -actress -actual -adapt -add -addict -address -adjust -admit -adult -advance -advice -aerobic -affair -afford -afraid -again -age -agent -agree -ahead -aim -air -airport -aisle -alarm -album -alcohol -alert -alien -all -alley -allow -almost -alone -alpha -already -also -alter -always -amateur -amazing -among -amount -amused -analyst -anchor -ancient -anger -angle -angry -animal -ankle -announce -annual -another -answer -antenna -antique -anxiety -any -apart -apology -appear -apple -approve -april -arch -arctic -area -arena -argue -arm -armed -armor -army -around -arrange -arrest -arrive -arrow -art -artefact -artist -artwork -ask -aspect -assault -asset -assist -assume -asthma -athlete -atom -attack -attend -attitude -attract -auction -audit -august -aunt -author -auto -autumn -average -avocado -avoid -awake -aware -away -awesome -awful -awkward -axis -baby -bachelor -bacon -badge -bag -balance -balcony -ball -bamboo -banana 
-banner -bar -barely -bargain -barrel -base -basic -basket -battle -beach -bean -beauty -because -become -beef -before -begin -behave -behind -believe -below -belt -bench -benefit -best -betray -better -between -beyond -bicycle -bid -bike -bind -biology -bird -birth -bitter -black -blade -blame -blanket -blast -bleak -bless -blind -blood -blossom -blouse -blue -blur -blush -board -boat -body -boil -bomb -bone -bonus -book -boost -border -boring -borrow -boss -bottom -bounce -box -boy -bracket -brain -brand -brass -brave -bread -breeze -brick -bridge -brief -bright -bring -brisk -broccoli -broken -bronze -broom -brother -brown -brush -bubble -buddy -budget -buffalo -build -bulb -bulk -bullet -bundle -bunker -burden -burger -burst -bus -business -busy -butter -buyer -buzz -cabbage -cabin -cable -cactus -cage -cake -call -calm -camera -camp -can -canal -cancel -candy -cannon -canoe -canvas -canyon -capable -capital -captain -car -carbon -card -cargo -carpet -carry -cart -case -cash -casino -castle -casual -cat -catalog -catch -category -cattle -caught -cause -caution -cave -ceiling -celery -cement -census -century -cereal -certain -chair -chalk -champion -change -chaos -chapter -charge -chase -chat -cheap -check -cheese -chef -cherry -chest -chicken -chief -child -chimney -choice -choose -chronic -chuckle -chunk -churn -cigar -cinnamon -circle -citizen -city -civil -claim -clap -clarify -claw -clay -clean -clerk -clever -click -client -cliff -climb -clinic -clip -clock -clog -close -cloth -cloud -clown -club -clump -cluster -clutch -coach -coast -coconut -code -coffee -coil -coin -collect -color -column -combine -come -comfort -comic -common -company -concert -conduct -confirm -congress -connect -consider -control -convince -cook -cool -copper -copy -coral -core -corn -correct -cost -cotton -couch -country -couple -course -cousin -cover -coyote -crack -cradle -craft -cram -crane -crash -crater -crawl -crazy -cream -credit -creek -crew -cricket -crime -crisp -critic 
-crop -cross -crouch -crowd -crucial -cruel -cruise -crumble -crunch -crush -cry -crystal -cube -culture -cup -cupboard -curious -current -curtain -curve -cushion -custom -cute -cycle -dad -damage -damp -dance -danger -daring -dash -daughter -dawn -day -deal -debate -debris -decade -december -decide -decline -decorate -decrease -deer -defense -define -defy -degree -delay -deliver -demand -demise -denial -dentist -deny -depart -depend -deposit -depth -deputy -derive -describe -desert -design -desk -despair -destroy -detail -detect -develop -device -devote -diagram -dial -diamond -diary -dice -diesel -diet -differ -digital -dignity -dilemma -dinner -dinosaur -direct -dirt -disagree -discover -disease -dish -dismiss -disorder -display -distance -divert -divide -divorce -dizzy -doctor -document -dog -doll -dolphin -domain -donate -donkey -donor -door -dose -double -dove -draft -dragon -drama -drastic -draw -dream -dress -drift -drill -drink -drip -drive -drop -drum -dry -duck -dumb -dune -during -dust -dutch -duty -dwarf -dynamic -eager -eagle -early -earn -earth -easily -east -easy -echo -ecology -economy -edge -edit -educate -effort -egg -eight -either -elbow -elder -electric -elegant -element -elephant -elevator -elite -else -embark -embody -embrace -emerge -emotion -employ -empower -empty -enable -enact -end -endless -endorse -enemy -energy -enforce -engage -engine -enhance -enjoy -enlist -enough -enrich -enroll -ensure -enter -entire -entry -envelope -episode -equal -equip -era -erase -erode -erosion -error -erupt -escape -essay -essence -estate -eternal -ethics -evidence -evil -evoke -evolve -exact -example -excess -exchange -excite -exclude -excuse -execute -exercise -exhaust -exhibit -exile -exist -exit -exotic -expand -expect -expire -explain -expose -express -extend -extra -eye -eyebrow -fabric -face -faculty -fade -faint -faith -fall -false -fame -family -famous -fan -fancy -fantasy -farm -fashion -fat -fatal -father -fatigue -fault -favorite -feature 
-february -federal -fee -feed -feel -female -fence -festival -fetch -fever -few -fiber -fiction -field -figure -file -film -filter -final -find -fine -finger -finish -fire -firm -first -fiscal -fish -fit -fitness -fix -flag -flame -flash -flat -flavor -flee -flight -flip -float -flock -floor -flower -fluid -flush -fly -foam -focus -fog -foil -fold -follow -food -foot -force -forest -forget -fork -fortune -forum -forward -fossil -foster -found -fox -fragile -frame -frequent -fresh -friend -fringe -frog -front -frost -frown -frozen -fruit -fuel -fun -funny -furnace -fury -future -gadget -gain -galaxy -gallery -game -gap -garage -garbage -garden -garlic -garment -gas -gasp -gate -gather -gauge -gaze -general -genius -genre -gentle -genuine -gesture -ghost -giant -gift -giggle -ginger -giraffe -girl -give -glad -glance -glare -glass -glide -glimpse -globe -gloom -glory -glove -glow -glue -goat -goddess -gold -good -goose -gorilla -gospel -gossip -govern -gown -grab -grace -grain -grant -grape -grass -gravity -great -green -grid -grief -grit -grocery -group -grow -grunt -guard -guess -guide -guilt -guitar -gun -gym -habit -hair -half -hammer -hamster -hand -happy -harbor -hard -harsh -harvest -hat -have -hawk -hazard -head -health -heart -heavy -hedgehog -height -hello -helmet -help -hen -hero -hidden -high -hill -hint -hip -hire -history -hobby -hockey -hold -hole -holiday -hollow -home -honey -hood -hope -horn -horror -horse -hospital -host -hotel -hour -hover -hub -huge -human -humble -humor -hundred -hungry -hunt -hurdle -hurry -hurt -husband -hybrid -ice -icon -idea -identify -idle -ignore -ill -illegal -illness -image -imitate -immense -immune -impact -impose -improve -impulse -inch -include -income -increase -index -indicate -indoor -industry -infant -inflict -inform -inhale -inherit -initial -inject -injury -inmate -inner -innocent -input -inquiry -insane -insect -inside -inspire -install -intact -interest -into -invest -invite -involve -iron -island -isolate 
-issue -item -ivory -jacket -jaguar -jar -jazz -jealous -jeans -jelly -jewel -job -join -joke -journey -joy -judge -juice -jump -jungle -junior -junk -just -kangaroo -keen -keep -ketchup -key -kick -kid -kidney -kind -kingdom -kiss -kit -kitchen -kite -kitten -kiwi -knee -knife -knock -know -lab -label -labor -ladder -lady -lake -lamp -language -laptop -large -later -latin -laugh -laundry -lava -law -lawn -lawsuit -layer -lazy -leader -leaf -learn -leave -lecture -left -leg -legal -legend -leisure -lemon -lend -length -lens -leopard -lesson -letter -level -liar -liberty -library -license -life -lift -light -like -limb -limit -link -lion -liquid -list -little -live -lizard -load -loan -lobster -local -lock -logic -lonely -long -loop -lottery -loud -lounge -love -loyal -lucky -luggage -lumber -lunar -lunch -luxury -lyrics -machine -mad -magic -magnet -maid -mail -main -major -make -mammal -man -manage -mandate -mango -mansion -manual -maple -marble -march -margin -marine -market -marriage -mask -mass -master -match -material -math -matrix -matter -maximum -maze -meadow -mean -measure -meat -mechanic -medal -media -melody -melt -member -memory -mention -menu -mercy -merge -merit -merry -mesh -message -metal -method -middle -midnight -milk -million -mimic -mind -minimum -minor -minute -miracle -mirror -misery -miss -mistake -mix -mixed -mixture -mobile -model -modify -mom -moment -monitor -monkey -monster -month -moon -moral -more -morning -mosquito -mother -motion -motor -mountain -mouse -move -movie -much -muffin -mule -multiply -muscle -museum -mushroom -music -must -mutual -myself -mystery -myth -naive -name -napkin -narrow -nasty -nation -nature -near -neck -need -negative -neglect -neither -nephew -nerve -nest -net -network -neutral -never -news -next -nice -night -noble -noise -nominee -noodle -normal -north -nose -notable -note -nothing -notice -novel -now -nuclear -number -nurse -nut -oak -obey -object -oblige -obscure -observe -obtain -obvious -occur -ocean 
-october -odor -off -offer -office -often -oil -okay -old -olive -olympic -omit -once -one -onion -online -only -open -opera -opinion -oppose -option -orange -orbit -orchard -order -ordinary -organ -orient -original -orphan -ostrich -other -outdoor -outer -output -outside -oval -oven -over -own -owner -oxygen -oyster -ozone -pact -paddle -page -pair -palace -palm -panda -panel -panic -panther -paper -parade -parent -park -parrot -party -pass -patch -path -patient -patrol -pattern -pause -pave -payment -peace -peanut -pear -peasant -pelican -pen -penalty -pencil -people -pepper -perfect -permit -person -pet -phone -photo -phrase -physical -piano -picnic -picture -piece -pig -pigeon -pill -pilot -pink -pioneer -pipe -pistol -pitch -pizza -place -planet -plastic -plate -play -please -pledge -pluck -plug -plunge -poem -poet -point -polar -pole -police -pond -pony -pool -popular -portion -position -possible -post -potato -pottery -poverty -powder -power -practice -praise -predict -prefer -prepare -present -pretty -prevent -price -pride -primary -print -priority -prison -private -prize -problem -process -produce -profit -program -project -promote -proof -property -prosper -protect -proud -provide -public -pudding -pull -pulp -pulse -pumpkin -punch -pupil -puppy -purchase -purity -purpose -purse -push -put -puzzle -pyramid -quality -quantum -quarter -question -quick -quit -quiz -quote -rabbit -raccoon -race -rack -radar -radio -rail -rain -raise -rally -ramp -ranch -random -range -rapid -rare -rate -rather -raven -raw -razor -ready -real -reason -rebel -rebuild -recall -receive -recipe -record -recycle -reduce -reflect -reform -refuse -region -regret -regular -reject -relax -release -relief -rely -remain -remember -remind -remove -render -renew -rent -reopen -repair -repeat -replace -report -require -rescue -resemble -resist -resource -response -result -retire -retreat -return -reunion -reveal -review -reward -rhythm -rib -ribbon -rice -rich -ride -ridge -rifle -right 
-rigid -ring -riot -ripple -risk -ritual -rival -river -road -roast -robot -robust -rocket -romance -roof -rookie -room -rose -rotate -rough -round -route -royal -rubber -rude -rug -rule -run -runway -rural -sad -saddle -sadness -safe -sail -salad -salmon -salon -salt -salute -same -sample -sand -satisfy -satoshi -sauce -sausage -save -say -scale -scan -scare -scatter -scene -scheme -school -science -scissors -scorpion -scout -scrap -screen -script -scrub -sea -search -season -seat -second -secret -section -security -seed -seek -segment -select -sell -seminar -senior -sense -sentence -series -service -session -settle -setup -seven -shadow -shaft -shallow -share -shed -shell -sheriff -shield -shift -shine -ship -shiver -shock -shoe -shoot -shop -short -shoulder -shove -shrimp -shrug -shuffle -shy -sibling -sick -side -siege -sight -sign -silent -silk -silly -silver -similar -simple -since -sing -siren -sister -situate -six -size -skate -sketch -ski -skill -skin -skirt -skull -slab -slam -sleep -slender -slice -slide -slight -slim -slogan -slot -slow -slush -small -smart -smile -smoke -smooth -snack -snake -snap -sniff -snow -soap -soccer -social -sock -soda -soft -solar -soldier -solid -solution -solve -someone -song -soon -sorry -sort -soul -sound -soup -source -south -space -spare -spatial -spawn -speak -special -speed -spell -spend -sphere -spice -spider -spike -spin -spirit -split -spoil -sponsor -spoon -sport -spot -spray -spread -spring -spy -square -squeeze -squirrel -stable -stadium -staff -stage -stairs -stamp -stand -start -state -stay -steak -steel -stem -step -stereo -stick -still -sting -stock -stomach -stone -stool -story -stove -strategy -street -strike -strong -struggle -student -stuff -stumble -style -subject -submit -subway -success -such -sudden -suffer -sugar -suggest -suit -summer -sun -sunny -sunset -super -supply -supreme -sure -surface -surge -surprise -surround -survey -suspect -sustain -swallow -swamp -swap -swarm -swear -sweet -swift -swim 
-swing -switch -sword -symbol -symptom -syrup -system -table -tackle -tag -tail -talent -talk -tank -tape -target -task -taste -tattoo -taxi -teach -team -tell -ten -tenant -tennis -tent -term -test -text -thank -that -theme -then -theory -there -they -thing -this -thought -three -thrive -throw -thumb -thunder -ticket -tide -tiger -tilt -timber -time -tiny -tip -tired -tissue -title -toast -tobacco -today -toddler -toe -together -toilet -token -tomato -tomorrow -tone -tongue -tonight -tool -tooth -top -topic -topple -torch -tornado -tortoise -toss -total -tourist -toward -tower -town -toy -track -trade -traffic -tragic -train -transfer -trap -trash -travel -tray -treat -tree -trend -trial -tribe -trick -trigger -trim -trip -trophy -trouble -truck -true -truly -trumpet -trust -truth -try -tube -tuition -tumble -tuna -tunnel -turkey -turn -turtle -twelve -twenty -twice -twin -twist -two -type -typical -ugly -umbrella -unable -unaware -uncle -uncover -under -undo -unfair -unfold -unhappy -uniform -unique -unit -universe -unknown -unlock -until -unusual -unveil -update -upgrade -uphold -upon -upper -upset -urban -urge -usage -use -used -useful -useless -usual -utility -vacant -vacuum -vague -valid -valley -valve -van -vanish -vapor -various -vast -vault -vehicle -velvet -vendor -venture -venue -verb -verify -version -very -vessel -veteran -viable -vibrant -vicious -victory -video -view -village -vintage -violin -virtual -virus -visa -visit -visual -vital -vivid -vocal -voice -void -volcano -volume -vote -voyage -wage -wagon -wait -walk -wall -walnut -want -warfare -warm -warrior -wash -wasp -waste -water -wave -way -wealth -weapon -wear -weasel -weather -web -wedding -weekend -weird -welcome -west -wet -whale -what -wheat -wheel -when -where -whip -whisper -wide -width -wife -wild -will -win -window -wine -wing -wink -winner -winter -wire -wisdom -wise -wish -witness -wolf -woman -wonder -wood -wool -word -work -world -worry -worth -wrap -wreck -wrestle -wrist -write 
-wrong -yard -year -yellow -you -young -youth -zebra -zero -zone -zoo - diff --git a/libs_crutch/contrib/passlib/_data/wordsets/eff_long.txt b/libs_crutch/contrib/passlib/_data/wordsets/eff_long.txt deleted file mode 100644 index caf71f5..0000000 --- a/libs_crutch/contrib/passlib/_data/wordsets/eff_long.txt +++ /dev/null @@ -1,7776 +0,0 @@ -abacus -abdomen -abdominal -abide -abiding -ability -ablaze -able -abnormal -abrasion -abrasive -abreast -abridge -abroad -abruptly -absence -absentee -absently -absinthe -absolute -absolve -abstain -abstract -absurd -accent -acclaim -acclimate -accompany -account -accuracy -accurate -accustom -acetone -achiness -aching -acid -acorn -acquaint -acquire -acre -acrobat -acronym -acting -action -activate -activator -active -activism -activist -activity -actress -acts -acutely -acuteness -aeration -aerobics -aerosol -aerospace -afar -affair -affected -affecting -affection -affidavit -affiliate -affirm -affix -afflicted -affluent -afford -affront -aflame -afloat -aflutter -afoot -afraid -afterglow -afterlife -aftermath -aftermost -afternoon -aged -ageless -agency -agenda -agent -aggregate -aghast -agile -agility -aging -agnostic -agonize -agonizing -agony -agreeable -agreeably -agreed -agreeing -agreement -aground -ahead -ahoy -aide -aids -aim -ajar -alabaster -alarm -albatross -album -alfalfa -algebra -algorithm -alias -alibi -alienable -alienate -aliens -alike -alive -alkaline -alkalize -almanac -almighty -almost -aloe -aloft -aloha -alone -alongside -aloof -alphabet -alright -although -altitude -alto -aluminum -alumni -always -amaretto -amaze -amazingly -amber -ambiance -ambiguity -ambiguous -ambition -ambitious -ambulance -ambush -amendable -amendment -amends -amenity -amiable -amicably -amid -amigo -amino -amiss -ammonia -ammonium -amnesty -amniotic -among -amount -amperage -ample -amplifier -amplify -amply -amuck -amulet -amusable -amused -amusement -amuser -amusing -anaconda -anaerobic -anagram -anatomist -anatomy -anchor 
-anchovy -ancient -android -anemia -anemic -aneurism -anew -angelfish -angelic -anger -angled -angler -angles -angling -angrily -angriness -anguished -angular -animal -animate -animating -animation -animator -anime -animosity -ankle -annex -annotate -announcer -annoying -annually -annuity -anointer -another -answering -antacid -antarctic -anteater -antelope -antennae -anthem -anthill -anthology -antibody -antics -antidote -antihero -antiquely -antiques -antiquity -antirust -antitoxic -antitrust -antiviral -antivirus -antler -antonym -antsy -anvil -anybody -anyhow -anymore -anyone -anyplace -anything -anytime -anyway -anywhere -aorta -apache -apostle -appealing -appear -appease -appeasing -appendage -appendix -appetite -appetizer -applaud -applause -apple -appliance -applicant -applied -apply -appointee -appraisal -appraiser -apprehend -approach -approval -approve -apricot -april -apron -aptitude -aptly -aqua -aqueduct -arbitrary -arbitrate -ardently -area -arena -arguable -arguably -argue -arise -armadillo -armband -armchair -armed -armful -armhole -arming -armless -armoire -armored -armory -armrest -army -aroma -arose -around -arousal -arrange -array -arrest -arrival -arrive -arrogance -arrogant -arson -art -ascend -ascension -ascent -ascertain -ashamed -ashen -ashes -ashy -aside -askew -asleep -asparagus -aspect -aspirate -aspire -aspirin -astonish -astound -astride -astrology -astronaut -astronomy -astute -atlantic -atlas -atom -atonable -atop -atrium -atrocious -atrophy -attach -attain -attempt -attendant -attendee -attention -attentive -attest -attic -attire -attitude -attractor -attribute -atypical -auction -audacious -audacity -audible -audibly -audience -audio -audition -augmented -august -authentic -author -autism -autistic -autograph -automaker -automated -automatic -autopilot -available -avalanche -avatar -avenge -avenging -avenue -average -aversion -avert -aviation -aviator -avid -avoid -await -awaken -award -aware -awhile -awkward -awning -awoke -awry 
-axis -babble -babbling -babied -baboon -backache -backboard -backboned -backdrop -backed -backer -backfield -backfire -backhand -backing -backlands -backlash -backless -backlight -backlit -backlog -backpack -backpedal -backrest -backroom -backshift -backside -backslid -backspace -backspin -backstab -backstage -backtalk -backtrack -backup -backward -backwash -backwater -backyard -bacon -bacteria -bacterium -badass -badge -badland -badly -badness -baffle -baffling -bagel -bagful -baggage -bagged -baggie -bagginess -bagging -baggy -bagpipe -baguette -baked -bakery -bakeshop -baking -balance -balancing -balcony -balmy -balsamic -bamboo -banana -banish -banister -banjo -bankable -bankbook -banked -banker -banking -banknote -bankroll -banner -bannister -banshee -banter -barbecue -barbed -barbell -barber -barcode -barge -bargraph -barista -baritone -barley -barmaid -barman -barn -barometer -barrack -barracuda -barrel -barrette -barricade -barrier -barstool -bartender -barterer -bash -basically -basics -basil -basin -basis -basket -batboy -batch -bath -baton -bats -battalion -battered -battering -battery -batting -battle -bauble -bazooka -blabber -bladder -blade -blah -blame -blaming -blanching -blandness -blank -blaspheme -blasphemy -blast -blatancy -blatantly -blazer -blazing -bleach -bleak -bleep -blemish -blend -bless -blighted -blimp -bling -blinked -blinker -blinking -blinks -blip -blissful -blitz -blizzard -bloated -bloating -blob -blog -bloomers -blooming -blooper -blot -blouse -blubber -bluff -bluish -blunderer -blunt -blurb -blurred -blurry -blurt -blush -blustery -boaster -boastful -boasting -boat -bobbed -bobbing -bobble -bobcat -bobsled -bobtail -bodacious -body -bogged -boggle -bogus -boil -bok -bolster -bolt -bonanza -bonded -bonding -bondless -boned -bonehead -boneless -bonelike -boney -bonfire -bonnet -bonsai -bonus -bony -boogeyman -boogieman -book -boondocks -booted -booth -bootie -booting -bootlace -bootleg -boots -boozy -borax -boring -borough 
-borrower -borrowing -boss -botanical -botanist -botany -botch -both -bottle -bottling -bottom -bounce -bouncing -bouncy -bounding -boundless -bountiful -bovine -boxcar -boxer -boxing -boxlike -boxy -breach -breath -breeches -breeching -breeder -breeding -breeze -breezy -brethren -brewery -brewing -briar -bribe -brick -bride -bridged -brigade -bright -brilliant -brim -bring -brink -brisket -briskly -briskness -bristle -brittle -broadband -broadcast -broaden -broadly -broadness -broadside -broadways -broiler -broiling -broken -broker -bronchial -bronco -bronze -bronzing -brook -broom -brought -browbeat -brownnose -browse -browsing -bruising -brunch -brunette -brunt -brush -brussels -brute -brutishly -bubble -bubbling -bubbly -buccaneer -bucked -bucket -buckle -buckshot -buckskin -bucktooth -buckwheat -buddhism -buddhist -budding -buddy -budget -buffalo -buffed -buffer -buffing -buffoon -buggy -bulb -bulge -bulginess -bulgur -bulk -bulldog -bulldozer -bullfight -bullfrog -bullhorn -bullion -bullish -bullpen -bullring -bullseye -bullwhip -bully -bunch -bundle -bungee -bunion -bunkbed -bunkhouse -bunkmate -bunny -bunt -busboy -bush -busily -busload -bust -busybody -buzz -cabana -cabbage -cabbie -cabdriver -cable -caboose -cache -cackle -cacti -cactus -caddie -caddy -cadet -cadillac -cadmium -cage -cahoots -cake -calamari -calamity -calcium -calculate -calculus -caliber -calibrate -calm -caloric -calorie -calzone -camcorder -cameo -camera -camisole -camper -campfire -camping -campsite -campus -canal -canary -cancel -candied -candle -candy -cane -canine -canister -cannabis -canned -canning -cannon -cannot -canola -canon -canopener -canopy -canteen -canyon -capable -capably -capacity -cape -capillary -capital -capitol -capped -capricorn -capsize -capsule -caption -captivate -captive -captivity -capture -caramel -carat -caravan -carbon -cardboard -carded -cardiac -cardigan -cardinal -cardstock -carefully -caregiver -careless -caress -caretaker -cargo -caring -carless 
-carload -carmaker -carnage -carnation -carnival -carnivore -carol -carpenter -carpentry -carpool -carport -carried -carrot -carrousel -carry -cartel -cartload -carton -cartoon -cartridge -cartwheel -carve -carving -carwash -cascade -case -cash -casing -casino -casket -cassette -casually -casualty -catacomb -catalog -catalyst -catalyze -catapult -cataract -catatonic -catcall -catchable -catcher -catching -catchy -caterer -catering -catfight -catfish -cathedral -cathouse -catlike -catnap -catnip -catsup -cattail -cattishly -cattle -catty -catwalk -caucasian -caucus -causal -causation -cause -causing -cauterize -caution -cautious -cavalier -cavalry -caviar -cavity -cedar -celery -celestial -celibacy -celibate -celtic -cement -census -ceramics -ceremony -certainly -certainty -certified -certify -cesarean -cesspool -chafe -chaffing -chain -chair -chalice -challenge -chamber -chamomile -champion -chance -change -channel -chant -chaos -chaperone -chaplain -chapped -chaps -chapter -character -charbroil -charcoal -charger -charging -chariot -charity -charm -charred -charter -charting -chase -chasing -chaste -chastise -chastity -chatroom -chatter -chatting -chatty -cheating -cheddar -cheek -cheer -cheese -cheesy -chef -chemicals -chemist -chemo -cherisher -cherub -chess -chest -chevron -chevy -chewable -chewer -chewing -chewy -chief -chihuahua -childcare -childhood -childish -childless -childlike -chili -chill -chimp -chip -chirping -chirpy -chitchat -chivalry -chive -chloride -chlorine -choice -chokehold -choking -chomp -chooser -choosing -choosy -chop -chosen -chowder -chowtime -chrome -chubby -chuck -chug -chummy -chump -chunk -churn -chute -cider -cilantro -cinch -cinema -cinnamon -circle -circling -circular -circulate -circus -citable -citadel -citation -citizen -citric -citrus -city -civic -civil -clad -claim -clambake -clammy -clamor -clamp -clamshell -clang -clanking -clapped -clapper -clapping -clarify -clarinet -clarity -clash -clasp -class -clatter -clause 
-clavicle -claw -clay -clean -clear -cleat -cleaver -cleft -clench -clergyman -clerical -clerk -clever -clicker -client -climate -climatic -cling -clinic -clinking -clip -clique -cloak -clobber -clock -clone -cloning -closable -closure -clothes -clothing -cloud -clover -clubbed -clubbing -clubhouse -clump -clumsily -clumsy -clunky -clustered -clutch -clutter -coach -coagulant -coastal -coaster -coasting -coastland -coastline -coat -coauthor -cobalt -cobbler -cobweb -cocoa -coconut -cod -coeditor -coerce -coexist -coffee -cofounder -cognition -cognitive -cogwheel -coherence -coherent -cohesive -coil -coke -cola -cold -coleslaw -coliseum -collage -collapse -collar -collected -collector -collide -collie -collision -colonial -colonist -colonize -colony -colossal -colt -coma -come -comfort -comfy -comic -coming -comma -commence -commend -comment -commerce -commode -commodity -commodore -common -commotion -commute -commuting -compacted -compacter -compactly -compactor -companion -company -compare -compel -compile -comply -component -composed -composer -composite -compost -composure -compound -compress -comprised -computer -computing -comrade -concave -conceal -conceded -concept -concerned -concert -conch -concierge -concise -conclude -concrete -concur -condense -condiment -condition -condone -conducive -conductor -conduit -cone -confess -confetti -confidant -confident -confider -confiding -configure -confined -confining -confirm -conflict -conform -confound -confront -confused -confusing -confusion -congenial -congested -congrats -congress -conical -conjoined -conjure -conjuror -connected -connector -consensus -consent -console -consoling -consonant -constable -constant -constrain -constrict -construct -consult -consumer -consuming -contact -container -contempt -contend -contented -contently -contents -contest -context -contort -contour -contrite -control -contusion -convene -convent -copartner -cope -copied -copier -copilot -coping -copious -copper -copy -coral -cork 
-cornball -cornbread -corncob -cornea -corned -corner -cornfield -cornflake -cornhusk -cornmeal -cornstalk -corny -coronary -coroner -corporal -corporate -corral -correct -corridor -corrode -corroding -corrosive -corsage -corset -cortex -cosigner -cosmetics -cosmic -cosmos -cosponsor -cost -cottage -cotton -couch -cough -could -countable -countdown -counting -countless -country -county -courier -covenant -cover -coveted -coveting -coyness -cozily -coziness -cozy -crabbing -crabgrass -crablike -crabmeat -cradle -cradling -crafter -craftily -craftsman -craftwork -crafty -cramp -cranberry -crane -cranial -cranium -crank -crate -crave -craving -crawfish -crawlers -crawling -crayfish -crayon -crazed -crazily -craziness -crazy -creamed -creamer -creamlike -crease -creasing -creatable -create -creation -creative -creature -credible -credibly -credit -creed -creme -creole -crepe -crept -crescent -crested -cresting -crestless -crevice -crewless -crewman -crewmate -crib -cricket -cried -crier -crimp -crimson -cringe -cringing -crinkle -crinkly -crisped -crisping -crisply -crispness -crispy -criteria -critter -croak -crock -crook -croon -crop -cross -crouch -crouton -crowbar -crowd -crown -crucial -crudely -crudeness -cruelly -cruelness -cruelty -crumb -crummiest -crummy -crumpet -crumpled -cruncher -crunching -crunchy -crusader -crushable -crushed -crusher -crushing -crust -crux -crying -cryptic -crystal -cubbyhole -cube -cubical -cubicle -cucumber -cuddle -cuddly -cufflink -culinary -culminate -culpable -culprit -cultivate -cultural -culture -cupbearer -cupcake -cupid -cupped -cupping -curable -curator -curdle -cure -curfew -curing -curled -curler -curliness -curling -curly -curry -curse -cursive -cursor -curtain -curtly -curtsy -curvature -curve -curvy -cushy -cusp -cussed -custard -custodian -custody -customary -customer -customize -customs -cut -cycle -cyclic -cycling -cyclist -cylinder -cymbal -cytoplasm -cytoplast -dab -dad -daffodil -dagger -daily -daintily -dainty 
-dairy -daisy -dallying -dance -dancing -dandelion -dander -dandruff -dandy -danger -dangle -dangling -daredevil -dares -daringly -darkened -darkening -darkish -darkness -darkroom -darling -darn -dart -darwinism -dash -dastardly -data -datebook -dating -daughter -daunting -dawdler -dawn -daybed -daybreak -daycare -daydream -daylight -daylong -dayroom -daytime -dazzler -dazzling -deacon -deafening -deafness -dealer -dealing -dealmaker -dealt -dean -debatable -debate -debating -debit -debrief -debtless -debtor -debug -debunk -decade -decaf -decal -decathlon -decay -deceased -deceit -deceiver -deceiving -december -decency -decent -deception -deceptive -decibel -decidable -decimal -decimeter -decipher -deck -declared -decline -decode -decompose -decorated -decorator -decoy -decrease -decree -dedicate -dedicator -deduce -deduct -deed -deem -deepen -deeply -deepness -deface -defacing -defame -default -defeat -defection -defective -defendant -defender -defense -defensive -deferral -deferred -defiance -defiant -defile -defiling -define -definite -deflate -deflation -deflator -deflected -deflector -defog -deforest -defraud -defrost -deftly -defuse -defy -degraded -degrading -degrease -degree -dehydrate -deity -dejected -delay -delegate -delegator -delete -deletion -delicacy -delicate -delicious -delighted -delirious -delirium -deliverer -delivery -delouse -delta -deluge -delusion -deluxe -demanding -demeaning -demeanor -demise -democracy -democrat -demote -demotion -demystify -denatured -deniable -denial -denim -denote -dense -density -dental -dentist -denture -deny -deodorant -deodorize -departed -departure -depict -deplete -depletion -deplored -deploy -deport -depose -depraved -depravity -deprecate -depress -deprive -depth -deputize -deputy -derail -deranged -derby -derived -desecrate -deserve -deserving -designate -designed -designer -designing -deskbound -desktop -deskwork -desolate -despair -despise -despite -destiny -destitute -destruct -detached -detail -detection 
-detective -detector -detention -detergent -detest -detonate -detonator -detoxify -detract -deuce -devalue -deviancy -deviant -deviate -deviation -deviator -device -devious -devotedly -devotee -devotion -devourer -devouring -devoutly -dexterity -dexterous -diabetes -diabetic -diabolic -diagnoses -diagnosis -diagram -dial -diameter -diaper -diaphragm -diary -dice -dicing -dictate -dictation -dictator -difficult -diffused -diffuser -diffusion -diffusive -dig -dilation -diligence -diligent -dill -dilute -dime -diminish -dimly -dimmed -dimmer -dimness -dimple -diner -dingbat -dinghy -dinginess -dingo -dingy -dining -dinner -diocese -dioxide -diploma -dipped -dipper -dipping -directed -direction -directive -directly -directory -direness -dirtiness -disabled -disagree -disallow -disarm -disarray -disaster -disband -disbelief -disburse -discard -discern -discharge -disclose -discolor -discount -discourse -discover -discuss -disdain -disengage -disfigure -disgrace -dish -disinfect -disjoin -disk -dislike -disliking -dislocate -dislodge -disloyal -dismantle -dismay -dismiss -dismount -disobey -disorder -disown -disparate -disparity -dispatch -dispense -dispersal -dispersed -disperser -displace -display -displease -disposal -dispose -disprove -dispute -disregard -disrupt -dissuade -distance -distant -distaste -distill -distinct -distort -distract -distress -district -distrust -ditch -ditto -ditzy -dividable -divided -dividend -dividers -dividing -divinely -diving -divinity -divisible -divisibly -division -divisive -divorcee -dizziness -dizzy -doable -docile -dock -doctrine -document -dodge -dodgy -doily -doing -dole -dollar -dollhouse -dollop -dolly -dolphin -domain -domelike -domestic -dominion -dominoes -donated -donation -donator -donor -donut -doodle -doorbell -doorframe -doorknob -doorman -doormat -doornail -doorpost -doorstep -doorstop -doorway -doozy -dork -dormitory -dorsal -dosage -dose -dotted -doubling -douche -dove -down -dowry -doze -drab -dragging -dragonfly 
-dragonish -dragster -drainable -drainage -drained -drainer -drainpipe -dramatic -dramatize -drank -drapery -drastic -draw -dreaded -dreadful -dreadlock -dreamboat -dreamily -dreamland -dreamless -dreamlike -dreamt -dreamy -drearily -dreary -drench -dress -drew -dribble -dried -drier -drift -driller -drilling -drinkable -drinking -dripping -drippy -drivable -driven -driver -driveway -driving -drizzle -drizzly -drone -drool -droop -drop-down -dropbox -dropkick -droplet -dropout -dropper -drove -drown -drowsily -drudge -drum -dry -dubbed -dubiously -duchess -duckbill -ducking -duckling -ducktail -ducky -duct -dude -duffel -dugout -duh -duke -duller -dullness -duly -dumping -dumpling -dumpster -duo -dupe -duplex -duplicate -duplicity -durable -durably -duration -duress -during -dusk -dust -dutiful -duty -duvet -dwarf -dweeb -dwelled -dweller -dwelling -dwindle -dwindling -dynamic -dynamite -dynasty -dyslexia -dyslexic -each -eagle -earache -eardrum -earflap -earful -earlobe -early -earmark -earmuff -earphone -earpiece -earplugs -earring -earshot -earthen -earthlike -earthling -earthly -earthworm -earthy -earwig -easeful -easel -easiest -easily -easiness -easing -eastbound -eastcoast -easter -eastward -eatable -eaten -eatery -eating -eats -ebay -ebony -ebook -ecard -eccentric -echo -eclair -eclipse -ecologist -ecology -economic -economist -economy -ecosphere -ecosystem -edge -edginess -edging -edgy -edition -editor -educated -education -educator -eel -effective -effects -efficient -effort -eggbeater -egging -eggnog -eggplant -eggshell -egomaniac -egotism -egotistic -either -eject -elaborate -elastic -elated -elbow -eldercare -elderly -eldest -electable -election -elective -elephant -elevate -elevating -elevation -elevator -eleven -elf -eligible -eligibly -eliminate -elite -elitism -elixir -elk -ellipse -elliptic -elm -elongated -elope -eloquence -eloquent -elsewhere -elude -elusive -elves -email -embargo -embark -embassy -embattled -embellish -ember -embezzle -emblaze 
-emblem -embody -embolism -emboss -embroider -emcee -emerald -emergency -emission -emit -emote -emoticon -emotion -empathic -empathy -emperor -emphases -emphasis -emphasize -emphatic -empirical -employed -employee -employer -emporium -empower -emptier -emptiness -empty -emu -enable -enactment -enamel -enchanted -enchilada -encircle -enclose -enclosure -encode -encore -encounter -encourage -encroach -encrust -encrypt -endanger -endeared -endearing -ended -ending -endless -endnote -endocrine -endorphin -endorse -endowment -endpoint -endurable -endurance -enduring -energetic -energize -energy -enforced -enforcer -engaged -engaging -engine -engorge -engraved -engraver -engraving -engross -engulf -enhance -enigmatic -enjoyable -enjoyably -enjoyer -enjoying -enjoyment -enlarged -enlarging -enlighten -enlisted -enquirer -enrage -enrich -enroll -enslave -ensnare -ensure -entail -entangled -entering -entertain -enticing -entire -entitle -entity -entomb -entourage -entrap -entree -entrench -entrust -entryway -entwine -enunciate -envelope -enviable -enviably -envious -envision -envoy -envy -enzyme -epic -epidemic -epidermal -epidermis -epidural -epilepsy -epileptic -epilogue -epiphany -episode -equal -equate -equation -equator -equinox -equipment -equity -equivocal -eradicate -erasable -erased -eraser -erasure -ergonomic -errand -errant -erratic -error -erupt -escalate -escalator -escapable -escapade -escapist -escargot -eskimo -esophagus -espionage -espresso -esquire -essay -essence -essential -establish -estate -esteemed -estimate -estimator -estranged -estrogen -etching -eternal -eternity -ethanol -ether -ethically -ethics -euphemism -evacuate -evacuee -evade -evaluate -evaluator -evaporate -evasion -evasive -even -everglade -evergreen -everybody -everyday -everyone -evict -evidence -evident -evil -evoke -evolution -evolve -exact -exalted -example -excavate -excavator -exceeding -exception -excess -exchange -excitable -exciting -exclaim -exclude -excluding -exclusion 
-exclusive -excretion -excretory -excursion -excusable -excusably -excuse -exemplary -exemplify -exemption -exerciser -exert -exes -exfoliate -exhale -exhaust -exhume -exile -existing -exit -exodus -exonerate -exorcism -exorcist -expand -expanse -expansion -expansive -expectant -expedited -expediter -expel -expend -expenses -expensive -expert -expire -expiring -explain -expletive -explicit -explode -exploit -explore -exploring -exponent -exporter -exposable -expose -exposure -express -expulsion -exquisite -extended -extending -extent -extenuate -exterior -external -extinct -extortion -extradite -extras -extrovert -extrude -extruding -exuberant -fable -fabric -fabulous -facebook -facecloth -facedown -faceless -facelift -faceplate -faceted -facial -facility -facing -facsimile -faction -factoid -factor -factsheet -factual -faculty -fade -fading -failing -falcon -fall -false -falsify -fame -familiar -family -famine -famished -fanatic -fancied -fanciness -fancy -fanfare -fang -fanning -fantasize -fantastic -fantasy -fascism -fastball -faster -fasting -fastness -faucet -favorable -favorably -favored -favoring -favorite -fax -feast -federal -fedora -feeble -feed -feel -feisty -feline -felt-tip -feminine -feminism -feminist -feminize -femur -fence -fencing -fender -ferment -fernlike -ferocious -ferocity -ferret -ferris -ferry -fervor -fester -festival -festive -festivity -fetal -fetch -fever -fiber -fiction -fiddle -fiddling -fidelity -fidgeting -fidgety -fifteen -fifth -fiftieth -fifty -figment -figure -figurine -filing -filled -filler -filling -film -filter -filth -filtrate -finale -finalist -finalize -finally -finance -financial -finch -fineness -finer -finicky -finished -finisher -finishing -finite -finless -finlike -fiscally -fit -five -flaccid -flagman -flagpole -flagship -flagstick -flagstone -flail -flakily -flaky -flame -flammable -flanked -flanking -flannels -flap -flaring -flashback -flashbulb -flashcard -flashily -flashing -flashy -flask -flatbed -flatfoot 
-flatly -flatness -flatten -flattered -flatterer -flattery -flattop -flatware -flatworm -flavored -flavorful -flavoring -flaxseed -fled -fleshed -fleshy -flick -flier -flight -flinch -fling -flint -flip -flirt -float -flock -flogging -flop -floral -florist -floss -flounder -flyable -flyaway -flyer -flying -flyover -flypaper -foam -foe -fog -foil -folic -folk -follicle -follow -fondling -fondly -fondness -fondue -font -food -fool -footage -football -footbath -footboard -footer -footgear -foothill -foothold -footing -footless -footman -footnote -footpad -footpath -footprint -footrest -footsie -footsore -footwear -footwork -fossil -foster -founder -founding -fountain -fox -foyer -fraction -fracture -fragile -fragility -fragment -fragrance -fragrant -frail -frame -framing -frantic -fraternal -frayed -fraying -frays -freckled -freckles -freebase -freebee -freebie -freedom -freefall -freehand -freeing -freeload -freely -freemason -freeness -freestyle -freeware -freeway -freewill -freezable -freezing -freight -french -frenzied -frenzy -frequency -frequent -fresh -fretful -fretted -friction -friday -fridge -fried -friend -frighten -frightful -frigidity -frigidly -frill -fringe -frisbee -frisk -fritter -frivolous -frolic -from -front -frostbite -frosted -frostily -frosting -frostlike -frosty -froth -frown -frozen -fructose -frugality -frugally -fruit -frustrate -frying -gab -gaffe -gag -gainfully -gaining -gains -gala -gallantly -galleria -gallery -galley -gallon -gallows -gallstone -galore -galvanize -gambling -game -gaming -gamma -gander -gangly -gangrene -gangway -gap -garage -garbage -garden -gargle -garland -garlic -garment -garnet -garnish -garter -gas -gatherer -gathering -gating -gauging -gauntlet -gauze -gave -gawk -gazing -gear -gecko -geek -geiger -gem -gender -generic -generous -genetics -genre -gentile -gentleman -gently -gents -geography -geologic -geologist -geology -geometric -geometry -geranium -gerbil -geriatric -germicide -germinate -germless -germproof 
-gestate -gestation -gesture -getaway -getting -getup -giant -gibberish -giblet -giddily -giddiness -giddy -gift -gigabyte -gigahertz -gigantic -giggle -giggling -giggly -gigolo -gilled -gills -gimmick -girdle -giveaway -given -giver -giving -gizmo -gizzard -glacial -glacier -glade -gladiator -gladly -glamorous -glamour -glance -glancing -glandular -glare -glaring -glass -glaucoma -glazing -gleaming -gleeful -glider -gliding -glimmer -glimpse -glisten -glitch -glitter -glitzy -gloater -gloating -gloomily -gloomy -glorified -glorifier -glorify -glorious -glory -gloss -glove -glowing -glowworm -glucose -glue -gluten -glutinous -glutton -gnarly -gnat -goal -goatskin -goes -goggles -going -goldfish -goldmine -goldsmith -golf -goliath -gonad -gondola -gone -gong -good -gooey -goofball -goofiness -goofy -google -goon -gopher -gore -gorged -gorgeous -gory -gosling -gossip -gothic -gotten -gout -gown -grab -graceful -graceless -gracious -gradation -graded -grader -gradient -grading -gradually -graduate -graffiti -grafted -grafting -grain -granddad -grandkid -grandly -grandma -grandpa -grandson -granite -granny -granola -grant -granular -grape -graph -grapple -grappling -grasp -grass -gratified -gratify -grating -gratitude -gratuity -gravel -graveness -graves -graveyard -gravitate -gravity -gravy -gray -grazing -greasily -greedily -greedless -greedy -green -greeter -greeting -grew -greyhound -grid -grief -grievance -grieving -grievous -grill -grimace -grimacing -grime -griminess -grimy -grinch -grinning -grip -gristle -grit -groggily -groggy -groin -groom -groove -grooving -groovy -grope -ground -grouped -grout -grove -grower -growing -growl -grub -grudge -grudging -grueling -gruffly -grumble -grumbling -grumbly -grumpily -grunge -grunt -guacamole -guidable -guidance -guide -guiding -guileless -guise -gulf -gullible -gully -gulp -gumball -gumdrop -gumminess -gumming -gummy -gurgle -gurgling -guru -gush -gusto -gusty -gutless -guts -gutter -guy -guzzler -gyration -habitable 
-habitant -habitat -habitual -hacked -hacker -hacking -hacksaw -had -haggler -haiku -half -halogen -halt -halved -halves -hamburger -hamlet -hammock -hamper -hamster -hamstring -handbag -handball -handbook -handbrake -handcart -handclap -handclasp -handcraft -handcuff -handed -handful -handgrip -handgun -handheld -handiness -handiwork -handlebar -handled -handler -handling -handmade -handoff -handpick -handprint -handrail -handsaw -handset -handsfree -handshake -handstand -handwash -handwork -handwoven -handwrite -handyman -hangnail -hangout -hangover -hangup -hankering -hankie -hanky -haphazard -happening -happier -happiest -happily -happiness -happy -harbor -hardcopy -hardcore -hardcover -harddisk -hardened -hardener -hardening -hardhat -hardhead -hardiness -hardly -hardness -hardship -hardware -hardwired -hardwood -hardy -harmful -harmless -harmonica -harmonics -harmonize -harmony -harness -harpist -harsh -harvest -hash -hassle -haste -hastily -hastiness -hasty -hatbox -hatchback -hatchery -hatchet -hatching -hatchling -hate -hatless -hatred -haunt -haven -hazard -hazelnut -hazily -haziness -hazing -hazy -headache -headband -headboard -headcount -headdress -headed -header -headfirst -headgear -heading -headlamp -headless -headlock -headphone -headpiece -headrest -headroom -headscarf -headset -headsman -headstand -headstone -headway -headwear -heap -heat -heave -heavily -heaviness -heaving -hedge -hedging -heftiness -hefty -helium -helmet -helper -helpful -helping -helpless -helpline -hemlock -hemstitch -hence -henchman -henna -herald -herbal -herbicide -herbs -heritage -hermit -heroics -heroism -herring -herself -hertz -hesitancy -hesitant -hesitate -hexagon -hexagram -hubcap -huddle -huddling -huff -hug -hula -hulk -hull -human -humble -humbling -humbly -humid -humiliate -humility -humming -hummus -humongous -humorist -humorless -humorous -humpback -humped -humvee -hunchback -hundredth -hunger -hungrily -hungry -hunk -hunter -hunting -huntress -huntsman -hurdle 
-hurled -hurler -hurling -hurray -hurricane -hurried -hurry -hurt -husband -hush -husked -huskiness -hut -hybrid -hydrant -hydrated -hydration -hydrogen -hydroxide -hyperlink -hypertext -hyphen -hypnoses -hypnosis -hypnotic -hypnotism -hypnotist -hypnotize -hypocrisy -hypocrite -ibuprofen -ice -iciness -icing -icky -icon -icy -idealism -idealist -idealize -ideally -idealness -identical -identify -identity -ideology -idiocy -idiom -idly -igloo -ignition -ignore -iguana -illicitly -illusion -illusive -image -imaginary -imagines -imaging -imbecile -imitate -imitation -immature -immerse -immersion -imminent -immobile -immodest -immorally -immortal -immovable -immovably -immunity -immunize -impaired -impale -impart -impatient -impeach -impeding -impending -imperfect -imperial -impish -implant -implement -implicate -implicit -implode -implosion -implosive -imply -impolite -important -importer -impose -imposing -impotence -impotency -impotent -impound -imprecise -imprint -imprison -impromptu -improper -improve -improving -improvise -imprudent -impulse -impulsive -impure -impurity -iodine -iodize -ion -ipad -iphone -ipod -irate -irk -iron -irregular -irrigate -irritable -irritably -irritant -irritate -islamic -islamist -isolated -isolating -isolation -isotope -issue -issuing -italicize -italics -item -itinerary -itunes -ivory -ivy -jab -jackal -jacket -jackknife -jackpot -jailbird -jailbreak -jailer -jailhouse -jalapeno -jam -janitor -january -jargon -jarring -jasmine -jaundice -jaunt -java -jawed -jawless -jawline -jaws -jaybird -jaywalker -jazz -jeep -jeeringly -jellied -jelly -jersey -jester -jet -jiffy -jigsaw -jimmy -jingle -jingling -jinx -jitters -jittery -job -jockey -jockstrap -jogger -jogging -john -joining -jokester -jokingly -jolliness -jolly -jolt -jot -jovial -joyfully -joylessly -joyous -joyride -joystick -jubilance -jubilant -judge -judgingly -judicial -judiciary -judo -juggle -juggling -jugular -juice -juiciness -juicy -jujitsu -jukebox -july -jumble 
-jumbo -jump -junction -juncture -june -junior -juniper -junkie -junkman -junkyard -jurist -juror -jury -justice -justifier -justify -justly -justness -juvenile -kabob -kangaroo -karaoke -karate -karma -kebab -keenly -keenness -keep -keg -kelp -kennel -kept -kerchief -kerosene -kettle -kick -kiln -kilobyte -kilogram -kilometer -kilowatt -kilt -kimono -kindle -kindling -kindly -kindness -kindred -kinetic -kinfolk -king -kinship -kinsman -kinswoman -kissable -kisser -kissing -kitchen -kite -kitten -kitty -kiwi -kleenex -knapsack -knee -knelt -knickers -knoll -koala -kooky -kosher -krypton -kudos -kung -labored -laborer -laboring -laborious -labrador -ladder -ladies -ladle -ladybug -ladylike -lagged -lagging -lagoon -lair -lake -lance -landed -landfall -landfill -landing -landlady -landless -landline -landlord -landmark -landmass -landmine -landowner -landscape -landside -landslide -language -lankiness -lanky -lantern -lapdog -lapel -lapped -lapping -laptop -lard -large -lark -lash -lasso -last -latch -late -lather -latitude -latrine -latter -latticed -launch -launder -laundry -laurel -lavender -lavish -laxative -lazily -laziness -lazy -lecturer -left -legacy -legal -legend -legged -leggings -legible -legibly -legislate -lego -legroom -legume -legwarmer -legwork -lemon -lend -length -lens -lent -leotard -lesser -letdown -lethargic -lethargy -letter -lettuce -level -leverage -levers -levitate -levitator -liability -liable -liberty -librarian -library -licking -licorice -lid -life -lifter -lifting -liftoff -ligament -likely -likeness -likewise -liking -lilac -lilly -lily -limb -limeade -limelight -limes -limit -limping -limpness -line -lingo -linguini -linguist -lining -linked -linoleum -linseed -lint -lion -lip -liquefy -liqueur -liquid -lisp -list -litigate -litigator -litmus -litter -little -livable -lived -lively -liver -livestock -lividly -living -lizard -lubricant -lubricate -lucid -luckily -luckiness -luckless -lucrative -ludicrous -lugged -lukewarm -lullaby 
-lumber -luminance -luminous -lumpiness -lumping -lumpish -lunacy -lunar -lunchbox -luncheon -lunchroom -lunchtime -lung -lurch -lure -luridness -lurk -lushly -lushness -luster -lustfully -lustily -lustiness -lustrous -lusty -luxurious -luxury -lying -lyrically -lyricism -lyricist -lyrics -macarena -macaroni -macaw -mace -machine -machinist -magazine -magenta -maggot -magical -magician -magma -magnesium -magnetic -magnetism -magnetize -magnifier -magnify -magnitude -magnolia -mahogany -maimed -majestic -majesty -majorette -majority -makeover -maker -makeshift -making -malformed -malt -mama -mammal -mammary -mammogram -manager -managing -manatee -mandarin -mandate -mandatory -mandolin -manger -mangle -mango -mangy -manhandle -manhole -manhood -manhunt -manicotti -manicure -manifesto -manila -mankind -manlike -manliness -manly -manmade -manned -mannish -manor -manpower -mantis -mantra -manual -many -map -marathon -marauding -marbled -marbles -marbling -march -mardi -margarine -margarita -margin -marigold -marina -marine -marital -maritime -marlin -marmalade -maroon -married -marrow -marry -marshland -marshy -marsupial -marvelous -marxism -mascot -masculine -mashed -mashing -massager -masses -massive -mastiff -matador -matchbook -matchbox -matcher -matching -matchless -material -maternal -maternity -math -mating -matriarch -matrimony -matrix -matron -matted -matter -maturely -maturing -maturity -mauve -maverick -maximize -maximum -maybe -mayday -mayflower -moaner -moaning -mobile -mobility -mobilize -mobster -mocha -mocker -mockup -modified -modify -modular -modulator -module -moisten -moistness -moisture -molar -molasses -mold -molecular -molecule -molehill -mollusk -mom -monastery -monday -monetary -monetize -moneybags -moneyless -moneywise -mongoose -mongrel -monitor -monkhood -monogamy -monogram -monologue -monopoly -monorail -monotone -monotype -monoxide -monsieur -monsoon -monstrous -monthly -monument -moocher -moodiness -moody -mooing -moonbeam -mooned 
-moonlight -moonlike -moonlit -moonrise -moonscape -moonshine -moonstone -moonwalk -mop -morale -morality -morally -morbidity -morbidly -morphine -morphing -morse -mortality -mortally -mortician -mortified -mortify -mortuary -mosaic -mossy -most -mothball -mothproof -motion -motivate -motivator -motive -motocross -motor -motto -mountable -mountain -mounted -mounting -mourner -mournful -mouse -mousiness -moustache -mousy -mouth -movable -move -movie -moving -mower -mowing -much -muck -mud -mug -mulberry -mulch -mule -mulled -mullets -multiple -multiply -multitask -multitude -mumble -mumbling -mumbo -mummified -mummify -mummy -mumps -munchkin -mundane -municipal -muppet -mural -murkiness -murky -murmuring -muscular -museum -mushily -mushiness -mushroom -mushy -music -musket -muskiness -musky -mustang -mustard -muster -mustiness -musty -mutable -mutate -mutation -mute -mutilated -mutilator -mutiny -mutt -mutual -muzzle -myself -myspace -mystified -mystify -myth -nacho -nag -nail -name -naming -nanny -nanometer -nape -napkin -napped -napping -nappy -narrow -nastily -nastiness -national -native -nativity -natural -nature -naturist -nautical -navigate -navigator -navy -nearby -nearest -nearly -nearness -neatly -neatness -nebula -nebulizer -nectar -negate -negation -negative -neglector -negligee -negligent -negotiate -nemeses -nemesis -neon -nephew -nerd -nervous -nervy -nest -net -neurology -neuron -neurosis -neurotic -neuter -neutron -never -next -nibble -nickname -nicotine -niece -nifty -nimble -nimbly -nineteen -ninetieth -ninja -nintendo -ninth -nuclear -nuclei -nucleus -nugget -nullify -number -numbing -numbly -numbness -numeral -numerate -numerator -numeric -numerous -nuptials -nursery -nursing -nurture -nutcase -nutlike -nutmeg -nutrient -nutshell -nuttiness -nutty -nuzzle -nylon -oaf -oak -oasis -oat -obedience -obedient -obituary -object -obligate -obliged -oblivion -oblivious -oblong -obnoxious -oboe -obscure -obscurity -observant -observer -observing -obsessed 
-obsession -obsessive -obsolete -obstacle -obstinate -obstruct -obtain -obtrusive -obtuse -obvious -occultist -occupancy -occupant -occupier -occupy -ocean -ocelot -octagon -octane -october -octopus -ogle -oil -oink -ointment -okay -old -olive -olympics -omega -omen -ominous -omission -omit -omnivore -onboard -oncoming -ongoing -onion -online -onlooker -only -onscreen -onset -onshore -onslaught -onstage -onto -onward -onyx -oops -ooze -oozy -opacity -opal -open -operable -operate -operating -operation -operative -operator -opium -opossum -opponent -oppose -opposing -opposite -oppressed -oppressor -opt -opulently -osmosis -other -otter -ouch -ought -ounce -outage -outback -outbid -outboard -outbound -outbreak -outburst -outcast -outclass -outcome -outdated -outdoors -outer -outfield -outfit -outflank -outgoing -outgrow -outhouse -outing -outlast -outlet -outline -outlook -outlying -outmatch -outmost -outnumber -outplayed -outpost -outpour -output -outrage -outrank -outreach -outright -outscore -outsell -outshine -outshoot -outsider -outskirts -outsmart -outsource -outspoken -outtakes -outthink -outward -outweigh -outwit -oval -ovary -oven -overact -overall -overarch -overbid -overbill -overbite -overblown -overboard -overbook -overbuilt -overcast -overcoat -overcome -overcook -overcrowd -overdraft -overdrawn -overdress -overdrive -overdue -overeager -overeater -overexert -overfed -overfeed -overfill -overflow -overfull -overgrown -overhand -overhang -overhaul -overhead -overhear -overheat -overhung -overjoyed -overkill -overlabor -overlaid -overlap -overlay -overload -overlook -overlord -overlying -overnight -overpass -overpay -overplant -overplay -overpower -overprice -overrate -overreach -overreact -override -overripe -overrule -overrun -overshoot -overshot -oversight -oversized -oversleep -oversold -overspend -overstate -overstay -overstep -overstock -overstuff -oversweet -overtake -overthrow -overtime -overtly -overtone -overture -overturn -overuse -overvalue 
-overview -overwrite -owl -oxford -oxidant -oxidation -oxidize -oxidizing -oxygen -oxymoron -oyster -ozone -paced -pacemaker -pacific -pacifier -pacifism -pacifist -pacify -padded -padding -paddle -paddling -padlock -pagan -pager -paging -pajamas -palace -palatable -palm -palpable -palpitate -paltry -pampered -pamperer -pampers -pamphlet -panama -pancake -pancreas -panda -pandemic -pang -panhandle -panic -panning -panorama -panoramic -panther -pantomime -pantry -pants -pantyhose -paparazzi -papaya -paper -paprika -papyrus -parabola -parachute -parade -paradox -paragraph -parakeet -paralegal -paralyses -paralysis -paralyze -paramedic -parameter -paramount -parasail -parasite -parasitic -parcel -parched -parchment -pardon -parish -parka -parking -parkway -parlor -parmesan -parole -parrot -parsley -parsnip -partake -parted -parting -partition -partly -partner -partridge -party -passable -passably -passage -passcode -passenger -passerby -passing -passion -passive -passivism -passover -passport -password -pasta -pasted -pastel -pastime -pastor -pastrami -pasture -pasty -patchwork -patchy -paternal -paternity -path -patience -patient -patio -patriarch -patriot -patrol -patronage -patronize -pauper -pavement -paver -pavestone -pavilion -paving -pawing -payable -payback -paycheck -payday -payee -payer -paying -payment -payphone -payroll -pebble -pebbly -pecan -pectin -peculiar -peddling -pediatric -pedicure -pedigree -pedometer -pegboard -pelican -pellet -pelt -pelvis -penalize -penalty -pencil -pendant -pending -penholder -penknife -pennant -penniless -penny -penpal -pension -pentagon -pentagram -pep -perceive -percent -perch -percolate -perennial -perfected -perfectly -perfume -periscope -perish -perjurer -perjury -perkiness -perky -perm -peroxide -perpetual -perplexed -persecute -persevere -persuaded -persuader -pesky -peso -pessimism -pessimist -pester -pesticide -petal -petite -petition -petri -petroleum -petted -petticoat -pettiness -petty -petunia -phantom -phobia 
-phoenix -phonebook -phoney -phonics -phoniness -phony -phosphate -photo -phrase -phrasing -placard -placate -placidly -plank -planner -plant -plasma -plaster -plastic -plated -platform -plating -platinum -platonic -platter -platypus -plausible -plausibly -playable -playback -player -playful -playgroup -playhouse -playing -playlist -playmaker -playmate -playoff -playpen -playroom -playset -plaything -playtime -plaza -pleading -pleat -pledge -plentiful -plenty -plethora -plexiglas -pliable -plod -plop -plot -plow -ploy -pluck -plug -plunder -plunging -plural -plus -plutonium -plywood -poach -pod -poem -poet -pogo -pointed -pointer -pointing -pointless -pointy -poise -poison -poker -poking -polar -police -policy -polio -polish -politely -polka -polo -polyester -polygon -polygraph -polymer -poncho -pond -pony -popcorn -pope -poplar -popper -poppy -popsicle -populace -popular -populate -porcupine -pork -porous -porridge -portable -portal -portfolio -porthole -portion -portly -portside -poser -posh -posing -possible -possibly -possum -postage -postal -postbox -postcard -posted -poster -posting -postnasal -posture -postwar -pouch -pounce -pouncing -pound -pouring -pout -powdered -powdering -powdery -power -powwow -pox -praising -prance -prancing -pranker -prankish -prankster -prayer -praying -preacher -preaching -preachy -preamble -precinct -precise -precision -precook -precut -predator -predefine -predict -preface -prefix -preflight -preformed -pregame -pregnancy -pregnant -preheated -prelaunch -prelaw -prelude -premiere -premises -premium -prenatal -preoccupy -preorder -prepaid -prepay -preplan -preppy -preschool -prescribe -preseason -preset -preshow -president -presoak -press -presume -presuming -preteen -pretended -pretender -pretense -pretext -pretty -pretzel -prevail -prevalent -prevent -preview -previous -prewar -prewashed -prideful -pried -primal -primarily -primary -primate -primer -primp -princess -print -prior -prism -prison -prissy -pristine -privacy 
-private -privatize -prize -proactive -probable -probably -probation -probe -probing -probiotic -problem -procedure -process -proclaim -procreate -procurer -prodigal -prodigy -produce -product -profane -profanity -professed -professor -profile -profound -profusely -progeny -prognosis -program -progress -projector -prologue -prolonged -promenade -prominent -promoter -promotion -prompter -promptly -prone -prong -pronounce -pronto -proofing -proofread -proofs -propeller -properly -property -proponent -proposal -propose -props -prorate -protector -protegee -proton -prototype -protozoan -protract -protrude -proud -provable -proved -proven -provided -provider -providing -province -proving -provoke -provoking -provolone -prowess -prowler -prowling -proximity -proxy -prozac -prude -prudishly -prune -pruning -pry -psychic -public -publisher -pucker -pueblo -pug -pull -pulmonary -pulp -pulsate -pulse -pulverize -puma -pumice -pummel -punch -punctual -punctuate -punctured -pungent -punisher -punk -pupil -puppet -puppy -purchase -pureblood -purebred -purely -pureness -purgatory -purge -purging -purifier -purify -purist -puritan -purity -purple -purplish -purposely -purr -purse -pursuable -pursuant -pursuit -purveyor -pushcart -pushchair -pusher -pushiness -pushing -pushover -pushpin -pushup -pushy -putdown -putt -puzzle -puzzling -pyramid -pyromania -python -quack -quadrant -quail -quaintly -quake -quaking -qualified -qualifier -qualify -quality -qualm -quantum -quarrel -quarry -quartered -quarterly -quarters -quartet -quench -query -quicken -quickly -quickness -quicksand -quickstep -quiet -quill -quilt -quintet -quintuple -quirk -quit -quiver -quizzical -quotable -quotation -quote -rabid -race -racing -racism -rack -racoon -radar -radial -radiance -radiantly -radiated -radiation -radiator -radio -radish -raffle -raft -rage -ragged -raging -ragweed -raider -railcar -railing -railroad -railway -raisin -rake -raking -rally -ramble -rambling -ramp -ramrod -ranch -rancidity 
-random -ranged -ranger -ranging -ranked -ranking -ransack -ranting -rants -rare -rarity -rascal -rash -rasping -ravage -raven -ravine -raving -ravioli -ravishing -reabsorb -reach -reacquire -reaction -reactive -reactor -reaffirm -ream -reanalyze -reappear -reapply -reappoint -reapprove -rearrange -rearview -reason -reassign -reassure -reattach -reawake -rebalance -rebate -rebel -rebirth -reboot -reborn -rebound -rebuff -rebuild -rebuilt -reburial -rebuttal -recall -recant -recapture -recast -recede -recent -recess -recharger -recipient -recital -recite -reckless -reclaim -recliner -reclining -recluse -reclusive -recognize -recoil -recollect -recolor -reconcile -reconfirm -reconvene -recopy -record -recount -recoup -recovery -recreate -rectal -rectangle -rectified -rectify -recycled -recycler -recycling -reemerge -reenact -reenter -reentry -reexamine -referable -referee -reference -refill -refinance -refined -refinery -refining -refinish -reflected -reflector -reflex -reflux -refocus -refold -reforest -reformat -reformed -reformer -reformist -refract -refrain -refreeze -refresh -refried -refueling -refund -refurbish -refurnish -refusal -refuse -refusing -refutable -refute -regain -regalia -regally -reggae -regime -region -register -registrar -registry -regress -regretful -regroup -regular -regulate -regulator -rehab -reheat -rehire -rehydrate -reimburse -reissue -reiterate -rejoice -rejoicing -rejoin -rekindle -relapse -relapsing -relatable -related -relation -relative -relax -relay -relearn -release -relenting -reliable -reliably -reliance -reliant -relic -relieve -relieving -relight -relish -relive -reload -relocate -relock -reluctant -rely -remake -remark -remarry -rematch -remedial -remedy -remember -reminder -remindful -remission -remix -remnant -remodeler -remold -remorse -remote -removable -removal -removed -remover -removing -rename -renderer -rendering -rendition -renegade -renewable -renewably -renewal -renewed -renounce -renovate -renovator -rentable 
-rental -rented -renter -reoccupy -reoccur -reopen -reorder -repackage -repacking -repaint -repair -repave -repaying -repayment -repeal -repeated -repeater -repent -rephrase -replace -replay -replica -reply -reporter -repose -repossess -repost -repressed -reprimand -reprint -reprise -reproach -reprocess -reproduce -reprogram -reps -reptile -reptilian -repugnant -repulsion -repulsive -repurpose -reputable -reputably -request -require -requisite -reroute -rerun -resale -resample -rescuer -reseal -research -reselect -reseller -resemble -resend -resent -reset -reshape -reshoot -reshuffle -residence -residency -resident -residual -residue -resigned -resilient -resistant -resisting -resize -resolute -resolved -resonant -resonate -resort -resource -respect -resubmit -result -resume -resupply -resurface -resurrect -retail -retainer -retaining -retake -retaliate -retention -rethink -retinal -retired -retiree -retiring -retold -retool -retorted -retouch -retrace -retract -retrain -retread -retreat -retrial -retrieval -retriever -retry -return -retying -retype -reunion -reunite -reusable -reuse -reveal -reveler -revenge -revenue -reverb -revered -reverence -reverend -reversal -reverse -reversing -reversion -revert -revisable -revise -revision -revisit -revivable -revival -reviver -reviving -revocable -revoke -revolt -revolver -revolving -reward -rewash -rewind -rewire -reword -rework -rewrap -rewrite -rhyme -ribbon -ribcage -rice -riches -richly -richness -rickety -ricotta -riddance -ridden -ride -riding -rifling -rift -rigging -rigid -rigor -rimless -rimmed -rind -rink -rinse -rinsing -riot -ripcord -ripeness -ripening -ripping -ripple -rippling -riptide -rise -rising -risk -risotto -ritalin -ritzy -rival -riverbank -riverbed -riverboat -riverside -riveter -riveting -roamer -roaming -roast -robbing -robe -robin -robotics -robust -rockband -rocker -rocket -rockfish -rockiness -rocking -rocklike -rockslide -rockstar -rocky -rogue -roman -romp -rope -roping -roster -rosy 
-rotten -rotting -rotunda -roulette -rounding -roundish -roundness -roundup -roundworm -routine -routing -rover -roving -royal -rubbed -rubber -rubbing -rubble -rubdown -ruby -ruckus -rudder -rug -ruined -rule -rumble -rumbling -rummage -rumor -runaround -rundown -runner -running -runny -runt -runway -rupture -rural -ruse -rush -rust -rut -sabbath -sabotage -sacrament -sacred -sacrifice -sadden -saddlebag -saddled -saddling -sadly -sadness -safari -safeguard -safehouse -safely -safeness -saffron -saga -sage -sagging -saggy -said -saint -sake -salad -salami -salaried -salary -saline -salon -saloon -salsa -salt -salutary -salute -salvage -salvaging -salvation -same -sample -sampling -sanction -sanctity -sanctuary -sandal -sandbag -sandbank -sandbar -sandblast -sandbox -sanded -sandfish -sanding -sandlot -sandpaper -sandpit -sandstone -sandstorm -sandworm -sandy -sanitary -sanitizer -sank -santa -sapling -sappiness -sappy -sarcasm -sarcastic -sardine -sash -sasquatch -sassy -satchel -satiable -satin -satirical -satisfied -satisfy -saturate -saturday -sauciness -saucy -sauna -savage -savanna -saved -savings -savior -savor -saxophone -say -scabbed -scabby -scalded -scalding -scale -scaling -scallion -scallop -scalping -scam -scandal -scanner -scanning -scant -scapegoat -scarce -scarcity -scarecrow -scared -scarf -scarily -scariness -scarring -scary -scavenger -scenic -schedule -schematic -scheme -scheming -schilling -schnapps -scholar -science -scientist -scion -scoff -scolding -scone -scoop -scooter -scope -scorch -scorebook -scorecard -scored -scoreless -scorer -scoring -scorn -scorpion -scotch -scoundrel -scoured -scouring -scouting -scouts -scowling -scrabble -scraggly -scrambled -scrambler -scrap -scratch -scrawny -screen -scribble -scribe -scribing -scrimmage -script -scroll -scrooge -scrounger -scrubbed -scrubber -scruffy -scrunch -scrutiny -scuba -scuff -sculptor -sculpture -scurvy -scuttle -secluded -secluding -seclusion -second -secrecy -secret -sectional 
-sector -secular -securely -security -sedan -sedate -sedation -sedative -sediment -seduce -seducing -segment -seismic -seizing -seldom -selected -selection -selective -selector -self -seltzer -semantic -semester -semicolon -semifinal -seminar -semisoft -semisweet -senate -senator -send -senior -senorita -sensation -sensitive -sensitize -sensually -sensuous -sepia -september -septic -septum -sequel -sequence -sequester -series -sermon -serotonin -serpent -serrated -serve -service -serving -sesame -sessions -setback -setting -settle -settling -setup -sevenfold -seventeen -seventh -seventy -severity -shabby -shack -shaded -shadily -shadiness -shading -shadow -shady -shaft -shakable -shakily -shakiness -shaking -shaky -shale -shallot -shallow -shame -shampoo -shamrock -shank -shanty -shape -shaping -share -sharpener -sharper -sharpie -sharply -sharpness -shawl -sheath -shed -sheep -sheet -shelf -shell -shelter -shelve -shelving -sherry -shield -shifter -shifting -shiftless -shifty -shimmer -shimmy -shindig -shine -shingle -shininess -shining -shiny -ship -shirt -shivering -shock -shone -shoplift -shopper -shopping -shoptalk -shore -shortage -shortcake -shortcut -shorten -shorter -shorthand -shortlist -shortly -shortness -shorts -shortwave -shorty -shout -shove -showbiz -showcase -showdown -shower -showgirl -showing -showman -shown -showoff -showpiece -showplace -showroom -showy -shrank -shrapnel -shredder -shredding -shrewdly -shriek -shrill -shrimp -shrine -shrink -shrivel -shrouded -shrubbery -shrubs -shrug -shrunk -shucking -shudder -shuffle -shuffling -shun -shush -shut -shy -siamese -siberian -sibling -siding -sierra -siesta -sift -sighing -silenced -silencer -silent -silica -silicon -silk -silliness -silly -silo -silt -silver -similarly -simile -simmering -simple -simplify -simply -sincere -sincerity -singer -singing -single -singular -sinister -sinless -sinner -sinuous -sip -siren -sister -sitcom -sitter -sitting -situated -situation -sixfold -sixteen -sixth 
-sixties -sixtieth -sixtyfold -sizable -sizably -size -sizing -sizzle -sizzling -skater -skating -skedaddle -skeletal -skeleton -skeptic -sketch -skewed -skewer -skid -skied -skier -skies -skiing -skilled -skillet -skillful -skimmed -skimmer -skimming -skimpily -skincare -skinhead -skinless -skinning -skinny -skintight -skipper -skipping -skirmish -skirt -skittle -skydiver -skylight -skyline -skype -skyrocket -skyward -slab -slacked -slacker -slacking -slackness -slacks -slain -slam -slander -slang -slapping -slapstick -slashed -slashing -slate -slather -slaw -sled -sleek -sleep -sleet -sleeve -slept -sliceable -sliced -slicer -slicing -slick -slider -slideshow -sliding -slighted -slighting -slightly -slimness -slimy -slinging -slingshot -slinky -slip -slit -sliver -slobbery -slogan -sloped -sloping -sloppily -sloppy -slot -slouching -slouchy -sludge -slug -slum -slurp -slush -sly -small -smartly -smartness -smasher -smashing -smashup -smell -smelting -smile -smilingly -smirk -smite -smith -smitten -smock -smog -smoked -smokeless -smokiness -smoking -smoky -smolder -smooth -smother -smudge -smudgy -smuggler -smuggling -smugly -smugness -snack -snagged -snaking -snap -snare -snarl -snazzy -sneak -sneer -sneeze -sneezing -snide -sniff -snippet -snipping -snitch -snooper -snooze -snore -snoring -snorkel -snort -snout -snowbird -snowboard -snowbound -snowcap -snowdrift -snowdrop -snowfall -snowfield -snowflake -snowiness -snowless -snowman -snowplow -snowshoe -snowstorm -snowsuit -snowy -snub -snuff -snuggle -snugly -snugness -speak -spearfish -spearhead -spearman -spearmint -species -specimen -specked -speckled -specks -spectacle -spectator -spectrum -speculate -speech -speed -spellbind -speller -spelling -spendable -spender -spending -spent -spew -sphere -spherical -sphinx -spider -spied -spiffy -spill -spilt -spinach -spinal -spindle -spinner -spinning -spinout -spinster -spiny -spiral -spirited -spiritism -spirits -spiritual -splashed -splashing -splashy -splatter 
-spleen -splendid -splendor -splice -splicing -splinter -splotchy -splurge -spoilage -spoiled -spoiler -spoiling -spoils -spoken -spokesman -sponge -spongy -sponsor -spoof -spookily -spooky -spool -spoon -spore -sporting -sports -sporty -spotless -spotlight -spotted -spotter -spotting -spotty -spousal -spouse -spout -sprain -sprang -sprawl -spray -spree -sprig -spring -sprinkled -sprinkler -sprint -sprite -sprout -spruce -sprung -spry -spud -spur -sputter -spyglass -squabble -squad -squall -squander -squash -squatted -squatter -squatting -squeak -squealer -squealing -squeamish -squeegee -squeeze -squeezing -squid -squiggle -squiggly -squint -squire -squirt -squishier -squishy -stability -stabilize -stable -stack -stadium -staff -stage -staging -stagnant -stagnate -stainable -stained -staining -stainless -stalemate -staleness -stalling -stallion -stamina -stammer -stamp -stand -stank -staple -stapling -starboard -starch -stardom -stardust -starfish -stargazer -staring -stark -starless -starlet -starlight -starlit -starring -starry -starship -starter -starting -startle -startling -startup -starved -starving -stash -state -static -statistic -statue -stature -status -statute -statutory -staunch -stays -steadfast -steadier -steadily -steadying -steam -steed -steep -steerable -steering -steersman -stegosaur -stellar -stem -stench -stencil -step -stereo -sterile -sterility -sterilize -sterling -sternness -sternum -stew -stick -stiffen -stiffly -stiffness -stifle -stifling -stillness -stilt -stimulant -stimulate -stimuli -stimulus -stinger -stingily -stinging -stingray -stingy -stinking -stinky -stipend -stipulate -stir -stitch -stock -stoic -stoke -stole -stomp -stonewall -stoneware -stonework -stoning -stony -stood -stooge -stool -stoop -stoplight -stoppable -stoppage -stopped -stopper -stopping -stopwatch -storable -storage -storeroom -storewide -storm -stout -stove -stowaway -stowing -straddle -straggler -strained -strainer -straining -strangely -stranger -strangle 
-strategic -strategy -stratus -straw -stray -streak -stream -street -strength -strenuous -strep -stress -stretch -strewn -stricken -strict -stride -strife -strike -striking -strive -striving -strobe -strode -stroller -strongbox -strongly -strongman -struck -structure -strudel -struggle -strum -strung -strut -stubbed -stubble -stubbly -stubborn -stucco -stuck -student -studied -studio -study -stuffed -stuffing -stuffy -stumble -stumbling -stump -stung -stunned -stunner -stunning -stunt -stupor -sturdily -sturdy -styling -stylishly -stylist -stylized -stylus -suave -subarctic -subatomic -subdivide -subdued -subduing -subfloor -subgroup -subheader -subject -sublease -sublet -sublevel -sublime -submarine -submerge -submersed -submitter -subpanel -subpar -subplot -subprime -subscribe -subscript -subsector -subside -subsiding -subsidize -subsidy -subsoil -subsonic -substance -subsystem -subtext -subtitle -subtly -subtotal -subtract -subtype -suburb -subway -subwoofer -subzero -succulent -such -suction -sudden -sudoku -suds -sufferer -suffering -suffice -suffix -suffocate -suffrage -sugar -suggest -suing -suitable -suitably -suitcase -suitor -sulfate -sulfide -sulfite -sulfur -sulk -sullen -sulphate -sulphuric -sultry -superbowl -superglue -superhero -superior -superjet -superman -supermom -supernova -supervise -supper -supplier -supply -support -supremacy -supreme -surcharge -surely -sureness -surface -surfacing -surfboard -surfer -surgery -surgical -surging -surname -surpass -surplus -surprise -surreal -surrender -surrogate -surround -survey -survival -survive -surviving -survivor -sushi -suspect -suspend -suspense -sustained -sustainer -swab -swaddling -swagger -swampland -swan -swapping -swarm -sway -swear -sweat -sweep -swell -swept -swerve -swifter -swiftly -swiftness -swimmable -swimmer -swimming -swimsuit -swimwear -swinger -swinging -swipe -swirl -switch -swivel -swizzle -swooned -swoop -swoosh -swore -sworn -swung -sycamore -sympathy -symphonic -symphony 
-symptom -synapse -syndrome -synergy -synopses -synopsis -synthesis -synthetic -syrup -system -t-shirt -tabasco -tabby -tableful -tables -tablet -tableware -tabloid -tackiness -tacking -tackle -tackling -tacky -taco -tactful -tactical -tactics -tactile -tactless -tadpole -taekwondo -tag -tainted -take -taking -talcum -talisman -tall -talon -tamale -tameness -tamer -tamper -tank -tanned -tannery -tanning -tantrum -tapeless -tapered -tapering -tapestry -tapioca -tapping -taps -tarantula -target -tarmac -tarnish -tarot -tartar -tartly -tartness -task -tassel -taste -tastiness -tasting -tasty -tattered -tattle -tattling -tattoo -taunt -tavern -thank -that -thaw -theater -theatrics -thee -theft -theme -theology -theorize -thermal -thermos -thesaurus -these -thesis -thespian -thicken -thicket -thickness -thieving -thievish -thigh -thimble -thing -think -thinly -thinner -thinness -thinning -thirstily -thirsting -thirsty -thirteen -thirty -thong -thorn -those -thousand -thrash -thread -threaten -threefold -thrift -thrill -thrive -thriving -throat -throbbing -throng -throttle -throwaway -throwback -thrower -throwing -thud -thumb -thumping -thursday -thus -thwarting -thyself -tiara -tibia -tidal -tidbit -tidiness -tidings -tidy -tiger -tighten -tightly -tightness -tightrope -tightwad -tigress -tile -tiling -till -tilt -timid -timing -timothy -tinderbox -tinfoil -tingle -tingling -tingly -tinker -tinkling -tinsel -tinsmith -tint -tinwork -tiny -tipoff -tipped -tipper -tipping -tiptoeing -tiptop -tiring -tissue -trace -tracing -track -traction -tractor -trade -trading -tradition -traffic -tragedy -trailing -trailside -train -traitor -trance -tranquil -transfer -transform -translate -transpire -transport -transpose -trapdoor -trapeze -trapezoid -trapped -trapper -trapping -traps -trash -travel -traverse -travesty -tray -treachery -treading -treadmill -treason -treat -treble -tree -trekker -tremble -trembling -tremor -trench -trend -trespass -triage -trial -triangle -tribesman 
-tribunal -tribune -tributary -tribute -triceps -trickery -trickily -tricking -trickle -trickster -tricky -tricolor -tricycle -trident -tried -trifle -trifocals -trillion -trilogy -trimester -trimmer -trimming -trimness -trinity -trio -tripod -tripping -triumph -trivial -trodden -trolling -trombone -trophy -tropical -tropics -trouble -troubling -trough -trousers -trout -trowel -truce -truck -truffle -trump -trunks -trustable -trustee -trustful -trusting -trustless -truth -try -tubby -tubeless -tubular -tucking -tuesday -tug -tuition -tulip -tumble -tumbling -tummy -turban -turbine -turbofan -turbojet -turbulent -turf -turkey -turmoil -turret -turtle -tusk -tutor -tutu -tux -tweak -tweed -tweet -tweezers -twelve -twentieth -twenty -twerp -twice -twiddle -twiddling -twig -twilight -twine -twins -twirl -twistable -twisted -twister -twisting -twisty -twitch -twitter -tycoon -tying -tyke -udder -ultimate -ultimatum -ultra -umbilical -umbrella -umpire -unabashed -unable -unadorned -unadvised -unafraid -unaired -unaligned -unaltered -unarmored -unashamed -unaudited -unawake -unaware -unbaked -unbalance -unbeaten -unbend -unbent -unbiased -unbitten -unblended -unblessed -unblock -unbolted -unbounded -unboxed -unbraided -unbridle -unbroken -unbuckled -unbundle -unburned -unbutton -uncanny -uncapped -uncaring -uncertain -unchain -unchanged -uncharted -uncheck -uncivil -unclad -unclaimed -unclamped -unclasp -uncle -unclip -uncloak -unclog -unclothed -uncoated -uncoiled -uncolored -uncombed -uncommon -uncooked -uncork -uncorrupt -uncounted -uncouple -uncouth -uncover -uncross -uncrown -uncrushed -uncured -uncurious -uncurled -uncut -undamaged -undated -undaunted -undead -undecided -undefined -underage -underarm -undercoat -undercook -undercut -underdog -underdone -underfed -underfeed -underfoot -undergo -undergrad -underhand -underline -underling -undermine -undermost -underpaid -underpass -underpay -underrate -undertake -undertone -undertook -undertow -underuse -underwear 
-underwent -underwire -undesired -undiluted -undivided -undocked -undoing -undone -undrafted -undress -undrilled -undusted -undying -unearned -unearth -unease -uneasily -uneasy -uneatable -uneaten -unedited -unelected -unending -unengaged -unenvied -unequal -unethical -uneven -unexpired -unexposed -unfailing -unfair -unfasten -unfazed -unfeeling -unfiled -unfilled -unfitted -unfitting -unfixable -unfixed -unflawed -unfocused -unfold -unfounded -unframed -unfreeze -unfrosted -unfrozen -unfunded -unglazed -ungloved -unglue -ungodly -ungraded -ungreased -unguarded -unguided -unhappily -unhappy -unharmed -unhealthy -unheard -unhearing -unheated -unhelpful -unhidden -unhinge -unhitched -unholy -unhook -unicorn -unicycle -unified -unifier -uniformed -uniformly -unify -unimpeded -uninjured -uninstall -uninsured -uninvited -union -uniquely -unisexual -unison -unissued -unit -universal -universe -unjustly -unkempt -unkind -unknotted -unknowing -unknown -unlaced -unlatch -unlawful -unleaded -unlearned -unleash -unless -unleveled -unlighted -unlikable -unlimited -unlined -unlinked -unlisted -unlit -unlivable -unloaded -unloader -unlocked -unlocking -unlovable -unloved -unlovely -unloving -unluckily -unlucky -unmade -unmanaged -unmanned -unmapped -unmarked -unmasked -unmasking -unmatched -unmindful -unmixable -unmixed -unmolded -unmoral -unmovable -unmoved -unmoving -unnamable -unnamed -unnatural -unneeded -unnerve -unnerving -unnoticed -unopened -unopposed -unpack -unpadded -unpaid -unpainted -unpaired -unpaved -unpeeled -unpicked -unpiloted -unpinned -unplanned -unplanted -unpleased -unpledged -unplowed -unplug -unpopular -unproven -unquote -unranked -unrated -unraveled -unreached -unread -unreal -unreeling -unrefined -unrelated -unrented -unrest -unretired -unrevised -unrigged -unripe -unrivaled -unroasted -unrobed -unroll -unruffled -unruly -unrushed -unsaddle -unsafe -unsaid -unsalted -unsaved -unsavory -unscathed -unscented -unscrew -unsealed -unseated -unsecured 
-unseeing -unseemly -unseen -unselect -unselfish -unsent -unsettled -unshackle -unshaken -unshaved -unshaven -unsheathe -unshipped -unsightly -unsigned -unskilled -unsliced -unsmooth -unsnap -unsocial -unsoiled -unsold -unsolved -unsorted -unspoiled -unspoken -unstable -unstaffed -unstamped -unsteady -unsterile -unstirred -unstitch -unstopped -unstuck -unstuffed -unstylish -unsubtle -unsubtly -unsuited -unsure -unsworn -untagged -untainted -untaken -untamed -untangled -untapped -untaxed -unthawed -unthread -untidy -untie -until -untimed -untimely -untitled -untoasted -untold -untouched -untracked -untrained -untreated -untried -untrimmed -untrue -untruth -unturned -untwist -untying -unusable -unused -unusual -unvalued -unvaried -unvarying -unveiled -unveiling -unvented -unviable -unvisited -unvocal -unwanted -unwarlike -unwary -unwashed -unwatched -unweave -unwed -unwelcome -unwell -unwieldy -unwilling -unwind -unwired -unwitting -unwomanly -unworldly -unworn -unworried -unworthy -unwound -unwoven -unwrapped -unwritten -unzip -upbeat -upchuck -upcoming -upcountry -update -upfront -upgrade -upheaval -upheld -uphill -uphold -uplifted -uplifting -upload -upon -upper -upright -uprising -upriver -uproar -uproot -upscale -upside -upstage -upstairs -upstart -upstate -upstream -upstroke -upswing -uptake -uptight -uptown -upturned -upward -upwind -uranium -urban -urchin -urethane -urgency -urgent -urging -urologist -urology -usable -usage -useable -used -uselessly -user -usher -usual -utensil -utility -utilize -utmost -utopia -utter -vacancy -vacant -vacate -vacation -vagabond -vagrancy -vagrantly -vaguely -vagueness -valiant -valid -valium -valley -valuables -value -vanilla -vanish -vanity -vanquish -vantage -vaporizer -variable -variably -varied -variety -various -varmint -varnish -varsity -varying -vascular -vaseline -vastly -vastness -veal -vegan -veggie -vehicular -velcro -velocity -velvet -vendetta -vending -vendor -veneering -vengeful -venomous -ventricle -venture 
-venue -venus -verbalize -verbally -verbose -verdict -verify -verse -version -versus -vertebrae -vertical -vertigo -very -vessel -vest -veteran -veto -vexingly -viability -viable -vibes -vice -vicinity -victory -video -viewable -viewer -viewing -viewless -viewpoint -vigorous -village -villain -vindicate -vineyard -vintage -violate -violation -violator -violet -violin -viper -viral -virtual -virtuous -virus -visa -viscosity -viscous -viselike -visible -visibly -vision -visiting -visitor -visor -vista -vitality -vitalize -vitally -vitamins -vivacious -vividly -vividness -vixen -vocalist -vocalize -vocally -vocation -voice -voicing -void -volatile -volley -voltage -volumes -voter -voting -voucher -vowed -vowel -voyage -wackiness -wad -wafer -waffle -waged -wager -wages -waggle -wagon -wake -waking -walk -walmart -walnut -walrus -waltz -wand -wannabe -wanted -wanting -wasabi -washable -washbasin -washboard -washbowl -washcloth -washday -washed -washer -washhouse -washing -washout -washroom -washstand -washtub -wasp -wasting -watch -water -waviness -waving -wavy -whacking -whacky -wham -wharf -wheat -whenever -whiff -whimsical -whinny -whiny -whisking -whoever -whole -whomever -whoopee -whooping -whoops -why -wick -widely -widen -widget -widow -width -wieldable -wielder -wife -wifi -wikipedia -wildcard -wildcat -wilder -wildfire -wildfowl -wildland -wildlife -wildly -wildness -willed -willfully -willing -willow -willpower -wilt -wimp -wince -wincing -wind -wing -winking -winner -winnings -winter -wipe -wired -wireless -wiring -wiry -wisdom -wise -wish -wisplike -wispy -wistful -wizard -wobble -wobbling -wobbly -wok -wolf -wolverine -womanhood -womankind -womanless -womanlike -womanly -womb -woof -wooing -wool -woozy -word -work -worried -worrier -worrisome -worry -worsening -worshiper -worst -wound -woven -wow -wrangle -wrath -wreath -wreckage -wrecker -wrecking -wrench -wriggle -wriggly -wrinkle -wrinkly -wrist -writing -written -wrongdoer -wronged -wrongful -wrongly 
-wrongness -wrought -xbox -xerox -yahoo -yam -yanking -yapping -yard -yarn -yeah -yearbook -yearling -yearly -yearning -yeast -yelling -yelp -yen -yesterday -yiddish -yield -yin -yippee -yo-yo -yodel -yoga -yogurt -yonder -yoyo -yummy -zap -zealous -zebra -zen -zeppelin -zero -zestfully -zesty -zigzagged -zipfile -zipping -zippy -zips -zit -zodiac -zombie -zone -zoning -zookeeper -zoologist -zoology -zoom diff --git a/libs_crutch/contrib/passlib/_data/wordsets/eff_prefixed.txt b/libs_crutch/contrib/passlib/_data/wordsets/eff_prefixed.txt deleted file mode 100644 index 9ac732f..0000000 --- a/libs_crutch/contrib/passlib/_data/wordsets/eff_prefixed.txt +++ /dev/null @@ -1,1296 +0,0 @@ -aardvark -abandoned -abbreviate -abdomen -abhorrence -abiding -abnormal -abrasion -absorbing -abundant -abyss -academy -accountant -acetone -achiness -acid -acoustics -acquire -acrobat -actress -acuteness -aerosol -aesthetic -affidavit -afloat -afraid -aftershave -again -agency -aggressor -aghast -agitate -agnostic -agonizing -agreeing -aidless -aimlessly -ajar -alarmclock -albatross -alchemy -alfalfa -algae -aliens -alkaline -almanac -alongside -alphabet -already -also -altitude -aluminum -always -amazingly -ambulance -amendment -amiable -ammunition -amnesty -amoeba -amplifier -amuser -anagram -anchor -android -anesthesia -angelfish -animal -anklet -announcer -anonymous -answer -antelope -anxiety -anyplace -aorta -apartment -apnea -apostrophe -apple -apricot -aquamarine -arachnid -arbitrate -ardently -arena -argument -aristocrat -armchair -aromatic -arrowhead -arsonist -artichoke -asbestos -ascend -aseptic -ashamed -asinine -asleep -asocial -asparagus -astronaut -asymmetric -atlas -atmosphere -atom -atrocious -attic -atypical -auctioneer -auditorium -augmented -auspicious -automobile -auxiliary -avalanche -avenue -aviator -avocado -awareness -awhile -awkward -awning -awoke -axially -azalea -babbling -backpack -badass -bagpipe -bakery -balancing -bamboo -banana -barracuda -basket 
-bathrobe -bazooka -blade -blender -blimp -blouse -blurred -boatyard -bobcat -body -bogusness -bohemian -boiler -bonnet -boots -borough -bossiness -bottle -bouquet -boxlike -breath -briefcase -broom -brushes -bubblegum -buckle -buddhist -buffalo -bullfrog -bunny -busboy -buzzard -cabin -cactus -cadillac -cafeteria -cage -cahoots -cajoling -cakewalk -calculator -camera -canister -capsule -carrot -cashew -cathedral -caucasian -caviar -ceasefire -cedar -celery -cement -census -ceramics -cesspool -chalkboard -cheesecake -chimney -chlorine -chopsticks -chrome -chute -cilantro -cinnamon -circle -cityscape -civilian -clay -clergyman -clipboard -clock -clubhouse -coathanger -cobweb -coconut -codeword -coexistent -coffeecake -cognitive -cohabitate -collarbone -computer -confetti -copier -cornea -cosmetics -cotton -couch -coverless -coyote -coziness -crawfish -crewmember -crib -croissant -crumble -crystal -cubical -cucumber -cuddly -cufflink -cuisine -culprit -cup -curry -cushion -cuticle -cybernetic -cyclist -cylinder -cymbal -cynicism -cypress -cytoplasm -dachshund -daffodil -dagger -dairy -dalmatian -dandelion -dartboard -dastardly -datebook -daughter -dawn -daytime -dazzler -dealer -debris -decal -dedicate -deepness -defrost -degree -dehydrator -deliverer -democrat -dentist -deodorant -depot -deranged -desktop -detergent -device -dexterity -diamond -dibs -dictionary -diffuser -digit -dilated -dimple -dinnerware -dioxide -diploma -directory -dishcloth -ditto -dividers -dizziness -doctor -dodge -doll -dominoes -donut -doorstep -dorsal -double -downstairs -dozed -drainpipe -dresser -driftwood -droppings -drum -dryer -dubiously -duckling -duffel -dugout -dumpster -duplex -durable -dustpan -dutiful -duvet -dwarfism -dwelling -dwindling -dynamite -dyslexia -eagerness -earlobe -easel -eavesdrop -ebook -eccentric -echoless -eclipse -ecosystem -ecstasy -edged -editor -educator -eelworm -eerie -effects -eggnog -egomaniac -ejection -elastic -elbow -elderly -elephant -elfishly 
-eliminator -elk -elliptical -elongated -elsewhere -elusive -elves -emancipate -embroidery -emcee -emerald -emission -emoticon -emperor -emulate -enactment -enchilada -endorphin -energy -enforcer -engine -enhance -enigmatic -enjoyably -enlarged -enormous -enquirer -enrollment -ensemble -entryway -enunciate -envoy -enzyme -epidemic -equipment -erasable -ergonomic -erratic -eruption -escalator -eskimo -esophagus -espresso -essay -estrogen -etching -eternal -ethics -etiquette -eucalyptus -eulogy -euphemism -euthanize -evacuation -evergreen -evidence -evolution -exam -excerpt -exerciser -exfoliate -exhale -exist -exorcist -explode -exquisite -exterior -exuberant -fabric -factory -faded -failsafe -falcon -family -fanfare -fasten -faucet -favorite -feasibly -february -federal -feedback -feigned -feline -femur -fence -ferret -festival -fettuccine -feudalist -feverish -fiberglass -fictitious -fiddle -figurine -fillet -finalist -fiscally -fixture -flashlight -fleshiness -flight -florist -flypaper -foamless -focus -foggy -folksong -fondue -footpath -fossil -fountain -fox -fragment -freeway -fridge -frosting -fruit -fryingpan -gadget -gainfully -gallstone -gamekeeper -gangway -garlic -gaslight -gathering -gauntlet -gearbox -gecko -gem -generator -geographer -gerbil -gesture -getaway -geyser -ghoulishly -gibberish -giddiness -giftshop -gigabyte -gimmick -giraffe -giveaway -gizmo -glasses -gleeful -glisten -glove -glucose -glycerin -gnarly -gnomish -goatskin -goggles -goldfish -gong -gooey -gorgeous -gosling -gothic -gourmet -governor -grape -greyhound -grill -groundhog -grumbling -guacamole -guerrilla -guitar -gullible -gumdrop -gurgling -gusto -gutless -gymnast -gynecology -gyration -habitat -hacking -haggard -haiku -halogen -hamburger -handgun -happiness -hardhat -hastily -hatchling -haughty -hazelnut -headband -hedgehog -hefty -heinously -helmet -hemoglobin -henceforth -herbs -hesitation -hexagon -hubcap -huddling -huff -hugeness -hullabaloo -human -hunter -hurricane 
-hushing -hyacinth -hybrid -hydrant -hygienist -hypnotist -ibuprofen -icepack -icing -iconic -identical -idiocy -idly -igloo -ignition -iguana -illuminate -imaging -imbecile -imitator -immigrant -imprint -iodine -ionosphere -ipad -iphone -iridescent -irksome -iron -irrigation -island -isotope -issueless -italicize -itemizer -itinerary -itunes -ivory -jabbering -jackrabbit -jaguar -jailhouse -jalapeno -jamboree -janitor -jarring -jasmine -jaundice -jawbreaker -jaywalker -jazz -jealous -jeep -jelly -jeopardize -jersey -jetski -jezebel -jiffy -jigsaw -jingling -jobholder -jockstrap -jogging -john -joinable -jokingly -journal -jovial -joystick -jubilant -judiciary -juggle -juice -jujitsu -jukebox -jumpiness -junkyard -juror -justifying -juvenile -kabob -kamikaze -kangaroo -karate -kayak -keepsake -kennel -kerosene -ketchup -khaki -kickstand -kilogram -kimono -kingdom -kiosk -kissing -kite -kleenex -knapsack -kneecap -knickers -koala -krypton -laboratory -ladder -lakefront -lantern -laptop -laryngitis -lasagna -latch -laundry -lavender -laxative -lazybones -lecturer -leftover -leggings -leisure -lemon -length -leopard -leprechaun -lettuce -leukemia -levers -lewdness -liability -library -licorice -lifeboat -lightbulb -likewise -lilac -limousine -lint -lioness -lipstick -liquid -listless -litter -liverwurst -lizard -llama -luau -lubricant -lucidity -ludicrous -luggage -lukewarm -lullaby -lumberjack -lunchbox -luridness -luscious -luxurious -lyrics -macaroni -maestro -magazine -mahogany -maimed -majority -makeover -malformed -mammal -mango -mapmaker -marbles -massager -matchstick -maverick -maximum -mayonnaise -moaning -mobilize -moccasin -modify -moisture -molecule -momentum -monastery -moonshine -mortuary -mosquito -motorcycle -mousetrap -movie -mower -mozzarella -muckiness -mudflow -mugshot -mule -mummy -mundane -muppet -mural -mustard -mutation -myriad -myspace -myth -nail -namesake -nanosecond -napkin -narrator -nastiness -natives -nautically -navigate -nearest 
-nebula -nectar -nefarious -negotiator -neither -nemesis -neoliberal -nephew -nervously -nest -netting -neuron -nevermore -nextdoor -nicotine -niece -nimbleness -nintendo -nirvana -nuclear -nugget -nuisance -nullify -numbing -nuptials -nursery -nutcracker -nylon -oasis -oat -obediently -obituary -object -obliterate -obnoxious -observer -obtain -obvious -occupation -oceanic -octopus -ocular -office -oftentimes -oiliness -ointment -older -olympics -omissible -omnivorous -oncoming -onion -onlooker -onstage -onward -onyx -oomph -opaquely -opera -opium -opossum -opponent -optical -opulently -oscillator -osmosis -ostrich -otherwise -ought -outhouse -ovation -oven -owlish -oxford -oxidize -oxygen -oyster -ozone -pacemaker -padlock -pageant -pajamas -palm -pamphlet -pantyhose -paprika -parakeet -passport -patio -pauper -pavement -payphone -pebble -peculiarly -pedometer -pegboard -pelican -penguin -peony -pepperoni -peroxide -pesticide -petroleum -pewter -pharmacy -pheasant -phonebook -phrasing -physician -plank -pledge -plotted -plug -plywood -pneumonia -podiatrist -poetic -pogo -poison -poking -policeman -poncho -popcorn -porcupine -postcard -poultry -powerboat -prairie -pretzel -princess -propeller -prune -pry -pseudo -psychopath -publisher -pucker -pueblo -pulley -pumpkin -punchbowl -puppy -purse -pushup -putt -puzzle -pyramid -python -quarters -quesadilla -quilt -quote -racoon -radish -ragweed -railroad -rampantly -rancidity -rarity -raspberry -ravishing -rearrange -rebuilt -receipt -reentry -refinery -register -rehydrate -reimburse -rejoicing -rekindle -relic -remote -renovator -reopen -reporter -request -rerun -reservoir -retriever -reunion -revolver -rewrite -rhapsody -rhetoric -rhino -rhubarb -rhyme -ribbon -riches -ridden -rigidness -rimmed -riptide -riskily -ritzy -riverboat -roamer -robe -rocket -romancer -ropelike -rotisserie -roundtable -royal -rubber -rudderless -rugby -ruined -rulebook -rummage -running -rupture -rustproof -sabotage -sacrifice -saddlebag 
-saffron -sainthood -saltshaker -samurai -sandworm -sapphire -sardine -sassy -satchel -sauna -savage -saxophone -scarf -scenario -schoolbook -scientist -scooter -scrapbook -sculpture -scythe -secretary -sedative -segregator -seismology -selected -semicolon -senator -septum -sequence -serpent -sesame -settler -severely -shack -shelf -shirt -shovel -shrimp -shuttle -shyness -siamese -sibling -siesta -silicon -simmering -singles -sisterhood -sitcom -sixfold -sizable -skateboard -skeleton -skies -skulk -skylight -slapping -sled -slingshot -sloth -slumbering -smartphone -smelliness -smitten -smokestack -smudge -snapshot -sneezing -sniff -snowsuit -snugness -speakers -sphinx -spider -splashing -sponge -sprout -spur -spyglass -squirrel -statue -steamboat -stingray -stopwatch -strawberry -student -stylus -suave -subway -suction -suds -suffocate -sugar -suitcase -sulphur -superstore -surfer -sushi -swan -sweatshirt -swimwear -sword -sycamore -syllable -symphony -synagogue -syringes -systemize -tablespoon -taco -tadpole -taekwondo -tagalong -takeout -tallness -tamale -tanned -tapestry -tarantula -tastebud -tattoo -tavern -thaw -theater -thimble -thorn -throat -thumb -thwarting -tiara -tidbit -tiebreaker -tiger -timid -tinsel -tiptoeing -tirade -tissue -tractor -tree -tripod -trousers -trucks -tryout -tubeless -tuesday -tugboat -tulip -tumbleweed -tupperware -turtle -tusk -tutorial -tuxedo -tweezers -twins -tyrannical -ultrasound -umbrella -umpire -unarmored -unbuttoned -uncle -underwear -unevenness -unflavored -ungloved -unhinge -unicycle -unjustly -unknown -unlocking -unmarked -unnoticed -unopened -unpaved -unquenched -unroll -unscrewing -untied -unusual -unveiled -unwrinkled -unyielding -unzip -upbeat -upcountry -update -upfront -upgrade -upholstery -upkeep -upload -uppercut -upright -upstairs -uptown -upwind -uranium -urban -urchin -urethane -urgent -urologist -username -usher -utensil -utility -utmost -utopia -utterance -vacuum -vagrancy -valuables -vanquished -vaporizer 
-varied -vaseline -vegetable -vehicle -velcro -vendor -vertebrae -vestibule -veteran -vexingly -vicinity -videogame -viewfinder -vigilante -village -vinegar -violin -viperfish -virus -visor -vitamins -vivacious -vixen -vocalist -vogue -voicemail -volleyball -voucher -voyage -vulnerable -waffle -wagon -wakeup -walrus -wanderer -wasp -water -waving -wheat -whisper -wholesaler -wick -widow -wielder -wifeless -wikipedia -wildcat -windmill -wipeout -wired -wishbone -wizardry -wobbliness -wolverine -womb -woolworker -workbasket -wound -wrangle -wreckage -wristwatch -wrongdoing -xerox -xylophone -yacht -yahoo -yard -yearbook -yesterday -yiddish -yield -yo-yo -yodel -yogurt -yuppie -zealot -zebra -zeppelin -zestfully -zigzagged -zillion -zipping -zirconium -zodiac -zombie -zookeeper -zucchini diff --git a/libs_crutch/contrib/passlib/_data/wordsets/eff_short.txt b/libs_crutch/contrib/passlib/_data/wordsets/eff_short.txt deleted file mode 100644 index 4c8baa4..0000000 --- a/libs_crutch/contrib/passlib/_data/wordsets/eff_short.txt +++ /dev/null @@ -1,1296 +0,0 @@ -acid -acorn -acre -acts -afar -affix -aged -agent -agile -aging -agony -ahead -aide -aids -aim -ajar -alarm -alias -alibi -alien -alike -alive -aloe -aloft -aloha -alone -amend -amino -ample -amuse -angel -anger -angle -ankle -apple -april -apron -aqua -area -arena -argue -arise -armed -armor -army -aroma -array -arson -art -ashen -ashes -atlas -atom -attic -audio -avert -avoid -awake -award -awoke -axis -bacon -badge -bagel -baggy -baked -baker -balmy -banjo -barge -barn -bash -basil -bask -batch -bath -baton -bats -blade -blank -blast -blaze -bleak -blend -bless -blimp -blink -bloat -blob -blog -blot -blunt -blurt -blush -boast -boat -body -boil -bok -bolt -boned -boney -bonus -bony -book -booth -boots -boss -botch -both -boxer -breed -bribe -brick -bride -brim -bring -brink -brisk -broad -broil -broke -brook -broom -brush -buck -bud -buggy -bulge -bulk -bully -bunch -bunny -bunt -bush -bust -busy -buzz -cable 
-cache -cadet -cage -cake -calm -cameo -canal -candy -cane -canon -cape -card -cargo -carol -carry -carve -case -cash -cause -cedar -chain -chair -chant -chaos -charm -chase -cheek -cheer -chef -chess -chest -chew -chief -chili -chill -chip -chomp -chop -chow -chuck -chump -chunk -churn -chute -cider -cinch -city -civic -civil -clad -claim -clamp -clap -clash -clasp -class -claw -clay -clean -clear -cleat -cleft -clerk -click -cling -clink -clip -cloak -clock -clone -cloth -cloud -clump -coach -coast -coat -cod -coil -coke -cola -cold -colt -coma -come -comic -comma -cone -cope -copy -coral -cork -cost -cot -couch -cough -cover -cozy -craft -cramp -crane -crank -crate -crave -crawl -crazy -creme -crepe -crept -crib -cried -crisp -crook -crop -cross -crowd -crown -crumb -crush -crust -cub -cult -cupid -cure -curl -curry -curse -curve -curvy -cushy -cut -cycle -dab -dad -daily -dairy -daisy -dance -dandy -darn -dart -dash -data -date -dawn -deaf -deal -dean -debit -debt -debug -decaf -decal -decay -deck -decor -decoy -deed -delay -denim -dense -dent -depth -derby -desk -dial -diary -dice -dig -dill -dime -dimly -diner -dingy -disco -dish -disk -ditch -ditzy -dizzy -dock -dodge -doing -doll -dome -donor -donut -dose -dot -dove -down -dowry -doze -drab -drama -drank -draw -dress -dried -drift -drill -drive -drone -droop -drove -drown -drum -dry -duck -duct -dude -dug -duke -duo -dusk -dust -duty -dwarf -dwell -eagle -early -earth -easel -east -eaten -eats -ebay -ebony -ebook -echo -edge -eel -eject -elbow -elder -elf -elk -elm -elope -elude -elves -email -emit -empty -emu -enter -entry -envoy -equal -erase -error -erupt -essay -etch -evade -even -evict -evil -evoke -exact -exit -fable -faced -fact -fade -fall -false -fancy -fang -fax -feast -feed -femur -fence -fend -ferry -fetal -fetch -fever -fiber -fifth -fifty -film -filth -final -finch -fit -five -flag -flaky -flame -flap -flask -fled -flick -fling -flint -flip -flirt -float -flock -flop -floss -flyer -foam -foe 
-fog -foil -folic -folk -food -fool -found -fox -foyer -frail -frame -fray -fresh -fried -frill -frisk -from -front -frost -froth -frown -froze -fruit -gag -gains -gala -game -gap -gas -gave -gear -gecko -geek -gem -genre -gift -gig -gills -given -giver -glad -glass -glide -gloss -glove -glow -glue -goal -going -golf -gong -good -gooey -goofy -gore -gown -grab -grain -grant -grape -graph -grasp -grass -grave -gravy -gray -green -greet -grew -grid -grief -grill -grip -grit -groom -grope -growl -grub -grunt -guide -gulf -gulp -gummy -guru -gush -gut -guy -habit -half -halo -halt -happy -harm -hash -hasty -hatch -hate -haven -hazel -hazy -heap -heat -heave -hedge -hefty -help -herbs -hers -hub -hug -hula -hull -human -humid -hump -hung -hunk -hunt -hurry -hurt -hush -hut -ice -icing -icon -icy -igloo -image -ion -iron -islam -issue -item -ivory -ivy -jab -jam -jaws -jazz -jeep -jelly -jet -jiffy -job -jog -jolly -jolt -jot -joy -judge -juice -juicy -july -jumbo -jump -junky -juror -jury -keep -keg -kept -kick -kilt -king -kite -kitty -kiwi -knee -knelt -koala -kung -ladle -lady -lair -lake -lance -land -lapel -large -lash -lasso -last -latch -late -lazy -left -legal -lemon -lend -lens -lent -level -lever -lid -life -lift -lilac -lily -limb -limes -line -lint -lion -lip -list -lived -liver -lunar -lunch -lung -lurch -lure -lurk -lying -lyric -mace -maker -malt -mama -mango -manor -many -map -march -mardi -marry -mash -match -mate -math -moan -mocha -moist -mold -mom -moody -mop -morse -most -motor -motto -mount -mouse -mousy -mouth -move -movie -mower -mud -mug -mulch -mule -mull -mumbo -mummy -mural -muse -music -musky -mute -nacho -nag -nail -name -nanny -nap -navy -near -neat -neon -nerd -nest -net -next -niece -ninth -nutty -oak -oasis -oat -ocean -oil -old -olive -omen -onion -only -ooze -opal -open -opera -opt -otter -ouch -ounce -outer -oval -oven -owl -ozone -pace -pagan -pager -palm -panda -panic -pants -panty -paper -park -party -pasta -patch -path -patio 
-payer -pecan -penny -pep -perch -perky -perm -pest -petal -petri -petty -photo -plank -plant -plaza -plead -plot -plow -pluck -plug -plus -poach -pod -poem -poet -pogo -point -poise -poker -polar -polio -polka -polo -pond -pony -poppy -pork -poser -pouch -pound -pout -power -prank -press -print -prior -prism -prize -probe -prong -proof -props -prude -prune -pry -pug -pull -pulp -pulse -puma -punch -punk -pupil -puppy -purr -purse -push -putt -quack -quake -query -quiet -quill -quilt -quit -quota -quote -rabid -race -rack -radar -radio -raft -rage -raid -rail -rake -rally -ramp -ranch -range -rank -rant -rash -raven -reach -react -ream -rebel -recap -relax -relay -relic -remix -repay -repel -reply -rerun -reset -rhyme -rice -rich -ride -rigid -rigor -rinse -riot -ripen -rise -risk -ritzy -rival -river -roast -robe -robin -rock -rogue -roman -romp -rope -rover -royal -ruby -rug -ruin -rule -runny -rush -rust -rut -sadly -sage -said -saint -salad -salon -salsa -salt -same -sandy -santa -satin -sauna -saved -savor -sax -say -scale -scam -scan -scare -scarf -scary -scoff -scold -scoop -scoot -scope -score -scorn -scout -scowl -scrap -scrub -scuba -scuff -sect -sedan -self -send -sepia -serve -set -seven -shack -shade -shady -shaft -shaky -sham -shape -share -sharp -shed -sheep -sheet -shelf -shell -shine -shiny -ship -shirt -shock -shop -shore -shout -shove -shown -showy -shred -shrug -shun -shush -shut -shy -sift -silk -silly -silo -sip -siren -sixth -size -skate -skew -skid -skier -skies -skip -skirt -skit -sky -slab -slack -slain -slam -slang -slash -slate -slaw -sled -sleek -sleep -sleet -slept -slice -slick -slimy -sling -slip -slit -slob -slot -slug -slum -slurp -slush -small -smash -smell -smile -smirk -smog -snack -snap -snare -snarl -sneak -sneer -sniff -snore -snort -snout -snowy -snub -snuff -speak -speed -spend -spent -spew -spied -spill -spiny -spoil -spoke -spoof -spool -spoon -sport -spot -spout -spray -spree -spur -squad -squat -squid -stack -staff 
-stage -stain -stall -stamp -stand -stank -stark -start -stash -state -stays -steam -steep -stem -step -stew -stick -sting -stir -stock -stole -stomp -stony -stood -stool -stoop -stop -storm -stout -stove -straw -stray -strut -stuck -stud -stuff -stump -stung -stunt -suds -sugar -sulk -surf -sushi -swab -swan -swarm -sway -swear -sweat -sweep -swell -swept -swim -swing -swipe -swirl -swoop -swore -syrup -tacky -taco -tag -take -tall -talon -tamer -tank -taper -taps -tarot -tart -task -taste -tasty -taunt -thank -thaw -theft -theme -thigh -thing -think -thong -thorn -those -throb -thud -thumb -thump -thus -tiara -tidal -tidy -tiger -tile -tilt -tint -tiny -trace -track -trade -train -trait -trap -trash -tray -treat -tree -trek -trend -trial -tribe -trick -trio -trout -truce -truck -trump -trunk -try -tug -tulip -tummy -turf -tusk -tutor -tutu -tux -tweak -tweet -twice -twine -twins -twirl -twist -uncle -uncut -undo -unify -union -unit -untie -upon -upper -urban -used -user -usher -utter -value -vapor -vegan -venue -verse -vest -veto -vice -video -view -viral -virus -visa -visor -vixen -vocal -voice -void -volt -voter -vowel -wad -wafer -wager -wages -wagon -wake -walk -wand -wasp -watch -water -wavy -wheat -whiff -whole -whoop -wick -widen -widow -width -wife -wifi -wilt -wimp -wind -wing -wink -wipe -wired -wiry -wise -wish -wispy -wok -wolf -womb -wool -woozy -word -work -worry -wound -woven -wrath -wreck -wrist -xerox -yahoo -yam -yard -year -yeast -yelp -yield -yo-yo -yodel -yoga -yoyo -yummy -zebra -zero -zesty -zippy -zone -zoom diff --git a/libs_crutch/contrib/passlib/_setup/__init__.py b/libs_crutch/contrib/passlib/_setup/__init__.py deleted file mode 100644 index 3881943..0000000 --- a/libs_crutch/contrib/passlib/_setup/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""passlib.setup - helpers used by passlib's setup.py script""" diff --git a/libs_crutch/contrib/passlib/_setup/stamp.py b/libs_crutch/contrib/passlib/_setup/stamp.py deleted file mode 100644 index 
f14c0d4..0000000 --- a/libs_crutch/contrib/passlib/_setup/stamp.py +++ /dev/null @@ -1,149 +0,0 @@ -"""update version string during build""" -#============================================================================= -# imports -#============================================================================= -from __future__ import absolute_import, division, print_function -# core -import datetime -from distutils.dist import Distribution -import os -import re -import subprocess -import time -# pkg -# local -__all__ = [ - "stamp_source", - "stamp_distutils_output", - "append_hg_revision", - "as_bool", -] -#============================================================================= -# helpers -#============================================================================= -def get_command_class(opts, name): - return opts['cmdclass'].get(name) or Distribution().get_command_class(name) - -def get_command_options(opts, command): - return opts.setdefault("options", {}).setdefault(command, {}) - -def set_command_options(opts, command, **kwds): - get_command_options(opts, command).update(kwds) - -def _get_file(path): - with open(path, "r") as fh: - return fh.read() - - -def _replace_file(path, content, dry_run=False): - if dry_run: - return - if os.path.exists(path): - # sdist likes to use hardlinks, have to remove them first, - # or we modify *source* file - os.unlink(path) - with open(path, "w") as fh: - fh.write(content) - - -def stamp_source(base_dir, version, dry_run=False): - """ - update version info in passlib source - """ - # - # update version string in toplevel package source - # - path = os.path.join(base_dir, "passlib", "__init__.py") - content = _get_file(path) - content, count = re.subn('(?m)^__version__\s*=.*$', - '__version__ = ' + repr(version), - content) - assert count == 1, "failed to replace version string" - _replace_file(path, content, dry_run=dry_run) - - # - # update flag in setup.py - # (not present when called from bdist_wheel, etc) - # - 
path = os.path.join(base_dir, "setup.py") - if os.path.exists(path): - content = _get_file(path) - content, count = re.subn('(?m)^stamp_build\s*=.*$', - 'stamp_build = False', content) - assert count == 1, "failed to update 'stamp_build' flag" - _replace_file(path, content, dry_run=dry_run) - - -def stamp_distutils_output(opts, version): - - # subclass buildpy to update version string in source - _build_py = get_command_class(opts, "build_py") - class build_py(_build_py): - def build_packages(self): - _build_py.build_packages(self) - stamp_source(self.build_lib, version, self.dry_run) - opts['cmdclass']['build_py'] = build_py - - # subclass sdist to do same thing - _sdist = get_command_class(opts, "sdist") - class sdist(_sdist): - def make_release_tree(self, base_dir, files): - _sdist.make_release_tree(self, base_dir, files) - stamp_source(base_dir, version, self.dry_run) - opts['cmdclass']['sdist'] = sdist - - -def as_bool(value): - return (value or "").lower() in "yes y true t 1".split() - - -def append_hg_revision(version): - - # call HG via subprocess - # NOTE: for py26 compat, using Popen() instead of check_output() - try: - proc = subprocess.Popen(["hg", "tip", "--template", "{date(date, '%Y%m%d%H%M%S')}+hg.{node|short}"], - stdout=subprocess.PIPE) - stamp, _ = proc.communicate() - if proc.returncode: - raise subprocess.CalledProcessError(1, []) - stamp = stamp.decode("ascii") - except (OSError, subprocess.CalledProcessError): - # fallback - just use build date - now = int(os.environ.get('SOURCE_DATE_EPOCH') or time.time()) - build_date = datetime.datetime.utcfromtimestamp(now) - stamp = build_date.strftime("%Y%m%d%H%M%S") - - # modify version - if version.endswith((".dev0", ".post0")): - version = version[:-1] + stamp - else: - version += ".post" + stamp - - return version - -def install_build_py_exclude(opts): - - _build_py = get_command_class(opts, "build_py") - - class build_py(_build_py): - - user_options = _build_py.user_options + [ - 
("exclude-packages=", None, - "exclude packages from builds"), - ] - - exclude_packages = None - - def finalize_options(self): - _build_py.finalize_options(self) - target = self.packages - for package in self.exclude_packages or []: - if package in target: - target.remove(package) - - opts['cmdclass']['build_py'] = build_py - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/apache.py b/libs_crutch/contrib/passlib/apache.py deleted file mode 100644 index 35f72e5..0000000 --- a/libs_crutch/contrib/passlib/apache.py +++ /dev/null @@ -1,1255 +0,0 @@ -"""passlib.apache - apache password support""" -# XXX: relocate this to passlib.ext.apache? -#============================================================================= -# imports -#============================================================================= -from __future__ import with_statement -# core -import logging; log = logging.getLogger(__name__) -import os -from warnings import warn -# site -# pkg -from passlib import exc, registry -from passlib.context import CryptContext -from passlib.exc import ExpectedStringError -from passlib.hash import htdigest -from passlib.utils import render_bytes, to_bytes, is_ascii_codec -from passlib.utils.decor import deprecated_method -from passlib.utils.compat import join_bytes, unicode, BytesIO, PY3 -# local -__all__ = [ - 'HtpasswdFile', - 'HtdigestFile', -] - -#============================================================================= -# constants & support -#============================================================================= -_UNSET = object() - -_BCOLON = b":" -_BHASH = b"#" - -# byte values that aren't allowed in fields. 
-_INVALID_FIELD_CHARS = b":\n\r\t\x00" - -#: _CommonFile._source token types -_SKIPPED = "skipped" -_RECORD = "record" - -#============================================================================= -# common helpers -#============================================================================= -class _CommonFile(object): - """common framework for HtpasswdFile & HtdigestFile""" - #=================================================================== - # instance attrs - #=================================================================== - - # charset encoding used by file (defaults to utf-8) - encoding = None - - # whether users() and other public methods should return unicode or bytes? - # (defaults to False under PY2, True under PY3) - return_unicode = None - - # if bound to local file, these will be set. - _path = None # local file path - _mtime = None # mtime when last loaded, or 0 - - # if true, automatically save to local file after changes are made. - autosave = False - - # dict mapping key -> value for all records in database. - # (e.g. user => hash for Htpasswd) - _records = None - - #: list of tokens for recreating original file contents when saving. if present, - #: will be sequence of (_SKIPPED, b"whitespace/comments") and (_RECORD, ) tuples. - _source = None - - #=================================================================== - # alt constuctors - #=================================================================== - @classmethod - def from_string(cls, data, **kwds): - """create new object from raw string. - - :type data: unicode or bytes - :arg data: - database to load, as single string. - - :param \*\*kwds: - all other keywords are the same as in the class constructor - """ - if 'path' in kwds: - raise TypeError("'path' not accepted by from_string()") - self = cls(**kwds) - self.load_string(data) - return self - - @classmethod - def from_path(cls, path, **kwds): - """create new object from file, without binding object to file. 
- - :type path: str - :arg path: - local filepath to load from - - :param \*\*kwds: - all other keywords are the same as in the class constructor - """ - self = cls(**kwds) - self.load(path) - return self - - #=================================================================== - # init - #=================================================================== - def __init__(self, path=None, new=False, autoload=True, autosave=False, - encoding="utf-8", return_unicode=PY3, - ): - # set encoding - if not encoding: - warn("``encoding=None`` is deprecated as of Passlib 1.6, " - "and will cause a ValueError in Passlib 1.8, " - "use ``return_unicode=False`` instead.", - DeprecationWarning, stacklevel=2) - encoding = "utf-8" - return_unicode = False - elif not is_ascii_codec(encoding): - # htpasswd/htdigest files assumes 1-byte chars, and use ":" separator, - # so only ascii-compatible encodings are allowed. - raise ValueError("encoding must be 7-bit ascii compatible") - self.encoding = encoding - - # set other attrs - self.return_unicode = return_unicode - self.autosave = autosave - self._path = path - self._mtime = 0 - - # init db - if not autoload: - warn("``autoload=False`` is deprecated as of Passlib 1.6, " - "and will be removed in Passlib 1.8, use ``new=True`` instead", - DeprecationWarning, stacklevel=2) - new = True - if path and not new: - self.load() - else: - self._records = {} - self._source = [] - - def __repr__(self): - tail = '' - if self.autosave: - tail += ' autosave=True' - if self._path: - tail += ' path=%r' % self._path - if self.encoding != "utf-8": - tail += ' encoding=%r' % self.encoding - return "<%s 0x%0x%s>" % (self.__class__.__name__, id(self), tail) - - # NOTE: ``path`` is a property so that ``_mtime`` is wiped when it's set. 
- - @property - def path(self): - return self._path - - @path.setter - def path(self, value): - if value != self._path: - self._mtime = 0 - self._path = value - - @property - def mtime(self): - """modify time when last loaded (if bound to a local file)""" - return self._mtime - - #=================================================================== - # loading - #=================================================================== - def load_if_changed(self): - """Reload from ``self.path`` only if file has changed since last load""" - if not self._path: - raise RuntimeError("%r is not bound to a local file" % self) - if self._mtime and self._mtime == os.path.getmtime(self._path): - return False - self.load() - return True - - def load(self, path=None, force=True): - """Load state from local file. - If no path is specified, attempts to load from ``self.path``. - - :type path: str - :arg path: local file to load from - - :type force: bool - :param force: - if ``force=False``, only load from ``self.path`` if file - has changed since last load. - - .. deprecated:: 1.6 - This keyword will be removed in Passlib 1.8; - Applications should use :meth:`load_if_changed` instead. - """ - if path is not None: - with open(path, "rb") as fh: - self._mtime = 0 - self._load_lines(fh) - elif not force: - warn("%(name)s.load(force=False) is deprecated as of Passlib 1.6," - "and will be removed in Passlib 1.8; " - "use %(name)s.load_if_changed() instead." 
% - dict(name=self.__class__.__name__), - DeprecationWarning, stacklevel=2) - return self.load_if_changed() - elif self._path: - with open(self._path, "rb") as fh: - self._mtime = os.path.getmtime(self._path) - self._load_lines(fh) - else: - raise RuntimeError("%s().path is not set, an explicit path is required" % - self.__class__.__name__) - return True - - def load_string(self, data): - """Load state from unicode or bytes string, replacing current state""" - data = to_bytes(data, self.encoding, "data") - self._mtime = 0 - self._load_lines(BytesIO(data)) - - def _load_lines(self, lines): - """load from sequence of lists""" - parse = self._parse_record - records = {} - source = [] - skipped = b'' - for idx, line in enumerate(lines): - # NOTE: per htpasswd source (https://github.com/apache/httpd/blob/trunk/support/htpasswd.c), - # lines with only whitespace, or with "#" as first non-whitespace char, - # are left alone / ignored. - tmp = line.lstrip() - if not tmp or tmp.startswith(_BHASH): - skipped += line - continue - - # parse valid line - key, value = parse(line, idx+1) - - # NOTE: if multiple entries for a key, we use the first one, - # which seems to match htpasswd source - if key in records: - log.warning("username occurs multiple times in source file: %r" % key) - skipped += line - continue - - # flush buffer of skipped whitespace lines - if skipped: - source.append((_SKIPPED, skipped)) - skipped = b'' - - # store new user line - records[key] = value - source.append((_RECORD, key)) - - # don't bother preserving trailing whitespace, but do preserve trailing comments - if skipped.rstrip(): - source.append((_SKIPPED, skipped)) - - # NOTE: not replacing ._records until parsing succeeds, so loading is atomic. 
- self._records = records - self._source = source - - def _parse_record(self, record, lineno): # pragma: no cover - abstract method - """parse line of file into (key, value) pair""" - raise NotImplementedError("should be implemented in subclass") - - def _set_record(self, key, value): - """ - helper for setting record which takes care of inserting source line if needed; - - :returns: - bool if key already present - """ - records = self._records - existing = (key in records) - records[key] = value - if not existing: - self._source.append((_RECORD, key)) - return existing - - #=================================================================== - # saving - #=================================================================== - def _autosave(self): - """subclass helper to call save() after any changes""" - if self.autosave and self._path: - self.save() - - def save(self, path=None): - """Save current state to file. - If no path is specified, attempts to save to ``self.path``. - """ - if path is not None: - with open(path, "wb") as fh: - fh.writelines(self._iter_lines()) - elif self._path: - self.save(self._path) - self._mtime = os.path.getmtime(self._path) - else: - raise RuntimeError("%s().path is not set, cannot autosave" % - self.__class__.__name__) - - def to_string(self): - """Export current state as a string of bytes""" - return join_bytes(self._iter_lines()) - - # def clean(self): - # """ - # discard any comments or whitespace that were being preserved from the source file, - # and re-sort keys in alphabetical order - # """ - # self._source = [(_RECORD, key) for key in sorted(self._records)] - # self._autosave() - - def _iter_lines(self): - """iterator yielding lines of database""" - # NOTE: this relies on being an OrderedDict so that it outputs - # records in a deterministic order. 
- records = self._records - if __debug__: - pending = set(records) - for action, content in self._source: - if action == _SKIPPED: - # 'content' is whitespace/comments to write - yield content - else: - assert action == _RECORD - # 'content' is record key - if content not in records: - # record was deleted - # NOTE: doing it lazily like this so deleting & re-adding user - # preserves their original location in the file. - continue - yield self._render_record(content, records[content]) - if __debug__: - pending.remove(content) - if __debug__: - # sanity check that we actually wrote all the records - # (otherwise _source & _records are somehow out of sync) - assert not pending, "failed to write all records: missing=%r" % (pending,) - - def _render_record(self, key, value): # pragma: no cover - abstract method - """given key/value pair, encode as line of file""" - raise NotImplementedError("should be implemented in subclass") - - #=================================================================== - # field encoding - #=================================================================== - def _encode_user(self, user): - """user-specific wrapper for _encode_field()""" - return self._encode_field(user, "user") - - def _encode_realm(self, realm): # pragma: no cover - abstract method - """realm-specific wrapper for _encode_field()""" - return self._encode_field(realm, "realm") - - def _encode_field(self, value, param="field"): - """convert field to internal representation. - - internal representation is always bytes. byte strings are left as-is, - unicode strings encoding using file's default encoding (or ``utf-8`` - if no encoding has been specified). - - :raises UnicodeEncodeError: - if unicode value cannot be encoded using default encoding. - - :raises ValueError: - if resulting byte string contains a forbidden character, - or is too long (>255 bytes). 
- - :returns: - encoded identifer as bytes - """ - if isinstance(value, unicode): - value = value.encode(self.encoding) - elif not isinstance(value, bytes): - raise ExpectedStringError(value, param) - if len(value) > 255: - raise ValueError("%s must be at most 255 characters: %r" % - (param, value)) - if any(c in _INVALID_FIELD_CHARS for c in value): - raise ValueError("%s contains invalid characters: %r" % - (param, value,)) - return value - - def _decode_field(self, value): - """decode field from internal representation to format - returns by users() method, etc. - - :raises UnicodeDecodeError: - if unicode value cannot be decoded using default encoding. - (usually indicates wrong encoding set for file). - - :returns: - field as unicode or bytes, as appropriate. - """ - assert isinstance(value, bytes), "expected value to be bytes" - if self.return_unicode: - return value.decode(self.encoding) - else: - return value - - # FIXME: htpasswd doc says passwords limited to 255 chars under Windows & MPE, - # and that longer ones are truncated. this may be side-effect of those - # platforms supporting the 'plaintext' scheme. these classes don't currently - # check for this. - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# htpasswd context -# -# This section sets up a CryptContexts to mimic what schemes Apache -# (and the htpasswd tool) should support on the current system. -# -# Apache has long-time supported some basic builtin schemes (listed below), -# as well as the host's crypt() method -- though it's limited to being able -# to *verify* any scheme using that method, but can only generate "des_crypt" hashes. -# -# Apache 2.4 added builtin bcrypt support (even for platforms w/o native support). -# c.f. http://httpd.apache.org/docs/2.4/programs/htpasswd.html vs the 2.2 docs. 
-#============================================================================= - -#: set of default schemes that (if chosen) should be using bcrypt, -#: but can't due to lack of bcrypt. -_warn_no_bcrypt = set() - -def _init_default_schemes(): - - #: pick strongest one for host - host_best = None - for name in ["bcrypt", "sha256_crypt"]: - if registry.has_os_crypt_support(name): - host_best = name - break - - # check if we have a bcrypt backend -- otherwise issue warning - # XXX: would like to not spam this unless the user *requests* apache 24 - bcrypt = "bcrypt" if registry.has_backend("bcrypt") else None - _warn_no_bcrypt.clear() - if not bcrypt: - _warn_no_bcrypt.update(["portable_apache_24", "host_apache_24", - "linux_apache_24", "portable", "host"]) - - defaults = dict( - # strongest hash builtin to specific apache version - portable_apache_24=bcrypt or "apr_md5_crypt", - portable_apache_22="apr_md5_crypt", - - # strongest hash across current host & specific apache version - host_apache_24=bcrypt or host_best or "apr_md5_crypt", - host_apache_22=host_best or "apr_md5_crypt", - - # strongest hash on a linux host - linux_apache_24=bcrypt or "sha256_crypt", - linux_apache_22="sha256_crypt", - ) - - # set latest-apache version aliases - # XXX: could check for apache install, and pick correct host 22/24 default? - # could reuse _detect_htpasswd() helper in UTs - defaults.update( - portable=defaults['portable_apache_24'], - host=defaults['host_apache_24'], - ) - return defaults - -#: dict mapping default alias -> appropriate scheme -htpasswd_defaults = _init_default_schemes() - -def _init_htpasswd_context(): - - # start with schemes built into apache - schemes = [ - # builtin support added in apache 2.4 - # (https://bz.apache.org/bugzilla/show_bug.cgi?id=49288) - "bcrypt", - - # support not "builtin" to apache, instead it requires support through host's crypt(). - # adding them here to allow editing htpasswd under windows and then deploying under unix. 
- "sha256_crypt", - "sha512_crypt", - "des_crypt", - - # apache default as of 2.2.18, and still default in 2.4 - "apr_md5_crypt", - - # NOTE: apache says ONLY intended for transitioning htpasswd <-> ldap - "ldap_sha1", - - # NOTE: apache says ONLY supported on Windows, Netware, TPF - "plaintext" - ] - - # apache can verify anything supported by the native crypt(), - # though htpasswd tool can only generate a limited set of hashes. - # (this list may overlap w/ builtin apache schemes) - schemes.extend(registry.get_supported_os_crypt_schemes()) - - # hack to remove dups and sort into preferred order - preferred = schemes[:3] + ["apr_md5_crypt"] + schemes - schemes = sorted(set(schemes), key=preferred.index) - - # create context object - return CryptContext( - schemes=schemes, - - # NOTE: default will change to "portable" in passlib 2.0 - default=htpasswd_defaults['portable_apache_22'], - - # NOTE: bcrypt "2y" is required, "2b" isn't recognized by libapr (issue 95) - bcrypt__ident="2y", - ) - -#: CryptContext configured to match htpasswd -htpasswd_context = _init_htpasswd_context() - -#============================================================================= -# htpasswd editing -#============================================================================= - -class HtpasswdFile(_CommonFile): - """class for reading & writing Htpasswd files. - - The class constructor accepts the following arguments: - - :type path: filepath - :param path: - - Specifies path to htpasswd file, use to implicitly load from and save to. - - This class has two modes of operation: - - 1. It can be "bound" to a local file by passing a ``path`` to the class - constructor. In this case it will load the contents of the file when - created, and the :meth:`load` and :meth:`save` methods will automatically - load from and save to that file if they are called without arguments. - - 2. 
Alternately, it can exist as an independant object, in which case - :meth:`load` and :meth:`save` will require an explicit path to be - provided whenever they are called. As well, ``autosave`` behavior - will not be available. - - This feature is new in Passlib 1.6, and is the default if no - ``path`` value is provided to the constructor. - - This is also exposed as a readonly instance attribute. - - :type new: bool - :param new: - - Normally, if *path* is specified, :class:`HtpasswdFile` will - immediately load the contents of the file. However, when creating - a new htpasswd file, applications can set ``new=True`` so that - the existing file (if any) will not be loaded. - - .. versionadded:: 1.6 - This feature was previously enabled by setting ``autoload=False``. - That alias has been deprecated, and will be removed in Passlib 1.8 - - :type autosave: bool - :param autosave: - - Normally, any changes made to an :class:`HtpasswdFile` instance - will not be saved until :meth:`save` is explicitly called. However, - if ``autosave=True`` is specified, any changes made will be - saved to disk immediately (assuming *path* has been set). - - This is also exposed as a writeable instance attribute. - - :type encoding: str - :param encoding: - - Optionally specify character encoding used to read/write file - and hash passwords. Defaults to ``utf-8``, though ``latin-1`` - is the only other commonly encountered encoding. - - This is also exposed as a readonly instance attribute. - - :type default_scheme: str - :param default_scheme: - Optionally specify default scheme to use when encoding new passwords. - - This can be any of the schemes with builtin Apache support, - OR natively supported by the host OS's :func:`crypt.crypt` function. - - * Builtin schemes include ``"bcrypt"`` (apache 2.4+), ``"apr_md5_crypt"`, - and ``"des_crypt"``. - - * Schemes commonly supported by Unix hosts - include ``"bcrypt"``, ``"sha256_crypt"``, and ``"des_crypt"``. 
- - In order to not have to sort out what you should use, - passlib offers a number of aliases, that will resolve - to the most appropriate scheme based on your needs: - - * ``"portable"``, ``"portable_apache_24"`` -- pick scheme that's portable across hosts - running apache >= 2.4. **This will be the default as of Passlib 2.0**. - - * ``"portable_apache_22"`` -- pick scheme that's portable across hosts - running apache >= 2.4. **This is the default up to Passlib 1.9**. - - * ``"host"``, ``"host_apache_24"`` -- pick strongest scheme supported by - apache >= 2.4 and/or host OS. - - * ``"host_apache_22"`` -- pick strongest scheme supported by - apache >= 2.2 and/or host OS. - - .. versionadded:: 1.6 - This keyword was previously named ``default``. That alias - has been deprecated, and will be removed in Passlib 1.8. - - .. versionchanged:: 1.6.3 - - Added support for ``"bcrypt"``, ``"sha256_crypt"``, and ``"portable"`` alias. - - .. versionchanged:: 1.7 - - Added apache 2.4 semantics, and additional aliases. - - :type context: :class:`~passlib.context.CryptContext` - :param context: - :class:`!CryptContext` instance used to create - and verify the hashes found in the htpasswd file. - The default value is a pre-built context which supports all - of the hashes officially allowed in an htpasswd file. - - This is also exposed as a readonly instance attribute. - - .. warning:: - - This option may be used to add support for non-standard hash - formats to an htpasswd file. However, the resulting file - will probably not be usable by another application, - and particularly not by Apache. - - :param autoload: - Set to ``False`` to prevent the constructor from automatically - loaded the file from disk. - - .. deprecated:: 1.6 - This has been replaced by the *new* keyword. - Instead of setting ``autoload=False``, you should use - ``new=True``. Support for this keyword will be removed - in Passlib 1.8. 
- - :param default: - Change the default algorithm used to hash new passwords. - - .. deprecated:: 1.6 - This has been renamed to *default_scheme* for clarity. - Support for this alias will be removed in Passlib 1.8. - - Loading & Saving - ================ - .. automethod:: load - .. automethod:: load_if_changed - .. automethod:: load_string - .. automethod:: save - .. automethod:: to_string - - Inspection - ================ - .. automethod:: users - .. automethod:: check_password - .. automethod:: get_hash - - Modification - ================ - .. automethod:: set_password - .. automethod:: delete - - Alternate Constructors - ====================== - .. automethod:: from_string - - Attributes - ========== - .. attribute:: path - - Path to local file that will be used as the default - for all :meth:`load` and :meth:`save` operations. - May be written to, initialized by the *path* constructor keyword. - - .. attribute:: autosave - - Writeable flag indicating whether changes will be automatically - written to *path*. - - Errors - ====== - :raises ValueError: - All of the methods in this class will raise a :exc:`ValueError` if - any user name contains a forbidden character (one of ``:\\r\\n\\t\\x00``), - or is longer than 255 characters. 
- """ - #=================================================================== - # instance attrs - #=================================================================== - - # NOTE: _records map stores for the key, and for the value, - # both in bytes which use self.encoding - - #=================================================================== - # init & serialization - #=================================================================== - def __init__(self, path=None, default_scheme=None, context=htpasswd_context, - **kwds): - if 'default' in kwds: - warn("``default`` is deprecated as of Passlib 1.6, " - "and will be removed in Passlib 1.8, it has been renamed " - "to ``default_scheem``.", - DeprecationWarning, stacklevel=2) - default_scheme = kwds.pop("default") - if default_scheme: - if default_scheme in _warn_no_bcrypt: - warn("HtpasswdFile: no bcrypt backends available, " - "using fallback for default scheme %r" % default_scheme, - exc.PasslibSecurityWarning) - default_scheme = htpasswd_defaults.get(default_scheme, default_scheme) - context = context.copy(default=default_scheme) - self.context = context - super(HtpasswdFile, self).__init__(path, **kwds) - - def _parse_record(self, record, lineno): - # NOTE: should return (user, hash) tuple - result = record.rstrip().split(_BCOLON) - if len(result) != 2: - raise ValueError("malformed htpasswd file (error reading line %d)" - % lineno) - return result - - def _render_record(self, user, hash): - return render_bytes("%s:%s\n", user, hash) - - #=================================================================== - # public methods - #=================================================================== - - def users(self): - """ - Return list of all users in database - """ - return [self._decode_field(user) for user in self._records] - - ##def has_user(self, user): - ## "check whether entry is present for user" - ## return self._encode_user(user) in self._records - - ##def rename(self, old, new): - ## """rename user 
account""" - ## old = self._encode_user(old) - ## new = self._encode_user(new) - ## hash = self._records.pop(old) - ## self._records[new] = hash - ## self._autosave() - - def set_password(self, user, password): - """Set password for user; adds user if needed. - - :returns: - * ``True`` if existing user was updated. - * ``False`` if user account was added. - - .. versionchanged:: 1.6 - This method was previously called ``update``, it was renamed - to prevent ambiguity with the dictionary method. - The old alias is deprecated, and will be removed in Passlib 1.8. - """ - hash = self.context.hash(password) - return self.set_hash(user, hash) - - @deprecated_method(deprecated="1.6", removed="1.8", - replacement="set_password") - def update(self, user, password): - """set password for user""" - return self.set_password(user, password) - - def get_hash(self, user): - """Return hash stored for user, or ``None`` if user not found. - - .. versionchanged:: 1.6 - This method was previously named ``find``, it was renamed - for clarity. The old name is deprecated, and will be removed - in Passlib 1.8. - """ - try: - return self._records[self._encode_user(user)] - except KeyError: - return None - - def set_hash(self, user, hash): - """ - semi-private helper which allows writing a hash directly; - adds user if needed. - - .. warning:: - does not (currently) do any validation of the hash string - - .. versionadded:: 1.7 - """ - # assert self.context.identify(hash), "unrecognized hash format" - if PY3 and isinstance(hash, str): - hash = hash.encode(self.encoding) - user = self._encode_user(user) - existing = self._set_record(user, hash) - self._autosave() - return existing - - @deprecated_method(deprecated="1.6", removed="1.8", - replacement="get_hash") - def find(self, user): - """return hash for user""" - return self.get_hash(user) - - # XXX: rename to something more explicit, like delete_user()? - def delete(self, user): - """Delete user's entry. 
- - :returns: - * ``True`` if user deleted. - * ``False`` if user not found. - """ - try: - del self._records[self._encode_user(user)] - except KeyError: - return False - self._autosave() - return True - - def check_password(self, user, password): - """ - Verify password for specified user. - If algorithm marked as deprecated by CryptContext, will automatically be re-hashed. - - :returns: - * ``None`` if user not found. - * ``False`` if user found, but password does not match. - * ``True`` if user found and password matches. - - .. versionchanged:: 1.6 - This method was previously called ``verify``, it was renamed - to prevent ambiguity with the :class:`!CryptContext` method. - The old alias is deprecated, and will be removed in Passlib 1.8. - """ - user = self._encode_user(user) - hash = self._records.get(user) - if hash is None: - return None - if isinstance(password, unicode): - # NOTE: encoding password to match file, making the assumption - # that server will use same encoding to hash the password. - password = password.encode(self.encoding) - ok, new_hash = self.context.verify_and_update(password, hash) - if ok and new_hash is not None: - # rehash user's password if old hash was deprecated - assert user in self._records # otherwise would have to use ._set_record() - self._records[user] = new_hash - self._autosave() - return ok - - @deprecated_method(deprecated="1.6", removed="1.8", - replacement="check_password") - def verify(self, user, password): - """verify password for user""" - return self.check_password(user, password) - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# htdigest editing -#============================================================================= -class HtdigestFile(_CommonFile): - """class for reading & writing Htdigest files. 
- - The class constructor accepts the following arguments: - - :type path: filepath - :param path: - - Specifies path to htdigest file, use to implicitly load from and save to. - - This class has two modes of operation: - - 1. It can be "bound" to a local file by passing a ``path`` to the class - constructor. In this case it will load the contents of the file when - created, and the :meth:`load` and :meth:`save` methods will automatically - load from and save to that file if they are called without arguments. - - 2. Alternately, it can exist as an independant object, in which case - :meth:`load` and :meth:`save` will require an explicit path to be - provided whenever they are called. As well, ``autosave`` behavior - will not be available. - - This feature is new in Passlib 1.6, and is the default if no - ``path`` value is provided to the constructor. - - This is also exposed as a readonly instance attribute. - - :type default_realm: str - :param default_realm: - - If ``default_realm`` is set, all the :class:`HtdigestFile` - methods that require a realm will use this value if one is not - provided explicitly. If unset, they will raise an error stating - that an explicit realm is required. - - This is also exposed as a writeable instance attribute. - - .. versionadded:: 1.6 - - :type new: bool - :param new: - - Normally, if *path* is specified, :class:`HtdigestFile` will - immediately load the contents of the file. However, when creating - a new htpasswd file, applications can set ``new=True`` so that - the existing file (if any) will not be loaded. - - .. versionadded:: 1.6 - This feature was previously enabled by setting ``autoload=False``. - That alias has been deprecated, and will be removed in Passlib 1.8 - - :type autosave: bool - :param autosave: - - Normally, any changes made to an :class:`HtdigestFile` instance - will not be saved until :meth:`save` is explicitly called. 
However, - if ``autosave=True`` is specified, any changes made will be - saved to disk immediately (assuming *path* has been set). - - This is also exposed as a writeable instance attribute. - - :type encoding: str - :param encoding: - - Optionally specify character encoding used to read/write file - and hash passwords. Defaults to ``utf-8``, though ``latin-1`` - is the only other commonly encountered encoding. - - This is also exposed as a readonly instance attribute. - - :param autoload: - Set to ``False`` to prevent the constructor from automatically - loaded the file from disk. - - .. deprecated:: 1.6 - This has been replaced by the *new* keyword. - Instead of setting ``autoload=False``, you should use - ``new=True``. Support for this keyword will be removed - in Passlib 1.8. - - Loading & Saving - ================ - .. automethod:: load - .. automethod:: load_if_changed - .. automethod:: load_string - .. automethod:: save - .. automethod:: to_string - - Inspection - ========== - .. automethod:: realms - .. automethod:: users - .. automethod:: check_password(user[, realm], password) - .. automethod:: get_hash - - Modification - ============ - .. automethod:: set_password(user[, realm], password) - .. automethod:: delete - .. automethod:: delete_realm - - Alternate Constructors - ====================== - .. automethod:: from_string - - Attributes - ========== - .. attribute:: default_realm - - The default realm that will be used if one is not provided - to methods that require it. By default this is ``None``, - in which case an explicit realm must be provided for every - method call. Can be written to. - - .. attribute:: path - - Path to local file that will be used as the default - for all :meth:`load` and :meth:`save` operations. - May be written to, initialized by the *path* constructor keyword. - - .. attribute:: autosave - - Writeable flag indicating whether changes will be automatically - written to *path*. 
- - Errors - ====== - :raises ValueError: - All of the methods in this class will raise a :exc:`ValueError` if - any user name or realm contains a forbidden character (one of ``:\\r\\n\\t\\x00``), - or is longer than 255 characters. - """ - #=================================================================== - # instance attrs - #=================================================================== - - # NOTE: _records map stores (,) for the key, - # and as the value, all as bytes. - - # NOTE: unlike htpasswd, this class doesn't use a CryptContext, - # as only one hash format is supported: htdigest. - - # optionally specify default realm that will be used if none - # is provided to a method call. otherwise realm is always required. - default_realm = None - - #=================================================================== - # init & serialization - #=================================================================== - def __init__(self, path=None, default_realm=None, **kwds): - self.default_realm = default_realm - super(HtdigestFile, self).__init__(path, **kwds) - - def _parse_record(self, record, lineno): - result = record.rstrip().split(_BCOLON) - if len(result) != 3: - raise ValueError("malformed htdigest file (error reading line %d)" - % lineno) - user, realm, hash = result - return (user, realm), hash - - def _render_record(self, key, hash): - user, realm = key - return render_bytes("%s:%s:%s\n", user, realm, hash) - - def _require_realm(self, realm): - if realm is None: - realm = self.default_realm - if realm is None: - raise TypeError("you must specify a realm explicitly, " - "or set the default_realm attribute") - return realm - - def _encode_realm(self, realm): - realm = self._require_realm(realm) - return self._encode_field(realm, "realm") - - def _encode_key(self, user, realm): - return self._encode_user(user), self._encode_realm(realm) - - #=================================================================== - # public methods - 
#=================================================================== - - def realms(self): - """Return list of all realms in database""" - realms = set(key[1] for key in self._records) - return [self._decode_field(realm) for realm in realms] - - def users(self, realm=None): - """Return list of all users in specified realm. - - * uses ``self.default_realm`` if no realm explicitly provided. - * returns empty list if realm not found. - """ - realm = self._encode_realm(realm) - return [self._decode_field(key[0]) for key in self._records - if key[1] == realm] - - ##def has_user(self, user, realm=None): - ## "check if user+realm combination exists" - ## return self._encode_key(user,realm) in self._records - - ##def rename_realm(self, old, new): - ## """rename all accounts in realm""" - ## old = self._encode_realm(old) - ## new = self._encode_realm(new) - ## keys = [key for key in self._records if key[1] == old] - ## for key in keys: - ## hash = self._records.pop(key) - ## self._set_record((key[0], new), hash) - ## self._autosave() - ## return len(keys) - - ##def rename(self, old, new, realm=None): - ## """rename user account""" - ## old = self._encode_user(old) - ## new = self._encode_user(new) - ## realm = self._encode_realm(realm) - ## hash = self._records.pop((old,realm)) - ## self._set_record((new, realm), hash) - ## self._autosave() - - def set_password(self, user, realm=None, password=_UNSET): - """Set password for user; adds user & realm if needed. - - If ``self.default_realm`` has been set, this may be called - with the syntax ``set_password(user, password)``, - otherwise it must be called with all three arguments: - ``set_password(user, realm, password)``. - - :returns: - * ``True`` if existing user was updated - * ``False`` if user account added. 
- """ - if password is _UNSET: - # called w/ two args - (user, password), use default realm - realm, password = None, realm - realm = self._require_realm(realm) - hash = htdigest.hash(password, user, realm, encoding=self.encoding) - return self.set_hash(user, realm, hash) - - @deprecated_method(deprecated="1.6", removed="1.8", - replacement="set_password") - def update(self, user, realm, password): - """set password for user""" - return self.set_password(user, realm, password) - - def get_hash(self, user, realm=None): - """Return :class:`~passlib.hash.htdigest` hash stored for user. - - * uses ``self.default_realm`` if no realm explicitly provided. - * returns ``None`` if user or realm not found. - - .. versionchanged:: 1.6 - This method was previously named ``find``, it was renamed - for clarity. The old name is deprecated, and will be removed - in Passlib 1.8. - """ - key = self._encode_key(user, realm) - hash = self._records.get(key) - if hash is None: - return None - if PY3: - hash = hash.decode(self.encoding) - return hash - - def set_hash(self, user, realm=None, hash=_UNSET): - """ - semi-private helper which allows writing a hash directly; - adds user & realm if needed. - - If ``self.default_realm`` has been set, this may be called - with the syntax ``set_hash(user, hash)``, - otherwise it must be called with all three arguments: - ``set_hash(user, realm, hash)``. - - .. warning:: - does not (currently) do any validation of the hash string - - .. 
versionadded:: 1.7 - """ - if hash is _UNSET: - # called w/ two args - (user, hash), use default realm - realm, hash = None, realm - # assert htdigest.identify(hash), "unrecognized hash format" - if PY3 and isinstance(hash, str): - hash = hash.encode(self.encoding) - key = self._encode_key(user, realm) - existing = self._set_record(key, hash) - self._autosave() - return existing - - @deprecated_method(deprecated="1.6", removed="1.8", - replacement="get_hash") - def find(self, user, realm): - """return hash for user""" - return self.get_hash(user, realm) - - # XXX: rename to something more explicit, like delete_user()? - def delete(self, user, realm=None): - """Delete user's entry for specified realm. - - if realm is not specified, uses ``self.default_realm``. - - :returns: - * ``True`` if user deleted, - * ``False`` if user not found in realm. - """ - key = self._encode_key(user, realm) - try: - del self._records[key] - except KeyError: - return False - self._autosave() - return True - - def delete_realm(self, realm): - """Delete all users for specified realm. - - if realm is not specified, uses ``self.default_realm``. - - :returns: number of users deleted (0 if realm not found) - """ - realm = self._encode_realm(realm) - records = self._records - keys = [key for key in records if key[1] == realm] - for key in keys: - del records[key] - self._autosave() - return len(keys) - - def check_password(self, user, realm=None, password=_UNSET): - """Verify password for specified user + realm. - - If ``self.default_realm`` has been set, this may be called - with the syntax ``check_password(user, password)``, - otherwise it must be called with all three arguments: - ``check_password(user, realm, password)``. - - :returns: - * ``None`` if user or realm not found. - * ``False`` if user found, but password does not match. - * ``True`` if user found and password matches. - - .. 
versionchanged:: 1.6 - This method was previously called ``verify``, it was renamed - to prevent ambiguity with the :class:`!CryptContext` method. - The old alias is deprecated, and will be removed in Passlib 1.8. - """ - if password is _UNSET: - # called w/ two args - (user, password), use default realm - realm, password = None, realm - user = self._encode_user(user) - realm = self._encode_realm(realm) - hash = self._records.get((user,realm)) - if hash is None: - return None - return htdigest.verify(password, hash, user, realm, - encoding=self.encoding) - - @deprecated_method(deprecated="1.6", removed="1.8", - replacement="check_password") - def verify(self, user, realm, password): - """verify password for user""" - return self.check_password(user, realm, password) - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/apps.py b/libs_crutch/contrib/passlib/apps.py deleted file mode 100644 index 7c4be06..0000000 --- a/libs_crutch/contrib/passlib/apps.py +++ /dev/null @@ -1,197 +0,0 @@ -"""passlib.apps""" -#============================================================================= -# imports -#============================================================================= -# core -import logging; log = logging.getLogger(__name__) -from itertools import chain -# site -# pkg -from passlib import hash -from passlib.context import LazyCryptContext -from passlib.utils import sys_bits -# local -__all__ = [ - 'custom_app_context', - 'django_context', - 'ldap_context', 'ldap_nocrypt_context', - 'mysql_context', 'mysql4_context', 'mysql3_context', - 'phpass_context', - 'phpbb3_context', - 'postgres_context', -] - 
-#============================================================================= -# master containing all identifiable hashes -#============================================================================= -def _load_master_config(): - from passlib.registry import list_crypt_handlers - - # get master list - schemes = list_crypt_handlers() - - # exclude the ones we know have ambiguous or greedy identify() methods. - excluded = [ - # frequently confused for eachother - 'bigcrypt', - 'crypt16', - - # no good identifiers - 'cisco_pix', - 'cisco_type7', - 'htdigest', - 'mysql323', - 'oracle10', - - # all have same size - 'lmhash', - 'msdcc', - 'msdcc2', - 'nthash', - - # plaintext handlers - 'plaintext', - 'ldap_plaintext', - - # disabled handlers - 'django_disabled', - 'unix_disabled', - 'unix_fallback', - ] - for name in excluded: - schemes.remove(name) - - # return config - return dict(schemes=schemes, default="sha256_crypt") -master_context = LazyCryptContext(onload=_load_master_config) - -#============================================================================= -# for quickly bootstrapping new custom applications -#============================================================================= -custom_app_context = LazyCryptContext( - # choose some reasonbly strong schemes - schemes=["sha512_crypt", "sha256_crypt"], - - # set some useful global options - default="sha256_crypt" if sys_bits < 64 else "sha512_crypt", - - # set a good starting point for rounds selection - sha512_crypt__min_rounds = 535000, - sha256_crypt__min_rounds = 535000, - - # if the admin user category is selected, make a much stronger hash, - admin__sha512_crypt__min_rounds = 1024000, - admin__sha256_crypt__min_rounds = 1024000, - ) - -#============================================================================= -# django -#============================================================================= -_django10_schemes = [ - "django_salted_sha1", "django_salted_md5", "django_des_crypt", - 
"hex_md5", "django_disabled", -] - -django10_context = LazyCryptContext( - schemes=_django10_schemes, - default="django_salted_sha1", - deprecated=["hex_md5"], -) - -_django14_schemes = ["django_pbkdf2_sha256", "django_pbkdf2_sha1", - "django_bcrypt"] + _django10_schemes -django14_context = LazyCryptContext( - schemes=_django14_schemes, - deprecated=_django10_schemes, -) - -_django16_schemes = _django14_schemes[:] -_django16_schemes.insert(1, "django_bcrypt_sha256") -django16_context = LazyCryptContext( - schemes=_django16_schemes, - deprecated=_django10_schemes, -) - -django110_context = LazyCryptContext( - schemes=["django_pbkdf2_sha256", "django_pbkdf2_sha1", - "django_argon2", "django_bcrypt", "django_bcrypt_sha256", - "django_disabled"], -) - -# this will always point to latest version -django_context = django110_context - -#============================================================================= -# ldap -#============================================================================= -std_ldap_schemes = ["ldap_salted_sha1", "ldap_salted_md5", - "ldap_sha1", "ldap_md5", - "ldap_plaintext" ] - -# create context with all std ldap schemes EXCEPT crypt -ldap_nocrypt_context = LazyCryptContext(std_ldap_schemes) - -# create context with all possible std ldap + ldap crypt schemes -def _iter_ldap_crypt_schemes(): - from passlib.utils import unix_crypt_schemes - return ('ldap_' + name for name in unix_crypt_schemes) - -def _iter_ldap_schemes(): - """helper which iterates over supported std ldap schemes""" - return chain(std_ldap_schemes, _iter_ldap_crypt_schemes()) -ldap_context = LazyCryptContext(_iter_ldap_schemes()) - -### create context with all std ldap schemes + crypt schemes for localhost -##def _iter_host_ldap_schemes(): -## "helper which iterates over supported std ldap schemes" -## from passlib.handlers.ldap_digests import get_host_ldap_crypt_schemes -## return chain(std_ldap_schemes, get_host_ldap_crypt_schemes()) -##ldap_host_context = 
LazyCryptContext(_iter_host_ldap_schemes()) - -#============================================================================= -# mysql -#============================================================================= -mysql3_context = LazyCryptContext(["mysql323"]) -mysql4_context = LazyCryptContext(["mysql41", "mysql323"], deprecated="mysql323") -mysql_context = mysql4_context # tracks latest mysql version supported - -#============================================================================= -# postgres -#============================================================================= -postgres_context = LazyCryptContext(["postgres_md5"]) - -#============================================================================= -# phpass & variants -#============================================================================= -def _create_phpass_policy(**kwds): - """helper to choose default alg based on bcrypt availability""" - kwds['default'] = 'bcrypt' if hash.bcrypt.has_backend() else 'phpass' - return kwds - -phpass_context = LazyCryptContext( - schemes=["bcrypt", "phpass", "bsdi_crypt"], - onload=_create_phpass_policy, - ) - -phpbb3_context = LazyCryptContext(["phpass"], phpass__ident="H") - -# TODO: support the drupal phpass variants (see phpass homepage) - -#============================================================================= -# roundup -#============================================================================= - -_std_roundup_schemes = [ "ldap_hex_sha1", "ldap_hex_md5", "ldap_des_crypt", "roundup_plaintext" ] -roundup10_context = LazyCryptContext(_std_roundup_schemes) - -# NOTE: 'roundup15' really applies to roundup 1.4.17+ -roundup_context = roundup15_context = LazyCryptContext( - schemes=_std_roundup_schemes + [ "ldap_pbkdf2_sha1" ], - deprecated=_std_roundup_schemes, - default = "ldap_pbkdf2_sha1", - ldap_pbkdf2_sha1__default_rounds = 10000, - ) - -#============================================================================= -# eof 
-#============================================================================= diff --git a/libs_crutch/contrib/passlib/context.py b/libs_crutch/contrib/passlib/context.py deleted file mode 100644 index fa700f7..0000000 --- a/libs_crutch/contrib/passlib/context.py +++ /dev/null @@ -1,2632 +0,0 @@ -"""passlib.context - CryptContext implementation""" -#============================================================================= -# imports -#============================================================================= -from __future__ import with_statement -# core -import re -import logging; log = logging.getLogger(__name__) -import threading -import time -from warnings import warn -# site -# pkg -from passlib.exc import ExpectedStringError, ExpectedTypeError, PasslibConfigWarning -from passlib.registry import get_crypt_handler, _validate_handler_name -from passlib.utils import (handlers as uh, to_bytes, - to_unicode, splitcomma, - as_bool, timer, rng, getrandstr, - ) -from passlib.utils.binary import BASE64_CHARS -from passlib.utils.compat import (iteritems, num_types, irange, - PY2, PY3, unicode, SafeConfigParser, - NativeStringIO, BytesIO, - unicode_or_bytes_types, native_string_types, - ) -from passlib.utils.decor import deprecated_method, memoized_property -# local -__all__ = [ - 'CryptContext', - 'LazyCryptContext', - 'CryptPolicy', -] - -#============================================================================= -# support -#============================================================================= - -# private object to detect unset params -_UNSET = object() - -def _coerce_vary_rounds(value): - """parse vary_rounds string to percent as [0,1) float, or integer""" - if value.endswith("%"): - # XXX: deprecate this in favor of raw float? 
- return float(value.rstrip("%"))*.01 - try: - return int(value) - except ValueError: - return float(value) - -# set of options which aren't allowed to be set via policy -_forbidden_scheme_options = set(["salt"]) - # 'salt' - not allowed since a fixed salt would defeat the purpose. - -# dict containing funcs used to coerce strings to correct type for scheme option keys. -# NOTE: this isn't really needed any longer, since Handler.using() handles the actual parsing. -# keeping this around for now, though, since it makes context.to_dict() output cleaner. -_coerce_scheme_options = dict( - min_rounds=int, - max_rounds=int, - default_rounds=int, - vary_rounds=_coerce_vary_rounds, - salt_size=int, -) - -def _is_handler_registered(handler): - """detect if handler is registered or a custom handler""" - return get_crypt_handler(handler.name, None) is handler - -@staticmethod -def _always_needs_update(hash, secret=None): - """ - dummy function patched into handler.needs_update() by _CryptConfig - when hash alg has been deprecated for context. - """ - return True - -#: list of keys allowed under wildcard "all" scheme w/o a security warning. -_global_settings = set(["truncate_error", "vary_rounds"]) - -#============================================================================= -# crypt policy -#============================================================================= -_preamble = ("The CryptPolicy class has been deprecated as of " - "Passlib 1.6, and will be removed in Passlib 1.8. ") - -class CryptPolicy(object): - """ - .. deprecated:: 1.6 - This class has been deprecated, and will be removed in Passlib 1.8. - All of its functionality has been rolled into :class:`CryptContext`. - - This class previously stored the configuration options for the - CryptContext class. In the interest of interface simplification, - all of this class' functionality has been rolled into the CryptContext - class itself. 
- The documentation for this class is now focused on documenting how to - migrate to the new api. Additionally, where possible, the deprecation - warnings issued by the CryptPolicy methods will list the replacement call - that should be used. - - Constructors - ============ - CryptPolicy objects can be constructed directly using any of - the keywords accepted by :class:`CryptContext`. Direct uses of the - :class:`!CryptPolicy` constructor should either pass the keywords - directly into the CryptContext constructor, or to :meth:`CryptContext.update` - if the policy object was being used to update an existing context object. - - In addition to passing in keywords directly, - CryptPolicy objects can be constructed by the following methods: - - .. automethod:: from_path - .. automethod:: from_string - .. automethod:: from_source - .. automethod:: from_sources - .. automethod:: replace - - Introspection - ============= - All of the informational methods provided by this class have been deprecated - by identical or similar methods in the :class:`CryptContext` class: - - .. automethod:: has_schemes - .. automethod:: schemes - .. automethod:: iter_handlers - .. automethod:: get_handler - .. automethod:: get_options - .. automethod:: handler_is_deprecated - .. automethod:: get_min_verify_time - - Exporting - ========= - .. automethod:: iter_config - .. automethod:: to_dict - .. automethod:: to_file - .. automethod:: to_string - - .. note:: - CryptPolicy are immutable. - Use the :meth:`replace` method to mutate existing instances. - - .. deprecated:: 1.6 - """ - #=================================================================== - # class methods - #=================================================================== - @classmethod - def from_path(cls, path, section="passlib", encoding="utf-8"): - """create a CryptPolicy instance from a local file. - - .. 
deprecated:: 1.6 - - Creating a new CryptContext from a file, which was previously done via - ``CryptContext(policy=CryptPolicy.from_path(path))``, can now be - done via ``CryptContext.from_path(path)``. - See :meth:`CryptContext.from_path` for details. - - Updating an existing CryptContext from a file, which was previously done - ``context.policy = CryptPolicy.from_path(path)``, can now be - done via ``context.load_path(path)``. - See :meth:`CryptContext.load_path` for details. - """ - warn(_preamble + - "Instead of ``CryptPolicy.from_path(path)``, " - "use ``CryptContext.from_path(path)`` " - " or ``context.load_path(path)`` for an existing CryptContext.", - DeprecationWarning, stacklevel=2) - return cls(_internal_context=CryptContext.from_path(path, section, - encoding)) - - @classmethod - def from_string(cls, source, section="passlib", encoding="utf-8"): - """create a CryptPolicy instance from a string. - - .. deprecated:: 1.6 - - Creating a new CryptContext from a string, which was previously done via - ``CryptContext(policy=CryptPolicy.from_string(data))``, can now be - done via ``CryptContext.from_string(data)``. - See :meth:`CryptContext.from_string` for details. - - Updating an existing CryptContext from a string, which was previously done - ``context.policy = CryptPolicy.from_string(data)``, can now be - done via ``context.load(data)``. - See :meth:`CryptContext.load` for details. - """ - warn(_preamble + - "Instead of ``CryptPolicy.from_string(source)``, " - "use ``CryptContext.from_string(source)`` or " - "``context.load(source)`` for an existing CryptContext.", - DeprecationWarning, stacklevel=2) - return cls(_internal_context=CryptContext.from_string(source, section, - encoding)) - - @classmethod - def from_source(cls, source, _warn=True): - """create a CryptPolicy instance from some source. - - this method autodetects the source type, and invokes - the appropriate constructor automatically. 
it attempts - to detect whether the source is a configuration string, a filepath, - a dictionary, or an existing CryptPolicy instance. - - .. deprecated:: 1.6 - - Create a new CryptContext, which could previously be done via - ``CryptContext(policy=CryptPolicy.from_source(source))``, should - now be done using an explicit method: the :class:`CryptContext` - constructor itself, :meth:`CryptContext.from_path`, - or :meth:`CryptContext.from_string`. - - Updating an existing CryptContext, which could previously be done via - ``context.policy = CryptPolicy.from_source(source)``, should - now be done using an explicit method: :meth:`CryptContext.update`, - or :meth:`CryptContext.load`. - """ - if _warn: - warn(_preamble + - "Instead of ``CryptPolicy.from_source()``, " - "use ``CryptContext.from_string(path)`` " - " or ``CryptContext.from_path(source)``, as appropriate.", - DeprecationWarning, stacklevel=2) - if isinstance(source, CryptPolicy): - return source - elif isinstance(source, dict): - return cls(_internal_context=CryptContext(**source)) - elif not isinstance(source, (bytes,unicode)): - raise TypeError("source must be CryptPolicy, dict, config string, " - "or file path: %r" % (type(source),)) - elif any(c in source for c in "\n\r\t") or not source.strip(" \t./\;:"): - return cls(_internal_context=CryptContext.from_string(source)) - else: - return cls(_internal_context=CryptContext.from_path(source)) - - @classmethod - def from_sources(cls, sources, _warn=True): - """create a CryptPolicy instance by merging multiple sources. - - each source is interpreted as by :meth:`from_source`, - and the results are merged together. - - .. deprecated:: 1.6 - Instead of using this method to merge multiple policies together, - a :class:`CryptContext` instance should be created, and then - the multiple sources merged together via :meth:`CryptContext.load`. 
- """ - if _warn: - warn(_preamble + - "Instead of ``CryptPolicy.from_sources()``, " - "use the various CryptContext constructors " - " followed by ``context.update()``.", - DeprecationWarning, stacklevel=2) - if len(sources) == 0: - raise ValueError("no sources specified") - if len(sources) == 1: - return cls.from_source(sources[0], _warn=False) - kwds = {} - for source in sources: - kwds.update(cls.from_source(source, _warn=False)._context.to_dict(resolve=True)) - return cls(_internal_context=CryptContext(**kwds)) - - def replace(self, *args, **kwds): - """create a new CryptPolicy, optionally updating parts of the - existing configuration. - - .. deprecated:: 1.6 - Callers of this method should :meth:`CryptContext.update` or - :meth:`CryptContext.copy` instead. - """ - if self._stub_policy: - warn(_preamble + # pragma: no cover -- deprecated & unused - "Instead of ``context.policy.replace()``, " - "use ``context.update()`` or ``context.copy()``.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "Instead of ``CryptPolicy().replace()``, " - "create a CryptContext instance and " - "use ``context.update()`` or ``context.copy()``.", - DeprecationWarning, stacklevel=2) - sources = [ self ] - if args: - sources.extend(args) - if kwds: - sources.append(kwds) - return CryptPolicy.from_sources(sources, _warn=False) - - #=================================================================== - # instance attrs - #=================================================================== - - # internal CryptContext we're wrapping to handle everything - # until this class is removed. - _context = None - - # flag indicating this is wrapper generated by the CryptContext.policy - # attribute, rather than one created independantly by the application. 
- _stub_policy = False - - #=================================================================== - # init - #=================================================================== - def __init__(self, *args, **kwds): - context = kwds.pop("_internal_context", None) - if context: - assert isinstance(context, CryptContext) - self._context = context - self._stub_policy = kwds.pop("_stub_policy", False) - assert not (args or kwds), "unexpected args: %r %r" % (args,kwds) - else: - if args: - if len(args) != 1: - raise TypeError("only one positional argument accepted") - if kwds: - raise TypeError("cannot specify positional arg and kwds") - kwds = args[0] - warn(_preamble + - "Instead of constructing a CryptPolicy instance, " - "create a CryptContext directly, or use ``context.update()`` " - "and ``context.load()`` to reconfigure existing CryptContext " - "instances.", - DeprecationWarning, stacklevel=2) - self._context = CryptContext(**kwds) - - #=================================================================== - # public interface for examining options - #=================================================================== - def has_schemes(self): - """return True if policy defines *any* schemes for use. - - .. deprecated:: 1.6 - applications should use ``bool(context.schemes())`` instead. - see :meth:`CryptContext.schemes`. - """ - if self._stub_policy: - warn(_preamble + # pragma: no cover -- deprecated & unused - "Instead of ``context.policy.has_schemes()``, " - "use ``bool(context.schemes())``.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "Instead of ``CryptPolicy().has_schemes()``, " - "create a CryptContext instance and " - "use ``bool(context.schemes())``.", - DeprecationWarning, stacklevel=2) - return bool(self._context.schemes()) - - def iter_handlers(self): - """return iterator over handlers defined in policy. - - .. deprecated:: 1.6 - applications should use ``context.schemes(resolve=True))`` instead. - see :meth:`CryptContext.schemes`. 
- """ - if self._stub_policy: - warn(_preamble + - "Instead of ``context.policy.iter_handlers()``, " - "use ``context.schemes(resolve=True)``.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "Instead of ``CryptPolicy().iter_handlers()``, " - "create a CryptContext instance and " - "use ``context.schemes(resolve=True)``.", - DeprecationWarning, stacklevel=2) - return self._context.schemes(resolve=True, unconfigured=True) - - def schemes(self, resolve=False): - """return list of schemes defined in policy. - - .. deprecated:: 1.6 - applications should use :meth:`CryptContext.schemes` instead. - """ - if self._stub_policy: - warn(_preamble + # pragma: no cover -- deprecated & unused - "Instead of ``context.policy.schemes()``, " - "use ``context.schemes()``.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "Instead of ``CryptPolicy().schemes()``, " - "create a CryptContext instance and " - "use ``context.schemes()``.", - DeprecationWarning, stacklevel=2) - return list(self._context.schemes(resolve=resolve, unconfigured=True)) - - def get_handler(self, name=None, category=None, required=False): - """return handler as specified by name, or default handler. - - .. deprecated:: 1.6 - applications should use :meth:`CryptContext.handler` instead, - though note that the ``required`` keyword has been removed, - and the new method will always act as if ``required=True``. 
- """ - if self._stub_policy: - warn(_preamble + - "Instead of ``context.policy.get_handler()``, " - "use ``context.handler()``.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "Instead of ``CryptPolicy().get_handler()``, " - "create a CryptContext instance and " - "use ``context.handler()``.", - DeprecationWarning, stacklevel=2) - # CryptContext.handler() doesn't support required=False, - # so wrapping it in try/except - try: - return self._context.handler(name, category, unconfigured=True) - except KeyError: - if required: - raise - else: - return None - - def get_min_verify_time(self, category=None): - """get min_verify_time setting for policy. - - .. deprecated:: 1.6 - min_verify_time option will be removed entirely in passlib 1.8 - - .. versionchanged:: 1.7 - this method now always returns the value automatically - calculated by :meth:`CryptContext.min_verify_time`, - any value specified by policy is ignored. - """ - warn("get_min_verify_time() and min_verify_time option is deprecated and ignored, " - "and will be removed in Passlib 1.8", DeprecationWarning, - stacklevel=2) - return 0 - - def get_options(self, name, category=None): - """return dictionary of options specific to a given handler. - - .. deprecated:: 1.6 - this method has no direct replacement in the 1.6 api, as there - is not a clearly defined use-case. however, examining the output of - :meth:`CryptContext.to_dict` should serve as the closest alternative. - """ - # XXX: might make a public replacement, but need more study of the use cases. 
- if self._stub_policy: - warn(_preamble + # pragma: no cover -- deprecated & unused - "``context.policy.get_options()`` will no longer be available.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "``CryptPolicy().get_options()`` will no longer be available.", - DeprecationWarning, stacklevel=2) - if hasattr(name, "name"): - name = name.name - return self._context._config._get_record_options_with_flag(name, category)[0] - - def handler_is_deprecated(self, name, category=None): - """check if handler has been deprecated by policy. - - .. deprecated:: 1.6 - this method has no direct replacement in the 1.6 api, as there - is not a clearly defined use-case. however, examining the output of - :meth:`CryptContext.to_dict` should serve as the closest alternative. - """ - # XXX: might make a public replacement, but need more study of the use cases. - if self._stub_policy: - warn(_preamble + - "``context.policy.handler_is_deprecated()`` will no longer be available.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "``CryptPolicy().handler_is_deprecated()`` will no longer be available.", - DeprecationWarning, stacklevel=2) - if hasattr(name, "name"): - name = name.name - return self._context.handler(name, category).deprecated - - #=================================================================== - # serialization - #=================================================================== - - def iter_config(self, ini=False, resolve=False): - """iterate over key/value pairs representing the policy object. - - .. deprecated:: 1.6 - applications should use :meth:`CryptContext.to_dict` instead. 
- """ - if self._stub_policy: - warn(_preamble + # pragma: no cover -- deprecated & unused - "Instead of ``context.policy.iter_config()``, " - "use ``context.to_dict().items()``.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "Instead of ``CryptPolicy().iter_config()``, " - "create a CryptContext instance and " - "use ``context.to_dict().items()``.", - DeprecationWarning, stacklevel=2) - # hacked code that renders keys & values in manner that approximates - # old behavior. context.to_dict() is much cleaner. - context = self._context - if ini: - def render_key(key): - return context._render_config_key(key).replace("__", ".") - def render_value(value): - if isinstance(value, (list,tuple)): - value = ", ".join(value) - return value - resolve = False - else: - render_key = context._render_config_key - render_value = lambda value: value - return ( - (render_key(key), render_value(value)) - for key, value in context._config.iter_config(resolve) - ) - - def to_dict(self, resolve=False): - """export policy object as dictionary of options. - - .. deprecated:: 1.6 - applications should use :meth:`CryptContext.to_dict` instead. - """ - if self._stub_policy: - warn(_preamble + - "Instead of ``context.policy.to_dict()``, " - "use ``context.to_dict()``.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "Instead of ``CryptPolicy().to_dict()``, " - "create a CryptContext instance and " - "use ``context.to_dict()``.", - DeprecationWarning, stacklevel=2) - return self._context.to_dict(resolve) - - def to_file(self, stream, section="passlib"): # pragma: no cover -- deprecated & unused - """export policy to file. - - .. deprecated:: 1.6 - applications should use :meth:`CryptContext.to_string` instead, - and then write the output to a file as desired. 
- """ - if self._stub_policy: - warn(_preamble + - "Instead of ``context.policy.to_file(stream)``, " - "use ``stream.write(context.to_string())``.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "Instead of ``CryptPolicy().to_file(stream)``, " - "create a CryptContext instance and " - "use ``stream.write(context.to_string())``.", - DeprecationWarning, stacklevel=2) - out = self._context.to_string(section=section) - if PY2: - out = out.encode("utf-8") - stream.write(out) - - def to_string(self, section="passlib", encoding=None): - """export policy to file. - - .. deprecated:: 1.6 - applications should use :meth:`CryptContext.to_string` instead. - """ - if self._stub_policy: - warn(_preamble + # pragma: no cover -- deprecated & unused - "Instead of ``context.policy.to_string()``, " - "use ``context.to_string()``.", - DeprecationWarning, stacklevel=2) - else: - warn(_preamble + - "Instead of ``CryptPolicy().to_string()``, " - "create a CryptContext instance and " - "use ``context.to_string()``.", - DeprecationWarning, stacklevel=2) - out = self._context.to_string(section=section) - if encoding: - out = out.encode(encoding) - return out - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# _CryptConfig helper class -#============================================================================= -class _CryptConfig(object): - """parses, validates, and stores CryptContext config - - this is a helper used internally by CryptContext to handle - parsing, validation, and serialization of its config options. - split out from the main class, but not made public since - that just complicates interface too much (c.f. 
CryptPolicy) - - :arg source: config as dict mapping ``(cat,scheme,option) -> value`` - """ - #=================================================================== - # instance attrs - #=================================================================== - - # triple-nested dict which maps scheme -> category -> key -> value, - # storing all hash-specific options - _scheme_options = None - - # double-nested dict which maps key -> category -> value - # storing all CryptContext options - _context_options = None - - # tuple of handler objects - handlers = None - - # tuple of scheme objects in same order as handlers - schemes = None - - # tuple of categories in alphabetical order (not including None) - categories = None - - # set of all context keywords used by active schemes - context_kwds = None - - # dict mapping category -> default scheme - _default_schemes = None - - # dict mapping (scheme, category) -> custom handler - _records = None - - # dict mapping category -> list of custom handler instances for that category, - # in order of schemes(). 
populated on demand by _get_record_list() - _record_lists = None - - #=================================================================== - # constructor - #=================================================================== - def __init__(self, source): - self._init_scheme_list(source.get((None,None,"schemes"))) - self._init_options(source) - self._init_default_schemes() - self._init_records() - - def _init_scheme_list(self, data): - """initialize .handlers and .schemes attributes""" - handlers = [] - schemes = [] - if isinstance(data, native_string_types): - data = splitcomma(data) - for elem in data or (): - # resolve elem -> handler & scheme - if hasattr(elem, "name"): - handler = elem - scheme = handler.name - _validate_handler_name(scheme) - elif isinstance(elem, native_string_types): - handler = get_crypt_handler(elem) - scheme = handler.name - else: - raise TypeError("scheme must be name or CryptHandler, " - "not %r" % type(elem)) - - # check scheme name isn't already in use - if scheme in schemes: - raise KeyError("multiple handlers with same name: %r" % - (scheme,)) - - # add to handler list - handlers.append(handler) - schemes.append(scheme) - - self.handlers = tuple(handlers) - self.schemes = tuple(schemes) - - #=================================================================== - # lowlevel options - #=================================================================== - - #--------------------------------------------------------------- - # init lowlevel option storage - #--------------------------------------------------------------- - def _init_options(self, source): - """load config dict into internal representation, - and init .categories attr - """ - # prepare dicts & locals - norm_scheme_option = self._norm_scheme_option - norm_context_option = self._norm_context_option - self._scheme_options = scheme_options = {} - self._context_options = context_options = {} - categories = set() - - # load source config into internal storage - for (cat, scheme, 
key), value in iteritems(source): - categories.add(cat) - explicit_scheme = scheme - if not cat and not scheme and key in _global_settings: - # going forward, not using "__all__" format. instead... - # whitelisting set of keys which should be passed to (all) schemes, - # rather than passed to the CryptContext itself - scheme = "all" - if scheme: - # normalize scheme option - key, value = norm_scheme_option(key, value) - - # e.g. things like "min_rounds" should never be set cross-scheme - # this will be fatal under 2.0. - if scheme == "all" and key not in _global_settings: - warn("The '%s' option should be configured per-algorithm, and not set " - "globally in the context; This will be an error in Passlib 2.0" % - (key,), PasslibConfigWarning) - - # this scheme is going away in 2.0; - # but most keys deserve an extra warning since it impacts security. - if explicit_scheme == "all": - warn("The 'all' scheme is deprecated as of Passlib 1.7, " - "and will be removed in Passlib 2.0; Please configure " - "options on a per-algorithm basis.", DeprecationWarning) - - # store in scheme_options - # map structure: scheme_options[scheme][category][key] = value - try: - category_map = scheme_options[scheme] - except KeyError: - scheme_options[scheme] = {cat: {key: value}} - else: - try: - option_map = category_map[cat] - except KeyError: - category_map[cat] = {key: value} - else: - option_map[key] = value - else: - # normalize context option - if cat and key == "schemes": - raise KeyError("'schemes' context option is not allowed " - "per category") - key, value = norm_context_option(cat, key, value) - if key == "min_verify_time": # ignored in 1.7, to be removed in 1.8 - continue - - # store in context_options - # map structure: context_options[key][category] = value - try: - category_map = context_options[key] - except KeyError: - context_options[key] = {cat: value} - else: - category_map[cat] = value - - # store list of configured categories - categories.discard(None) - 
self.categories = tuple(sorted(categories)) - - def _norm_scheme_option(self, key, value): - # check for invalid options - if key in _forbidden_scheme_options: - raise KeyError("%r option not allowed in CryptContext " - "configuration" % (key,)) - # coerce strings for certain fields (e.g. min_rounds uses ints) - if isinstance(value, native_string_types): - func = _coerce_scheme_options.get(key) - if func: - value = func(value) - return key, value - - def _norm_context_option(self, cat, key, value): - schemes = self.schemes - if key == "default": - if hasattr(value, "name"): - value = value.name - elif not isinstance(value, native_string_types): - raise ExpectedTypeError(value, "str", "default") - if schemes and value not in schemes: - raise KeyError("default scheme not found in policy") - elif key == "deprecated": - if isinstance(value, native_string_types): - value = splitcomma(value) - elif not isinstance(value, (list,tuple)): - raise ExpectedTypeError(value, "str or seq", "deprecated") - if 'auto' in value: - # XXX: have any statements been made about when this is default? - # should do it in 1.8 at latest. 
- if len(value) > 1: - raise ValueError("cannot list other schemes if " - "``deprecated=['auto']`` is used") - elif schemes: - # make sure list of deprecated schemes is subset of configured schemes - for scheme in value: - if not isinstance(scheme, native_string_types): - raise ExpectedTypeError(value, "str", "deprecated element") - if scheme not in schemes: - raise KeyError("deprecated scheme not found " - "in policy: %r" % (scheme,)) - elif key == "min_verify_time": - warn("'min_verify_time' was deprecated in Passlib 1.6, is " - "ignored in 1.7, and will be removed in 1.8", - DeprecationWarning) - elif key == "harden_verify": - warn("'harden_verify' is deprecated & ignored as of Passlib 1.7.1, " - " and will be removed in 1.8", - DeprecationWarning) - elif key != "schemes": - raise KeyError("unknown CryptContext keyword: %r" % (key,)) - return key, value - - #--------------------------------------------------------------- - # reading context options - #--------------------------------------------------------------- - def get_context_optionmap(self, key, _default={}): - """return dict mapping category->value for specific context option. - - .. warning:: treat return value as readonly! - """ - return self._context_options.get(key, _default) - - def get_context_option_with_flag(self, category, key): - """return value of specific option, handling category inheritance. - also returns flag indicating whether value is category-specific. 
- """ - try: - category_map = self._context_options[key] - except KeyError: - return None, False - value = category_map.get(None) - if category: - try: - alt = category_map[category] - except KeyError: - pass - else: - if value is None or alt != value: - return alt, True - return value, False - - #--------------------------------------------------------------- - # reading scheme options - #--------------------------------------------------------------- - def _get_scheme_optionmap(self, scheme, category, default={}): - """return all options for (scheme,category) combination - - .. warning:: treat return value as readonly! - """ - try: - return self._scheme_options[scheme][category] - except KeyError: - return default - - def get_base_handler(self, scheme): - return self.handlers[self.schemes.index(scheme)] - - @staticmethod - def expand_settings(handler): - setting_kwds = handler.setting_kwds - if 'rounds' in handler.setting_kwds: - # XXX: historically this extras won't be listed in setting_kwds - setting_kwds += uh.HasRounds.using_rounds_kwds - return setting_kwds - - # NOTE: this is only used by _get_record_options_with_flag()... - def get_scheme_options_with_flag(self, scheme, category): - """return composite dict of all options set for scheme. - includes options inherited from 'all' and from default category. - result can be modified. 
- returns (kwds, has_cat_specific_options) - """ - # start out with copy of global options - get_optionmap = self._get_scheme_optionmap - kwds = get_optionmap("all", None).copy() - has_cat_options = False - - # add in category-specific global options - if category: - defkwds = kwds.copy() # <-- used to detect category-specific options - kwds.update(get_optionmap("all", category)) - - # filter out global settings not supported by handler - allowed_settings = self.expand_settings(self.get_base_handler(scheme)) - for key in set(kwds).difference(allowed_settings): - kwds.pop(key) - if category: - for key in set(defkwds).difference(allowed_settings): - defkwds.pop(key) - - # add in default options for scheme - other = get_optionmap(scheme, None) - kwds.update(other) - - # load category-specific options for scheme - if category: - defkwds.update(other) - kwds.update(get_optionmap(scheme, category)) - - # compare default category options to see if there's anything - # category-specific - if kwds != defkwds: - has_cat_options = True - - return kwds, has_cat_options - - #=================================================================== - # deprecated & default schemes - #=================================================================== - def _init_default_schemes(self): - """initialize maps containing default scheme for each category. - - have to do this after _init_options(), since the default scheme - is affected by the list of deprecated schemes. 
- """ - # init maps & locals - get_optionmap = self.get_context_optionmap - default_map = self._default_schemes = get_optionmap("default").copy() - dep_map = get_optionmap("deprecated") - schemes = self.schemes - if not schemes: - return - - # figure out default scheme - deps = dep_map.get(None) or () - default = default_map.get(None) - if not default: - for scheme in schemes: - if scheme not in deps: - default_map[None] = scheme - break - else: - raise ValueError("must have at least one non-deprecated scheme") - elif default in deps: - raise ValueError("default scheme cannot be deprecated") - - # figure out per-category default schemes, - for cat in self.categories: - cdeps = dep_map.get(cat, deps) - cdefault = default_map.get(cat, default) - if not cdefault: - for scheme in schemes: - if scheme not in cdeps: - default_map[cat] = scheme - break - else: - raise ValueError("must have at least one non-deprecated " - "scheme for %r category" % cat) - elif cdefault in cdeps: - raise ValueError("default scheme for %r category " - "cannot be deprecated" % cat) - - def default_scheme(self, category): - """return default scheme for specific category""" - defaults = self._default_schemes - try: - return defaults[category] - except KeyError: - pass - if not self.schemes: - raise KeyError("no hash schemes configured for this " - "CryptContext instance") - return defaults[None] - - def is_deprecated_with_flag(self, scheme, category): - """is scheme deprecated under particular category?""" - depmap = self.get_context_optionmap("deprecated") - def test(cat): - source = depmap.get(cat, depmap.get(None)) - if source is None: - return None - elif 'auto' in source: - return scheme != self.default_scheme(cat) - else: - return scheme in source - value = test(None) or False - if category: - alt = test(category) - if alt is not None and value != alt: - return alt, True - return value, False - - #=================================================================== - # CryptRecord objects 
- #=================================================================== - def _init_records(self): - # NOTE: this step handles final validation of settings, - # checking for violations against handler's internal invariants. - # this is why we create all the records now, - # so CryptContext throws error immediately rather than later. - self._record_lists = {} - records = self._records = {} - all_context_kwds = self.context_kwds = set() - get_options = self._get_record_options_with_flag - categories = (None,) + self.categories - for handler in self.handlers: - scheme = handler.name - all_context_kwds.update(handler.context_kwds) - for cat in categories: - kwds, has_cat_options = get_options(scheme, cat) - if cat is None or has_cat_options: - records[scheme, cat] = self._create_record(handler, cat, **kwds) - # NOTE: if handler has no category-specific opts, get_record() - # will automatically use the default category's record. - # NOTE: default records for specific category stored under the - # key (None,category); these are populated on-demand by get_record(). - - @staticmethod - def _create_record(handler, category=None, deprecated=False, **settings): - # create custom handler if needed. - try: - # XXX: relaxed=True is mostly here to retain backwards-compat behavior. - # could make this optional flag in future. - subcls = handler.using(relaxed=True, **settings) - except TypeError as err: - m = re.match(r".* unexpected keyword argument '(.*)'$", str(err)) - if m and m.group(1) in settings: - # translate into KeyError, for backwards compat. - # XXX: push this down to GenericHandler.using() implementation? 
- key = m.group(1) - raise KeyError("keyword not supported by %s handler: %r" % - (handler.name, key)) - raise - - # using private attrs to store some extra metadata in custom handler - assert subcls is not handler, "expected unique variant of handler" - ##subcls._Context__category = category - subcls._Context__orig_handler = handler - subcls.deprecated = deprecated # attr reserved for this purpose - return subcls - - def _get_record_options_with_flag(self, scheme, category): - """return composite dict of options for given scheme + category. - - this is currently a private method, though some variant - of its output may eventually be made public. - - given a scheme & category, it returns two things: - a set of all the keyword options to pass to :meth:`_create_record`, - and a bool flag indicating whether any of these options - were specific to the named category. if this flag is false, - the options are identical to the options for the default category. - - the options dict includes all the scheme-specific settings, - as well as optional *deprecated* keyword. - """ - # get scheme options - kwds, has_cat_options = self.get_scheme_options_with_flag(scheme, category) - - # throw in deprecated flag - value, not_inherited = self.is_deprecated_with_flag(scheme, category) - if value: - kwds['deprecated'] = True - if not_inherited: - has_cat_options = True - - return kwds, has_cat_options - - def get_record(self, scheme, category): - """return record for specific scheme & category (cached)""" - # NOTE: this is part of the critical path shared by - # all of CryptContext's PasswordHash methods, - # hence all the caching and error checking. 
- - # quick lookup in cache - try: - return self._records[scheme, category] - except KeyError: - pass - - # type check - if category is not None and not isinstance(category, native_string_types): - if PY2 and isinstance(category, unicode): - # for compatibility with unicode-centric py2 apps - return self.get_record(scheme, category.encode("utf-8")) - raise ExpectedTypeError(category, "str or None", "category") - if scheme is not None and not isinstance(scheme, native_string_types): - raise ExpectedTypeError(scheme, "str or None", "scheme") - - # if scheme=None, - # use record for category's default scheme, and cache result. - if not scheme: - default = self.default_scheme(category) - assert default - record = self._records[None, category] = self.get_record(default, - category) - return record - - # if no record for (scheme, category), - # use record for (scheme, None), and cache result. - if category: - try: - cache = self._records - record = cache[scheme, category] = cache[scheme, None] - return record - except KeyError: - pass - - # scheme not found in configuration for default category - raise KeyError("crypt algorithm not found in policy: %r" % (scheme,)) - - def _get_record_list(self, category=None): - """return list of records for category (cached) - - this is an internal helper used only by identify_record() - """ - # type check of category - handled by _get_record() - # quick lookup in cache - try: - return self._record_lists[category] - except KeyError: - pass - # cache miss - build list from scratch - value = self._record_lists[category] = [ - self.get_record(scheme, category) - for scheme in self.schemes - ] - return value - - def identify_record(self, hash, category, required=True): - """internal helper to identify appropriate custom handler for hash""" - # NOTE: this is part of the critical path shared by - # all of CryptContext's PasswordHash methods, - # hence all the caching and error checking. - # FIXME: if multiple hashes could match (e.g. 
lmhash vs nthash) - # this will only return first match. might want to do something - # about this in future, but for now only hashes with - # unique identifiers will work properly in a CryptContext. - # XXX: if all handlers have a unique prefix (e.g. all are MCF / LDAP), - # could use dict-lookup to speed up this search. - if not isinstance(hash, unicode_or_bytes_types): - raise ExpectedStringError(hash, "hash") - # type check of category - handled by _get_record_list() - for record in self._get_record_list(category): - if record.identify(hash): - return record - if not required: - return None - elif not self.schemes: - raise KeyError("no crypt algorithms supported") - else: - raise ValueError("hash could not be identified") - - @memoized_property - def disabled_record(self): - for record in self._get_record_list(None): - if record.is_disabled: - return record - raise RuntimeError("no disabled hasher present " - "(perhaps add 'unix_disabled' to list of schemes?)") - - #=================================================================== - # serialization - #=================================================================== - def iter_config(self, resolve=False): - """regenerate original config. - - this is an iterator which yields ``(cat,scheme,option),value`` items, - in the order they generally appear inside an INI file. - if interpreted as a dictionary, it should match the original - keywords passed to the CryptContext (aside from any canonization). - - it's mainly used as the internal backend for most of the public - serialization methods. 
- """ - # grab various bits of data - scheme_options = self._scheme_options - context_options = self._context_options - scheme_keys = sorted(scheme_options) - context_keys = sorted(context_options) - - # write loaded schemes (may differ from 'schemes' local var) - if 'schemes' in context_keys: - context_keys.remove("schemes") - value = self.handlers if resolve else self.schemes - if value: - yield (None, None, "schemes"), list(value) - - # then run through config for each user category - for cat in (None,) + self.categories: - - # write context options - for key in context_keys: - try: - value = context_options[key][cat] - except KeyError: - pass - else: - if isinstance(value, list): - value = list(value) - yield (cat, None, key), value - - # write per-scheme options for all schemes. - for scheme in scheme_keys: - try: - kwds = scheme_options[scheme][cat] - except KeyError: - pass - else: - for key in sorted(kwds): - yield (cat, scheme, key), kwds[key] - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# main CryptContext class -#============================================================================= -class CryptContext(object): - """Helper for hashing & verifying passwords using multiple algorithms. - - Instances of this class allow applications to choose a specific - set of hash algorithms which they wish to support, set limits and defaults - for the rounds and salt sizes those algorithms should use, flag - which algorithms should be deprecated, and automatically handle - migrating users to stronger hashes when they log in. - - Basic usage:: - - >>> ctx = CryptContext(schemes=[...]) - - See the Passlib online documentation for details and full documentation. 
- """ - # FIXME: altering the configuration of this object isn't threadsafe, - # but is generally only done during application init, so not a major - # issue (just yet). - - # XXX: would like some way to restrict the categories that are allowed, - # to restrict what the app OR the config can use. - - # XXX: add wrap/unwrap callback hooks so app can mutate hash format? - - # XXX: add method for detecting and warning user about schemes - # which don't have any good distinguishing marks? - # or greedy ones (unix_disabled, plaintext) which are not listed at the end? - - #=================================================================== - # instance attrs - #=================================================================== - - # _CryptConfig instance holding current parsed config - _config = None - - # copy of _config methods, stored in CryptContext instance for speed. - _get_record = None - _identify_record = None - - #=================================================================== - # secondary constructors - #=================================================================== - @classmethod - def _norm_source(cls, source): - """internal helper - accepts string, dict, or context""" - if isinstance(source, dict): - return cls(**source) - elif isinstance(source, cls): - return source - else: - self = cls() - self.load(source) - return self - - @classmethod - def from_string(cls, source, section="passlib", encoding="utf-8"): - """create new CryptContext instance from an INI-formatted string. - - :type source: unicode or bytes - :arg source: - string containing INI-formatted content. - - :type section: str - :param section: - option name of section to read from, defaults to ``"passlib"``. - - :type encoding: str - :arg encoding: - optional encoding used when source is bytes, defaults to ``"utf-8"``. - - :returns: - new :class:`CryptContext` instance, configured based on the - parameters in the *source* string. 
- - Usage example:: - - >>> from passlib.context import CryptContext - >>> context = CryptContext.from_string(''' - ... [passlib] - ... schemes = sha256_crypt, des_crypt - ... sha256_crypt__default_rounds = 30000 - ... ''') - - .. versionadded:: 1.6 - - .. seealso:: :meth:`to_string`, the inverse of this constructor. - """ - if not isinstance(source, unicode_or_bytes_types): - raise ExpectedTypeError(source, "unicode or bytes", "source") - self = cls(_autoload=False) - self.load(source, section=section, encoding=encoding) - return self - - @classmethod - def from_path(cls, path, section="passlib", encoding="utf-8"): - """create new CryptContext instance from an INI-formatted file. - - this functions exactly the same as :meth:`from_string`, - except that it loads from a local file. - - :type path: str - :arg path: - path to local file containing INI-formatted config. - - :type section: str - :param section: - option name of section to read from, defaults to ``"passlib"``. - - :type encoding: str - :arg encoding: - encoding used to load file, defaults to ``"utf-8"``. - - :returns: - new CryptContext instance, configured based on the parameters - stored in the file *path*. - - .. versionadded:: 1.6 - - .. seealso:: :meth:`from_string` for an equivalent usage example. - """ - self = cls(_autoload=False) - self.load_path(path, section=section, encoding=encoding) - return self - - def copy(self, **kwds): - """Return copy of existing CryptContext instance. - - This function returns a new CryptContext instance whose configuration - is exactly the same as the original, with the exception that any keywords - passed in will take precedence over the original settings. - As an example:: - - >>> from passlib.context import CryptContext - - >>> # given an existing context... - >>> ctx1 = CryptContext(["sha256_crypt", "md5_crypt"]) - - >>> # copy can be used to make a clone, and update - >>> # some of the settings at the same time... 
- >>> ctx2 = custom_app_context.copy(default="md5_crypt") - - >>> # and the original will be unaffected by the change - >>> ctx1.default_scheme() - "sha256_crypt" - >>> ctx2.default_scheme() - "md5_crypt" - - .. versionadded:: 1.6 - This method was previously named :meth:`!replace`. That alias - has been deprecated, and will be removed in Passlib 1.8. - - .. seealso:: :meth:`update` - """ - # XXX: it would be faster to store ref to self._config, - # but don't want to share config objects til sure - # can rely on them being immutable. - other = CryptContext(_autoload=False) - other.load(self) - if kwds: - other.load(kwds, update=True) - return other - - def using(self, **kwds): - """ - alias for :meth:`copy`, to match PasswordHash.using() - """ - return self.copy(**kwds) - - def replace(self, **kwds): - """deprecated alias of :meth:`copy`""" - warn("CryptContext().replace() has been deprecated in Passlib 1.6, " - "and will be removed in Passlib 1.8, " - "it has been renamed to CryptContext().copy()", - DeprecationWarning, stacklevel=2) - return self.copy(**kwds) - - #=================================================================== - # init - #=================================================================== - def __init__(self, schemes=None, - # keyword only... - policy=_UNSET, # <-- deprecated - _autoload=True, **kwds): - # XXX: add ability to make flag certain contexts as immutable, - # e.g. the builtin passlib ones? - # XXX: add a name or import path for the contexts, to help out repr? 
- if schemes is not None: - kwds['schemes'] = schemes - if policy is not _UNSET: - warn("The CryptContext ``policy`` keyword has been deprecated as of Passlib 1.6, " - "and will be removed in Passlib 1.8; please use " - "``CryptContext.from_string()` or " - "``CryptContext.from_path()`` instead.", - DeprecationWarning) - if policy is None: - self.load(kwds) - elif isinstance(policy, CryptPolicy): - self.load(policy._context) - self.update(kwds) - else: - raise TypeError("policy must be a CryptPolicy instance") - elif _autoload: - self.load(kwds) - else: - assert not kwds, "_autoload=False and kwds are mutually exclusive" - - # XXX: would this be useful? - ##def __str__(self): - ## if PY3: - ## return self.to_string() - ## else: - ## return self.to_string().encode("utf-8") - - def __repr__(self): - return "" % id(self) - - #=================================================================== - # deprecated policy object - #=================================================================== - def _get_policy(self): - # The CryptPolicy class has been deprecated, so to support any - # legacy accesses, we create a stub policy object so .policy attr - # will continue to work. - # - # the code waits until app accesses a specific policy object attribute - # before issuing deprecation warning, so developer gets method-specific - # suggestion for how to upgrade. - - # NOTE: making a copy of the context so the policy acts like a snapshot, - # to retain the pre-1.6 behavior. 
- return CryptPolicy(_internal_context=self.copy(), _stub_policy=True) - - def _set_policy(self, policy): - warn("The CryptPolicy class and the ``context.policy`` attribute have " - "been deprecated as of Passlib 1.6, and will be removed in " - "Passlib 1.8; please use the ``context.load()`` and " - "``context.update()`` methods instead.", - DeprecationWarning, stacklevel=2) - if isinstance(policy, CryptPolicy): - self.load(policy._context) - else: - raise TypeError("expected CryptPolicy instance") - - policy = property(_get_policy, _set_policy, - doc="[deprecated] returns CryptPolicy instance " - "tied to this CryptContext") - - #=================================================================== - # loading / updating configuration - #=================================================================== - @staticmethod - def _parse_ini_stream(stream, section, filename): - """helper read INI from stream, extract passlib section as dict""" - # NOTE: this expects a unicode stream under py3, - # and a utf-8 bytes stream under py2, - # allowing the resulting dict to always use native strings. - p = SafeConfigParser() - if PY3: - # python 3.2 deprecated readfp in favor of read_file - p.read_file(stream, filename) - else: - p.readfp(stream, filename) - # XXX: could change load() to accept list of items, - # and skip intermediate dict creation - return dict(p.items(section)) - - def load_path(self, path, update=False, section="passlib", encoding="utf-8"): - """Load new configuration into CryptContext from a local file. - - This function is a wrapper for :meth:`load` which - loads a configuration string from the local file *path*, - instead of an in-memory source. Its behavior and options - are otherwise identical to :meth:`!load` when provided with - an INI-formatted string. - - .. 
versionadded:: 1.6 - """ - def helper(stream): - kwds = self._parse_ini_stream(stream, section, path) - return self.load(kwds, update=update) - if PY3: - # decode to unicode, which load() expected under py3 - with open(path, "rt", encoding=encoding) as stream: - return helper(stream) - elif encoding in ["utf-8", "ascii"]: - # keep as utf-8 bytes, which load() expects under py2 - with open(path, "rb") as stream: - return helper(stream) - else: - # transcode to utf-8 bytes - with open(path, "rb") as fh: - tmp = fh.read().decode(encoding).encode("utf-8") - return helper(BytesIO(tmp)) - - def load(self, source, update=False, section="passlib", encoding="utf-8"): - """Load new configuration into CryptContext, replacing existing config. - - :arg source: - source of new configuration to load. - this value can be a number of different types: - - * a :class:`!dict` object, or compatible Mapping - - the key/value pairs will be interpreted the same - keywords for the :class:`CryptContext` class constructor. - - * a :class:`!unicode` or :class:`!bytes` string - - this will be interpreted as an INI-formatted file, - and appropriate key/value pairs will be loaded from - the specified *section*. - - * another :class:`!CryptContext` object. - - this will export a snapshot of its configuration - using :meth:`to_dict`. - - :type update: bool - :param update: - By default, :meth:`load` will replace the existing configuration - entirely. If ``update=True``, it will preserve any existing - configuration options that are not overridden by the new source, - much like the :meth:`update` method. - - :type section: str - :param section: - When parsing an INI-formatted string, :meth:`load` will look for - a section named ``"passlib"``. This option allows an alternate - section name to be used. Ignored when loading from a dictionary. - - :type encoding: str - :param encoding: - Encoding to use when decode bytes from string. - Defaults to ``"utf-8"``. Ignoring when loading from a dictionary. 
- - :raises TypeError: - * If the source cannot be identified. - * If an unknown / malformed keyword is encountered. - - :raises ValueError: - If an invalid keyword value is encountered. - - .. note:: - - If an error occurs during a :meth:`!load` call, the :class:`!CryptContext` - instance will be restored to the configuration it was in before - the :meth:`!load` call was made; this is to ensure it is - *never* left in an inconsistent state due to a load error. - - .. versionadded:: 1.6 - """ - #----------------------------------------------------------- - # autodetect source type, convert to dict - #----------------------------------------------------------- - parse_keys = True - if isinstance(source, unicode_or_bytes_types): - if PY3: - source = to_unicode(source, encoding, param="source") - else: - source = to_bytes(source, "utf-8", source_encoding=encoding, - param="source") - source = self._parse_ini_stream(NativeStringIO(source), section, - "") - elif isinstance(source, CryptContext): - # extract dict directly from config, so it can be merged later - source = dict(source._config.iter_config(resolve=True)) - parse_keys = False - elif not hasattr(source, "items"): - # mappings are left alone, otherwise throw an error. - raise ExpectedTypeError(source, "string or dict", "source") - - # XXX: add support for other iterable types, e.g. sequence of pairs? - - #----------------------------------------------------------- - # parse dict keys into (category, scheme, option) format, - # and merge with existing configuration if needed. 
- #----------------------------------------------------------- - if parse_keys: - parse = self._parse_config_key - source = dict((parse(key), value) - for key, value in iteritems(source)) - if update and self._config is not None: - # if updating, do nothing if source is empty, - if not source: - return - # otherwise overlay source on top of existing config - tmp = source - source = dict(self._config.iter_config(resolve=True)) - source.update(tmp) - - #----------------------------------------------------------- - # compile into _CryptConfig instance, and update state - #----------------------------------------------------------- - config = _CryptConfig(source) - self._config = config - self._reset_dummy_verify() - self._get_record = config.get_record - self._identify_record = config.identify_record - if config.context_kwds: - # (re-)enable method for this instance (in case ELSE clause below ran last load). - self.__dict__.pop("_strip_unused_context_kwds", None) - else: - # disable method for this instance, it's not needed. 
- self._strip_unused_context_kwds = None - - @staticmethod - def _parse_config_key(ckey): - """helper used to parse ``cat__scheme__option`` keys into a tuple""" - # split string into 1-3 parts - assert isinstance(ckey, native_string_types) - parts = ckey.replace(".", "__").split("__") - count = len(parts) - if count == 1: - cat, scheme, key = None, None, parts[0] - elif count == 2: - cat = None - scheme, key = parts - elif count == 3: - cat, scheme, key = parts - else: - raise TypeError("keys must have less than 3 separators: %r" % - (ckey,)) - # validate & normalize the parts - if cat == "default": - cat = None - elif not cat and cat is not None: - raise TypeError("empty category: %r" % ckey) - if scheme == "context": - scheme = None - elif not scheme and scheme is not None: - raise TypeError("empty scheme: %r" % ckey) - if not key: - raise TypeError("empty option: %r" % ckey) - return cat, scheme, key - - def update(self, *args, **kwds): - """Helper for quickly changing configuration. - - This acts much like the :meth:`!dict.update` method: - it updates the context's configuration, - replacing the original value(s) for the specified keys, - and preserving the rest. - It accepts any :ref:`keyword ` - accepted by the :class:`!CryptContext` constructor. - - .. versionadded:: 1.6 - - .. seealso:: :meth:`copy` - """ - if args: - if len(args) > 1: - raise TypeError("expected at most one positional argument") - if kwds: - raise TypeError("positional arg and keywords mutually exclusive") - self.load(args[0], update=True) - elif kwds: - self.load(kwds, update=True) - - # XXX: make this public? even just as flag to load? - # FIXME: this function suffered some bitrot in 1.6.1, - # will need to be updated before works again. 
- ##def _simplify(self): - ## "helper to remove redundant/unused options" - ## # don't do anything if no schemes are defined - ## if not self._schemes: - ## return - ## - ## def strip_items(target, filter): - ## keys = [key for key,value in iteritems(target) - ## if filter(key,value)] - ## for key in keys: - ## del target[key] - ## - ## # remove redundant default. - ## defaults = self._default_schemes - ## if defaults.get(None) == self._schemes[0]: - ## del defaults[None] - ## - ## # remove options for unused schemes. - ## scheme_options = self._scheme_options - ## schemes = self._schemes + ("all",) - ## strip_items(scheme_options, lambda k,v: k not in schemes) - ## - ## # remove rendundant cat defaults. - ## cur = self.default_scheme() - ## strip_items(defaults, lambda k,v: k and v==cur) - ## - ## # remove redundant category deprecations. - ## # TODO: this should work w/ 'auto', but needs closer inspection - ## deprecated = self._deprecated_schemes - ## cur = self._deprecated_schemes.get(None) - ## strip_items(deprecated, lambda k,v: k and v==cur) - ## - ## # remove redundant category options. - ## for scheme, config in iteritems(scheme_options): - ## if None in config: - ## cur = config[None] - ## strip_items(config, lambda k,v: k and v==cur) - ## - ## # XXX: anything else? - - #=================================================================== - # reading configuration - #=================================================================== - def schemes(self, resolve=False, category=None, unconfigured=False): - """return schemes loaded into this CryptContext instance. - - :type resolve: bool - :arg resolve: - if ``True``, will return a tuple of :class:`~passlib.ifc.PasswordHash` - objects instead of their names. - - :returns: - returns tuple of the schemes configured for this context - via the *schemes* option. - - .. versionadded:: 1.6 - This was previously available as ``CryptContext().policy.schemes()`` - - .. 
seealso:: the :ref:`schemes ` option for usage example. - """ - # XXX: should resolv return records rather than handlers? - # or deprecate resolve keyword completely? - # offering up a .hashers Mapping in v1.8 would be great. - # NOTE: supporting 'category' and 'unconfigured' kwds as of 1.7 - # just to pass through to .handler(), but not documenting them... - # may not need to put them to use. - schemes = self._config.schemes - if resolve: - return tuple(self.handler(scheme, category, unconfigured=unconfigured) - for scheme in schemes) - else: - return schemes - - def default_scheme(self, category=None, resolve=False, unconfigured=False): - """return name of scheme that :meth:`hash` will use by default. - - :type resolve: bool - :arg resolve: - if ``True``, will return a :class:`~passlib.ifc.PasswordHash` - object instead of the name. - - :type category: str or None - :param category: - Optional :ref:`user category `. - If specified, this will return the catgory-specific default scheme instead. - - :returns: - name of the default scheme. - - .. seealso:: the :ref:`default ` option for usage example. - - .. versionadded:: 1.6 - - .. versionchanged:: 1.7 - - This now returns a hasher configured with any CryptContext-specific - options (custom rounds settings, etc). Previously this returned - the base hasher from :mod:`passlib.hash`. - """ - # XXX: deprecate this in favor of .handler() or whatever it's replaced with? - # NOTE: supporting 'unconfigured' kwds as of 1.7 - # just to pass through to .handler(), but not documenting them... - # may not need to put them to use. - hasher = self.handler(None, category, unconfigured=unconfigured) - return hasher if resolve else hasher.name - - # XXX: need to decide if exposing this would be useful in any way - ##def categories(self): - ## """return user-categories with algorithm-specific options in this CryptContext. - ## - ## this will always return a tuple. 
- ## if no categories besides the default category have been configured, - ## the tuple will be empty. - ## """ - ## return self._config.categories - - # XXX: need to decide if exposing this would be useful to applications - # in any meaningful way that isn't already served by to_dict() - ##def options(self, scheme, category=None): - ## kwds, percat = self._config.get_options(scheme, category) - ## return kwds - - def handler(self, scheme=None, category=None, unconfigured=False): - """helper to resolve name of scheme -> :class:`~passlib.ifc.PasswordHash` object used by scheme. - - :arg scheme: - This should identify the scheme to lookup. - If omitted or set to ``None``, this will return the handler - for the default scheme. - - :arg category: - If a user category is specified, and no scheme is provided, - it will use the default for that category. - Otherwise this parameter is ignored. - - :param unconfigured: - - By default, this returns a handler object whose .hash() - and .needs_update() methods will honor the configured - provided by CryptContext. See ``unconfigured=True`` - to get the underlying handler from before any context-specific - configuration was applied. - - :raises KeyError: - If the scheme does not exist OR is not being used within this context. - - :returns: - :class:`~passlib.ifc.PasswordHash` object used to implement - the named scheme within this context (this will usually - be one of the objects from :mod:`passlib.hash`) - - .. versionadded:: 1.6 - This was previously available as ``CryptContext().policy.get_handler()`` - - .. versionchanged:: 1.7 - - This now returns a hasher configured with any CryptContext-specific - options (custom rounds settings, etc). Previously this returned - the base hasher from :mod:`passlib.hash`. 
- """ - try: - hasher = self._get_record(scheme, category) - if unconfigured: - return hasher._Context__orig_handler - else: - return hasher - except KeyError: - pass - if self._config.handlers: - raise KeyError("crypt algorithm not found in this " - "CryptContext instance: %r" % (scheme,)) - else: - raise KeyError("no crypt algorithms loaded in this " - "CryptContext instance") - - def _get_unregistered_handlers(self): - """check if any handlers in this context aren't in the global registry""" - return tuple(handler for handler in self._config.handlers - if not _is_handler_registered(handler)) - - @property - def context_kwds(self): - """ - return :class:`!set` containing union of all :ref:`contextual keywords ` - supported by the handlers in this context. - - .. versionadded:: 1.6.6 - """ - return self._config.context_kwds - - #=================================================================== - # exporting config - #=================================================================== - @staticmethod - def _render_config_key(key): - """convert 3-part config key to single string""" - cat, scheme, option = key - if cat: - return "%s__%s__%s" % (cat, scheme or "context", option) - elif scheme: - return "%s__%s" % (scheme, option) - else: - return option - - @staticmethod - def _render_ini_value(key, value): - """render value to string suitable for INI file""" - # convert lists to comma separated lists - # (mainly 'schemes' & 'deprecated') - if isinstance(value, (list,tuple)): - value = ", ".join(value) - - # convert numbers to strings - elif isinstance(value, num_types): - if isinstance(value, float) and key[2] == "vary_rounds": - value = ("%.2f" % value).rstrip("0") if value else "0" - else: - value = str(value) - - assert isinstance(value, native_string_types), \ - "expected string for key: %r %r" % (key, value) - - # escape any percent signs. - return value.replace("%", "%%") - - def to_dict(self, resolve=False): - """Return current configuration as a dictionary. 
- - :type resolve: bool - :arg resolve: - if ``True``, the ``schemes`` key will contain a list of - a :class:`~passlib.ifc.PasswordHash` objects instead of just - their names. - - This method dumps the current configuration of the CryptContext - instance. The key/value pairs should be in the format accepted - by the :class:`!CryptContext` class constructor, in fact - ``CryptContext(**myctx.to_dict())`` will create an exact copy of ``myctx``. - As an example:: - - >>> # you can dump the configuration of any crypt context... - >>> from passlib.apps import ldap_nocrypt_context - >>> ldap_nocrypt_context.to_dict() - {'schemes': ['ldap_salted_sha1', - 'ldap_salted_md5', - 'ldap_sha1', - 'ldap_md5', - 'ldap_plaintext']} - - .. versionadded:: 1.6 - This was previously available as ``CryptContext().policy.to_dict()`` - - .. seealso:: the :ref:`context-serialization-example` example in the tutorial. - """ - # XXX: should resolve default to conditional behavior - # based on presence of unregistered handlers? - render_key = self._render_config_key - return dict((render_key(key), value) - for key, value in self._config.iter_config(resolve)) - - def _write_to_parser(self, parser, section): - """helper to write to ConfigParser instance""" - render_key = self._render_config_key - render_value = self._render_ini_value - parser.add_section(section) - for k,v in self._config.iter_config(): - v = render_value(k, v) - k = render_key(k) - parser.set(section, k, v) - - def to_string(self, section="passlib"): - """serialize to INI format and return as unicode string. - - :param section: - name of INI section to output, defaults to ``"passlib"``. - - :returns: - CryptContext configuration, serialized to a INI unicode string. - - This function acts exactly like :meth:`to_dict`, except that it - serializes all the contents into a single human-readable string, - which can be hand edited, and/or stored in a file. 
The - output of this method is accepted by :meth:`from_string`, - :meth:`from_path`, and :meth:`load`. As an example:: - - >>> # you can dump the configuration of any crypt context... - >>> from passlib.apps import ldap_nocrypt_context - >>> print ldap_nocrypt_context.to_string() - [passlib] - schemes = ldap_salted_sha1, ldap_salted_md5, ldap_sha1, ldap_md5, ldap_plaintext - - .. versionadded:: 1.6 - This was previously available as ``CryptContext().policy.to_string()`` - - .. seealso:: the :ref:`context-serialization-example` example in the tutorial. - """ - parser = SafeConfigParser() - self._write_to_parser(parser, section) - buf = NativeStringIO() - parser.write(buf) - unregistered = self._get_unregistered_handlers() - if unregistered: - buf.write(( - "# NOTE: the %s handler(s) are not registered with Passlib,\n" - "# this string may not correctly reproduce the current configuration.\n\n" - ) % ", ".join(repr(handler.name) for handler in unregistered)) - out = buf.getvalue() - if not PY3: - out = out.decode("utf-8") - return out - - # XXX: is this useful enough to enable? - ##def write_to_path(self, path, section="passlib", update=False): - ## "write to INI file" - ## parser = ConfigParser() - ## if update and os.path.exists(path): - ## if not parser.read([path]): - ## raise EnvironmentError("failed to read existing file") - ## parser.remove_section(section) - ## self._write_to_parser(parser, section) - ## fh = file(path, "w") - ## parser.write(fh) - ## fh.close() - - #=================================================================== - # verify() hardening - # NOTE: this entire feature has been disabled. - # all contents of this section are NOOPs as of 1.7.1, - # and will be removed in 1.8. 
- #=================================================================== - - mvt_estimate_max_samples = 20 - mvt_estimate_min_samples = 10 - mvt_estimate_max_time = 2 - mvt_estimate_resolution = 0.01 - harden_verify = None - min_verify_time = 0 - - def reset_min_verify_time(self): - self._reset_dummy_verify() - - #=================================================================== - # password hash api - #=================================================================== - - # NOTE: all the following methods do is look up the appropriate - # custom handler for a given (scheme,category) combination, - # and hand off the real work to the handler itself, - # which is optimized for the specific (scheme,category) configuration. - # - # The custom handlers are cached inside the _CryptConfig - # instance stored in self._config, and are retrieved - # via get_record() and identify_record(). - # - # _get_record() and _identify_record() are references - # to _config methods of the same name, - # stored in CryptContext for speed. - - def _get_or_identify_record(self, hash, scheme=None, category=None): - """return record based on scheme, or failing that, by identifying hash""" - if scheme: - if not isinstance(hash, unicode_or_bytes_types): - raise ExpectedStringError(hash, "hash") - return self._get_record(scheme, category) - else: - # hash typecheck handled by identify_record() - return self._identify_record(hash, category) - - def _strip_unused_context_kwds(self, kwds, record): - """ - helper which removes any context keywords from **kwds** - that are known to be used by another scheme in this context, - but are NOT supported by handler specified by **record**. - - .. note:: - as optimization, load() will set this method to None on a per-instance basis - if there are no context kwds. 
- """ - if not kwds: - return - unused_kwds = self._config.context_kwds.difference(record.context_kwds) - for key in unused_kwds: - kwds.pop(key, None) - - def needs_update(self, hash, scheme=None, category=None, secret=None): - """Check if hash needs to be replaced for some reason, - in which case the secret should be re-hashed. - - This function is the core of CryptContext's support for hash migration: - This function takes in a hash string, and checks the scheme, - number of rounds, and other properties against the current policy. - It returns ``True`` if the hash is using a deprecated scheme, - or is otherwise outside of the bounds specified by the policy - (e.g. the number of rounds is lower than :ref:`min_rounds ` - configuration for that algorithm). - If so, the password should be re-hashed using :meth:`hash` - Otherwise, it will return ``False``. - - :type hash: unicode or bytes - :arg hash: - The hash string to examine. - - :type scheme: str or None - :param scheme: - - Optional scheme to use. Scheme must be one of the ones - configured for this context (see the - :ref:`schemes ` option). - If no scheme is specified, it will be identified - based on the value of *hash*. - - .. deprecated:: 1.7 - - Support for this keyword is deprecated, and will be removed in Passlib 2.0. - - :type category: str or None - :param category: - Optional :ref:`user category `. - If specified, this will cause any category-specific defaults to - be used when determining if the hash needs to be updated - (e.g. is below the minimum rounds). - - :type secret: unicode, bytes, or None - :param secret: - Optional secret associated with the provided ``hash``. - This is not required, or even currently used for anything... - it's for forward-compatibility with any future - update checks that might need this information. - If provided, Passlib assumes the secret has already been - verified successfully against the hash. - - .. 
versionadded:: 1.6 - - :returns: ``True`` if hash should be replaced, otherwise ``False``. - - :raises ValueError: - If the hash did not match any of the configured :meth:`schemes`. - - .. versionadded:: 1.6 - This method was previously named :meth:`hash_needs_update`. - - .. seealso:: the :ref:`context-migration-example` example in the tutorial. - """ - if scheme is not None: - # TODO: offer replacement alternative. - # ``context.handler(scheme).needs_update()`` would work, - # but may deprecate .handler() in passlib 1.8. - warn("CryptContext.needs_update(): 'scheme' keyword is deprecated as of " - "Passlib 1.7, and will be removed in Passlib 2.0", - DeprecationWarning) - record = self._get_or_identify_record(hash, scheme, category) - return record.deprecated or record.needs_update(hash, secret=secret) - - @deprecated_method(deprecated="1.6", removed="2.0", replacement="CryptContext.needs_update()") - def hash_needs_update(self, hash, scheme=None, category=None): - """Legacy alias for :meth:`needs_update`. - - .. deprecated:: 1.6 - This method was renamed to :meth:`!needs_update` in version 1.6. - This alias will be removed in version 2.0, and should only - be used for compatibility with Passlib 1.3 - 1.5. - """ - return self.needs_update(hash, scheme, category) - - @deprecated_method(deprecated="1.7", removed="2.0") - def genconfig(self, scheme=None, category=None, **settings): - """Generate a config string for specified scheme. - - .. deprecated:: 1.7 - - This method will be removed in version 2.0, and should only - be used for compatibility with Passlib 1.3 - 1.6. - """ - record = self._get_record(scheme, category) - strip_unused = self._strip_unused_context_kwds - if strip_unused: - strip_unused(settings, record) - return record.genconfig(**settings) - - @deprecated_method(deprecated="1.7", removed="2.0") - def genhash(self, secret, config, scheme=None, category=None, **kwds): - """Generate hash for the specified secret using another hash. - - .. 
deprecated:: 1.7 - - This method will be removed in version 2.0, and should only - be used for compatibility with Passlib 1.3 - 1.6. - """ - record = self._get_or_identify_record(config, scheme, category) - strip_unused = self._strip_unused_context_kwds - if strip_unused: - strip_unused(kwds, record) - return record.genhash(secret, config, **kwds) - - def identify(self, hash, category=None, resolve=False, required=False, - unconfigured=False): - """Attempt to identify which algorithm the hash belongs to. - - Note that this will only consider the algorithms - currently configured for this context - (see the :ref:`schemes ` option). - All registered algorithms will be checked, from first to last, - and whichever one positively identifies the hash first will be returned. - - :type hash: unicode or bytes - :arg hash: - The hash string to test. - - :type category: str or None - :param category: - Optional :ref:`user category `. - Ignored by this function, this parameter - is provided for symmetry with the other methods. - - :type resolve: bool - :param resolve: - If ``True``, returns the hash handler itself, - instead of the name of the hash. - - :type required: bool - :param required: - If ``True``, this will raise a ValueError if the hash - cannot be identified, instead of returning ``None``. - - :returns: - The handler which first identifies the hash, - or ``None`` if none of the algorithms identify the hash. - """ - record = self._identify_record(hash, category, required) - if record is None: - return None - elif resolve: - if unconfigured: - return record._Context__orig_handler - else: - return record - else: - return record.name - - def hash(self, secret, scheme=None, category=None, **kwds): - """run secret through selected algorithm, returning resulting hash. - - :type secret: unicode or bytes - :arg secret: - the password to hash. - - :type scheme: str or None - :param scheme: - - Optional scheme to use. 
Scheme must be one of the ones - configured for this context (see the - :ref:`schemes ` option). - If no scheme is specified, the configured default - will be used. - - .. deprecated:: 1.7 - - Support for this keyword is deprecated, and will be removed in Passlib 2.0. - - :type category: str or None - :param category: - Optional :ref:`user category `. - If specified, this will cause any category-specific defaults to - be used when hashing the password (e.g. different default scheme, - different default rounds values, etc). - - :param \*\*kwds: - All other keyword options are passed to the selected algorithm's - :meth:`PasswordHash.hash() ` method. - - :returns: - The secret as encoded by the specified algorithm and options. - The return value will always be a :class:`!str`. - - :raises TypeError, ValueError: - * If any of the arguments have an invalid type or value. - This includes any keywords passed to the underlying hash's - :meth:`PasswordHash.hash() ` method. - - .. seealso:: the :ref:`context-basic-example` example in the tutorial - """ - # XXX: could insert normalization to preferred unicode encoding here - if scheme is not None: - # TODO: offer replacement alternative. - # ``context.handler(scheme).hash()`` would work, - # but may deprecate .handler() in passlib 1.8. - warn("CryptContext.hash(): 'scheme' keyword is deprecated as of " - "Passlib 1.7, and will be removed in Passlib 2.0", - DeprecationWarning) - record = self._get_record(scheme, category) - strip_unused = self._strip_unused_context_kwds - if strip_unused: - strip_unused(kwds, record) - return record.hash(secret, **kwds) - - @deprecated_method(deprecated="1.7", removed="2.0", replacement="CryptContext.hash()") - def encrypt(self, *args, **kwds): - """ - Legacy alias for :meth:`hash`. - - .. deprecated:: 1.7 - This method was renamed to :meth:`!hash` in version 1.7. - This alias will be removed in version 2.0, and should only - be used for compatibility with Passlib 1.3 - 1.6. 
- """ - return self.hash(*args, **kwds) - - def verify(self, secret, hash, scheme=None, category=None, **kwds): - """verify secret against an existing hash. - - If no scheme is specified, this will attempt to identify - the scheme based on the contents of the provided hash - (limited to the schemes configured for this context). - It will then check whether the password verifies against the hash. - - :type secret: unicode or bytes - :arg secret: - the secret to verify - - :type hash: unicode or bytes - :arg hash: - hash string to compare to - - if ``None`` is passed in, this will be treated as "never verifying" - - :type scheme: str - :param scheme: - Optionally force context to use specific scheme. - This is usually not needed, as most hashes can be unambiguously - identified. Scheme must be one of the ones configured - for this context - (see the :ref:`schemes ` option). - - .. deprecated:: 1.7 - - Support for this keyword is deprecated, and will be removed in Passlib 2.0. - - :type category: str or None - :param category: - Optional :ref:`user category ` string. - This is mainly used when generating new hashes, it has little - effect when verifying; this keyword is mainly provided for symmetry. - - :param \*\*kwds: - All additional keywords are passed to the appropriate handler, - and should match its :attr:`~passlib.ifc.PasswordHash.context_kwds`. - - :returns: - ``True`` if the password matched the hash, else ``False``. - - :raises ValueError: - * if the hash did not match any of the configured :meth:`schemes`. - - * if any of the arguments have an invalid value (this includes - any keywords passed to the underlying hash's - :meth:`PasswordHash.verify() ` method). - - :raises TypeError: - * if any of the arguments have an invalid type (this includes - any keywords passed to the underlying hash's - :meth:`PasswordHash.verify() ` method). - - .. 
seealso:: the :ref:`context-basic-example` example in the tutorial - """ - # XXX: could insert normalization to preferred unicode encoding here - # XXX: what about supporting a setter() callback ala django 1.4 ? - if scheme is not None: - # TODO: offer replacement alternative. - # ``context.handler(scheme).verify()`` would work, - # but may deprecate .handler() in passlib 1.8. - warn("CryptContext.verify(): 'scheme' keyword is deprecated as of " - "Passlib 1.7, and will be removed in Passlib 2.0", - DeprecationWarning) - if hash is None: - # convenience feature -- let apps pass in hash=None when user - # isn't found / has no hash; useful because it invokes dummy_verify() - self.dummy_verify() - return False - record = self._get_or_identify_record(hash, scheme, category) - strip_unused = self._strip_unused_context_kwds - if strip_unused: - strip_unused(kwds, record) - return record.verify(secret, hash, **kwds) - - def verify_and_update(self, secret, hash, scheme=None, category=None, **kwds): - """verify password and re-hash the password if needed, all in a single call. - - This is a convenience method which takes care of all the following: - first it verifies the password (:meth:`~CryptContext.verify`), if this is successfull - it checks if the hash needs updating (:meth:`~CryptContext.needs_update`), and if so, - re-hashes the password (:meth:`~CryptContext.hash`), returning the replacement hash. - This series of steps is a very common task for applications - which wish to update deprecated hashes, and this call takes - care of all 3 steps efficiently. - - :type secret: unicode or bytes - :arg secret: - the secret to verify - - :type secret: unicode or bytes - :arg hash: - hash string to compare to. - - if ``None`` is passed in, this will be treated as "never verifying" - - :type scheme: str - :param scheme: - Optionally force context to use specific scheme. - This is usually not needed, as most hashes can be unambiguously - identified. 
Scheme must be one of the ones configured - for this context - (see the :ref:`schemes ` option). - - .. deprecated:: 1.7 - - Support for this keyword is deprecated, and will be removed in Passlib 2.0. - - :type category: str or None - :param category: - Optional :ref:`user category `. - If specified, this will cause any category-specific defaults to - be used if the password has to be re-hashed. - - :param \*\*kwds: - all additional keywords are passed to the appropriate handler, - and should match that hash's - :attr:`PasswordHash.context_kwds `. - - :returns: - This function returns a tuple containing two elements: - ``(verified, replacement_hash)``. The first is a boolean - flag indicating whether the password verified, - and the second an optional replacement hash. - The tuple will always match one of the following 3 cases: - - * ``(False, None)`` indicates the secret failed to verify. - * ``(True, None)`` indicates the secret verified correctly, - and the hash does not need updating. - * ``(True, str)`` indicates the secret verified correctly, - but the current hash needs to be updated. The :class:`!str` - will be the freshly generated hash, to replace the old one. - - :raises TypeError, ValueError: - For the same reasons as :meth:`verify`. - - .. seealso:: the :ref:`context-migration-example` example in the tutorial. - """ - # XXX: could insert normalization to preferred unicode encoding here. 
- if scheme is not None: - warn("CryptContext.verify(): 'scheme' keyword is deprecated as of " - "Passlib 1.7, and will be removed in Passlib 2.0", - DeprecationWarning) - if hash is None: - # convenience feature -- let apps pass in hash=None when user - # isn't found / has no hash; useful because it invokes dummy_verify() - self.dummy_verify() - return False, None - record = self._get_or_identify_record(hash, scheme, category) - strip_unused = self._strip_unused_context_kwds - if strip_unused and kwds: - clean_kwds = kwds.copy() - strip_unused(clean_kwds, record) - else: - clean_kwds = kwds - # XXX: if record is default scheme, could extend PasswordHash - # api to combine verify & needs_update to single call, - # potentially saving some round-trip parsing. - # but might make these codepaths more complex... - if not record.verify(secret, hash, **clean_kwds): - return False, None - elif record.deprecated or record.needs_update(hash, secret=secret): - # NOTE: we re-hash with default scheme, not current one. - return True, self.hash(secret, category=category, **kwds) - else: - return True, None - - #=================================================================== - # missing-user helper - #=================================================================== - - #: secret used for dummy_verify() - _dummy_secret = "too many secrets" - - @memoized_property - def _dummy_hash(self): - """ - precalculated hash for dummy_verify() to use - """ - return self.hash(self._dummy_secret) - - def _reset_dummy_verify(self): - """ - flush memoized values used by dummy_verify() - """ - type(self)._dummy_hash.clear_cache(self) - - def dummy_verify(self, elapsed=0): - """ - Helper that applications can call when user wasn't found, - in order to simulate time it would take to hash a password. - - Runs verify() against a dummy hash, to simulate verification - of a real account password. - - :param elapsed: - - .. 
deprecated:: 1.7.1 - - this option is ignored, and will be removed in passlib 1.8. - - .. versionadded:: 1.7 - """ - self.verify(self._dummy_secret, self._dummy_hash) - return False - - #=================================================================== - # disabled hash support - #=================================================================== - - def is_enabled(self, hash): - """ - test if hash represents a usuable password -- - i.e. does not represent an unusuable password such as ``"!"``, - which is recognized by the :class:`~passlib.hash.unix_disabled` hash. - - :raises ValueError: - if the hash is not recognized - (typically solved by adding ``unix_disabled`` to the list of schemes). - """ - return not self._identify_record(hash, None).is_disabled - - def disable(self, hash=None): - """ - return a string to disable logins for user, - usually by returning a non-verifying string such as ``"!"``. - - :param hash: - Callers can optionally provide the account's existing hash. - Some disabled handlers (such as :class:`!unix_disabled`) - will encode this into the returned value, - so that it can be recovered via :meth:`enable`. - - :raises RuntimeError: - if this function is called w/o a disabled hasher - (such as :class:`~passlib.hash.unix_disabled`) included - in the list of schemes. - - :returns: - hash string which will be recognized as valid by the context, - but is guaranteed to not validate against *any* password. - """ - record = self._config.disabled_record - assert record.is_disabled - return record.disable(hash) - - def enable(self, hash): - """ - inverse of :meth:`disable` -- - attempts to recover original hash which was converted - by a :meth:`!disable` call into a disabled hash -- - thus restoring the user's original password. - - :raises ValueError: - if original hash not present, or if the disabled handler doesn't - support encoding the original hash (e.g. ``django_disabled``) - - :returns: - the original hash. 
- """ - record = self._identify_record(hash, None) - if record.is_disabled: - # XXX: should we throw error if result can't be identified by context? - return record.enable(hash) - else: - # hash wasn't a disabled hash, so return unchanged - return hash - - #=================================================================== - # eoc - #=================================================================== - -class LazyCryptContext(CryptContext): - """CryptContext subclass which doesn't load handlers until needed. - - This is a subclass of CryptContext which takes in a set of arguments - exactly like CryptContext, but won't import any handlers - (or even parse its arguments) until - the first time one of its methods is accessed. - - :arg schemes: - The first positional argument can be a list of schemes, or omitted, - just like CryptContext. - - :param onload: - - If a callable is passed in via this keyword, - it will be invoked at lazy-load time - with the following signature: - ``onload(**kwds) -> kwds``; - where ``kwds`` is all the additional kwds passed to LazyCryptContext. - It should perform any additional deferred initialization, - and return the final dict of options to be passed to CryptContext. - - .. versionadded:: 1.6 - - :param create_policy: - - .. deprecated:: 1.6 - This option will be removed in Passlib 1.8, - applications should use ``onload`` instead. - - :param kwds: - - All additional keywords are passed to CryptContext; - or to the *onload* function (if provided). - - This is mainly used internally by modules such as :mod:`passlib.apps`, - which define a large number of contexts, but only a few of them will be needed - at any one time. Use of this class saves the memory needed to import - the specified handlers until the context instance is actually accessed. - As well, it allows constructing a context at *module-init* time, - but using :func:`!onload()` to provide dynamic configuration - at *application-run* time. - - .. 
note:: - This class is only useful if you're referencing handler objects by name, - and don't want them imported until runtime. If you want to have the config - validated before your application runs, or are passing in already-imported - handler instances, you should use :class:`CryptContext` instead. - - .. versionadded:: 1.4 - """ - _lazy_kwds = None - - # NOTE: the way this class works changed in 1.6. - # previously it just called _lazy_init() when ``.policy`` was - # first accessed. now that is done whenever any of the public - # attributes are accessed, and the class itself is changed - # to a regular CryptContext, to remove the overhead once it's unneeded. - - def __init__(self, schemes=None, **kwds): - if schemes is not None: - kwds['schemes'] = schemes - self._lazy_kwds = kwds - - def _lazy_init(self): - kwds = self._lazy_kwds - if 'create_policy' in kwds: - warn("The CryptPolicy class, and LazyCryptContext's " - "``create_policy`` keyword have been deprecated as of " - "Passlib 1.6, and will be removed in Passlib 1.8; " - "please use the ``onload`` keyword instead.", - DeprecationWarning) - create_policy = kwds.pop("create_policy") - result = create_policy(**kwds) - policy = CryptPolicy.from_source(result, _warn=False) - kwds = policy._context.to_dict() - elif 'onload' in kwds: - onload = kwds.pop("onload") - kwds = onload(**kwds) - del self._lazy_kwds - super(LazyCryptContext, self).__init__(**kwds) - self.__class__ = CryptContext - - def __getattribute__(self, attr): - if (not attr.startswith("_") or attr.startswith("__")) and \ - self._lazy_kwds is not None: - self._lazy_init() - return object.__getattribute__(self, attr) - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/crypto/__init__.py b/libs_crutch/contrib/passlib/crypto/__init__.py deleted file mode 100644 index 89f5484..0000000 --- 
a/libs_crutch/contrib/passlib/crypto/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""passlib.crypto -- package containing cryptographic primitives used by passlib""" diff --git a/libs_crutch/contrib/passlib/crypto/_blowfish/__init__.py b/libs_crutch/contrib/passlib/crypto/_blowfish/__init__.py deleted file mode 100644 index 1aa1c85..0000000 --- a/libs_crutch/contrib/passlib/crypto/_blowfish/__init__.py +++ /dev/null @@ -1,169 +0,0 @@ -"""passlib.crypto._blowfish - pure-python eks-blowfish implementation for bcrypt - -This is a pure-python implementation of the EKS-Blowfish algorithm described by -Provos and Mazieres in `A Future-Adaptable Password Scheme -`_. - -This package contains two submodules: - -* ``_blowfish/base.py`` contains a class implementing the eks-blowfish algorithm - using easy-to-examine code. - -* ``_blowfish/unrolled.py`` contains a subclass which replaces some methods - of the original class with sped-up versions, mainly using unrolled loops - and local variables. this is the class which is actually used by - Passlib to perform BCrypt in pure python. - - This module is auto-generated by a script, ``_blowfish/_gen_files.py``. - -Status ------- -This implementation is usable, but is an order of magnitude too slow to be -usable with real security. For "ok" security, BCrypt hashes should have at -least 2**11 rounds (as of 2011). Assuming a desired response time <= 100ms, -this means a BCrypt implementation should get at least 20 rounds/ms in order -to be both usable *and* secure. On a 2 ghz cpu, this implementation gets -roughly 0.09 rounds/ms under CPython (220x too slow), and 1.9 rounds/ms -under PyPy (10x too slow). 
- -History -------- -While subsequently modified considerly for Passlib, this code was originally -based on `jBcrypt 0.2 `_, which was -released under the BSD license:: - - Copyright (c) 2006 Damien Miller - - Permission to use, copy, modify, and distribute this software for any - purpose with or without fee is hereby granted, provided that the above - copyright notice and this permission notice appear in all copies. - - THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -#============================================================================= -# imports -#============================================================================= -# core -from itertools import chain -import struct -# pkg -from passlib.utils import getrandbytes, rng -from passlib.utils.binary import bcrypt64 -from passlib.utils.compat import BytesIO, unicode, u, native_string_types -from passlib.crypto._blowfish.unrolled import BlowfishEngine -# local -__all__ = [ - 'BlowfishEngine', - 'raw_bcrypt', -] - -#============================================================================= -# bcrypt constants -#============================================================================= - -# bcrypt constant data "OrpheanBeholderScryDoubt" as 6 integers -BCRYPT_CDATA = [ - 0x4f727068, 0x65616e42, 0x65686f6c, - 0x64657253, 0x63727944, 0x6f756274 -] - -# struct used to encode ciphertext as digest (last output byte discarded) -digest_struct = struct.Struct(">6I") - -#============================================================================= -# base 
bcrypt helper -# -# interface designed only for use by passlib.handlers.bcrypt:BCrypt -# probably not suitable for other purposes -#============================================================================= -BNULL = b'\x00' - -def raw_bcrypt(password, ident, salt, log_rounds): - """perform central password hashing step in bcrypt scheme. - - :param password: the password to hash - :param ident: identifier w/ minor version (e.g. 2, 2a) - :param salt: the binary salt to use (encoded in bcrypt-base64) - :param log_rounds: the log2 of the number of rounds (as int) - :returns: bcrypt-base64 encoded checksum - """ - #=================================================================== - # parse inputs - #=================================================================== - - # parse ident - assert isinstance(ident, native_string_types) - add_null_padding = True - if ident == u('2a') or ident == u('2y') or ident == u('2b'): - pass - elif ident == u('2'): - add_null_padding = False - elif ident == u('2x'): - raise ValueError("crypt_blowfish's buggy '2x' hashes are not " - "currently supported") - else: - raise ValueError("unknown ident: %r" % (ident,)) - - # decode & validate salt - assert isinstance(salt, bytes) - salt = bcrypt64.decode_bytes(salt) - if len(salt) < 16: - raise ValueError("Missing salt bytes") - elif len(salt) > 16: - salt = salt[:16] - - # prepare password - assert isinstance(password, bytes) - if add_null_padding: - password += BNULL - - # validate rounds - if log_rounds < 4 or log_rounds > 31: - raise ValueError("Bad number of rounds") - - #=================================================================== - # - # run EKS-Blowfish algorithm - # - # This uses the "enhanced key schedule" step described by - # Provos and Mazieres in "A Future-Adaptable Password Scheme" - # http://www.openbsd.org/papers/bcrypt-paper.ps - # - #=================================================================== - - engine = BlowfishEngine() - - # convert password & salt 
into list of 18 32-bit integers (72 bytes total). - pass_words = engine.key_to_words(password) - salt_words = engine.key_to_words(salt) - - # truncate salt_words to original 16 byte salt, or loop won't wrap - # correctly when passed to .eks_salted_expand() - salt_words16 = salt_words[:4] - - # do EKS key schedule setup - engine.eks_salted_expand(pass_words, salt_words16) - - # apply password & salt keys to key schedule a bunch more times. - rounds = 1<> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) -""".strip() - -def render_encipher(write, indent=0): - for i in irange(0, 15, 2): - write(indent, """\ - # Feistel substitution on left word (round %(i)d) - r ^= %(left)s ^ p%(i1)d - - # Feistel substitution on right word (round %(i1)d) - l ^= %(right)s ^ p%(i2)d - """, i=i, i1=i+1, i2=i+2, - left=BFSTR, right=BFSTR.replace("l","r"), - ) - -def write_encipher_function(write, indent=0): - write(indent, """\ - def encipher(self, l, r): - \"""blowfish encipher a single 64-bit block encoded as two 32-bit ints\""" - - (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, - p10, p11, p12, p13, p14, p15, p16, p17) = self.P - S0, S1, S2, S3 = self.S - - l ^= p0 - - """) - render_encipher(write, indent+1) - - write(indent+1, """\ - - return r ^ p17, l - - """) - -def write_expand_function(write, indent=0): - write(indent, """\ - def expand(self, key_words): - \"""unrolled version of blowfish key expansion\""" - ##assert len(key_words) >= 18, "size of key_words must be >= 18" - - P, S = self.P, self.S - S0, S1, S2, S3 = S - - #============================================================= - # integrate key - #============================================================= - """) - for i in irange(18): - write(indent+1, """\ - p%(i)d = P[%(i)d] ^ key_words[%(i)d] - """, i=i) - write(indent+1, """\ - - #============================================================= - # update P - #============================================================= - - 
#------------------------------------------------ - # update P[0] and P[1] - #------------------------------------------------ - l, r = p0, 0 - - """) - - render_encipher(write, indent+1) - - write(indent+1, """\ - - p0, p1 = l, r = r ^ p17, l - - """) - - for i in irange(2, 18, 2): - write(indent+1, """\ - #------------------------------------------------ - # update P[%(i)d] and P[%(i1)d] - #------------------------------------------------ - l ^= p0 - - """, i=i, i1=i+1) - - render_encipher(write, indent+1) - - write(indent+1, """\ - p%(i)d, p%(i1)d = l, r = r ^ p17, l - - """, i=i, i1=i+1) - - write(indent+1, """\ - - #------------------------------------------------ - # save changes to original P array - #------------------------------------------------ - P[:] = (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, - p10, p11, p12, p13, p14, p15, p16, p17) - - #============================================================= - # update S - #============================================================= - - for box in S: - j = 0 - while j < 256: - l ^= p0 - - """) - - render_encipher(write, indent+3) - - write(indent+3, """\ - - box[j], box[j+1] = l, r = r ^ p17, l - j += 2 - """) - -#============================================================================= -# main -#============================================================================= - -def main(): - target = os.path.join(os.path.dirname(__file__), "unrolled.py") - fh = file(target, "w") - - def write(indent, msg, **kwds): - literal = kwds.pop("literal", False) - if kwds: - msg %= kwds - if not literal: - msg = textwrap.dedent(msg.rstrip(" ")) - if indent: - msg = indent_block(msg, " " * (indent*4)) - fh.write(msg) - - write(0, """\ - \"""passlib.crypto._blowfish.unrolled - unrolled loop implementation of bcrypt, - autogenerated by _gen_files.py - - currently this override the encipher() and expand() methods - with optimized versions, and leaves the other base.py methods alone. 
- \""" - #================================================================= - # imports - #================================================================= - # pkg - from passlib.crypto._blowfish.base import BlowfishEngine as _BlowfishEngine - # local - __all__ = [ - "BlowfishEngine", - ] - #================================================================= - # - #================================================================= - class BlowfishEngine(_BlowfishEngine): - - """) - - write_encipher_function(write, indent=1) - write_expand_function(write, indent=1) - - write(0, """\ - #================================================================= - # eoc - #================================================================= - - #================================================================= - # eof - #================================================================= - """) - -if __name__ == "__main__": - main() - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/crypto/_blowfish/base.py b/libs_crutch/contrib/passlib/crypto/_blowfish/base.py deleted file mode 100644 index 7b4f2cb..0000000 --- a/libs_crutch/contrib/passlib/crypto/_blowfish/base.py +++ /dev/null @@ -1,441 +0,0 @@ -"""passlib.crypto._blowfish.base - unoptimized pure-python blowfish engine""" -#============================================================================= -# imports -#============================================================================= -# core -import struct -# pkg -from passlib.utils import repeat_string -# local -__all__ = [ - "BlowfishEngine", -] - -#============================================================================= -# blowfish constants -#============================================================================= -BLOWFISH_P = BLOWFISH_S = None - -def _init_constants(): - global BLOWFISH_P, BLOWFISH_S - - 
# NOTE: blowfish's spec states these numbers are the hex representation - # of the fractional portion of PI, in order. - - # Initial contents of key schedule - 18 integers - BLOWFISH_P = [ - 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, - 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, - 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, - 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, - 0x9216d5d9, 0x8979fb1b, - ] - - # all 4 blowfish S boxes in one array - 256 integers per S box - BLOWFISH_S = [ - # sbox 1 - [ - 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, - 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, - 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, - 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, - 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, - 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, - 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, - 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, - 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, - 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, - 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, - 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, - 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, - 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, - 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, - 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, - 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, - 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, - 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, - 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, - 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, - 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, - 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, - 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, - 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, - 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, - 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, - 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, - 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, - 0xc1a94fb6, 0x409f60c4, 
0x5e5c9ec2, 0x196a2463, - 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, - 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, - 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, - 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, - 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, - 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, - 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, - 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, - 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, - 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, - 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, - 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, - 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, - 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, - 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, - 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, - 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, - 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, - 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, - 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, - 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, - 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, - 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, - 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, - 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, - 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, - 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, - 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, - 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, - 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, - 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, - 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, - 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, - 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a, - ], - # sbox 2 - [ - 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, - 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, - 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, - 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, - 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, - 
0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, - 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, - 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, - 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, - 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, - 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, - 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, - 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, - 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, - 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, - 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, - 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, - 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, - 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, - 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, - 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, - 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, - 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, - 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, - 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, - 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, - 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, - 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, - 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, - 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, - 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, - 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, - 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, - 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, - 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, - 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, - 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, - 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, - 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, - 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, - 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, - 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, - 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, - 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, - 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, - 
0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, - 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, - 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, - 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, - 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, - 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, - 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, - 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, - 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, - 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, - 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, - 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, - 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, - 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, - 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, - 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, - 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, - 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, - 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7, - ], - # sbox 3 - [ - 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, - 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, - 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, - 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, - 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, - 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, - 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, - 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, - 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, - 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, - 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, - 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, - 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, - 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, - 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, - 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, - 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, - 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, - 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, - 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, - 0x07f9c9ee, 0x41041f0f, 
0x404779a4, 0x5d886e17, - 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, - 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, - 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, - 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, - 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, - 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, - 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, - 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, - 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, - 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, - 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, - 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, - 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, - 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, - 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, - 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, - 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, - 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, - 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, - 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, - 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, - 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, - 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, - 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, - 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, - 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, - 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, - 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, - 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, - 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, - 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, - 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, - 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, - 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, - 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, - 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, - 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, - 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, - 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, - 0xed545578, 0x08fca5b5, 
0xd83d7cd3, 0x4dad0fc4, - 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, - 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, - 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0, - ], - # sbox 4 - [ - 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, - 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, - 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, - 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, - 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, - 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, - 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, - 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, - 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, - 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, - 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, - 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, - 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, - 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, - 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, - 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, - 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, - 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, - 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, - 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, - 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, - 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, - 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, - 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, - 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, - 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, - 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, - 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, - 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, - 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, - 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, - 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, - 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, - 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, - 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, - 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, - 
0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, - 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, - 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, - 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, - 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, - 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, - 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, - 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, - 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, - 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, - 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, - 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, - 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, - 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, - 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, - 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, - 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, - 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, - 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, - 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, - 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, - 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, - 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, - 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, - 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, - 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, - 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, - 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6, - ] - ] - -#============================================================================= -# engine -#============================================================================= -class BlowfishEngine(object): - - def __init__(self): - if BLOWFISH_P is None: - _init_constants() - self.P = list(BLOWFISH_P) - self.S = [ list(box) for box in BLOWFISH_S ] - - #=================================================================== - # common helpers - #=================================================================== - @staticmethod - def key_to_words(data, size=18): - """convert data to tuple of 
4-byte integers, repeating or - truncating data as needed to reach specified size""" - assert isinstance(data, bytes) - dlen = len(data) - if not dlen: - # return all zeros - original C code would just read the NUL after - # the password, so mimicing that behavior for this edge case. - return [0]*size - - # repeat data until it fills up 4*size bytes - data = repeat_string(data, size<<2) - - # unpack - return struct.unpack(">%dI" % (size,), data) - - #=================================================================== - # blowfish routines - #=================================================================== - def encipher(self, l, r): - """loop version of blowfish encipher routine""" - P, S = self.P, self.S - l ^= P[0] - i = 1 - while i < 17: - # Feistel substitution on left word - r = ((((S[0][l >> 24] + S[1][(l >> 16) & 0xff]) ^ S[2][(l >> 8) & 0xff]) + - S[3][l & 0xff]) & 0xffffffff) ^ P[i] ^ r - # swap vars so even rounds do Feistel substition on right word - l, r = r, l - i += 1 - return r ^ P[17], l - - # NOTE: decipher is same as above, just with reversed(P) instead. - - def expand(self, key_words): - """perform stock Blowfish keyschedule setup""" - assert len(key_words) >= 18, "key_words must be at least as large as P" - P, S, encipher = self.P, self.S, self.encipher - - i = 0 - while i < 18: - P[i] ^= key_words[i] - i += 1 - - i = l = r = 0 - while i < 18: - P[i], P[i+1] = l,r = encipher(l,r) - i += 2 - - for box in S: - i = 0 - while i < 256: - box[i], box[i+1] = l,r = encipher(l,r) - i += 2 - - #=================================================================== - # eks-blowfish routines - #=================================================================== - def eks_salted_expand(self, key_words, salt_words): - """perform EKS' salted version of Blowfish keyschedule setup""" - # NOTE: this is the same as expand(), except for the addition - # of the operations involving *salt_words*. 
- - assert len(key_words) >= 18, "key_words must be at least as large as P" - salt_size = len(salt_words) - assert salt_size, "salt_words must not be empty" - assert not salt_size & 1, "salt_words must have even length" - P, S, encipher = self.P, self.S, self.encipher - - i = 0 - while i < 18: - P[i] ^= key_words[i] - i += 1 - - s = i = l = r = 0 - while i < 18: - l ^= salt_words[s] - r ^= salt_words[s+1] - s += 2 - if s == salt_size: - s = 0 - P[i], P[i+1] = l,r = encipher(l,r) # next() - i += 2 - - for box in S: - i = 0 - while i < 256: - l ^= salt_words[s] - r ^= salt_words[s+1] - s += 2 - if s == salt_size: - s = 0 - box[i], box[i+1] = l,r = encipher(l,r) # next() - i += 2 - - def eks_repeated_expand(self, key_words, salt_words, rounds): - """perform rounds stage of EKS keyschedule setup""" - expand = self.expand - n = 0 - while n < rounds: - expand(key_words) - expand(salt_words) - n += 1 - - def repeat_encipher(self, l, r, count): - """repeatedly apply encipher operation to a block""" - encipher = self.encipher - n = 0 - while n < count: - l, r = encipher(l, r) - n += 1 - return l, r - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/crypto/_blowfish/unrolled.py b/libs_crutch/contrib/passlib/crypto/_blowfish/unrolled.py deleted file mode 100644 index 4acf6e1..0000000 --- a/libs_crutch/contrib/passlib/crypto/_blowfish/unrolled.py +++ /dev/null @@ -1,771 +0,0 @@ -"""passlib.crypto._blowfish.unrolled - unrolled loop implementation of bcrypt, -autogenerated by _gen_files.py - -currently this override the encipher() and expand() methods -with optimized versions, and leaves the other base.py methods alone. 
-""" -#============================================================================= -# imports -#============================================================================= -# pkg -from passlib.crypto._blowfish.base import BlowfishEngine as _BlowfishEngine -# local -__all__ = [ - "BlowfishEngine", -] -#============================================================================= -# -#============================================================================= -class BlowfishEngine(_BlowfishEngine): - - def encipher(self, l, r): - """blowfish encipher a single 64-bit block encoded as two 32-bit ints""" - - (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, - p10, p11, p12, p13, p14, p15, p16, p17) = self.P - S0, S1, S2, S3 = self.S - - l ^= p0 - - # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ 
p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word (round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - - return r ^ p17, l - - def expand(self, key_words): - """unrolled version of blowfish key expansion""" - ##assert len(key_words) >= 18, "size of key_words must be >= 18" - - P, S = self.P, self.S - S0, S1, S2, S3 = S - - #============================================================= - # integrate key - #============================================================= - p0 = P[0] ^ key_words[0] - p1 = P[1] ^ key_words[1] - p2 = P[2] ^ key_words[2] - p3 = P[3] ^ key_words[3] - p4 = P[4] ^ key_words[4] - p5 = P[5] ^ key_words[5] - p6 = P[6] ^ key_words[6] - p7 = P[7] ^ key_words[7] - p8 = P[8] ^ key_words[8] - p9 = P[9] ^ key_words[9] - p10 = P[10] ^ key_words[10] - p11 = P[11] ^ key_words[11] - p12 = P[12] ^ key_words[12] - 
p13 = P[13] ^ key_words[13] - p14 = P[14] ^ key_words[14] - p15 = P[15] ^ key_words[15] - p16 = P[16] ^ key_words[16] - p17 = P[17] ^ key_words[17] - - #============================================================= - # update P - #============================================================= - - #------------------------------------------------ - # update P[0] and P[1] - #------------------------------------------------ - l, r = p0, 0 - - # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 
0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word (round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - - p0, p1 = l, r = r ^ p17, l - - #------------------------------------------------ - # update P[2] and P[3] - #------------------------------------------------ - l ^= p0 - - # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + 
S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word (round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - p2, p3 = l, r = r ^ p17, l - - #------------------------------------------------ - # update P[4] and P[5] - #------------------------------------------------ - l ^= p0 - - # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # 
Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word (round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 
0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - p4, p5 = l, r = r ^ p17, l - - #------------------------------------------------ - # update P[6] and P[7] - #------------------------------------------------ - l ^= p0 - - # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= 
((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word (round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - p6, p7 = l, r = r ^ p17, l - - #------------------------------------------------ - # update P[8] and P[9] - #------------------------------------------------ - l ^= p0 - - # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) 
^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word (round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - p8, p9 = l, r = r ^ p17, l - - #------------------------------------------------ - # update P[10] and P[11] - #------------------------------------------------ - l ^= p0 - - # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 
16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word 
(round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - p10, p11 = l, r = r ^ p17, l - - #------------------------------------------------ - # update P[12] and P[13] - #------------------------------------------------ - l ^= p0 - - # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) 
& 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word (round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - p12, p13 = l, r = r ^ p17, l - - #------------------------------------------------ - # update P[14] and P[15] - #------------------------------------------------ - l ^= p0 - - # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 
24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word (round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - p14, p15 = l, r = r ^ p17, l - - #------------------------------------------------ - # update P[16] and P[17] - #------------------------------------------------ - l ^= p0 - 
- # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 
0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word (round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - p16, p17 = l, r = r ^ p17, l - - - #------------------------------------------------ - # save changes to original P array - #------------------------------------------------ - P[:] = (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, - p10, p11, p12, p13, p14, p15, p16, p17) - - #============================================================= - # update S - #============================================================= - - for box in S: - j = 0 - while j < 256: - l ^= p0 - - # Feistel substitution on left word (round 0) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p1 - - # Feistel substitution on right word (round 1) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p2 - # Feistel substitution on left word (round 2) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p3 - - # Feistel substitution on right word (round 3) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p4 - # Feistel substitution on left word (round 4) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p5 - - # Feistel substitution on right word (round 5) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p6 - # Feistel substitution on left word (round 6) - r ^= ((((S0[l >> 24] 
+ S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p7 - - # Feistel substitution on right word (round 7) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p8 - # Feistel substitution on left word (round 8) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p9 - - # Feistel substitution on right word (round 9) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p10 - # Feistel substitution on left word (round 10) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p11 - - # Feistel substitution on right word (round 11) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p12 - # Feistel substitution on left word (round 12) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p13 - - # Feistel substitution on right word (round 13) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p14 - # Feistel substitution on left word (round 14) - r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + - S3[l & 0xff]) & 0xffffffff) ^ p15 - - # Feistel substitution on right word (round 15) - l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + - S3[r & 0xff]) & 0xffffffff) ^ p16 - - box[j], box[j+1] = l, r = r ^ p17, l - j += 2 - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/crypto/_md4.py b/libs_crutch/contrib/passlib/crypto/_md4.py deleted file mode 
100644 index bdc211f..0000000 --- a/libs_crutch/contrib/passlib/crypto/_md4.py +++ /dev/null @@ -1,244 +0,0 @@ -""" -passlib.crypto._md4 -- fallback implementation of MD4 - -Helper implementing insecure and obsolete md4 algorithm. -used for NTHASH format, which is also insecure and broken, -since it's just md4(password). - -Implementated based on rfc at http://www.faqs.org/rfcs/rfc1320.html - -.. note:: - - This shouldn't be imported directly, it's merely used conditionally - by ``passlib.crypto.lookup_hash()`` when a native implementation can't be found. -""" - -#============================================================================= -# imports -#============================================================================= -# core -from binascii import hexlify -import struct -# site -from passlib.utils.compat import bascii_to_str, irange, PY3 -# local -__all__ = ["md4"] - -#============================================================================= -# utils -#============================================================================= -def F(x,y,z): - return (x&y) | ((~x) & z) - -def G(x,y,z): - return (x&y) | (x&z) | (y&z) - -##def H(x,y,z): -## return x ^ y ^ z - -MASK_32 = 2**32-1 - -#============================================================================= -# main class -#============================================================================= -class md4(object): - """pep-247 compatible implementation of MD4 hash algorithm - - .. attribute:: digest_size - - size of md4 digest in bytes (16 bytes) - - .. method:: update - - update digest by appending additional content - - .. method:: copy - - create clone of digest object, including current state - - .. method:: digest - - return bytes representing md4 digest of current content - - .. method:: hexdigest - - return hexadecimal version of digest - """ - # FIXME: make this follow hash object PEP better. 
- # FIXME: this isn't threadsafe - - name = "md4" - digest_size = digestsize = 16 - block_size = 64 - - _count = 0 # number of 64-byte blocks processed so far (not including _buf) - _state = None # list of [a,b,c,d] 32 bit ints used as internal register - _buf = None # data processed in 64 byte blocks, this holds leftover from last update - - def __init__(self, content=None): - self._count = 0 - self._state = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476] - self._buf = b'' - if content: - self.update(content) - - # round 1 table - [abcd k s] - _round1 = [ - [0,1,2,3, 0,3], - [3,0,1,2, 1,7], - [2,3,0,1, 2,11], - [1,2,3,0, 3,19], - - [0,1,2,3, 4,3], - [3,0,1,2, 5,7], - [2,3,0,1, 6,11], - [1,2,3,0, 7,19], - - [0,1,2,3, 8,3], - [3,0,1,2, 9,7], - [2,3,0,1, 10,11], - [1,2,3,0, 11,19], - - [0,1,2,3, 12,3], - [3,0,1,2, 13,7], - [2,3,0,1, 14,11], - [1,2,3,0, 15,19], - ] - - # round 2 table - [abcd k s] - _round2 = [ - [0,1,2,3, 0,3], - [3,0,1,2, 4,5], - [2,3,0,1, 8,9], - [1,2,3,0, 12,13], - - [0,1,2,3, 1,3], - [3,0,1,2, 5,5], - [2,3,0,1, 9,9], - [1,2,3,0, 13,13], - - [0,1,2,3, 2,3], - [3,0,1,2, 6,5], - [2,3,0,1, 10,9], - [1,2,3,0, 14,13], - - [0,1,2,3, 3,3], - [3,0,1,2, 7,5], - [2,3,0,1, 11,9], - [1,2,3,0, 15,13], - ] - - # round 3 table - [abcd k s] - _round3 = [ - [0,1,2,3, 0,3], - [3,0,1,2, 8,9], - [2,3,0,1, 4,11], - [1,2,3,0, 12,15], - - [0,1,2,3, 2,3], - [3,0,1,2, 10,9], - [2,3,0,1, 6,11], - [1,2,3,0, 14,15], - - [0,1,2,3, 1,3], - [3,0,1,2, 9,9], - [2,3,0,1, 5,11], - [1,2,3,0, 13,15], - - [0,1,2,3, 3,3], - [3,0,1,2, 11,9], - [2,3,0,1, 7,11], - [1,2,3,0, 15,15], - ] - - def _process(self, block): - """process 64 byte block""" - # unpack block into 16 32-bit ints - X = struct.unpack("<16I", block) - - # clone state - orig = self._state - state = list(orig) - - # round 1 - F function - (x&y)|(~x & z) - for a,b,c,d,k,s in self._round1: - t = (state[a] + F(state[b],state[c],state[d]) + X[k]) & MASK_32 - state[a] = ((t<>(32-s)) - - # round 2 - G function - for a,b,c,d,k,s 
in self._round2: - t = (state[a] + G(state[b],state[c],state[d]) + X[k] + 0x5a827999) & MASK_32 - state[a] = ((t<>(32-s)) - - # round 3 - H function - x ^ y ^ z - for a,b,c,d,k,s in self._round3: - t = (state[a] + (state[b] ^ state[c] ^ state[d]) + X[k] + 0x6ed9eba1) & MASK_32 - state[a] = ((t<>(32-s)) - - # add back into original state - for i in irange(4): - orig[i] = (orig[i]+state[i]) & MASK_32 - - def update(self, content): - if not isinstance(content, bytes): - if PY3: - raise TypeError("expected bytes") - else: - # replicate behavior of hashlib under py2 - content = content.encode("ascii") - buf = self._buf - if buf: - content = buf + content - idx = 0 - end = len(content) - while True: - next = idx + 64 - if next <= end: - self._process(content[idx:next]) - self._count += 1 - idx = next - else: - self._buf = content[idx:] - return - - def copy(self): - other = md4() - other._count = self._count - other._state = list(self._state) - other._buf = self._buf - return other - - def digest(self): - # NOTE: backing up state so we can restore it after _process is called, - # in case object is updated again (this is only attr altered by this method) - orig = list(self._state) - - # final block: buf + 0x80, - # then 0x00 padding until congruent w/ 56 mod 64 bytes - # then last 8 bytes = msg length in bits - buf = self._buf - msglen = self._count*512 + len(buf)*8 - block = buf + b'\x80' + b'\x00' * ((119-len(buf)) % 64) + \ - struct.pack("<2I", msglen & MASK_32, (msglen>>32) & MASK_32) - if len(block) == 128: - self._process(block[:64]) - self._process(block[64:]) - else: - assert len(block) == 64 - self._process(block) - - # render digest & restore un-finalized state - out = struct.pack("<4I", *self._state) - self._state = orig - return out - - def hexdigest(self): - return bascii_to_str(hexlify(self.digest())) - - #=================================================================== - # eoc - #=================================================================== - 
-#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/crypto/des.py b/libs_crutch/contrib/passlib/crypto/des.py deleted file mode 100644 index 3f87aef..0000000 --- a/libs_crutch/contrib/passlib/crypto/des.py +++ /dev/null @@ -1,848 +0,0 @@ -"""passlib.crypto.des -- DES block encryption routines - -History -======= -These routines (which have since been drastically modified for python) -are based on a Java implementation of the des-crypt algorithm, -found at ``_. - -The copyright & license for that source is as follows:: - - UnixCrypt.java 0.9 96/11/25 - Copyright (c) 1996 Aki Yoshida. All rights reserved. - Permission to use, copy, modify and distribute this software - for non-commercial or commercial purposes and without fee is - hereby granted provided that this copyright notice appears in - all copies. - - --- - - Unix crypt(3C) utility - @version 0.9, 11/25/96 - @author Aki Yoshida - - --- - - modified April 2001 - by Iris Van den Broeke, Daniel Deville - - --- - Unix Crypt. - Implements the one way cryptography used by Unix systems for - simple password protection. 
- @version $Id: UnixCrypt2.txt,v 1.1.1.1 2005/09/13 22:20:13 christos Exp $ - @author Greg Wilkins (gregw) - -The netbsd des-crypt implementation has some nice notes on how this all works - - http://fxr.googlebit.com/source/lib/libcrypt/crypt.c?v=NETBSD-CURRENT -""" - -# TODO: could use an accelerated C version of this module to speed up lmhash, -# des-crypt, and ext-des-crypt - -#============================================================================= -# imports -#============================================================================= -# core -import struct -# pkg -from passlib import exc -from passlib.utils.compat import join_byte_values, byte_elem_value, \ - irange, irange, int_types -# local -__all__ = [ - "expand_des_key", - "des_encrypt_block", -] - -#============================================================================= -# constants -#============================================================================= - -# masks/upper limits for various integer sizes -INT_24_MASK = 0xffffff -INT_56_MASK = 0xffffffffffffff -INT_64_MASK = 0xffffffffffffffff - -# mask to clear parity bits from 64-bit key -_KDATA_MASK = 0xfefefefefefefefe -_KPARITY_MASK = 0x0101010101010101 - -# mask used to setup key schedule -_KS_MASK = 0xfcfcfcfcffffffff - -#============================================================================= -# static DES tables -#============================================================================= - -# placeholders filled in by _load_tables() -PCXROT = IE3264 = SPE = CF6464 = None - -def _load_tables(): - """delay loading tables until they are actually needed""" - global PCXROT, IE3264, SPE, CF6464 - - #--------------------------------------------------------------- - # Initial key schedule permutation - # PC1ROT - bit reverse, then PC1, then Rotate, then PC2 - #--------------------------------------------------------------- - # NOTE: this was reordered from original table to make perm3264 logic simpler - PC1ROT=( - ( 
0x0000000000000000, 0x0000000000000000, 0x0000000000002000, 0x0000000000002000, - 0x0000000000000020, 0x0000000000000020, 0x0000000000002020, 0x0000000000002020, - 0x0000000000000400, 0x0000000000000400, 0x0000000000002400, 0x0000000000002400, - 0x0000000000000420, 0x0000000000000420, 0x0000000000002420, 0x0000000000002420, ), - ( 0x0000000000000000, 0x2000000000000000, 0x0000000400000000, 0x2000000400000000, - 0x0000800000000000, 0x2000800000000000, 0x0000800400000000, 0x2000800400000000, - 0x0008000000000000, 0x2008000000000000, 0x0008000400000000, 0x2008000400000000, - 0x0008800000000000, 0x2008800000000000, 0x0008800400000000, 0x2008800400000000, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000040, 0x0000000000000040, - 0x0000000020000000, 0x0000000020000000, 0x0000000020000040, 0x0000000020000040, - 0x0000000000200000, 0x0000000000200000, 0x0000000000200040, 0x0000000000200040, - 0x0000000020200000, 0x0000000020200000, 0x0000000020200040, 0x0000000020200040, ), - ( 0x0000000000000000, 0x0002000000000000, 0x0800000000000000, 0x0802000000000000, - 0x0100000000000000, 0x0102000000000000, 0x0900000000000000, 0x0902000000000000, - 0x4000000000000000, 0x4002000000000000, 0x4800000000000000, 0x4802000000000000, - 0x4100000000000000, 0x4102000000000000, 0x4900000000000000, 0x4902000000000000, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000040000, 0x0000000000040000, - 0x0000020000000000, 0x0000020000000000, 0x0000020000040000, 0x0000020000040000, - 0x0000000000000004, 0x0000000000000004, 0x0000000000040004, 0x0000000000040004, - 0x0000020000000004, 0x0000020000000004, 0x0000020000040004, 0x0000020000040004, ), - ( 0x0000000000000000, 0x0000400000000000, 0x0200000000000000, 0x0200400000000000, - 0x0080000000000000, 0x0080400000000000, 0x0280000000000000, 0x0280400000000000, - 0x0000008000000000, 0x0000408000000000, 0x0200008000000000, 0x0200408000000000, - 0x0080008000000000, 0x0080408000000000, 0x0280008000000000, 0x0280408000000000, ), - ( 
0x0000000000000000, 0x0000000000000000, 0x0000000010000000, 0x0000000010000000, - 0x0000000000001000, 0x0000000000001000, 0x0000000010001000, 0x0000000010001000, - 0x0000000040000000, 0x0000000040000000, 0x0000000050000000, 0x0000000050000000, - 0x0000000040001000, 0x0000000040001000, 0x0000000050001000, 0x0000000050001000, ), - ( 0x0000000000000000, 0x0000001000000000, 0x0000080000000000, 0x0000081000000000, - 0x1000000000000000, 0x1000001000000000, 0x1000080000000000, 0x1000081000000000, - 0x0004000000000000, 0x0004001000000000, 0x0004080000000000, 0x0004081000000000, - 0x1004000000000000, 0x1004001000000000, 0x1004080000000000, 0x1004081000000000, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000080, 0x0000000000000080, - 0x0000000000080000, 0x0000000000080000, 0x0000000000080080, 0x0000000000080080, - 0x0000000000800000, 0x0000000000800000, 0x0000000000800080, 0x0000000000800080, - 0x0000000000880000, 0x0000000000880000, 0x0000000000880080, 0x0000000000880080, ), - ( 0x0000000000000000, 0x0000000008000000, 0x0000002000000000, 0x0000002008000000, - 0x0000100000000000, 0x0000100008000000, 0x0000102000000000, 0x0000102008000000, - 0x0000200000000000, 0x0000200008000000, 0x0000202000000000, 0x0000202008000000, - 0x0000300000000000, 0x0000300008000000, 0x0000302000000000, 0x0000302008000000, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000400000, 0x0000000000400000, - 0x0000000004000000, 0x0000000004000000, 0x0000000004400000, 0x0000000004400000, - 0x0000000000000800, 0x0000000000000800, 0x0000000000400800, 0x0000000000400800, - 0x0000000004000800, 0x0000000004000800, 0x0000000004400800, 0x0000000004400800, ), - ( 0x0000000000000000, 0x0000000000008000, 0x0040000000000000, 0x0040000000008000, - 0x0000004000000000, 0x0000004000008000, 0x0040004000000000, 0x0040004000008000, - 0x8000000000000000, 0x8000000000008000, 0x8040000000000000, 0x8040000000008000, - 0x8000004000000000, 0x8000004000008000, 0x8040004000000000, 0x8040004000008000, ), - ( 
0x0000000000000000, 0x0000000000000000, 0x0000000000004000, 0x0000000000004000, - 0x0000000000000008, 0x0000000000000008, 0x0000000000004008, 0x0000000000004008, - 0x0000000000000010, 0x0000000000000010, 0x0000000000004010, 0x0000000000004010, - 0x0000000000000018, 0x0000000000000018, 0x0000000000004018, 0x0000000000004018, ), - ( 0x0000000000000000, 0x0000000200000000, 0x0001000000000000, 0x0001000200000000, - 0x0400000000000000, 0x0400000200000000, 0x0401000000000000, 0x0401000200000000, - 0x0020000000000000, 0x0020000200000000, 0x0021000000000000, 0x0021000200000000, - 0x0420000000000000, 0x0420000200000000, 0x0421000000000000, 0x0421000200000000, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000010000000000, 0x0000010000000000, - 0x0000000100000000, 0x0000000100000000, 0x0000010100000000, 0x0000010100000000, - 0x0000000000100000, 0x0000000000100000, 0x0000010000100000, 0x0000010000100000, - 0x0000000100100000, 0x0000000100100000, 0x0000010100100000, 0x0000010100100000, ), - ( 0x0000000000000000, 0x0000000080000000, 0x0000040000000000, 0x0000040080000000, - 0x0010000000000000, 0x0010000080000000, 0x0010040000000000, 0x0010040080000000, - 0x0000000800000000, 0x0000000880000000, 0x0000040800000000, 0x0000040880000000, - 0x0010000800000000, 0x0010000880000000, 0x0010040800000000, 0x0010040880000000, ), - ) - #--------------------------------------------------------------- - # Subsequent key schedule rotation permutations - # PC2ROT - PC2 inverse, then Rotate, then PC2 - #--------------------------------------------------------------- - # NOTE: this was reordered from original table to make perm3264 logic simpler - PC2ROTA=( - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000200000, 0x0000000000200000, 0x0000000000200000, 0x0000000000200000, - 0x0000000004000000, 0x0000000004000000, 0x0000000004000000, 0x0000000004000000, - 0x0000000004200000, 0x0000000004200000, 0x0000000004200000, 0x0000000004200000, ), - ( 
0x0000000000000000, 0x0000000000000800, 0x0000010000000000, 0x0000010000000800, - 0x0000000000002000, 0x0000000000002800, 0x0000010000002000, 0x0000010000002800, - 0x0000000010000000, 0x0000000010000800, 0x0000010010000000, 0x0000010010000800, - 0x0000000010002000, 0x0000000010002800, 0x0000010010002000, 0x0000010010002800, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000100000000, 0x0000000100000000, 0x0000000100000000, 0x0000000100000000, - 0x0000000000800000, 0x0000000000800000, 0x0000000000800000, 0x0000000000800000, - 0x0000000100800000, 0x0000000100800000, 0x0000000100800000, 0x0000000100800000, ), - ( 0x0000000000000000, 0x0000020000000000, 0x0000000080000000, 0x0000020080000000, - 0x0000000000400000, 0x0000020000400000, 0x0000000080400000, 0x0000020080400000, - 0x0000000008000000, 0x0000020008000000, 0x0000000088000000, 0x0000020088000000, - 0x0000000008400000, 0x0000020008400000, 0x0000000088400000, 0x0000020088400000, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000040, 0x0000000000000040, 0x0000000000000040, 0x0000000000000040, - 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, - 0x0000000000001040, 0x0000000000001040, 0x0000000000001040, 0x0000000000001040, ), - ( 0x0000000000000000, 0x0000000000000010, 0x0000000000000400, 0x0000000000000410, - 0x0000000000000080, 0x0000000000000090, 0x0000000000000480, 0x0000000000000490, - 0x0000000040000000, 0x0000000040000010, 0x0000000040000400, 0x0000000040000410, - 0x0000000040000080, 0x0000000040000090, 0x0000000040000480, 0x0000000040000490, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, - 0x0000000000100000, 0x0000000000100000, 0x0000000000100000, 0x0000000000100000, - 0x0000000000180000, 0x0000000000180000, 0x0000000000180000, 0x0000000000180000, ), - ( 
0x0000000000000000, 0x0000000000040000, 0x0000000000000020, 0x0000000000040020, - 0x0000000000000004, 0x0000000000040004, 0x0000000000000024, 0x0000000000040024, - 0x0000000200000000, 0x0000000200040000, 0x0000000200000020, 0x0000000200040020, - 0x0000000200000004, 0x0000000200040004, 0x0000000200000024, 0x0000000200040024, ), - ( 0x0000000000000000, 0x0000000000000008, 0x0000000000008000, 0x0000000000008008, - 0x0010000000000000, 0x0010000000000008, 0x0010000000008000, 0x0010000000008008, - 0x0020000000000000, 0x0020000000000008, 0x0020000000008000, 0x0020000000008008, - 0x0030000000000000, 0x0030000000000008, 0x0030000000008000, 0x0030000000008008, ), - ( 0x0000000000000000, 0x0000400000000000, 0x0000080000000000, 0x0000480000000000, - 0x0000100000000000, 0x0000500000000000, 0x0000180000000000, 0x0000580000000000, - 0x4000000000000000, 0x4000400000000000, 0x4000080000000000, 0x4000480000000000, - 0x4000100000000000, 0x4000500000000000, 0x4000180000000000, 0x4000580000000000, ), - ( 0x0000000000000000, 0x0000000000004000, 0x0000000020000000, 0x0000000020004000, - 0x0001000000000000, 0x0001000000004000, 0x0001000020000000, 0x0001000020004000, - 0x0200000000000000, 0x0200000000004000, 0x0200000020000000, 0x0200000020004000, - 0x0201000000000000, 0x0201000000004000, 0x0201000020000000, 0x0201000020004000, ), - ( 0x0000000000000000, 0x1000000000000000, 0x0004000000000000, 0x1004000000000000, - 0x0002000000000000, 0x1002000000000000, 0x0006000000000000, 0x1006000000000000, - 0x0000000800000000, 0x1000000800000000, 0x0004000800000000, 0x1004000800000000, - 0x0002000800000000, 0x1002000800000000, 0x0006000800000000, 0x1006000800000000, ), - ( 0x0000000000000000, 0x0040000000000000, 0x2000000000000000, 0x2040000000000000, - 0x0000008000000000, 0x0040008000000000, 0x2000008000000000, 0x2040008000000000, - 0x0000001000000000, 0x0040001000000000, 0x2000001000000000, 0x2040001000000000, - 0x0000009000000000, 0x0040009000000000, 0x2000009000000000, 0x2040009000000000, ), - ( 
0x0000000000000000, 0x0400000000000000, 0x8000000000000000, 0x8400000000000000, - 0x0000002000000000, 0x0400002000000000, 0x8000002000000000, 0x8400002000000000, - 0x0100000000000000, 0x0500000000000000, 0x8100000000000000, 0x8500000000000000, - 0x0100002000000000, 0x0500002000000000, 0x8100002000000000, 0x8500002000000000, ), - ( 0x0000000000000000, 0x0000800000000000, 0x0800000000000000, 0x0800800000000000, - 0x0000004000000000, 0x0000804000000000, 0x0800004000000000, 0x0800804000000000, - 0x0000000400000000, 0x0000800400000000, 0x0800000400000000, 0x0800800400000000, - 0x0000004400000000, 0x0000804400000000, 0x0800004400000000, 0x0800804400000000, ), - ( 0x0000000000000000, 0x0080000000000000, 0x0000040000000000, 0x0080040000000000, - 0x0008000000000000, 0x0088000000000000, 0x0008040000000000, 0x0088040000000000, - 0x0000200000000000, 0x0080200000000000, 0x0000240000000000, 0x0080240000000000, - 0x0008200000000000, 0x0088200000000000, 0x0008240000000000, 0x0088240000000000, ), - ) - - # NOTE: this was reordered from original table to make perm3264 logic simpler - PC2ROTB=( - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000400, 0x0000000000000400, 0x0000000000000400, 0x0000000000000400, - 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, - 0x0000000000080400, 0x0000000000080400, 0x0000000000080400, 0x0000000000080400, ), - ( 0x0000000000000000, 0x0000000000800000, 0x0000000000004000, 0x0000000000804000, - 0x0000000080000000, 0x0000000080800000, 0x0000000080004000, 0x0000000080804000, - 0x0000000000040000, 0x0000000000840000, 0x0000000000044000, 0x0000000000844000, - 0x0000000080040000, 0x0000000080840000, 0x0000000080044000, 0x0000000080844000, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000000008, 0x0000000000000008, 0x0000000000000008, 0x0000000000000008, - 0x0000000040000000, 0x0000000040000000, 0x0000000040000000, 
0x0000000040000000, - 0x0000000040000008, 0x0000000040000008, 0x0000000040000008, 0x0000000040000008, ), - ( 0x0000000000000000, 0x0000000020000000, 0x0000000200000000, 0x0000000220000000, - 0x0000000000000080, 0x0000000020000080, 0x0000000200000080, 0x0000000220000080, - 0x0000000000100000, 0x0000000020100000, 0x0000000200100000, 0x0000000220100000, - 0x0000000000100080, 0x0000000020100080, 0x0000000200100080, 0x0000000220100080, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000002000, 0x0000000000002000, 0x0000000000002000, 0x0000000000002000, - 0x0000020000000000, 0x0000020000000000, 0x0000020000000000, 0x0000020000000000, - 0x0000020000002000, 0x0000020000002000, 0x0000020000002000, 0x0000020000002000, ), - ( 0x0000000000000000, 0x0000000000000800, 0x0000000100000000, 0x0000000100000800, - 0x0000000010000000, 0x0000000010000800, 0x0000000110000000, 0x0000000110000800, - 0x0000000000000004, 0x0000000000000804, 0x0000000100000004, 0x0000000100000804, - 0x0000000010000004, 0x0000000010000804, 0x0000000110000004, 0x0000000110000804, ), - ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, - 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, - 0x0000000000000010, 0x0000000000000010, 0x0000000000000010, 0x0000000000000010, - 0x0000000000001010, 0x0000000000001010, 0x0000000000001010, 0x0000000000001010, ), - ( 0x0000000000000000, 0x0000000000000040, 0x0000010000000000, 0x0000010000000040, - 0x0000000000200000, 0x0000000000200040, 0x0000010000200000, 0x0000010000200040, - 0x0000000000008000, 0x0000000000008040, 0x0000010000008000, 0x0000010000008040, - 0x0000000000208000, 0x0000000000208040, 0x0000010000208000, 0x0000010000208040, ), - ( 0x0000000000000000, 0x0000000004000000, 0x0000000008000000, 0x000000000c000000, - 0x0400000000000000, 0x0400000004000000, 0x0400000008000000, 0x040000000c000000, - 0x8000000000000000, 0x8000000004000000, 0x8000000008000000, 
0x800000000c000000, - 0x8400000000000000, 0x8400000004000000, 0x8400000008000000, 0x840000000c000000, ), - ( 0x0000000000000000, 0x0002000000000000, 0x0200000000000000, 0x0202000000000000, - 0x1000000000000000, 0x1002000000000000, 0x1200000000000000, 0x1202000000000000, - 0x0008000000000000, 0x000a000000000000, 0x0208000000000000, 0x020a000000000000, - 0x1008000000000000, 0x100a000000000000, 0x1208000000000000, 0x120a000000000000, ), - ( 0x0000000000000000, 0x0000000000400000, 0x0000000000000020, 0x0000000000400020, - 0x0040000000000000, 0x0040000000400000, 0x0040000000000020, 0x0040000000400020, - 0x0800000000000000, 0x0800000000400000, 0x0800000000000020, 0x0800000000400020, - 0x0840000000000000, 0x0840000000400000, 0x0840000000000020, 0x0840000000400020, ), - ( 0x0000000000000000, 0x0080000000000000, 0x0000008000000000, 0x0080008000000000, - 0x2000000000000000, 0x2080000000000000, 0x2000008000000000, 0x2080008000000000, - 0x0020000000000000, 0x00a0000000000000, 0x0020008000000000, 0x00a0008000000000, - 0x2020000000000000, 0x20a0000000000000, 0x2020008000000000, 0x20a0008000000000, ), - ( 0x0000000000000000, 0x0000002000000000, 0x0000040000000000, 0x0000042000000000, - 0x4000000000000000, 0x4000002000000000, 0x4000040000000000, 0x4000042000000000, - 0x0000400000000000, 0x0000402000000000, 0x0000440000000000, 0x0000442000000000, - 0x4000400000000000, 0x4000402000000000, 0x4000440000000000, 0x4000442000000000, ), - ( 0x0000000000000000, 0x0000004000000000, 0x0000200000000000, 0x0000204000000000, - 0x0000080000000000, 0x0000084000000000, 0x0000280000000000, 0x0000284000000000, - 0x0000800000000000, 0x0000804000000000, 0x0000a00000000000, 0x0000a04000000000, - 0x0000880000000000, 0x0000884000000000, 0x0000a80000000000, 0x0000a84000000000, ), - ( 0x0000000000000000, 0x0000000800000000, 0x0000000400000000, 0x0000000c00000000, - 0x0000100000000000, 0x0000100800000000, 0x0000100400000000, 0x0000100c00000000, - 0x0010000000000000, 0x0010000800000000, 0x0010000400000000, 
0x0010000c00000000, - 0x0010100000000000, 0x0010100800000000, 0x0010100400000000, 0x0010100c00000000, ), - ( 0x0000000000000000, 0x0100000000000000, 0x0001000000000000, 0x0101000000000000, - 0x0000001000000000, 0x0100001000000000, 0x0001001000000000, 0x0101001000000000, - 0x0004000000000000, 0x0104000000000000, 0x0005000000000000, 0x0105000000000000, - 0x0004001000000000, 0x0104001000000000, 0x0005001000000000, 0x0105001000000000, ), - ) - #--------------------------------------------------------------- - # PCXROT - PC1ROT, PC2ROTA, PC2ROTB listed in order - # of the PC1 rotation schedule, as used by des_setkey - #--------------------------------------------------------------- - ##ROTATES = (1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1) - ##PCXROT = ( - ## PC1ROT, PC2ROTA, PC2ROTB, PC2ROTB, - ## PC2ROTB, PC2ROTB, PC2ROTB, PC2ROTB, - ## PC2ROTA, PC2ROTB, PC2ROTB, PC2ROTB, - ## PC2ROTB, PC2ROTB, PC2ROTB, PC2ROTA, - ## ) - - # NOTE: modified PCXROT to contain entrys broken into pairs, - # to help generate them in format best used by encoder. 
- PCXROT = ( - (PC1ROT, PC2ROTA), (PC2ROTB, PC2ROTB), - (PC2ROTB, PC2ROTB), (PC2ROTB, PC2ROTB), - (PC2ROTA, PC2ROTB), (PC2ROTB, PC2ROTB), - (PC2ROTB, PC2ROTB), (PC2ROTB, PC2ROTA), - ) - - #--------------------------------------------------------------- - # Bit reverse, intial permupation, expantion - # Initial permutation/expansion table - #--------------------------------------------------------------- - # NOTE: this was reordered from original table to make perm3264 logic simpler - IE3264=( - ( 0x0000000000000000, 0x0000000000800800, 0x0000000000008008, 0x0000000000808808, - 0x0000008008000000, 0x0000008008800800, 0x0000008008008008, 0x0000008008808808, - 0x0000000080080000, 0x0000000080880800, 0x0000000080088008, 0x0000000080888808, - 0x0000008088080000, 0x0000008088880800, 0x0000008088088008, 0x0000008088888808, ), - ( 0x0000000000000000, 0x0080080000000000, 0x0000800800000000, 0x0080880800000000, - 0x0800000000000080, 0x0880080000000080, 0x0800800800000080, 0x0880880800000080, - 0x8008000000000000, 0x8088080000000000, 0x8008800800000000, 0x8088880800000000, - 0x8808000000000080, 0x8888080000000080, 0x8808800800000080, 0x8888880800000080, ), - ( 0x0000000000000000, 0x0000000000001000, 0x0000000000000010, 0x0000000000001010, - 0x0000000010000000, 0x0000000010001000, 0x0000000010000010, 0x0000000010001010, - 0x0000000000100000, 0x0000000000101000, 0x0000000000100010, 0x0000000000101010, - 0x0000000010100000, 0x0000000010101000, 0x0000000010100010, 0x0000000010101010, ), - ( 0x0000000000000000, 0x0000100000000000, 0x0000001000000000, 0x0000101000000000, - 0x1000000000000000, 0x1000100000000000, 0x1000001000000000, 0x1000101000000000, - 0x0010000000000000, 0x0010100000000000, 0x0010001000000000, 0x0010101000000000, - 0x1010000000000000, 0x1010100000000000, 0x1010001000000000, 0x1010101000000000, ), - ( 0x0000000000000000, 0x0000000000002000, 0x0000000000000020, 0x0000000000002020, - 0x0000000020000000, 0x0000000020002000, 0x0000000020000020, 0x0000000020002020, - 
0x0000000000200000, 0x0000000000202000, 0x0000000000200020, 0x0000000000202020, - 0x0000000020200000, 0x0000000020202000, 0x0000000020200020, 0x0000000020202020, ), - ( 0x0000000000000000, 0x0000200000000000, 0x0000002000000000, 0x0000202000000000, - 0x2000000000000000, 0x2000200000000000, 0x2000002000000000, 0x2000202000000000, - 0x0020000000000000, 0x0020200000000000, 0x0020002000000000, 0x0020202000000000, - 0x2020000000000000, 0x2020200000000000, 0x2020002000000000, 0x2020202000000000, ), - ( 0x0000000000000000, 0x0000000000004004, 0x0400000000000040, 0x0400000000004044, - 0x0000000040040000, 0x0000000040044004, 0x0400000040040040, 0x0400000040044044, - 0x0000000000400400, 0x0000000000404404, 0x0400000000400440, 0x0400000000404444, - 0x0000000040440400, 0x0000000040444404, 0x0400000040440440, 0x0400000040444444, ), - ( 0x0000000000000000, 0x0000400400000000, 0x0000004004000000, 0x0000404404000000, - 0x4004000000000000, 0x4004400400000000, 0x4004004004000000, 0x4004404404000000, - 0x0040040000000000, 0x0040440400000000, 0x0040044004000000, 0x0040444404000000, - 0x4044040000000000, 0x4044440400000000, 0x4044044004000000, 0x4044444404000000, ), - ) - - #--------------------------------------------------------------- - # Table that combines the S, P, and E operations. 
- #--------------------------------------------------------------- - SPE=( - ( 0x0080088008200000, 0x0000008008000000, 0x0000000000200020, 0x0080088008200020, - 0x0000000000200000, 0x0080088008000020, 0x0000008008000020, 0x0000000000200020, - 0x0080088008000020, 0x0080088008200000, 0x0000008008200000, 0x0080080000000020, - 0x0080080000200020, 0x0000000000200000, 0x0000000000000000, 0x0000008008000020, - 0x0000008008000000, 0x0000000000000020, 0x0080080000200000, 0x0080088008000000, - 0x0080088008200020, 0x0000008008200000, 0x0080080000000020, 0x0080080000200000, - 0x0000000000000020, 0x0080080000000000, 0x0080088008000000, 0x0000008008200020, - 0x0080080000000000, 0x0080080000200020, 0x0000008008200020, 0x0000000000000000, - 0x0000000000000000, 0x0080088008200020, 0x0080080000200000, 0x0000008008000020, - 0x0080088008200000, 0x0000008008000000, 0x0080080000000020, 0x0080080000200000, - 0x0000008008200020, 0x0080080000000000, 0x0080088008000000, 0x0000000000200020, - 0x0080088008000020, 0x0000000000000020, 0x0000000000200020, 0x0000008008200000, - 0x0080088008200020, 0x0080088008000000, 0x0000008008200000, 0x0080080000200020, - 0x0000000000200000, 0x0080080000000020, 0x0000008008000020, 0x0000000000000000, - 0x0000008008000000, 0x0000000000200000, 0x0080080000200020, 0x0080088008200000, - 0x0000000000000020, 0x0000008008200020, 0x0080080000000000, 0x0080088008000020, ), - ( 0x1000800810004004, 0x0000000000000000, 0x0000800810000000, 0x0000000010004004, - 0x1000000000004004, 0x1000800800000000, 0x0000800800004004, 0x0000800810000000, - 0x0000800800000000, 0x1000000010004004, 0x1000000000000000, 0x0000800800004004, - 0x1000000010000000, 0x0000800810004004, 0x0000000010004004, 0x1000000000000000, - 0x0000000010000000, 0x1000800800004004, 0x1000000010004004, 0x0000800800000000, - 0x1000800810000000, 0x0000000000004004, 0x0000000000000000, 0x1000000010000000, - 0x1000800800004004, 0x1000800810000000, 0x0000800810004004, 0x1000000000004004, - 0x0000000000004004, 
0x0000000010000000, 0x1000800800000000, 0x1000800810004004, - 0x1000000010000000, 0x0000800810004004, 0x0000800800004004, 0x1000800810000000, - 0x1000800810004004, 0x1000000010000000, 0x1000000000004004, 0x0000000000000000, - 0x0000000000004004, 0x1000800800000000, 0x0000000010000000, 0x1000000010004004, - 0x0000800800000000, 0x0000000000004004, 0x1000800810000000, 0x1000800800004004, - 0x0000800810004004, 0x0000800800000000, 0x0000000000000000, 0x1000000000004004, - 0x1000000000000000, 0x1000800810004004, 0x0000800810000000, 0x0000000010004004, - 0x1000000010004004, 0x0000000010000000, 0x1000800800000000, 0x0000800800004004, - 0x1000800800004004, 0x1000000000000000, 0x0000000010004004, 0x0000800810000000, ), - ( 0x0000000000400410, 0x0010004004400400, 0x0010000000000000, 0x0010000000400410, - 0x0000004004000010, 0x0000000000400400, 0x0010000000400410, 0x0010004004000000, - 0x0010000000400400, 0x0000004004000000, 0x0000004004400400, 0x0000000000000010, - 0x0010004004400410, 0x0010000000000010, 0x0000000000000010, 0x0000004004400410, - 0x0000000000000000, 0x0000004004000010, 0x0010004004400400, 0x0010000000000000, - 0x0010000000000010, 0x0010004004400410, 0x0000004004000000, 0x0000000000400410, - 0x0000004004400410, 0x0010000000400400, 0x0010004004000010, 0x0000004004400400, - 0x0010004004000000, 0x0000000000000000, 0x0000000000400400, 0x0010004004000010, - 0x0010004004400400, 0x0010000000000000, 0x0000000000000010, 0x0000004004000000, - 0x0010000000000010, 0x0000004004000010, 0x0000004004400400, 0x0010000000400410, - 0x0000000000000000, 0x0010004004400400, 0x0010004004000000, 0x0000004004400410, - 0x0000004004000010, 0x0000000000400400, 0x0010004004400410, 0x0000000000000010, - 0x0010004004000010, 0x0000000000400410, 0x0000000000400400, 0x0010004004400410, - 0x0000004004000000, 0x0010000000400400, 0x0010000000400410, 0x0010004004000000, - 0x0010000000400400, 0x0000000000000000, 0x0000004004400410, 0x0010000000000010, - 0x0000000000400410, 0x0010004004000010, 
0x0010000000000000, 0x0000004004400400, ), - ( 0x0800100040040080, 0x0000100000001000, 0x0800000000000080, 0x0800100040041080, - 0x0000000000000000, 0x0000000040041000, 0x0800100000001080, 0x0800000040040080, - 0x0000100040041000, 0x0800000000001080, 0x0000000000001000, 0x0800100000000080, - 0x0800000000001080, 0x0800100040040080, 0x0000000040040000, 0x0000000000001000, - 0x0800000040041080, 0x0000100040040000, 0x0000100000000000, 0x0800000000000080, - 0x0000100040040000, 0x0800100000001080, 0x0000000040041000, 0x0000100000000000, - 0x0800100000000080, 0x0000000000000000, 0x0800000040040080, 0x0000100040041000, - 0x0000100000001000, 0x0800000040041080, 0x0800100040041080, 0x0000000040040000, - 0x0800000040041080, 0x0800100000000080, 0x0000000040040000, 0x0800000000001080, - 0x0000100040040000, 0x0000100000001000, 0x0800000000000080, 0x0000000040041000, - 0x0800100000001080, 0x0000000000000000, 0x0000100000000000, 0x0800000040040080, - 0x0000000000000000, 0x0800000040041080, 0x0000100040041000, 0x0000100000000000, - 0x0000000000001000, 0x0800100040041080, 0x0800100040040080, 0x0000000040040000, - 0x0800100040041080, 0x0800000000000080, 0x0000100000001000, 0x0800100040040080, - 0x0800000040040080, 0x0000100040040000, 0x0000000040041000, 0x0800100000001080, - 0x0800100000000080, 0x0000000000001000, 0x0800000000001080, 0x0000100040041000, ), - ( 0x0000000000800800, 0x0000001000000000, 0x0040040000000000, 0x2040041000800800, - 0x2000001000800800, 0x0040040000800800, 0x2040041000000000, 0x0000001000800800, - 0x0000001000000000, 0x2000000000000000, 0x2000000000800800, 0x0040041000000000, - 0x2040040000800800, 0x2000001000800800, 0x0040041000800800, 0x0000000000000000, - 0x0040041000000000, 0x0000000000800800, 0x2000001000000000, 0x2040040000000000, - 0x0040040000800800, 0x2040041000000000, 0x0000000000000000, 0x2000000000800800, - 0x2000000000000000, 0x2040040000800800, 0x2040041000800800, 0x2000001000000000, - 0x0000001000800800, 0x0040040000000000, 0x2040040000000000, 
0x0040041000800800, - 0x0040041000800800, 0x2040040000800800, 0x2000001000000000, 0x0000001000800800, - 0x0000001000000000, 0x2000000000000000, 0x2000000000800800, 0x0040040000800800, - 0x0000000000800800, 0x0040041000000000, 0x2040041000800800, 0x0000000000000000, - 0x2040041000000000, 0x0000000000800800, 0x0040040000000000, 0x2000001000000000, - 0x2040040000800800, 0x0040040000000000, 0x0000000000000000, 0x2040041000800800, - 0x2000001000800800, 0x0040041000800800, 0x2040040000000000, 0x0000001000000000, - 0x0040041000000000, 0x2000001000800800, 0x0040040000800800, 0x2040040000000000, - 0x2000000000000000, 0x2040041000000000, 0x0000001000800800, 0x2000000000800800, ), - ( 0x4004000000008008, 0x4004000020000000, 0x0000000000000000, 0x0000200020008008, - 0x4004000020000000, 0x0000200000000000, 0x4004200000008008, 0x0000000020000000, - 0x4004200000000000, 0x4004200020008008, 0x0000200020000000, 0x0000000000008008, - 0x0000200000008008, 0x4004000000008008, 0x0000000020008008, 0x4004200020000000, - 0x0000000020000000, 0x4004200000008008, 0x4004000020008008, 0x0000000000000000, - 0x0000200000000000, 0x4004000000000000, 0x0000200020008008, 0x4004000020008008, - 0x4004200020008008, 0x0000000020008008, 0x0000000000008008, 0x4004200000000000, - 0x4004000000000000, 0x0000200020000000, 0x4004200020000000, 0x0000200000008008, - 0x4004200000000000, 0x0000000000008008, 0x0000200000008008, 0x4004200020000000, - 0x0000200020008008, 0x4004000020000000, 0x0000000000000000, 0x0000200000008008, - 0x0000000000008008, 0x0000200000000000, 0x4004000020008008, 0x0000000020000000, - 0x4004000020000000, 0x4004200020008008, 0x0000200020000000, 0x4004000000000000, - 0x4004200020008008, 0x0000200020000000, 0x0000000020000000, 0x4004200000008008, - 0x4004000000008008, 0x0000000020008008, 0x4004200020000000, 0x0000000000000000, - 0x0000200000000000, 0x4004000000008008, 0x4004200000008008, 0x0000200020008008, - 0x0000000020008008, 0x4004200000000000, 0x4004000000000000, 0x4004000020008008, ), - ( 
0x0000400400000000, 0x0020000000000000, 0x0020000000100000, 0x0400000000100040, - 0x0420400400100040, 0x0400400400000040, 0x0020400400000000, 0x0000000000000000, - 0x0000000000100000, 0x0420000000100040, 0x0420000000000040, 0x0000400400100000, - 0x0400000000000040, 0x0020400400100000, 0x0000400400100000, 0x0420000000000040, - 0x0420000000100040, 0x0000400400000000, 0x0400400400000040, 0x0420400400100040, - 0x0000000000000000, 0x0020000000100000, 0x0400000000100040, 0x0020400400000000, - 0x0400400400100040, 0x0420400400000040, 0x0020400400100000, 0x0400000000000040, - 0x0420400400000040, 0x0400400400100040, 0x0020000000000000, 0x0000000000100000, - 0x0420400400000040, 0x0000400400100000, 0x0400400400100040, 0x0420000000000040, - 0x0000400400000000, 0x0020000000000000, 0x0000000000100000, 0x0400400400100040, - 0x0420000000100040, 0x0420400400000040, 0x0020400400000000, 0x0000000000000000, - 0x0020000000000000, 0x0400000000100040, 0x0400000000000040, 0x0020000000100000, - 0x0000000000000000, 0x0420000000100040, 0x0020000000100000, 0x0020400400000000, - 0x0420000000000040, 0x0000400400000000, 0x0420400400100040, 0x0000000000100000, - 0x0020400400100000, 0x0400000000000040, 0x0400400400000040, 0x0420400400100040, - 0x0400000000100040, 0x0020400400100000, 0x0000400400100000, 0x0400400400000040, ), - ( 0x8008000080082000, 0x0000002080082000, 0x8008002000000000, 0x0000000000000000, - 0x0000002000002000, 0x8008000080080000, 0x0000000080082000, 0x8008002080082000, - 0x8008000000000000, 0x0000000000002000, 0x0000002080080000, 0x8008002000000000, - 0x8008002080080000, 0x8008002000002000, 0x8008000000002000, 0x0000000080082000, - 0x0000002000000000, 0x8008002080080000, 0x8008000080080000, 0x0000002000002000, - 0x8008002080082000, 0x8008000000002000, 0x0000000000000000, 0x0000002080080000, - 0x0000000000002000, 0x0000000080080000, 0x8008002000002000, 0x8008000080082000, - 0x0000000080080000, 0x0000002000000000, 0x0000002080082000, 0x8008000000000000, - 0x0000000080080000, 
0x0000002000000000, 0x8008000000002000, 0x8008002080082000, - 0x8008002000000000, 0x0000000000002000, 0x0000000000000000, 0x0000002080080000, - 0x8008000080082000, 0x8008002000002000, 0x0000002000002000, 0x8008000080080000, - 0x0000002080082000, 0x8008000000000000, 0x8008000080080000, 0x0000002000002000, - 0x8008002080082000, 0x0000000080080000, 0x0000000080082000, 0x8008000000002000, - 0x0000002080080000, 0x8008002000000000, 0x8008002000002000, 0x0000000080082000, - 0x8008000000000000, 0x0000002080082000, 0x8008002080080000, 0x0000000000000000, - 0x0000000000002000, 0x8008000080082000, 0x0000002000000000, 0x8008002080080000, ), - ) - - #--------------------------------------------------------------- - # compressed/interleaved => final permutation table - # Compression, final permutation, bit reverse - #--------------------------------------------------------------- - # NOTE: this was reordered from original table to make perm6464 logic simpler - CF6464=( - ( 0x0000000000000000, 0x0000002000000000, 0x0000200000000000, 0x0000202000000000, - 0x0020000000000000, 0x0020002000000000, 0x0020200000000000, 0x0020202000000000, - 0x2000000000000000, 0x2000002000000000, 0x2000200000000000, 0x2000202000000000, - 0x2020000000000000, 0x2020002000000000, 0x2020200000000000, 0x2020202000000000, ), - ( 0x0000000000000000, 0x0000000200000000, 0x0000020000000000, 0x0000020200000000, - 0x0002000000000000, 0x0002000200000000, 0x0002020000000000, 0x0002020200000000, - 0x0200000000000000, 0x0200000200000000, 0x0200020000000000, 0x0200020200000000, - 0x0202000000000000, 0x0202000200000000, 0x0202020000000000, 0x0202020200000000, ), - ( 0x0000000000000000, 0x0000000000000020, 0x0000000000002000, 0x0000000000002020, - 0x0000000000200000, 0x0000000000200020, 0x0000000000202000, 0x0000000000202020, - 0x0000000020000000, 0x0000000020000020, 0x0000000020002000, 0x0000000020002020, - 0x0000000020200000, 0x0000000020200020, 0x0000000020202000, 0x0000000020202020, ), - ( 0x0000000000000000, 
0x0000000000000002, 0x0000000000000200, 0x0000000000000202, - 0x0000000000020000, 0x0000000000020002, 0x0000000000020200, 0x0000000000020202, - 0x0000000002000000, 0x0000000002000002, 0x0000000002000200, 0x0000000002000202, - 0x0000000002020000, 0x0000000002020002, 0x0000000002020200, 0x0000000002020202, ), - ( 0x0000000000000000, 0x0000008000000000, 0x0000800000000000, 0x0000808000000000, - 0x0080000000000000, 0x0080008000000000, 0x0080800000000000, 0x0080808000000000, - 0x8000000000000000, 0x8000008000000000, 0x8000800000000000, 0x8000808000000000, - 0x8080000000000000, 0x8080008000000000, 0x8080800000000000, 0x8080808000000000, ), - ( 0x0000000000000000, 0x0000000800000000, 0x0000080000000000, 0x0000080800000000, - 0x0008000000000000, 0x0008000800000000, 0x0008080000000000, 0x0008080800000000, - 0x0800000000000000, 0x0800000800000000, 0x0800080000000000, 0x0800080800000000, - 0x0808000000000000, 0x0808000800000000, 0x0808080000000000, 0x0808080800000000, ), - ( 0x0000000000000000, 0x0000000000000080, 0x0000000000008000, 0x0000000000008080, - 0x0000000000800000, 0x0000000000800080, 0x0000000000808000, 0x0000000000808080, - 0x0000000080000000, 0x0000000080000080, 0x0000000080008000, 0x0000000080008080, - 0x0000000080800000, 0x0000000080800080, 0x0000000080808000, 0x0000000080808080, ), - ( 0x0000000000000000, 0x0000000000000008, 0x0000000000000800, 0x0000000000000808, - 0x0000000000080000, 0x0000000000080008, 0x0000000000080800, 0x0000000000080808, - 0x0000000008000000, 0x0000000008000008, 0x0000000008000800, 0x0000000008000808, - 0x0000000008080000, 0x0000000008080008, 0x0000000008080800, 0x0000000008080808, ), - ( 0x0000000000000000, 0x0000001000000000, 0x0000100000000000, 0x0000101000000000, - 0x0010000000000000, 0x0010001000000000, 0x0010100000000000, 0x0010101000000000, - 0x1000000000000000, 0x1000001000000000, 0x1000100000000000, 0x1000101000000000, - 0x1010000000000000, 0x1010001000000000, 0x1010100000000000, 0x1010101000000000, ), - ( 0x0000000000000000, 
0x0000000100000000, 0x0000010000000000, 0x0000010100000000, - 0x0001000000000000, 0x0001000100000000, 0x0001010000000000, 0x0001010100000000, - 0x0100000000000000, 0x0100000100000000, 0x0100010000000000, 0x0100010100000000, - 0x0101000000000000, 0x0101000100000000, 0x0101010000000000, 0x0101010100000000, ), - ( 0x0000000000000000, 0x0000000000000010, 0x0000000000001000, 0x0000000000001010, - 0x0000000000100000, 0x0000000000100010, 0x0000000000101000, 0x0000000000101010, - 0x0000000010000000, 0x0000000010000010, 0x0000000010001000, 0x0000000010001010, - 0x0000000010100000, 0x0000000010100010, 0x0000000010101000, 0x0000000010101010, ), - ( 0x0000000000000000, 0x0000000000000001, 0x0000000000000100, 0x0000000000000101, - 0x0000000000010000, 0x0000000000010001, 0x0000000000010100, 0x0000000000010101, - 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, 0x0000000001000101, - 0x0000000001010000, 0x0000000001010001, 0x0000000001010100, 0x0000000001010101, ), - ( 0x0000000000000000, 0x0000004000000000, 0x0000400000000000, 0x0000404000000000, - 0x0040000000000000, 0x0040004000000000, 0x0040400000000000, 0x0040404000000000, - 0x4000000000000000, 0x4000004000000000, 0x4000400000000000, 0x4000404000000000, - 0x4040000000000000, 0x4040004000000000, 0x4040400000000000, 0x4040404000000000, ), - ( 0x0000000000000000, 0x0000000400000000, 0x0000040000000000, 0x0000040400000000, - 0x0004000000000000, 0x0004000400000000, 0x0004040000000000, 0x0004040400000000, - 0x0400000000000000, 0x0400000400000000, 0x0400040000000000, 0x0400040400000000, - 0x0404000000000000, 0x0404000400000000, 0x0404040000000000, 0x0404040400000000, ), - ( 0x0000000000000000, 0x0000000000000040, 0x0000000000004000, 0x0000000000004040, - 0x0000000000400000, 0x0000000000400040, 0x0000000000404000, 0x0000000000404040, - 0x0000000040000000, 0x0000000040000040, 0x0000000040004000, 0x0000000040004040, - 0x0000000040400000, 0x0000000040400040, 0x0000000040404000, 0x0000000040404040, ), - ( 0x0000000000000000, 
0x0000000000000004, 0x0000000000000400, 0x0000000000000404, - 0x0000000000040000, 0x0000000000040004, 0x0000000000040400, 0x0000000000040404, - 0x0000000004000000, 0x0000000004000004, 0x0000000004000400, 0x0000000004000404, - 0x0000000004040000, 0x0000000004040004, 0x0000000004040400, 0x0000000004040404, ), - ) - #=================================================================== - # eof _load_tables() - #=================================================================== - -#============================================================================= -# support -#============================================================================= - -def _permute(c, p): - """Returns the permutation of the given 32-bit or 64-bit code with - the specified permutation table.""" - # NOTE: only difference between 32 & 64 bit permutations - # is that len(p)==8 for 32 bit, and len(p)==16 for 64 bit. - out = 0 - for r in p: - out |= r[c&0xf] - c >>= 4 - return out - -#============================================================================= -# packing & unpacking -#============================================================================= -# FIXME: more properly named _uint8_struct... 
-_uint64_struct = struct.Struct(">Q") - -def _pack64(value): - return _uint64_struct.pack(value) - -def _unpack64(value): - return _uint64_struct.unpack(value)[0] - -def _pack56(value): - return _uint64_struct.pack(value)[1:] - -def _unpack56(value): - return _uint64_struct.unpack(b'\x00' + value)[0] - -#============================================================================= -# 56->64 key manipulation -#============================================================================= - -##def expand_7bit(value): -## "expand 7-bit integer => 7-bits + 1 odd-parity bit" -## # parity calc adapted from 32-bit even parity alg found at -## # http://graphics.stanford.edu/~seander/bithacks.html#ParityParallel -## assert 0 <= value < 0x80, "value out of range" -## return (value<<1) | (0x9669 >> ((value ^ (value >> 4)) & 0xf)) & 1 - -_EXPAND_ITER = irange(49,-7,-7) - -def expand_des_key(key): - """convert DES from 7 bytes to 8 bytes (by inserting empty parity bits)""" - if isinstance(key, bytes): - if len(key) != 7: - raise ValueError("key must be 7 bytes in size") - elif isinstance(key, int_types): - if key < 0 or key > INT_56_MASK: - raise ValueError("key must be 56-bit non-negative integer") - return _unpack64(expand_des_key(_pack56(key))) - else: - raise exc.ExpectedTypeError(key, "bytes or int", "key") - key = _unpack56(key) - # NOTE: the following would insert correctly-valued parity bits in each key, - # but the parity bit would just be ignored in des_encrypt_block(), - # so not bothering to use it. 
- # XXX: could make parity-restoring optionally available via flag - ##return join_byte_values(expand_7bit((key >> shift) & 0x7f) - ## for shift in _EXPAND_ITER) - return join_byte_values(((key>>shift) & 0x7f)<<1 for shift in _EXPAND_ITER) - -def shrink_des_key(key): - """convert DES key from 8 bytes to 7 bytes (by discarding the parity bits)""" - if isinstance(key, bytes): - if len(key) != 8: - raise ValueError("key must be 8 bytes in size") - return _pack56(shrink_des_key(_unpack64(key))) - elif isinstance(key, int_types): - if key < 0 or key > INT_64_MASK: - raise ValueError("key must be 64-bit non-negative integer") - else: - raise exc.ExpectedTypeError(key, "bytes or int", "key") - key >>= 1 - result = 0 - offset = 0 - while offset < 56: - result |= (key & 0x7f)<>= 8 - offset += 7 - assert not (result & ~INT_64_MASK) - return result - -#============================================================================= -# des encryption -#============================================================================= -def des_encrypt_block(key, input, salt=0, rounds=1): - """encrypt single block of data using DES, operates on 8-byte strings. - - :arg key: - DES key as 7 byte string, or 8 byte string with parity bits - (parity bit values are ignored). - - :arg input: - plaintext block to encrypt, as 8 byte string. - - :arg salt: - Optional 24-bit integer used to mutate the base DES algorithm in a - manner specific to :class:`~passlib.hash.des_crypt` and its variants. - The default value ``0`` provides the normal (unsalted) DES behavior. - The salt functions as follows: - if the ``i``'th bit of ``salt`` is set, - bits ``i`` and ``i+24`` are swapped in the DES E-box output. - - :arg rounds: - Optional number of rounds of to apply the DES key schedule. - the default (``rounds=1``) provides the normal DES behavior, - but :class:`~passlib.hash.des_crypt` and its variants use - alternate rounds values. - - :raises TypeError: if any of the provided args are of the wrong type. 
- :raises ValueError: - if any of the input blocks are the wrong size, - or the salt/rounds values are out of range. - - :returns: - resulting 8-byte ciphertext block. - """ - # validate & unpack key - if isinstance(key, bytes): - if len(key) == 7: - key = expand_des_key(key) - elif len(key) != 8: - raise ValueError("key must be 7 or 8 bytes") - key = _unpack64(key) - else: - raise exc.ExpectedTypeError(key, "bytes", "key") - - # validate & unpack input - if isinstance(input, bytes): - if len(input) != 8: - raise ValueError("input block must be 8 bytes") - input = _unpack64(input) - else: - raise exc.ExpectedTypeError(input, "bytes", "input") - - # hand things off to other func - result = des_encrypt_int_block(key, input, salt, rounds) - - # repack result - return _pack64(result) - -def des_encrypt_int_block(key, input, salt=0, rounds=1): - """encrypt single block of data using DES, operates on 64-bit integers. - - this function is essentially the same as :func:`des_encrypt_block`, - except that it operates on integers, and will NOT automatically - expand 56-bit keys if provided (since there's no way to detect them). - - :arg key: - DES key as 64-bit integer (the parity bits are ignored). - - :arg input: - input block as 64-bit integer - - :arg salt: - optional 24-bit integer used to mutate the base DES algorithm. - defaults to ``0`` (no mutation applied). - - :arg rounds: - optional number of rounds of to apply the DES key schedule. - defaults to ``1``. - - :raises TypeError: if any of the provided args are of the wrong type. - :raises ValueError: - if any of the input blocks are the wrong size, - or the salt/rounds values are out of range. - - :returns: - resulting ciphertext as 64-bit integer. 
- """ - #--------------------------------------------------------------- - # input validation - #--------------------------------------------------------------- - - # validate salt, rounds - if rounds < 1: - raise ValueError("rounds must be positive integer") - if salt < 0 or salt > INT_24_MASK: - raise ValueError("salt must be 24-bit non-negative integer") - - # validate & unpack key - if not isinstance(key, int_types): - raise exc.ExpectedTypeError(key, "int", "key") - elif key < 0 or key > INT_64_MASK: - raise ValueError("key must be 64-bit non-negative integer") - - # validate & unpack input - if not isinstance(input, int_types): - raise exc.ExpectedTypeError(input, "int", "input") - elif input < 0 or input > INT_64_MASK: - raise ValueError("input must be 64-bit non-negative integer") - - #--------------------------------------------------------------- - # DES setup - #--------------------------------------------------------------- - # load tables if not already done - global SPE, PCXROT, IE3264, CF6464 - if PCXROT is None: - _load_tables() - - # load SPE into local vars to speed things up and remove an array access call - SPE0, SPE1, SPE2, SPE3, SPE4, SPE5, SPE6, SPE7 = SPE - - # NOTE: parity bits are ignored completely - # (UTs do fuzz testing to ensure this) - - # generate key schedule - # NOTE: generation was modified to output two elements at a time, - # so that per-round loop could do two passes at once. 
- def _iter_key_schedule(ks_odd): - """given 64-bit key, iterates over the 8 (even,odd) key schedule pairs""" - for p_even, p_odd in PCXROT: - ks_even = _permute(ks_odd, p_even) - ks_odd = _permute(ks_even, p_odd) - yield ks_even & _KS_MASK, ks_odd & _KS_MASK - ks_list = list(_iter_key_schedule(key)) - - # expand 24 bit salt -> 32 bit per des_crypt & bsdi_crypt - salt = ( - ((salt & 0x00003f) << 26) | - ((salt & 0x000fc0) << 12) | - ((salt & 0x03f000) >> 2) | - ((salt & 0xfc0000) >> 16) - ) - - # init L & R - if input == 0: - L = R = 0 - else: - L = ((input >> 31) & 0xaaaaaaaa) | (input & 0x55555555) - L = _permute(L, IE3264) - - R = ((input >> 32) & 0xaaaaaaaa) | ((input >> 1) & 0x55555555) - R = _permute(R, IE3264) - - #--------------------------------------------------------------- - # main DES loop - run for specified number of rounds - #--------------------------------------------------------------- - while rounds: - rounds -= 1 - - # run over each part of the schedule, 2 parts at a time - for ks_even, ks_odd in ks_list: - k = ((R>>32) ^ R) & salt # use the salt to flip specific bits - B = (k<<32) ^ k ^ R ^ ks_even - - L ^= (SPE0[(B>>58)&0x3f] ^ SPE1[(B>>50)&0x3f] ^ - SPE2[(B>>42)&0x3f] ^ SPE3[(B>>34)&0x3f] ^ - SPE4[(B>>26)&0x3f] ^ SPE5[(B>>18)&0x3f] ^ - SPE6[(B>>10)&0x3f] ^ SPE7[(B>>2)&0x3f]) - - k = ((L>>32) ^ L) & salt # use the salt to flip specific bits - B = (k<<32) ^ k ^ L ^ ks_odd - - R ^= (SPE0[(B>>58)&0x3f] ^ SPE1[(B>>50)&0x3f] ^ - SPE2[(B>>42)&0x3f] ^ SPE3[(B>>34)&0x3f] ^ - SPE4[(B>>26)&0x3f] ^ SPE5[(B>>18)&0x3f] ^ - SPE6[(B>>10)&0x3f] ^ SPE7[(B>>2)&0x3f]) - - # swap L and R - L, R = R, L - - #--------------------------------------------------------------- - # return final result - #--------------------------------------------------------------- - C = ( - ((L>>3) & 0x0f0f0f0f00000000) - | - ((L<<33) & 0xf0f0f0f000000000) - | - ((R>>35) & 0x000000000f0f0f0f) - | - ((R<<1) & 0x00000000f0f0f0f0) - ) - return _permute(C, CF6464) - 
-#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/crypto/digest.py b/libs_crutch/contrib/passlib/crypto/digest.py deleted file mode 100644 index d26f892..0000000 --- a/libs_crutch/contrib/passlib/crypto/digest.py +++ /dev/null @@ -1,891 +0,0 @@ -"""passlib.crypto.digest -- crytographic helpers used by the password hashes in passlib - -.. versionadded:: 1.7 -""" -#============================================================================= -# imports -#============================================================================= -from __future__ import division -# core -import hashlib -import logging; log = logging.getLogger(__name__) -try: - # new in py3.4 - from hashlib import pbkdf2_hmac as _stdlib_pbkdf2_hmac - if _stdlib_pbkdf2_hmac.__module__ == "hashlib": - # builtin pure-python backends are slightly faster than stdlib's pure python fallback, - # so only using stdlib's version if it's backed by openssl's pbkdf2_hmac() - log.debug("ignoring pure-python hashlib.pbkdf2_hmac()") - _stdlib_pbkdf2_hmac = None -except ImportError: - _stdlib_pbkdf2_hmac = None -import re -import os -from struct import Struct -from warnings import warn -# site -try: - # https://pypi.python.org/pypi/fastpbkdf2/ - from fastpbkdf2 import pbkdf2_hmac as _fast_pbkdf2_hmac -except ImportError: - _fast_pbkdf2_hmac = None -# pkg -from passlib import exc -from passlib.utils import join_bytes, to_native_str, join_byte_values, to_bytes, \ - SequenceMixin -from passlib.utils.compat import irange, int_types, unicode_or_bytes_types, PY3 -from passlib.utils.decor import memoized_property -# local -__all__ = [ - # hash utils - "lookup_hash", - "HashInfo", - "norm_hash_name", - - # hmac utils - "compile_hmac", - - # kdfs - "pbkdf1", - "pbkdf2_hmac", -] - -#============================================================================= -# generic 
constants -#============================================================================= - -#: max 32-bit value -MAX_UINT32 = (1 << 32) - 1 - -#: max 64-bit value -MAX_UINT64 = (1 << 64) - 1 - -#============================================================================= -# hash utils -#============================================================================= - -#: list of known hash names, used by lookup_hash()'s _norm_hash_name() helper -_known_hash_names = [ - # format: (hashlib/ssl name, iana name or standin, other known aliases ...) - - # hashes with official IANA-assigned names - # (as of 2012-03 - http://www.iana.org/assignments/hash-function-text-names) - ("md2", "md2"), - ("md5", "md5"), - ("sha1", "sha-1"), - ("sha224", "sha-224", "sha2-224"), - ("sha256", "sha-256", "sha2-256"), - ("sha384", "sha-384", "sha2-384"), - ("sha512", "sha-512", "sha2-512"), - - # TODO: add sha3 to this table. - - # hashlib/ssl-supported hashes without official IANA names, - # (hopefully-) compatible stand-ins have been chosen. - ("md4", "md4"), - ("sha", "sha-0", "sha0"), - ("ripemd", "ripemd"), - ("ripemd160", "ripemd-160"), -] - -#: cache of hash info instances used by lookup_hash() -_hash_info_cache = {} - -def _get_hash_aliases(name): - """ - internal helper used by :func:`lookup_hash` -- - normalize arbitrary hash name to hashlib format. - if name not recognized, returns dummy record and issues a warning. - - :arg name: - unnormalized name - - :returns: - tuple with 2+ elements: ``(hashlib_name, iana_name|None, ... 0+ aliases)``. 
- """ - - # normalize input - orig = name - if not isinstance(name, str): - name = to_native_str(name, 'utf-8', 'hash name') - name = re.sub("[_ /]", "-", name.strip().lower()) - if name.startswith("scram-"): # helper for SCRAM protocol (see passlib.handlers.scram) - name = name[6:] - if name.endswith("-plus"): - name = name[:-5] - - # look through standard names and known aliases - def check_table(name): - for row in _known_hash_names: - if name in row: - return row - result = check_table(name) - if result: - return result - - # try to clean name up some more - m = re.match(r"(?i)^(?P[a-z]+)-?(?P\d)?-?(?P\d{3,4})?$", name) - if m: - # roughly follows "SHA2-256" style format, normalize representation, - # and checked table. - iana_name, rev, size = m.group("name", "rev", "size") - if rev: - iana_name += rev - hashlib_name = iana_name - if size: - iana_name += "-" + size - if rev: - hashlib_name += "_" - hashlib_name += size - result = check_table(iana_name) - if result: - return result - - # not found in table, but roughly recognize format. use names we built up as fallback. - log.info("normalizing unrecognized hash name %r => %r / %r", - orig, hashlib_name, iana_name) - - else: - # just can't make sense of it. return something - iana_name = name - hashlib_name = name.replace("-", "_") - log.warning("normalizing unrecognized hash name and format %r => %r / %r", - orig, hashlib_name, iana_name) - - return hashlib_name, iana_name - - -def _get_hash_const(name): - """ - internal helper used by :func:`lookup_hash` -- - lookup hash constructor by name - - :arg name: - name (normalized to hashlib format, e.g. ``"sha256"``) - - :returns: - hash constructor, e.g. ``hashlib.sha256()``; - or None if hash can't be located. - """ - # check hashlib. 
for an efficient constructor - if not name.startswith("_") and name not in ("new", "algorithms"): - try: - return getattr(hashlib, name) - except AttributeError: - pass - - # check hashlib.new() in case SSL supports the digest - new_ssl_hash = hashlib.new - try: - # new() should throw ValueError if alg is unknown - new_ssl_hash(name, b"") - except ValueError: - pass - else: - # create wrapper function - # XXX: is there a faster way to wrap this? - def const(msg=b""): - return new_ssl_hash(name, msg) - const.__name__ = name - const.__module__ = "hashlib" - const.__doc__ = ("wrapper for hashlib.new(%r),\n" - "generated by passlib.crypto.digest.lookup_hash()") % name - return const - - # use builtin md4 as fallback when not supported by hashlib - if name == "md4": - from passlib.crypto._md4 import md4 - return md4 - - # XXX: any other modules / registries we should check? - # TODO: add pysha3 support. - - return None - -def lookup_hash(digest, return_unknown=False): - """ - Returns a :class:`HashInfo` record containing information about a given hash function. - Can be used to look up a hash constructor by name, normalize hash name representation, etc. - - :arg digest: - This can be any of: - - * A string containing a :mod:`!hashlib` digest name (e.g. ``"sha256"``), - * A string containing an IANA-assigned hash name, - * A digest constructor function (e.g. ``hashlib.sha256``). - - Case is ignored, underscores are converted to hyphens, - and various other cleanups are made. - - :param return_unknown: - By default, this function will throw an :exc:`~passlib.exc.UnknownHashError` if no hash constructor - can be found. However, if this flag is False, it will instead return a dummy record - without a constructor function. This is mainly used by :func:`norm_hash_name`. - - :returns HashInfo: - :class:`HashInfo` instance containing information about specified digest. - - Multiple calls resolving to the same hash should always - return the same :class:`!HashInfo` instance. 
- """ - # check for cached entry - cache = _hash_info_cache - try: - return cache[digest] - except (KeyError, TypeError): - # NOTE: TypeError is to catch 'TypeError: unhashable type' (e.g. HashInfo) - pass - - # resolve ``digest`` to ``const`` & ``name_record`` - cache_by_name = True - if isinstance(digest, unicode_or_bytes_types): - # normalize name - name_list = _get_hash_aliases(digest) - name = name_list[0] - assert name - - # if name wasn't normalized to hashlib format, - # get info for normalized name and reuse it. - if name != digest: - info = lookup_hash(name, return_unknown=return_unknown) - if info.const is None: - # pass through dummy record - assert return_unknown - return info - cache[digest] = info - return info - - # else look up constructor - const = _get_hash_const(name) - if const is None: - if return_unknown: - # return a dummy record (but don't cache it, so normal lookup still returns error) - return HashInfo(None, name_list) - else: - raise exc.UnknownHashError(name) - - elif isinstance(digest, HashInfo): - # handle border case where HashInfo is passed in. - return digest - - elif callable(digest): - # try to lookup digest based on it's self-reported name - # (which we trust to be the canonical "hashlib" name) - const = digest - name_list = _get_hash_aliases(const().name) - name = name_list[0] - other_const = _get_hash_const(name) - if other_const is None: - # this is probably a third-party digest we don't know about, - # so just pass it on through, and register reverse lookup for it's name. - pass - - elif other_const is const: - # if we got back same constructor, this is just a known stdlib constructor, - # which was passed in before we had cached it by name. proceed normally. - pass - - else: - # if we got back different object, then ``const`` is something else - # (such as a mock object), in which case we want to skip caching it by name, - # as that would conflict with real hash. 
- cache_by_name = False - - else: - raise exc.ExpectedTypeError(digest, "digest name or constructor", "digest") - - # create new instance - info = HashInfo(const, name_list) - - # populate cache - cache[const] = info - if cache_by_name: - for name in name_list: - if name: # (skips iana name if it's empty) - assert cache.get(name) in [None, info], "%r already in cache" % name - cache[name] = info - return info - -#: UT helper for clearing internal cache -lookup_hash.clear_cache = _hash_info_cache.clear - - -def norm_hash_name(name, format="hashlib"): - """Normalize hash function name (convenience wrapper for :func:`lookup_hash`). - - :arg name: - Original hash function name. - - This name can be a Python :mod:`~hashlib` digest name, - a SCRAM mechanism name, IANA assigned hash name, etc. - Case is ignored, and underscores are converted to hyphens. - - :param format: - Naming convention to normalize to. - Possible values are: - - * ``"hashlib"`` (the default) - normalizes name to be compatible - with Python's :mod:`!hashlib`. - - * ``"iana"`` - normalizes name to IANA-assigned hash function name. - For hashes which IANA hasn't assigned a name for, this issues a warning, - and then uses a heuristic to return a "best guess" name. - - :returns: - Hash name, returned as native :class:`!str`. - """ - info = lookup_hash(name, return_unknown=True) - if not info.const: - warn("norm_hash_name(): unknown hash: %r" % (name,), exc.PasslibRuntimeWarning) - if format == "hashlib": - return info.name - elif format == "iana": - return info.iana_name - else: - raise ValueError("unknown format: %r" % (format,)) - - -class HashInfo(SequenceMixin): - """ - Record containing information about a given hash algorithm, as returned :func:`lookup_hash`. - - This class exposes the following attributes: - - .. autoattribute:: const - .. autoattribute:: digest_size - .. autoattribute:: block_size - .. autoattribute:: name - .. autoattribute:: iana_name - .. 
autoattribute:: aliases - - This object can also be treated a 3-element sequence - containing ``(const, digest_size, block_size)``. - """ - #========================================================================= - # instance attrs - #========================================================================= - - #: Canonical / hashlib-compatible name (e.g. ``"sha256"``). - name = None - - #: IANA assigned name (e.g. ``"sha-256"``), may be ``None`` if unknown. - iana_name = None - - #: Tuple of other known aliases (may be empty) - aliases = () - - #: Hash constructor function (e.g. :func:`hashlib.sha256`) - const = None - - #: Hash's digest size - digest_size = None - - #: Hash's block size - block_size = None - - def __init__(self, const, names): - """ - initialize new instance. - :arg const: - hash constructor - :arg names: - list of 2+ names. should be list of ``(name, iana_name, ... 0+ aliases)``. - names must be lower-case. only iana name may be None. - """ - self.name = names[0] - self.iana_name = names[1] - self.aliases = names[2:] - - self.const = const - if const is None: - return - - hash = const() - self.digest_size = hash.digest_size - self.block_size = hash.block_size - - # do sanity check on digest size - if len(hash.digest()) != hash.digest_size: - raise RuntimeError("%r constructor failed sanity check" % self.name) - - # do sanity check on name. - if hash.name != self.name: - warn("inconsistent digest name: %r resolved to %r, which reports name as %r" % - (self.name, const, hash.name), exc.PasslibRuntimeWarning) - - #========================================================================= - # methods - #========================================================================= - def __repr__(self): - return " digest output``. 
- - However, if ``multipart=True``, the returned function has the signature - ``hmac() -> update, finalize``, where ``update(msg)`` may be called multiple times, - and ``finalize() -> digest_output`` may be repeatedly called at any point to - calculate the HMAC digest so far. - - The returned object will also have a ``digest_info`` attribute, containing - a :class:`lookup_hash` instance for the specified digest. - - This function exists, and has the weird signature it does, in order to squeeze as - provide as much efficiency as possible, by omitting much of the setup cost - and features of the stdlib :mod:`hmac` module. - """ - # all the following was adapted from stdlib's hmac module - - # resolve digest (cached) - digest_info = lookup_hash(digest) - const, digest_size, block_size = digest_info - assert block_size >= 16, "block size too small" - - # prepare key - if not isinstance(key, bytes): - key = to_bytes(key, param="key") - klen = len(key) - if klen > block_size: - key = const(key).digest() - klen = digest_size - if klen < block_size: - key += b'\x00' * (block_size - klen) - - # create pre-initialized hash constructors - _inner_copy = const(key.translate(_TRANS_36)).copy - _outer_copy = const(key.translate(_TRANS_5C)).copy - - if multipart: - # create multi-part function - # NOTE: this is slightly slower than the single-shot version, - # and should only be used if needed. 
- def hmac(): - """generated by compile_hmac(multipart=True)""" - inner = _inner_copy() - def finalize(): - outer = _outer_copy() - outer.update(inner.digest()) - return outer.digest() - return inner.update, finalize - else: - - # single-shot function - def hmac(msg): - """generated by compile_hmac()""" - inner = _inner_copy() - inner.update(msg) - outer = _outer_copy() - outer.update(inner.digest()) - return outer.digest() - - # add info attr - hmac.digest_info = digest_info - return hmac - -#============================================================================= -# pbkdf1 -#============================================================================= -def pbkdf1(digest, secret, salt, rounds, keylen=None): - """pkcs#5 password-based key derivation v1.5 - - :arg digest: - digest name or constructor. - - :arg secret: - secret to use when generating the key. - may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8). - - :arg salt: - salt string to use when generating key. - may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8). - - :param rounds: - number of rounds to use to generate key. - - :arg keylen: - number of bytes to generate (if omitted / ``None``, uses digest's native size) - - :returns: - raw :class:`bytes` of generated key - - .. note:: - - This algorithm has been deprecated, new code should use PBKDF2. - Among other limitations, ``keylen`` cannot be larger - than the digest size of the specified hash. 
- """ - # resolve digest - const, digest_size, block_size = lookup_hash(digest) - - # validate secret & salt - secret = to_bytes(secret, param="secret") - salt = to_bytes(salt, param="salt") - - # validate rounds - if not isinstance(rounds, int_types): - raise exc.ExpectedTypeError(rounds, "int", "rounds") - if rounds < 1: - raise ValueError("rounds must be at least 1") - - # validate keylen - if keylen is None: - keylen = digest_size - elif not isinstance(keylen, int_types): - raise exc.ExpectedTypeError(keylen, "int or None", "keylen") - elif keylen < 0: - raise ValueError("keylen must be at least 0") - elif keylen > digest_size: - raise ValueError("keylength too large for digest: %r > %r" % - (keylen, digest_size)) - - # main pbkdf1 loop - block = secret + salt - for _ in irange(rounds): - block = const(block).digest() - return block[:keylen] - -#============================================================================= -# pbkdf2 -#============================================================================= - -_pack_uint32 = Struct(">L").pack - -def pbkdf2_hmac(digest, secret, salt, rounds, keylen=None): - """pkcs#5 password-based key derivation v2.0 using HMAC + arbitrary digest. - - :arg digest: - digest name or constructor. - - :arg secret: - passphrase to use to generate key. - may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8). - - :arg salt: - salt string to use when generating key. - may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8). - - :param rounds: - number of rounds to use to generate key. - - :arg keylen: - number of bytes to generate. - if omitted / ``None``, will use digest's native output size. - - :returns: - raw bytes of generated key - - .. 
versionchanged:: 1.7 - - This function will use the first available of the following backends: - - * `fastpbk2 `_ - * :func:`hashlib.pbkdf2_hmac` (only available in py2 >= 2.7.8, and py3 >= 3.4) - * builtin pure-python backend - - See :data:`passlib.crypto.digest.PBKDF2_BACKENDS` to determine - which backend(s) are in use. - """ - # validate secret & salt - secret = to_bytes(secret, param="secret") - salt = to_bytes(salt, param="salt") - - # resolve digest - digest_info = lookup_hash(digest) - digest_size = digest_info.digest_size - - # validate rounds - if not isinstance(rounds, int_types): - raise exc.ExpectedTypeError(rounds, "int", "rounds") - if rounds < 1: - raise ValueError("rounds must be at least 1") - - # validate keylen - if keylen is None: - keylen = digest_size - elif not isinstance(keylen, int_types): - raise exc.ExpectedTypeError(keylen, "int or None", "keylen") - elif keylen < 1: - # XXX: could allow keylen=0, but want to be compat w/ stdlib - raise ValueError("keylen must be at least 1") - - # find smallest block count s.t. keylen <= block_count * digest_size; - # make sure block count won't overflow (per pbkdf2 spec) - # this corresponds to throwing error if keylen > digest_size * MAX_UINT32 - # NOTE: stdlib will throw error at lower bound (keylen > MAX_SINT32) - # NOTE: have do this before other backends checked, since fastpbkdf2 raises wrong error - # (InvocationError, not OverflowError) - block_count = (keylen + digest_size - 1) // digest_size - if block_count > MAX_UINT32: - raise OverflowError("keylen too long for digest") - - # - # check for various high-speed backends - # - - # ~3x faster than pure-python backend - # NOTE: have to do this after above guards since fastpbkdf2 lacks bounds checks. 
- if digest_info.supported_by_fastpbkdf2: - return _fast_pbkdf2_hmac(digest_info.name, secret, salt, rounds, keylen) - - # ~1.4x faster than pure-python backend - # NOTE: have to do this after fastpbkdf2 since hashlib-ssl is slower, - # will support larger number of hashes. - if digest_info.supported_by_hashlib_pbkdf2: - return _stdlib_pbkdf2_hmac(digest_info.name, secret, salt, rounds, keylen) - - # - # otherwise use our own implementation - # - - # generated keyed hmac - keyed_hmac = compile_hmac(digest, secret) - - # get helper to calculate pbkdf2 inner loop efficiently - calc_block = _get_pbkdf2_looper(digest_size) - - # assemble & return result - return join_bytes( - calc_block(keyed_hmac, keyed_hmac(salt + _pack_uint32(i)), rounds) - for i in irange(1, block_count + 1) - )[:keylen] - -#------------------------------------------------------------------------------------- -# pick best choice for pure-python helper -# TODO: consider some alternatives, such as C-accelerated xor_bytes helper if available -#------------------------------------------------------------------------------------- -# NOTE: this env var is only present to support the admin/benchmark_pbkdf2 script -_force_backend = os.environ.get("PASSLIB_PBKDF2_BACKEND") or "any" - -if PY3 and _force_backend in ["any", "from-bytes"]: - from functools import partial - - def _get_pbkdf2_looper(digest_size): - return partial(_pbkdf2_looper, digest_size) - - def _pbkdf2_looper(digest_size, keyed_hmac, digest, rounds): - """ - py3-only implementation of pbkdf2 inner loop; - uses 'int.from_bytes' + integer XOR - """ - from_bytes = int.from_bytes - BIG = "big" # endianess doesn't matter, just has to be consistent - accum = from_bytes(digest, BIG) - for _ in irange(rounds - 1): - digest = keyed_hmac(digest) - accum ^= from_bytes(digest, BIG) - return accum.to_bytes(digest_size, BIG) - - _builtin_backend = "from-bytes" - -elif _force_backend in ["any", "unpack", "from-bytes"]: - from struct import Struct - from 
passlib.utils import sys_bits - - _have_64_bit = (sys_bits >= 64) - - #: cache used by _get_pbkdf2_looper - _looper_cache = {} - - def _get_pbkdf2_looper(digest_size): - """ - We want a helper function which performs equivalent of the following:: - - def helper(keyed_hmac, digest, rounds): - accum = digest - for _ in irange(rounds - 1): - digest = keyed_hmac(digest) - accum ^= digest - return accum - - However, no efficient way to implement "bytes ^ bytes" in python. - Instead, using approach where we dynamically compile a helper function based - on digest size. Instead of a single `accum` var, this helper breaks the digest - into a series of integers. - - It stores these in a series of`accum_` vars, and performs `accum ^= digest` - by unpacking digest and perform xor for each "accum_ ^= digest_". - this keeps everything in locals, avoiding excessive list creation, encoding or decoding, - etc. - - :param digest_size: - digest size to compile for, in bytes. (must be multiple of 4). - - :return: - helper function with call signature outlined above. - """ - # - # cache helpers - # - try: - return _looper_cache[digest_size] - except KeyError: - pass - - # - # figure out most efficient struct format to unpack digest into list of native ints - # - if _have_64_bit and not digest_size & 0x7: - # digest size multiple of 8, on a 64 bit system -- use array of UINT64 - count = (digest_size >> 3) - fmt = "=%dQ" % count - elif not digest_size & 0x3: - if _have_64_bit: - # digest size multiple of 4, on a 64 bit system -- use array of UINT64 + 1 UINT32 - count = (digest_size >> 3) - fmt = "=%dQI" % count - count += 1 - else: - # digest size multiple of 4, on a 32 bit system -- use array of UINT32 - count = (digest_size >> 2) - fmt = "=%dI" % count - else: - # stopping here, cause no known hashes have digest size that isn't multiple of 4 bytes. 
- # if needed, could go crazy w/ "H" & "B" - raise NotImplementedError("unsupported digest size: %d" % digest_size) - struct = Struct(fmt) - - # - # build helper source - # - tdict = dict( - digest_size=digest_size, - accum_vars=", ".join("acc_%d" % i for i in irange(count)), - digest_vars=", ".join("dig_%d" % i for i in irange(count)), - ) - - # head of function - source = ( - "def helper(keyed_hmac, digest, rounds):\n" - " '''pbkdf2 loop helper for digest_size={digest_size}'''\n" - " unpack_digest = struct.unpack\n" - " {accum_vars} = unpack_digest(digest)\n" - " for _ in irange(1, rounds):\n" - " digest = keyed_hmac(digest)\n" - " {digest_vars} = unpack_digest(digest)\n" - ).format(**tdict) - - # xor digest - for i in irange(count): - source += " acc_%d ^= dig_%d\n" % (i, i) - - # return result - source += " return struct.pack({accum_vars})\n".format(**tdict) - - # - # compile helper - # - code = compile(source, "", "exec") - gdict = dict(irange=irange, struct=struct) - ldict = dict() - eval(code, gdict, ldict) - helper = ldict['helper'] - if __debug__: - helper.__source__ = source - - # - # store in cache - # - _looper_cache[digest_size] = helper - return helper - - _builtin_backend = "unpack" - -else: - assert _force_backend in ["any", "hexlify"] - - # XXX: older & slower approach that used int(hexlify()), - # keeping it around for a little while just for benchmarking. 
- - from binascii import hexlify as _hexlify - from passlib.utils import int_to_bytes - - def _get_pbkdf2_looper(digest_size): - return _pbkdf2_looper - - def _pbkdf2_looper(keyed_hmac, digest, rounds): - hexlify = _hexlify - accum = int(hexlify(digest), 16) - for _ in irange(rounds - 1): - digest = keyed_hmac(digest) - accum ^= int(hexlify(digest), 16) - return int_to_bytes(accum, len(digest)) - - _builtin_backend = "hexlify" - -# helper for benchmark script -- disable hashlib, fastpbkdf2 support if builtin requested -if _force_backend == _builtin_backend: - _fast_pbkdf2_hmac = _stdlib_pbkdf2_hmac = None - -# expose info about what backends are active -PBKDF2_BACKENDS = [b for b in [ - "fastpbkdf2" if _fast_pbkdf2_hmac else None, - "hashlib-ssl" if _stdlib_pbkdf2_hmac else None, - "builtin-" + _builtin_backend -] if b] - -# *very* rough estimate of relative speed (compared to sha256 using 'unpack' backend on 64bit arch) -if "fastpbkdf2" in PBKDF2_BACKENDS: - PBKDF2_SPEED_FACTOR = 3 -elif "hashlib-ssl" in PBKDF2_BACKENDS: - PBKDF2_SPEED_FACTOR = 1.4 -else: - # remaining backends have *some* difference in performance, but not enough to matter - PBKDF2_SPEED_FACTOR = 1 - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/crypto/scrypt/__init__.py b/libs_crutch/contrib/passlib/crypto/scrypt/__init__.py deleted file mode 100644 index c71873a..0000000 --- a/libs_crutch/contrib/passlib/crypto/scrypt/__init__.py +++ /dev/null @@ -1,281 +0,0 @@ -""" -passlib.utils.scrypt -- scrypt hash frontend and help utilities - -XXX: add this module to public docs? 
-""" -#========================================================================== -# imports -#========================================================================== -from __future__ import absolute_import -# core -import logging; log = logging.getLogger(__name__) -from warnings import warn -# pkg -from passlib import exc -from passlib.utils import to_bytes -from passlib.utils.compat import PYPY -# local -__all__ =[ - "validate", - "scrypt", -] - -#========================================================================== -# config validation -#========================================================================== - -#: internal global constant for setting stdlib scrypt's maxmem (int bytes). -#: set to -1 to auto-calculate (see _load_stdlib_backend() below) -#: set to 0 for openssl default (32mb according to python docs) -#: TODO: standardize this across backends, and expose support via scrypt hash config; -#: currently not very configurable, and only applies to stdlib backend. -SCRYPT_MAXMEM = -1 - -#: max output length in bytes -MAX_KEYLEN = ((1 << 32) - 1) * 32 - -#: max ``r * p`` limit -MAX_RP = (1 << 30) - 1 - -# TODO: unittests for this function -def validate(n, r, p): - """ - helper which validates a set of scrypt config parameters. - scrypt will take ``O(n * r * p)`` time and ``O(n * r)`` memory. - limitations are that ``n = 2**``, ``n < 2**(16*r)``, ``r * p < 2 ** 30``. - - :param n: scrypt rounds - :param r: scrypt block size - :param p: scrypt parallel factor - """ - if r < 1: - raise ValueError("r must be > 0: r=%r" % r) - - if p < 1: - raise ValueError("p must be > 0: p=%r" % p) - - if r * p > MAX_RP: - # pbkdf2-hmac-sha256 limitation - it will be requested to generate ``p*(2*r)*64`` bytes, - # but pbkdf2 can do max of (2**31-1) blocks, and sha-256 has 32 byte block size... 
- # so ``(2**31-1)*32 >= p*r*128`` -> ``r*p < 2**30`` - raise ValueError("r * p must be < 2**30: r=%r, p=%r" % (r,p)) - - if n < 2 or n & (n - 1): - raise ValueError("n must be > 1, and a power of 2: n=%r" % n) - - return True - - -UINT32_SIZE = 4 - - -def estimate_maxmem(n, r, p, fudge=1.05): - """ - calculate memory required for parameter combination. - assumes parameters have already been validated. - - .. warning:: - this is derived from OpenSSL's scrypt maxmem formula; - and may not be correct for other implementations - (additional buffers, different parallelism tradeoffs, etc). - """ - # XXX: expand to provide upper bound for diff backends, or max across all of them? - # NOTE: openssl's scrypt() enforces it's maxmem parameter based on calc located at - # , ending in line containing "Blen + Vlen > maxmem" - # using the following formula: - # Blen = p * 128 * r - # Vlen = 32 * r * (N + 2) * sizeof(uint32_t) - # total_bytes = Blen + Vlen - maxmem = r * (128 * p + 32 * (n + 2) * UINT32_SIZE) - # add fudge factor so we don't have off-by-one mismatch w/ openssl - maxmem = int(maxmem * fudge) - return maxmem - - -# TODO: configuration picker (may need psutil for full effect) - -#========================================================================== -# hash frontend -#========================================================================== - -#: backend function used by scrypt(), filled in by _set_backend() -_scrypt = None - -#: name of backend currently in use, exposed for informational purposes. -backend = None - -def scrypt(secret, salt, n, r, p=1, keylen=32): - """run SCrypt key derivation function using specified parameters. - - :arg secret: - passphrase string (unicode is encoded to bytes using utf-8). - - :arg salt: - salt string (unicode is encoded to bytes using utf-8). - - :arg n: - integer 'N' parameter - - :arg r: - integer 'r' parameter - - :arg p: - integer 'p' parameter - - :arg keylen: - number of bytes of key to generate. 
- defaults to 32 (the internal block size). - - :returns: - a *keylen*-sized bytes instance - - SCrypt imposes a number of constraints on it's input parameters: - - * ``r * p < 2**30`` -- due to a limitation of PBKDF2-HMAC-SHA256. - * ``keylen < (2**32 - 1) * 32`` -- due to a limitation of PBKDF2-HMAC-SHA256. - * ``n`` must a be a power of 2, and > 1 -- internal limitation of scrypt() implementation - - :raises ValueError: if the provided parameters are invalid (see constraints above). - - .. warning:: - - Unless the third-party ``scrypt ``_ package - is installed, passlib will use a builtin pure-python implementation of scrypt, - which is *considerably* slower (and thus requires a much lower / less secure - ``n`` value in order to be usuable). Installing the :mod:`!scrypt` package - is strongly recommended. - """ - validate(n, r, p) - secret = to_bytes(secret, param="secret") - salt = to_bytes(salt, param="salt") - if keylen < 1: - raise ValueError("keylen must be at least 1") - if keylen > MAX_KEYLEN: - raise ValueError("keylen too large, must be <= %d" % MAX_KEYLEN) - return _scrypt(secret, salt, n, r, p, keylen) - - -def _load_builtin_backend(): - """ - Load pure-python scrypt implementation built into passlib. - """ - slowdown = 10 if PYPY else 100 - warn("Using builtin scrypt backend, which is %dx slower than is required " - "for adequate security. Installing scrypt support (via 'pip install scrypt') " - "is strongly recommended" % slowdown, exc.PasslibSecurityWarning) - from ._builtin import ScryptEngine - return ScryptEngine.execute - - -def _load_cffi_backend(): - """ - Try to import the ctypes-based scrypt hash function provided by the - ``scrypt ``_ package. - """ - try: - from scrypt import hash - return hash - except ImportError: - pass - # not available, but check to see if package present but outdated / not installed right - try: - import scrypt - except ImportError as err: - if "scrypt" not in str(err): - # e.g. 
if cffi isn't set up right - # user should try importing scrypt explicitly to diagnose problem. - warn("'scrypt' package failed to import correctly (possible installation issue?)", - exc.PasslibWarning) - # else: package just isn't installed - else: - warn("'scrypt' package is too old (lacks ``hash()`` method)", exc.PasslibWarning) - return None - - -def _load_stdlib_backend(): - """ - Attempt to load stdlib scrypt() implement and return wrapper. - Returns None if not found. - """ - try: - # new in python 3.6, if compiled with openssl >= 1.1 - from hashlib import scrypt as stdlib_scrypt - except ImportError: - return None - - def stdlib_scrypt_wrapper(secret, salt, n, r, p, keylen): - # work out appropriate "maxmem" parameter - # - # TODO: would like to enforce a single "maxmem" policy across all backends; - # and maybe expose this via scrypt hasher config. - # - # for now, since parameters should all be coming from internally-controlled sources - # (password hashes), using policy of "whatever memory the parameters needs". - # furthermore, since stdlib scrypt is only place that needs this, - # currently calculating exactly what maxmem needs to make things work for stdlib call. - # as hack, this can be overriden via SCRYPT_MAXMEM above, - # would like to formalize all of this. - maxmem = SCRYPT_MAXMEM - if maxmem < 0: - maxmem = estimate_maxmem(n, r, p) - return stdlib_scrypt(password=secret, salt=salt, n=n, r=r, p=p, dklen=keylen, - maxmem=maxmem) - - return stdlib_scrypt_wrapper - - -#: list of potential backends -backend_values = ("stdlib", "scrypt", "builtin") - -#: dict mapping backend name -> loader -_backend_loaders = dict( - stdlib=_load_stdlib_backend, - scrypt=_load_cffi_backend, # XXX: rename backend constant to "cffi"? - builtin=_load_builtin_backend, -) - - -def _set_backend(name, dryrun=False): - """ - set backend for scrypt(). if name not specified, loads first available. - - :raises ~passlib.exc.MissingBackendError: if backend can't be found - - .. 
note:: mainly intended to be called by unittests, and scrypt hash handler - """ - if name == "any": - return - elif name == "default": - for name in backend_values: - try: - return _set_backend(name, dryrun=dryrun) - except exc.MissingBackendError: - continue - raise exc.MissingBackendError("no scrypt backends available") - else: - loader = _backend_loaders.get(name) - if not loader: - raise ValueError("unknown scrypt backend: %r" % (name,)) - hash = loader() - if not hash: - raise exc.MissingBackendError("scrypt backend %r not available" % name) - if dryrun: - return - global _scrypt, backend - backend = name - _scrypt = hash - -# initialize backend -_set_backend("default") - - -def _has_backend(name): - try: - _set_backend(name, dryrun=True) - return True - except exc.MissingBackendError: - return False - -#========================================================================== -# eof -#========================================================================== diff --git a/libs_crutch/contrib/passlib/crypto/scrypt/_builtin.py b/libs_crutch/contrib/passlib/crypto/scrypt/_builtin.py deleted file mode 100644 index e9bb305..0000000 --- a/libs_crutch/contrib/passlib/crypto/scrypt/_builtin.py +++ /dev/null @@ -1,244 +0,0 @@ -"""passlib.utils.scrypt._builtin -- scrypt() kdf in pure-python""" -#========================================================================== -# imports -#========================================================================== -# core -import operator -import struct -# pkg -from passlib.utils.compat import izip -from passlib.crypto.digest import pbkdf2_hmac -from passlib.crypto.scrypt._salsa import salsa20 -# local -__all__ =[ - "ScryptEngine", -] - -#========================================================================== -# scrypt engine -#========================================================================== -class ScryptEngine(object): - """ - helper class used to run scrypt kdf, see scrypt() for frontend - - .. 
warning:: - this class does NO validation of the input ranges or types. - - it's not intended to be used directly, - but only as a backend for :func:`passlib.utils.scrypt.scrypt()`. - """ - #================================================================= - # instance attrs - #================================================================= - - # primary scrypt config parameters - n = 0 - r = 0 - p = 0 - - # derived values & objects - smix_bytes = 0 - iv_bytes = 0 - bmix_len = 0 - bmix_half_len = 0 - bmix_struct = None - integerify = None - - #================================================================= - # frontend - #================================================================= - @classmethod - def execute(cls, secret, salt, n, r, p, keylen): - """create engine & run scrypt() hash calculation""" - return cls(n, r, p).run(secret, salt, keylen) - - #================================================================= - # init - #================================================================= - def __init__(self, n, r, p): - # store config - self.n = n - self.r = r - self.p = p - self.smix_bytes = r << 7 # num bytes in smix input - 2*r*16*4 - self.iv_bytes = self.smix_bytes * p - self.bmix_len = bmix_len = r << 5 # length of bmix block list - 32*r integers - self.bmix_half_len = r << 4 - assert struct.calcsize("I") == 4 - self.bmix_struct = struct.Struct("<" + str(bmix_len) + "I") - - # use optimized bmix for certain cases - if r == 1: - self.bmix = self._bmix_1 - - # pick best integerify function - integerify(bmix_block) should - # take last 64 bytes of block and return a little-endian integer. - # since it's immediately converted % n, we only have to extract - # the first 32 bytes if n < 2**32 - which due to the current - # internal representation, is already unpacked as a 32-bit int. 
- if n <= 0xFFFFffff: - integerify = operator.itemgetter(-16) - else: - assert n <= 0xFFFFffffFFFFffff - ig1 = operator.itemgetter(-16) - ig2 = operator.itemgetter(-17) - def integerify(X): - return ig1(X) | (ig2(X)<<32) - self.integerify = integerify - - #================================================================= - # frontend - #================================================================= - def run(self, secret, salt, keylen): - """ - run scrypt kdf for specified secret, salt, and keylen - - .. note:: - - * time cost is ``O(n * r * p)`` - * mem cost is ``O(n * r)`` - """ - # stretch salt into initial byte array via pbkdf2 - iv_bytes = self.iv_bytes - input = pbkdf2_hmac("sha256", secret, salt, rounds=1, keylen=iv_bytes) - - # split initial byte array into 'p' mflen-sized chunks, - # and run each chunk through smix() to generate output chunk. - smix = self.smix - if self.p == 1: - output = smix(input) - else: - # XXX: *could* use threading here, if really high p values encountered, - # but would tradeoff for more memory usage. - smix_bytes = self.smix_bytes - output = b''.join( - smix(input[offset:offset+smix_bytes]) - for offset in range(0, iv_bytes, smix_bytes) - ) - - # stretch final byte array into output via pbkdf2 - return pbkdf2_hmac("sha256", secret, output, rounds=1, keylen=keylen) - - #================================================================= - # smix() helper - #================================================================= - def smix(self, input): - """run SCrypt smix function on a single input block - - :arg input: - byte string containing input data. - interpreted as 32*r little endian 4 byte integers. - - :returns: - byte string containing output data - derived by mixing input using n & r parameters. - - .. 
note:: time & mem cost are both ``O(n * r)`` - """ - # gather locals - bmix = self.bmix - bmix_struct = self.bmix_struct - integerify = self.integerify - n = self.n - - # parse input into 32*r integers ('X' in scrypt source) - # mem cost -- O(r) - buffer = list(bmix_struct.unpack(input)) - - # starting with initial buffer contents, derive V s.t. - # V[0]=initial_buffer ... V[i] = bmix(V[i-1], V[i-1]) ... V[n-1] = bmix(V[n-2], V[n-2]) - # final buffer contents should equal bmix(V[n-1], V[n-1]) - # - # time cost -- O(n * r) -- n loops, bmix is O(r) - # mem cost -- O(n * r) -- V is n-element array of r-element tuples - # NOTE: could do time / memory tradeoff to shrink size of V - def vgen(): - i = 0 - while i < n: - last = tuple(buffer) - yield last - bmix(last, buffer) - i += 1 - V = list(vgen()) - - # generate result from X & V. - # - # time cost -- O(n * r) -- loops n times, calls bmix() which has O(r) time cost - # mem cost -- O(1) -- allocates nothing, calls bmix() which has O(1) mem cost - get_v_elem = V.__getitem__ - n_mask = n - 1 - i = 0 - while i < n: - j = integerify(buffer) & n_mask - result = tuple(a ^ b for a, b in izip(buffer, get_v_elem(j))) - bmix(result, buffer) - i += 1 - - # # NOTE: we could easily support arbitrary values of ``n``, not just powers of 2, - # # but very few implementations have that ability, so not enabling it for now... - # if not n_is_log_2: - # while i < n: - # j = integerify(buffer) % n - # tmp = tuple(a^b for a,b in izip(buffer, get_v_elem(j))) - # bmix(tmp,buffer) - # i += 1 - - # repack tmp - return bmix_struct.pack(*buffer) - - #================================================================= - # bmix() helper - #================================================================= - def bmix(self, source, target): - """ - block mixing function used by smix() - uses salsa20/8 core to mix block contents. - - :arg source: - source to read from. - should be list of 32*r 4-byte integers - (2*r salsa20 blocks). 
- - :arg target: - target to write to. - should be list with same size as source. - the existing value of this buffer is ignored. - - .. warning:: - - this operates *in place* on target, - so source & target should NOT be same list. - - .. note:: - - * time cost is ``O(r)`` -- loops 16*r times, salsa20() has ``O(1)`` cost. - - * memory cost is ``O(1)`` -- salsa20() uses 16 x uint4, - all other operations done in-place. - """ - ## assert source is not target - # Y[-1] = B[2r-1], Y[i] = hash( Y[i-1] xor B[i]) - # B' <-- (Y_0, Y_2 ... Y_{2r-2}, Y_1, Y_3 ... Y_{2r-1}) */ - half = self.bmix_half_len # 16*r out of 32*r - start of Y_1 - tmp = source[-16:] # 'X' in scrypt source - siter = iter(source) - j = 0 - while j < half: - jn = j+16 - target[j:jn] = tmp = salsa20(a ^ b for a, b in izip(tmp, siter)) - target[half+j:half+jn] = tmp = salsa20(a ^ b for a, b in izip(tmp, siter)) - j = jn - - def _bmix_1(self, source, target): - """special bmix() method optimized for ``r=1`` case""" - B = source[16:] - target[:16] = tmp = salsa20(a ^ b for a, b in izip(B, iter(source))) - target[16:] = salsa20(a ^ b for a, b in izip(tmp, B)) - - #================================================================= - # eoc - #================================================================= - -#========================================================================== -# eof -#========================================================================== diff --git a/libs_crutch/contrib/passlib/crypto/scrypt/_gen_files.py b/libs_crutch/contrib/passlib/crypto/scrypt/_gen_files.py deleted file mode 100644 index 55ddfae..0000000 --- a/libs_crutch/contrib/passlib/crypto/scrypt/_gen_files.py +++ /dev/null @@ -1,154 +0,0 @@ -"""passlib.utils.scrypt._gen_files - meta script that generates _salsa.py""" -#========================================================================== -# imports -#========================================================================== -# core -import os -# pkg -# local 
-#========================================================================== -# constants -#========================================================================== - -_SALSA_OPS = [ - # row = (target idx, source idx 1, source idx 2, rotate) - # interpreted as salsa operation over uint32... - # target = (source1+source2)<> (32 - (b)))) - ##x[ 4] ^= R(x[ 0]+x[12], 7); x[ 8] ^= R(x[ 4]+x[ 0], 9); - ##x[12] ^= R(x[ 8]+x[ 4],13); x[ 0] ^= R(x[12]+x[ 8],18); - ( 4, 0, 12, 7), - ( 8, 4, 0, 9), - ( 12, 8, 4, 13), - ( 0, 12, 8, 18), - - ##x[ 9] ^= R(x[ 5]+x[ 1], 7); x[13] ^= R(x[ 9]+x[ 5], 9); - ##x[ 1] ^= R(x[13]+x[ 9],13); x[ 5] ^= R(x[ 1]+x[13],18); - ( 9, 5, 1, 7), - ( 13, 9, 5, 9), - ( 1, 13, 9, 13), - ( 5, 1, 13, 18), - - ##x[14] ^= R(x[10]+x[ 6], 7); x[ 2] ^= R(x[14]+x[10], 9); - ##x[ 6] ^= R(x[ 2]+x[14],13); x[10] ^= R(x[ 6]+x[ 2],18); - ( 14, 10, 6, 7), - ( 2, 14, 10, 9), - ( 6, 2, 14, 13), - ( 10, 6, 2, 18), - - ##x[ 3] ^= R(x[15]+x[11], 7); x[ 7] ^= R(x[ 3]+x[15], 9); - ##x[11] ^= R(x[ 7]+x[ 3],13); x[15] ^= R(x[11]+x[ 7],18); - ( 3, 15, 11, 7), - ( 7, 3, 15, 9), - ( 11, 7, 3, 13), - ( 15, 11, 7, 18), - - ##/* Operate on rows. 
*/ - ##x[ 1] ^= R(x[ 0]+x[ 3], 7); x[ 2] ^= R(x[ 1]+x[ 0], 9); - ##x[ 3] ^= R(x[ 2]+x[ 1],13); x[ 0] ^= R(x[ 3]+x[ 2],18); - ( 1, 0, 3, 7), - ( 2, 1, 0, 9), - ( 3, 2, 1, 13), - ( 0, 3, 2, 18), - - ##x[ 6] ^= R(x[ 5]+x[ 4], 7); x[ 7] ^= R(x[ 6]+x[ 5], 9); - ##x[ 4] ^= R(x[ 7]+x[ 6],13); x[ 5] ^= R(x[ 4]+x[ 7],18); - ( 6, 5, 4, 7), - ( 7, 6, 5, 9), - ( 4, 7, 6, 13), - ( 5, 4, 7, 18), - - ##x[11] ^= R(x[10]+x[ 9], 7); x[ 8] ^= R(x[11]+x[10], 9); - ##x[ 9] ^= R(x[ 8]+x[11],13); x[10] ^= R(x[ 9]+x[ 8],18); - ( 11, 10, 9, 7), - ( 8, 11, 10, 9), - ( 9, 8, 11, 13), - ( 10, 9, 8, 18), - - ##x[12] ^= R(x[15]+x[14], 7); x[13] ^= R(x[12]+x[15], 9); - ##x[14] ^= R(x[13]+x[12],13); x[15] ^= R(x[14]+x[13],18); - ( 12, 15, 14, 7), - ( 13, 12, 15, 9), - ( 14, 13, 12, 13), - ( 15, 14, 13, 18), -] - -def main(): - target = os.path.join(os.path.dirname(__file__), "_salsa.py") - fh = file(target, "w") - write = fh.write - - VNAMES = ["v%d" % i for i in range(16)] - - PAD = " " * 4 - PAD2 = " " * 8 - PAD3 = " " * 12 - TLIST = ", ".join("b%d" % i for i in range(16)) - VLIST = ", ".join(VNAMES) - kwds = dict( - VLIST=VLIST, - TLIST=TLIST, - ) - - write('''\ -"""passlib.utils.scrypt._salsa - salsa 20/8 core, autogenerated by _gen_salsa.py""" -#================================================================= -# salsa function -#================================================================= - -def salsa20(input): - \"""apply the salsa20/8 core to the provided input - - :args input: input list containing 16 32-bit integers - :returns: result list containing 16 32-bit integers - \""" - - %(TLIST)s = input - %(VLIST)s = \\ - %(TLIST)s - - i = 0 - while i < 4: -''' % kwds) - - for idx, (target, source1, source2, rotate) in enumerate(_SALSA_OPS): - write('''\ - # salsa op %(idx)d: [%(it)d] ^= ([%(is1)d]+[%(is2)d])<<<%(rot1)d - t = (%(src1)s + %(src2)s) & 0xffffffff - %(dst)s ^= ((t & 0x%(rmask)08x) << %(rot1)d) | (t >> %(rot2)d) - -''' % dict( - idx=idx, is1 = source1, is2=source2, it=target, 
- src1=VNAMES[source1], - src2=VNAMES[source2], - dst=VNAMES[target], - rmask=(1<<(32-rotate))-1, - rot1=rotate, - rot2=32-rotate, - )) - - write('''\ - i += 1 - -''') - - for idx in range(16): - write(PAD + "b%d = (b%d + v%d) & 0xffffffff\n" % (idx,idx,idx)) - - write('''\ - - return %(TLIST)s - -#================================================================= -# eof -#================================================================= -''' % kwds) - -if __name__ == "__main__": - main() - -#========================================================================== -# eof -#========================================================================== diff --git a/libs_crutch/contrib/passlib/crypto/scrypt/_salsa.py b/libs_crutch/contrib/passlib/crypto/scrypt/_salsa.py deleted file mode 100644 index 9112732..0000000 --- a/libs_crutch/contrib/passlib/crypto/scrypt/_salsa.py +++ /dev/null @@ -1,170 +0,0 @@ -"""passlib.utils.scrypt._salsa - salsa 20/8 core, autogenerated by _gen_salsa.py""" -#================================================================= -# salsa function -#================================================================= - -def salsa20(input): - """apply the salsa20/8 core to the provided input - - :args input: input list containing 16 32-bit integers - :returns: result list containing 16 32-bit integers - """ - - b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15 = input - v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 = \ - b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15 - - i = 0 - while i < 4: - # salsa op 0: [4] ^= ([0]+[12])<<<7 - t = (v0 + v12) & 0xffffffff - v4 ^= ((t & 0x01ffffff) << 7) | (t >> 25) - - # salsa op 1: [8] ^= ([4]+[0])<<<9 - t = (v4 + v0) & 0xffffffff - v8 ^= ((t & 0x007fffff) << 9) | (t >> 23) - - # salsa op 2: [12] ^= ([8]+[4])<<<13 - t = (v8 + v4) & 0xffffffff - v12 ^= ((t & 0x0007ffff) << 13) | (t >> 19) - - # salsa op 3: [0] ^= ([12]+[8])<<<18 - t = (v12 + v8) & 
0xffffffff - v0 ^= ((t & 0x00003fff) << 18) | (t >> 14) - - # salsa op 4: [9] ^= ([5]+[1])<<<7 - t = (v5 + v1) & 0xffffffff - v9 ^= ((t & 0x01ffffff) << 7) | (t >> 25) - - # salsa op 5: [13] ^= ([9]+[5])<<<9 - t = (v9 + v5) & 0xffffffff - v13 ^= ((t & 0x007fffff) << 9) | (t >> 23) - - # salsa op 6: [1] ^= ([13]+[9])<<<13 - t = (v13 + v9) & 0xffffffff - v1 ^= ((t & 0x0007ffff) << 13) | (t >> 19) - - # salsa op 7: [5] ^= ([1]+[13])<<<18 - t = (v1 + v13) & 0xffffffff - v5 ^= ((t & 0x00003fff) << 18) | (t >> 14) - - # salsa op 8: [14] ^= ([10]+[6])<<<7 - t = (v10 + v6) & 0xffffffff - v14 ^= ((t & 0x01ffffff) << 7) | (t >> 25) - - # salsa op 9: [2] ^= ([14]+[10])<<<9 - t = (v14 + v10) & 0xffffffff - v2 ^= ((t & 0x007fffff) << 9) | (t >> 23) - - # salsa op 10: [6] ^= ([2]+[14])<<<13 - t = (v2 + v14) & 0xffffffff - v6 ^= ((t & 0x0007ffff) << 13) | (t >> 19) - - # salsa op 11: [10] ^= ([6]+[2])<<<18 - t = (v6 + v2) & 0xffffffff - v10 ^= ((t & 0x00003fff) << 18) | (t >> 14) - - # salsa op 12: [3] ^= ([15]+[11])<<<7 - t = (v15 + v11) & 0xffffffff - v3 ^= ((t & 0x01ffffff) << 7) | (t >> 25) - - # salsa op 13: [7] ^= ([3]+[15])<<<9 - t = (v3 + v15) & 0xffffffff - v7 ^= ((t & 0x007fffff) << 9) | (t >> 23) - - # salsa op 14: [11] ^= ([7]+[3])<<<13 - t = (v7 + v3) & 0xffffffff - v11 ^= ((t & 0x0007ffff) << 13) | (t >> 19) - - # salsa op 15: [15] ^= ([11]+[7])<<<18 - t = (v11 + v7) & 0xffffffff - v15 ^= ((t & 0x00003fff) << 18) | (t >> 14) - - # salsa op 16: [1] ^= ([0]+[3])<<<7 - t = (v0 + v3) & 0xffffffff - v1 ^= ((t & 0x01ffffff) << 7) | (t >> 25) - - # salsa op 17: [2] ^= ([1]+[0])<<<9 - t = (v1 + v0) & 0xffffffff - v2 ^= ((t & 0x007fffff) << 9) | (t >> 23) - - # salsa op 18: [3] ^= ([2]+[1])<<<13 - t = (v2 + v1) & 0xffffffff - v3 ^= ((t & 0x0007ffff) << 13) | (t >> 19) - - # salsa op 19: [0] ^= ([3]+[2])<<<18 - t = (v3 + v2) & 0xffffffff - v0 ^= ((t & 0x00003fff) << 18) | (t >> 14) - - # salsa op 20: [6] ^= ([5]+[4])<<<7 - t = (v5 + v4) & 0xffffffff - v6 ^= ((t & 0x01ffffff) 
<< 7) | (t >> 25) - - # salsa op 21: [7] ^= ([6]+[5])<<<9 - t = (v6 + v5) & 0xffffffff - v7 ^= ((t & 0x007fffff) << 9) | (t >> 23) - - # salsa op 22: [4] ^= ([7]+[6])<<<13 - t = (v7 + v6) & 0xffffffff - v4 ^= ((t & 0x0007ffff) << 13) | (t >> 19) - - # salsa op 23: [5] ^= ([4]+[7])<<<18 - t = (v4 + v7) & 0xffffffff - v5 ^= ((t & 0x00003fff) << 18) | (t >> 14) - - # salsa op 24: [11] ^= ([10]+[9])<<<7 - t = (v10 + v9) & 0xffffffff - v11 ^= ((t & 0x01ffffff) << 7) | (t >> 25) - - # salsa op 25: [8] ^= ([11]+[10])<<<9 - t = (v11 + v10) & 0xffffffff - v8 ^= ((t & 0x007fffff) << 9) | (t >> 23) - - # salsa op 26: [9] ^= ([8]+[11])<<<13 - t = (v8 + v11) & 0xffffffff - v9 ^= ((t & 0x0007ffff) << 13) | (t >> 19) - - # salsa op 27: [10] ^= ([9]+[8])<<<18 - t = (v9 + v8) & 0xffffffff - v10 ^= ((t & 0x00003fff) << 18) | (t >> 14) - - # salsa op 28: [12] ^= ([15]+[14])<<<7 - t = (v15 + v14) & 0xffffffff - v12 ^= ((t & 0x01ffffff) << 7) | (t >> 25) - - # salsa op 29: [13] ^= ([12]+[15])<<<9 - t = (v12 + v15) & 0xffffffff - v13 ^= ((t & 0x007fffff) << 9) | (t >> 23) - - # salsa op 30: [14] ^= ([13]+[12])<<<13 - t = (v13 + v12) & 0xffffffff - v14 ^= ((t & 0x0007ffff) << 13) | (t >> 19) - - # salsa op 31: [15] ^= ([14]+[13])<<<18 - t = (v14 + v13) & 0xffffffff - v15 ^= ((t & 0x00003fff) << 18) | (t >> 14) - - i += 1 - - b0 = (b0 + v0) & 0xffffffff - b1 = (b1 + v1) & 0xffffffff - b2 = (b2 + v2) & 0xffffffff - b3 = (b3 + v3) & 0xffffffff - b4 = (b4 + v4) & 0xffffffff - b5 = (b5 + v5) & 0xffffffff - b6 = (b6 + v6) & 0xffffffff - b7 = (b7 + v7) & 0xffffffff - b8 = (b8 + v8) & 0xffffffff - b9 = (b9 + v9) & 0xffffffff - b10 = (b10 + v10) & 0xffffffff - b11 = (b11 + v11) & 0xffffffff - b12 = (b12 + v12) & 0xffffffff - b13 = (b13 + v13) & 0xffffffff - b14 = (b14 + v14) & 0xffffffff - b15 = (b15 + v15) & 0xffffffff - - return b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15 - -#================================================================= -# eof 
-#================================================================= diff --git a/libs_crutch/contrib/passlib/exc.py b/libs_crutch/contrib/passlib/exc.py deleted file mode 100644 index c4b78b4..0000000 --- a/libs_crutch/contrib/passlib/exc.py +++ /dev/null @@ -1,311 +0,0 @@ -"""passlib.exc -- exceptions & warnings raised by passlib""" -#============================================================================= -# exceptions -#============================================================================= -class UnknownBackendError(ValueError): - """ - Error raised if multi-backend handler doesn't recognize backend name. - Inherits from :exc:`ValueError`. - - .. versionadded:: 1.7 - """ - def __init__(self, hasher, backend): - self.hasher = hasher - self.backend = backend - message = "%s: unknown backend: %r" % (hasher.name, backend) - ValueError.__init__(self, message) - -class MissingBackendError(RuntimeError): - """Error raised if multi-backend handler has no available backends; - or if specifically requested backend is not available. - - :exc:`!MissingBackendError` derives - from :exc:`RuntimeError`, since it usually indicates - lack of an external library or OS feature. - This is primarily raised by handlers which depend on - external libraries (which is currently just - :class:`~passlib.hash.bcrypt`). - """ - -class PasswordSizeError(ValueError): - """ - Error raised if a password exceeds the maximum size allowed - by Passlib (by default, 4096 characters); or if password exceeds - a hash-specific size limitation. - - Many password hash algorithms take proportionately larger amounts of time and/or - memory depending on the size of the password provided. This could present - a potential denial of service (DOS) situation if a maliciously large - password is provided to an application. Because of this, Passlib enforces - a maximum size limit, but one which should be *much* larger - than any legitimate password. 
:exc:`!PasswordSizeError` derives - from :exc:`!ValueError`. - - .. note:: - Applications wishing to use a different limit should set the - ``PASSLIB_MAX_PASSWORD_SIZE`` environmental variable before - Passlib is loaded. The value can be any large positive integer. - - .. attribute:: max_size - - indicates the maximum allowed size. - - .. versionadded:: 1.6 - """ - - max_size = None - - def __init__(self, max_size, msg=None): - self.max_size = max_size - if msg is None: - msg = "password exceeds maximum allowed size" - ValueError.__init__(self, msg) - - # this also prevents a glibc crypt segfault issue, detailed here ... - # http://www.openwall.com/lists/oss-security/2011/11/15/1 - -class PasswordTruncateError(PasswordSizeError): - """ - Error raised if password would be truncated by hash. - This derives from :exc:`PasswordSizeError` and :exc:`ValueError`. - - Hashers such as :class:`~passlib.hash.bcrypt` can be configured to raises - this error by setting ``truncate_error=True``. - - .. attribute:: max_size - - indicates the maximum allowed size. - - .. versionadded:: 1.7 - """ - - def __init__(self, cls, msg=None): - if msg is None: - msg = ("Password too long (%s truncates to %d characters)" % - (cls.name, cls.truncate_size)) - PasswordSizeError.__init__(self, cls.truncate_size, msg) - -class PasslibSecurityError(RuntimeError): - """ - Error raised if critical security issue is detected - (e.g. an attempt is made to use a vulnerable version of a bcrypt backend). - - .. versionadded:: 1.6.3 - """ - - -class TokenError(ValueError): - """ - Base error raised by v:mod:`passlib.totp` when - a token can't be parsed / isn't valid / etc. - Derives from :exc:`!ValueError`. - - Usually one of the more specific subclasses below will be raised: - - * :class:`MalformedTokenError` -- invalid chars, too few digits - * :class:`InvalidTokenError` -- no match found - * :class:`UsedTokenError` -- match found, but token already used - - .. 
versionadded:: 1.7 - """ - - #: default message to use if none provided -- subclasses may fill this in - _default_message = 'Token not acceptable' - - def __init__(self, msg=None, *args, **kwds): - if msg is None: - msg = self._default_message - ValueError.__init__(self, msg, *args, **kwds) - - -class MalformedTokenError(TokenError): - """ - Error raised by :mod:`passlib.totp` when a token isn't formatted correctly - (contains invalid characters, wrong number of digits, etc) - """ - _default_message = "Unrecognized token" - - -class InvalidTokenError(TokenError): - """ - Error raised by :mod:`passlib.totp` when a token is formatted correctly, - but doesn't match any tokens within valid range. - """ - _default_message = "Token did not match" - - -class UsedTokenError(TokenError): - """ - Error raised by :mod:`passlib.totp` if a token is reused. - Derives from :exc:`TokenError`. - - .. autoattribute:: expire_time - - .. versionadded:: 1.7 - """ - _default_message = "Token has already been used, please wait for another." - - #: optional value indicating when current counter period will end, - #: and a new token can be generated. - expire_time = None - - def __init__(self, *args, **kwds): - self.expire_time = kwds.pop("expire_time", None) - TokenError.__init__(self, *args, **kwds) - - -class UnknownHashError(ValueError): - """Error raised by :class:`~passlib.crypto.lookup_hash` if hash name is not recognized. - This exception derives from :exc:`!ValueError`. - - .. versionadded:: 1.7 - """ - def __init__(self, name): - self.name = name - ValueError.__init__(self, "unknown hash algorithm: %r" % name) - -#============================================================================= -# warnings -#============================================================================= -class PasslibWarning(UserWarning): - """base class for Passlib's user warnings, - derives from the builtin :exc:`UserWarning`. - - .. 
versionadded:: 1.6 - """ - -# XXX: there's only one reference to this class, and it will go away in 2.0; -# so can probably remove this along with this / roll this into PasslibHashWarning. -class PasslibConfigWarning(PasslibWarning): - """Warning issued when non-fatal issue is found related to the configuration - of a :class:`~passlib.context.CryptContext` instance. - - This occurs primarily in one of two cases: - - * The CryptContext contains rounds limits which exceed the hard limits - imposed by the underlying algorithm. - * An explicit rounds value was provided which exceeds the limits - imposed by the CryptContext. - - In both of these cases, the code will perform correctly & securely; - but the warning is issued as a sign the configuration may need updating. - - .. versionadded:: 1.6 - """ - -class PasslibHashWarning(PasslibWarning): - """Warning issued when non-fatal issue is found with parameters - or hash string passed to a passlib hash class. - - This occurs primarily in one of two cases: - - * A rounds value or other setting was explicitly provided which - exceeded the handler's limits (and has been clamped - by the :ref:`relaxed` flag). - - * A malformed hash string was encountered which (while parsable) - should be re-encoded. - - .. versionadded:: 1.6 - """ - -class PasslibRuntimeWarning(PasslibWarning): - """Warning issued when something unexpected happens during runtime. - - The fact that it's a warning instead of an error means Passlib - was able to correct for the issue, but that it's anomalous enough - that the developers would love to hear under what conditions it occurred. - - .. versionadded:: 1.6 - """ - -class PasslibSecurityWarning(PasslibWarning): - """Special warning issued when Passlib encounters something - that might affect security. - - .. 
versionadded:: 1.6 - """ - -#============================================================================= -# error constructors -# -# note: these functions are used by the hashes in Passlib to raise common -# error messages. They are currently just functions which return ValueError, -# rather than subclasses of ValueError, since the specificity isn't needed -# yet; and who wants to import a bunch of error classes when catching -# ValueError will do? -#============================================================================= - -def _get_name(handler): - return handler.name if handler else "" - -#------------------------------------------------------------------------ -# generic helpers -#------------------------------------------------------------------------ -def type_name(value): - """return pretty-printed string containing name of value's type""" - cls = value.__class__ - if cls.__module__ and cls.__module__ not in ["__builtin__", "builtins"]: - return "%s.%s" % (cls.__module__, cls.__name__) - elif value is None: - return 'None' - else: - return cls.__name__ - -def ExpectedTypeError(value, expected, param): - """error message when param was supposed to be one type, but found another""" - # NOTE: value is never displayed, since it may sometimes be a password. 
- name = type_name(value) - return TypeError("%s must be %s, not %s" % (param, expected, name)) - -def ExpectedStringError(value, param): - """error message when param was supposed to be unicode or bytes""" - return ExpectedTypeError(value, "unicode or bytes", param) - -#------------------------------------------------------------------------ -# hash/verify parameter errors -#------------------------------------------------------------------------ -def MissingDigestError(handler=None): - """raised when verify() method gets passed config string instead of hash""" - name = _get_name(handler) - return ValueError("expected %s hash, got %s config string instead" % - (name, name)) - -def NullPasswordError(handler=None): - """raised by OS crypt() supporting hashes, which forbid NULLs in password""" - name = _get_name(handler) - return ValueError("%s does not allow NULL bytes in password" % name) - -#------------------------------------------------------------------------ -# errors when parsing hashes -#------------------------------------------------------------------------ -def InvalidHashError(handler=None): - """error raised if unrecognized hash provided to handler""" - return ValueError("not a valid %s hash" % _get_name(handler)) - -def MalformedHashError(handler=None, reason=None): - """error raised if recognized-but-malformed hash provided to handler""" - text = "malformed %s hash" % _get_name(handler) - if reason: - text = "%s (%s)" % (text, reason) - return ValueError(text) - -def ZeroPaddedRoundsError(handler=None): - """error raised if hash was recognized but contained zero-padded rounds field""" - return MalformedHashError(handler, "zero-padded rounds") - -#------------------------------------------------------------------------ -# settings / hash component errors -#------------------------------------------------------------------------ -def ChecksumSizeError(handler, raw=False): - """error raised if hash was recognized, but checksum was wrong size""" - # 
TODO: if handler.use_defaults is set, this came from app-provided value, - # not from parsing a hash string, might want different error msg. - checksum_size = handler.checksum_size - unit = "bytes" if raw else "chars" - reason = "checksum must be exactly %d %s" % (checksum_size, unit) - return MalformedHashError(handler, reason) - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/ext/__init__.py b/libs_crutch/contrib/passlib/ext/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/libs_crutch/contrib/passlib/ext/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/libs_crutch/contrib/passlib/ext/django/__init__.py b/libs_crutch/contrib/passlib/ext/django/__init__.py deleted file mode 100644 index 2dc9b28..0000000 --- a/libs_crutch/contrib/passlib/ext/django/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""passlib.ext.django.models -- monkeypatch django hashing framework - -this plugin monkeypatches django's hashing framework -so that it uses a passlib context object, allowing handling of arbitrary -hashes in Django databases. 
-""" diff --git a/libs_crutch/contrib/passlib/ext/django/models.py b/libs_crutch/contrib/passlib/ext/django/models.py deleted file mode 100644 index e766c2d..0000000 --- a/libs_crutch/contrib/passlib/ext/django/models.py +++ /dev/null @@ -1,36 +0,0 @@ -"""passlib.ext.django.models -- monkeypatch django hashing framework""" -#============================================================================= -# imports -#============================================================================= -# core -# site -# pkg -from passlib.context import CryptContext -from passlib.ext.django.utils import DjangoContextAdapter -# local -__all__ = ["password_context"] - -#============================================================================= -# global attrs -#============================================================================= - -#: adapter instance used to drive most of this -adapter = DjangoContextAdapter() - -# the context object which this patches contrib.auth to use for password hashing. -# configuration controlled by ``settings.PASSLIB_CONFIG``. 
-password_context = adapter.context - -#: hook callers should use if context is changed -context_changed = adapter.reset_hashers - -#============================================================================= -# main code -#============================================================================= - -# load config & install monkeypatch -adapter.load_model() - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/ext/django/utils.py b/libs_crutch/contrib/passlib/ext/django/utils.py deleted file mode 100644 index a83cb89..0000000 --- a/libs_crutch/contrib/passlib/ext/django/utils.py +++ /dev/null @@ -1,1233 +0,0 @@ -"""passlib.ext.django.utils - helper functions used by this plugin""" -#============================================================================= -# imports -#============================================================================= -# core -from functools import update_wrapper, wraps -import logging; log = logging.getLogger(__name__) -import sys -import weakref -from warnings import warn -# site -try: - from django import VERSION as DJANGO_VERSION - log.debug("found django %r installation", DJANGO_VERSION) -except ImportError: - log.debug("django installation not found") - DJANGO_VERSION = () -# pkg -from passlib import exc, registry -from passlib.context import CryptContext -from passlib.exc import PasslibRuntimeWarning -from passlib.utils.compat import get_method_function, iteritems, OrderedDict, unicode -from passlib.utils.decor import memoized_property -# local -__all__ = [ - "DJANGO_VERSION", - "MIN_DJANGO_VERSION", - "get_preset_config", - "get_django_hasher", -] - -#: minimum version supported by passlib.ext.django -MIN_DJANGO_VERSION = (1, 8) - -#============================================================================= -# default policies 
-#============================================================================= - -# map preset names -> passlib.app attrs -_preset_map = { - "django-1.0": "django10_context", - "django-1.4": "django14_context", - "django-1.6": "django16_context", - "django-latest": "django_context", -} - -def get_preset_config(name): - """Returns configuration string for one of the preset strings - supported by the ``PASSLIB_CONFIG`` setting. - Currently supported presets: - - * ``"passlib-default"`` - default config used by this release of passlib. - * ``"django-default"`` - config matching currently installed django version. - * ``"django-latest"`` - config matching newest django version (currently same as ``"django-1.6"``). - * ``"django-1.0"`` - config used by stock Django 1.0 - 1.3 installs - * ``"django-1.4"`` - config used by stock Django 1.4 installs - * ``"django-1.6"`` - config used by stock Django 1.6 installs - """ - # TODO: add preset which includes HASHERS + PREFERRED_HASHERS, - # after having imported any custom hashers. e.g. "django-current" - if name == "django-default": - if not DJANGO_VERSION: - raise ValueError("can't resolve django-default preset, " - "django not installed") - name = "django-1.6" - if name == "passlib-default": - return PASSLIB_DEFAULT - try: - attr = _preset_map[name] - except KeyError: - raise ValueError("unknown preset config name: %r" % name) - import passlib.apps - return getattr(passlib.apps, attr).to_string() - -# default context used by passlib 1.6 -PASSLIB_DEFAULT = """ -[passlib] - -; list of schemes supported by configuration -; currently all django 1.6, 1.4, and 1.0 hashes, -; and three common modular crypt format hashes. 
-schemes = - django_pbkdf2_sha256, django_pbkdf2_sha1, django_bcrypt, django_bcrypt_sha256, - django_salted_sha1, django_salted_md5, django_des_crypt, hex_md5, - sha512_crypt, bcrypt, phpass - -; default scheme to use for new hashes -default = django_pbkdf2_sha256 - -; hashes using these schemes will automatically be re-hashed -; when the user logs in (currently all django 1.0 hashes) -deprecated = - django_pbkdf2_sha1, django_salted_sha1, django_salted_md5, - django_des_crypt, hex_md5 - -; sets some common options, including minimum rounds for two primary hashes. -; if a hash has less than this number of rounds, it will be re-hashed. -sha512_crypt__min_rounds = 80000 -django_pbkdf2_sha256__min_rounds = 10000 - -; set somewhat stronger iteration counts for ``User.is_staff`` -staff__sha512_crypt__default_rounds = 100000 -staff__django_pbkdf2_sha256__default_rounds = 12500 - -; and even stronger ones for ``User.is_superuser`` -superuser__sha512_crypt__default_rounds = 120000 -superuser__django_pbkdf2_sha256__default_rounds = 15000 -""" - -#============================================================================= -# helpers -#============================================================================= - -#: prefix used to shoehorn passlib's handler names into django hasher namespace -PASSLIB_WRAPPER_PREFIX = "passlib_" - -#: prefix used by all the django-specific hash formats in passlib; -#: all of these hashes should have a ``.django_name`` attribute. -DJANGO_COMPAT_PREFIX = "django_" - -#: set of hashes w/o "django_" prefix, but which also expose ``.django_name``. 
-_other_django_hashes = set(["hex_md5"]) - -def _wrap_method(method): - """wrap method object in bare function""" - @wraps(method) - def wrapper(*args, **kwds): - return method(*args, **kwds) - return wrapper - -#============================================================================= -# translator -#============================================================================= -class DjangoTranslator(object): - """ - Object which helps translate passlib hasher objects / names - to and from django hasher objects / names. - - These methods are wrapped in a class so that results can be cached, - but with the ability to have independant caches, since django hasher - names may / may not correspond to the same instance (or even class). - """ - #============================================================================= - # instance attrs - #============================================================================= - - #: CryptContext instance - #: (if any -- generally only set by DjangoContextAdapter subclass) - context = None - - #: internal cache of passlib hasher -> django hasher instance. - #: key stores weakref to passlib hasher. - _django_hasher_cache = None - - #: special case -- unsalted_sha1 - _django_unsalted_sha1 = None - - #: internal cache of django name -> passlib hasher - #: value stores weakrefs to passlib hasher. 
- _passlib_hasher_cache = None - - #============================================================================= - # init - #============================================================================= - - def __init__(self, context=None, **kwds): - super(DjangoTranslator, self).__init__(**kwds) - if context is not None: - self.context = context - - self._django_hasher_cache = weakref.WeakKeyDictionary() - self._passlib_hasher_cache = weakref.WeakValueDictionary() - - def reset_hashers(self): - self._django_hasher_cache.clear() - self._passlib_hasher_cache.clear() - self._django_unsalted_sha1 = None - - def _get_passlib_hasher(self, passlib_name): - """ - resolve passlib hasher by name, using context if available. - """ - context = self.context - if context is None: - return registry.get_crypt_handler(passlib_name) - else: - return context.handler(passlib_name) - - #============================================================================= - # resolve passlib hasher -> django hasher - #============================================================================= - - def passlib_to_django_name(self, passlib_name): - """ - Convert passlib hasher / name to Django hasher name. - """ - return self.passlib_to_django(passlib_name).algorithm - - # XXX: add option (in class, or call signature) to always return a wrapper, - # rather than native builtin -- would let HashersTest check that - # our own wrapper + implementations are matching up with their tests. - def passlib_to_django(self, passlib_hasher, cached=True): - """ - Convert passlib hasher / name to Django hasher. 
- - :param passlib_hasher: - passlib hasher / name - - :returns: - django hasher instance - """ - # resolve names to hasher - if not hasattr(passlib_hasher, "name"): - passlib_hasher = self._get_passlib_hasher(passlib_hasher) - - # check cache - if cached: - cache = self._django_hasher_cache - try: - return cache[passlib_hasher] - except KeyError: - pass - result = cache[passlib_hasher] = \ - self.passlib_to_django(passlib_hasher, cached=False) - return result - - # find native equivalent, and return wrapper if there isn't one - django_name = getattr(passlib_hasher, "django_name", None) - if django_name: - return self._create_django_hasher(django_name) - else: - return _PasslibHasherWrapper(passlib_hasher) - - _builtin_django_hashers = dict( - md5="MD5PasswordHasher", - ) - - def _create_django_hasher(self, django_name): - """ - helper to create new django hasher by name. - wraps underlying django methods. - """ - # if we haven't patched django, can use it directly - module = sys.modules.get("passlib.ext.django.models") - if module is None or not module.adapter.patched: - from django.contrib.auth.hashers import get_hasher - return get_hasher(django_name) - - # We've patched django's get_hashers(), so calling django's get_hasher() - # or get_hashers_by_algorithm() would only land us back here. - # As non-ideal workaround, have to use original get_hashers(), - get_hashers = module.adapter._manager.getorig("django.contrib.auth.hashers:get_hashers").__wrapped__ - for hasher in get_hashers(): - if hasher.algorithm == django_name: - return hasher - - # hardcode a few for cases where get_hashers() look won't work. - path = self._builtin_django_hashers.get(django_name) - if path: - if "." not in path: - path = "django.contrib.auth.hashers." 
+ path - from django.utils.module_loading import import_string - return import_string(path)() - - raise ValueError("unknown hasher: %r" % django_name) - - #============================================================================= - # reverse django -> passlib - #============================================================================= - - def django_to_passlib_name(self, django_name): - """ - Convert Django hasher / name to Passlib hasher name. - """ - return self.django_to_passlib(django_name).name - - def django_to_passlib(self, django_name, cached=True): - """ - Convert Django hasher / name to Passlib hasher / name. - If present, CryptContext will be checked instead of main registry. - - :param django_name: - Django hasher class or algorithm name. - "default" allowed if context provided. - - :raises ValueError: - if can't resolve hasher. - - :returns: - passlib hasher or name - """ - # check for django hasher - if hasattr(django_name, "algorithm"): - - # check for passlib adapter - if isinstance(django_name, _PasslibHasherWrapper): - return django_name.passlib_handler - - # resolve django hasher -> name - django_name = django_name.algorithm - - # check cache - if cached: - cache = self._passlib_hasher_cache - try: - return cache[django_name] - except KeyError: - pass - result = cache[django_name] = \ - self.django_to_passlib(django_name, cached=False) - return result - - # check if it's an obviously-wrapped name - if django_name.startswith(PASSLIB_WRAPPER_PREFIX): - passlib_name = django_name[len(PASSLIB_WRAPPER_PREFIX):] - return self._get_passlib_hasher(passlib_name) - - # resolve default - if django_name == "default": - context = self.context - if context is None: - raise TypeError("can't determine default scheme w/ context") - return context.handler() - - # special case: Django uses a separate hasher for "sha1$$digest" - # hashes (unsalted_sha1) and "sha1$salt$digest" (sha1); - # but passlib uses "django_salted_sha1" for both of these. 
- if django_name == "unsalted_sha1": - django_name = "sha1" - - # resolve name - # XXX: bother caching these lists / mapping? - # not needed in long-term due to cache above. - context = self.context - if context is None: - # check registry - # TODO: should make iteration via registry easier - candidates = ( - registry.get_crypt_handler(passlib_name) - for passlib_name in registry.list_crypt_handlers() - if passlib_name.startswith(DJANGO_COMPAT_PREFIX) or - passlib_name in _other_django_hashes - ) - else: - # check context - candidates = context.schemes(resolve=True) - for handler in candidates: - if getattr(handler, "django_name", None) == django_name: - return handler - - # give up - # NOTE: this should only happen for custom django hashers that we don't - # know the equivalents for. _HasherHandler (below) is work in - # progress that would allow us to at least return a wrapper. - raise ValueError("can't translate django name to passlib name: %r" % - (django_name,)) - - #============================================================================= - # django hasher lookup - #============================================================================= - - def resolve_django_hasher(self, django_name, cached=True): - """ - Take in a django algorithm name, return django hasher. - """ - # check for django hasher - if hasattr(django_name, "algorithm"): - return django_name - - # resolve to passlib hasher - passlib_hasher = self.django_to_passlib(django_name, cached=cached) - - # special case: Django uses a separate hasher for "sha1$$digest" - # hashes (unsalted_sha1) and "sha1$salt$digest" (sha1); - # but passlib uses "django_salted_sha1" for both of these. - # XXX: this isn't ideal way to handle this. would like to do something - # like pass "django_variant=django_name" into passlib_to_django(), - # and have it cache separate hasher there. - # but that creates a LOT of complication in it's cache structure, - # for what is just one special case. 
- if django_name == "unsalted_sha1" and passlib_hasher.name == "django_salted_sha1": - if not cached: - return self._create_django_hasher(django_name) - result = self._django_unsalted_sha1 - if result is None: - result = self._django_unsalted_sha1 = self._create_django_hasher(django_name) - return result - - # lookup corresponding django hasher - return self.passlib_to_django(passlib_hasher, cached=cached) - - #============================================================================= - # eoc - #============================================================================= - -#============================================================================= -# adapter -#============================================================================= -class DjangoContextAdapter(DjangoTranslator): - """ - Object which tries to adapt a Passlib CryptContext object, - using a Django-hasher compatible API. - - When installed in django, :mod:`!passlib.ext.django` will create - an instance of this class, and then monkeypatch the appropriate - methods into :mod:`!django.contrib.auth` and other appropriate places. 
- """ - #============================================================================= - # instance attrs - #============================================================================= - - #: CryptContext instance we're wrapping - context = None - - #: ref to original make_password(), - #: needed to generate usuable passwords that match django - _orig_make_password = None - - #: ref to django helper of this name -- not monkeypatched - is_password_usable = None - - #: PatchManager instance used to track installation - _manager = None - - #: whether config=disabled flag was set - enabled = True - - #: patch status - patched = False - - #============================================================================= - # init - #============================================================================= - def __init__(self, context=None, get_user_category=None, **kwds): - - # init log - self.log = logging.getLogger(__name__ + ".DjangoContextAdapter") - - # init parent, filling in default context object - if context is None: - context = CryptContext() - super(DjangoContextAdapter, self).__init__(context=context, **kwds) - - # setup user category - if get_user_category: - assert callable(get_user_category) - self.get_user_category = get_user_category - - # install lru cache wrappers - from django.utils.lru_cache import lru_cache - self.get_hashers = lru_cache()(self.get_hashers) - - # get copy of original make_password - from django.contrib.auth.hashers import make_password - if make_password.__module__.startswith("passlib."): - make_password = _PatchManager.peek_unpatched_func(make_password) - self._orig_make_password = make_password - - # get other django helpers - from django.contrib.auth.hashers import is_password_usable - self.is_password_usable = is_password_usable - - # init manager - mlog = logging.getLogger(__name__ + ".DjangoContextAdapter._manager") - self._manager = _PatchManager(log=mlog) - - def reset_hashers(self): - """ - Wrapper to manually reset 
django's hasher lookup cache - """ - # resets cache for .get_hashers() & .get_hashers_by_algorithm() - from django.contrib.auth.hashers import reset_hashers - reset_hashers(setting="PASSWORD_HASHERS") - - # reset internal caches - super(DjangoContextAdapter, self).reset_hashers() - - #============================================================================= - # django hashers helpers -- hasher lookup - #============================================================================= - - # lru_cache()'ed by init - def get_hashers(self): - """ - Passlib replacement for get_hashers() -- - Return list of available django hasher classes - """ - passlib_to_django = self.passlib_to_django - return [passlib_to_django(hasher) - for hasher in self.context.schemes(resolve=True)] - - def get_hasher(self, algorithm="default"): - """ - Passlib replacement for get_hasher() -- - Return django hasher by name - """ - return self.resolve_django_hasher(algorithm) - - def identify_hasher(self, encoded): - """ - Passlib replacement for identify_hasher() -- - Identify django hasher based on hash. - """ - handler = self.context.identify(encoded, resolve=True, required=True) - if handler.name == "django_salted_sha1" and encoded.startswith("sha1$$"): - # Django uses a separate hasher for "sha1$$digest" hashes, but - # passlib identifies it as belonging to "sha1$salt$digest" handler. - # We want to resolve to correct django hasher. 
- return self.get_hasher("unsalted_sha1") - return self.passlib_to_django(handler) - - #============================================================================= - # django.contrib.auth.hashers helpers -- password helpers - #============================================================================= - - def make_password(self, password, salt=None, hasher="default"): - """ - Passlib replacement for make_password() - """ - if password is None: - return self._orig_make_password(None) - # NOTE: relying on hasher coming from context, and thus having - # context-specific config baked into it. - passlib_hasher = self.django_to_passlib(hasher) - if "salt" not in passlib_hasher.setting_kwds: - # ignore salt param even if preset - pass - elif hasher.startswith("unsalted_"): - # Django uses a separate 'unsalted_sha1' hasher for "sha1$$digest", - # but passlib just reuses it's "sha1" handler ("sha1$salt$digest"). To make - # this work, have to explicitly tell the sha1 handler to use an empty salt. - passlib_hasher = passlib_hasher.using(salt="") - elif salt: - # Django make_password() autogenerates a salt if salt is bool False (None / ''), - # so we only pass the keyword on if there's actually a fixed salt. - passlib_hasher = passlib_hasher.using(salt=salt) - return passlib_hasher.hash(password) - - def check_password(self, password, encoded, setter=None, preferred="default"): - """ - Passlib replacement for check_password() - """ - # XXX: this currently ignores "preferred" keyword, since its purpose - # was for hash migration, and that's handled by the context. 
- if password is None or not self.is_password_usable(encoded): - return False - - # verify password - context = self.context - correct = context.verify(password, encoded) - if not (correct and setter): - return correct - - # check if we need to rehash - if preferred == "default": - if not context.needs_update(encoded, secret=password): - return correct - else: - # Django's check_password() won't call setter() on a - # 'preferred' alg, even if it's otherwise deprecated. To try and - # replicate this behavior if preferred is set, we look up the - # passlib hasher, and call it's original needs_update() method. - # TODO: Solve redundancy that verify() call - # above is already identifying hash. - hasher = self.django_to_passlib(preferred) - if (hasher.identify(encoded) and - not hasher.needs_update(encoded, secret=password)): - # alg is 'preferred' and hash itself doesn't need updating, - # so nothing to do. - return correct - # else: either hash isn't preferred, or it needs updating. - - # call setter to rehash - setter(password) - return correct - - #============================================================================= - # django users helpers - #============================================================================= - - def user_check_password(self, user, password): - """ - Passlib replacement for User.check_password() - """ - if password is None: - return False - hash = user.password - if not self.is_password_usable(hash): - return False - cat = self.get_user_category(user) - ok, new_hash = self.context.verify_and_update(password, hash, - category=cat) - if ok and new_hash is not None: - # migrate to new hash if needed. 
- user.password = new_hash - user.save() - return ok - - def user_set_password(self, user, password): - """ - Passlib replacement for User.set_password() - """ - if password is None: - user.set_unusable_password() - else: - cat = self.get_user_category(user) - user.password = self.context.hash(password, category=cat) - - def get_user_category(self, user): - """ - Helper for hashing passwords per-user -- - figure out the CryptContext category for specified Django user object. - .. note:: - This may be overridden via PASSLIB_GET_CATEGORY django setting - """ - if user.is_superuser: - return "superuser" - elif user.is_staff: - return "staff" - else: - return None - - #============================================================================= - # patch control - #============================================================================= - - HASHERS_PATH = "django.contrib.auth.hashers" - MODELS_PATH = "django.contrib.auth.models" - USER_CLASS_PATH = MODELS_PATH + ":User" - FORMS_PATH = "django.contrib.auth.forms" - - #: list of locations to patch - patch_locations = [ - # - # User object - # NOTE: could leave defaults alone, but want to have user available - # so that we can support get_user_category() - # - (USER_CLASS_PATH + ".check_password", "user_check_password", dict(method=True)), - (USER_CLASS_PATH + ".set_password", "user_set_password", dict(method=True)), - - # - # Hashers module - # - (HASHERS_PATH + ":", "check_password"), - (HASHERS_PATH + ":", "make_password"), - (HASHERS_PATH + ":", "get_hashers"), - (HASHERS_PATH + ":", "get_hasher"), - (HASHERS_PATH + ":", "identify_hasher"), - - # - # Patch known imports from hashers module - # - (MODELS_PATH + ":", "check_password"), - (MODELS_PATH + ":", "make_password"), - (FORMS_PATH + ":", "get_hasher"), - (FORMS_PATH + ":", "identify_hasher"), - - ] - - def install_patch(self): - """ - Install monkeypatch to replace django hasher framework. 
- """ - # don't reapply - log = self.log - if self.patched: - log.warning("monkeypatching already applied, refusing to reapply") - return False - - # version check - if DJANGO_VERSION < MIN_DJANGO_VERSION: - raise RuntimeError("passlib.ext.django requires django >= %s" % - (MIN_DJANGO_VERSION,)) - - # log start - log.debug("preparing to monkeypatch django ...") - - # run through patch locations - manager = self._manager - for record in self.patch_locations: - if len(record) == 2: - record += ({},) - target, source, opts = record - if target.endswith((":", ",")): - target += source - value = getattr(self, source) - if opts.get("method"): - # have to wrap our method in a function, - # since we're installing it in a class *as* a method - # XXX: make this a flag for .patch()? - value = _wrap_method(value) - manager.patch(target, value) - - # reset django's caches (e.g. get_hash_by_algorithm) - self.reset_hashers() - - # done! - self.patched = True - log.debug("... finished monkeypatching django") - return True - - def remove_patch(self): - """ - Remove monkeypatch from django hasher framework. - As precaution in case there are lingering refs to context, - context object will be wiped. - - .. warning:: - This may cause problems if any other Django modules have imported - their own copies of the patched functions, though the patched - code has been designed to throw an error as soon as possible in - this case. 
- """ - log = self.log - manager = self._manager - - if self.patched: - log.debug("removing django monkeypatching...") - manager.unpatch_all(unpatch_conflicts=True) - self.context.load({}) - self.patched = False - self.reset_hashers() - log.debug("...finished removing django monkeypatching") - return True - - if manager.isactive(): # pragma: no cover -- sanity check - log.warning("reverting partial monkeypatching of django...") - manager.unpatch_all() - self.context.load({}) - self.reset_hashers() - log.debug("...finished removing django monkeypatching") - return True - - log.debug("django not monkeypatched") - return False - - #============================================================================= - # loading config - #============================================================================= - - def load_model(self): - """ - Load configuration from django, and install patch. - """ - self._load_settings() - if self.enabled: - try: - self.install_patch() - except: - # try to undo what we can - self.remove_patch() - raise - else: - if self.patched: # pragma: no cover -- sanity check - log.error("didn't expect monkeypatching would be applied!") - self.remove_patch() - log.debug("passlib.ext.django loaded") - - def _load_settings(self): - """ - Update settings from django - """ - from django.conf import settings - - # TODO: would like to add support for inheriting config from a preset - # (or from existing hasher state) and letting PASSLIB_CONFIG - # be an update, not a replacement. - - # TODO: wrap and import any custom hashers as passlib handlers, - # so they could be used in the passlib config. 
- - # load config from settings - _UNSET = object() - config = getattr(settings, "PASSLIB_CONFIG", _UNSET) - if config is _UNSET: - # XXX: should probably deprecate this alias - config = getattr(settings, "PASSLIB_CONTEXT", _UNSET) - if config is _UNSET: - config = "passlib-default" - if config is None: - warn("setting PASSLIB_CONFIG=None is deprecated, " - "and support will be removed in Passlib 1.8, " - "use PASSLIB_CONFIG='disabled' instead.", - DeprecationWarning) - config = "disabled" - elif not isinstance(config, (unicode, bytes, dict)): - raise exc.ExpectedTypeError(config, "str or dict", "PASSLIB_CONFIG") - - # load custom category func (if any) - get_category = getattr(settings, "PASSLIB_GET_CATEGORY", None) - if get_category and not callable(get_category): - raise exc.ExpectedTypeError(get_category, "callable", "PASSLIB_GET_CATEGORY") - - # check if we've been disabled - if config == "disabled": - self.enabled = False - return - else: - self.__dict__.pop("enabled", None) - - # resolve any preset aliases - if isinstance(config, str) and '\n' not in config: - config = get_preset_config(config) - - # setup category func - if get_category: - self.get_user_category = get_category - else: - self.__dict__.pop("get_category", None) - - # setup context - self.context.load(config) - self.reset_hashers() - - #============================================================================= - # eof - #============================================================================= - -#============================================================================= -# wrapping passlib handlers as django hashers -#============================================================================= -_GEN_SALT_SIGNAL = "--!!!generate-new-salt!!!--" - -class ProxyProperty(object): - """helper that proxies another attribute""" - - def __init__(self, attr): - self.attr = attr - - def __get__(self, obj, cls): - if obj is None: - cls = obj - return getattr(obj, self.attr) - - def 
__set__(self, obj, value): - setattr(obj, self.attr, value) - - def __delete__(self, obj): - delattr(obj, self.attr) - - -class _PasslibHasherWrapper(object): - """ - adapter which which wraps a :cls:`passlib.ifc.PasswordHash` class, - and provides an interface compatible with the Django hasher API. - - :param passlib_handler: - passlib hash handler (e.g. :cls:`passlib.hash.sha256_crypt`. - """ - #===================================================================== - # instance attrs - #===================================================================== - - #: passlib handler that we're adapting. - passlib_handler = None - - # NOTE: 'rounds' attr will store variable rounds, IF handler supports it. - # 'iterations' will act as proxy, for compatibility with django pbkdf2 hashers. - # rounds = None - # iterations = None - - #===================================================================== - # init - #===================================================================== - def __init__(self, passlib_handler): - # init handler - if getattr(passlib_handler, "django_name", None): - raise ValueError("handlers that reflect an official django " - "hasher shouldn't be wrapped: %r" % - (passlib_handler.name,)) - if passlib_handler.is_disabled: - # XXX: could this be implemented? 
- raise ValueError("can't wrap disabled-hash handlers: %r" % - (passlib_handler.name)) - self.passlib_handler = passlib_handler - - # init rounds support - if self._has_rounds: - self.rounds = passlib_handler.default_rounds - self.iterations = ProxyProperty("rounds") - - #===================================================================== - # internal methods - #===================================================================== - def __repr__(self): - return "" % self.passlib_handler - - #===================================================================== - # internal properties - #===================================================================== - - @memoized_property - def __name__(self): - return "Passlib_%s_PasswordHasher" % self.passlib_handler.name.title() - - @memoized_property - def _has_rounds(self): - return "rounds" in self.passlib_handler.setting_kwds - - @memoized_property - def _translate_kwds(self): - """ - internal helper for safe_summary() -- - used to translate passlib hash options -> django keywords - """ - out = dict(checksum="hash") - if self._has_rounds and "pbkdf2" in self.passlib_handler.name: - out['rounds'] = 'iterations' - return out - - #===================================================================== - # hasher properties - #===================================================================== - - @memoized_property - def algorithm(self): - return PASSLIB_WRAPPER_PREFIX + self.passlib_handler.name - - #===================================================================== - # hasher api - #===================================================================== - def salt(self): - # NOTE: passlib's handler.hash() should generate new salt each time, - # so this just returns a special constant which tells - # encode() (below) not to pass a salt keyword along. 
- return _GEN_SALT_SIGNAL - - def verify(self, password, encoded): - return self.passlib_handler.verify(password, encoded) - - def encode(self, password, salt=None, rounds=None, iterations=None): - kwds = {} - if salt is not None and salt != _GEN_SALT_SIGNAL: - kwds['salt'] = salt - if self._has_rounds: - if rounds is not None: - kwds['rounds'] = rounds - elif iterations is not None: - kwds['rounds'] = iterations - else: - kwds['rounds'] = self.rounds - elif rounds is not None or iterations is not None: - warn("%s.hash(): 'rounds' and 'iterations' are ignored" % self.__name__) - handler = self.passlib_handler - if kwds: - handler = handler.using(**kwds) - return handler.hash(password) - - def safe_summary(self, encoded): - from django.contrib.auth.hashers import mask_hash - from django.utils.translation import ugettext_noop as _ - handler = self.passlib_handler - items = [ - # since this is user-facing, we're reporting passlib's name, - # without the distracting PASSLIB_HASHER_PREFIX prepended. - (_('algorithm'), handler.name), - ] - if hasattr(handler, "parsehash"): - kwds = handler.parsehash(encoded, sanitize=mask_hash) - for key, value in iteritems(kwds): - key = self._translate_kwds.get(key, key) - items.append((_(key), value)) - return OrderedDict(items) - - def must_update(self, encoded): - # TODO: would like access CryptContext, would need caller to pass it to get_passlib_hasher(). 
- # for now (as of passlib 1.6.6), replicating django policy that this returns True - # if 'encoded' hash has different rounds value from self.rounds - if self._has_rounds: - # XXX: could cache this subclass somehow (would have to intercept writes to self.rounds) - # TODO: always call subcls/handler.needs_update() in case there's other things to check - subcls = self.passlib_handler.using(min_rounds=self.rounds, max_rounds=self.rounds) - if subcls.needs_update(encoded): - return True - return False - - #===================================================================== - # eoc - #===================================================================== - -#============================================================================= -# adapting django hashers -> passlib handlers -#============================================================================= -# TODO: this code probably halfway works, mainly just needs -# a routine to read HASHERS and PREFERRED_HASHER. - -##from passlib.registry import register_crypt_handler -##from passlib.utils import classproperty, to_native_str, to_unicode -##from passlib.utils.compat import unicode -## -## -##class _HasherHandler(object): -## "helper for wrapping Hasher instances as passlib handlers" -## # FIXME: this generic wrapper doesn't handle custom settings -## # FIXME: genconfig / genhash not supported. -## -## def __init__(self, hasher): -## self.django_hasher = hasher -## if hasattr(hasher, "iterations"): -## # assume encode() accepts an "iterations" parameter. -## # fake min/max rounds -## self.min_rounds = 1 -## self.max_rounds = 0xFFFFffff -## self.default_rounds = self.django_hasher.iterations -## self.setting_kwds += ("rounds",) -## -## # hasher instance - filled in by constructor -## django_hasher = None -## -## setting_kwds = ("salt",) -## context_kwds = () -## -## @property -## def name(self): -## # XXX: need to make sure this wont' collide w/ builtin django hashes. 
-## # maybe by renaming this to django compatible aliases? -## return DJANGO_PASSLIB_PREFIX + self.django_name -## -## @property -## def django_name(self): -## # expose this so hasher_to_passlib_name() extracts original name -## return self.django_hasher.algorithm -## -## @property -## def ident(self): -## # this should always be correct, as django relies on ident prefix. -## return unicode(self.django_name + "$") -## -## @property -## def identify(self, hash): -## # this should always work, as django relies on ident prefix. -## return to_unicode(hash, "latin-1", "hash").startswith(self.ident) -## -## @property -## def hash(self, secret, salt=None, **kwds): -## # NOTE: from how make_password() is coded, all hashers -## # should have salt param. but only some will have -## # 'iterations' parameter. -## opts = {} -## if 'rounds' in self.setting_kwds and 'rounds' in kwds: -## opts['iterations'] = kwds.pop("rounds") -## if kwds: -## raise TypeError("unexpected keyword arguments: %r" % list(kwds)) -## if isinstance(secret, unicode): -## secret = secret.encode("utf-8") -## if salt is None: -## salt = self.django_hasher.salt() -## return to_native_str(self.django_hasher(secret, salt, **opts)) -## -## @property -## def verify(self, secret, hash): -## hash = to_native_str(hash, "utf-8", "hash") -## if isinstance(secret, unicode): -## secret = secret.encode("utf-8") -## return self.django_hasher.verify(secret, hash) -## -##def register_hasher(hasher): -## handler = _HasherHandler(hasher) -## register_crypt_handler(handler) -## return handler - -#============================================================================= -# monkeypatch helpers -#============================================================================= -# private singleton indicating lack-of-value -_UNSET = object() - -class _PatchManager(object): - """helper to manage monkeypatches and run sanity checks""" - - # NOTE: this could easily use a dict interface, - # but keeping it distinct to make clear that 
it's not a dict, - # since it has important side-effects. - - #=================================================================== - # init and support - #=================================================================== - def __init__(self, log=None): - # map of key -> (original value, patched value) - # original value may be _UNSET - self.log = log or logging.getLogger(__name__ + "._PatchManager") - self._state = {} - - def isactive(self): - return bool(self._state) - - # bool value tests if any patches are currently applied. - # NOTE: this behavior is deprecated in favor of .isactive - __bool__ = __nonzero__ = isactive - - def _import_path(self, path): - """retrieve obj and final attribute name from resource path""" - name, attr = path.split(":") - obj = __import__(name, fromlist=[attr], level=0) - while '.' in attr: - head, attr = attr.split(".", 1) - obj = getattr(obj, head) - return obj, attr - - @staticmethod - def _is_same_value(left, right): - """check if two values are the same (stripping method wrappers, etc)""" - return get_method_function(left) == get_method_function(right) - - #=================================================================== - # reading - #=================================================================== - def _get_path(self, key, default=_UNSET): - obj, attr = self._import_path(key) - return getattr(obj, attr, default) - - def get(self, path, default=None): - """return current value for path""" - return self._get_path(path, default) - - def getorig(self, path, default=None): - """return original (unpatched) value for path""" - try: - value, _= self._state[path] - except KeyError: - value = self._get_path(path) - return default if value is _UNSET else value - - def check_all(self, strict=False): - """run sanity check on all keys, issue warning if out of sync""" - same = self._is_same_value - for path, (orig, expected) in iteritems(self._state): - if same(self._get_path(path), expected): - continue - msg = "another library has 
patched resource: %r" % path - if strict: - raise RuntimeError(msg) - else: - warn(msg, PasslibRuntimeWarning) - - #=================================================================== - # patching - #=================================================================== - def _set_path(self, path, value): - obj, attr = self._import_path(path) - if value is _UNSET: - if hasattr(obj, attr): - delattr(obj, attr) - else: - setattr(obj, attr, value) - - def patch(self, path, value, wrap=False): - """monkeypatch object+attr at to have , stores original""" - assert value != _UNSET - current = self._get_path(path) - try: - orig, expected = self._state[path] - except KeyError: - self.log.debug("patching resource: %r", path) - orig = current - else: - self.log.debug("modifying resource: %r", path) - if not self._is_same_value(current, expected): - warn("overridding resource another library has patched: %r" - % path, PasslibRuntimeWarning) - if wrap: - assert callable(value) - wrapped = orig - wrapped_by = value - def wrapper(*args, **kwds): - return wrapped_by(wrapped, *args, **kwds) - update_wrapper(wrapper, value) - value = wrapper - if callable(value): - # needed by DjangoContextAdapter init - get_method_function(value)._patched_original_value = orig - self._set_path(path, value) - self._state[path] = (orig, value) - - @classmethod - def peek_unpatched_func(cls, value): - return value._patched_original_value - - ##def patch_many(self, **kwds): - ## "override specified resources with new values" - ## for path, value in iteritems(kwds): - ## self.patch(path, value) - - def monkeypatch(self, parent, name=None, enable=True, wrap=False): - """function decorator which patches function of same name in """ - def builder(func): - if enable: - sep = "." 
if ":" in parent else ":" - path = parent + sep + (name or func.__name__) - self.patch(path, func, wrap=wrap) - return func - if callable(name): - # called in non-decorator mode - func = name - name = None - builder(func) - return None - return builder - - #=================================================================== - # unpatching - #=================================================================== - def unpatch(self, path, unpatch_conflicts=True): - try: - orig, expected = self._state[path] - except KeyError: - return - current = self._get_path(path) - self.log.debug("unpatching resource: %r", path) - if not self._is_same_value(current, expected): - if unpatch_conflicts: - warn("reverting resource another library has patched: %r" - % path, PasslibRuntimeWarning) - else: - warn("not reverting resource another library has patched: %r" - % path, PasslibRuntimeWarning) - del self._state[path] - return - self._set_path(path, orig) - del self._state[path] - - def unpatch_all(self, **kwds): - for key in list(self._state): - self.unpatch(key, **kwds) - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/__init__.py b/libs_crutch/contrib/passlib/handlers/__init__.py deleted file mode 100644 index 0a0338c..0000000 --- a/libs_crutch/contrib/passlib/handlers/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""passlib.handlers -- holds implementations of all passlib's builtin hash formats""" diff --git a/libs_crutch/contrib/passlib/handlers/argon2.py b/libs_crutch/contrib/passlib/handlers/argon2.py deleted file mode 100644 index 4a5691b..0000000 --- a/libs_crutch/contrib/passlib/handlers/argon2.py +++ /dev/null @@ -1,1009 +0,0 @@ -"""passlib.handlers.argon2 -- 
argon2 password hash wrapper - -References -========== -* argon2 - - home: https://github.com/P-H-C/phc-winner-argon2 - - whitepaper: https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf -* argon2 cffi wrapper - - pypi: https://pypi.python.org/pypi/argon2_cffi - - home: https://github.com/hynek/argon2_cffi -* argon2 pure python - - pypi: https://pypi.python.org/pypi/argon2pure - - home: https://github.com/bwesterb/argon2pure -""" -#============================================================================= -# imports -#============================================================================= -from __future__ import with_statement, absolute_import -# core -import logging -log = logging.getLogger(__name__) -import re -import types -from warnings import warn -# site -_argon2_cffi = None # loaded below -_argon2pure = None # dynamically imported by _load_backend_argon2pure() -# pkg -from passlib import exc -from passlib.crypto.digest import MAX_UINT32 -from passlib.utils import classproperty, to_bytes, render_bytes -from passlib.utils.binary import b64s_encode, b64s_decode -from passlib.utils.compat import u, unicode, bascii_to_str, uascii_to_str, PY2 -import passlib.utils.handlers as uh -# local -__all__ = [ - "argon2", -] - -#============================================================================= -# helpers -#============================================================================= - -# NOTE: when adding a new argon2 hash type, need to do the following: -# * add TYPE_XXX constant, and add to ALL_TYPES -# * make sure "_backend_type_map" constructors handle it correctly for all backends -# * make sure _hash_regex & _ident_regex (below) support type string. -# * add reference vectors for testing. - -#: argon2 type constants -- subclasses handle mapping these to backend-specific type constants. 
-#: (should be lowercase, to match representation in hash string) -TYPE_I = u("i") -TYPE_D = u("d") -TYPE_ID = u("id") # new 2016-10-29; passlib 1.7.2 requires backends new enough for support - -#: list of all known types; first (supported) type will be used as default. -ALL_TYPES = (TYPE_ID, TYPE_I, TYPE_D) -ALL_TYPES_SET = set(ALL_TYPES) - -#============================================================================= -# import argon2 package (https://pypi.python.org/pypi/argon2_cffi) -#============================================================================= - -# import cffi package -# NOTE: we try to do this even if caller is going to use argon2pure, -# so that we can always use the libargon2 default settings when possible. -_argon2_cffi_error = None -try: - import argon2 as _argon2_cffi -except ImportError: - _argon2_cffi = None -else: - if not hasattr(_argon2_cffi, "Type"): - # they have incompatible "argon2" package installed, instead of "argon2_cffi" package. - _argon2_cffi_error = ( - "'argon2' module points to unsupported 'argon2' pypi package; " - "please install 'argon2-cffi' instead." - ) - _argon2_cffi = None - elif not hasattr(_argon2_cffi, "low_level"): - # they have pre-v16 argon2_cffi package - _argon2_cffi_error = "'argon2-cffi' is too old, please update to argon2_cffi >= 18.2.0" - _argon2_cffi = None - -# init default settings for our hasher class -- -# if we have argon2_cffi >= 16.0, use their default hasher settings, otherwise use static default -if hasattr(_argon2_cffi, "PasswordHasher"): - # use cffi's default settings - _default_settings = _argon2_cffi.PasswordHasher() - _default_version = _argon2_cffi.low_level.ARGON2_VERSION -else: - # use fallback settings (for no backend, or argon2pure) - class _DummyCffiHasher: - """ - dummy object to use as source of defaults when argon2_cffi isn't present. - this tries to mimic the attributes of ``argon2.PasswordHasher()`` which the rest of - this module reads. - - .. 
note:: values last synced w/ argon2 19.2 as of 2019-11-09 - """ - time_cost = 2 - memory_cost = 512 - parallelism = 2 - salt_len = 16 - hash_len = 16 - # NOTE: "type" attribute added in argon2_cffi v18.2; but currently not reading it - # type = _argon2_cffi.Type.ID - - _default_settings = _DummyCffiHasher() - _default_version = 0x13 # v1.9 - -#============================================================================= -# handler -#============================================================================= -class _Argon2Common(uh.SubclassBackendMixin, uh.ParallelismMixin, - uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, - uh.GenericHandler): - """ - Base class which implements brunt of Argon2 code. - This is then subclassed by the various backends, - to override w/ backend-specific methods. - - When a backend is loaded, the bases of the 'argon2' class proper - are modified to prepend the correct backend-specific subclass. - """ - #=================================================================== - # class attrs - #=================================================================== - - #------------------------ - # PasswordHash - #------------------------ - - name = "argon2" - setting_kwds = ("salt", - "salt_size", - "salt_len", # 'salt_size' alias for compat w/ argon2 package - "rounds", - "time_cost", # 'rounds' alias for compat w/ argon2 package - "memory_cost", - "parallelism", - "digest_size", - "hash_len", # 'digest_size' alias for compat w/ argon2 package - "type", # the type of argon2 hash used - ) - - # TODO: could support the optional 'data' parameter, - # but need to research the uses, what a more descriptive name would be, - # and deal w/ fact that argon2_cffi 16.1 doesn't currently support it. 
- # (argon2_pure does though) - - #------------------------ - # GenericHandler - #------------------------ - - # NOTE: ident -- all argon2 hashes start with "$argon2$" - # XXX: could programmaticaly generate "ident_values" string from ALL_TYPES above - - checksum_size = _default_settings.hash_len - - #: force parsing these kwds - _always_parse_settings = uh.GenericHandler._always_parse_settings + \ - ("type",) - - #: exclude these kwds from parsehash() result (most are aliases for other keys) - _unparsed_settings = uh.GenericHandler._unparsed_settings + \ - ("salt_len", "time_cost", "hash_len", "digest_size") - - #------------------------ - # HasSalt - #------------------------ - default_salt_size = _default_settings.salt_len - min_salt_size = 8 - max_salt_size = MAX_UINT32 - - #------------------------ - # HasRounds - # TODO: once rounds limit logic is factored out, - # make 'rounds' and 'cost' an alias for 'time_cost' - #------------------------ - default_rounds = _default_settings.time_cost - min_rounds = 1 - max_rounds = MAX_UINT32 - rounds_cost = "linear" - - #------------------------ - # ParalleismMixin - #------------------------ - max_parallelism = (1 << 24) - 1 # from argon2.h / ARGON2_MAX_LANES - - #------------------------ - # custom - #------------------------ - - #: max version support - #: NOTE: this is dependant on the backend, and initialized/modified by set_backend() - max_version = _default_version - - #: minimum version before needs_update() marks the hash; if None, defaults to max_version - min_desired_version = None - - #: minimum valid memory_cost - min_memory_cost = 8 # from argon2.h / ARGON2_MIN_MEMORY - - #: maximum number of threads (-1=unlimited); - #: number of threads used by .hash() will be min(parallelism, max_threads) - max_threads = -1 - - #: global flag signalling argon2pure backend to use threads - #: rather than subprocesses. 
- pure_use_threads = False - - #: internal helper used to store mapping of TYPE_XXX constants -> backend-specific type constants; - #: this is populated by _load_backend_mixin(); and used to detect which types are supported. - #: XXX: could expose keys as class-level .supported_types property? - _backend_type_map = {} - - @classproperty - def type_values(cls): - """ - return tuple of types supported by this backend - - .. versionadded:: 1.7.2 - """ - cls.get_backend() # make sure backend is loaded - return tuple(cls._backend_type_map) - - #=================================================================== - # instance attrs - #=================================================================== - - #: argon2 hash type, one of ALL_TYPES -- class value controls the default - #: .. versionadded:: 1.7.2 - type = TYPE_ID - - #: parallelism setting -- class value controls the default - parallelism = _default_settings.parallelism - - #: hash version (int) - #: NOTE: this is modified by set_backend() - version = _default_version - - #: memory cost -- class value controls the default - memory_cost = _default_settings.memory_cost - - @property - def type_d(self): - """ - flag indicating a Type D hash - - .. 
deprecated:: 1.7.2; will be removed in passlib 2.0 - """ - return self.type == TYPE_D - - #: optional secret data - data = None - - #=================================================================== - # variant constructor - #=================================================================== - - @classmethod - def using(cls, type=None, memory_cost=None, salt_len=None, time_cost=None, digest_size=None, - checksum_size=None, hash_len=None, max_threads=None, **kwds): - # support aliases which match argon2 naming convention - if time_cost is not None: - if "rounds" in kwds: - raise TypeError("'time_cost' and 'rounds' are mutually exclusive") - kwds['rounds'] = time_cost - - if salt_len is not None: - if "salt_size" in kwds: - raise TypeError("'salt_len' and 'salt_size' are mutually exclusive") - kwds['salt_size'] = salt_len - - if hash_len is not None: - if digest_size is not None: - raise TypeError("'hash_len' and 'digest_size' are mutually exclusive") - digest_size = hash_len - - if checksum_size is not None: - if digest_size is not None: - raise TypeError("'checksum_size' and 'digest_size' are mutually exclusive") - digest_size = checksum_size - - # create variant - subcls = super(_Argon2Common, cls).using(**kwds) - - # set type - if type is not None: - subcls.type = subcls._norm_type(type) - - # set checksum size - relaxed = kwds.get("relaxed") - if digest_size is not None: - if isinstance(digest_size, uh.native_string_types): - digest_size = int(digest_size) - # NOTE: this isn't *really* digest size minimum, but want to enforce secure minimum. 
- subcls.checksum_size = uh.norm_integer(subcls, digest_size, min=16, max=MAX_UINT32, - param="digest_size", relaxed=relaxed) - - # set memory cost - if memory_cost is not None: - if isinstance(memory_cost, uh.native_string_types): - memory_cost = int(memory_cost) - subcls.memory_cost = subcls._norm_memory_cost(memory_cost, relaxed=relaxed) - - # validate constraints - subcls._validate_constraints(subcls.memory_cost, subcls.parallelism) - - # set max threads - if max_threads is not None: - if isinstance(max_threads, uh.native_string_types): - max_threads = int(max_threads) - if max_threads < 1 and max_threads != -1: - raise ValueError("max_threads (%d) must be -1 (unlimited), or at least 1." % - (max_threads,)) - subcls.max_threads = max_threads - - return subcls - - @classmethod - def _validate_constraints(cls, memory_cost, parallelism): - # NOTE: this is used by class & instance, hence passing in via arguments. - # could switch and make this a hybrid method. - min_memory_cost = 8 * parallelism - if memory_cost < min_memory_cost: - raise ValueError("%s: memory_cost (%d) is too low, must be at least " - "8 * parallelism (8 * %d = %d)" % - (cls.name, memory_cost, - parallelism, min_memory_cost)) - - #=================================================================== - # public api - #=================================================================== - - #: shorter version of _hash_regex, used to quickly identify hashes - _ident_regex = re.compile(r"^\$argon2[a-z]+\$") - - @classmethod - def identify(cls, hash): - hash = uh.to_unicode_for_identify(hash) - return cls._ident_regex.match(hash) is not None - - # hash(), verify(), genhash() -- implemented by backend subclass - - #=================================================================== - # hash parsing / rendering - #=================================================================== - - # info taken from source of decode_string() function in - # - # - # hash format: - # 
$argon2[$v=]$m=,t=,p=[,keyid=][,data=][$[$]] - # - # NOTE: as of 2016-6-17, the official source (above) lists the "keyid" param in the comments, - # but the actual source of decode_string & encode_string don't mention it at all. - # we're supporting parsing it, but throw NotImplementedError if encountered. - # - # sample hashes: - # v1.0: '$argon2i$m=512,t=2,p=2$5VtWOO3cGWYQHEMaYGbsfQ$AcmqasQgW/wI6wAHAMk4aQ' - # v1.3: '$argon2i$v=19$m=512,t=2,p=2$5VtWOO3cGWYQHEMaYGbsfQ$AcmqasQgW/wI6wAHAMk4aQ' - - #: regex to parse argon hash - _hash_regex = re.compile(br""" - ^ - \$argon2(?P[a-z]+)\$ - (?: - v=(?P\d+) - \$ - )? - m=(?P\d+) - , - t=(?P\d+) - , - p=(?P\d+) - (?: - ,keyid=(?P[^,$]+) - )? - (?: - ,data=(?P[^,$]+) - )? - (?: - \$ - (?P[^$]+) - (?: - \$ - (?P.+) - )? - )? - $ - """, re.X) - - @classmethod - def from_string(cls, hash): - # NOTE: assuming hash will be unicode, or use ascii-compatible encoding. - # TODO: switch to working w/ str or unicode - if isinstance(hash, unicode): - hash = hash.encode("utf-8") - if not isinstance(hash, bytes): - raise exc.ExpectedStringError(hash, "hash") - m = cls._hash_regex.match(hash) - if not m: - raise exc.MalformedHashError(cls) - type, version, memory_cost, time_cost, parallelism, keyid, data, salt, digest = \ - m.group("type", "version", "memory_cost", "time_cost", "parallelism", - "keyid", "data", "salt", "digest") - if keyid: - raise NotImplementedError("argon2 'keyid' parameter not supported") - return cls( - type=type.decode("ascii"), - version=int(version) if version else 0x10, - memory_cost=int(memory_cost), - rounds=int(time_cost), - parallelism=int(parallelism), - salt=b64s_decode(salt) if salt else None, - data=b64s_decode(data) if data else None, - checksum=b64s_decode(digest) if digest else None, - ) - - def to_string(self): - version = self.version - if version == 0x10: - vstr = "" - else: - vstr = "v=%d$" % version - - data = self.data - if data: - kdstr = ",data=" + bascii_to_str(b64s_encode(self.data)) - else: 
- kdstr = "" - - # NOTE: 'keyid' param currently not supported - return "$argon2%s$%sm=%d,t=%d,p=%d%s$%s$%s" % ( - uascii_to_str(self.type), - vstr, - self.memory_cost, - self.rounds, - self.parallelism, - kdstr, - bascii_to_str(b64s_encode(self.salt)), - bascii_to_str(b64s_encode(self.checksum)), - ) - - #=================================================================== - # init - #=================================================================== - def __init__(self, type=None, type_d=False, version=None, memory_cost=None, data=None, **kwds): - - # handle deprecated kwds - if type_d: - warn('argon2 `type_d=True` keyword is deprecated, and will be removed in passlib 2.0; ' - 'please use ``type="d"`` instead') - assert type is None - type = TYPE_D - - # TODO: factor out variable checksum size support into a mixin. - # set checksum size to specific value before _norm_checksum() is called - checksum = kwds.get("checksum") - if checksum is not None: - self.checksum_size = len(checksum) - - # call parent - super(_Argon2Common, self).__init__(**kwds) - - # init type - if type is None: - assert uh.validate_default_value(self, self.type, self._norm_type, param="type") - else: - self.type = self._norm_type(type) - - # init version - if version is None: - assert uh.validate_default_value(self, self.version, self._norm_version, - param="version") - else: - self.version = self._norm_version(version) - - # init memory cost - if memory_cost is None: - assert uh.validate_default_value(self, self.memory_cost, self._norm_memory_cost, - param="memory_cost") - else: - self.memory_cost = self._norm_memory_cost(memory_cost) - - # init data - if data is None: - assert self.data is None - else: - if not isinstance(data, bytes): - raise uh.exc.ExpectedTypeError(data, "bytes", "data") - self.data = data - - #------------------------------------------------------------------- - # parameter guards - #------------------------------------------------------------------- - - @classmethod - 
def _norm_type(cls, value): - # type check - if not isinstance(value, unicode): - if PY2 and isinstance(value, bytes): - value = value.decode('ascii') - else: - raise uh.exc.ExpectedTypeError(value, "str", "type") - - # check if type is valid - if value in ALL_TYPES_SET: - return value - - # translate from uppercase - temp = value.lower() - if temp in ALL_TYPES_SET: - return temp - - # failure! - raise ValueError("unknown argon2 hash type: %r" % (value,)) - - @classmethod - def _norm_version(cls, version): - if not isinstance(version, uh.int_types): - raise uh.exc.ExpectedTypeError(version, "integer", "version") - - # minimum valid version - if version < 0x13 and version != 0x10: - raise ValueError("invalid argon2 hash version: %d" % (version,)) - - # check this isn't past backend's max version - backend = cls.get_backend() - if version > cls.max_version: - raise ValueError("%s: hash version 0x%X not supported by %r backend " - "(max version is 0x%X); try updating or switching backends" % - (cls.name, version, backend, cls.max_version)) - return version - - @classmethod - def _norm_memory_cost(cls, memory_cost, relaxed=False): - return uh.norm_integer(cls, memory_cost, min=cls.min_memory_cost, - param="memory_cost", relaxed=relaxed) - - #=================================================================== - # digest calculation - #=================================================================== - - # NOTE: _calc_checksum implemented by backend subclass - - @classmethod - def _get_backend_type(cls, value): - """ - helper to resolve backend constant from type - """ - try: - return cls._backend_type_map[value] - except KeyError: - pass - # XXX: pick better error class? 
- msg = "unsupported argon2 hash (type %r not supported by %s backend)" % \ - (value, cls.get_backend()) - raise ValueError(msg) - - #=================================================================== - # hash migration - #=================================================================== - - def _calc_needs_update(self, **kwds): - cls = type(self) - if self.type != cls.type: - return True - minver = cls.min_desired_version - if minver is None or minver > cls.max_version: - minver = cls.max_version - if self.version < minver: - # version is too old. - return True - if self.memory_cost != cls.memory_cost: - return True - if self.checksum_size != cls.checksum_size: - return True - return super(_Argon2Common, self)._calc_needs_update(**kwds) - - #=================================================================== - # backend loading - #=================================================================== - - _no_backend_suggestion = " -- recommend you install one (e.g. 'pip install argon2_cffi')" - - @classmethod - def _finalize_backend_mixin(mixin_cls, name, dryrun): - """ - helper called by from backend mixin classes' _load_backend_mixin() -- - invoked after backend imports have been loaded, and performs - feature detection & testing common to all backends. - """ - # check argon2 version - max_version = mixin_cls.max_version - assert isinstance(max_version, int) and max_version >= 0x10 - if max_version < 0x13: - warn("%r doesn't support argon2 v1.3, and should be upgraded" % name, - uh.exc.PasslibSecurityWarning) - - # prefer best available type - for type in ALL_TYPES: - if type in mixin_cls._backend_type_map: - mixin_cls.type = type - break - else: - warn("%r lacks support for all known hash types" % name, uh.exc.PasslibRuntimeWarning) - # NOTE: class will just throw "unsupported argon2 hash" error if they try to use it... 
- mixin_cls.type = TYPE_ID - - return True - - @classmethod - def _adapt_backend_error(cls, err, hash=None, self=None): - """ - internal helper invoked when backend has hash/verification error; - used to adapt to passlib message. - """ - backend = cls.get_backend() - - # parse hash to throw error if format was invalid, parameter out of range, etc. - if self is None and hash is not None: - self = cls.from_string(hash) - - # check constraints on parsed object - # XXX: could move this to __init__, but not needed by needs_update calls - if self is not None: - self._validate_constraints(self.memory_cost, self.parallelism) - - # as of cffi 16.1, lacks support in hash_secret(), so genhash() will get here. - # as of cffi 16.2, support removed from verify_secret() as well. - if backend == "argon2_cffi" and self.data is not None: - raise NotImplementedError("argon2_cffi backend doesn't support the 'data' parameter") - - # fallback to reporting a malformed hash - text = str(err) - if text not in [ - "Decoding failed" # argon2_cffi's default message - ]: - reason = "%s reported: %s: hash=%r" % (backend, text, hash) - else: - reason = repr(hash) - raise exc.MalformedHashError(cls, reason=reason) - - #=================================================================== - # eoc - #=================================================================== - -#----------------------------------------------------------------------- -# stub backend -#----------------------------------------------------------------------- -class _NoBackend(_Argon2Common): - """ - mixin used before any backend has been loaded. - contains stubs that force loading of one of the available backends. 
- """ - #=================================================================== - # primary methods - #=================================================================== - @classmethod - def hash(cls, secret): - cls._stub_requires_backend() - return cls.hash(secret) - - @classmethod - def verify(cls, secret, hash): - cls._stub_requires_backend() - return cls.verify(secret, hash) - - @uh.deprecated_method(deprecated="1.7", removed="2.0") - @classmethod - def genhash(cls, secret, config): - cls._stub_requires_backend() - return cls.genhash(secret, config) - - #=================================================================== - # digest calculation - #=================================================================== - def _calc_checksum(self, secret): - # NOTE: since argon2_cffi takes care of rendering hash, - # _calc_checksum() is only used by the argon2pure backend. - self._stub_requires_backend() - # NOTE: have to use super() here so that we don't recursively - # call subclass's wrapped _calc_checksum - return super(argon2, self)._calc_checksum(secret) - - #=================================================================== - # eoc - #=================================================================== - -#----------------------------------------------------------------------- -# argon2_cffi backend -#----------------------------------------------------------------------- -class _CffiBackend(_Argon2Common): - """ - argon2_cffi backend - """ - #=================================================================== - # backend loading - #=================================================================== - - @classmethod - def _load_backend_mixin(mixin_cls, name, dryrun): - # make sure we write info to base class's __dict__, not that of a subclass - assert mixin_cls is _CffiBackend - - # we automatically import this at top, so just grab info - if _argon2_cffi is None: - if _argon2_cffi_error: - raise exc.PasslibSecurityError(_argon2_cffi_error) - return False - 
max_version = _argon2_cffi.low_level.ARGON2_VERSION - log.debug("detected 'argon2_cffi' backend, version %r, with support for 0x%x argon2 hashes", - _argon2_cffi.__version__, max_version) - - # build type map - TypeEnum = _argon2_cffi.Type - type_map = {} - for type in ALL_TYPES: - try: - type_map[type] = getattr(TypeEnum, type.upper()) - except AttributeError: - # TYPE_ID support not added until v18.2 - assert type not in (TYPE_I, TYPE_D), "unexpected missing type: %r" % type - mixin_cls._backend_type_map = type_map - - # set version info, and run common setup - mixin_cls.version = mixin_cls.max_version = max_version - return mixin_cls._finalize_backend_mixin(name, dryrun) - - #=================================================================== - # primary methods - #=================================================================== - @classmethod - def hash(cls, secret): - # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. - uh.validate_secret(secret) - secret = to_bytes(secret, "utf-8") - # XXX: doesn't seem to be a way to make this honor max_threads - try: - return bascii_to_str(_argon2_cffi.low_level.hash_secret( - type=cls._get_backend_type(cls.type), - memory_cost=cls.memory_cost, - time_cost=cls.default_rounds, - parallelism=cls.parallelism, - salt=to_bytes(cls._generate_salt()), - hash_len=cls.checksum_size, - secret=secret, - )) - except _argon2_cffi.exceptions.HashingError as err: - raise cls._adapt_backend_error(err) - - #: helper for verify() method below -- maps prefixes to type constants - _byte_ident_map = dict((render_bytes(b"$argon2%s$", type.encode("ascii")), type) - for type in ALL_TYPES) - - @classmethod - def verify(cls, secret, hash): - # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. 
- uh.validate_secret(secret) - secret = to_bytes(secret, "utf-8") - hash = to_bytes(hash, "ascii") - - # read type from start of hash - # NOTE: don't care about malformed strings, lowlevel will throw error for us - type = cls._byte_ident_map.get(hash[:1+hash.find(b"$", 1)], TYPE_I) - type_code = cls._get_backend_type(type) - - # XXX: doesn't seem to be a way to make this honor max_threads - try: - result = _argon2_cffi.low_level.verify_secret(hash, secret, type_code) - assert result is True - return True - except _argon2_cffi.exceptions.VerifyMismatchError: - return False - except _argon2_cffi.exceptions.VerificationError as err: - raise cls._adapt_backend_error(err, hash=hash) - - # NOTE: deprecated, will be removed in 2.0 - @classmethod - def genhash(cls, secret, config): - # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. - uh.validate_secret(secret) - secret = to_bytes(secret, "utf-8") - self = cls.from_string(config) - # XXX: doesn't seem to be a way to make this honor max_threads - try: - result = bascii_to_str(_argon2_cffi.low_level.hash_secret( - type=cls._get_backend_type(self.type), - memory_cost=self.memory_cost, - time_cost=self.rounds, - parallelism=self.parallelism, - salt=to_bytes(self.salt), - hash_len=self.checksum_size, - secret=secret, - version=self.version, - )) - except _argon2_cffi.exceptions.HashingError as err: - raise cls._adapt_backend_error(err, hash=config) - if self.version == 0x10: - # workaround: argon2 0x13 always returns "v=" segment, even for 0x10 hashes - result = result.replace("$v=16$", "$") - return result - - #=================================================================== - # digest calculation - #=================================================================== - def _calc_checksum(self, secret): - raise AssertionError("shouldn't be called under argon2_cffi backend") - - #=================================================================== - # eoc - 
#=================================================================== - -#----------------------------------------------------------------------- -# argon2pure backend -#----------------------------------------------------------------------- -class _PureBackend(_Argon2Common): - """ - argon2pure backend - """ - #=================================================================== - # backend loading - #=================================================================== - - @classmethod - def _load_backend_mixin(mixin_cls, name, dryrun): - # make sure we write info to base class's __dict__, not that of a subclass - assert mixin_cls is _PureBackend - - # import argon2pure - global _argon2pure - try: - import argon2pure as _argon2pure - except ImportError: - return False - - # get default / max supported version -- added in v1.2.2 - try: - from argon2pure import ARGON2_DEFAULT_VERSION as max_version - except ImportError: - log.warning("detected 'argon2pure' backend, but package is too old " - "(passlib requires argon2pure >= 1.2.3)") - return False - - log.debug("detected 'argon2pure' backend, with support for 0x%x argon2 hashes", - max_version) - - if not dryrun: - warn("Using argon2pure backend, which is 100x+ slower than is required " - "for adequate security. 
Installing argon2_cffi (via 'pip install argon2_cffi') " - "is strongly recommended", exc.PasslibSecurityWarning) - - # build type map - type_map = {} - for type in ALL_TYPES: - try: - type_map[type] = getattr(_argon2pure, "ARGON2" + type.upper()) - except AttributeError: - # TYPE_ID support not added until v1.3 - assert type not in (TYPE_I, TYPE_D), "unexpected missing type: %r" % type - mixin_cls._backend_type_map = type_map - - mixin_cls.version = mixin_cls.max_version = max_version - return mixin_cls._finalize_backend_mixin(name, dryrun) - - #=================================================================== - # primary methods - #=================================================================== - - # NOTE: this backend uses default .hash() & .verify() implementations. - - #=================================================================== - # digest calculation - #=================================================================== - def _calc_checksum(self, secret): - # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. - uh.validate_secret(secret) - secret = to_bytes(secret, "utf-8") - kwds = dict( - password=secret, - salt=self.salt, - time_cost=self.rounds, - memory_cost=self.memory_cost, - parallelism=self.parallelism, - tag_length=self.checksum_size, - type_code=self._get_backend_type(self.type), - version=self.version, - ) - if self.max_threads > 0: - kwds['threads'] = self.max_threads - if self.pure_use_threads: - kwds['use_threads'] = True - if self.data: - kwds['associated_data'] = self.data - # NOTE: should return raw bytes - # NOTE: this may raise _argon2pure.Argon2ParameterError, - # but it if does that, there's a bug in our own parameter checking code. 
- try: - return _argon2pure.argon2(**kwds) - except _argon2pure.Argon2Error as err: - raise self._adapt_backend_error(err, self=self) - - #=================================================================== - # eoc - #=================================================================== - -class argon2(_NoBackend, _Argon2Common): - """ - This class implements the Argon2 password hash [#argon2-home]_, and follows the :ref:`password-hash-api`. - - Argon2 supports a variable-length salt, and variable time & memory cost, - and a number of other configurable parameters. - - The :meth:`~passlib.ifc.PasswordHash.replace` method accepts the following optional keywords: - - :type type: str - :param type: - Specify the type of argon2 hash to generate. - Can be one of "ID", "I", "D". - - This defaults to "ID" if supported by the backend, otherwise "I". - - :type salt: str - :param salt: - Optional salt string. - If specified, the length must be between 0-1024 bytes. - If not specified, one will be auto-generated (this is recommended). - - :type salt_size: int - :param salt_size: - Optional number of bytes to use when autogenerating new salts. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - This corresponds linearly to the amount of time hashing will take. - - :type time_cost: int - :param time_cost: - An alias for **rounds**, for compatibility with underlying argon2 library. - - :param int memory_cost: - Defines the memory usage in kibibytes. - This corresponds linearly to the amount of memory hashing will take. - - :param int parallelism: - Defines the parallelization factor. - *NOTE: this will affect the resulting hash value.* - - :param int digest_size: - Length of the digest in bytes. - - :param int max_threads: - Maximum number of threads that will be used. - -1 means unlimited; otherwise hashing will use ``min(parallelism, max_threads)`` threads. - - .. note:: - - This option is currently only honored by the argon2pure backend. 
- - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionchanged:: 1.7.2 - - Added the "type" keyword, and support for type "D" and "ID" hashes. - (Prior versions could verify type "D" hashes, but not generate them). - - .. todo:: - - * Support configurable threading limits. - """ - #============================================================================= - # backend - #============================================================================= - - # NOTE: the brunt of the argon2 class is implemented in _Argon2Common. - # there are then subclass for each backend (e.g. _PureBackend), - # these are dynamically prepended to this class's bases - # in order to load the appropriate backend. 
- - #: list of potential backends - backends = ("argon2_cffi", "argon2pure") - - #: flag that this class's bases should be modified by SubclassBackendMixin - _backend_mixin_target = True - - #: map of backend -> mixin class, used by _get_backend_loader() - _backend_mixin_map = { - None: _NoBackend, - "argon2_cffi": _CffiBackend, - "argon2pure": _PureBackend, - } - - #============================================================================= - # - #============================================================================= - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/bcrypt.py b/libs_crutch/contrib/passlib/handlers/bcrypt.py deleted file mode 100644 index 3c168e9..0000000 --- a/libs_crutch/contrib/passlib/handlers/bcrypt.py +++ /dev/null @@ -1,1034 +0,0 @@ -"""passlib.bcrypt -- implementation of OpenBSD's BCrypt algorithm. - -TODO: - -* support 2x and altered-2a hashes? 
- http://www.openwall.com/lists/oss-security/2011/06/27/9 - -* deal with lack of PY3-compatibile c-ext implementation -""" -#============================================================================= -# imports -#============================================================================= -from __future__ import with_statement, absolute_import -# core -from base64 import b64encode -from hashlib import sha256 -import os -import re -import logging; log = logging.getLogger(__name__) -from warnings import warn -# site -_bcrypt = None # dynamically imported by _load_backend_bcrypt() -_pybcrypt = None # dynamically imported by _load_backend_pybcrypt() -_bcryptor = None # dynamically imported by _load_backend_bcryptor() -# pkg -_builtin_bcrypt = None # dynamically imported by _load_backend_builtin() -from passlib.exc import PasslibHashWarning, PasslibSecurityWarning, PasslibSecurityError -from passlib.utils import safe_crypt, repeat_string, to_bytes, parse_version, \ - rng, getrandstr, test_crypt, to_unicode -from passlib.utils.binary import bcrypt64 -from passlib.utils.compat import get_unbound_method_function -from passlib.utils.compat import u, uascii_to_str, unicode, str_to_uascii -import passlib.utils.handlers as uh - -# local -__all__ = [ - "bcrypt", -] - -#============================================================================= -# support funcs & constants -#============================================================================= -IDENT_2 = u("$2$") -IDENT_2A = u("$2a$") -IDENT_2X = u("$2x$") -IDENT_2Y = u("$2y$") -IDENT_2B = u("$2b$") -_BNULL = b'\x00' - -# reference hash of "test", used in various self-checks -TEST_HASH_2A = b"$2a$04$5BJqKfqMQvV7nS.yUguNcueVirQqDBGaLXSqj.rs.pZPlNR0UX/HK" - -def _detect_pybcrypt(): - """ - internal helper which tries to distinguish pybcrypt vs bcrypt. - - :returns: - True if cext-based py-bcrypt, - False if ffi-based bcrypt, - None if 'bcrypt' module not found. - - .. 
versionchanged:: 1.6.3 - - Now assuming bcrypt installed, unless py-bcrypt explicitly detected. - Previous releases assumed py-bcrypt by default. - - Making this change since py-bcrypt is (apparently) unmaintained and static, - whereas bcrypt is being actively maintained, and it's internal structure may shift. - """ - # NOTE: this is also used by the unittests. - - # check for module. - try: - import bcrypt - except ImportError: - # XXX: this is ignoring case where py-bcrypt's "bcrypt._bcrypt" C Ext fails to import; - # would need to inspect actual ImportError message to catch that. - return None - - # py-bcrypt has a "._bcrypt.__version__" attribute (confirmed for v0.1 - 0.4), - # which bcrypt lacks (confirmed for v1.0 - 2.0) - # "._bcrypt" alone isn't sufficient, since bcrypt 2.0 now has that attribute. - try: - from bcrypt._bcrypt import __version__ - except ImportError: - return False - return True - -#============================================================================= -# backend mixins -#============================================================================= -class _BcryptCommon(uh.SubclassBackendMixin, uh.TruncateMixin, uh.HasManyIdents, - uh.HasRounds, uh.HasSalt, uh.GenericHandler): - """ - Base class which implements brunt of BCrypt code. - This is then subclassed by the various backends, - to override w/ backend-specific methods. - - When a backend is loaded, the bases of the 'bcrypt' class proper - are modified to prepend the correct backend-specific subclass. 
- """ - #=================================================================== - # class attrs - #=================================================================== - - #-------------------- - # PasswordHash - #-------------------- - name = "bcrypt" - setting_kwds = ("salt", "rounds", "ident", "truncate_error") - - #-------------------- - # GenericHandler - #-------------------- - checksum_size = 31 - checksum_chars = bcrypt64.charmap - - #-------------------- - # HasManyIdents - #-------------------- - default_ident = IDENT_2B - ident_values = (IDENT_2, IDENT_2A, IDENT_2X, IDENT_2Y, IDENT_2B) - ident_aliases = {u("2"): IDENT_2, u("2a"): IDENT_2A, u("2y"): IDENT_2Y, - u("2b"): IDENT_2B} - - #-------------------- - # HasSalt - #-------------------- - min_salt_size = max_salt_size = 22 - salt_chars = bcrypt64.charmap - # NOTE: 22nd salt char must be in bcrypt64._padinfo2[1], not full charmap - - #-------------------- - # HasRounds - #-------------------- - default_rounds = 12 # current passlib default - min_rounds = 4 # minimum from bcrypt specification - max_rounds = 31 # 32-bit integer limit (since real_rounds=1< class - - # NOTE: set_backend() will execute the ._load_backend_mixin() - # of the matching mixin class, which will handle backend detection - - # appended to HasManyBackends' "no backends available" error message - _no_backend_suggestion = " -- recommend you install one (e.g. 'pip install bcrypt')" - - @classmethod - def _finalize_backend_mixin(mixin_cls, backend, dryrun): - """ - helper called by from backend mixin classes' _load_backend_mixin() -- - invoked after backend imports have been loaded, and performs - feature detection & testing common to all backends. 
- """ - #---------------------------------------------------------------- - # setup helpers - #---------------------------------------------------------------- - assert mixin_cls is bcrypt._backend_mixin_map[backend], \ - "_configure_workarounds() invoked from wrong class" - - if mixin_cls._workrounds_initialized: - return True - - verify = mixin_cls.verify - - err_types = (ValueError,) - if _bcryptor: - err_types += (_bcryptor.engine.SaltError,) - - def safe_verify(secret, hash): - """verify() wrapper which traps 'unknown identifier' errors""" - try: - return verify(secret, hash) - except err_types: - # backends without support for given ident will throw various - # errors about unrecognized version: - # pybcrypt, bcrypt -- raises ValueError - # bcryptor -- raises bcryptor.engine.SaltError - return NotImplemented - except AssertionError as err: - # _calc_checksum() code may also throw AssertionError - # if correct hash isn't returned (e.g. 2y hash converted to 2b, - # such as happens with bcrypt 3.0.0) - log.debug("trapped unexpected response from %r backend: verify(%r, %r):", - backend, secret, hash, exc_info=True) - return NotImplemented - - def assert_lacks_8bit_bug(ident): - """ - helper to check for cryptblowfish 8bit bug (fixed in 2y/2b); - even though it's not known to be present in any of passlib's backends. - this is treated as FATAL, because it can easily result in seriously malformed hashes, - and we can't correct for it ourselves. - - test cases from - reference hash is the incorrectly generated $2x$ hash taken from above url - """ - secret = b"\xA3" - bug_hash = ident.encode("ascii") + b"05$/OK.fbVrR/bpIqNJ5ianF.CE5elHaaO4EbggVDjb8P19RukzXSM3e" - if verify(secret, bug_hash): - # NOTE: this only EVER be observed in 2a hashes, - # 2y/2b hashes should have fixed the bug. - # (but we check w/ them anyways). 
- raise PasslibSecurityError( - "passlib.hash.bcrypt: Your installation of the %r backend is vulnerable to " - "the crypt_blowfish 8-bit bug (CVE-2011-2483), " - "and should be upgraded or replaced with another backend." % backend) - - # if it doesn't have wraparound bug, make sure it *does* handle things - # correctly -- or we're in some weird third case. - correct_hash = ident.encode("ascii") + b"05$/OK.fbVrR/bpIqNJ5ianF.Sa7shbm4.OzKpvFnX1pQLmQW96oUlCq" - if not verify(secret, correct_hash): - raise RuntimeError("%s backend failed to verify %s 8bit hash" % (backend, ident)) - - def detect_wrap_bug(ident): - """ - check for bsd wraparound bug (fixed in 2b) - this is treated as a warning, because it's rare in the field, - and pybcrypt (as of 2015-7-21) is unpatched, but some people may be stuck with it. - - test cases from - - NOTE: reference hash is of password "0"*72 - - NOTE: if in future we need to deliberately create hashes which have this bug, - can use something like 'hashpw(repeat_string(secret[:((1+secret) % 256) or 1]), 72)' - """ - # check if it exhibits wraparound bug - secret = (b"0123456789"*26)[:255] - bug_hash = ident.encode("ascii") + b"04$R1lJ2gkNaoPGdafE.H.16.nVyh2niHsGJhayOHLMiXlI45o8/DU.6" - if verify(secret, bug_hash): - return True - - # if it doesn't have wraparound bug, make sure it *does* handle things - # correctly -- or we're in some weird third case. 
- correct_hash = ident.encode("ascii") + b"04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi" - if not verify(secret, correct_hash): - raise RuntimeError("%s backend failed to verify %s wraparound hash" % (backend, ident)) - - return False - - def assert_lacks_wrap_bug(ident): - if not detect_wrap_bug(ident): - return - # should only see in 2a, later idents should NEVER exhibit this bug: - # * 2y implementations should have been free of it - # * 2b was what (supposedly) fixed it - raise RuntimeError("%s backend unexpectedly has wraparound bug for %s" % (backend, ident)) - - #---------------------------------------------------------------- - # check for old 20 support - #---------------------------------------------------------------- - test_hash_20 = b"$2$04$5BJqKfqMQvV7nS.yUguNcuRfMMOXK0xPWavM7pOzjEi5ze5T1k8/S" - result = safe_verify("test", test_hash_20) - if not result: - raise RuntimeError("%s incorrectly rejected $2$ hash" % backend) - elif result is NotImplemented: - mixin_cls._lacks_20_support = True - log.debug("%r backend lacks $2$ support, enabling workaround", backend) - - #---------------------------------------------------------------- - # check for 2a support - #---------------------------------------------------------------- - result = safe_verify("test", TEST_HASH_2A) - if not result: - raise RuntimeError("%s incorrectly rejected $2a$ hash" % backend) - elif result is NotImplemented: - # 2a support is required, and should always be present - raise RuntimeError("%s lacks support for $2a$ hashes" % backend) - else: - assert_lacks_8bit_bug(IDENT_2A) - if detect_wrap_bug(IDENT_2A): - warn("passlib.hash.bcrypt: Your installation of the %r backend is vulnerable to " - "the bsd wraparound bug, " - "and should be upgraded or replaced with another backend " - "(enabling workaround for now)." 
% backend, - uh.exc.PasslibSecurityWarning) - mixin_cls._has_2a_wraparound_bug = True - - #---------------------------------------------------------------- - # check for 2y support - #---------------------------------------------------------------- - test_hash_2y = TEST_HASH_2A.replace(b"2a", b"2y") - result = safe_verify("test", test_hash_2y) - if not result: - raise RuntimeError("%s incorrectly rejected $2y$ hash" % backend) - elif result is NotImplemented: - mixin_cls._lacks_2y_support = True - log.debug("%r backend lacks $2y$ support, enabling workaround", backend) - else: - # NOTE: Not using this as fallback candidate, - # lacks wide enough support across implementations. - assert_lacks_8bit_bug(IDENT_2Y) - assert_lacks_wrap_bug(IDENT_2Y) - - #---------------------------------------------------------------- - # TODO: check for 2x support - #---------------------------------------------------------------- - - #---------------------------------------------------------------- - # check for 2b support - #---------------------------------------------------------------- - test_hash_2b = TEST_HASH_2A.replace(b"2a", b"2b") - result = safe_verify("test", test_hash_2b) - if not result: - raise RuntimeError("%s incorrectly rejected $2b$ hash" % backend) - elif result is NotImplemented: - mixin_cls._lacks_2b_support = True - log.debug("%r backend lacks $2b$ support, enabling workaround", backend) - else: - mixin_cls._fallback_ident = IDENT_2B - assert_lacks_8bit_bug(IDENT_2B) - assert_lacks_wrap_bug(IDENT_2B) - - # set flag so we don't have to run this again - mixin_cls._workrounds_initialized = True - return True - - #=================================================================== - # digest calculation - #=================================================================== - - # _calc_checksum() defined by backends - - def _prepare_digest_args(self, secret): - """ - common helper for backends to implement _calc_checksum(). 
- takes in secret, returns (secret, ident) pair, - """ - return self._norm_digest_args(secret, self.ident, new=self.use_defaults) - - @classmethod - def _norm_digest_args(cls, secret, ident, new=False): - # make sure secret is unicode - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - - # check max secret size - uh.validate_secret(secret) - - # check for truncation (during .hash() calls only) - if new: - cls._check_truncate_policy(secret) - - # NOTE: especially important to forbid NULLs for bcrypt, since many - # backends (bcryptor, bcrypt) happily accept them, and then - # silently truncate the password at first NULL they encounter! - if _BNULL in secret: - raise uh.exc.NullPasswordError(cls) - - # TODO: figure out way to skip these tests when not needed... - - # protect from wraparound bug by truncating secret before handing it to the backend. - # bcrypt only uses first 72 bytes anyways. - # NOTE: not needed for 2y/2b, but might use 2a as fallback for them. - if cls._has_2a_wraparound_bug and len(secret) >= 255: - secret = secret[:72] - - # special case handling for variants (ordered most common first) - if ident == IDENT_2A: - # nothing needs to be done. - pass - - elif ident == IDENT_2B: - if cls._lacks_2b_support: - # handle $2b$ hash format even if backend is too old. - # have it generate a 2A/2Y digest, then return it as a 2B hash. - # 2a-only backend could potentially exhibit wraparound bug -- - # but we work around that issue above. - ident = cls._fallback_ident - - elif ident == IDENT_2Y: - if cls._lacks_2y_support: - # handle $2y$ hash format (not supported by BSDs, being phased out on others) - # have it generate a 2A/2B digest, then return it as a 2Y hash. 
- ident = cls._fallback_ident - - elif ident == IDENT_2: - if cls._lacks_20_support: - # handle legacy $2$ format (not supported by most backends except BSD os_crypt) - # we can fake $2$ behavior using the 2A/2Y/2B algorithm - # by repeating the password until it's at least 72 chars in length. - if secret: - secret = repeat_string(secret, 72) - ident = cls._fallback_ident - - elif ident == IDENT_2X: - - # NOTE: shouldn't get here. - # XXX: could check if backend does actually offer 'support' - raise RuntimeError("$2x$ hashes not currently supported by passlib") - - else: - raise AssertionError("unexpected ident value: %r" % ident) - - return secret, ident - -#----------------------------------------------------------------------- -# stub backend -#----------------------------------------------------------------------- -class _NoBackend(_BcryptCommon): - """ - mixin used before any backend has been loaded. - contains stubs that force loading of one of the available backends. - """ - #=================================================================== - # digest calculation - #=================================================================== - def _calc_checksum(self, secret): - self._stub_requires_backend() - # NOTE: have to use super() here so that we don't recursively - # call subclass's wrapped _calc_checksum, e.g. 
bcrypt_sha256._calc_checksum - return super(bcrypt, self)._calc_checksum(secret) - - #=================================================================== - # eoc - #=================================================================== - -#----------------------------------------------------------------------- -# bcrypt backend -#----------------------------------------------------------------------- -class _BcryptBackend(_BcryptCommon): - """ - backend which uses 'bcrypt' package - """ - - @classmethod - def _load_backend_mixin(mixin_cls, name, dryrun): - # try to import bcrypt - global _bcrypt - if _detect_pybcrypt(): - # pybcrypt was installed instead - return False - try: - import bcrypt as _bcrypt - except ImportError: # pragma: no cover - return False - try: - version = _bcrypt.__about__.__version__ - except: - log.warning("(trapped) error reading bcrypt version", exc_info=True) - version = '' - - log.debug("detected 'bcrypt' backend, version %r", version) - return mixin_cls._finalize_backend_mixin(name, dryrun) - - # # TODO: would like to implementing verify() directly, - # # to skip need for parsing hash strings. - # # below method has a few edge cases where it chokes though. - # @classmethod - # def verify(cls, secret, hash): - # if isinstance(hash, unicode): - # hash = hash.encode("ascii") - # ident = hash[:hash.index(b"$", 1)+1].decode("ascii") - # if ident not in cls.ident_values: - # raise uh.exc.InvalidHashError(cls) - # secret, eff_ident = cls._norm_digest_args(secret, ident) - # if eff_ident != ident: - # # lacks support for original ident, replace w/ new one. 
- # hash = eff_ident.encode("ascii") + hash[len(ident):] - # result = _bcrypt.hashpw(secret, hash) - # assert result.startswith(eff_ident) - # return consteq(result, hash) - - def _calc_checksum(self, secret): - # bcrypt behavior: - # secret must be bytes - # config must be ascii bytes - # returns ascii bytes - secret, ident = self._prepare_digest_args(secret) - config = self._get_config(ident) - if isinstance(config, unicode): - config = config.encode("ascii") - hash = _bcrypt.hashpw(secret, config) - assert hash.startswith(config) and len(hash) == len(config)+31, \ - "config mismatch: %r => %r" % (config, hash) - assert isinstance(hash, bytes) - return hash[-31:].decode("ascii") - -#----------------------------------------------------------------------- -# bcryptor backend -#----------------------------------------------------------------------- -class _BcryptorBackend(_BcryptCommon): - """ - backend which uses 'bcryptor' package - """ - - @classmethod - def _load_backend_mixin(mixin_cls, name, dryrun): - # try to import bcryptor - global _bcryptor - try: - import bcryptor as _bcryptor - except ImportError: # pragma: no cover - return False - - # deprecated as of 1.7.2 - if not dryrun: - warn("Support for `bcryptor` is deprecated, and will be removed in Passlib 1.8; " - "Please use `pip install bcrypt` instead", DeprecationWarning) - - return mixin_cls._finalize_backend_mixin(name, dryrun) - - def _calc_checksum(self, secret): - # bcryptor behavior: - # py2: unicode secret/hash encoded as ascii bytes before use, - # bytes taken as-is; returns ascii bytes. 
- # py3: not supported - secret, ident = self._prepare_digest_args(secret) - config = self._get_config(ident) - hash = _bcryptor.engine.Engine(False).hash_key(secret, config) - assert hash.startswith(config) and len(hash) == len(config)+31 - return str_to_uascii(hash[-31:]) - -#----------------------------------------------------------------------- -# pybcrypt backend -#----------------------------------------------------------------------- -class _PyBcryptBackend(_BcryptCommon): - """ - backend which uses 'pybcrypt' package - """ - - #: classwide thread lock used for pybcrypt < 0.3 - _calc_lock = None - - @classmethod - def _load_backend_mixin(mixin_cls, name, dryrun): - # try to import pybcrypt - global _pybcrypt - if not _detect_pybcrypt(): - # not installed, or bcrypt installed instead - return False - try: - import bcrypt as _pybcrypt - except ImportError: # pragma: no cover - # XXX: should we raise AssertionError here? (if get here, _detect_pybcrypt() is broken) - return False - - # deprecated as of 1.7.2 - if not dryrun: - warn("Support for `py-bcrypt` is deprecated, and will be removed in Passlib 1.8; " - "Please use `pip install bcrypt` instead", DeprecationWarning) - - # determine pybcrypt version - try: - version = _pybcrypt._bcrypt.__version__ - except: - log.warning("(trapped) error reading pybcrypt version", exc_info=True) - version = "" - log.debug("detected 'pybcrypt' backend, version %r", version) - - # return calc function based on version - vinfo = parse_version(version) or (0, 0) - if vinfo < (0, 3): - warn("py-bcrypt %s has a major security vulnerability, " - "you should upgrade to py-bcrypt 0.3 immediately." 
- % version, uh.exc.PasslibSecurityWarning) - if mixin_cls._calc_lock is None: - import threading - mixin_cls._calc_lock = threading.Lock() - mixin_cls._calc_checksum = get_unbound_method_function(mixin_cls._calc_checksum_threadsafe) - - return mixin_cls._finalize_backend_mixin(name, dryrun) - - def _calc_checksum_threadsafe(self, secret): - # as workaround for pybcrypt < 0.3's concurrency issue, - # we wrap everything in a thread lock. as long as bcrypt is only - # used through passlib, this should be safe. - with self._calc_lock: - return self._calc_checksum_raw(secret) - - def _calc_checksum_raw(self, secret): - # py-bcrypt behavior: - # py2: unicode secret/hash encoded as ascii bytes before use, - # bytes taken as-is; returns ascii bytes. - # py3: unicode secret encoded as utf-8 bytes, - # hash encoded as ascii bytes, returns ascii unicode. - secret, ident = self._prepare_digest_args(secret) - config = self._get_config(ident) - hash = _pybcrypt.hashpw(secret, config) - assert hash.startswith(config) and len(hash) == len(config)+31 - return str_to_uascii(hash[-31:]) - - _calc_checksum = _calc_checksum_raw - -#----------------------------------------------------------------------- -# os crypt backend -#----------------------------------------------------------------------- -class _OsCryptBackend(_BcryptCommon): - """ - backend which uses :func:`crypt.crypt` - """ - - @classmethod - def _load_backend_mixin(mixin_cls, name, dryrun): - if not test_crypt("test", TEST_HASH_2A): - return False - return mixin_cls._finalize_backend_mixin(name, dryrun) - - def _calc_checksum(self, secret): - secret, ident = self._prepare_digest_args(secret) - config = self._get_config(ident) - hash = safe_crypt(secret, config) - if hash: - assert hash.startswith(config) and len(hash) == len(config)+31 - return hash[-31:] - else: - # NOTE: Have to raise this error because python3's crypt.crypt() only accepts unicode. 
- # This means it can't handle any passwords that aren't either unicode - # or utf-8 encoded bytes. However, hashing a password with an alternate - # encoding should be a pretty rare edge case; if user needs it, they can just - # install bcrypt backend. - # XXX: is this the right error type to raise? - # maybe have safe_crypt() not swallow UnicodeDecodeError, and have handlers - # like sha256_crypt trap it if they have alternate method of handling them? - raise uh.exc.MissingBackendError( - "non-utf8 encoded passwords can't be handled by crypt.crypt() under python3, " - "recommend running `pip install bcrypt`.", - ) - -#----------------------------------------------------------------------- -# builtin backend -#----------------------------------------------------------------------- -class _BuiltinBackend(_BcryptCommon): - """ - backend which uses passlib's pure-python implementation - """ - @classmethod - def _load_backend_mixin(mixin_cls, name, dryrun): - from passlib.utils import as_bool - if not as_bool(os.environ.get("PASSLIB_BUILTIN_BCRYPT")): - log.debug("bcrypt 'builtin' backend not enabled via $PASSLIB_BUILTIN_BCRYPT") - return False - global _builtin_bcrypt - from passlib.crypto._blowfish import raw_bcrypt as _builtin_bcrypt - return mixin_cls._finalize_backend_mixin(name, dryrun) - - def _calc_checksum(self, secret): - secret, ident = self._prepare_digest_args(secret) - chk = _builtin_bcrypt(secret, ident[1:-1], - self.salt.encode("ascii"), self.rounds) - return chk.decode("ascii") - -#============================================================================= -# handler -#============================================================================= -class bcrypt(_NoBackend, _BcryptCommon): - """This class implements the BCrypt password hash, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt, and a variable number of rounds. 
- - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 22 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 12, must be between 4 and 31, inclusive. - This value is logarithmic, the actual number of iterations used will be :samp:`2**{rounds}` - -- increasing the rounds by +1 will double the amount of time taken. - - :type ident: str - :param ident: - Specifies which version of the BCrypt algorithm will be used when creating a new hash. - Typically this option is not needed, as the default (``"2b"``) is usually the correct choice. - If specified, it must be one of the following: - - * ``"2"`` - the first revision of BCrypt, which suffers from a minor security flaw and is generally not used anymore. - * ``"2a"`` - some implementations suffered from rare security flaws, replaced by 2b. - * ``"2y"`` - format specific to the *crypt_blowfish* BCrypt implementation, - identical to ``"2b"`` in all but name. - * ``"2b"`` - latest revision of the official BCrypt algorithm, current default. - - :param bool truncate_error: - By default, BCrypt will silently truncate passwords larger than 72 bytes. - Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` - to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. - - .. versionadded:: 1.7 - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - - .. 
versionchanged:: 1.6 - This class now supports ``"2y"`` hashes, and recognizes - (but does not support) the broken ``"2x"`` hashes. - (see the :ref:`crypt_blowfish bug ` - for details). - - .. versionchanged:: 1.6 - Added a pure-python backend. - - .. versionchanged:: 1.6.3 - - Added support for ``"2b"`` variant. - - .. versionchanged:: 1.7 - - Now defaults to ``"2b"`` variant. - """ - #============================================================================= - # backend - #============================================================================= - - # NOTE: the brunt of the bcrypt class is implemented in _BcryptCommon. - # there are then subclass for each backend (e.g. _PyBcryptBackend), - # these are dynamically prepended to this class's bases - # in order to load the appropriate backend. - - #: list of potential backends - backends = ("bcrypt", "pybcrypt", "bcryptor", "os_crypt", "builtin") - - #: flag that this class's bases should be modified by SubclassBackendMixin - _backend_mixin_target = True - - #: map of backend -> mixin class, used by _get_backend_loader() - _backend_mixin_map = { - None: _NoBackend, - "bcrypt": _BcryptBackend, - "pybcrypt": _PyBcryptBackend, - "bcryptor": _BcryptorBackend, - "os_crypt": _OsCryptBackend, - "builtin": _BuiltinBackend, - } - - #============================================================================= - # eoc - #============================================================================= - -#============================================================================= -# variants -#============================================================================= -_UDOLLAR = u("$") - -# XXX: it might be better to have all the bcrypt variants share a common base class, -# and have the (django_)bcrypt_sha256 wrappers just proxy bcrypt instead of subclassing it. -class _wrapped_bcrypt(bcrypt): - """ - abstracts out some bits bcrypt_sha256 & django_bcrypt_sha256 share. 
- - bypass backend-loading wrappers for hash() etc - - disable truncation support, sha256 wrappers don't need it. - """ - setting_kwds = tuple(elem for elem in bcrypt.setting_kwds if elem not in ["truncate_error"]) - truncate_size = None - - # XXX: these will be needed if any bcrypt backends directly implement this... - # @classmethod - # def hash(cls, secret, **kwds): - # # bypass bcrypt backend overriding this method - # # XXX: would wrapping bcrypt make this easier than subclassing it? - # return super(_BcryptCommon, cls).hash(secret, **kwds) - # - # @classmethod - # def verify(cls, secret, hash): - # # bypass bcrypt backend overriding this method - # return super(_BcryptCommon, cls).verify(secret, hash) - # - # @classmethod - # def genhash(cls, secret, hash): - # # bypass bcrypt backend overriding this method - # return super(_BcryptCommon, cls).genhash(secret, hash) - - @classmethod - def _check_truncate_policy(cls, secret): - # disable check performed by bcrypt(), since this doesn't truncate passwords. - pass - -#============================================================================= -# bcrypt sha256 wrapper -#============================================================================= - -class bcrypt_sha256(_wrapped_bcrypt): - """This class implements a composition of BCrypt+SHA256, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept - all the same optional keywords as the base :class:`bcrypt` hash. - - .. versionadded:: 1.6.2 - - .. versionchanged:: 1.7 - - Now defaults to ``"2b"`` variant. 
- """ - #=================================================================== - # class attrs - #=================================================================== - - #-------------------- - # PasswordHash - #-------------------- - name = "bcrypt_sha256" - - #-------------------- - # GenericHandler - #-------------------- - # this is locked at 2a/2b for now. - ident_values = (IDENT_2A, IDENT_2B) - - # clone bcrypt's ident aliases so they can be used here as well... - ident_aliases = (lambda ident_values: dict(item for item in bcrypt.ident_aliases.items() - if item[1] in ident_values))(ident_values) - default_ident = IDENT_2B - - #=================================================================== - # formatting - #=================================================================== - - # sample hash: - # $bcrypt-sha256$2a,6$/3OeRpbOf8/l6nPPRdZPp.$nRiyYqPobEZGdNRBWihQhiFDh1ws1tu - # $bcrypt-sha256$ -- prefix/identifier - # 2a -- bcrypt variant - # , -- field separator - # 6 -- bcrypt work factor - # $ -- section separator - # /3OeRpbOf8/l6nPPRdZPp. -- salt - # $ -- section separator - # nRiyYqPobEZGdNRBWihQhiFDh1ws1tu -- digest - - # XXX: we can't use .ident attr due to bcrypt code using it. - # working around that via prefix. - prefix = u('$bcrypt-sha256$') - - _hash_re = re.compile(r""" - ^ - [$]bcrypt-sha256 - [$](?P2[ab]) - ,(?P\d{1,2}) - [$](?P[^$]{22}) - (?:[$](?P.{31}))? 
- $ - """, re.X) - - @classmethod - def identify(cls, hash): - hash = uh.to_unicode_for_identify(hash) - if not hash: - return False - return hash.startswith(cls.prefix) - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - if not hash.startswith(cls.prefix): - raise uh.exc.InvalidHashError(cls) - m = cls._hash_re.match(hash) - if not m: - raise uh.exc.MalformedHashError(cls) - rounds = m.group("rounds") - if rounds.startswith(uh._UZERO) and rounds != uh._UZERO: - raise uh.exc.ZeroPaddedRoundsError(cls) - return cls(ident=m.group("variant"), - rounds=int(rounds), - salt=m.group("salt"), - checksum=m.group("digest"), - ) - - _template = u("$bcrypt-sha256$%s,%d$%s$%s") - - def to_string(self): - hash = self._template % (self.ident.strip(_UDOLLAR), - self.rounds, self.salt, self.checksum) - return uascii_to_str(hash) - - #=================================================================== - # checksum - #=================================================================== - def _calc_checksum(self, secret): - # NOTE: can't use digest directly, since bcrypt stops at first NULL. - # NOTE: bcrypt doesn't fully mix entropy for bytes 55-72 of password - # (XXX: citation needed), so we don't want key to be > 55 bytes. - # thus, have to use base64 (44 bytes) rather than hex (64 bytes). - # XXX: it's later come out that 55-72 may be ok, so later revision of bcrypt_sha256 - # may switch to hex encoding, since it's simpler to implement elsewhere. - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - - # NOTE: output of b64encode() uses "+/" altchars, "=" padding chars, - # and no leading/trailing whitespace. 
- key = b64encode(sha256(secret).digest()) - - # hand result off to normal bcrypt algorithm - return super(bcrypt_sha256, self)._calc_checksum(key) - - #=================================================================== - # other - #=================================================================== - - # XXX: have _needs_update() mark the $2a$ ones for upgrading? - # maybe do that after we switch to hex encoding? - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/cisco.py b/libs_crutch/contrib/passlib/handlers/cisco.py deleted file mode 100644 index e715e1a..0000000 --- a/libs_crutch/contrib/passlib/handlers/cisco.py +++ /dev/null @@ -1,440 +0,0 @@ -""" -passlib.handlers.cisco -- Cisco password hashes -""" -#============================================================================= -# imports -#============================================================================= -# core -from binascii import hexlify, unhexlify -from hashlib import md5 -import logging; log = logging.getLogger(__name__) -from warnings import warn -# site -# pkg -from passlib.utils import right_pad_string, to_unicode, repeat_string, to_bytes -from passlib.utils.binary import h64 -from passlib.utils.compat import unicode, u, join_byte_values, \ - join_byte_elems, iter_byte_values, uascii_to_str -import passlib.utils.handlers as uh -# local -__all__ = [ - "cisco_pix", - "cisco_asa", - "cisco_type7", -] - -#============================================================================= -# utils -#============================================================================= - -#: dummy bytes used by spoil_digest var in cisco_pix._calc_checksum() -_DUMMY_BYTES = b'\xFF' * 32 - 
-#============================================================================= -# cisco pix firewall hash -#============================================================================= -class cisco_pix(uh.HasUserContext, uh.StaticHandler): - """ - This class implements the password hash used by older Cisco PIX firewalls, - and follows the :ref:`password-hash-api`. - It does a single round of hashing, and relies on the username - as the salt. - - This class only allows passwords <= 16 bytes, anything larger - will result in a :exc:`~passlib.exc.PasswordSizeError` if passed to :meth:`~cisco_pix.hash`, - and be silently rejected if passed to :meth:`~cisco_pix.verify`. - - The :meth:`~passlib.ifc.PasswordHash.hash`, - :meth:`~passlib.ifc.PasswordHash.genhash`, and - :meth:`~passlib.ifc.PasswordHash.verify` methods - all support the following extra keyword: - - :param str user: - String containing name of user account this password is associated with. - - This is *required* in order to correctly hash passwords associated - with a user account on the Cisco device, as it is used to salt - the hash. - - Conversely, this *must* be omitted or set to ``""`` in order to correctly - hash passwords which don't have an associated user account - (such as the "enable" password). - - .. versionadded:: 1.6 - - .. versionchanged:: 1.7.1 - - Passwords > 16 bytes are now rejected / throw error instead of being silently truncated, - to match Cisco behavior. A number of :ref:`bugs ` were fixed - which caused prior releases to generate unverifiable hashes in certain cases. - """ - #=================================================================== - # class attrs - #=================================================================== - - #-------------------- - # PasswordHash - #-------------------- - name = "cisco_pix" - - truncate_size = 16 - - # NOTE: these are the default policy for PasswordHash, - # but want to set them explicitly for now. 
- truncate_error = True - truncate_verify_reject = True - - #-------------------- - # GenericHandler - #-------------------- - checksum_size = 16 - checksum_chars = uh.HASH64_CHARS - - #-------------------- - # custom - #-------------------- - - #: control flag signalling "cisco_asa" mode, set by cisco_asa class - _is_asa = False - - #=================================================================== - # methods - #=================================================================== - def _calc_checksum(self, secret): - """ - This function implements the "encrypted" hash format used by Cisco - PIX & ASA. It's behavior has been confirmed for ASA 9.6, - but is presumed correct for PIX & other ASA releases, - as it fits with known test vectors, and existing literature. - - While nearly the same, the PIX & ASA hashes have slight differences, - so this function performs differently based on the _is_asa class flag. - Noteable changes from PIX to ASA include password size limit - increased from 16 -> 32, and other internal changes. - """ - # select PIX vs or ASA mode - asa = self._is_asa - - # - # encode secret - # - # per ASA 8.4 documentation, - # http://www.cisco.com/c/en/us/td/docs/security/asa/asa84/configuration/guide/asa_84_cli_config/ref_cli.html#Supported_Character_Sets, - # it supposedly uses UTF-8 -- though some double-encoding issues have - # been observed when trying to actually *set* a non-ascii password - # via ASDM, and access via SSH seems to strip 8-bit chars. - # - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - - # - # check if password too large - # - # Per ASA 9.6 changes listed in - # http://www.cisco.com/c/en/us/td/docs/security/asa/roadmap/asa_new_features.html, - # prior releases had a maximum limit of 32 characters. - # Testing with an ASA 9.6 system bears this out -- - # setting 32-char password for a user account, - # and logins will fail if any chars are appended. 
- # (ASA 9.6 added new PBKDF2-based hash algorithm, - # which supports larger passwords). - # - # Per PIX documentation - # http://www.cisco.com/en/US/docs/security/pix/pix50/configuration/guide/commands.html, - # it would not allow passwords > 16 chars. - # - # Thus, we unconditionally throw a password size error here, - # as nothing valid can come from a larger password. - # NOTE: assuming PIX has same behavior, but at 16 char limit. - # - spoil_digest = None - if len(secret) > self.truncate_size: - if self.use_defaults: - # called from hash() - msg = "Password too long (%s allows at most %d bytes)" % \ - (self.name, self.truncate_size) - raise uh.exc.PasswordSizeError(self.truncate_size, msg=msg) - else: - # called from verify() -- - # We don't want to throw error, or return early, - # as that would let attacker know too much. Instead, we set a - # flag to add some dummy data into the md5 digest, so that - # output won't match truncated version of secret, or anything - # else that's fixed and predictable. - spoil_digest = secret + _DUMMY_BYTES - - # - # append user to secret - # - # Policy appears to be: - # - # * Nothing appended for enable password (user = "") - # - # * ASA: If user present, but secret is >= 28 chars, nothing appended. - # - # * 1-2 byte users not allowed. - # DEVIATION: we're letting them through, and repeating their - # chars ala 3-char user, to simplify testing. - # Could issue warning in the future though. - # - # * 3 byte user has first char repeated, to pad to 4. - # (observed under ASA 9.6, assuming true elsewhere) - # - # * 4 byte users are used directly. - # - # * 5+ byte users are truncated to 4 bytes. - # - user = self.user - if user: - if isinstance(user, unicode): - user = user.encode("utf-8") - if not asa or len(secret) < 28: - secret += repeat_string(user, 4) - - # - # pad / truncate result to limit - # - # While PIX always pads to 16 bytes, ASA increases to 32 bytes IFF - # secret+user > 16 bytes. 
This makes PIX & ASA have different results - # where secret size in range(13,16), and user is present -- - # PIX will truncate to 16, ASA will truncate to 32. - # - if asa and len(secret) > 16: - pad_size = 32 - else: - pad_size = 16 - secret = right_pad_string(secret, pad_size) - - # - # md5 digest - # - if spoil_digest: - # make sure digest won't match truncated version of secret - secret += spoil_digest - digest = md5(secret).digest() - - # - # drop every 4th byte - # NOTE: guessing this was done because it makes output exactly - # 16 bytes, which may have been a general 'char password[]' - # size limit under PIX - # - digest = join_byte_elems(c for i, c in enumerate(digest) if (i + 1) & 3) - - # - # encode using Hash64 - # - return h64.encode_bytes(digest).decode("ascii") - - # NOTE: works, but needs UTs. - # @classmethod - # def same_as_pix(cls, secret, user=""): - # """ - # test whether (secret + user) combination should - # have the same hash under PIX and ASA. - # - # mainly present to help unittests. - # """ - # # see _calc_checksum() above for details of this logic. - # size = len(to_bytes(secret, "utf-8")) - # if user and size < 28: - # size += 4 - # return size < 17 - - #=================================================================== - # eoc - #=================================================================== - - -class cisco_asa(cisco_pix): - """ - This class implements the password hash used by Cisco ASA/PIX 7.0 and newer (2005). - Aside from a different internal algorithm, it's use and format is identical - to the older :class:`cisco_pix` class. - - For passwords less than 13 characters, this should be identical to :class:`!cisco_pix`, - but will generate a different hash for most larger inputs - (See the `Format & Algorithm`_ section for the details). 
- - This class only allows passwords <= 32 bytes, anything larger - will result in a :exc:`~passlib.exc.PasswordSizeError` if passed to :meth:`~cisco_asa.hash`, - and be silently rejected if passed to :meth:`~cisco_asa.verify`. - - .. versionadded:: 1.7 - - .. versionchanged:: 1.7.1 - - Passwords > 32 bytes are now rejected / throw error instead of being silently truncated, - to match Cisco behavior. A number of :ref:`bugs ` were fixed - which caused prior releases to generate unverifiable hashes in certain cases. - """ - #=================================================================== - # class attrs - #=================================================================== - - #-------------------- - # PasswordHash - #-------------------- - name = "cisco_asa" - - #-------------------- - # TruncateMixin - #-------------------- - truncate_size = 32 - - #-------------------- - # cisco_pix - #-------------------- - _is_asa = True - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# type 7 -#============================================================================= -class cisco_type7(uh.GenericHandler): - """ - This class implements the "Type 7" password encoding used by Cisco IOS, - and follows the :ref:`password-hash-api`. - It has a simple 4-5 bit salt, but is nonetheless a reversible encoding - instead of a real hash. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: int - :param salt: - This may be an optional salt integer drawn from ``range(0,16)``. - If omitted, one will be chosen at random. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. 
If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` values that are out of range. - - Note that while this class outputs digests in upper-case hexadecimal, - it will accept lower-case as well. - - This class also provides the following additional method: - - .. automethod:: decode - """ - #=================================================================== - # class attrs - #=================================================================== - - #-------------------- - # PasswordHash - #-------------------- - name = "cisco_type7" - setting_kwds = ("salt",) - - #-------------------- - # GenericHandler - #-------------------- - checksum_chars = uh.UPPER_HEX_CHARS - - #-------------------- - # HasSalt - #-------------------- - - # NOTE: encoding could handle max_salt_value=99, but since key is only 52 - # chars in size, not sure what appropriate behavior is for that edge case. - min_salt_value = 0 - max_salt_value = 52 - - #=================================================================== - # methods - #=================================================================== - @classmethod - def using(cls, salt=None, **kwds): - subcls = super(cisco_type7, cls).using(**kwds) - if salt is not None: - salt = subcls._norm_salt(salt, relaxed=kwds.get("relaxed")) - subcls._generate_salt = staticmethod(lambda: salt) - return subcls - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - if len(hash) < 2: - raise uh.exc.InvalidHashError(cls) - salt = int(hash[:2]) # may throw ValueError - return cls(salt=salt, checksum=hash[2:].upper()) - - def __init__(self, salt=None, **kwds): - super(cisco_type7, self).__init__(**kwds) - if salt is not None: - salt = self._norm_salt(salt) - elif self.use_defaults: - salt = self._generate_salt() - assert self._norm_salt(salt) == salt, "generated invalid salt: %r" % (salt,) - else: - raise TypeError("no 
salt specified") - self.salt = salt - - @classmethod - def _norm_salt(cls, salt, relaxed=False): - """ - validate & normalize salt value. - .. note:: - the salt for this algorithm is an integer 0-52, not a string - """ - if not isinstance(salt, int): - raise uh.exc.ExpectedTypeError(salt, "integer", "salt") - if 0 <= salt <= cls.max_salt_value: - return salt - msg = "salt/offset must be in 0..52 range" - if relaxed: - warn(msg, uh.PasslibHashWarning) - return 0 if salt < 0 else cls.max_salt_value - else: - raise ValueError(msg) - - @staticmethod - def _generate_salt(): - return uh.rng.randint(0, 15) - - def to_string(self): - return "%02d%s" % (self.salt, uascii_to_str(self.checksum)) - - def _calc_checksum(self, secret): - # XXX: no idea what unicode policy is, but all examples are - # 7-bit ascii compatible, so using UTF-8 - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - return hexlify(self._cipher(secret, self.salt)).decode("ascii").upper() - - @classmethod - def decode(cls, hash, encoding="utf-8"): - """decode hash, returning original password. - - :arg hash: encoded password - :param encoding: optional encoding to use (defaults to ``UTF-8``). 
- :returns: password as unicode - """ - self = cls.from_string(hash) - tmp = unhexlify(self.checksum.encode("ascii")) - raw = self._cipher(tmp, self.salt) - return raw.decode(encoding) if encoding else raw - - # type7 uses a xor-based vingere variant, using the following secret key: - _key = u("dsfd;kfoA,.iyewrkldJKDHSUBsgvca69834ncxv9873254k;fg87") - - @classmethod - def _cipher(cls, data, salt): - """xor static key against data - encrypts & decrypts""" - key = cls._key - key_size = len(key) - return join_byte_values( - value ^ ord(key[(salt + idx) % key_size]) - for idx, value in enumerate(iter_byte_values(data)) - ) - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/des_crypt.py b/libs_crutch/contrib/passlib/handlers/des_crypt.py deleted file mode 100644 index 9561ab4..0000000 --- a/libs_crutch/contrib/passlib/handlers/des_crypt.py +++ /dev/null @@ -1,607 +0,0 @@ -"""passlib.handlers.des_crypt - traditional unix (DES) crypt and variants""" -#============================================================================= -# imports -#============================================================================= -# core -import re -import logging; log = logging.getLogger(__name__) -from warnings import warn -# site -# pkg -from passlib.utils import safe_crypt, test_crypt, to_unicode -from passlib.utils.binary import h64, h64big -from passlib.utils.compat import byte_elem_value, u, uascii_to_str, unicode, suppress_cause -from passlib.crypto.des import des_encrypt_int_block -import passlib.utils.handlers as uh -# local -__all__ = [ - "des_crypt", - "bsdi_crypt", - "bigcrypt", - "crypt16", -] - -#============================================================================= -# pure-python backend for des_crypt family -#============================================================================= -_BNULL = 
b'\x00' - -def _crypt_secret_to_key(secret): - """convert secret to 64-bit DES key. - - this only uses the first 8 bytes of the secret, - and discards the high 8th bit of each byte at that. - a null parity bit is inserted after every 7th bit of the output. - """ - # NOTE: this would set the parity bits correctly, - # but des_encrypt_int_block() would just ignore them... - ##return sum(expand_7bit(byte_elem_value(c) & 0x7f) << (56-i*8) - ## for i, c in enumerate(secret[:8])) - return sum((byte_elem_value(c) & 0x7f) << (57-i*8) - for i, c in enumerate(secret[:8])) - -def _raw_des_crypt(secret, salt): - """pure-python backed for des_crypt""" - assert len(salt) == 2 - - # NOTE: some OSes will accept non-HASH64 characters in the salt, - # but what value they assign these characters varies wildy, - # so just rejecting them outright. - # the same goes for single-character salts... - # some OSes duplicate the char, some insert a '.' char, - # and openbsd does (something) which creates an invalid hash. - salt_value = h64.decode_int12(salt) - - # gotta do something - no official policy since this predates unicode - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - assert isinstance(secret, bytes) - - # forbidding NULL char because underlying crypt() rejects them too. 
- if _BNULL in secret: - raise uh.exc.NullPasswordError(des_crypt) - - # convert first 8 bytes of secret string into an integer - key_value = _crypt_secret_to_key(secret) - - # run data through des using input of 0 - result = des_encrypt_int_block(key_value, 0, salt_value, 25) - - # run h64 encode on result - return h64big.encode_int64(result) - -def _bsdi_secret_to_key(secret): - """convert secret to DES key used by bsdi_crypt""" - key_value = _crypt_secret_to_key(secret) - idx = 8 - end = len(secret) - while idx < end: - next = idx + 8 - tmp_value = _crypt_secret_to_key(secret[idx:next]) - key_value = des_encrypt_int_block(key_value, key_value) ^ tmp_value - idx = next - return key_value - -def _raw_bsdi_crypt(secret, rounds, salt): - """pure-python backend for bsdi_crypt""" - - # decode salt - salt_value = h64.decode_int24(salt) - - # gotta do something - no official policy since this predates unicode - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - assert isinstance(secret, bytes) - - # forbidding NULL char because underlying crypt() rejects them too. - if _BNULL in secret: - raise uh.exc.NullPasswordError(bsdi_crypt) - - # convert secret string into an integer - key_value = _bsdi_secret_to_key(secret) - - # run data through des using input of 0 - result = des_encrypt_int_block(key_value, 0, salt_value, rounds) - - # run h64 encode on result - return h64big.encode_int64(result) - -#============================================================================= -# handlers -#============================================================================= -class des_crypt(uh.TruncateMixin, uh.HasManyBackends, uh.HasSalt, uh.GenericHandler): - """This class implements the des-crypt password hash, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. 
- If not specified, one will be autogenerated (this is recommended). - If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :param bool truncate_error: - By default, des_crypt will silently truncate passwords larger than 8 bytes. - Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` - to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. - - .. versionadded:: 1.7 - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - #=================================================================== - # class attrs - #=================================================================== - - #-------------------- - # PasswordHash - #-------------------- - name = "des_crypt" - setting_kwds = ("salt", "truncate_error") - - #-------------------- - # GenericHandler - #-------------------- - checksum_chars = uh.HASH64_CHARS - checksum_size = 11 - - #-------------------- - # HasSalt - #-------------------- - min_salt_size = max_salt_size = 2 - salt_chars = uh.HASH64_CHARS - - #-------------------- - # TruncateMixin - #-------------------- - truncate_size = 8 - - #=================================================================== - # formatting - #=================================================================== - # FORMAT: 2 chars of H64-encoded salt + 11 chars of H64-encoded checksum - - _hash_regex = re.compile(u(r""" - ^ - (?P[./a-z0-9]{2}) - (?P[./a-z0-9]{11})? 
- $"""), re.X|re.I) - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - salt, chk = hash[:2], hash[2:] - return cls(salt=salt, checksum=chk or None) - - def to_string(self): - hash = u("%s%s") % (self.salt, self.checksum) - return uascii_to_str(hash) - - #=================================================================== - # digest calculation - #=================================================================== - def _calc_checksum(self, secret): - # check for truncation (during .hash() calls only) - if self.use_defaults: - self._check_truncate_policy(secret) - - return self._calc_checksum_backend(secret) - - #=================================================================== - # backend - #=================================================================== - backends = ("os_crypt", "builtin") - - #--------------------------------------------------------------- - # os_crypt backend - #--------------------------------------------------------------- - @classmethod - def _load_backend_os_crypt(cls): - if test_crypt("test", 'abgOeLfPimXQo'): - cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) - return True - else: - return False - - def _calc_checksum_os_crypt(self, secret): - # NOTE: we let safe_crypt() encode unicode secret -> utf8; - # no official policy since des-crypt predates unicode - hash = safe_crypt(secret, self.salt) - if hash: - assert hash.startswith(self.salt) and len(hash) == 13 - return hash[2:] - else: - # py3's crypt.crypt() can't handle non-utf8 bytes. - # fallback to builtin alg, which is always available. 
- return self._calc_checksum_builtin(secret) - - #--------------------------------------------------------------- - # builtin backend - #--------------------------------------------------------------- - @classmethod - def _load_backend_builtin(cls): - cls._set_calc_checksum_backend(cls._calc_checksum_builtin) - return True - - def _calc_checksum_builtin(self, secret): - return _raw_des_crypt(secret, self.salt.encode("ascii")).decode("ascii") - - #=================================================================== - # eoc - #=================================================================== - -class bsdi_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): - """This class implements the BSDi-Crypt password hash, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 4 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 5001, must be between 1 and 16777215, inclusive. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - - .. versionchanged:: 1.6 - :meth:`hash` will now issue a warning if an even number of rounds is used - (see :ref:`bsdi-crypt-security-issues` regarding weak DES keys). 
- """ - #=================================================================== - # class attrs - #=================================================================== - #--GenericHandler-- - name = "bsdi_crypt" - setting_kwds = ("salt", "rounds") - checksum_size = 11 - checksum_chars = uh.HASH64_CHARS - - #--HasSalt-- - min_salt_size = max_salt_size = 4 - salt_chars = uh.HASH64_CHARS - - #--HasRounds-- - default_rounds = 5001 - min_rounds = 1 - max_rounds = 16777215 # (1<<24)-1 - rounds_cost = "linear" - - # NOTE: OpenBSD login.conf reports 7250 as minimum allowed rounds, - # but that seems to be an OS policy, not a algorithm limitation. - - #=================================================================== - # parsing - #=================================================================== - _hash_regex = re.compile(u(r""" - ^ - _ - (?P[./a-z0-9]{4}) - (?P[./a-z0-9]{4}) - (?P[./a-z0-9]{11})? - $"""), re.X|re.I) - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - m = cls._hash_regex.match(hash) - if not m: - raise uh.exc.InvalidHashError(cls) - rounds, salt, chk = m.group("rounds", "salt", "chk") - return cls( - rounds=h64.decode_int24(rounds.encode("ascii")), - salt=salt, - checksum=chk, - ) - - def to_string(self): - hash = u("_%s%s%s") % (h64.encode_int24(self.rounds).decode("ascii"), - self.salt, self.checksum) - return uascii_to_str(hash) - - #=================================================================== - # validation - #=================================================================== - - # NOTE: keeping this flag for admin/choose_rounds.py script. - # want to eventually expose rounds logic to that script in better way. - _avoid_even_rounds = True - - @classmethod - def using(cls, **kwds): - subcls = super(bsdi_crypt, cls).using(**kwds) - if not subcls.default_rounds & 1: - # issue warning if caller set an even 'rounds' value. 
- warn("bsdi_crypt rounds should be odd, as even rounds may reveal weak DES keys", - uh.exc.PasslibSecurityWarning) - return subcls - - @classmethod - def _generate_rounds(cls): - rounds = super(bsdi_crypt, cls)._generate_rounds() - # ensure autogenerated rounds are always odd - # NOTE: doing this even for default_rounds so needs_update() doesn't get - # caught in a loop. - # FIXME: this technically might generate a rounds value 1 larger - # than the requested upper bound - but better to err on side of safety. - return rounds|1 - - #=================================================================== - # migration - #=================================================================== - - def _calc_needs_update(self, **kwds): - # mark bsdi_crypt hashes as deprecated if they have even rounds. - if not self.rounds & 1: - return True - # hand off to base implementation - return super(bsdi_crypt, self)._calc_needs_update(**kwds) - - #=================================================================== - # backends - #=================================================================== - backends = ("os_crypt", "builtin") - - #--------------------------------------------------------------- - # os_crypt backend - #--------------------------------------------------------------- - @classmethod - def _load_backend_os_crypt(cls): - if test_crypt("test", '_/...lLDAxARksGCHin.'): - cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) - return True - else: - return False - - def _calc_checksum_os_crypt(self, secret): - config = self.to_string() - hash = safe_crypt(secret, config) - if hash: - assert hash.startswith(config[:9]) and len(hash) == 20 - return hash[-11:] - else: - # py3's crypt.crypt() can't handle non-utf8 bytes. - # fallback to builtin alg, which is always available. 
- return self._calc_checksum_builtin(secret) - - #--------------------------------------------------------------- - # builtin backend - #--------------------------------------------------------------- - @classmethod - def _load_backend_builtin(cls): - cls._set_calc_checksum_backend(cls._calc_checksum_builtin) - return True - - def _calc_checksum_builtin(self, secret): - return _raw_bsdi_crypt(secret, self.rounds, self.salt.encode("ascii")).decode("ascii") - - #=================================================================== - # eoc - #=================================================================== - -class bigcrypt(uh.HasSalt, uh.GenericHandler): - """This class implements the BigCrypt password hash, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 22 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. - - .. 
versionadded:: 1.6 - """ - #=================================================================== - # class attrs - #=================================================================== - #--GenericHandler-- - name = "bigcrypt" - setting_kwds = ("salt",) - checksum_chars = uh.HASH64_CHARS - # NOTE: checksum chars must be multiple of 11 - - #--HasSalt-- - min_salt_size = max_salt_size = 2 - salt_chars = uh.HASH64_CHARS - - #=================================================================== - # internal helpers - #=================================================================== - _hash_regex = re.compile(u(r""" - ^ - (?P[./a-z0-9]{2}) - (?P([./a-z0-9]{11})+)? - $"""), re.X|re.I) - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - m = cls._hash_regex.match(hash) - if not m: - raise uh.exc.InvalidHashError(cls) - salt, chk = m.group("salt", "chk") - return cls(salt=salt, checksum=chk) - - def to_string(self): - hash = u("%s%s") % (self.salt, self.checksum) - return uascii_to_str(hash) - - def _norm_checksum(self, checksum, relaxed=False): - checksum = super(bigcrypt, self)._norm_checksum(checksum, relaxed=relaxed) - if len(checksum) % 11: - raise uh.exc.InvalidHashError(self) - return checksum - - #=================================================================== - # backend - #=================================================================== - def _calc_checksum(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - chk = _raw_des_crypt(secret, self.salt.encode("ascii")) - idx = 8 - end = len(secret) - while idx < end: - next = idx + 8 - chk += _raw_des_crypt(secret[idx:next], chk[-11:-9]) - idx = next - return chk.decode("ascii") - - #=================================================================== - # eoc - #=================================================================== - -class crypt16(uh.TruncateMixin, uh.HasSalt, uh.GenericHandler): - """This class implements the crypt16 password 
hash, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :param bool truncate_error: - By default, crypt16 will silently truncate passwords larger than 16 bytes. - Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` - to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. - - .. versionadded:: 1.7 - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - #=================================================================== - # class attrs - #=================================================================== - - #-------------------- - # PasswordHash - #-------------------- - name = "crypt16" - setting_kwds = ("salt", "truncate_error") - - #-------------------- - # GenericHandler - #-------------------- - checksum_size = 22 - checksum_chars = uh.HASH64_CHARS - - #-------------------- - # HasSalt - #-------------------- - min_salt_size = max_salt_size = 2 - salt_chars = uh.HASH64_CHARS - - #-------------------- - # TruncateMixin - #-------------------- - truncate_size = 16 - - #=================================================================== - # internal helpers - #=================================================================== - _hash_regex = re.compile(u(r""" - ^ - (?P[./a-z0-9]{2}) - (?P[./a-z0-9]{22})? 
- $"""), re.X|re.I) - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - m = cls._hash_regex.match(hash) - if not m: - raise uh.exc.InvalidHashError(cls) - salt, chk = m.group("salt", "chk") - return cls(salt=salt, checksum=chk) - - def to_string(self): - hash = u("%s%s") % (self.salt, self.checksum) - return uascii_to_str(hash) - - #=================================================================== - # backend - #=================================================================== - def _calc_checksum(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - - # check for truncation (during .hash() calls only) - if self.use_defaults: - self._check_truncate_policy(secret) - - # parse salt value - try: - salt_value = h64.decode_int12(self.salt.encode("ascii")) - except ValueError: # pragma: no cover - caught by class - raise suppress_cause(ValueError("invalid chars in salt")) - - # convert first 8 byts of secret string into an integer, - key1 = _crypt_secret_to_key(secret) - - # run data through des using input of 0 - result1 = des_encrypt_int_block(key1, 0, salt_value, 20) - - # convert next 8 bytes of secret string into integer (key=0 if secret < 8 chars) - key2 = _crypt_secret_to_key(secret[8:16]) - - # run data through des using input of 0 - result2 = des_encrypt_int_block(key2, 0, salt_value, 5) - - # done - chk = h64big.encode_int64(result1) + h64big.encode_int64(result2) - return chk.decode("ascii") - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/digests.py b/libs_crutch/contrib/passlib/handlers/digests.py deleted file mode 100644 index 3761051..0000000 --- 
a/libs_crutch/contrib/passlib/handlers/digests.py +++ /dev/null @@ -1,146 +0,0 @@ -"""passlib.handlers.digests - plain hash digests -""" -#============================================================================= -# imports -#============================================================================= -# core -import hashlib -import logging; log = logging.getLogger(__name__) -# site -# pkg -from passlib.utils import to_native_str, to_bytes, render_bytes, consteq -from passlib.utils.compat import unicode, str_to_uascii -import passlib.utils.handlers as uh -from passlib.crypto.digest import lookup_hash -# local -__all__ = [ - "create_hex_hash", - "hex_md4", - "hex_md5", - "hex_sha1", - "hex_sha256", - "hex_sha512", -] - -#============================================================================= -# helpers for hexadecimal hashes -#============================================================================= -class HexDigestHash(uh.StaticHandler): - """this provides a template for supporting passwords stored as plain hexadecimal hashes""" - #=================================================================== - # class attrs - #=================================================================== - _hash_func = None # hash function to use - filled in by create_hex_hash() - checksum_size = None # filled in by create_hex_hash() - checksum_chars = uh.HEX_CHARS - - #=================================================================== - # methods - #=================================================================== - @classmethod - def _norm_hash(cls, hash): - return hash.lower() - - def _calc_checksum(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - return str_to_uascii(self._hash_func(secret).hexdigest()) - - #=================================================================== - # eoc - #=================================================================== - -def create_hex_hash(digest, module=__name__): - # NOTE: could set 
digest_name=hash.name for cpython, but not for some other platforms. - info = lookup_hash(digest) - name = "hex_" + info.name - return type(name, (HexDigestHash,), dict( - name=name, - __module__=module, # so ABCMeta won't clobber it - _hash_func=staticmethod(info.const), # sometimes it's a function, sometimes not. so wrap it. - checksum_size=info.digest_size*2, - __doc__="""This class implements a plain hexadecimal %s hash, and follows the :ref:`password-hash-api`. - -It supports no optional or contextual keywords. -""" % (info.name,) - )) - -#============================================================================= -# predefined handlers -#============================================================================= -hex_md4 = create_hex_hash("md4") -hex_md5 = create_hex_hash("md5") -hex_md5.django_name = "unsalted_md5" -hex_sha1 = create_hex_hash("sha1") -hex_sha256 = create_hex_hash("sha256") -hex_sha512 = create_hex_hash("sha512") - -#============================================================================= -# htdigest -#============================================================================= -class htdigest(uh.MinimalHandler): - """htdigest hash function. - - .. todo:: - document this hash - """ - name = "htdigest" - setting_kwds = () - context_kwds = ("user", "realm", "encoding") - default_encoding = "utf-8" - - @classmethod - def hash(cls, secret, user, realm, encoding=None): - # NOTE: this was deliberately written so that raw bytes are passed through - # unchanged, the encoding kwd is only used to handle unicode values. 
- if not encoding: - encoding = cls.default_encoding - uh.validate_secret(secret) - if isinstance(secret, unicode): - secret = secret.encode(encoding) - user = to_bytes(user, encoding, "user") - realm = to_bytes(realm, encoding, "realm") - data = render_bytes("%s:%s:%s", user, realm, secret) - return hashlib.md5(data).hexdigest() - - @classmethod - def _norm_hash(cls, hash): - """normalize hash to native string, and validate it""" - hash = to_native_str(hash, param="hash") - if len(hash) != 32: - raise uh.exc.MalformedHashError(cls, "wrong size") - for char in hash: - if char not in uh.LC_HEX_CHARS: - raise uh.exc.MalformedHashError(cls, "invalid chars in hash") - return hash - - @classmethod - def verify(cls, secret, hash, user, realm, encoding="utf-8"): - hash = cls._norm_hash(hash) - other = cls.hash(secret, user, realm, encoding) - return consteq(hash, other) - - @classmethod - def identify(cls, hash): - try: - cls._norm_hash(hash) - except ValueError: - return False - return True - - @uh.deprecated_method(deprecated="1.7", removed="2.0") - @classmethod - def genconfig(cls): - return cls.hash("", "", "") - - @uh.deprecated_method(deprecated="1.7", removed="2.0") - @classmethod - def genhash(cls, secret, config, user, realm, encoding=None): - # NOTE: 'config' is ignored, as this hash has no salting / other configuration. - # just have to make sure it's valid. 
- cls._norm_hash(config) - return cls.hash(secret, user, realm, encoding) - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/django.py b/libs_crutch/contrib/passlib/handlers/django.py deleted file mode 100644 index 6dd499a..0000000 --- a/libs_crutch/contrib/passlib/handlers/django.py +++ /dev/null @@ -1,512 +0,0 @@ -"""passlib.handlers.django- Django password hash support""" -#============================================================================= -# imports -#============================================================================= -# core -from base64 import b64encode -from binascii import hexlify -from hashlib import md5, sha1, sha256 -import logging; log = logging.getLogger(__name__) -# site -# pkg -from passlib.handlers.bcrypt import _wrapped_bcrypt -from passlib.hash import argon2, bcrypt, pbkdf2_sha1, pbkdf2_sha256 -from passlib.utils import to_unicode, rng, getrandstr -from passlib.utils.binary import BASE64_CHARS -from passlib.utils.compat import str_to_uascii, uascii_to_str, unicode, u -from passlib.crypto.digest import pbkdf2_hmac -import passlib.utils.handlers as uh -# local -__all__ = [ - "django_salted_sha1", - "django_salted_md5", - "django_bcrypt", - "django_pbkdf2_sha1", - "django_pbkdf2_sha256", - "django_argon2", - "django_des_crypt", - "django_disabled", -] - -#============================================================================= -# lazy imports & constants -#============================================================================= - -# imported by django_des_crypt._calc_checksum() -des_crypt = None - -def _import_des_crypt(): - global des_crypt - if des_crypt is None: - from passlib.hash import des_crypt - return des_crypt - -# django 1.4's salt charset -SALT_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' - 
-#============================================================================= -# salted hashes -#============================================================================= -class DjangoSaltedHash(uh.HasSalt, uh.GenericHandler): - """base class providing common code for django hashes""" - # name, ident, checksum_size must be set by subclass. - # ident must include "$" suffix. - setting_kwds = ("salt", "salt_size") - - # NOTE: django 1.0-1.3 would accept empty salt strings. - # django 1.4 won't, but this appears to be regression - # (https://code.djangoproject.com/ticket/18144) - # so presumably it will be fixed in a later release. - default_salt_size = 12 - max_salt_size = None - salt_chars = SALT_CHARS - - checksum_chars = uh.LOWER_HEX_CHARS - - @classmethod - def from_string(cls, hash): - salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls) - return cls(salt=salt, checksum=chk) - - def to_string(self): - return uh.render_mc2(self.ident, self.salt, self.checksum) - -# NOTE: only used by PBKDF2 -class DjangoVariableHash(uh.HasRounds, DjangoSaltedHash): - """base class providing common code for django hashes w/ variable rounds""" - setting_kwds = DjangoSaltedHash.setting_kwds + ("rounds",) - - min_rounds = 1 - - @classmethod - def from_string(cls, hash): - rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) - return cls(rounds=rounds, salt=salt, checksum=chk) - - def to_string(self): - return uh.render_mc3(self.ident, self.rounds, self.salt, self.checksum) - -class django_salted_sha1(DjangoSaltedHash): - """This class implements Django's Salted SHA1 hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and uses a single round of SHA1. - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, a 12 character one will be autogenerated (this is recommended). 
- If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. - - :type salt_size: int - :param salt_size: - Optional number of characters to use when autogenerating new salts. - Defaults to 12, but can be any positive value. - - This should be compatible with Django 1.4's :class:`!SHA1PasswordHasher` class. - - .. versionchanged: 1.6 - This class now generates 12-character salts instead of 5, - and generated salts uses the character range ``[0-9a-zA-Z]`` instead of - the ``[0-9a-f]``. This is to be compatible with how Django >= 1.4 - generates these hashes; but hashes generated in this manner will still be - correctly interpreted by earlier versions of Django. - """ - name = "django_salted_sha1" - django_name = "sha1" - ident = u("sha1$") - checksum_size = 40 - - def _calc_checksum(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - return str_to_uascii(sha1(self.salt.encode("ascii") + secret).hexdigest()) - -class django_salted_md5(DjangoSaltedHash): - """This class implements Django's Salted MD5 hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and uses a single round of MD5. - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, a 12 character one will be autogenerated (this is recommended). - If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. - - :type salt_size: int - :param salt_size: - Optional number of characters to use when autogenerating new salts. - Defaults to 12, but can be any positive value. - - This should be compatible with the hashes generated by - Django 1.4's :class:`!MD5PasswordHasher` class. - - .. 
versionchanged: 1.6 - This class now generates 12-character salts instead of 5, - and generated salts uses the character range ``[0-9a-zA-Z]`` instead of - the ``[0-9a-f]``. This is to be compatible with how Django >= 1.4 - generates these hashes; but hashes generated in this manner will still be - correctly interpreted by earlier versions of Django. - """ - name = "django_salted_md5" - django_name = "md5" - ident = u("md5$") - checksum_size = 32 - - def _calc_checksum(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - return str_to_uascii(md5(self.salt.encode("ascii") + secret).hexdigest()) - -#============================================================================= -# BCrypt -#============================================================================= - -django_bcrypt = uh.PrefixWrapper("django_bcrypt", bcrypt, - prefix=u('bcrypt$'), ident=u("bcrypt$"), - # NOTE: this docstring is duplicated in the docs, since sphinx - # seems to be having trouble reading it via autodata:: - doc="""This class implements Django 1.4's BCrypt wrapper, and follows the :ref:`password-hash-api`. - - This is identical to :class:`!bcrypt` itself, but with - the Django-specific prefix ``"bcrypt$"`` prepended. - - See :doc:`/lib/passlib.hash.bcrypt` for more details, - the usage and behavior is identical. - - This should be compatible with the hashes generated by - Django 1.4's :class:`!BCryptPasswordHasher` class. - - .. versionadded:: 1.6 - """) -django_bcrypt.django_name = "bcrypt" -django_bcrypt._using_clone_attrs += ("django_name",) - -#============================================================================= -# BCRYPT + SHA256 -#============================================================================= - -class django_bcrypt_sha256(_wrapped_bcrypt): - """This class implements Django 1.6's Bcrypt+SHA256 hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. 
- - While the algorithm and format is somewhat different, - the api and options for this hash are identical to :class:`!bcrypt` itself, - see :doc:`bcrypt ` for more details. - - .. versionadded:: 1.6.2 - """ - name = "django_bcrypt_sha256" - django_name = "bcrypt_sha256" - _digest = sha256 - - # sample hash: - # bcrypt_sha256$$2a$06$/3OeRpbOf8/l6nPPRdZPp.nRiyYqPobEZGdNRBWihQhiFDh1ws1tu - - # XXX: we can't use .ident attr due to bcrypt code using it. - # working around that via django_prefix - django_prefix = u('bcrypt_sha256$') - - @classmethod - def identify(cls, hash): - hash = uh.to_unicode_for_identify(hash) - if not hash: - return False - return hash.startswith(cls.django_prefix) - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - if not hash.startswith(cls.django_prefix): - raise uh.exc.InvalidHashError(cls) - bhash = hash[len(cls.django_prefix):] - if not bhash.startswith("$2"): - raise uh.exc.MalformedHashError(cls) - return super(django_bcrypt_sha256, cls).from_string(bhash) - - def to_string(self): - bhash = super(django_bcrypt_sha256, self).to_string() - return uascii_to_str(self.django_prefix) + bhash - - def _calc_checksum(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - secret = hexlify(self._digest(secret).digest()) - return super(django_bcrypt_sha256, self)._calc_checksum(secret) - -#============================================================================= -# PBKDF2 variants -#============================================================================= - -class django_pbkdf2_sha256(DjangoVariableHash): - """This class implements Django's PBKDF2-HMAC-SHA256 hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. 
- If not specified, a 12 character one will be autogenerated (this is recommended). - If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. - - :type salt_size: int - :param salt_size: - Optional number of characters to use when autogenerating new salts. - Defaults to 12, but can be any positive value. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 29000, but must be within ``range(1,1<<32)``. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - This should be compatible with the hashes generated by - Django 1.4's :class:`!PBKDF2PasswordHasher` class. - - .. versionadded:: 1.6 - """ - name = "django_pbkdf2_sha256" - django_name = "pbkdf2_sha256" - ident = u('pbkdf2_sha256$') - min_salt_size = 1 - max_rounds = 0xffffffff # setting at 32-bit limit for now - checksum_chars = uh.PADDED_BASE64_CHARS - checksum_size = 44 # 32 bytes -> base64 - default_rounds = pbkdf2_sha256.default_rounds # NOTE: django 1.6 uses 12000 - _digest = "sha256" - - def _calc_checksum(self, secret): - # NOTE: secret & salt will be encoded using UTF-8 by pbkdf2_hmac() - hash = pbkdf2_hmac(self._digest, secret, self.salt, self.rounds) - return b64encode(hash).rstrip().decode("ascii") - -class django_pbkdf2_sha1(django_pbkdf2_sha256): - """This class implements Django's PBKDF2-HMAC-SHA1 hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. 
- If not specified, a 12 character one will be autogenerated (this is recommended). - If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. - - :type salt_size: int - :param salt_size: - Optional number of characters to use when autogenerating new salts. - Defaults to 12, but can be any positive value. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 131000, but must be within ``range(1,1<<32)``. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - This should be compatible with the hashes generated by - Django 1.4's :class:`!PBKDF2SHA1PasswordHasher` class. - - .. versionadded:: 1.6 - """ - name = "django_pbkdf2_sha1" - django_name = "pbkdf2_sha1" - ident = u('pbkdf2_sha1$') - checksum_size = 28 # 20 bytes -> base64 - default_rounds = pbkdf2_sha1.default_rounds # NOTE: django 1.6 uses 12000 - _digest = "sha1" - -#============================================================================= -# Argon2 -#============================================================================= - -# NOTE: as of 2019-11-11, Django's Argon2PasswordHasher only supports Type I; -# so limiting this to ensure that as well. - -django_argon2 = uh.PrefixWrapper( - name="django_argon2", - wrapped=argon2.using(type="I"), - prefix=u('argon2'), - ident=u('argon2$argon2i$'), - # NOTE: this docstring is duplicated in the docs, since sphinx - # seems to be having trouble reading it via autodata:: - doc="""This class implements Django 1.10's Argon2 wrapper, and follows the :ref:`password-hash-api`. 
- - This is identical to :class:`!argon2` itself, but with - the Django-specific prefix ``"argon2$"`` prepended. - - See :doc:`argon2 ` for more details, - the usage and behavior is identical. - - This should be compatible with the hashes generated by - Django 1.10's :class:`!Argon2PasswordHasher` class. - - .. versionadded:: 1.7 - """) -django_argon2.django_name = "argon2" -django_argon2._using_clone_attrs += ("django_name",) - -#============================================================================= -# DES -#============================================================================= -class django_des_crypt(uh.TruncateMixin, uh.HasSalt, uh.GenericHandler): - """This class implements Django's :class:`des_crypt` wrapper, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt. - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :param bool truncate_error: - By default, django_des_crypt will silently truncate passwords larger than 8 bytes. - Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` - to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. - - .. versionadded:: 1.7 - - This should be compatible with the hashes generated by - Django 1.4's :class:`!CryptPasswordHasher` class. - Note that Django only supports this hash on Unix systems - (though :class:`!django_des_crypt` is available cross-platform - under Passlib). - - .. versionchanged:: 1.6 - This class will now accept hashes with empty salt strings, - since Django 1.4 generates them this way. 
- """ - name = "django_des_crypt" - django_name = "crypt" - setting_kwds = ("salt", "salt_size", "truncate_error") - ident = u("crypt$") - checksum_chars = salt_chars = uh.HASH64_CHARS - checksum_size = 11 - min_salt_size = default_salt_size = 2 - truncate_size = 8 - - # NOTE: regarding duplicate salt field: - # - # django 1.0 had a "crypt$$" hash format, - # used [a-z0-9] to generate a 5 char salt, stored it in salt1, - # duplicated the first two chars of salt1 as salt2. - # it would throw an error if salt1 was empty. - # - # django 1.4 started generating 2 char salt using the full alphabet, - # left salt1 empty, and only paid attention to salt2. - # - # in order to be compatible with django 1.0, the hashes generated - # by this function will always include salt1, unless the following - # class-level field is disabled (mainly used for testing) - use_duplicate_salt = True - - @classmethod - def from_string(cls, hash): - salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls) - if chk: - # chk should be full des_crypt hash - if not salt: - # django 1.4 always uses empty salt field, - # so extract salt from des_crypt hash - salt = chk[:2] - elif salt[:2] != chk[:2]: - # django 1.0 stored 5 chars in salt field, and duplicated - # the first two chars in . we keep the full salt, - # but make sure the first two chars match as sanity check. 
- raise uh.exc.MalformedHashError(cls, - "first two digits of salt and checksum must match") - # in all cases, strip salt chars from - chk = chk[2:] - return cls(salt=salt, checksum=chk) - - def to_string(self): - salt = self.salt - chk = salt[:2] + self.checksum - if self.use_duplicate_salt: - # filling in salt field, so that we're compatible with django 1.0 - return uh.render_mc2(self.ident, salt, chk) - else: - # django 1.4+ style hash - return uh.render_mc2(self.ident, "", chk) - - def _calc_checksum(self, secret): - # NOTE: we lazily import des_crypt, - # since most django deploys won't use django_des_crypt - global des_crypt - if des_crypt is None: - _import_des_crypt() - # check for truncation (during .hash() calls only) - if self.use_defaults: - self._check_truncate_policy(secret) - return des_crypt(salt=self.salt[:2])._calc_checksum(secret) - -class django_disabled(uh.ifc.DisabledHash, uh.StaticHandler): - """This class provides disabled password behavior for Django, and follows the :ref:`password-hash-api`. - - This class does not implement a hash, but instead - claims the special hash string ``"!"`` which Django uses - to indicate an account's password has been disabled. - - * newly encrypted passwords will hash to ``"!"``. - * it rejects all passwords. - - .. note:: - - Django 1.6 prepends a randomly generated 40-char alphanumeric string - to each unusuable password. This class recognizes such strings, - but for backwards compatibility, still returns ``"!"``. - - See ``_ for why - Django appends an alphanumeric string. - - .. versionchanged:: 1.6.2 added Django 1.6 support - - .. versionchanged:: 1.7 started appending an alphanumeric string. - """ - name = "django_disabled" - _hash_prefix = u("!") - suffix_length = 40 - - # XXX: move this to StaticHandler, or wherever _hash_prefix is being used? 
- @classmethod - def identify(cls, hash): - hash = uh.to_unicode_for_identify(hash) - return hash.startswith(cls._hash_prefix) - - def _calc_checksum(self, secret): - # generate random suffix to match django's behavior - return getrandstr(rng, BASE64_CHARS[:-2], self.suffix_length) - - @classmethod - def verify(cls, secret, hash): - uh.validate_secret(secret) - if not cls.identify(hash): - raise uh.exc.InvalidHashError(cls) - return False - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/fshp.py b/libs_crutch/contrib/passlib/handlers/fshp.py deleted file mode 100644 index db13e74..0000000 --- a/libs_crutch/contrib/passlib/handlers/fshp.py +++ /dev/null @@ -1,214 +0,0 @@ -"""passlib.handlers.fshp -""" - -#============================================================================= -# imports -#============================================================================= -# core -from base64 import b64encode, b64decode -import re -import logging; log = logging.getLogger(__name__) -# site -# pkg -from passlib.utils import to_unicode -import passlib.utils.handlers as uh -from passlib.utils.compat import bascii_to_str, iteritems, u,\ - unicode -from passlib.crypto.digest import pbkdf1 -# local -__all__ = [ - 'fshp', -] -#============================================================================= -# sha1-crypt -#============================================================================= -class fshp(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): - """This class implements the FSHP password hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :param salt: - Optional raw salt string. 
- If not specified, one will be autogenerated (this is recommended). - - :param salt_size: - Optional number of bytes to use when autogenerating new salts. - Defaults to 16 bytes, but can be any non-negative value. - - :param rounds: - Optional number of rounds to use. - Defaults to 480000, must be between 1 and 4294967295, inclusive. - - :param variant: - Optionally specifies variant of FSHP to use. - - * ``0`` - uses SHA-1 digest (deprecated). - * ``1`` - uses SHA-2/256 digest (default). - * ``2`` - uses SHA-2/384 digest. - * ``3`` - uses SHA-2/512 digest. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - - #=================================================================== - # class attrs - #=================================================================== - #--GenericHandler-- - name = "fshp" - setting_kwds = ("salt", "salt_size", "rounds", "variant") - checksum_chars = uh.PADDED_BASE64_CHARS - ident = u("{FSHP") - # checksum_size is property() that depends on variant - - #--HasRawSalt-- - default_salt_size = 16 # current passlib default, FSHP uses 8 - max_salt_size = None - - #--HasRounds-- - # FIXME: should probably use different default rounds - # based on the variant. setting for default variant (sha256) for now. 
- default_rounds = 480000 # current passlib default, FSHP uses 4096 - min_rounds = 1 # set by FSHP - max_rounds = 4294967295 # 32-bit integer limit - not set by FSHP - rounds_cost = "linear" - - #--variants-- - default_variant = 1 - _variant_info = { - # variant: (hash name, digest size) - 0: ("sha1", 20), - 1: ("sha256", 32), - 2: ("sha384", 48), - 3: ("sha512", 64), - } - _variant_aliases = dict( - [(unicode(k),k) for k in _variant_info] + - [(v[0],k) for k,v in iteritems(_variant_info)] - ) - - #=================================================================== - # configuration - #=================================================================== - @classmethod - def using(cls, variant=None, **kwds): - subcls = super(fshp, cls).using(**kwds) - if variant is not None: - subcls.default_variant = cls._norm_variant(variant) - return subcls - - #=================================================================== - # instance attrs - #=================================================================== - variant = None - - #=================================================================== - # init - #=================================================================== - def __init__(self, variant=None, **kwds): - # NOTE: variant must be set first, since it controls checksum size, etc. 
- self.use_defaults = kwds.get("use_defaults") # load this early - if variant is not None: - variant = self._norm_variant(variant) - elif self.use_defaults: - variant = self.default_variant - assert self._norm_variant(variant) == variant, "invalid default variant: %r" % (variant,) - else: - raise TypeError("no variant specified") - self.variant = variant - super(fshp, self).__init__(**kwds) - - @classmethod - def _norm_variant(cls, variant): - if isinstance(variant, bytes): - variant = variant.decode("ascii") - if isinstance(variant, unicode): - try: - variant = cls._variant_aliases[variant] - except KeyError: - raise ValueError("invalid fshp variant") - if not isinstance(variant, int): - raise TypeError("fshp variant must be int or known alias") - if variant not in cls._variant_info: - raise ValueError("invalid fshp variant") - return variant - - @property - def checksum_alg(self): - return self._variant_info[self.variant][0] - - @property - def checksum_size(self): - return self._variant_info[self.variant][1] - - #=================================================================== - # formatting - #=================================================================== - - _hash_regex = re.compile(u(r""" - ^ - \{FSHP - (\d+)\| # variant - (\d+)\| # salt size - (\d+)\} # rounds - ([a-zA-Z0-9+/]+={0,3}) # digest - $"""), re.X) - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - m = cls._hash_regex.match(hash) - if not m: - raise uh.exc.InvalidHashError(cls) - variant, salt_size, rounds, data = m.group(1,2,3,4) - variant = int(variant) - salt_size = int(salt_size) - rounds = int(rounds) - try: - data = b64decode(data.encode("ascii")) - except TypeError: - raise uh.exc.MalformedHashError(cls) - salt = data[:salt_size] - chk = data[salt_size:] - return cls(salt=salt, checksum=chk, rounds=rounds, variant=variant) - - def to_string(self): - chk = self.checksum - salt = self.salt - data = bascii_to_str(b64encode(salt+chk)) - return 
"{FSHP%d|%d|%d}%s" % (self.variant, len(salt), self.rounds, data) - - #=================================================================== - # backend - #=================================================================== - - def _calc_checksum(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - # NOTE: for some reason, FSHP uses pbkdf1 with password & salt reversed. - # this has only a minimal impact on security, - # but it is worth noting this deviation. - return pbkdf1( - digest=self.checksum_alg, - secret=self.salt, - salt=secret, - rounds=self.rounds, - keylen=self.checksum_size, - ) - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/ldap_digests.py b/libs_crutch/contrib/passlib/handlers/ldap_digests.py deleted file mode 100644 index 17ab774..0000000 --- a/libs_crutch/contrib/passlib/handlers/ldap_digests.py +++ /dev/null @@ -1,272 +0,0 @@ -"""passlib.handlers.digests - plain hash digests -""" -#============================================================================= -# imports -#============================================================================= -# core -from base64 import b64encode, b64decode -from hashlib import md5, sha1 -import logging; log = logging.getLogger(__name__) -import re -# site -# pkg -from passlib.handlers.misc import plaintext -from passlib.utils import unix_crypt_schemes, to_unicode -from passlib.utils.compat import uascii_to_str, unicode, u -from passlib.utils.decor import classproperty -import passlib.utils.handlers as uh -# local -__all__ = [ - "ldap_plaintext", - "ldap_md5", - "ldap_sha1", - "ldap_salted_md5", - "ldap_salted_sha1", - - ##"get_active_ldap_crypt_schemes", - 
"ldap_des_crypt", - "ldap_bsdi_crypt", - "ldap_md5_crypt", - "ldap_sha1_crypt", - "ldap_bcrypt", - "ldap_sha256_crypt", - "ldap_sha512_crypt", -] - -#============================================================================= -# ldap helpers -#============================================================================= -class _Base64DigestHelper(uh.StaticHandler): - """helper for ldap_md5 / ldap_sha1""" - # XXX: could combine this with hex digests in digests.py - - ident = None # required - prefix identifier - _hash_func = None # required - hash function - _hash_regex = None # required - regexp to recognize hash - checksum_chars = uh.PADDED_BASE64_CHARS - - @classproperty - def _hash_prefix(cls): - """tell StaticHandler to strip ident from checksum""" - return cls.ident - - def _calc_checksum(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - chk = self._hash_func(secret).digest() - return b64encode(chk).decode("ascii") - -class _SaltedBase64DigestHelper(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): - """helper for ldap_salted_md5 / ldap_salted_sha1""" - setting_kwds = ("salt", "salt_size") - checksum_chars = uh.PADDED_BASE64_CHARS - - ident = None # required - prefix identifier - _hash_func = None # required - hash function - _hash_regex = None # required - regexp to recognize hash - min_salt_size = max_salt_size = 4 - - # NOTE: openldap implementation uses 4 byte salt, - # but it's been reported (issue 30) that some servers use larger salts. - # the semi-related rfc3112 recommends support for up to 16 byte salts. 
- min_salt_size = 4 - default_salt_size = 4 - max_salt_size = 16 - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - m = cls._hash_regex.match(hash) - if not m: - raise uh.exc.InvalidHashError(cls) - try: - data = b64decode(m.group("tmp").encode("ascii")) - except TypeError: - raise uh.exc.MalformedHashError(cls) - cs = cls.checksum_size - assert cs - return cls(checksum=data[:cs], salt=data[cs:]) - - def to_string(self): - data = self.checksum + self.salt - hash = self.ident + b64encode(data).decode("ascii") - return uascii_to_str(hash) - - def _calc_checksum(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - return self._hash_func(secret + self.salt).digest() - -#============================================================================= -# implementations -#============================================================================= -class ldap_md5(_Base64DigestHelper): - """This class stores passwords using LDAP's plain MD5 format, and follows the :ref:`password-hash-api`. - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods have no optional keywords. - """ - name = "ldap_md5" - ident = u("{MD5}") - _hash_func = md5 - _hash_regex = re.compile(u(r"^\{MD5\}(?P[+/a-zA-Z0-9]{22}==)$")) - -class ldap_sha1(_Base64DigestHelper): - """This class stores passwords using LDAP's plain SHA1 format, and follows the :ref:`password-hash-api`. - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods have no optional keywords. - """ - name = "ldap_sha1" - ident = u("{SHA}") - _hash_func = sha1 - _hash_regex = re.compile(u(r"^\{SHA\}(?P[+/a-zA-Z0-9]{27}=)$")) - -class ldap_salted_md5(_SaltedBase64DigestHelper): - """This class stores passwords using LDAP's salted MD5 format, and follows the :ref:`password-hash-api`. - - It supports a 4-16 byte salt. 
- - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: bytes - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it may be any 4-16 byte string. - - :type salt_size: int - :param salt_size: - Optional number of bytes to use when autogenerating new salts. - Defaults to 4 bytes for compatibility with the LDAP spec, - but some systems use larger salts, and Passlib supports - any value between 4-16. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. - - .. versionadded:: 1.6 - - .. versionchanged:: 1.6 - This format now supports variable length salts, instead of a fix 4 bytes. - """ - name = "ldap_salted_md5" - ident = u("{SMD5}") - checksum_size = 16 - _hash_func = md5 - _hash_regex = re.compile(u(r"^\{SMD5\}(?P[+/a-zA-Z0-9]{27,}={0,2})$")) - -class ldap_salted_sha1(_SaltedBase64DigestHelper): - """This class stores passwords using LDAP's salted SHA1 format, and follows the :ref:`password-hash-api`. - - It supports a 4-16 byte salt. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: bytes - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it may be any 4-16 byte string. - - :type salt_size: int - :param salt_size: - Optional number of bytes to use when autogenerating new salts. - Defaults to 4 bytes for compatibility with the LDAP spec, - but some systems use larger salts, and Passlib supports - any value between 4-16. 
- - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. - - .. versionadded:: 1.6 - - .. versionchanged:: 1.6 - This format now supports variable length salts, instead of a fix 4 bytes. - """ - name = "ldap_salted_sha1" - ident = u("{SSHA}") - checksum_size = 20 - _hash_func = sha1 - _hash_regex = re.compile(u(r"^\{SSHA\}(?P[+/a-zA-Z0-9]{32,}={0,2})$")) - -class ldap_plaintext(plaintext): - """This class stores passwords in plaintext, and follows the :ref:`password-hash-api`. - - This class acts much like the generic :class:`!passlib.hash.plaintext` handler, - except that it will identify a hash only if it does NOT begin with the ``{XXX}`` identifier prefix - used by RFC2307 passwords. - - The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the - following additional contextual keyword: - - :type encoding: str - :param encoding: - This controls the character encoding to use (defaults to ``utf-8``). - - This encoding will be used to encode :class:`!unicode` passwords - under Python 2, and decode :class:`!bytes` hashes under Python 3. - - .. versionchanged:: 1.6 - The ``encoding`` keyword was added. - """ - # NOTE: this subclasses plaintext, since all it does differently - # is override identify() - - name = "ldap_plaintext" - _2307_pat = re.compile(u(r"^\{\w+\}.*$")) - - @uh.deprecated_method(deprecated="1.7", removed="2.0") - @classmethod - def genconfig(cls): - # Overridding plaintext.genconfig() since it returns "", - # but have to return non-empty value due to identify() below - return "!" 
- - @classmethod - def identify(cls, hash): - # NOTE: identifies all strings EXCEPT those with {XXX} prefix - hash = uh.to_unicode_for_identify(hash) - return bool(hash) and cls._2307_pat.match(hash) is None - -#============================================================================= -# {CRYPT} wrappers -# the following are wrappers around the base crypt algorithms, -# which add the ldap required {CRYPT} prefix -#============================================================================= -ldap_crypt_schemes = [ 'ldap_' + name for name in unix_crypt_schemes ] - -def _init_ldap_crypt_handlers(): - # NOTE: I don't like to implicitly modify globals() like this, - # but don't want to write out all these handlers out either :) - g = globals() - for wname in unix_crypt_schemes: - name = 'ldap_' + wname - g[name] = uh.PrefixWrapper(name, wname, prefix=u("{CRYPT}"), lazy=True) - del g -_init_ldap_crypt_handlers() - -##_lcn_host = None -##def get_host_ldap_crypt_schemes(): -## global _lcn_host -## if _lcn_host is None: -## from passlib.hosts import host_context -## schemes = host_context.schemes() -## _lcn_host = [ -## "ldap_" + name -## for name in unix_crypt_names -## if name in schemes -## ] -## return _lcn_host - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/md5_crypt.py b/libs_crutch/contrib/passlib/handlers/md5_crypt.py deleted file mode 100644 index 993db4d..0000000 --- a/libs_crutch/contrib/passlib/handlers/md5_crypt.py +++ /dev/null @@ -1,346 +0,0 @@ -"""passlib.handlers.md5_crypt - md5-crypt algorithm""" -#============================================================================= -# imports -#============================================================================= -# core -from hashlib import md5 -import logging; log = logging.getLogger(__name__) -# site -# pkg -from passlib.utils 
import safe_crypt, test_crypt, repeat_string -from passlib.utils.binary import h64 -from passlib.utils.compat import unicode, u -import passlib.utils.handlers as uh -# local -__all__ = [ - "md5_crypt", - "apr_md5_crypt", -] - -#============================================================================= -# pure-python backend -#============================================================================= -_BNULL = b"\x00" -_MD5_MAGIC = b"$1$" -_APR_MAGIC = b"$apr1$" - -# pre-calculated offsets used to speed up C digest stage (see notes below). -# sequence generated using the following: - ##perms_order = "p,pp,ps,psp,sp,spp".split(",") - ##def offset(i): - ## key = (("p" if i % 2 else "") + ("s" if i % 3 else "") + - ## ("p" if i % 7 else "") + ("" if i % 2 else "p")) - ## return perms_order.index(key) - ##_c_digest_offsets = [(offset(i), offset(i+1)) for i in range(0,42,2)] -_c_digest_offsets = ( - (0, 3), (5, 1), (5, 3), (1, 2), (5, 1), (5, 3), (1, 3), - (4, 1), (5, 3), (1, 3), (5, 0), (5, 3), (1, 3), (5, 1), - (4, 3), (1, 3), (5, 1), (5, 2), (1, 3), (5, 1), (5, 3), - ) - -# map used to transpose bytes when encoding final digest -_transpose_map = (12, 6, 0, 13, 7, 1, 14, 8, 2, 15, 9, 3, 5, 10, 4, 11) - -def _raw_md5_crypt(pwd, salt, use_apr=False): - """perform raw md5-crypt calculation - - this function provides a pure-python implementation of the internals - for the MD5-Crypt algorithms; it doesn't handle any of the - parsing/validation of the hash strings themselves. - - :arg pwd: password chars/bytes to hash - :arg salt: salt chars to use - :arg use_apr: use apache variant - - :returns: - encoded checksum chars - """ - # NOTE: regarding 'apr' format: - # really, apache? you had to invent a whole new "$apr1$" format, - # when all you did was change the ident incorporated into the hash? - # would love to find webpage explaining why just using a portable - # implementation of $1$ wasn't sufficient. *nothing else* was changed. 
- - #=================================================================== - # init & validate inputs - #=================================================================== - - # validate secret - # XXX: not sure what official unicode policy is, using this as default - if isinstance(pwd, unicode): - pwd = pwd.encode("utf-8") - assert isinstance(pwd, bytes), "pwd not unicode or bytes" - if _BNULL in pwd: - raise uh.exc.NullPasswordError(md5_crypt) - pwd_len = len(pwd) - - # validate salt - should have been taken care of by caller - assert isinstance(salt, unicode), "salt not unicode" - salt = salt.encode("ascii") - assert len(salt) < 9, "salt too large" - # NOTE: spec says salts larger than 8 bytes should be truncated, - # instead of causing an error. this function assumes that's been - # taken care of by the handler class. - - # load APR specific constants - if use_apr: - magic = _APR_MAGIC - else: - magic = _MD5_MAGIC - - #=================================================================== - # digest B - used as subinput to digest A - #=================================================================== - db = md5(pwd + salt + pwd).digest() - - #=================================================================== - # digest A - used to initialize first round of digest C - #=================================================================== - # start out with pwd + magic + salt - a_ctx = md5(pwd + magic + salt) - a_ctx_update = a_ctx.update - - # add pwd_len bytes of b, repeating b as many times as needed. - a_ctx_update(repeat_string(db, pwd_len)) - - # add null chars & first char of password - # NOTE: this may have historically been a bug, - # where they meant to use db[0] instead of B_NULL, - # but the original code memclear'ed db, - # and now all implementations have to use this. 
- i = pwd_len - evenchar = pwd[:1] - while i: - a_ctx_update(_BNULL if i & 1 else evenchar) - i >>= 1 - - # finish A - da = a_ctx.digest() - - #=================================================================== - # digest C - for a 1000 rounds, combine A, S, and P - # digests in various ways; in order to burn CPU time. - #=================================================================== - - # NOTE: the original MD5-Crypt implementation performs the C digest - # calculation using the following loop: - # - ##dc = da - ##i = 0 - ##while i < rounds: - ## tmp_ctx = md5(pwd if i & 1 else dc) - ## if i % 3: - ## tmp_ctx.update(salt) - ## if i % 7: - ## tmp_ctx.update(pwd) - ## tmp_ctx.update(dc if i & 1 else pwd) - ## dc = tmp_ctx.digest() - ## i += 1 - # - # The code Passlib uses (below) implements an equivalent algorithm, - # it's just been heavily optimized to pre-calculate a large number - # of things beforehand. It works off of a couple of observations - # about the original algorithm: - # - # 1. each round is a combination of 'dc', 'salt', and 'pwd'; and the exact - # combination is determined by whether 'i' a multiple of 2,3, and/or 7. - # 2. since lcm(2,3,7)==42, the series of combinations will repeat - # every 42 rounds. - # 3. even rounds 0-40 consist of 'hash(dc + round-specific-constant)'; - # while odd rounds 1-41 consist of hash(round-specific-constant + dc) - # - # Using these observations, the following code... - # * calculates the round-specific combination of salt & pwd for each round 0-41 - # * runs through as many 42-round blocks as possible (23) - # * runs through as many pairs of rounds as needed for remaining rounds (17) - # * this results in the required 42*23+2*17=1000 rounds required by md5_crypt. 
- # - # this cuts out a lot of the control overhead incurred when running the - # original loop 1000 times in python, resulting in ~20% increase in - # speed under CPython (though still 2x slower than glibc crypt) - - # prepare the 6 combinations of pwd & salt which are needed - # (order of 'perms' must match how _c_digest_offsets was generated) - pwd_pwd = pwd+pwd - pwd_salt = pwd+salt - perms = [pwd, pwd_pwd, pwd_salt, pwd_salt+pwd, salt+pwd, salt+pwd_pwd] - - # build up list of even-round & odd-round constants, - # and store in 21-element list as (even,odd) pairs. - data = [ (perms[even], perms[odd]) for even, odd in _c_digest_offsets] - - # perform 23 blocks of 42 rounds each (for a total of 966 rounds) - dc = da - blocks = 23 - while blocks: - for even, odd in data: - dc = md5(odd + md5(dc + even).digest()).digest() - blocks -= 1 - - # perform 17 more pairs of rounds (34 more rounds, for a total of 1000) - for even, odd in data[:17]: - dc = md5(odd + md5(dc + even).digest()).digest() - - #=================================================================== - # encode digest using appropriate transpose map - #=================================================================== - return h64.encode_transposed_bytes(dc, _transpose_map).decode("ascii") - -#============================================================================= -# handler -#============================================================================= -class _MD5_Common(uh.HasSalt, uh.GenericHandler): - """common code for md5_crypt and apr_md5_crypt""" - #=================================================================== - # class attrs - #=================================================================== - # name - set in subclass - setting_kwds = ("salt", "salt_size") - # ident - set in subclass - checksum_size = 22 - checksum_chars = uh.HASH64_CHARS - - max_salt_size = 8 - salt_chars = uh.HASH64_CHARS - - #=================================================================== - # methods - 
#=================================================================== - - @classmethod - def from_string(cls, hash): - salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls) - return cls(salt=salt, checksum=chk) - - def to_string(self): - return uh.render_mc2(self.ident, self.salt, self.checksum) - - # _calc_checksum() - provided by subclass - - #=================================================================== - # eoc - #=================================================================== - -class md5_crypt(uh.HasManyBackends, _MD5_Common): - """This class implements the MD5-Crypt password hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 0-8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :type salt_size: int - :param salt_size: - Optional number of characters to use when autogenerating new salts. - Defaults to 8, but can be any value between 0 and 8. - (This is mainly needed when generating Cisco-compatible hashes, - which require ``salt_size=4``). - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. - - .. 
versionadded:: 1.6 - """ - #=================================================================== - # class attrs - #=================================================================== - name = "md5_crypt" - ident = u("$1$") - - #=================================================================== - # methods - #=================================================================== - # FIXME: can't find definitive policy on how md5-crypt handles non-ascii. - # all backends currently coerce -> utf-8 - - backends = ("os_crypt", "builtin") - - #--------------------------------------------------------------- - # os_crypt backend - #--------------------------------------------------------------- - @classmethod - def _load_backend_os_crypt(cls): - if test_crypt("test", '$1$test$pi/xDtU5WFVRqYS6BMU8X/'): - cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) - return True - else: - return False - - def _calc_checksum_os_crypt(self, secret): - config = self.ident + self.salt - hash = safe_crypt(secret, config) - if hash: - assert hash.startswith(config) and len(hash) == len(config) + 23 - return hash[-22:] - else: - # py3's crypt.crypt() can't handle non-utf8 bytes. - # fallback to builtin alg, which is always available. - return self._calc_checksum_builtin(secret) - - #--------------------------------------------------------------- - # builtin backend - #--------------------------------------------------------------- - @classmethod - def _load_backend_builtin(cls): - cls._set_calc_checksum_backend(cls._calc_checksum_builtin) - return True - - def _calc_checksum_builtin(self, secret): - return _raw_md5_crypt(secret, self.salt) - - #=================================================================== - # eoc - #=================================================================== - -class apr_md5_crypt(_MD5_Common): - """This class implements the Apr-MD5-Crypt password hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt. 
- - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 0-8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - #=================================================================== - # class attrs - #=================================================================== - name = "apr_md5_crypt" - ident = u("$apr1$") - - #=================================================================== - # methods - #=================================================================== - def _calc_checksum(self, secret): - return _raw_md5_crypt(secret, self.salt, use_apr=True) - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/misc.py b/libs_crutch/contrib/passlib/handlers/misc.py deleted file mode 100644 index 44abc34..0000000 --- a/libs_crutch/contrib/passlib/handlers/misc.py +++ /dev/null @@ -1,269 +0,0 @@ -"""passlib.handlers.misc - misc generic handlers -""" -#============================================================================= -# imports -#============================================================================= -# core -import sys -import logging; log = logging.getLogger(__name__) -from 
warnings import warn -# site -# pkg -from passlib.utils import to_native_str, str_consteq -from passlib.utils.compat import unicode, u, unicode_or_bytes_types -import passlib.utils.handlers as uh -# local -__all__ = [ - "unix_disabled", - "unix_fallback", - "plaintext", -] - -#============================================================================= -# handler -#============================================================================= -class unix_fallback(uh.ifc.DisabledHash, uh.StaticHandler): - """This class provides the fallback behavior for unix shadow files, and follows the :ref:`password-hash-api`. - - This class does not implement a hash, but instead provides fallback - behavior as found in /etc/shadow on most unix variants. - If used, should be the last scheme in the context. - - * this class will positively identify all hash strings. - * for security, passwords will always hash to ``!``. - * it rejects all passwords if the hash is NOT an empty string (``!`` or ``*`` are frequently used). - * by default it rejects all passwords if the hash is an empty string, - but if ``enable_wildcard=True`` is passed to verify(), - all passwords will be allowed through if the hash is an empty string. - - .. deprecated:: 1.6 - This has been deprecated due to its "wildcard" feature, - and will be removed in Passlib 1.8. Use :class:`unix_disabled` instead. 
- """ - name = "unix_fallback" - context_kwds = ("enable_wildcard",) - - @classmethod - def identify(cls, hash): - if isinstance(hash, unicode_or_bytes_types): - return True - else: - raise uh.exc.ExpectedStringError(hash, "hash") - - def __init__(self, enable_wildcard=False, **kwds): - warn("'unix_fallback' is deprecated, " - "and will be removed in Passlib 1.8; " - "please use 'unix_disabled' instead.", - DeprecationWarning) - super(unix_fallback, self).__init__(**kwds) - self.enable_wildcard = enable_wildcard - - def _calc_checksum(self, secret): - if self.checksum: - # NOTE: hash will generally be "!", but we want to preserve - # it in case it's something else, like "*". - return self.checksum - else: - return u("!") - - @classmethod - def verify(cls, secret, hash, enable_wildcard=False): - uh.validate_secret(secret) - if not isinstance(hash, unicode_or_bytes_types): - raise uh.exc.ExpectedStringError(hash, "hash") - elif hash: - return False - else: - return enable_wildcard - -_MARKER_CHARS = u("*!") -_MARKER_BYTES = b"*!" - -class unix_disabled(uh.ifc.DisabledHash, uh.MinimalHandler): - """This class provides disabled password behavior for unix shadow files, - and follows the :ref:`password-hash-api`. - - This class does not implement a hash, but instead matches the "disabled account" - strings found in ``/etc/shadow`` on most Unix variants. "encrypting" a password - will simply return the disabled account marker. It will reject all passwords, - no matter the hash string. The :meth:`~passlib.ifc.PasswordHash.hash` - method supports one optional keyword: - - :type marker: str - :param marker: - Optional marker string which overrides the platform default - used to indicate a disabled account. - - If not specified, this will default to ``"*"`` on BSD systems, - and use the Linux default ``"!"`` for all other platforms. - (:attr:`!unix_disabled.default_marker` will contain the default value) - - .. 
versionadded:: 1.6 - This class was added as a replacement for the now-deprecated - :class:`unix_fallback` class, which had some undesirable features. - """ - name = "unix_disabled" - setting_kwds = ("marker",) - context_kwds = () - - _disable_prefixes = tuple(str(_MARKER_CHARS)) - - # TODO: rename attr to 'marker'... - if 'bsd' in sys.platform: # pragma: no cover -- runtime detection - default_marker = u("*") - else: - # use the linux default for other systems - # (glibc also supports adding old hash after the marker - # so it can be restored later). - default_marker = u("!") - - @classmethod - def using(cls, marker=None, **kwds): - subcls = super(unix_disabled, cls).using(**kwds) - if marker is not None: - if not cls.identify(marker): - raise ValueError("invalid marker: %r" % marker) - subcls.default_marker = marker - return subcls - - @classmethod - def identify(cls, hash): - # NOTE: technically, anything in the /etc/shadow password field - # which isn't valid crypt() output counts as "disabled". - # but that's rather ambiguous, and it's hard to predict what - # valid output is for unknown crypt() implementations. - # so to be on the safe side, we only match things *known* - # to be disabled field indicators, and will add others - # as they are found. things beginning w/ "$" should *never* match. - # - # things currently matched: - # * linux uses "!" - # * bsd uses "*" - # * linux may use "!" + hash to disable but preserve original hash - # * linux counts empty string as "any password"; - # this code recognizes it, but treats it the same as "!" 
- if isinstance(hash, unicode): - start = _MARKER_CHARS - elif isinstance(hash, bytes): - start = _MARKER_BYTES - else: - raise uh.exc.ExpectedStringError(hash, "hash") - return not hash or hash[0] in start - - @classmethod - def verify(cls, secret, hash): - uh.validate_secret(secret) - if not cls.identify(hash): # handles typecheck - raise uh.exc.InvalidHashError(cls) - return False - - @classmethod - def hash(cls, secret, **kwds): - if kwds: - uh.warn_hash_settings_deprecation(cls, kwds) - return cls.using(**kwds).hash(secret) - uh.validate_secret(secret) - marker = cls.default_marker - assert marker and cls.identify(marker) - return to_native_str(marker, param="marker") - - @uh.deprecated_method(deprecated="1.7", removed="2.0") - @classmethod - def genhash(cls, secret, config, marker=None): - if not cls.identify(config): - raise uh.exc.InvalidHashError(cls) - elif config: - # preserve the existing str,since it might contain a disabled password hash ("!" + hash) - uh.validate_secret(secret) - return to_native_str(config, param="config") - else: - if marker is not None: - cls = cls.using(marker=marker) - return cls.hash(secret) - - @classmethod - def disable(cls, hash=None): - out = cls.hash("") - if hash is not None: - hash = to_native_str(hash, param="hash") - if cls.identify(hash): - # extract original hash, so that we normalize marker - hash = cls.enable(hash) - if hash: - out += hash - return out - - @classmethod - def enable(cls, hash): - hash = to_native_str(hash, param="hash") - for prefix in cls._disable_prefixes: - if hash.startswith(prefix): - orig = hash[len(prefix):] - if orig: - return orig - else: - raise ValueError("cannot restore original hash") - raise uh.exc.InvalidHashError(cls) - -class plaintext(uh.MinimalHandler): - """This class stores passwords in plaintext, and follows the :ref:`password-hash-api`. 
- - The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the - following additional contextual keyword: - - :type encoding: str - :param encoding: - This controls the character encoding to use (defaults to ``utf-8``). - - This encoding will be used to encode :class:`!unicode` passwords - under Python 2, and decode :class:`!bytes` hashes under Python 3. - - .. versionchanged:: 1.6 - The ``encoding`` keyword was added. - """ - # NOTE: this is subclassed by ldap_plaintext - - name = "plaintext" - setting_kwds = () - context_kwds = ("encoding",) - default_encoding = "utf-8" - - @classmethod - def identify(cls, hash): - if isinstance(hash, unicode_or_bytes_types): - return True - else: - raise uh.exc.ExpectedStringError(hash, "hash") - - @classmethod - def hash(cls, secret, encoding=None): - uh.validate_secret(secret) - if not encoding: - encoding = cls.default_encoding - return to_native_str(secret, encoding, "secret") - - @classmethod - def verify(cls, secret, hash, encoding=None): - if not encoding: - encoding = cls.default_encoding - hash = to_native_str(hash, encoding, "hash") - if not cls.identify(hash): - raise uh.exc.InvalidHashError(cls) - return str_consteq(cls.hash(secret, encoding), hash) - - @uh.deprecated_method(deprecated="1.7", removed="2.0") - @classmethod - def genconfig(cls): - return cls.hash("") - - @uh.deprecated_method(deprecated="1.7", removed="2.0") - @classmethod - def genhash(cls, secret, config, encoding=None): - # NOTE: 'config' is ignored, as this hash has no salting / etc - if not cls.identify(config): - raise uh.exc.InvalidHashError(cls) - return cls.hash(secret, encoding=encoding) - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/mssql.py 
b/libs_crutch/contrib/passlib/handlers/mssql.py deleted file mode 100644 index b060b36..0000000 --- a/libs_crutch/contrib/passlib/handlers/mssql.py +++ /dev/null @@ -1,244 +0,0 @@ -"""passlib.handlers.mssql - MS-SQL Password Hash - -Notes -===== -MS-SQL has used a number of hash algs over the years, -most of which were exposed through the undocumented -'pwdencrypt' and 'pwdcompare' sql functions. - -Known formats -------------- -6.5 - snefru hash, ascii encoded password - no examples found - -7.0 - snefru hash, unicode (what encoding?) - saw ref that these blobs were 16 bytes in size - no examples found - -2000 - byte string using displayed as 0x hex, using 0x0100 prefix. - contains hashes of password and upper-case password. - -2007 - same as 2000, but without the upper-case hash. - -refs ----------- -https://blogs.msdn.com/b/lcris/archive/2007/04/30/sql-server-2005-about-login-password-hashes.aspx?Redirected=true -http://us.generation-nt.com/securing-passwords-hash-help-35429432.html -http://forum.md5decrypter.co.uk/topic230-mysql-and-mssql-get-password-hashes.aspx -http://www.theregister.co.uk/2002/07/08/cracking_ms_sql_server_passwords/ -""" -#============================================================================= -# imports -#============================================================================= -# core -from binascii import hexlify, unhexlify -from hashlib import sha1 -import re -import logging; log = logging.getLogger(__name__) -from warnings import warn -# site -# pkg -from passlib.utils import consteq -from passlib.utils.compat import bascii_to_str, unicode, u -import passlib.utils.handlers as uh -# local -__all__ = [ - "mssql2000", - "mssql2005", -] - -#============================================================================= -# mssql 2000 -#============================================================================= -def _raw_mssql(secret, salt): - assert isinstance(secret, unicode) - assert isinstance(salt, bytes) - return 
sha1(secret.encode("utf-16-le") + salt).digest() - -BIDENT = b"0x0100" -##BIDENT2 = b("\x01\x00") -UIDENT = u("0x0100") - -def _ident_mssql(hash, csize, bsize): - """common identify for mssql 2000/2005""" - if isinstance(hash, unicode): - if len(hash) == csize and hash.startswith(UIDENT): - return True - elif isinstance(hash, bytes): - if len(hash) == csize and hash.startswith(BIDENT): - return True - ##elif len(hash) == bsize and hash.startswith(BIDENT2): # raw bytes - ## return True - else: - raise uh.exc.ExpectedStringError(hash, "hash") - return False - -def _parse_mssql(hash, csize, bsize, handler): - """common parser for mssql 2000/2005; returns 4 byte salt + checksum""" - if isinstance(hash, unicode): - if len(hash) == csize and hash.startswith(UIDENT): - try: - return unhexlify(hash[6:].encode("utf-8")) - except TypeError: # throw when bad char found - pass - elif isinstance(hash, bytes): - # assumes ascii-compat encoding - assert isinstance(hash, bytes) - if len(hash) == csize and hash.startswith(BIDENT): - try: - return unhexlify(hash[6:]) - except TypeError: # throw when bad char found - pass - ##elif len(hash) == bsize and hash.startswith(BIDENT2): # raw bytes - ## return hash[2:] - else: - raise uh.exc.ExpectedStringError(hash, "hash") - raise uh.exc.InvalidHashError(handler) - -class mssql2000(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): - """This class implements the password hash used by MS-SQL 2000, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: bytes - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 4 bytes in length. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. 
If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. - """ - #=================================================================== - # algorithm information - #=================================================================== - name = "mssql2000" - setting_kwds = ("salt",) - checksum_size = 40 - min_salt_size = max_salt_size = 4 - - #=================================================================== - # formatting - #=================================================================== - - # 0100 - 2 byte identifier - # 4 byte salt - # 20 byte checksum - # 20 byte checksum - # = 46 bytes - # encoded '0x' + 92 chars = 94 - - @classmethod - def identify(cls, hash): - return _ident_mssql(hash, 94, 46) - - @classmethod - def from_string(cls, hash): - data = _parse_mssql(hash, 94, 46, cls) - return cls(salt=data[:4], checksum=data[4:]) - - def to_string(self): - raw = self.salt + self.checksum - # raw bytes format - BIDENT2 + raw - return "0x0100" + bascii_to_str(hexlify(raw).upper()) - - def _calc_checksum(self, secret): - if isinstance(secret, bytes): - secret = secret.decode("utf-8") - salt = self.salt - return _raw_mssql(secret, salt) + _raw_mssql(secret.upper(), salt) - - @classmethod - def verify(cls, secret, hash): - # NOTE: we only compare against the upper-case hash - # XXX: add 'full' just to verify both checksums? 
- uh.validate_secret(secret) - self = cls.from_string(hash) - chk = self.checksum - if chk is None: - raise uh.exc.MissingDigestError(cls) - if isinstance(secret, bytes): - secret = secret.decode("utf-8") - result = _raw_mssql(secret.upper(), self.salt) - return consteq(result, chk[20:]) - -#============================================================================= -# handler -#============================================================================= -class mssql2005(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): - """This class implements the password hash used by MS-SQL 2005, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: bytes - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 4 bytes in length. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. 
- """ - #=================================================================== - # algorithm information - #=================================================================== - name = "mssql2005" - setting_kwds = ("salt",) - - checksum_size = 20 - min_salt_size = max_salt_size = 4 - - #=================================================================== - # formatting - #=================================================================== - - # 0x0100 - 2 byte identifier - # 4 byte salt - # 20 byte checksum - # = 26 bytes - # encoded '0x' + 52 chars = 54 - - @classmethod - def identify(cls, hash): - return _ident_mssql(hash, 54, 26) - - @classmethod - def from_string(cls, hash): - data = _parse_mssql(hash, 54, 26, cls) - return cls(salt=data[:4], checksum=data[4:]) - - def to_string(self): - raw = self.salt + self.checksum - # raw bytes format - BIDENT2 + raw - return "0x0100" + bascii_to_str(hexlify(raw)).upper() - - def _calc_checksum(self, secret): - if isinstance(secret, bytes): - secret = secret.decode("utf-8") - return _raw_mssql(secret, self.salt) - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/mysql.py b/libs_crutch/contrib/passlib/handlers/mysql.py deleted file mode 100644 index 4a71253..0000000 --- a/libs_crutch/contrib/passlib/handlers/mysql.py +++ /dev/null @@ -1,128 +0,0 @@ -"""passlib.handlers.mysql - -MySQL 3.2.3 / OLD_PASSWORD() - - This implements Mysql's OLD_PASSWORD algorithm, introduced in version 3.2.3, deprecated in version 4.1. - - See :mod:`passlib.handlers.mysql_41` for the new algorithm was put in place in version 4.1 - - This algorithm is known to be very insecure, and should only be used to verify existing password hashes. 
- - http://djangosnippets.org/snippets/1508/ - -MySQL 4.1.1 / NEW PASSWORD - This implements Mysql new PASSWORD algorithm, introduced in version 4.1. - - This function is unsalted, and therefore not very secure against rainbow attacks. - It should only be used when dealing with mysql passwords, - for all other purposes, you should use a salted hash function. - - Description taken from http://dev.mysql.com/doc/refman/6.0/en/password-hashing.html -""" -#============================================================================= -# imports -#============================================================================= -# core -from hashlib import sha1 -import re -import logging; log = logging.getLogger(__name__) -from warnings import warn -# site -# pkg -from passlib.utils import to_native_str -from passlib.utils.compat import bascii_to_str, unicode, u, \ - byte_elem_value, str_to_uascii -import passlib.utils.handlers as uh -# local -__all__ = [ - 'mysql323', - 'mysq41', -] - -#============================================================================= -# backend -#============================================================================= -class mysql323(uh.StaticHandler): - """This class implements the MySQL 3.2.3 password hash, and follows the :ref:`password-hash-api`. - - It has no salt and a single fixed round. - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. 
- """ - #=================================================================== - # class attrs - #=================================================================== - name = "mysql323" - checksum_size = 16 - checksum_chars = uh.HEX_CHARS - - #=================================================================== - # methods - #=================================================================== - @classmethod - def _norm_hash(cls, hash): - return hash.lower() - - def _calc_checksum(self, secret): - # FIXME: no idea if mysql has a policy about handling unicode passwords - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - - MASK_32 = 0xffffffff - MASK_31 = 0x7fffffff - WHITE = b' \t' - - nr1 = 0x50305735 - nr2 = 0x12345671 - add = 7 - for c in secret: - if c in WHITE: - continue - tmp = byte_elem_value(c) - nr1 ^= ((((nr1 & 63)+add)*tmp) + (nr1 << 8)) & MASK_32 - nr2 = (nr2+((nr2 << 8) ^ nr1)) & MASK_32 - add = (add+tmp) & MASK_32 - return u("%08x%08x") % (nr1 & MASK_31, nr2 & MASK_31) - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# handler -#============================================================================= -class mysql41(uh.StaticHandler): - """This class implements the MySQL 4.1 password hash, and follows the :ref:`password-hash-api`. - - It has no salt and a single fixed round. - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. 
- """ - #=================================================================== - # class attrs - #=================================================================== - name = "mysql41" - _hash_prefix = u("*") - checksum_chars = uh.HEX_CHARS - checksum_size = 40 - - #=================================================================== - # methods - #=================================================================== - @classmethod - def _norm_hash(cls, hash): - return hash.upper() - - def _calc_checksum(self, secret): - # FIXME: no idea if mysql has a policy about handling unicode passwords - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - return str_to_uascii(sha1(sha1(secret).digest()).hexdigest()).upper() - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/oracle.py b/libs_crutch/contrib/passlib/handlers/oracle.py deleted file mode 100644 index a094f37..0000000 --- a/libs_crutch/contrib/passlib/handlers/oracle.py +++ /dev/null @@ -1,172 +0,0 @@ -"""passlib.handlers.oracle - Oracle DB Password Hashes""" -#============================================================================= -# imports -#============================================================================= -# core -from binascii import hexlify, unhexlify -from hashlib import sha1 -import re -import logging; log = logging.getLogger(__name__) -# site -# pkg -from passlib.utils import to_unicode, xor_bytes -from passlib.utils.compat import irange, u, \ - uascii_to_str, unicode, str_to_uascii -from passlib.crypto.des import des_encrypt_block -import passlib.utils.handlers as uh -# local -__all__ = [ - "oracle10g", - "oracle11g" -] - 
-#============================================================================= -# oracle10 -#============================================================================= -def des_cbc_encrypt(key, value, iv=b'\x00' * 8, pad=b'\x00'): - """performs des-cbc encryption, returns only last block. - - this performs a specific DES-CBC encryption implementation - as needed by the Oracle10 hash. it probably won't be useful for - other purposes as-is. - - input value is null-padded to multiple of 8 bytes. - - :arg key: des key as bytes - :arg value: value to encrypt, as bytes. - :param iv: optional IV - :param pad: optional pad byte - - :returns: last block of DES-CBC encryption of all ``value``'s byte blocks. - """ - value += pad * (-len(value) % 8) # null pad to multiple of 8 - hash = iv # start things off - for offset in irange(0,len(value),8): - chunk = xor_bytes(hash, value[offset:offset+8]) - hash = des_encrypt_block(key, chunk) - return hash - -# magic string used as initial des key by oracle10 -ORACLE10_MAGIC = b"\x01\x23\x45\x67\x89\xAB\xCD\xEF" - -class oracle10(uh.HasUserContext, uh.StaticHandler): - """This class implements the password hash used by Oracle up to version 10g, and follows the :ref:`password-hash-api`. - - It does a single round of hashing, and relies on the username as the salt. - - The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the - following additional contextual keywords: - - :type user: str - :param user: name of oracle user account this password is associated with. 
- """ - #=================================================================== - # algorithm information - #=================================================================== - name = "oracle10" - checksum_chars = uh.HEX_CHARS - checksum_size = 16 - - #=================================================================== - # methods - #=================================================================== - @classmethod - def _norm_hash(cls, hash): - return hash.upper() - - def _calc_checksum(self, secret): - # FIXME: not sure how oracle handles unicode. - # online docs about 10g hash indicate it puts ascii chars - # in a 2-byte encoding w/ the high byte set to null. - # they don't say how it handles other chars, or what encoding. - # - # so for now, encoding secret & user to utf-16-be, - # since that fits, and if secret/user is bytes, - # we assume utf-8, and decode first. - # - # this whole mess really needs someone w/ an oracle system, - # and some answers :) - if isinstance(secret, bytes): - secret = secret.decode("utf-8") - user = to_unicode(self.user, "utf-8", param="user") - input = (user+secret).upper().encode("utf-16-be") - hash = des_cbc_encrypt(ORACLE10_MAGIC, input) - hash = des_cbc_encrypt(hash, input) - return hexlify(hash).decode("ascii").upper() - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# oracle11 -#============================================================================= -class oracle11(uh.HasSalt, uh.GenericHandler): - """This class implements the Oracle11g password hash, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. 
- If not specified, one will be autogenerated (this is recommended). - If specified, it must be 20 hexadecimal characters. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - #=================================================================== - # class attrs - #=================================================================== - #--GenericHandler-- - name = "oracle11" - setting_kwds = ("salt",) - checksum_size = 40 - checksum_chars = uh.UPPER_HEX_CHARS - - #--HasSalt-- - min_salt_size = max_salt_size = 20 - salt_chars = uh.UPPER_HEX_CHARS - - - #=================================================================== - # methods - #=================================================================== - _hash_regex = re.compile(u("^S:(?P[0-9a-f]{40})(?P[0-9a-f]{20})$"), re.I) - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - m = cls._hash_regex.match(hash) - if not m: - raise uh.exc.InvalidHashError(cls) - salt, chk = m.group("salt", "chk") - return cls(salt=salt, checksum=chk.upper()) - - def to_string(self): - chk = self.checksum - hash = u("S:%s%s") % (chk.upper(), self.salt.upper()) - return uascii_to_str(hash) - - def _calc_checksum(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - chk = sha1(secret + unhexlify(self.salt.encode("ascii"))).hexdigest() - return str_to_uascii(chk).upper() - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof 
-#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/pbkdf2.py b/libs_crutch/contrib/passlib/handlers/pbkdf2.py deleted file mode 100644 index 274278d..0000000 --- a/libs_crutch/contrib/passlib/handlers/pbkdf2.py +++ /dev/null @@ -1,475 +0,0 @@ -"""passlib.handlers.pbkdf - PBKDF2 based hashes""" -#============================================================================= -# imports -#============================================================================= -# core -from binascii import hexlify, unhexlify -from base64 import b64encode, b64decode -import logging; log = logging.getLogger(__name__) -# site -# pkg -from passlib.utils import to_unicode -from passlib.utils.binary import ab64_decode, ab64_encode -from passlib.utils.compat import str_to_bascii, u, uascii_to_str, unicode -from passlib.crypto.digest import pbkdf2_hmac -import passlib.utils.handlers as uh -# local -__all__ = [ - "pbkdf2_sha1", - "pbkdf2_sha256", - "pbkdf2_sha512", - "cta_pbkdf2_sha1", - "dlitz_pbkdf2_sha1", - "grub_pbkdf2_sha512", -] - -#============================================================================= -# -#============================================================================= -class Pbkdf2DigestHandler(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): - """base class for various pbkdf2_{digest} algorithms""" - #=================================================================== - # class attrs - #=================================================================== - - #--GenericHandler-- - setting_kwds = ("salt", "salt_size", "rounds") - checksum_chars = uh.HASH64_CHARS - - #--HasSalt-- - default_salt_size = 16 - max_salt_size = 1024 - - #--HasRounds-- - default_rounds = None # set by subclass - min_rounds = 1 - max_rounds = 0xffffffff # setting at 32-bit limit for now - rounds_cost = "linear" - - #--this class-- - _digest = None # name of subclass-specified hash - - # NOTE: 
max_salt_size and max_rounds are arbitrarily chosen to provide sanity check. - # the underlying pbkdf2 specifies no bounds for either. - - # NOTE: defaults chosen to be at least as large as pbkdf2 rfc recommends... - # >8 bytes of entropy in salt, >1000 rounds - # increased due to time since rfc established - - #=================================================================== - # methods - #=================================================================== - - @classmethod - def from_string(cls, hash): - rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) - salt = ab64_decode(salt.encode("ascii")) - if chk: - chk = ab64_decode(chk.encode("ascii")) - return cls(rounds=rounds, salt=salt, checksum=chk) - - def to_string(self): - salt = ab64_encode(self.salt).decode("ascii") - chk = ab64_encode(self.checksum).decode("ascii") - return uh.render_mc3(self.ident, self.rounds, salt, chk) - - def _calc_checksum(self, secret): - # NOTE: pbkdf2_hmac() will encode secret & salt using UTF8 - return pbkdf2_hmac(self._digest, secret, self.salt, self.rounds, self.checksum_size) - -def create_pbkdf2_hash(hash_name, digest_size, rounds=12000, ident=None, module=__name__): - """create new Pbkdf2DigestHandler subclass for a specific hash""" - name = 'pbkdf2_' + hash_name - if ident is None: - ident = u("$pbkdf2-%s$") % (hash_name,) - base = Pbkdf2DigestHandler - return type(name, (base,), dict( - __module__=module, # so ABCMeta won't clobber it. - name=name, - ident=ident, - _digest = hash_name, - default_rounds=rounds, - checksum_size=digest_size, - encoded_checksum_size=(digest_size*4+2)//3, - __doc__="""This class implements a generic ``PBKDF2-HMAC-%(digest)s``-based password hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: bytes - :param salt: - Optional salt bytes. 
- If specified, the length must be between 0-1024 bytes. - If not specified, a %(dsc)d byte salt will be autogenerated (this is recommended). - - :type salt_size: int - :param salt_size: - Optional number of bytes to use when autogenerating new salts. - Defaults to %(dsc)d bytes, but can be any value between 0 and 1024. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to %(dr)d, but must be within ``range(1,1<<32)``. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ % dict(digest=hash_name.upper(), dsc=base.default_salt_size, dr=rounds) - )) - -#------------------------------------------------------------------------ -# derived handlers -#------------------------------------------------------------------------ -pbkdf2_sha1 = create_pbkdf2_hash("sha1", 20, 131000, ident=u("$pbkdf2$")) -pbkdf2_sha256 = create_pbkdf2_hash("sha256", 32, 29000) -pbkdf2_sha512 = create_pbkdf2_hash("sha512", 64, 25000) - -ldap_pbkdf2_sha1 = uh.PrefixWrapper("ldap_pbkdf2_sha1", pbkdf2_sha1, "{PBKDF2}", "$pbkdf2$", ident=True) -ldap_pbkdf2_sha256 = uh.PrefixWrapper("ldap_pbkdf2_sha256", pbkdf2_sha256, "{PBKDF2-SHA256}", "$pbkdf2-sha256$", ident=True) -ldap_pbkdf2_sha512 = uh.PrefixWrapper("ldap_pbkdf2_sha512", pbkdf2_sha512, "{PBKDF2-SHA512}", "$pbkdf2-sha512$", ident=True) - -#============================================================================= -# cryptacular's pbkdf2 hash -#============================================================================= - -# bytes used by cta hash for base64 values 63 & 64 -CTA_ALTCHARS = b"-_" - -class cta_pbkdf2_sha1(uh.HasRounds, uh.HasRawSalt, 
uh.HasRawChecksum, uh.GenericHandler): - """This class implements Cryptacular's PBKDF2-based crypt algorithm, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: bytes - :param salt: - Optional salt bytes. - If specified, it may be any length. - If not specified, a one will be autogenerated (this is recommended). - - :type salt_size: int - :param salt_size: - Optional number of bytes to use when autogenerating new salts. - Defaults to 16 bytes, but can be any value between 0 and 1024. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 60000, must be within ``range(1,1<<32)``. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - - #=================================================================== - # class attrs - #=================================================================== - #--GenericHandler-- - name = "cta_pbkdf2_sha1" - setting_kwds = ("salt", "salt_size", "rounds") - ident = u("$p5k2$") - checksum_size = 20 - - # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a - # sanity check. underlying algorithm (and reference implementation) - # allows effectively unbounded values for both of these parameters. 
- - #--HasSalt-- - default_salt_size = 16 - max_salt_size = 1024 - - #--HasRounds-- - default_rounds = pbkdf2_sha1.default_rounds - min_rounds = 1 - max_rounds = 0xffffffff # setting at 32-bit limit for now - rounds_cost = "linear" - - #=================================================================== - # formatting - #=================================================================== - - # hash $p5k2$1000$ZxK4ZBJCfQg=$jJZVscWtO--p1-xIZl6jhO2LKR0= - # ident $p5k2$ - # rounds 1000 - # salt ZxK4ZBJCfQg= - # chk jJZVscWtO--p1-xIZl6jhO2LKR0= - # NOTE: rounds in hex - - @classmethod - def from_string(cls, hash): - # NOTE: passlib deviation - forbidding zero-padded rounds - rounds, salt, chk = uh.parse_mc3(hash, cls.ident, rounds_base=16, handler=cls) - salt = b64decode(salt.encode("ascii"), CTA_ALTCHARS) - if chk: - chk = b64decode(chk.encode("ascii"), CTA_ALTCHARS) - return cls(rounds=rounds, salt=salt, checksum=chk) - - def to_string(self): - salt = b64encode(self.salt, CTA_ALTCHARS).decode("ascii") - chk = b64encode(self.checksum, CTA_ALTCHARS).decode("ascii") - return uh.render_mc3(self.ident, self.rounds, salt, chk, rounds_base=16) - - #=================================================================== - # backend - #=================================================================== - def _calc_checksum(self, secret): - # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 - return pbkdf2_hmac("sha1", secret, self.salt, self.rounds, 20) - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# dlitz's pbkdf2 hash -#============================================================================= -class dlitz_pbkdf2_sha1(uh.HasRounds, uh.HasSalt, uh.GenericHandler): - """This class implements Dwayne Litzenberger's PBKDF2-based crypt algorithm, and follows the 
:ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If specified, it may be any length, but must use the characters in the regexp range ``[./0-9A-Za-z]``. - If not specified, a 16 character salt will be autogenerated (this is recommended). - - :type salt_size: int - :param salt_size: - Optional number of bytes to use when autogenerating new salts. - Defaults to 16 bytes, but can be any value between 0 and 1024. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 60000, must be within ``range(1,1<<32)``. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - - #=================================================================== - # class attrs - #=================================================================== - #--GenericHandler-- - name = "dlitz_pbkdf2_sha1" - setting_kwds = ("salt", "salt_size", "rounds") - ident = u("$p5k2$") - _stub_checksum = u("0" * 48 + "=") - - # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a - # sanity check. underlying algorithm (and reference implementation) - # allows effectively unbounded values for both of these parameters. - - #--HasSalt-- - default_salt_size = 16 - max_salt_size = 1024 - salt_chars = uh.HASH64_CHARS - - #--HasRounds-- - # NOTE: for security, the default here is set to match pbkdf2_sha1, - # even though this hash's extra block makes it twice as slow. 
- default_rounds = pbkdf2_sha1.default_rounds - min_rounds = 1 - max_rounds = 0xffffffff # setting at 32-bit limit for now - rounds_cost = "linear" - - #=================================================================== - # formatting - #=================================================================== - - # hash $p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g - # ident $p5k2$ - # rounds c - # salt u9HvcT4d - # chk Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g - # rounds in lowercase hex, no zero padding - - @classmethod - def from_string(cls, hash): - rounds, salt, chk = uh.parse_mc3(hash, cls.ident, rounds_base=16, - default_rounds=400, handler=cls) - return cls(rounds=rounds, salt=salt, checksum=chk) - - def to_string(self): - rounds = self.rounds - if rounds == 400: - rounds = None # omit rounds measurement if == 400 - return uh.render_mc3(self.ident, rounds, self.salt, self.checksum, rounds_base=16) - - def _get_config(self): - rounds = self.rounds - if rounds == 400: - rounds = None # omit rounds measurement if == 400 - return uh.render_mc3(self.ident, rounds, self.salt, None, rounds_base=16) - - #=================================================================== - # backend - #=================================================================== - def _calc_checksum(self, secret): - # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 - salt = self._get_config() - result = pbkdf2_hmac("sha1", secret, salt, self.rounds, 24) - return ab64_encode(result).decode("ascii") - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# crowd -#============================================================================= -class atlassian_pbkdf2_sha1(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): - """This class implements the PBKDF2 hash used by Atlassian. 
- - It supports a fixed-length salt, and a fixed number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: bytes - :param salt: - Optional salt bytes. - If specified, the length must be exactly 16 bytes. - If not specified, a salt will be autogenerated (this is recommended). - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include - ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - #--GenericHandler-- - name = "atlassian_pbkdf2_sha1" - setting_kwds =("salt",) - ident = u("{PKCS5S2}") - checksum_size = 32 - - #--HasRawSalt-- - min_salt_size = max_salt_size = 16 - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - ident = cls.ident - if not hash.startswith(ident): - raise uh.exc.InvalidHashError(cls) - data = b64decode(hash[len(ident):].encode("ascii")) - salt, chk = data[:16], data[16:] - return cls(salt=salt, checksum=chk) - - def to_string(self): - data = self.salt + self.checksum - hash = self.ident + b64encode(data).decode("ascii") - return uascii_to_str(hash) - - def _calc_checksum(self, secret): - # TODO: find out what crowd's policy is re: unicode - # crowd seems to use a fixed number of rounds. - # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 - return pbkdf2_hmac("sha1", secret, self.salt, 10000, 32) - -#============================================================================= -# grub -#============================================================================= -class grub_pbkdf2_sha512(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): - """This class implements Grub's pbkdf2-hmac-sha512 hash, and follows the :ref:`password-hash-api`. 
- - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: bytes - :param salt: - Optional salt bytes. - If specified, the length must be between 0-1024 bytes. - If not specified, a 64 byte salt will be autogenerated (this is recommended). - - :type salt_size: int - :param salt_size: - Optional number of bytes to use when autogenerating new salts. - Defaults to 64 bytes, but can be any value between 0 and 1024. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 19000, but must be within ``range(1,1<<32)``. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - name = "grub_pbkdf2_sha512" - setting_kwds = ("salt", "salt_size", "rounds") - - ident = u("grub.pbkdf2.sha512.") - checksum_size = 64 - - # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a - # sanity check. the underlying pbkdf2 specifies no bounds for either, - # and it's not clear what grub specifies. 
- - default_salt_size = 64 - max_salt_size = 1024 - - default_rounds = pbkdf2_sha512.default_rounds - min_rounds = 1 - max_rounds = 0xffffffff # setting at 32-bit limit for now - rounds_cost = "linear" - - @classmethod - def from_string(cls, hash): - rounds, salt, chk = uh.parse_mc3(hash, cls.ident, sep=u("."), - handler=cls) - salt = unhexlify(salt.encode("ascii")) - if chk: - chk = unhexlify(chk.encode("ascii")) - return cls(rounds=rounds, salt=salt, checksum=chk) - - def to_string(self): - salt = hexlify(self.salt).decode("ascii").upper() - chk = hexlify(self.checksum).decode("ascii").upper() - return uh.render_mc3(self.ident, self.rounds, salt, chk, sep=u(".")) - - def _calc_checksum(self, secret): - # TODO: find out what grub's policy is re: unicode - # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 - return pbkdf2_hmac("sha512", secret, self.salt, self.rounds, 64) - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/phpass.py b/libs_crutch/contrib/passlib/handlers/phpass.py deleted file mode 100644 index 6736f0f..0000000 --- a/libs_crutch/contrib/passlib/handlers/phpass.py +++ /dev/null @@ -1,135 +0,0 @@ -"""passlib.handlers.phpass - PHPass Portable Crypt - -phppass located - http://www.openwall.com/phpass/ -algorithm described - http://www.openwall.com/articles/PHP-Users-Passwords - -phpass context - blowfish, bsdi_crypt, phpass -""" -#============================================================================= -# imports -#============================================================================= -# core -from hashlib import md5 -import logging; log = logging.getLogger(__name__) -# site -# pkg -from passlib.utils.binary import h64 -from passlib.utils.compat import u, uascii_to_str, unicode -import passlib.utils.handlers as uh -# local -__all__ = [ - "phpass", -] - 
-#============================================================================= -# phpass -#============================================================================= -class phpass(uh.HasManyIdents, uh.HasRounds, uh.HasSalt, uh.GenericHandler): - """This class implements the PHPass Portable Hash, and follows the :ref:`password-hash-api`. - - It supports a fixed-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 19, must be between 7 and 30, inclusive. - This value is logarithmic, the actual number of iterations used will be :samp:`2**{rounds}`. - - :type ident: str - :param ident: - phpBB3 uses ``H`` instead of ``P`` for its identifier, - this may be set to ``H`` in order to generate phpBB3 compatible hashes. - it defaults to ``P``. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. 
versionadded:: 1.6 - """ - - #=================================================================== - # class attrs - #=================================================================== - #--GenericHandler-- - name = "phpass" - setting_kwds = ("salt", "rounds", "ident") - checksum_chars = uh.HASH64_CHARS - - #--HasSalt-- - min_salt_size = max_salt_size = 8 - salt_chars = uh.HASH64_CHARS - - #--HasRounds-- - default_rounds = 19 - min_rounds = 7 - max_rounds = 30 - rounds_cost = "log2" - - #--HasManyIdents-- - default_ident = u("$P$") - ident_values = (u("$P$"), u("$H$")) - ident_aliases = {u("P"):u("$P$"), u("H"):u("$H$")} - - #=================================================================== - # formatting - #=================================================================== - - #$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r.L0 - # $P$ - # 9 - # IQRaTwmf - # eRo7ud9Fh4E2PdI0S3r.L0 - - @classmethod - def from_string(cls, hash): - ident, data = cls._parse_ident(hash) - rounds, salt, chk = data[0], data[1:9], data[9:] - return cls( - ident=ident, - rounds=h64.decode_int6(rounds.encode("ascii")), - salt=salt, - checksum=chk or None, - ) - - def to_string(self): - hash = u("%s%s%s%s") % (self.ident, - h64.encode_int6(self.rounds).decode("ascii"), - self.salt, - self.checksum or u('')) - return uascii_to_str(hash) - - #=================================================================== - # backend - #=================================================================== - def _calc_checksum(self, secret): - # FIXME: can't find definitive policy on how phpass handles non-ascii. - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - real_rounds = 1<`_ - hash names. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. 
Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - - In addition to the standard :ref:`password-hash-api` methods, - this class also provides the following methods for manipulating Passlib - scram hashes in ways useful for pluging into a SCRAM protocol stack: - - .. automethod:: extract_digest_info - .. automethod:: extract_digest_algs - .. automethod:: derive_digest - """ - #=================================================================== - # class attrs - #=================================================================== - - # NOTE: unlike most GenericHandler classes, the 'checksum' attr of - # ScramHandler is actually a map from digest_name -> digest, so - # many of the standard methods have been overridden. - - # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide - # a sanity check; the underlying pbkdf2 specifies no bounds for either. - - #--GenericHandler-- - name = "scram" - setting_kwds = ("salt", "salt_size", "rounds", "algs") - ident = u("$scram$") - - #--HasSalt-- - default_salt_size = 12 - max_salt_size = 1024 - - #--HasRounds-- - default_rounds = 100000 - min_rounds = 1 - max_rounds = 2**32-1 - rounds_cost = "linear" - - #--custom-- - - # default algorithms when creating new hashes. - default_algs = ["sha-1", "sha-256", "sha-512"] - - # list of algs verify prefers to use, in order. - _verify_algs = ["sha-256", "sha-512", "sha-224", "sha-384", "sha-1"] - - #=================================================================== - # instance attrs - #=================================================================== - - # 'checksum' is different from most GenericHandler subclasses, - # in that it contains a dict mapping from alg -> digest, - # or None if no checksum present. - - # list of algorithms to create/compare digests for. 
- algs = None - - #=================================================================== - # scram frontend helpers - #=================================================================== - @classmethod - def extract_digest_info(cls, hash, alg): - """return (salt, rounds, digest) for specific hash algorithm. - - :type hash: str - :arg hash: - :class:`!scram` hash stored for desired user - - :type alg: str - :arg alg: - Name of digest algorithm (e.g. ``"sha-1"``) requested by client. - - This value is run through :func:`~passlib.crypto.digest.norm_hash_name`, - so it is case-insensitive, and can be the raw SCRAM - mechanism name (e.g. ``"SCRAM-SHA-1"``), the IANA name, - or the hashlib name. - - :raises KeyError: - If the hash does not contain an entry for the requested digest - algorithm. - - :returns: - A tuple containing ``(salt, rounds, digest)``, - where *digest* matches the raw bytes returned by - SCRAM's :func:`Hi` function for the stored password, - the provided *salt*, and the iteration count (*rounds*). - *salt* and *digest* are both raw (unencoded) bytes. - """ - # XXX: this could be sped up by writing custom parsing routine - # that just picks out relevant digest, and doesn't bother - # with full structure validation each time it's called. - alg = norm_hash_name(alg, 'iana') - self = cls.from_string(hash) - chkmap = self.checksum - if not chkmap: - raise ValueError("scram hash contains no digests") - return self.salt, self.rounds, chkmap[alg] - - @classmethod - def extract_digest_algs(cls, hash, format="iana"): - """Return names of all algorithms stored in a given hash. - - :type hash: str - :arg hash: - The :class:`!scram` hash to parse - - :type format: str - :param format: - This changes the naming convention used by the - returned algorithm names. By default the names - are IANA-compatible; possible values are ``"iana"`` or ``"hashlib"``. - - :returns: - Returns a list of digest algorithms; e.g. 
``["sha-1"]`` - """ - # XXX: this could be sped up by writing custom parsing routine - # that just picks out relevant names, and doesn't bother - # with full structure validation each time it's called. - algs = cls.from_string(hash).algs - if format == "iana": - return algs - else: - return [norm_hash_name(alg, format) for alg in algs] - - @classmethod - def derive_digest(cls, password, salt, rounds, alg): - """helper to create SaltedPassword digest for SCRAM. - - This performs the step in the SCRAM protocol described as:: - - SaltedPassword := Hi(Normalize(password), salt, i) - - :type password: unicode or utf-8 bytes - :arg password: password to run through digest - - :type salt: bytes - :arg salt: raw salt data - - :type rounds: int - :arg rounds: number of iterations. - - :type alg: str - :arg alg: name of digest to use (e.g. ``"sha-1"``). - - :returns: - raw bytes of ``SaltedPassword`` - """ - if isinstance(password, bytes): - password = password.decode("utf-8") - # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8, - # and handle normalizing alg name. - return pbkdf2_hmac(alg, saslprep(password), salt, rounds) - - #=================================================================== - # serialization - #=================================================================== - - @classmethod - def from_string(cls, hash): - hash = to_native_str(hash, "ascii", "hash") - if not hash.startswith("$scram$"): - raise uh.exc.InvalidHashError(cls) - parts = hash[7:].split("$") - if len(parts) != 3: - raise uh.exc.MalformedHashError(cls) - rounds_str, salt_str, chk_str = parts - - # decode rounds - rounds = int(rounds_str) - if rounds_str != str(rounds): # forbid zero padding, etc. - raise uh.exc.MalformedHashError(cls) - - # decode salt - try: - salt = ab64_decode(salt_str.encode("ascii")) - except TypeError: - raise uh.exc.MalformedHashError(cls) - - # decode algs/digest list - if not chk_str: - # scram hashes MUST have something here. 
- raise uh.exc.MalformedHashError(cls) - elif "=" in chk_str: - # comma-separated list of 'alg=digest' pairs - algs = None - chkmap = {} - for pair in chk_str.split(","): - alg, digest = pair.split("=") - try: - chkmap[alg] = ab64_decode(digest.encode("ascii")) - except TypeError: - raise uh.exc.MalformedHashError(cls) - else: - # comma-separated list of alg names, no digests - algs = chk_str - chkmap = None - - # return new object - return cls( - rounds=rounds, - salt=salt, - checksum=chkmap, - algs=algs, - ) - - def to_string(self): - salt = bascii_to_str(ab64_encode(self.salt)) - chkmap = self.checksum - chk_str = ",".join( - "%s=%s" % (alg, bascii_to_str(ab64_encode(chkmap[alg]))) - for alg in self.algs - ) - return '$scram$%d$%s$%s' % (self.rounds, salt, chk_str) - - #=================================================================== - # variant constructor - #=================================================================== - @classmethod - def using(cls, default_algs=None, algs=None, **kwds): - # parse aliases - if algs is not None: - assert default_algs is None - default_algs = algs - - # create subclass - subcls = super(scram, cls).using(**kwds) - - # fill in algs - if default_algs is not None: - subcls.default_algs = cls._norm_algs(default_algs) - return subcls - - #=================================================================== - # init - #=================================================================== - def __init__(self, algs=None, **kwds): - super(scram, self).__init__(**kwds) - - # init algs - digest_map = self.checksum - if algs is not None: - if digest_map is not None: - raise RuntimeError("checksum & algs kwds are mutually exclusive") - algs = self._norm_algs(algs) - elif digest_map is not None: - # derive algs list from digest map (if present). 
- algs = self._norm_algs(digest_map.keys()) - elif self.use_defaults: - algs = list(self.default_algs) - assert self._norm_algs(algs) == algs, "invalid default algs: %r" % (algs,) - else: - raise TypeError("no algs list specified") - self.algs = algs - - def _norm_checksum(self, checksum, relaxed=False): - if not isinstance(checksum, dict): - raise uh.exc.ExpectedTypeError(checksum, "dict", "checksum") - for alg, digest in iteritems(checksum): - if alg != norm_hash_name(alg, 'iana'): - raise ValueError("malformed algorithm name in scram hash: %r" % - (alg,)) - if len(alg) > 9: - raise ValueError("SCRAM limits algorithm names to " - "9 characters: %r" % (alg,)) - if not isinstance(digest, bytes): - raise uh.exc.ExpectedTypeError(digest, "raw bytes", "digests") - # TODO: verify digest size (if digest is known) - if 'sha-1' not in checksum: - # NOTE: required because of SCRAM spec. - raise ValueError("sha-1 must be in algorithm list of scram hash") - return checksum - - @classmethod - def _norm_algs(cls, algs): - """normalize algs parameter""" - if isinstance(algs, native_string_types): - algs = splitcomma(algs) - algs = sorted(norm_hash_name(alg, 'iana') for alg in algs) - if any(len(alg)>9 for alg in algs): - raise ValueError("SCRAM limits alg names to max of 9 characters") - if 'sha-1' not in algs: - # NOTE: required because of SCRAM spec (rfc 5802) - raise ValueError("sha-1 must be in algorithm list of scram hash") - return algs - - #=================================================================== - # migration - #=================================================================== - def _calc_needs_update(self, **kwds): - # marks hashes as deprecated if they don't include at least all default_algs. - # XXX: should we deprecate if they aren't exactly the same, - # to permit removing legacy hashes? 
- if not set(self.algs).issuperset(self.default_algs): - return True - - # hand off to base implementation - return super(scram, self)._calc_needs_update(**kwds) - - #=================================================================== - # digest methods - #=================================================================== - def _calc_checksum(self, secret, alg=None): - rounds = self.rounds - salt = self.salt - hash = self.derive_digest - if alg: - # if requested, generate digest for specific alg - return hash(secret, salt, rounds, alg) - else: - # by default, return dict containing digests for all algs - return dict( - (alg, hash(secret, salt, rounds, alg)) - for alg in self.algs - ) - - @classmethod - def verify(cls, secret, hash, full=False): - uh.validate_secret(secret) - self = cls.from_string(hash) - chkmap = self.checksum - if not chkmap: - raise ValueError("expected %s hash, got %s config string instead" % - (cls.name, cls.name)) - - # NOTE: to make the verify method efficient, we just calculate hash - # of shortest digest by default. apps can pass in "full=True" to - # check entire hash for consistency. - if full: - correct = failed = False - for alg, digest in iteritems(chkmap): - other = self._calc_checksum(secret, alg) - # NOTE: could do this length check in norm_algs(), - # but don't need to be that strict, and want to be able - # to parse hashes containing algs not supported by platform. - # it's fine if we fail here though. - if len(digest) != len(other): - raise ValueError("mis-sized %s digest in scram hash: %r != %r" - % (alg, len(digest), len(other))) - if consteq(other, digest): - correct = True - else: - failed = True - if correct and failed: - raise ValueError("scram hash verified inconsistently, " - "may be corrupted") - else: - return correct - else: - # XXX: should this just always use sha1 hash? would be faster. - # otherwise only verify against one hash, pick one w/ best security. 
- for alg in self._verify_algs: - if alg in chkmap: - other = self._calc_checksum(secret, alg) - return consteq(other, chkmap[alg]) - # there should always be sha-1 at the very least, - # or something went wrong inside _norm_algs() - raise AssertionError("sha-1 digest not found!") - - #=================================================================== - # - #=================================================================== - -#============================================================================= -# code used for testing scram against protocol examples during development. -#============================================================================= -##def _test_reference_scram(): -## "quick hack testing scram reference vectors" -## # NOTE: "n,," is GS2 header - see https://tools.ietf.org/html/rfc5801 -## from passlib.utils.compat import print_ -## -## engine = _scram_engine( -## alg="sha-1", -## salt='QSXCR+Q6sek8bf92'.decode("base64"), -## rounds=4096, -## password=u("pencil"), -## ) -## print_(engine.digest.encode("base64").rstrip()) -## -## msg = engine.format_auth_msg( -## username="user", -## client_nonce = "fyko+d2lbbFgONRv9qkxdawL", -## server_nonce = "3rfcNHYJY1ZVvWVs7j", -## header='c=biws', -## ) -## -## cp = engine.get_encoded_client_proof(msg) -## assert cp == "v0X8v3Bz2T0CJGbJQyF0X+HI4Ts=", cp -## -## ss = engine.get_encoded_server_sig(msg) -## assert ss == "rmF9pqV8S7suAoZWja4dJRkFsKQ=", ss -## -##class _scram_engine(object): -## """helper class for verifying scram hash behavior -## against SCRAM protocol examples. not officially part of Passlib. -## -## takes in alg, salt, rounds, and a digest or password. -## -## can calculate the various keys & messages of the scram protocol. 
-## -## """ -## #========================================================= -## # init -## #========================================================= -## -## @classmethod -## def from_string(cls, hash, alg): -## "create record from scram hash, for given alg" -## return cls(alg, *scram.extract_digest_info(hash, alg)) -## -## def __init__(self, alg, salt, rounds, digest=None, password=None): -## self.alg = norm_hash_name(alg) -## self.salt = salt -## self.rounds = rounds -## self.password = password -## if password: -## data = scram.derive_digest(password, salt, rounds, alg) -## if digest and data != digest: -## raise ValueError("password doesn't match digest") -## else: -## digest = data -## elif not digest: -## raise TypeError("must provide password or digest") -## self.digest = digest -## -## #========================================================= -## # frontend methods -## #========================================================= -## def get_hash(self, data): -## "return hash of raw data" -## return hashlib.new(iana_to_hashlib(self.alg), data).digest() -## -## def get_client_proof(self, msg): -## "return client proof of specified auth msg text" -## return xor_bytes(self.client_key, self.get_client_sig(msg)) -## -## def get_encoded_client_proof(self, msg): -## return self.get_client_proof(msg).encode("base64").rstrip() -## -## def get_client_sig(self, msg): -## "return client signature of specified auth msg text" -## return self.get_hmac(self.stored_key, msg) -## -## def get_server_sig(self, msg): -## "return server signature of specified auth msg text" -## return self.get_hmac(self.server_key, msg) -## -## def get_encoded_server_sig(self, msg): -## return self.get_server_sig(msg).encode("base64").rstrip() -## -## def format_server_response(self, client_nonce, server_nonce): -## return 'r={client_nonce}{server_nonce},s={salt},i={rounds}'.format( -## client_nonce=client_nonce, -## server_nonce=server_nonce, -## rounds=self.rounds, -## salt=self.encoded_salt, -## 
) -## -## def format_auth_msg(self, username, client_nonce, server_nonce, -## header='c=biws'): -## return ( -## 'n={username},r={client_nonce}' -## ',' -## 'r={client_nonce}{server_nonce},s={salt},i={rounds}' -## ',' -## '{header},r={client_nonce}{server_nonce}' -## ).format( -## username=username, -## client_nonce=client_nonce, -## server_nonce=server_nonce, -## salt=self.encoded_salt, -## rounds=self.rounds, -## header=header, -## ) -## -## #========================================================= -## # helpers to calculate & cache constant data -## #========================================================= -## def _calc_get_hmac(self): -## return get_prf("hmac-" + iana_to_hashlib(self.alg))[0] -## -## def _calc_client_key(self): -## return self.get_hmac(self.digest, b("Client Key")) -## -## def _calc_stored_key(self): -## return self.get_hash(self.client_key) -## -## def _calc_server_key(self): -## return self.get_hmac(self.digest, b("Server Key")) -## -## def _calc_encoded_salt(self): -## return self.salt.encode("base64").rstrip() -## -## #========================================================= -## # hacks for calculated attributes -## #========================================================= -## -## def __getattr__(self, attr): -## if not attr.startswith("_"): -## f = getattr(self, "_calc_" + attr, None) -## if f: -## value = f() -## setattr(self, attr, value) -## return value -## raise AttributeError("attribute not found") -## -## def __dir__(self): -## cdir = dir(self.__class__) -## attrs = set(cdir) -## attrs.update(self.__dict__) -## attrs.update(attr[6:] for attr in cdir -## if attr.startswith("_calc_")) -## return sorted(attrs) -## #========================================================= -## # eoc -## #========================================================= - -#============================================================================= -# eof -#============================================================================= diff --git 
a/libs_crutch/contrib/passlib/handlers/scrypt.py b/libs_crutch/contrib/passlib/handlers/scrypt.py deleted file mode 100644 index 1686fda..0000000 --- a/libs_crutch/contrib/passlib/handlers/scrypt.py +++ /dev/null @@ -1,383 +0,0 @@ -"""passlib.handlers.scrypt -- scrypt password hash""" -#============================================================================= -# imports -#============================================================================= -from __future__ import with_statement, absolute_import -# core -import logging; log = logging.getLogger(__name__) -# site -# pkg -from passlib.crypto import scrypt as _scrypt -from passlib.utils import h64, to_bytes -from passlib.utils.binary import h64, b64s_decode, b64s_encode -from passlib.utils.compat import u, bascii_to_str, suppress_cause -from passlib.utils.decor import classproperty -import passlib.utils.handlers as uh -# local -__all__ = [ - "scrypt", -] - -#============================================================================= -# scrypt format identifiers -#============================================================================= - -IDENT_SCRYPT = u("$scrypt$") # identifier used by passlib -IDENT_7 = u("$7$") # used by official scrypt spec - -_UDOLLAR = u("$") - -#============================================================================= -# handler -#============================================================================= -class scrypt(uh.ParallelismMixin, uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.HasManyIdents, - uh.GenericHandler): - """This class implements an SCrypt-based password [#scrypt-home]_ hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, a variable number of rounds, - as well as some custom tuning parameters unique to scrypt (see below). - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. 
- If specified, the length must be between 0-1024 bytes. - If not specified, one will be auto-generated (this is recommended). - - :type salt_size: int - :param salt_size: - Optional number of bytes to use when autogenerating new salts. - Defaults to 16 bytes, but can be any value between 0 and 1024. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 16, but must be within ``range(1,32)``. - - .. warning:: - - Unlike many hash algorithms, increasing the rounds value - will increase both the time *and memory* required to hash a password. - - :type block_size: int - :param block_size: - Optional block size to pass to scrypt hash function (the ``r`` parameter). - Useful for tuning scrypt to optimal performance for your CPU architecture. - Defaults to 8. - - :type parallelism: int - :param parallelism: - Optional parallelism to pass to scrypt hash function (the ``p`` parameter). - Defaults to 1. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. note:: - - The underlying scrypt hash function has a number of limitations - on it's parameter values, which forbids certain combinations of settings. - The requirements are: - - * ``linear_rounds = 2**`` - * ``linear_rounds < 2**(16 * block_size)`` - * ``block_size * parallelism <= 2**30-1`` - - .. todo:: - - This class currently does not support configuring default values - for ``block_size`` or ``parallelism`` via a :class:`~passlib.context.CryptContext` - configuration. 
- """ - - #=================================================================== - # class attrs - #=================================================================== - - #------------------------ - # PasswordHash - #------------------------ - name = "scrypt" - setting_kwds = ("ident", "salt", "salt_size", "rounds", "block_size", "parallelism") - - #------------------------ - # GenericHandler - #------------------------ - # NOTE: scrypt supports arbitrary output sizes. since it's output runs through - # pbkdf2-hmac-sha256 before returning, and this could be raised eventually... - # but a 256-bit digest is more than sufficient for password hashing. - # XXX: make checksum size configurable? could merge w/ argon2 code that does this. - checksum_size = 32 - - #------------------------ - # HasManyIdents - #------------------------ - default_ident = IDENT_SCRYPT - ident_values = (IDENT_SCRYPT, IDENT_7) - - #------------------------ - # HasRawSalt - #------------------------ - default_salt_size = 16 - max_salt_size = 1024 - - #------------------------ - # HasRounds - #------------------------ - # TODO: would like to dynamically pick this based on system - default_rounds = 16 - min_rounds = 1 - max_rounds = 31 # limited by scrypt alg - rounds_cost = "log2" - - # TODO: make default block size configurable via using(), and deprecatable via .needs_update() - - #=================================================================== - # instance attrs - #=================================================================== - - #: default parallelism setting (min=1 currently hardcoded in mixin) - parallelism = 1 - - #: default block size setting - block_size = 8 - - #=================================================================== - # variant constructor - #=================================================================== - - @classmethod - def using(cls, block_size=None, **kwds): - subcls = super(scrypt, cls).using(**kwds) - if block_size is not None: - if isinstance(block_size, 
uh.native_string_types): - block_size = int(block_size) - subcls.block_size = subcls._norm_block_size(block_size, relaxed=kwds.get("relaxed")) - - # make sure param combination is valid for scrypt() - try: - _scrypt.validate(1 << cls.default_rounds, cls.block_size, cls.parallelism) - except ValueError as err: - raise suppress_cause(ValueError("scrypt: invalid settings combination: " + str(err))) - - return subcls - - #=================================================================== - # parsing - #=================================================================== - - @classmethod - def from_string(cls, hash): - return cls(**cls.parse(hash)) - - @classmethod - def parse(cls, hash): - ident, suffix = cls._parse_ident(hash) - func = getattr(cls, "_parse_%s_string" % ident.strip(_UDOLLAR), None) - if func: - return func(suffix) - else: - raise uh.exc.InvalidHashError(cls) - - # - # passlib's format: - # $scrypt$ln=,r=,p=

$[$] - # where: - # logN, r, p -- decimal-encoded positive integer, no zero-padding - # logN -- log cost setting - # r -- block size setting (usually 8) - # p -- parallelism setting (usually 1) - # salt, digest -- b64-nopad encoded bytes - # - - @classmethod - def _parse_scrypt_string(cls, suffix): - # break params, salt, and digest sections - parts = suffix.split("$") - if len(parts) == 3: - params, salt, digest = parts - elif len(parts) == 2: - params, salt = parts - digest = None - else: - raise uh.exc.MalformedHashError(cls, "malformed hash") - - # break params apart - parts = params.split(",") - if len(parts) == 3: - nstr, bstr, pstr = parts - assert nstr.startswith("ln=") - assert bstr.startswith("r=") - assert pstr.startswith("p=") - else: - raise uh.exc.MalformedHashError(cls, "malformed settings field") - - return dict( - ident=IDENT_SCRYPT, - rounds=int(nstr[3:]), - block_size=int(bstr[2:]), - parallelism=int(pstr[2:]), - salt=b64s_decode(salt.encode("ascii")), - checksum=b64s_decode(digest.encode("ascii")) if digest else None, - ) - - # - # official format specification defined at - # https://gitlab.com/jas/scrypt-unix-crypt/blob/master/unix-scrypt.txt - # format: - # $7$[$] - # 0 12345 67890 1 - # where: - # All bytes use h64-little-endian encoding - # N: 6-bit log cost setting - # r: 30-bit block size setting - # p: 30-bit parallelism setting - # salt: variable length salt bytes - # digest: fixed 32-byte digest - # - - @classmethod - def _parse_7_string(cls, suffix): - # XXX: annoyingly, official spec embeds salt *raw*, yet doesn't specify a hash encoding. - # so assuming only h64 chars are valid for salt, and are ASCII encoded. 
- - # split into params & digest - parts = suffix.encode("ascii").split(b"$") - if len(parts) == 2: - params, digest = parts - elif len(parts) == 1: - params, = parts - digest = None - else: - raise uh.exc.MalformedHashError() - - # parse params & return - if len(params) < 11: - raise uh.exc.MalformedHashError(cls, "params field too short") - return dict( - ident=IDENT_7, - rounds=h64.decode_int6(params[:1]), - block_size=h64.decode_int30(params[1:6]), - parallelism=h64.decode_int30(params[6:11]), - salt=params[11:], - checksum=h64.decode_bytes(digest) if digest else None, - ) - - #=================================================================== - # formatting - #=================================================================== - def to_string(self): - ident = self.ident - if ident == IDENT_SCRYPT: - return "$scrypt$ln=%d,r=%d,p=%d$%s$%s" % ( - self.rounds, - self.block_size, - self.parallelism, - bascii_to_str(b64s_encode(self.salt)), - bascii_to_str(b64s_encode(self.checksum)), - ) - else: - assert ident == IDENT_7 - salt = self.salt - try: - salt.decode("ascii") - except UnicodeDecodeError: - raise suppress_cause(NotImplementedError("scrypt $7$ hashes dont support non-ascii salts")) - return bascii_to_str(b"".join([ - b"$7$", - h64.encode_int6(self.rounds), - h64.encode_int30(self.block_size), - h64.encode_int30(self.parallelism), - self.salt, - b"$", - h64.encode_bytes(self.checksum) - ])) - - #=================================================================== - # init - #=================================================================== - def __init__(self, block_size=None, **kwds): - super(scrypt, self).__init__(**kwds) - - # init block size - if block_size is None: - assert uh.validate_default_value(self, self.block_size, self._norm_block_size, - param="block_size") - else: - self.block_size = self._norm_block_size(block_size) - - # NOTE: if hash contains invalid complex constraint, relying on error - # being raised by scrypt call in _calc_checksum() 
- - @classmethod - def _norm_block_size(cls, block_size, relaxed=False): - return uh.norm_integer(cls, block_size, min=1, param="block_size", relaxed=relaxed) - - def _generate_salt(self): - salt = super(scrypt, self)._generate_salt() - if self.ident == IDENT_7: - # this format doesn't support non-ascii salts. - # as workaround, we take raw bytes, encoded to base64 - salt = b64s_encode(salt) - return salt - - #=================================================================== - # backend configuration - # NOTE: this following HasManyBackends' API, but provides it's own implementation, - # which actually switches the backend that 'passlib.crypto.scrypt.scrypt()' uses. - #=================================================================== - - @classproperty - def backends(cls): - return _scrypt.backend_values - - @classmethod - def get_backend(cls): - return _scrypt.backend - - @classmethod - def has_backend(cls, name="any"): - try: - cls.set_backend(name, dryrun=True) - return True - except uh.exc.MissingBackendError: - return False - - @classmethod - def set_backend(cls, name="any", dryrun=False): - _scrypt._set_backend(name, dryrun=dryrun) - - #=================================================================== - # digest calculation - #=================================================================== - def _calc_checksum(self, secret): - secret = to_bytes(secret, param="secret") - return _scrypt.scrypt(secret, self.salt, n=(1 << self.rounds), r=self.block_size, - p=self.parallelism, keylen=self.checksum_size) - - #=================================================================== - # hash migration - #=================================================================== - - def _calc_needs_update(self, **kwds): - """ - mark hash as needing update if rounds is outside desired bounds. 
- """ - # XXX: for now, marking all hashes which don't have matching block_size setting - if self.block_size != type(self).block_size: - return True - return super(scrypt, self)._calc_needs_update(**kwds) - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/sha1_crypt.py b/libs_crutch/contrib/passlib/handlers/sha1_crypt.py deleted file mode 100644 index d3e972c..0000000 --- a/libs_crutch/contrib/passlib/handlers/sha1_crypt.py +++ /dev/null @@ -1,158 +0,0 @@ -"""passlib.handlers.sha1_crypt -""" - -#============================================================================= -# imports -#============================================================================= - -# core -import logging; log = logging.getLogger(__name__) -# site -# pkg -from passlib.utils import safe_crypt, test_crypt -from passlib.utils.binary import h64 -from passlib.utils.compat import u, unicode, irange -from passlib.crypto.digest import compile_hmac -import passlib.utils.handlers as uh -# local -__all__ = [ -] -#============================================================================= -# sha1-crypt -#============================================================================= -_BNULL = b'\x00' - -class sha1_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): - """This class implements the SHA1-Crypt password hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. 
- If not specified, an 8 character one will be autogenerated (this is recommended). - If specified, it must be 0-64 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :type salt_size: int - :param salt_size: - Optional number of bytes to use when autogenerating new salts. - Defaults to 8 bytes, but can be any value between 0 and 64. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 480000, must be between 1 and 4294967295, inclusive. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - - #=================================================================== - # class attrs - #=================================================================== - #--GenericHandler-- - name = "sha1_crypt" - setting_kwds = ("salt", "salt_size", "rounds") - ident = u("$sha1$") - checksum_size = 28 - checksum_chars = uh.HASH64_CHARS - - #--HasSalt-- - default_salt_size = 8 - max_salt_size = 64 - salt_chars = uh.HASH64_CHARS - - #--HasRounds-- - default_rounds = 480000 # current passlib default - min_rounds = 1 # really, this should be higher. 
- max_rounds = 4294967295 # 32-bit integer limit - rounds_cost = "linear" - - #=================================================================== - # formatting - #=================================================================== - @classmethod - def from_string(cls, hash): - rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) - return cls(rounds=rounds, salt=salt, checksum=chk) - - def to_string(self, config=False): - chk = None if config else self.checksum - return uh.render_mc3(self.ident, self.rounds, self.salt, chk) - - #=================================================================== - # backend - #=================================================================== - backends = ("os_crypt", "builtin") - - #--------------------------------------------------------------- - # os_crypt backend - #--------------------------------------------------------------- - @classmethod - def _load_backend_os_crypt(cls): - if test_crypt("test", '$sha1$1$Wq3GL2Vp$C8U25GvfHS8qGHim' - 'ExLaiSFlGkAe'): - cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) - return True - else: - return False - - def _calc_checksum_os_crypt(self, secret): - config = self.to_string(config=True) - hash = safe_crypt(secret, config) - if hash: - assert hash.startswith(config) and len(hash) == len(config) + 29 - return hash[-28:] - else: - # py3's crypt.crypt() can't handle non-utf8 bytes. - # fallback to builtin alg, which is always available. 
- return self._calc_checksum_builtin(secret) - - #--------------------------------------------------------------- - # builtin backend - #--------------------------------------------------------------- - @classmethod - def _load_backend_builtin(cls): - cls._set_calc_checksum_backend(cls._calc_checksum_builtin) - return True - - def _calc_checksum_builtin(self, secret): - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - if _BNULL in secret: - raise uh.exc.NullPasswordError(self) - rounds = self.rounds - # NOTE: this seed value is NOT the same as the config string - result = (u("%s$sha1$%s") % (self.salt, rounds)).encode("ascii") - # NOTE: this algorithm is essentially PBKDF1, modified to use HMAC. - keyed_hmac = compile_hmac("sha1", secret) - for _ in irange(rounds): - result = keyed_hmac(result) - return h64.encode_transposed_bytes(result, self._chk_offsets).decode("ascii") - - _chk_offsets = [ - 2,1,0, - 5,4,3, - 8,7,6, - 11,10,9, - 14,13,12, - 17,16,15, - 0,19,18, - ] - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/sha2_crypt.py b/libs_crutch/contrib/passlib/handlers/sha2_crypt.py deleted file mode 100644 index 807de5e..0000000 --- a/libs_crutch/contrib/passlib/handlers/sha2_crypt.py +++ /dev/null @@ -1,519 +0,0 @@ -"""passlib.handlers.sha2_crypt - SHA256-Crypt / SHA512-Crypt""" -#============================================================================= -# imports -#============================================================================= -# core -import hashlib -import logging; log = logging.getLogger(__name__) -# site -# pkg -from passlib.utils import safe_crypt, test_crypt, \ - repeat_string, to_unicode -from 
passlib.utils.binary import h64 -from passlib.utils.compat import byte_elem_value, u, \ - uascii_to_str, unicode -import passlib.utils.handlers as uh -# local -__all__ = [ - "sha512_crypt", - "sha256_crypt", -] - -#============================================================================= -# pure-python backend, used by both sha256_crypt & sha512_crypt -# when crypt.crypt() backend is not available. -#============================================================================= -_BNULL = b'\x00' - -# pre-calculated offsets used to speed up C digest stage (see notes below). -# sequence generated using the following: - ##perms_order = "p,pp,ps,psp,sp,spp".split(",") - ##def offset(i): - ## key = (("p" if i % 2 else "") + ("s" if i % 3 else "") + - ## ("p" if i % 7 else "") + ("" if i % 2 else "p")) - ## return perms_order.index(key) - ##_c_digest_offsets = [(offset(i), offset(i+1)) for i in range(0,42,2)] -_c_digest_offsets = ( - (0, 3), (5, 1), (5, 3), (1, 2), (5, 1), (5, 3), (1, 3), - (4, 1), (5, 3), (1, 3), (5, 0), (5, 3), (1, 3), (5, 1), - (4, 3), (1, 3), (5, 1), (5, 2), (1, 3), (5, 1), (5, 3), - ) - -# map used to transpose bytes when encoding final sha256_crypt digest -_256_transpose_map = ( - 20, 10, 0, 11, 1, 21, 2, 22, 12, 23, 13, 3, 14, 4, 24, 5, - 25, 15, 26, 16, 6, 17, 7, 27, 8, 28, 18, 29, 19, 9, 30, 31, -) - -# map used to transpose bytes when encoding final sha512_crypt digest -_512_transpose_map = ( - 42, 21, 0, 1, 43, 22, 23, 2, 44, 45, 24, 3, 4, 46, 25, 26, - 5, 47, 48, 27, 6, 7, 49, 28, 29, 8, 50, 51, 30, 9, 10, 52, - 31, 32, 11, 53, 54, 33, 12, 13, 55, 34, 35, 14, 56, 57, 36, 15, - 16, 58, 37, 38, 17, 59, 60, 39, 18, 19, 61, 40, 41, 20, 62, 63, -) - -def _raw_sha2_crypt(pwd, salt, rounds, use_512=False): - """perform raw sha256-crypt / sha512-crypt - - this function provides a pure-python implementation of the internals - for the SHA256-Crypt and SHA512-Crypt algorithms; it doesn't - handle any of the parsing/validation of the hash strings 
themselves. - - :arg pwd: password chars/bytes to hash - :arg salt: salt chars to use - :arg rounds: linear rounds cost - :arg use_512: use sha512-crypt instead of sha256-crypt mode - - :returns: - encoded checksum chars - """ - #=================================================================== - # init & validate inputs - #=================================================================== - - # NOTE: the setup portion of this algorithm scales ~linearly in time - # with the size of the password, making it vulnerable to a DOS from - # unreasonably large inputs. the following code has some optimizations - # which would make things even worse, using O(pwd_len**2) memory - # when calculating digest P. - # - # to mitigate these two issues: 1) this code switches to a - # O(pwd_len)-memory algorithm for passwords that are much larger - # than average, and 2) Passlib enforces a library-wide max limit on - # the size of passwords it will allow, to prevent this algorithm and - # others from being DOSed in this way (see passlib.exc.PasswordSizeError - # for details). - - # validate secret - if isinstance(pwd, unicode): - # XXX: not sure what official unicode policy is, using this as default - pwd = pwd.encode("utf-8") - assert isinstance(pwd, bytes) - if _BNULL in pwd: - raise uh.exc.NullPasswordError(sha512_crypt if use_512 else sha256_crypt) - pwd_len = len(pwd) - - # validate rounds - assert 1000 <= rounds <= 999999999, "invalid rounds" - # NOTE: spec says out-of-range rounds should be clipped, instead of - # causing an error. this function assumes that's been taken care of - # by the handler class. - - # validate salt - assert isinstance(salt, unicode), "salt not unicode" - salt = salt.encode("ascii") - salt_len = len(salt) - assert salt_len < 17, "salt too large" - # NOTE: spec says salts larger than 16 bytes should be truncated, - # instead of causing an error. this function assumes that's been - # taken care of by the handler class. 
- - # load sha256/512 specific constants - if use_512: - hash_const = hashlib.sha512 - transpose_map = _512_transpose_map - else: - hash_const = hashlib.sha256 - transpose_map = _256_transpose_map - - #=================================================================== - # digest B - used as subinput to digest A - #=================================================================== - db = hash_const(pwd + salt + pwd).digest() - - #=================================================================== - # digest A - used to initialize first round of digest C - #=================================================================== - # start out with pwd + salt - a_ctx = hash_const(pwd + salt) - a_ctx_update = a_ctx.update - - # add pwd_len bytes of b, repeating b as many times as needed. - a_ctx_update(repeat_string(db, pwd_len)) - - # for each bit in pwd_len: add b if it's 1, or pwd if it's 0 - i = pwd_len - while i: - a_ctx_update(db if i & 1 else pwd) - i >>= 1 - - # finish A - da = a_ctx.digest() - - #=================================================================== - # digest P from password - used instead of password itself - # when calculating digest C. - #=================================================================== - if pwd_len < 96: - # this method is faster under python, but uses O(pwd_len**2) memory; - # so we don't use it for larger passwords to avoid a potential DOS. - dp = repeat_string(hash_const(pwd * pwd_len).digest(), pwd_len) - else: - # this method is slower under python, but uses a fixed amount of memory. 
- tmp_ctx = hash_const(pwd) - tmp_ctx_update = tmp_ctx.update - i = pwd_len-1 - while i: - tmp_ctx_update(pwd) - i -= 1 - dp = repeat_string(tmp_ctx.digest(), pwd_len) - assert len(dp) == pwd_len - - #=================================================================== - # digest S - used instead of salt itself when calculating digest C - #=================================================================== - ds = hash_const(salt * (16 + byte_elem_value(da[0]))).digest()[:salt_len] - assert len(ds) == salt_len, "salt_len somehow > hash_len!" - - #=================================================================== - # digest C - for a variable number of rounds, combine A, S, and P - # digests in various ways; in order to burn CPU time. - #=================================================================== - - # NOTE: the original SHA256/512-Crypt specification performs the C digest - # calculation using the following loop: - # - ##dc = da - ##i = 0 - ##while i < rounds: - ## tmp_ctx = hash_const(dp if i & 1 else dc) - ## if i % 3: - ## tmp_ctx.update(ds) - ## if i % 7: - ## tmp_ctx.update(dp) - ## tmp_ctx.update(dc if i & 1 else dp) - ## dc = tmp_ctx.digest() - ## i += 1 - # - # The code Passlib uses (below) implements an equivalent algorithm, - # it's just been heavily optimized to pre-calculate a large number - # of things beforehand. It works off of a couple of observations - # about the original algorithm: - # - # 1. each round is a combination of 'dc', 'ds', and 'dp'; determined - # by the whether 'i' a multiple of 2,3, and/or 7. - # 2. since lcm(2,3,7)==42, the series of combinations will repeat - # every 42 rounds. - # 3. even rounds 0-40 consist of 'hash(dc + round-specific-constant)'; - # while odd rounds 1-41 consist of hash(round-specific-constant + dc) - # - # Using these observations, the following code... 
- # * calculates the round-specific combination of ds & dp for each round 0-41 - # * runs through as many 42-round blocks as possible - # * runs through as many pairs of rounds as possible for remaining rounds - # * performs once last round if the total rounds should be odd. - # - # this cuts out a lot of the control overhead incurred when running the - # original loop 40,000+ times in python, resulting in ~20% increase in - # speed under CPython (though still 2x slower than glibc crypt) - - # prepare the 6 combinations of ds & dp which are needed - # (order of 'perms' must match how _c_digest_offsets was generated) - dp_dp = dp+dp - dp_ds = dp+ds - perms = [dp, dp_dp, dp_ds, dp_ds+dp, ds+dp, ds+dp_dp] - - # build up list of even-round & odd-round constants, - # and store in 21-element list as (even,odd) pairs. - data = [ (perms[even], perms[odd]) for even, odd in _c_digest_offsets] - - # perform as many full 42-round blocks as possible - dc = da - blocks, tail = divmod(rounds, 42) - while blocks: - for even, odd in data: - dc = hash_const(odd + hash_const(dc + even).digest()).digest() - blocks -= 1 - - # perform any leftover rounds - if tail: - # perform any pairs of rounds - pairs = tail>>1 - for even, odd in data[:pairs]: - dc = hash_const(odd + hash_const(dc + even).digest()).digest() - - # if rounds was odd, do one last round (since we started at 0, - # last round will be an even-numbered round) - if tail & 1: - dc = hash_const(dc + data[pairs][0]).digest() - - #=================================================================== - # encode digest using appropriate transpose map - #=================================================================== - return h64.encode_transposed_bytes(dc, transpose_map).decode("ascii") - -#============================================================================= -# handlers -#============================================================================= -_UROUNDS = u("rounds=") -_UDOLLAR = u("$") -_UZERO = u("0") - -class 
_SHA2_Common(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, - uh.GenericHandler): - """class containing common code shared by sha256_crypt & sha512_crypt""" - #=================================================================== - # class attrs - #=================================================================== - # name - set by subclass - setting_kwds = ("salt", "rounds", "implicit_rounds", "salt_size") - # ident - set by subclass - checksum_chars = uh.HASH64_CHARS - # checksum_size - set by subclass - - max_salt_size = 16 - salt_chars = uh.HASH64_CHARS - - min_rounds = 1000 # bounds set by spec - max_rounds = 999999999 # bounds set by spec - rounds_cost = "linear" - - _cdb_use_512 = False # flag for _calc_digest_builtin() - _rounds_prefix = None # ident + _UROUNDS - - #=================================================================== - # methods - #=================================================================== - implicit_rounds = False - - def __init__(self, implicit_rounds=None, **kwds): - super(_SHA2_Common, self).__init__(**kwds) - # if user calls hash() w/ 5000 rounds, default to compact form. - if implicit_rounds is None: - implicit_rounds = (self.use_defaults and self.rounds == 5000) - self.implicit_rounds = implicit_rounds - - def _parse_salt(self, salt): - # required per SHA2-crypt spec -- truncate config salts rather than throwing error - return self._norm_salt(salt, relaxed=self.checksum is None) - - def _parse_rounds(self, rounds): - # required per SHA2-crypt spec -- clip config rounds rather than throwing error - return self._norm_rounds(rounds, relaxed=self.checksum is None) - - @classmethod - def from_string(cls, hash): - # basic format this parses - - # $5$[rounds=$][$] - - # TODO: this *could* use uh.parse_mc3(), except that the rounds - # portion has a slightly different grammar. - - # convert to unicode, check for ident prefix, split on dollar signs. 
- hash = to_unicode(hash, "ascii", "hash") - ident = cls.ident - if not hash.startswith(ident): - raise uh.exc.InvalidHashError(cls) - assert len(ident) == 3 - parts = hash[3:].split(_UDOLLAR) - - # extract rounds value - if parts[0].startswith(_UROUNDS): - assert len(_UROUNDS) == 7 - rounds = parts.pop(0)[7:] - if rounds.startswith(_UZERO) and rounds != _UZERO: - raise uh.exc.ZeroPaddedRoundsError(cls) - rounds = int(rounds) - implicit_rounds = False - else: - rounds = 5000 - implicit_rounds = True - - # rest should be salt and checksum - if len(parts) == 2: - salt, chk = parts - elif len(parts) == 1: - salt = parts[0] - chk = None - else: - raise uh.exc.MalformedHashError(cls) - - # return new object - return cls( - rounds=rounds, - salt=salt, - checksum=chk or None, - implicit_rounds=implicit_rounds, - ) - - def to_string(self): - if self.rounds == 5000 and self.implicit_rounds: - hash = u("%s%s$%s") % (self.ident, self.salt, - self.checksum or u('')) - else: - hash = u("%srounds=%d$%s$%s") % (self.ident, self.rounds, - self.salt, self.checksum or u('')) - return uascii_to_str(hash) - - #=================================================================== - # backends - #=================================================================== - backends = ("os_crypt", "builtin") - - #--------------------------------------------------------------- - # os_crypt backend - #--------------------------------------------------------------- - - #: test hash for OS detection -- provided by subclass - _test_hash = None - - @classmethod - def _load_backend_os_crypt(cls): - if test_crypt(*cls._test_hash): - cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) - return True - else: - return False - - def _calc_checksum_os_crypt(self, secret): - hash = safe_crypt(secret, self.to_string()) - if hash: - # NOTE: avoiding full parsing routine via from_string().checksum, - # and just extracting the bit we need. 
- cs = self.checksum_size - assert hash.startswith(self.ident) and hash[-cs-1] == _UDOLLAR - return hash[-cs:] - else: - # py3's crypt.crypt() can't handle non-utf8 bytes. - # fallback to builtin alg, which is always available. - return self._calc_checksum_builtin(secret) - - #--------------------------------------------------------------- - # builtin backend - #--------------------------------------------------------------- - @classmethod - def _load_backend_builtin(cls): - cls._set_calc_checksum_backend(cls._calc_checksum_builtin) - return True - - def _calc_checksum_builtin(self, secret): - return _raw_sha2_crypt(secret, self.salt, self.rounds, - self._cdb_use_512) - - #=================================================================== - # eoc - #=================================================================== - -class sha256_crypt(_SHA2_Common): - """This class implements the SHA256-Crypt password hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 0-16 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 535000, must be between 1000 and 999999999, inclusive. - - :type implicit_rounds: bool - :param implicit_rounds: - this is an internal option which generally doesn't need to be touched. - - this flag determines whether the hash should omit the rounds parameter - when encoding it to a string; this is only permitted by the spec for rounds=5000, - and the flag is ignored otherwise. the spec requires the two different - encodings be preserved as they are, instead of normalizing them. 
- - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - #=================================================================== - # class attrs - #=================================================================== - name = "sha256_crypt" - ident = u("$5$") - checksum_size = 43 - # NOTE: using 25/75 weighting of builtin & os_crypt backends - default_rounds = 535000 - - #=================================================================== - # backends - #=================================================================== - _test_hash = ("test", "$5$rounds=1000$test$QmQADEXMG8POI5W" - "Dsaeho0P36yK3Tcrgboabng6bkb/") - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# sha 512 crypt -#============================================================================= -class sha512_crypt(_SHA2_Common): - """This class implements the SHA512-Crypt password hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. - If not specified, one will be autogenerated (this is recommended). - If specified, it must be 0-16 characters, drawn from the regexp range ``[./0-9A-Za-z]``. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 656000, must be between 1000 and 999999999, inclusive. 
- - :type implicit_rounds: bool - :param implicit_rounds: - this is an internal option which generally doesn't need to be touched. - - this flag determines whether the hash should omit the rounds parameter - when encoding it to a string; this is only permitted by the spec for rounds=5000, - and the flag is ignored otherwise. the spec requires the two different - encodings be preserved as they are, instead of normalizing them. - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - - #=================================================================== - # class attrs - #=================================================================== - name = "sha512_crypt" - ident = u("$6$") - checksum_size = 86 - _cdb_use_512 = True - # NOTE: using 25/75 weighting of builtin & os_crypt backends - default_rounds = 656000 - - #=================================================================== - # backend - #=================================================================== - _test_hash = ("test", "$6$rounds=1000$test$2M/Lx6Mtobqj" - "Ljobw0Wmo4Q5OFx5nVLJvmgseatA6oMn" - "yWeBdRDx4DU.1H3eGmse6pgsOgDisWBG" - "I5c7TZauS0") - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/sun_md5_crypt.py b/libs_crutch/contrib/passlib/handlers/sun_md5_crypt.py deleted file mode 100644 index 0eeb4e7..0000000 --- 
a/libs_crutch/contrib/passlib/handlers/sun_md5_crypt.py +++ /dev/null @@ -1,363 +0,0 @@ -"""passlib.handlers.sun_md5_crypt - Sun's Md5 Crypt, used on Solaris - -.. warning:: - - This implementation may not reproduce - the original Solaris behavior in some border cases. - See documentation for details. -""" - -#============================================================================= -# imports -#============================================================================= -# core -from hashlib import md5 -import re -import logging; log = logging.getLogger(__name__) -from warnings import warn -# site -# pkg -from passlib.utils import to_unicode -from passlib.utils.binary import h64 -from passlib.utils.compat import byte_elem_value, irange, u, \ - uascii_to_str, unicode, str_to_bascii -import passlib.utils.handlers as uh -# local -__all__ = [ - "sun_md5_crypt", -] - -#============================================================================= -# backend -#============================================================================= -# constant data used by alg - Hamlet act 3 scene 1 + null char -# exact bytes as in http://www.ibiblio.org/pub/docs/books/gutenberg/etext98/2ws2610.txt -# from Project Gutenberg. - -MAGIC_HAMLET = ( - b"To be, or not to be,--that is the question:--\n" - b"Whether 'tis nobler in the mind to suffer\n" - b"The slings and arrows of outrageous fortune\n" - b"Or to take arms against a sea of troubles,\n" - b"And by opposing end them?--To die,--to sleep,--\n" - b"No more; and by a sleep to say we end\n" - b"The heartache, and the thousand natural shocks\n" - b"That flesh is heir to,--'tis a consummation\n" - b"Devoutly to be wish'd. To die,--to sleep;--\n" - b"To sleep! 
perchance to dream:--ay, there's the rub;\n" - b"For in that sleep of death what dreams may come,\n" - b"When we have shuffled off this mortal coil,\n" - b"Must give us pause: there's the respect\n" - b"That makes calamity of so long life;\n" - b"For who would bear the whips and scorns of time,\n" - b"The oppressor's wrong, the proud man's contumely,\n" - b"The pangs of despis'd love, the law's delay,\n" - b"The insolence of office, and the spurns\n" - b"That patient merit of the unworthy takes,\n" - b"When he himself might his quietus make\n" - b"With a bare bodkin? who would these fardels bear,\n" - b"To grunt and sweat under a weary life,\n" - b"But that the dread of something after death,--\n" - b"The undiscover'd country, from whose bourn\n" - b"No traveller returns,--puzzles the will,\n" - b"And makes us rather bear those ills we have\n" - b"Than fly to others that we know not of?\n" - b"Thus conscience does make cowards of us all;\n" - b"And thus the native hue of resolution\n" - b"Is sicklied o'er with the pale cast of thought;\n" - b"And enterprises of great pith and moment,\n" - b"With this regard, their currents turn awry,\n" - b"And lose the name of action.--Soft you now!\n" - b"The fair Ophelia!--Nymph, in thy orisons\n" - b"Be all my sins remember'd.\n\x00" #<- apparently null at end of C string is included (test vector won't pass otherwise) -) - -# NOTE: these sequences are pre-calculated iteration ranges used by X & Y loops w/in rounds function below -xr = irange(7) -_XY_ROUNDS = [ - tuple((i,i,i+3) for i in xr), # xrounds 0 - tuple((i,i+1,i+4) for i in xr), # xrounds 1 - tuple((i,i+8,(i+11)&15) for i in xr), # yrounds 0 - tuple((i,(i+9)&15, (i+12)&15) for i in xr), # yrounds 1 -] -del xr - -def raw_sun_md5_crypt(secret, rounds, salt): - """given secret & salt, return encoded sun-md5-crypt checksum""" - global MAGIC_HAMLET - assert isinstance(secret, bytes) - assert isinstance(salt, bytes) - - # validate rounds - if rounds <= 0: - rounds = 0 - 
real_rounds = 4096 + rounds - # NOTE: spec seems to imply max 'rounds' is 2**32-1 - - # generate initial digest to start off round 0. - # NOTE: algorithm 'salt' includes full config string w/ trailing "$" - result = md5(secret + salt).digest() - assert len(result) == 16 - - # NOTE: many things in this function have been inlined (to speed up the loop - # as much as possible), to the point that this code barely resembles - # the algorithm as described in the docs. in particular: - # - # * all accesses to a given bit have been inlined using the formula - # rbitval(bit) = (rval((bit>>3) & 15) >> (bit & 7)) & 1 - # - # * the calculation of coinflip value R has been inlined - # - # * the conditional division of coinflip value V has been inlined as - # a shift right of 0 or 1. - # - # * the i, i+3, etc iterations are precalculated in lists. - # - # * the round-based conditional division of x & y is now performed - # by choosing an appropriate precalculated list, so that it only - # calculates the 7 bits which will actually be used. 
- # - X_ROUNDS_0, X_ROUNDS_1, Y_ROUNDS_0, Y_ROUNDS_1 = _XY_ROUNDS - - # NOTE: % appears to be *slightly* slower than &, so we prefer & if possible - - round = 0 - while round < real_rounds: - # convert last result byte string to list of byte-ints for easy access - rval = [ byte_elem_value(c) for c in result ].__getitem__ - - # build up X bit by bit - x = 0 - xrounds = X_ROUNDS_1 if (rval((round>>3) & 15)>>(round & 7)) & 1 else X_ROUNDS_0 - for i, ia, ib in xrounds: - a = rval(ia) - b = rval(ib) - v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1) - x |= ((rval((v>>3)&15)>>(v&7))&1) << i - - # build up Y bit by bit - y = 0 - yrounds = Y_ROUNDS_1 if (rval(((round+64)>>3) & 15)>>(round & 7)) & 1 else Y_ROUNDS_0 - for i, ia, ib in yrounds: - a = rval(ia) - b = rval(ib) - v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1) - y |= ((rval((v>>3)&15)>>(v&7))&1) << i - - # extract x'th and y'th bit, xoring them together to yeild "coin flip" - coin = ((rval(x>>3) >> (x&7)) ^ (rval(y>>3) >> (y&7))) & 1 - - # construct hash for this round - h = md5(result) - if coin: - h.update(MAGIC_HAMLET) - h.update(unicode(round).encode("ascii")) - result = h.digest() - - round += 1 - - # encode output - return h64.encode_transposed_bytes(result, _chk_offsets) - -# NOTE: same offsets as md5_crypt -_chk_offsets = ( - 12,6,0, - 13,7,1, - 14,8,2, - 15,9,3, - 5,10,4, - 11, -) - -#============================================================================= -# handler -#============================================================================= -class sun_md5_crypt(uh.HasRounds, uh.HasSalt, uh.GenericHandler): - """This class implements the Sun-MD5-Crypt password hash, and follows the :ref:`password-hash-api`. - - It supports a variable-length salt, and a variable number of rounds. - - The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: - - :type salt: str - :param salt: - Optional salt string. 
- If not specified, a salt will be autogenerated (this is recommended). - If specified, it must be drawn from the regexp range ``[./0-9A-Za-z]``. - - :type salt_size: int - :param salt_size: - If no salt is specified, this parameter can be used to specify - the size (in characters) of the autogenerated salt. - It currently defaults to 8. - - :type rounds: int - :param rounds: - Optional number of rounds to use. - Defaults to 34000, must be between 0 and 4294963199, inclusive. - - :type bare_salt: bool - :param bare_salt: - Optional flag used to enable an alternate salt digest behavior - used by some hash strings in this scheme. - This flag can be ignored by most users. - Defaults to ``False``. - (see :ref:`smc-bare-salt` for details). - - :type relaxed: bool - :param relaxed: - By default, providing an invalid value for one of the other - keywords will result in a :exc:`ValueError`. If ``relaxed=True``, - and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` - will be issued instead. Correctable errors include ``rounds`` - that are too small or too large, and ``salt`` strings that are too long. - - .. versionadded:: 1.6 - """ - #=================================================================== - # class attrs - #=================================================================== - name = "sun_md5_crypt" - setting_kwds = ("salt", "rounds", "bare_salt", "salt_size") - checksum_chars = uh.HASH64_CHARS - checksum_size = 22 - - # NOTE: docs say max password length is 255. - # release 9u2 - - # NOTE: not sure if original crypt has a salt size limit, - # all instances that have been seen use 8 chars. - default_salt_size = 8 - max_salt_size = None - salt_chars = uh.HASH64_CHARS - - default_rounds = 34000 # current passlib default - min_rounds = 0 - max_rounds = 4294963199 ##2**32-1-4096 - # XXX: ^ not sure what it does if past this bound... does 32 int roll over? 
- rounds_cost = "linear" - - ident_values = (u("$md5$"), u("$md5,")) - - #=================================================================== - # instance attrs - #=================================================================== - bare_salt = False # flag to indicate legacy hashes that lack "$$" suffix - - #=================================================================== - # constructor - #=================================================================== - def __init__(self, bare_salt=False, **kwds): - self.bare_salt = bare_salt - super(sun_md5_crypt, self).__init__(**kwds) - - #=================================================================== - # internal helpers - #=================================================================== - @classmethod - def identify(cls, hash): - hash = uh.to_unicode_for_identify(hash) - return hash.startswith(cls.ident_values) - - @classmethod - def from_string(cls, hash): - hash = to_unicode(hash, "ascii", "hash") - - # - # detect if hash specifies rounds value. - # if so, parse and validate it. - # by end, set 'rounds' to int value, and 'tail' containing salt+chk - # - if hash.startswith(u("$md5$")): - rounds = 0 - salt_idx = 5 - elif hash.startswith(u("$md5,rounds=")): - idx = hash.find(u("$"), 12) - if idx == -1: - raise uh.exc.MalformedHashError(cls, "unexpected end of rounds") - rstr = hash[12:idx] - try: - rounds = int(rstr) - except ValueError: - raise uh.exc.MalformedHashError(cls, "bad rounds") - if rstr != unicode(rounds): - raise uh.exc.ZeroPaddedRoundsError(cls) - if rounds == 0: - # NOTE: not sure if this is forbidden by spec or not; - # but allowing it would complicate things, - # and it should never occur anyways. - raise uh.exc.MalformedHashError(cls, "explicit zero rounds") - salt_idx = idx+1 - else: - raise uh.exc.InvalidHashError(cls) - - # - # salt/checksum separation is kinda weird, - # to deal cleanly with some backward-compatible workarounds - # implemented by original implementation. 
- # - chk_idx = hash.rfind(u("$"), salt_idx) - if chk_idx == -1: - # ''-config for $-hash - salt = hash[salt_idx:] - chk = None - bare_salt = True - elif chk_idx == len(hash)-1: - if chk_idx > salt_idx and hash[-2] == u("$"): - raise uh.exc.MalformedHashError(cls, "too many '$' separators") - # $-config for $$-hash - salt = hash[salt_idx:-1] - chk = None - bare_salt = False - elif chk_idx > 0 and hash[chk_idx-1] == u("$"): - # $$-hash - salt = hash[salt_idx:chk_idx-1] - chk = hash[chk_idx+1:] - bare_salt = False - else: - # $-hash - salt = hash[salt_idx:chk_idx] - chk = hash[chk_idx+1:] - bare_salt = True - - return cls( - rounds=rounds, - salt=salt, - checksum=chk, - bare_salt=bare_salt, - ) - - def to_string(self, _withchk=True): - ss = u('') if self.bare_salt else u('$') - rounds = self.rounds - if rounds > 0: - hash = u("$md5,rounds=%d$%s%s") % (rounds, self.salt, ss) - else: - hash = u("$md5$%s%s") % (self.salt, ss) - if _withchk: - chk = self.checksum - hash = u("%s$%s") % (hash, chk) - return uascii_to_str(hash) - - #=================================================================== - # primary interface - #=================================================================== - # TODO: if we're on solaris, check for native crypt() support. - # this will require extra testing, to make sure native crypt - # actually behaves correctly. of particular importance: - # when using ""-config, make sure to append "$x" to string. 
- - def _calc_checksum(self, secret): - # NOTE: no reference for how sun_md5_crypt handles unicode - if isinstance(secret, unicode): - secret = secret.encode("utf-8") - config = str_to_bascii(self.to_string(_withchk=False)) - return raw_sun_md5_crypt(secret, self.rounds, config).decode("ascii") - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/handlers/windows.py b/libs_crutch/contrib/passlib/handlers/windows.py deleted file mode 100644 index e17beba..0000000 --- a/libs_crutch/contrib/passlib/handlers/windows.py +++ /dev/null @@ -1,334 +0,0 @@ -"""passlib.handlers.nthash - Microsoft Windows -related hashes""" -#============================================================================= -# imports -#============================================================================= -# core -from binascii import hexlify -import logging; log = logging.getLogger(__name__) -from warnings import warn -# site -# pkg -from passlib.utils import to_unicode, right_pad_string -from passlib.utils.compat import unicode -from passlib.crypto.digest import lookup_hash -md4 = lookup_hash("md4").const -import passlib.utils.handlers as uh -# local -__all__ = [ - "lmhash", - "nthash", - "bsd_nthash", - "msdcc", - "msdcc2", -] - -#============================================================================= -# lanman hash -#============================================================================= -class lmhash(uh.TruncateMixin, uh.HasEncodingContext, uh.StaticHandler): - """This class implements the Lan Manager Password hash, and follows the :ref:`password-hash-api`. - - It has no salt and a single fixed round. 
- - The :meth:`~passlib.ifc.PasswordHash.using` method accepts a single - optional keyword: - - :param bool truncate_error: - By default, this will silently truncate passwords larger than 14 bytes. - Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` - to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. - - .. versionadded:: 1.7 - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.verify` methods accept a single - optional keyword: - - :type encoding: str - :param encoding: - - This specifies what character encoding LMHASH should use when - calculating digest. It defaults to ``cp437``, the most - common encoding encountered. - - Note that while this class outputs digests in lower-case hexadecimal, - it will accept upper-case as well. - """ - #=================================================================== - # class attrs - #=================================================================== - - #-------------------- - # PasswordHash - #-------------------- - name = "lmhash" - setting_kwds = ("truncate_error",) - - #-------------------- - # GenericHandler - #-------------------- - checksum_chars = uh.HEX_CHARS - checksum_size = 32 - - #-------------------- - # TruncateMixin - #-------------------- - truncate_size = 14 - - #-------------------- - # custom - #-------------------- - default_encoding = "cp437" - - #=================================================================== - # methods - #=================================================================== - @classmethod - def _norm_hash(cls, hash): - return hash.lower() - - def _calc_checksum(self, secret): - # check for truncation (during .hash() calls only) - if self.use_defaults: - self._check_truncate_policy(secret) - - return hexlify(self.raw(secret, self.encoding)).decode("ascii") - - # magic constant used by LMHASH - _magic = b"KGS!@#$%" - - @classmethod - def raw(cls, secret, encoding=None): - """encode password using LANMAN hash 
algorithm. - - :type secret: unicode or utf-8 encoded bytes - :arg secret: secret to hash - :type encoding: str - :arg encoding: - optional encoding to use for unicode inputs. - this defaults to ``cp437``, which is the - common case for most situations. - - :returns: returns string of raw bytes - """ - if not encoding: - encoding = cls.default_encoding - # some nice empircal data re: different encodings is at... - # http://www.openwall.com/lists/john-dev/2011/08/01/2 - # http://www.freerainbowtables.com/phpBB3/viewtopic.php?t=387&p=12163 - from passlib.crypto.des import des_encrypt_block - MAGIC = cls._magic - if isinstance(secret, unicode): - # perform uppercasing while we're still unicode, - # to give a better shot at getting non-ascii chars right. - # (though some codepages do NOT upper-case the same as unicode). - secret = secret.upper().encode(encoding) - elif isinstance(secret, bytes): - # FIXME: just trusting ascii upper will work? - # and if not, how to do codepage specific case conversion? - # we could decode first using , - # but *that* might not always be right. - secret = secret.upper() - else: - raise TypeError("secret must be unicode or bytes") - secret = right_pad_string(secret, 14) - return des_encrypt_block(secret[0:7], MAGIC) + \ - des_encrypt_block(secret[7:14], MAGIC) - - #=================================================================== - # eoc - #=================================================================== - -#============================================================================= -# ntlm hash -#============================================================================= -class nthash(uh.StaticHandler): - """This class implements the NT Password hash, and follows the :ref:`password-hash-api`. - - It has no salt and a single fixed round. - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. 
- - Note that while this class outputs lower-case hexadecimal digests, - it will accept upper-case digests as well. - """ - #=================================================================== - # class attrs - #=================================================================== - name = "nthash" - checksum_chars = uh.HEX_CHARS - checksum_size = 32 - - #=================================================================== - # methods - #=================================================================== - @classmethod - def _norm_hash(cls, hash): - return hash.lower() - - def _calc_checksum(self, secret): - return hexlify(self.raw(secret)).decode("ascii") - - @classmethod - def raw(cls, secret): - """encode password using MD4-based NTHASH algorithm - - :arg secret: secret as unicode or utf-8 encoded bytes - - :returns: returns string of raw bytes - """ - secret = to_unicode(secret, "utf-8", param="secret") - # XXX: found refs that say only first 128 chars are used. - return md4(secret.encode("utf-16-le")).digest() - - @classmethod - def raw_nthash(cls, secret, hex=False): - warn("nthash.raw_nthash() is deprecated, and will be removed " - "in Passlib 1.8, please use nthash.raw() instead", - DeprecationWarning) - ret = nthash.raw(secret) - return hexlify(ret).decode("ascii") if hex else ret - - #=================================================================== - # eoc - #=================================================================== - -bsd_nthash = uh.PrefixWrapper("bsd_nthash", nthash, prefix="$3$$", ident="$3$$", - doc="""The class support FreeBSD's representation of NTHASH - (which is compatible with the :ref:`modular-crypt-format`), - and follows the :ref:`password-hash-api`. - - It has no salt and a single fixed round. - - The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. 
- """) - -##class ntlm_pair(object): -## "combined lmhash & nthash" -## name = "ntlm_pair" -## setting_kwds = () -## _hash_regex = re.compile(u"^(?P[0-9a-f]{32}):(?P[0-9][a-f]{32})$", -## re.I) -## -## @classmethod -## def identify(cls, hash): -## hash = to_unicode(hash, "latin-1", "hash") -## return len(hash) == 65 and cls._hash_regex.match(hash) is not None -## -## @classmethod -## def hash(cls, secret, config=None): -## if config is not None and not cls.identify(config): -## raise uh.exc.InvalidHashError(cls) -## return lmhash.hash(secret) + ":" + nthash.hash(secret) -## -## @classmethod -## def verify(cls, secret, hash): -## hash = to_unicode(hash, "ascii", "hash") -## m = cls._hash_regex.match(hash) -## if not m: -## raise uh.exc.InvalidHashError(cls) -## lm, nt = m.group("lm", "nt") -## # NOTE: verify against both in case encoding issue -## # causes one not to match. -## return lmhash.verify(secret, lm) or nthash.verify(secret, nt) - -#============================================================================= -# msdcc v1 -#============================================================================= -class msdcc(uh.HasUserContext, uh.StaticHandler): - """This class implements Microsoft's Domain Cached Credentials password hash, - and follows the :ref:`password-hash-api`. - - It has a fixed number of rounds, and uses the associated - username as the salt. - - The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods - have the following optional keywords: - - :type user: str - :param user: - String containing name of user account this password is associated with. - This is required to properly calculate the hash. - - This keyword is case-insensitive, and should contain just the username - (e.g. ``Administrator``, not ``SOMEDOMAIN\\Administrator``). - - Note that while this class outputs lower-case hexadecimal digests, - it will accept upper-case digests as well. 
- """ - name = "msdcc" - checksum_chars = uh.HEX_CHARS - checksum_size = 32 - - @classmethod - def _norm_hash(cls, hash): - return hash.lower() - - def _calc_checksum(self, secret): - return hexlify(self.raw(secret, self.user)).decode("ascii") - - @classmethod - def raw(cls, secret, user): - """encode password using mscash v1 algorithm - - :arg secret: secret as unicode or utf-8 encoded bytes - :arg user: username to use as salt - - :returns: returns string of raw bytes - """ - secret = to_unicode(secret, "utf-8", param="secret").encode("utf-16-le") - user = to_unicode(user, "utf-8", param="user").lower().encode("utf-16-le") - return md4(md4(secret).digest() + user).digest() - -#============================================================================= -# msdcc2 aka mscash2 -#============================================================================= -class msdcc2(uh.HasUserContext, uh.StaticHandler): - """This class implements version 2 of Microsoft's Domain Cached Credentials - password hash, and follows the :ref:`password-hash-api`. - - It has a fixed number of rounds, and uses the associated - username as the salt. - - The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods - have the following extra keyword: - - :type user: str - :param user: - String containing name of user account this password is associated with. - This is required to properly calculate the hash. - - This keyword is case-insensitive, and should contain just the username - (e.g. ``Administrator``, not ``SOMEDOMAIN\\Administrator``). 
- """ - name = "msdcc2" - checksum_chars = uh.HEX_CHARS - checksum_size = 32 - - @classmethod - def _norm_hash(cls, hash): - return hash.lower() - - def _calc_checksum(self, secret): - return hexlify(self.raw(secret, self.user)).decode("ascii") - - @classmethod - def raw(cls, secret, user): - """encode password using msdcc v2 algorithm - - :type secret: unicode or utf-8 bytes - :arg secret: secret - - :type user: str - :arg user: username to use as salt - - :returns: returns string of raw bytes - """ - from passlib.crypto.digest import pbkdf2_hmac - secret = to_unicode(secret, "utf-8", param="secret").encode("utf-16-le") - user = to_unicode(user, "utf-8", param="user").lower().encode("utf-16-le") - tmp = md4(md4(secret).digest() + user).digest() - return pbkdf2_hmac("sha1", tmp, user, 10240, 16) - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/hash.py b/libs_crutch/contrib/passlib/hash.py deleted file mode 100644 index 9b72448..0000000 --- a/libs_crutch/contrib/passlib/hash.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -passlib.hash - proxy object mapping hash scheme names -> handlers - -================== -***** NOTICE ***** -================== - -This module does not actually contain any hashes. This file -is a stub that replaces itself with a proxy object. - -This proxy object (passlib.registry._PasslibRegistryProxy) -handles lazy-loading hashes as they are requested. - -The actual implementation of the various hashes is store elsewhere, -mainly in the submodules of the ``passlib.handlers`` subpackage. 
-""" - -#============================================================================= -# import proxy object and replace this module -#============================================================================= - -# XXX: if any platform has problem w/ lazy modules, could support 'non-lazy' -# version which just imports all schemes known to list_crypt_handlers() - -from passlib.registry import _proxy -import sys -sys.modules[__name__] = _proxy - -#============================================================================= -# HACK: the following bit of code is unreachable, but it's presence seems to -# help make autocomplete work for certain IDEs such as PyCharm. -# this list is automatically regenerated using $SOURCE/admin/regen.py -#============================================================================= - -#---------------------------------------------------- -# begin autocomplete hack (autogenerated 2016-11-10) -#---------------------------------------------------- -if False: - from passlib.handlers.argon2 import argon2 - from passlib.handlers.bcrypt import bcrypt, bcrypt_sha256 - from passlib.handlers.cisco import cisco_asa, cisco_pix, cisco_type7 - from passlib.handlers.des_crypt import bigcrypt, bsdi_crypt, crypt16, des_crypt - from passlib.handlers.digests import hex_md4, hex_md5, hex_sha1, hex_sha256, hex_sha512, htdigest - from passlib.handlers.django import django_bcrypt, django_bcrypt_sha256, django_des_crypt, django_disabled, django_pbkdf2_sha1, django_pbkdf2_sha256, django_salted_md5, django_salted_sha1 - from passlib.handlers.fshp import fshp - from passlib.handlers.ldap_digests import ldap_bcrypt, ldap_bsdi_crypt, ldap_des_crypt, ldap_md5, ldap_md5_crypt, ldap_plaintext, ldap_salted_md5, ldap_salted_sha1, ldap_sha1, ldap_sha1_crypt, ldap_sha256_crypt, ldap_sha512_crypt - from passlib.handlers.md5_crypt import apr_md5_crypt, md5_crypt - from passlib.handlers.misc import plaintext, unix_disabled, unix_fallback - from passlib.handlers.mssql 
import mssql2000, mssql2005 - from passlib.handlers.mysql import mysql323, mysql41 - from passlib.handlers.oracle import oracle10, oracle11 - from passlib.handlers.pbkdf2 import atlassian_pbkdf2_sha1, cta_pbkdf2_sha1, dlitz_pbkdf2_sha1, grub_pbkdf2_sha512, ldap_pbkdf2_sha1, ldap_pbkdf2_sha256, ldap_pbkdf2_sha512, pbkdf2_sha1, pbkdf2_sha256, pbkdf2_sha512 - from passlib.handlers.phpass import phpass - from passlib.handlers.postgres import postgres_md5 - from passlib.handlers.roundup import ldap_hex_md5, ldap_hex_sha1, roundup_plaintext - from passlib.handlers.scram import scram - from passlib.handlers.scrypt import scrypt - from passlib.handlers.sha1_crypt import sha1_crypt - from passlib.handlers.sha2_crypt import sha256_crypt, sha512_crypt - from passlib.handlers.sun_md5_crypt import sun_md5_crypt - from passlib.handlers.windows import bsd_nthash, lmhash, msdcc, msdcc2, nthash -#---------------------------------------------------- -# end autocomplete hack -#---------------------------------------------------- - -#============================================================================= -# eoc -#============================================================================= diff --git a/libs_crutch/contrib/passlib/hosts.py b/libs_crutch/contrib/passlib/hosts.py deleted file mode 100644 index 1f137a2..0000000 --- a/libs_crutch/contrib/passlib/hosts.py +++ /dev/null @@ -1,106 +0,0 @@ -"""passlib.hosts""" -#============================================================================= -# imports -#============================================================================= -# core -from warnings import warn -# pkg -from passlib.context import LazyCryptContext -from passlib.exc import PasslibRuntimeWarning -from passlib import registry -from passlib.utils import has_crypt, unix_crypt_schemes -# local -__all__ = [ - "linux_context", "linux2_context", - "openbsd_context", - "netbsd_context", - "freebsd_context", - "host_context", -] - 
-#============================================================================= -# linux support -#============================================================================= - -# known platform names - linux2 - -linux_context = linux2_context = LazyCryptContext( - schemes = [ "sha512_crypt", "sha256_crypt", "md5_crypt", - "des_crypt", "unix_disabled" ], - deprecated = [ "des_crypt" ], - ) - -#============================================================================= -# bsd support -#============================================================================= - -# known platform names - -# freebsd2 -# freebsd3 -# freebsd4 -# freebsd5 -# freebsd6 -# freebsd7 -# -# netbsd1 - -# referencing source via -http://fxr.googlebit.com -# freebsd 6,7,8 - des, md5, bcrypt, bsd_nthash -# netbsd - des, ext, md5, bcrypt, sha1 -# openbsd - des, ext, md5, bcrypt - -freebsd_context = LazyCryptContext(["bcrypt", "md5_crypt", "bsd_nthash", - "des_crypt", "unix_disabled"]) - -openbsd_context = LazyCryptContext(["bcrypt", "md5_crypt", "bsdi_crypt", - "des_crypt", "unix_disabled"]) - -netbsd_context = LazyCryptContext(["bcrypt", "sha1_crypt", "md5_crypt", - "bsdi_crypt", "des_crypt", "unix_disabled"]) - -# XXX: include darwin in this list? it's got a BSD crypt variant, -# but that's not what it uses for user passwords. - -#============================================================================= -# current host -#============================================================================= -if registry.os_crypt_present: - # NOTE: this is basically mimicing the output of os crypt(), - # except that it uses passlib's (usually stronger) defaults settings, - # and can be inspected and used much more flexibly. 
- - def _iter_os_crypt_schemes(): - """helper which iterates over supported os_crypt schemes""" - out = registry.get_supported_os_crypt_schemes() - if out: - # only offer disabled handler if there's another scheme in front, - # as this can't actually hash any passwords - out += ("unix_disabled",) - return out - - host_context = LazyCryptContext(_iter_os_crypt_schemes()) - -#============================================================================= -# other platforms -#============================================================================= - -# known platform strings - -# aix3 -# aix4 -# atheos -# beos5 -# darwin -# generic -# hp-ux11 -# irix5 -# irix6 -# mac -# next3 -# os2emx -# riscos -# sunos5 -# unixware7 - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/ifc.py b/libs_crutch/contrib/passlib/ifc.py deleted file mode 100644 index 1a1aef2..0000000 --- a/libs_crutch/contrib/passlib/ifc.py +++ /dev/null @@ -1,353 +0,0 @@ -"""passlib.ifc - abstract interfaces used by Passlib""" -#============================================================================= -# imports -#============================================================================= -# core -import logging; log = logging.getLogger(__name__) -import sys -# site -# pkg -from passlib.utils.decor import deprecated_method -# local -__all__ = [ - "PasswordHash", -] - -#============================================================================= -# 2/3 compatibility helpers -#============================================================================= -def recreate_with_metaclass(meta): - """class decorator that re-creates class using metaclass""" - def builder(cls): - if meta is type(cls): - return cls - return meta(cls.__name__, cls.__bases__, cls.__dict__.copy()) - return builder - 
-#============================================================================= -# PasswordHash interface -#============================================================================= -from abc import ABCMeta, abstractmethod, abstractproperty - -# TODO: make this actually use abstractproperty(), -# now that we dropped py25, 'abc' is always available. - -# XXX: rename to PasswordHasher? - -@recreate_with_metaclass(ABCMeta) -class PasswordHash(object): - """This class describes an abstract interface which all password hashes - in Passlib adhere to. Under Python 2.6 and up, this is an actual - Abstract Base Class built using the :mod:`!abc` module. - - See the Passlib docs for full documentation. - """ - #=================================================================== - # class attributes - #=================================================================== - - #--------------------------------------------------------------- - # general information - #--------------------------------------------------------------- - ##name - ##setting_kwds - ##context_kwds - - #: flag which indicates this hasher matches a "disabled" hash - #: (e.g. unix_disabled, or django_disabled); and doesn't actually - #: depend on the provided password. - is_disabled = False - - #: Should be None, or a positive integer indicating hash - #: doesn't support secrets larger than this value. - #: Whether hash throws error or silently truncates secret - #: depends on .truncate_error and .truncate_verify_reject flags below. - #: NOTE: calls may treat as boolean, since value will never be 0. - #: .. versionadded:: 1.7 - #: .. TODO: passlib 1.8: deprecate/rename this attr to "max_secret_size"? - truncate_size = None - - # NOTE: these next two default to the optimistic "ideal", - # most hashes in passlib have to default to False - # for backward compat and/or expected behavior with existing hashes. 
- - #: If True, .hash() should throw a :exc:`~passlib.exc.PasswordSizeError` for - #: any secrets larger than .truncate_size. Many hashers default to False - #: for historical / compatibility purposes, indicating they will silently - #: truncate instead. All such hashers SHOULD support changing - #: the policy via ``.using(truncate_error=True)``. - #: .. versionadded:: 1.7 - #: .. TODO: passlib 1.8: deprecate/rename this attr to "truncate_hash_error"? - truncate_error = True - - #: If True, .verify() should reject secrets larger than max_password_size. - #: Many hashers default to False for historical / compatibility purposes, - #: indicating they will match on the truncated portion instead. - #: .. versionadded:: 1.7.1 - truncate_verify_reject = True - - #--------------------------------------------------------------- - # salt information -- if 'salt' in setting_kwds - #--------------------------------------------------------------- - ##min_salt_size - ##max_salt_size - ##default_salt_size - ##salt_chars - ##default_salt_chars - - #--------------------------------------------------------------- - # rounds information -- if 'rounds' in setting_kwds - #--------------------------------------------------------------- - ##min_rounds - ##max_rounds - ##default_rounds - ##rounds_cost - - #--------------------------------------------------------------- - # encoding info -- if 'encoding' in context_kwds - #--------------------------------------------------------------- - ##default_encoding - - #=================================================================== - # primary methods - #=================================================================== - @classmethod - @abstractmethod - def hash(cls, secret, # * - **setting_and_context_kwds): # pragma: no cover -- abstract method - r""" - Hash secret, returning result. - Should handle generating salt, etc, and should return string - containing identifier, salt & other configuration, as well as digest. 
- - :param \*\*settings_kwds: - - Pass in settings to customize configuration of resulting hash. - - .. deprecated:: 1.7 - - Starting with Passlib 1.7, callers should no longer pass settings keywords - (e.g. ``rounds`` or ``salt`` directly to :meth:`!hash`); should use - ``.using(**settings).hash(secret)`` construction instead. - - Support will be removed in Passlib 2.0. - - :param \*\*context_kwds: - - Specific algorithms may require context-specific information (such as the user login). - """ - # FIXME: need stub for classes that define .encrypt() instead ... - # this should call .encrypt(), and check for recursion back to here. - raise NotImplementedError("must be implemented by subclass") - - @deprecated_method(deprecated="1.7", removed="2.0", replacement=".hash()") - @classmethod - def encrypt(cls, *args, **kwds): - """ - Legacy alias for :meth:`hash`. - - .. deprecated:: 1.7 - This method was renamed to :meth:`!hash` in version 1.7. - This alias will be removed in version 2.0, and should only - be used for compatibility with Passlib 1.3 - 1.6. - """ - return cls.hash(*args, **kwds) - - # XXX: could provide default implementation which hands value to - # hash(), and then does constant-time comparision on the result - # (after making both are same string type) - @classmethod - @abstractmethod - def verify(cls, secret, hash, **context_kwds): # pragma: no cover -- abstract method - """verify secret against hash, returns True/False""" - raise NotImplementedError("must be implemented by subclass") - - #=================================================================== - # configuration - #=================================================================== - @classmethod - @abstractmethod - def using(cls, relaxed=False, **kwds): - """ - Return another hasher object (typically a subclass of the current one), - which integrates the configuration options specified by ``kwds``. - This should *always* return a new object, even if no configuration options are changed. 
- - .. todo:: - - document which options are accepted. - - :returns: - typically returns a subclass for most hasher implementations. - - .. todo:: - - add this method to main documentation. - """ - raise NotImplementedError("must be implemented by subclass") - - #=================================================================== - # migration - #=================================================================== - @classmethod - def needs_update(cls, hash, secret=None): - """ - check if hash's configuration is outside desired bounds, - or contains some other internal option which requires - updating the password hash. - - :param hash: - hash string to examine - - :param secret: - optional secret known to have verified against the provided hash. - (this is used by some hashes to detect legacy algorithm mistakes). - - :return: - whether secret needs re-hashing. - - .. versionadded:: 1.7 - """ - # by default, always report that we don't need update - return False - - #=================================================================== - # additional methods - #=================================================================== - @classmethod - @abstractmethod - def identify(cls, hash): # pragma: no cover -- abstract method - """check if hash belongs to this scheme, returns True/False""" - raise NotImplementedError("must be implemented by subclass") - - @deprecated_method(deprecated="1.7", removed="2.0") - @classmethod - def genconfig(cls, **setting_kwds): # pragma: no cover -- abstract method - """ - compile settings into a configuration string for genhash() - - .. deprecated:: 1.7 - - As of 1.7, this method is deprecated, and slated for complete removal in Passlib 2.0. - - For all known real-world uses, hashing a constant string - should provide equivalent functionality. - - This deprecation may be reversed if a use-case presents itself in the mean time. - """ - # NOTE: this fallback runs full hash alg, w/ whatever cost param is passed along. 
- # implementations (esp ones w/ variable cost) will want to subclass this - # with a constant-time implementation that just renders a config string. - if cls.context_kwds: - raise NotImplementedError("must be implemented by subclass") - return cls.using(**setting_kwds).hash("") - - @deprecated_method(deprecated="1.7", removed="2.0") - @classmethod - def genhash(cls, secret, config, **context): - """ - generated hash for secret, using settings from config/hash string - - .. deprecated:: 1.7 - - As of 1.7, this method is deprecated, and slated for complete removal in Passlib 2.0. - - This deprecation may be reversed if a use-case presents itself in the mean time. - """ - # XXX: if hashes reliably offered a .parse() method, could make a fallback for this. - raise NotImplementedError("must be implemented by subclass") - - #=================================================================== - # undocumented methods / attributes - #=================================================================== - # the following entry points are used internally by passlib, - # and aren't documented as part of the exposed interface. - # they are subject to change between releases, - # but are documented here so there's a list of them *somewhere*. - - #--------------------------------------------------------------- - # extra metdata - #--------------------------------------------------------------- - - #: this attribute shouldn't be used by hashers themselves, - #: it's reserved for the CryptContext to track which hashers are deprecated. - #: Note the context will only set this on objects it owns (and generated by .using()), - #: and WONT set it on global objects. - #: [added in 1.7] - #: TODO: document this, or at least the use of testing for - #: 'CryptContext().handler().deprecated' - deprecated = False - - #: optionally present if hasher corresponds to format built into Django. - #: this attribute (if not None) should be the Django 'algorithm' name. 
- #: also indicates to passlib.ext.django that (when installed in django), - #: django's native hasher should be used in preference to this one. - ## django_name - - #--------------------------------------------------------------- - # checksum information - defined for many hashes - #--------------------------------------------------------------- - ## checksum_chars - ## checksum_size - - #--------------------------------------------------------------- - # experimental methods - #--------------------------------------------------------------- - - ##@classmethod - ##def normhash(cls, hash): - ## """helper to clean up non-canonic instances of hash. - ## currently only provided by bcrypt() to fix an historical passlib issue. - ## """ - - # experimental helper to parse hash into components. - ##@classmethod - ##def parsehash(cls, hash, checksum=True, sanitize=False): - ## """helper to parse hash into components, returns dict""" - - # experiment helper to estimate bitsize of different hashes, - # implement for GenericHandler, but may be currently be off for some hashes. - # want to expand this into a way to programmatically compare - # "strengths" of different hashes and hash algorithms. - # still needs to have some factor for estimate relative cost per round, - # ala in the style of the scrypt whitepaper. - ##@classmethod - ##def bitsize(cls, **kwds): - ## """returns dict mapping component -> bits contributed. - ## components currently include checksum, salt, rounds. - ## """ - - #=================================================================== - # eoc - #=================================================================== - -class DisabledHash(PasswordHash): - """ - extended disabled-hash methods; only need be present if .disabled = True - """ - - is_disabled = True - - @classmethod - def disable(cls, hash=None): - """ - return string representing a 'disabled' hash; - optionally including previously enabled hash - (this is up to the individual scheme). 
- """ - # default behavior: ignore original hash, return standalone marker - return cls.hash("") - - @classmethod - def enable(cls, hash): - """ - given a disabled-hash string, - extract previously-enabled hash if one is present, - otherwise raises ValueError - """ - # default behavior: no way to restore original hash - raise ValueError("cannot restore original hash") - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/pwd.py b/libs_crutch/contrib/passlib/pwd.py deleted file mode 100644 index 12d6ecb..0000000 --- a/libs_crutch/contrib/passlib/pwd.py +++ /dev/null @@ -1,809 +0,0 @@ -"""passlib.pwd -- password generation helpers""" -#============================================================================= -# imports -#============================================================================= -from __future__ import absolute_import, division, print_function, unicode_literals -# core -import codecs -from collections import defaultdict -try: - from collections.abc import MutableMapping -except ImportError: - # py2 compat - from collections import MutableMapping -from math import ceil, log as logf -import logging; log = logging.getLogger(__name__) -import pkg_resources -import os -# site -# pkg -from passlib import exc -from passlib.utils.compat import PY2, irange, itervalues, int_types -from passlib.utils import rng, getrandstr, to_unicode -from passlib.utils.decor import memoized_property -# local -__all__ = [ - "genword", "default_charsets", - "genphrase", "default_wordsets", -] - -#============================================================================= -# constants -#============================================================================= - -# XXX: rename / publically document this map? 
-entropy_aliases = dict( - # barest protection from throttled online attack - unsafe=12, - - # some protection from unthrottled online attack - weak=24, - - # some protection from offline attacks - fair=36, - - # reasonable protection from offline attacks - strong=48, - - # very good protection from offline attacks - secure=60, -) - -#============================================================================= -# internal helpers -#============================================================================= - -def _superclasses(obj, cls): - """return remaining classes in object's MRO after cls""" - mro = type(obj).__mro__ - return mro[mro.index(cls)+1:] - - -def _self_info_rate(source): - """ - returns 'rate of self-information' -- - i.e. average (per-symbol) entropy of the sequence **source**, - where probability of a given symbol occurring is calculated based on - the number of occurrences within the sequence itself. - - if all elements of the source are unique, this should equal ``log(len(source), 2)``. - - :arg source: - iterable containing 0+ symbols - (e.g. list of strings or ints, string of characters, etc). - - :returns: - float bits of entropy - """ - try: - size = len(source) - except TypeError: - # if len() doesn't work, calculate size by summing counts later - size = None - counts = defaultdict(int) - for char in source: - counts[char] += 1 - if size is None: - values = counts.values() - size = sum(values) - else: - values = itervalues(counts) - if not size: - return 0 - # NOTE: the following performs ``- sum(value / size * logf(value / size, 2) for value in values)``, - # it just does so with as much pulled out of the sum() loop as possible... 
- return logf(size, 2) - sum(value * logf(value, 2) for value in values) / size - - -# def _total_self_info(source): -# """ -# return total self-entropy of a sequence -# (the average entropy per symbol * size of sequence) -# """ -# return _self_info_rate(source) * len(source) - - -def _open_asset_path(path, encoding=None): - """ - :param asset_path: - string containing absolute path to file, - or package-relative path using format - ``"python.module:relative/file/path"``. - - :returns: - filehandle opened in 'rb' mode - (unless encoding explicitly specified) - """ - if encoding: - return codecs.getreader(encoding)(_open_asset_path(path)) - if os.path.isabs(path): - return open(path, "rb") - package, sep, subpath = path.partition(":") - if not sep: - raise ValueError("asset path must be absolute file path " - "or use 'pkg.name:sub/path' format: %r" % (path,)) - return pkg_resources.resource_stream(package, subpath) - - -#: type aliases -_sequence_types = (list, tuple) -_set_types = (set, frozenset) - -#: set of elements that ensure_unique() has validated already. -_ensure_unique_cache = set() - - -def _ensure_unique(source, param="source"): - """ - helper for generators -- - Throws ValueError if source elements aren't unique. - Error message will display (abbreviated) repr of the duplicates in a string/list - """ - # check cache to speed things up for frozensets / tuples / strings - cache = _ensure_unique_cache - hashable = True - try: - if source in cache: - return True - except TypeError: - hashable = False - - # check if it has dup elements - if isinstance(source, _set_types) or len(set(source)) == len(source): - if hashable: - try: - cache.add(source) - except TypeError: - # XXX: under pypy, "list() in set()" above doesn't throw TypeError, - # but trying to add unhashable it to a set *does*. 
- pass - return True - - # build list of duplicate values - seen = set() - dups = set() - for elem in source: - (dups if elem in seen else seen).add(elem) - dups = sorted(dups) - trunc = 8 - if len(dups) > trunc: - trunc = 5 - dup_repr = ", ".join(repr(str(word)) for word in dups[:trunc]) - if len(dups) > trunc: - dup_repr += ", ... plus %d others" % (len(dups) - trunc) - - # throw error - raise ValueError("`%s` cannot contain duplicate elements: %s" % - (param, dup_repr)) - -#============================================================================= -# base generator class -#============================================================================= -class SequenceGenerator(object): - """ - Base class used by word & phrase generators. - - These objects take a series of options, corresponding - to those of the :func:`generate` function. - They act as callables which can be used to generate a password - or a list of 1+ passwords. They also expose some read-only - informational attributes. - - Parameters - ---------- - :param entropy: - Optionally specify the amount of entropy the resulting passwords - should contain (as measured with respect to the generator itself). - This will be used to auto-calculate the required password size. - - :param length: - Optionally specify the length of password to generate, - measured as count of whatever symbols the subclass uses (characters or words). - Note if ``entropy`` requires a larger minimum length, - that will be used instead. - - :param rng: - Optionally provide a custom RNG source to use. - Should be an instance of :class:`random.Random`, - defaults to :class:`random.SystemRandom`. - - Attributes - ---------- - .. autoattribute:: length - .. autoattribute:: symbol_count - .. autoattribute:: entropy_per_symbol - .. autoattribute:: entropy - - Subclassing - ----------- - Subclasses must implement the ``.__next__()`` method, - and set ``.symbol_count`` before calling base ``__init__`` method. 
- """ - #============================================================================= - # instance attrs - #============================================================================= - - #: requested size of final password - length = None - - #: requested entropy of final password - requested_entropy = "strong" - - #: random number source to use - rng = rng - - #: number of potential symbols (must be filled in by subclass) - symbol_count = None - - #============================================================================= - # init - #============================================================================= - def __init__(self, entropy=None, length=None, rng=None, **kwds): - - # make sure subclass set things up correctly - assert self.symbol_count is not None, "subclass must set .symbol_count" - - # init length & requested entropy - if entropy is not None or length is None: - if entropy is None: - entropy = self.requested_entropy - entropy = entropy_aliases.get(entropy, entropy) - if entropy <= 0: - raise ValueError("`entropy` must be positive number") - min_length = int(ceil(entropy / self.entropy_per_symbol)) - if length is None or length < min_length: - length = min_length - - self.requested_entropy = entropy - - if length < 1: - raise ValueError("`length` must be positive integer") - self.length = length - - # init other common options - if rng is not None: - self.rng = rng - - # hand off to parent - if kwds and _superclasses(self, SequenceGenerator) == (object,): - raise TypeError("Unexpected keyword(s): %s" % ", ".join(kwds.keys())) - super(SequenceGenerator, self).__init__(**kwds) - - #============================================================================= - # informational helpers - #============================================================================= - - @memoized_property - def entropy_per_symbol(self): - """ - Average entropy per symbol (assuming all symbols have equal probability) - """ - return logf(self.symbol_count, 2) - - 
@memoized_property - def entropy(self): - """ - Effective entropy of generated passwords. - - This value will always be a multiple of :attr:`entropy_per_symbol`. - If entropy is specified in constructor, :attr:`length` will be chosen so - so that this value is the smallest multiple >= :attr:`requested_entropy`. - """ - return self.length * self.entropy_per_symbol - - #============================================================================= - # generation - #============================================================================= - def __next__(self): - """main generation function, should create one password/phrase""" - raise NotImplementedError("implement in subclass") - - def __call__(self, returns=None): - """ - frontend used by genword() / genphrase() to create passwords - """ - if returns is None: - return next(self) - elif isinstance(returns, int_types): - return [next(self) for _ in irange(returns)] - elif returns is iter: - return self - else: - raise exc.ExpectedTypeError(returns, ", int, or ", "returns") - - def __iter__(self): - return self - - if PY2: - def next(self): - return self.__next__() - - #============================================================================= - # eoc - #============================================================================= - -#============================================================================= -# default charsets -#============================================================================= - -#: global dict of predefined characters sets -default_charsets = dict( - # ascii letters, digits, and some punctuation - ascii_72='0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%^&*?/', - - # ascii letters and digits - ascii_62='0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ', - - # ascii_50, without visually similar '1IiLl', '0Oo', '5S', '8B' - ascii_50='234679abcdefghjkmnpqrstuvwxyzACDEFGHJKMNPQRTUVWXYZ', - - # lower case hexadecimal - hex='0123456789abcdef', -) - 
-#============================================================================= -# password generator -#============================================================================= - -class WordGenerator(SequenceGenerator): - """ - Class which generates passwords by randomly choosing from a string of unique characters. - - Parameters - ---------- - :param chars: - custom character string to draw from. - - :param charset: - predefined charset to draw from. - - :param \*\*kwds: - all other keywords passed to the :class:`SequenceGenerator` parent class. - - Attributes - ---------- - .. autoattribute:: chars - .. autoattribute:: charset - .. autoattribute:: default_charsets - """ - #============================================================================= - # instance attrs - #============================================================================= - - #: Predefined character set in use (set to None for instances using custom 'chars') - charset = "ascii_62" - - #: string of chars to draw from -- usually filled in from charset - chars = None - - #============================================================================= - # init - #============================================================================= - def __init__(self, chars=None, charset=None, **kwds): - - # init chars and charset - if chars: - if charset: - raise TypeError("`chars` and `charset` are mutually exclusive") - else: - if not charset: - charset = self.charset - assert charset - chars = default_charsets[charset] - self.charset = charset - chars = to_unicode(chars, param="chars") - _ensure_unique(chars, param="chars") - self.chars = chars - - # hand off to parent - super(WordGenerator, self).__init__(**kwds) - # log.debug("WordGenerator(): entropy/char=%r", self.entropy_per_symbol) - - #============================================================================= - # informational helpers - #============================================================================= - - 
@memoized_property - def symbol_count(self): - return len(self.chars) - - #============================================================================= - # generation - #============================================================================= - - def __next__(self): - # XXX: could do things like optionally ensure certain character groups - # (e.g. letters & punctuation) are included - return getrandstr(self.rng, self.chars, self.length) - - #============================================================================= - # eoc - #============================================================================= - - -def genword(entropy=None, length=None, returns=None, **kwds): - """Generate one or more random passwords. - - This function uses :mod:`random.SystemRandom` to generate - one or more passwords using various character sets. - The complexity of the password can be specified - by size, or by the desired amount of entropy. - - Usage Example:: - - >>> # generate a random alphanumeric string with 48 bits of entropy (the default) - >>> from passlib import pwd - >>> pwd.genword() - 'DnBHvDjMK6' - - >>> # generate a random hexadecimal string with 52 bits of entropy - >>> pwd.genword(entropy=52, charset="hex") - '310f1a7ac793f' - - :param entropy: - Strength of resulting password, measured in 'guessing entropy' bits. - An appropriate **length** value will be calculated - based on the requested entropy amount, and the size of the character set. - - This can be a positive integer, or one of the following preset - strings: ``"weak"`` (24), ``"fair"`` (36), - ``"strong"`` (48), and ``"secure"`` (56). - - If neither this or **length** is specified, **entropy** will default - to ``"strong"`` (48). - - :param length: - Size of resulting password, measured in characters. - If omitted, the size is auto-calculated based on the **entropy** parameter. - - If both **entropy** and **length** are specified, - the stronger value will be used. 
- - :param returns: - Controls what this function returns: - - * If ``None`` (the default), this function will generate a single password. - * If an integer, this function will return a list containing that many passwords. - * If the ``iter`` constant, will return an iterator that yields passwords. - - :param chars: - - Optionally specify custom string of characters to use when randomly - generating a password. This option cannot be combined with **charset**. - - :param charset: - - The predefined character set to draw from (if not specified by **chars**). - There are currently four presets available: - - * ``"ascii_62"`` (the default) -- all digits and ascii upper & lowercase letters. - Provides ~5.95 entropy per character. - - * ``"ascii_50"`` -- subset which excludes visually similar characters - (``1IiLl0Oo5S8B``). Provides ~5.64 entropy per character. - - * ``"ascii_72"`` -- all digits and ascii upper & lowercase letters, - as well as some punctuation. Provides ~6.17 entropy per character. - - * ``"hex"`` -- Lower case hexadecimal. Providers 4 bits of entropy per character. - - :returns: - :class:`!unicode` string containing randomly generated password; - or list of 1+ passwords if :samp:`returns={int}` is specified. - """ - gen = WordGenerator(length=length, entropy=entropy, **kwds) - return gen(returns) - -#============================================================================= -# default wordsets -#============================================================================= - -def _load_wordset(asset_path): - """ - load wordset from compressed datafile within package data. - file should be utf-8 encoded - - :param asset_path: - string containing absolute path to wordset file, - or "python.module:relative/file/path". - - :returns: - tuple of words, as loaded from specified words file. 
- """ - # open resource file, convert to tuple of words (strip blank lines & ws) - with _open_asset_path(asset_path, "utf-8") as fh: - gen = (word.strip() for word in fh) - words = tuple(word for word in gen if word) - - # NOTE: works but not used - # # detect if file uses " " format, and strip numeric prefix - # def extract(row): - # idx, word = row.replace("\t", " ").split(" ", 1) - # if not idx.isdigit(): - # raise ValueError("row is not dice index + word") - # return word - # try: - # extract(words[-1]) - # except ValueError: - # pass - # else: - # words = tuple(extract(word) for word in words) - - log.debug("loaded %d-element wordset from %r", len(words), asset_path) - return words - - -class WordsetDict(MutableMapping): - """ - Special mapping used to store dictionary of wordsets. - Different from a regular dict in that some wordsets - may be lazy-loaded from an asset path. - """ - - #: dict of key -> asset path - paths = None - - #: dict of key -> value - _loaded = None - - def __init__(self, *args, **kwds): - self.paths = {} - self._loaded = {} - super(WordsetDict, self).__init__(*args, **kwds) - - def __getitem__(self, key): - try: - return self._loaded[key] - except KeyError: - pass - path = self.paths[key] - value = self._loaded[key] = _load_wordset(path) - return value - - def set_path(self, key, path): - """ - set asset path to lazy-load wordset from. 
- """ - self.paths[key] = path - - def __setitem__(self, key, value): - self._loaded[key] = value - - def __delitem__(self, key): - if key in self: - del self._loaded[key] - self.paths.pop(key, None) - else: - del self.paths[key] - - @property - def _keyset(self): - keys = set(self._loaded) - keys.update(self.paths) - return keys - - def __iter__(self): - return iter(self._keyset) - - def __len__(self): - return len(self._keyset) - - # NOTE: speeds things up, and prevents contains from lazy-loading - def __contains__(self, key): - return key in self._loaded or key in self.paths - - -#: dict of predefined word sets. -#: key is name of wordset, value should be sequence of words. -default_wordsets = WordsetDict() - -# register the wordsets built into passlib -for name in "eff_long eff_short eff_prefixed bip39".split(): - default_wordsets.set_path(name, "passlib:_data/wordsets/%s.txt" % name) - -#============================================================================= -# passphrase generator -#============================================================================= -class PhraseGenerator(SequenceGenerator): - """class which generates passphrases by randomly choosing - from a list of unique words. - - :param wordset: - wordset to draw from. - :param preset: - name of preset wordlist to use instead of ``wordset``. - :param spaces: - whether to insert spaces between words in output (defaults to ``True``). - :param \*\*kwds: - all other keywords passed to the :class:`SequenceGenerator` parent class. - - .. 
autoattribute:: wordset - """ - #============================================================================= - # instance attrs - #============================================================================= - - #: predefined wordset to use - wordset = "eff_long" - - #: list of words to draw from - words = None - - #: separator to use when joining words - sep = " " - - #============================================================================= - # init - #============================================================================= - def __init__(self, wordset=None, words=None, sep=None, **kwds): - - # load wordset - if words is not None: - if wordset is not None: - raise TypeError("`words` and `wordset` are mutually exclusive") - else: - if wordset is None: - wordset = self.wordset - assert wordset - words = default_wordsets[wordset] - self.wordset = wordset - - # init words - if not isinstance(words, _sequence_types): - words = tuple(words) - _ensure_unique(words, param="words") - self.words = words - - # init separator - if sep is None: - sep = self.sep - sep = to_unicode(sep, param="sep") - self.sep = sep - - # hand off to parent - super(PhraseGenerator, self).__init__(**kwds) - ##log.debug("PhraseGenerator(): entropy/word=%r entropy/char=%r min_chars=%r", - ## self.entropy_per_symbol, self.entropy_per_char, self.min_chars) - - #============================================================================= - # informational helpers - #============================================================================= - - @memoized_property - def symbol_count(self): - return len(self.words) - - #============================================================================= - # generation - #============================================================================= - - def __next__(self): - words = (self.rng.choice(self.words) for _ in irange(self.length)) - return self.sep.join(words) - - 
#============================================================================= - # eoc - #============================================================================= - - -def genphrase(entropy=None, length=None, returns=None, **kwds): - """Generate one or more random password / passphrases. - - This function uses :mod:`random.SystemRandom` to generate - one or more passwords; it can be configured to generate - alphanumeric passwords, or full english phrases. - The complexity of the password can be specified - by size, or by the desired amount of entropy. - - Usage Example:: - - >>> # generate random phrase with 48 bits of entropy - >>> from passlib import pwd - >>> pwd.genphrase() - 'gangly robbing salt shove' - - >>> # generate a random phrase with 52 bits of entropy - >>> # using a particular wordset - >>> pwd.genword(entropy=52, wordset="bip39") - 'wheat dilemma reward rescue diary' - - :param entropy: - Strength of resulting password, measured in 'guessing entropy' bits. - An appropriate **length** value will be calculated - based on the requested entropy amount, and the size of the word set. - - This can be a positive integer, or one of the following preset - strings: ``"weak"`` (24), ``"fair"`` (36), - ``"strong"`` (48), and ``"secure"`` (56). - - If neither this or **length** is specified, **entropy** will default - to ``"strong"`` (48). - - :param length: - Length of resulting password, measured in words. - If omitted, the size is auto-calculated based on the **entropy** parameter. - - If both **entropy** and **length** are specified, - the stronger value will be used. - - :param returns: - Controls what this function returns: - - * If ``None`` (the default), this function will generate a single password. - * If an integer, this function will return a list containing that many passwords. - * If the ``iter`` builtin, will return an iterator that yields passwords. 
- - :param words: - - Optionally specifies a list/set of words to use when randomly generating a passphrase. - This option cannot be combined with **wordset**. - - :param wordset: - - The predefined word set to draw from (if not specified by **words**). - There are currently four presets available: - - ``"eff_long"`` (the default) - - Wordset containing 7776 english words of ~7 letters. - Constructed by the EFF, it offers ~12.9 bits of entropy per word. - - This wordset (and the other ``"eff_"`` wordsets) - were `created by the EFF `_ - to aid in generating passwords. See their announcement page - for more details about the design & properties of these wordsets. - - ``"eff_short"`` - - Wordset containing 1296 english words of ~4.5 letters. - Constructed by the EFF, it offers ~10.3 bits of entropy per word. - - ``"eff_prefixed"`` - - Wordset containing 1296 english words of ~8 letters, - selected so that they each have a unique 3-character prefix. - Constructed by the EFF, it offers ~10.3 bits of entropy per word. - - ``"bip39"`` - - Wordset of 2048 english words of ~5 letters, - selected so that they each have a unique 4-character prefix. - Published as part of Bitcoin's `BIP 39 `_, - this wordset has exactly 11 bits of entropy per word. - - This list offers words that are typically shorter than ``"eff_long"`` - (at the cost of slightly less entropy); and much shorter than - ``"eff_prefixed"`` (at the cost of a longer unique prefix). - - :param sep: - Optional separator to use when joining words. - Defaults to ``" "`` (a space), but can be an empty string, a hyphen, etc. - - :returns: - :class:`!unicode` string containing randomly generated passphrase; - or list of 1+ passphrases if :samp:`returns={int}` is specified. 
- """ - gen = PhraseGenerator(entropy=entropy, length=length, **kwds) - return gen(returns) - -#============================================================================= -# strength measurement -# -# NOTE: -# for a little while, had rough draft of password strength measurement alg here. -# but not sure if there's value in yet another measurement algorithm, -# that's not just duplicating the effort of libraries like zxcbn. -# may revive it later, but for now, leaving some refs to others out there: -# * NIST 800-63 has simple alg -# * zxcvbn (https://tech.dropbox.com/2012/04/zxcvbn-realistic-password-strength-estimation/) -# might also be good, and has approach similar to composite approach i was already thinking about, -# but much more well thought out. -# * passfault (https://github.com/c-a-m/passfault) looks thorough, -# but may have licensing issues, plus porting to python looks like very big job :( -# * give a look at running things through zlib - might be able to cheaply -# catch extra redundancies. 
-#============================================================================= - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/registry.py b/libs_crutch/contrib/passlib/registry.py deleted file mode 100644 index d4ef6d0..0000000 --- a/libs_crutch/contrib/passlib/registry.py +++ /dev/null @@ -1,542 +0,0 @@ -"""passlib.registry - registry for password hash handlers""" -#============================================================================= -# imports -#============================================================================= -# core -import re -import logging; log = logging.getLogger(__name__) -from warnings import warn -# pkg -from passlib import exc -from passlib.exc import ExpectedTypeError, PasslibWarning -from passlib.ifc import PasswordHash -from passlib.utils import ( - is_crypt_handler, has_crypt as os_crypt_present, - unix_crypt_schemes as os_crypt_schemes, -) -from passlib.utils.compat import unicode_or_str -from passlib.utils.decor import memoize_single_value -# local -__all__ = [ - "register_crypt_handler_path", - "register_crypt_handler", - "get_crypt_handler", - "list_crypt_handlers", -] - -#============================================================================= -# proxy object used in place of 'passlib.hash' module -#============================================================================= -class _PasslibRegistryProxy(object): - """proxy module passlib.hash - - this module is in fact an object which lazy-loads - the requested password hash algorithm from wherever it has been stored. - it acts as a thin wrapper around :func:`passlib.registry.get_crypt_handler`. 
- """ - __name__ = "passlib.hash" - __package__ = None - - def __getattr__(self, attr): - if attr.startswith("_"): - raise AttributeError("missing attribute: %r" % (attr,)) - handler = get_crypt_handler(attr, None) - if handler: - return handler - else: - raise AttributeError("unknown password hash: %r" % (attr,)) - - def __setattr__(self, attr, value): - if attr.startswith("_"): - # writing to private attributes should behave normally. - # (required so GAE can write to the __loader__ attribute). - object.__setattr__(self, attr, value) - else: - # writing to public attributes should be treated - # as attempting to register a handler. - register_crypt_handler(value, _attr=attr) - - def __repr__(self): - return "" - - def __dir__(self): - # this adds in lazy-loaded handler names, - # otherwise this is the standard dir() implementation. - attrs = set(dir(self.__class__)) - attrs.update(self.__dict__) - attrs.update(_locations) - return sorted(attrs) - -# create single instance - available publically as 'passlib.hash' -_proxy = _PasslibRegistryProxy() - -#============================================================================= -# internal registry state -#============================================================================= - -# singleton uses to detect omitted keywords -_UNSET = object() - -# dict mapping name -> loaded handlers (just uses proxy object's internal dict) -_handlers = _proxy.__dict__ - -# dict mapping names -> import path for lazy loading. -# * import path should be "module.path" or "module.path:attr" -# * if attr omitted, "name" used as default. 
-_locations = dict( - # NOTE: this is a hardcoded list of the handlers built into passlib, - # applications should call register_crypt_handler_path() - apr_md5_crypt = "passlib.handlers.md5_crypt", - argon2 = "passlib.handlers.argon2", - atlassian_pbkdf2_sha1 = "passlib.handlers.pbkdf2", - bcrypt = "passlib.handlers.bcrypt", - bcrypt_sha256 = "passlib.handlers.bcrypt", - bigcrypt = "passlib.handlers.des_crypt", - bsd_nthash = "passlib.handlers.windows", - bsdi_crypt = "passlib.handlers.des_crypt", - cisco_pix = "passlib.handlers.cisco", - cisco_asa = "passlib.handlers.cisco", - cisco_type7 = "passlib.handlers.cisco", - cta_pbkdf2_sha1 = "passlib.handlers.pbkdf2", - crypt16 = "passlib.handlers.des_crypt", - des_crypt = "passlib.handlers.des_crypt", - django_argon2 = "passlib.handlers.django", - django_bcrypt = "passlib.handlers.django", - django_bcrypt_sha256 = "passlib.handlers.django", - django_pbkdf2_sha256 = "passlib.handlers.django", - django_pbkdf2_sha1 = "passlib.handlers.django", - django_salted_sha1 = "passlib.handlers.django", - django_salted_md5 = "passlib.handlers.django", - django_des_crypt = "passlib.handlers.django", - django_disabled = "passlib.handlers.django", - dlitz_pbkdf2_sha1 = "passlib.handlers.pbkdf2", - fshp = "passlib.handlers.fshp", - grub_pbkdf2_sha512 = "passlib.handlers.pbkdf2", - hex_md4 = "passlib.handlers.digests", - hex_md5 = "passlib.handlers.digests", - hex_sha1 = "passlib.handlers.digests", - hex_sha256 = "passlib.handlers.digests", - hex_sha512 = "passlib.handlers.digests", - htdigest = "passlib.handlers.digests", - ldap_plaintext = "passlib.handlers.ldap_digests", - ldap_md5 = "passlib.handlers.ldap_digests", - ldap_sha1 = "passlib.handlers.ldap_digests", - ldap_hex_md5 = "passlib.handlers.roundup", - ldap_hex_sha1 = "passlib.handlers.roundup", - ldap_salted_md5 = "passlib.handlers.ldap_digests", - ldap_salted_sha1 = "passlib.handlers.ldap_digests", - ldap_des_crypt = "passlib.handlers.ldap_digests", - ldap_bsdi_crypt = 
"passlib.handlers.ldap_digests", - ldap_md5_crypt = "passlib.handlers.ldap_digests", - ldap_bcrypt = "passlib.handlers.ldap_digests", - ldap_sha1_crypt = "passlib.handlers.ldap_digests", - ldap_sha256_crypt = "passlib.handlers.ldap_digests", - ldap_sha512_crypt = "passlib.handlers.ldap_digests", - ldap_pbkdf2_sha1 = "passlib.handlers.pbkdf2", - ldap_pbkdf2_sha256 = "passlib.handlers.pbkdf2", - ldap_pbkdf2_sha512 = "passlib.handlers.pbkdf2", - lmhash = "passlib.handlers.windows", - md5_crypt = "passlib.handlers.md5_crypt", - msdcc = "passlib.handlers.windows", - msdcc2 = "passlib.handlers.windows", - mssql2000 = "passlib.handlers.mssql", - mssql2005 = "passlib.handlers.mssql", - mysql323 = "passlib.handlers.mysql", - mysql41 = "passlib.handlers.mysql", - nthash = "passlib.handlers.windows", - oracle10 = "passlib.handlers.oracle", - oracle11 = "passlib.handlers.oracle", - pbkdf2_sha1 = "passlib.handlers.pbkdf2", - pbkdf2_sha256 = "passlib.handlers.pbkdf2", - pbkdf2_sha512 = "passlib.handlers.pbkdf2", - phpass = "passlib.handlers.phpass", - plaintext = "passlib.handlers.misc", - postgres_md5 = "passlib.handlers.postgres", - roundup_plaintext = "passlib.handlers.roundup", - scram = "passlib.handlers.scram", - scrypt = "passlib.handlers.scrypt", - sha1_crypt = "passlib.handlers.sha1_crypt", - sha256_crypt = "passlib.handlers.sha2_crypt", - sha512_crypt = "passlib.handlers.sha2_crypt", - sun_md5_crypt = "passlib.handlers.sun_md5_crypt", - unix_disabled = "passlib.handlers.misc", - unix_fallback = "passlib.handlers.misc", -) - -# master regexp for detecting valid handler names -_name_re = re.compile("^[a-z][a-z0-9_]+[a-z0-9]$") - -# names which aren't allowed for various reasons -# (mainly keyword conflicts in CryptContext) -_forbidden_names = frozenset(["onload", "policy", "context", "all", - "default", "none", "auto"]) - -#============================================================================= -# registry frontend functions 
-#============================================================================= -def _validate_handler_name(name): - """helper to validate handler name - - :raises ValueError: - * if empty name - * if name not lower case - * if name contains double underscores - * if name is reserved (e.g. ``context``, ``all``). - """ - if not name: - raise ValueError("handler name cannot be empty: %r" % (name,)) - if name.lower() != name: - raise ValueError("name must be lower-case: %r" % (name,)) - if not _name_re.match(name): - raise ValueError("invalid name (must be 3+ characters, " - " begin with a-z, and contain only underscore, a-z, " - "0-9): %r" % (name,)) - if '__' in name: - raise ValueError("name may not contain double-underscores: %r" % - (name,)) - if name in _forbidden_names: - raise ValueError("that name is not allowed: %r" % (name,)) - return True - -def register_crypt_handler_path(name, path): - """register location to lazy-load handler when requested. - - custom hashes may be registered via :func:`register_crypt_handler`, - or they may be registered by this function, - which will delay actually importing and loading the handler - until a call to :func:`get_crypt_handler` is made for the specified name. - - :arg name: name of handler - :arg path: module import path - - the specified module path should contain a password hash handler - called :samp:`{name}`, or the path may contain a colon, - specifying the module and module attribute to use. 
- for example, the following would cause ``get_handler("myhash")`` to look - for a class named ``myhash`` within the ``myapp.helpers`` module:: - - >>> from passlib.registry import registry_crypt_handler_path - >>> registry_crypt_handler_path("myhash", "myapp.helpers") - - ...while this form would cause ``get_handler("myhash")`` to look - for a class name ``MyHash`` within the ``myapp.helpers`` module:: - - >>> from passlib.registry import registry_crypt_handler_path - >>> registry_crypt_handler_path("myhash", "myapp.helpers:MyHash") - """ - # validate name - _validate_handler_name(name) - - # validate path - if path.startswith("."): - raise ValueError("path cannot start with '.'") - if ':' in path: - if path.count(':') > 1: - raise ValueError("path cannot have more than one ':'") - if path.find('.', path.index(':')) > -1: - raise ValueError("path cannot have '.' to right of ':'") - - # store location - _locations[name] = path - log.debug("registered path to %r handler: %r", name, path) - -def register_crypt_handler(handler, force=False, _attr=None): - """register password hash handler. - - this method immediately registers a handler with the internal passlib registry, - so that it will be returned by :func:`get_crypt_handler` when requested. - - :arg handler: the password hash handler to register - :param force: force override of existing handler (defaults to False) - :param _attr: - [internal kwd] if specified, ensures ``handler.name`` - matches this value, or raises :exc:`ValueError`. - - :raises TypeError: - if the specified object does not appear to be a valid handler. - - :raises ValueError: - if the specified object's name (or other required attributes) - contain invalid values. - - :raises KeyError: - if a (different) handler was already registered with - the same name, and ``force=True`` was not specified. 
- """ - # validate handler - if not is_crypt_handler(handler): - raise ExpectedTypeError(handler, "password hash handler", "handler") - if not handler: - raise AssertionError("``bool(handler)`` must be True") - - # validate name - name = handler.name - _validate_handler_name(name) - if _attr and _attr != name: - raise ValueError("handlers must be stored only under their own name (%r != %r)" % - (_attr, name)) - - # check for existing handler - other = _handlers.get(name) - if other: - if other is handler: - log.debug("same %r handler already registered: %r", name, handler) - return - elif force: - log.warning("overriding previously registered %r handler: %r", - name, other) - else: - raise KeyError("another %r handler has already been registered: %r" % - (name, other)) - - # register handler - _handlers[name] = handler - log.debug("registered %r handler: %r", name, handler) - -def get_crypt_handler(name, default=_UNSET): - """return handler for specified password hash scheme. - - this method looks up a handler for the specified scheme. - if the handler is not already loaded, - it checks if the location is known, and loads it first. - - :arg name: name of handler to return - :param default: optional default value to return if no handler with specified name is found. - - :raises KeyError: if no handler matching that name is found, and no default specified, a KeyError will be raised. - - :returns: handler attached to name, or default value (if specified). - """ - # catch invalid names before we check _handlers, - # since it's a module dict, and exposes things like __package__, etc. 
- if name.startswith("_"): - if default is _UNSET: - raise KeyError("invalid handler name: %r" % (name,)) - else: - return default - - # check if handler is already loaded - try: - return _handlers[name] - except KeyError: - pass - - # normalize name (and if changed, check dict again) - assert isinstance(name, unicode_or_str), "name must be string instance" - alt = name.replace("-","_").lower() - if alt != name: - warn("handler names should be lower-case, and use underscores instead " - "of hyphens: %r => %r" % (name, alt), PasslibWarning, - stacklevel=2) - name = alt - - # try to load using new name - try: - return _handlers[name] - except KeyError: - pass - - # check if lazy load mapping has been specified for this driver - path = _locations.get(name) - if path: - if ':' in path: - modname, modattr = path.split(":") - else: - modname, modattr = path, name - ##log.debug("loading %r handler from path: '%s:%s'", name, modname, modattr) - - # try to load the module - any import errors indicate runtime config, usually - # either missing package, or bad path provided to register_crypt_handler_path() - mod = __import__(modname, fromlist=[modattr], level=0) - - # first check if importing module triggered register_crypt_handler(), - # (this is discouraged due to its magical implicitness) - handler = _handlers.get(name) - if handler: - # XXX: issue deprecation warning here? - assert is_crypt_handler(handler), "unexpected object: name=%r object=%r" % (name, handler) - return handler - - # then get real handler & register it - handler = getattr(mod, modattr) - register_crypt_handler(handler, _attr=name) - return handler - - # fail! - if default is _UNSET: - raise KeyError("no crypt handler found for algorithm: %r" % (name,)) - else: - return default - -def list_crypt_handlers(loaded_only=False): - """return sorted list of all known crypt handler names. - - :param loaded_only: if ``True``, only returns names of handlers which have actually been loaded. 
- - :returns: list of names of all known handlers - """ - names = set(_handlers) - if not loaded_only: - names.update(_locations) - # strip private attrs out of namespace and sort. - # TODO: make _handlers a separate list, so we don't have module namespace mixed in. - return sorted(name for name in names if not name.startswith("_")) - -# NOTE: these two functions mainly exist just for the unittests... - -def _has_crypt_handler(name, loaded_only=False): - """check if handler name is known. - - this is only useful for two cases: - - * quickly checking if handler has already been loaded - * checking if handler exists, without actually loading it - - :arg name: name of handler - :param loaded_only: if ``True``, returns False if handler exists but hasn't been loaded - """ - return (name in _handlers) or (not loaded_only and name in _locations) - -def _unload_handler_name(name, locations=True): - """unloads a handler from the registry. - - .. warning:: - - this is an internal function, - used only by the unittests. - - if loaded handler is found with specified name, it's removed. - if path to lazy load handler is found, it's removed. - - missing names are a noop. 
- - :arg name: name of handler to unload - :param locations: if False, won't purge registered handler locations (default True) - """ - if name in _handlers: - del _handlers[name] - if locations and name in _locations: - del _locations[name] - -#============================================================================= -# inspection helpers -#============================================================================= - -#------------------------------------------------------------------ -# general -#------------------------------------------------------------------ - -# TODO: needs UTs -def _resolve(hasher, param="value"): - """ - internal helper to resolve argument to hasher object - """ - if is_crypt_handler(hasher): - return hasher - elif isinstance(hasher, unicode_or_str): - return get_crypt_handler(hasher) - else: - raise exc.ExpectedTypeError(hasher, unicode_or_str, param) - - -#: backend aliases -ANY = "any" -BUILTIN = "builtin" -OS_CRYPT = "os_crypt" - -# TODO: needs UTs -def has_backend(hasher, backend=ANY, safe=False): - """ - Test if specified backend is available for hasher. - - :param hasher: - Hasher name or object. - - :param backend: - Name of backend, or ``"any"`` if any backend will do. - For hashers without multiple backends, will pretend - they have a single backend named ``"builtin"``. - - :param safe: - By default, throws error if backend is unknown. - If ``safe=True``, will just return false value. - - :raises ValueError: - * if hasher name is unknown. - * if backend is unknown to hasher, and safe=False. - - :return: - True if backend available, False if not available, - and None if unknown + safe=True. 
- """ - hasher = _resolve(hasher) - - if backend == ANY: - if not hasattr(hasher, "get_backend"): - # single backend, assume it's loaded - return True - - # multiple backends, check at least one is loadable - try: - hasher.get_backend() - return True - except exc.MissingBackendError: - return False - - # test for specific backend - if hasattr(hasher, "has_backend"): - # multiple backends - if safe and backend not in hasher.backends: - return None - return hasher.has_backend(backend) - - # single builtin backend - if backend == BUILTIN: - return True - elif safe: - return None - else: - raise exc.UnknownBackendError(hasher, backend) - -#------------------------------------------------------------------ -# os crypt -#------------------------------------------------------------------ - -# TODO: move unix_crypt_schemes list to here. -# os_crypt_schemes -- alias for unix_crypt_schemes above - - -# TODO: needs UTs -@memoize_single_value -def get_supported_os_crypt_schemes(): - """ - return tuple of schemes which :func:`crypt.crypt` natively supports. - """ - if not os_crypt_present: - return () - cache = tuple(name for name in os_crypt_schemes - if get_crypt_handler(name).has_backend(OS_CRYPT)) - if not cache: # pragma: no cover -- sanity check - # no idea what OS this could happen on... - warn("crypt.crypt() function is present, but doesn't support any " - "formats known to passlib!", exc.PasslibRuntimeWarning) - return cache - - -# TODO: needs UTs -def has_os_crypt_support(hasher): - """ - check if hash is supported by native :func:`crypt.crypt` function. - if :func:`crypt.crypt` is not present, will always return False. - - :param hasher: - name or hasher object. - - :returns bool: - True if hash format is supported by OS, else False. 
- """ - return os_crypt_present and has_backend(hasher, OS_CRYPT, safe=True) - -#============================================================================= -# eof -#============================================================================= diff --git a/libs_crutch/contrib/passlib/totp.py b/libs_crutch/contrib/passlib/totp.py deleted file mode 100644 index 7d8c6ed..0000000 --- a/libs_crutch/contrib/passlib/totp.py +++ /dev/null @@ -1,1908 +0,0 @@ -"""passlib.totp -- TOTP / RFC6238 / Google Authenticator utilities.""" -#============================================================================= -# imports -#============================================================================= -from __future__ import absolute_import, division, print_function -from passlib.utils.compat import PY3 -# core -import base64 -import calendar -import json -import logging; log = logging.getLogger(__name__) -import math -import struct -import sys -import time as _time -import re -if PY3: - from urllib.parse import urlparse, parse_qsl, quote, unquote -else: - from urllib import quote, unquote - from urlparse import urlparse, parse_qsl -from warnings import warn -# site -try: - # TOTP encrypted keys only supported if cryptography (https://cryptography.io) is installed - from cryptography.hazmat.backends import default_backend as _cg_default_backend - import cryptography.hazmat.primitives.ciphers.algorithms - import cryptography.hazmat.primitives.ciphers.modes - from cryptography.hazmat.primitives import ciphers as _cg_ciphers - del cryptography -except ImportError: - log.debug("can't import 'cryptography' package, totp encryption disabled") - _cg_ciphers = _cg_default_backend = None -# pkg -from passlib import exc -from passlib.exc import TokenError, MalformedTokenError, InvalidTokenError, UsedTokenError -from passlib.utils import (to_unicode, to_bytes, consteq, - getrandbytes, rng, SequenceMixin, xor_bytes, getrandstr) -from passlib.utils.binary import BASE64_CHARS, b32encode, 
b32decode -from passlib.utils.compat import (u, unicode, native_string_types, bascii_to_str, int_types, num_types, - irange, byte_elem_value, UnicodeIO, suppress_cause) -from passlib.utils.decor import hybrid_method, memoized_property -from passlib.crypto.digest import lookup_hash, compile_hmac, pbkdf2_hmac -from passlib.hash import pbkdf2_sha256 -# local -__all__ = [ - # frontend classes - "AppWallet", - "TOTP", - - # errors (defined in passlib.exc, but exposed here for convenience) - "TokenError", - "MalformedTokenError", - "InvalidTokenError", - "UsedTokenError", - - # internal helper classes - "TotpToken", - "TotpMatch", -] - -#============================================================================= -# HACK: python < 2.7.4's urlparse() won't parse query strings unless the url scheme -# is one of the schemes in the urlparse.uses_query list. 2.7 abandoned -# this, and parses query if present, regardless of the scheme. -# as a workaround for older versions, we add "otpauth" to the known list. -# this was fixed by https://bugs.python.org/issue9374, in 2.7.4 release. 
-#============================================================================= -if sys.version_info < (2,7,4): - from urlparse import uses_query - if "otpauth" not in uses_query: - uses_query.append("otpauth") - log.debug("registered 'otpauth' scheme with urlparse.uses_query") - del uses_query - -#============================================================================= -# internal helpers -#============================================================================= - -#----------------------------------------------------------------------------- -# token parsing / rendering helpers -#----------------------------------------------------------------------------- - -#: regex used to clean whitespace from tokens & keys -_clean_re = re.compile(u(r"\s|[-=]"), re.U) - -_chunk_sizes = [4,6,5] - -def _get_group_size(klen): - """ - helper for group_string() -- - calculates optimal size of group for given string size. - """ - # look for exact divisor - for size in _chunk_sizes: - if not klen % size: - return size - # fallback to divisor with largest remainder - # (so chunks are as close to even as possible) - best = _chunk_sizes[0] - rem = 0 - for size in _chunk_sizes: - if klen % size > rem: - best = size - rem = klen % size - return best - -def group_string(value, sep="-"): - """ - reformat string into (roughly) evenly-sized groups, separated by **sep**. - useful for making tokens & keys easier to read by humans. - """ - klen = len(value) - size = _get_group_size(klen) - return sep.join(value[o:o+size] for o in irange(0, klen, size)) - -#----------------------------------------------------------------------------- -# encoding helpers -#----------------------------------------------------------------------------- - -def _decode_bytes(key, format): - """ - internal TOTP() helper -- - decodes key according to specified format. 
- """ - if format == "raw": - if not isinstance(key, bytes): - raise exc.ExpectedTypeError(key, "bytes", "key") - return key - # for encoded data, key must be either unicode or ascii-encoded bytes, - # and must contain a hex or base32 string. - key = to_unicode(key, param="key") - key = _clean_re.sub("", key).encode("utf-8") # strip whitespace & hypens - if format == "hex" or format == "base16": - return base64.b16decode(key.upper()) - elif format == "base32": - return b32decode(key) - # XXX: add base64 support? - else: - raise ValueError("unknown byte-encoding format: %r" % (format,)) - -#============================================================================= -# OTP management -#============================================================================= - -#: flag for detecting if encrypted totp support is present -AES_SUPPORT = bool(_cg_ciphers) - -#: regex for validating secret tags -_tag_re = re.compile("(?i)^[a-z0-9][a-z0-9_.-]*$") - -class AppWallet(object): - """ - This class stores application-wide secrets that can be used - to encrypt & decrypt TOTP keys for storage. - It's mostly an internal detail, applications usually just need - to pass ``secrets`` or ``secrets_path`` to :meth:`TOTP.using`. - - .. seealso:: - - :ref:`totp-storing-instances` for more details on this workflow. - - Arguments - ========= - :param secrets: - Dict of application secrets to use when encrypting/decrypting - stored TOTP keys. This should include a secret to use when encrypting - new keys, but may contain additional older secrets to decrypt - existing stored keys. - - The dict should map tags -> secrets, so that each secret is identified - by a unique tag. This tag will be stored along with the encrypted - key in order to determine which secret should be used for decryption. - Tag should be string that starts with regex range ``[a-z0-9]``, - and the remaining characters must be in ``[a-z0-9_.-]``. 
- - It is recommended to use something like a incremental counter - ("1", "2", ...), an ISO date ("2016-01-01", "2016-05-16", ...), - or a timestamp ("19803495", "19813495", ...) when assigning tags. - - This mapping be provided in three formats: - - * A python dict mapping tag -> secret - * A JSON-formatted string containing the dict - * A multiline string with the format ``"tag: value\\ntag: value\\n..."`` - - (This last format is mainly useful when loading from a text file via **secrets_path**) - - .. seealso:: :func:`generate_secret` to create a secret with sufficient entropy - - :param secrets_path: - Alternately, callers can specify a separate file where the - application-wide secrets are stored, using either of the string - formats described in **secrets**. - - :param default_tag: - Specifies which tag in **secrets** should be used as the default - for encrypting new keys. If omitted, the tags will be sorted, - and the largest tag used as the default. - - if all tags are numeric, they will be sorted numerically; - otherwise they will be sorted alphabetically. - this permits tags to be assigned numerically, - or e.g. using ``YYYY-MM-DD`` dates. - - :param encrypt_cost: - Optional time-cost factor for key encryption. - This value corresponds to log2() of the number of PBKDF2 - rounds used. - - .. warning:: - - The application secret(s) should be stored in a secure location by - your application, and each secret should contain a large amount - of entropy (to prevent brute-force attacks if the encrypted keys - are leaked). - - :func:`generate_secret` is provided as a convenience helper - to generate a new application secret of suitable size. - - Best practice is to load these values from a file via **secrets_path**, - and then have your application give up permission to read this file - once it's running. - - Public Methods - ============== - .. autoattribute:: has_secrets - .. 
autoattribute:: default_tag - - Semi-Private Methods - ==================== - The following methods are used internally by the :class:`TOTP` - class in order to encrypt & decrypt keys using the provided application - secrets. They will generally not be publically useful, and may have their - API changed periodically. - - .. automethod:: get_secret - .. automethod:: encrypt_key - .. automethod:: decrypt_key - """ - #======================================================================== - # instance attrs - #======================================================================== - - #: default salt size for encrypt_key() output - salt_size = 12 - - #: default cost (log2 of pbkdf2 rounds) for encrypt_key() output - #: NOTE: this is relatively low, since the majority of the security - #: relies on a high entropy secret to pass to AES. - encrypt_cost = 14 - - #: map of secret tag -> secret bytes - _secrets = None - - #: tag for default secret - default_tag = None - - #======================================================================== - # init - #======================================================================== - def __init__(self, secrets=None, default_tag=None, encrypt_cost=None, - secrets_path=None): - - # TODO: allow a lot more things to be customized from here, - # e.g. setting default TOTP constructor options. 
- - # - # init cost - # - if encrypt_cost is not None: - if isinstance(encrypt_cost, native_string_types): - encrypt_cost = int(encrypt_cost) - assert encrypt_cost >= 0 - self.encrypt_cost = encrypt_cost - - # - # init secrets map - # - - # load secrets from file (if needed) - if secrets_path is not None: - if secrets is not None: - raise TypeError("'secrets' and 'secrets_path' are mutually exclusive") - secrets = open(secrets_path, "rt").read() - - # parse & store secrets - secrets = self._secrets = self._parse_secrets(secrets) - - # - # init default tag/secret - # - if secrets: - if default_tag is not None: - # verify that tag is present in map - self.get_secret(default_tag) - elif all(tag.isdigit() for tag in secrets): - default_tag = max(secrets, key=int) - else: - default_tag = max(secrets) - self.default_tag = default_tag - - def _parse_secrets(self, source): - """ - parse 'secrets' parameter - - :returns: - Dict[tag:str, secret:bytes] - """ - # parse string formats - # to make this easy to pass in configuration from a separate file, - # 'secrets' can be string using two formats -- json & "tag:value\n" - check_type = True - if isinstance(source, native_string_types): - if source.lstrip().startswith(("[", "{")): - # json list / dict - source = json.loads(source) - elif "\n" in source and ":" in source: - # multiline string containing series of "tag: value\n" rows; - # empty and "#\n" rows are ignored - def iter_pairs(source): - for line in source.splitlines(): - line = line.strip() - if line and not line.startswith("#"): - tag, secret = line.split(":", 1) - yield tag.strip(), secret.strip() - source = iter_pairs(source) - check_type = False - else: - raise ValueError("unrecognized secrets string format") - - # ensure we have iterable of (tag, value) pairs - if source is None: - return {} - elif isinstance(source, dict): - source = source.items() - # XXX: could support iterable of (tag,value) pairs, but not yet needed... 
- # elif check_type and (isinstance(source, str) or not isinstance(source, Iterable)): - elif check_type: - raise TypeError("'secrets' must be mapping, or list of items") - - # parse into final dict, normalizing contents - return dict(self._parse_secret_pair(tag, value) - for tag, value in source) - - def _parse_secret_pair(self, tag, value): - if isinstance(tag, native_string_types): - pass - elif isinstance(tag, int): - tag = str(tag) - else: - raise TypeError("tag must be unicode/string: %r" % (tag,)) - if not _tag_re.match(tag): - raise ValueError("tag contains invalid characters: %r" % (tag,)) - if not isinstance(value, bytes): - value = to_bytes(value, param="secret %r" % (tag,)) - if not value: - raise ValueError("tag contains empty secret: %r" % (tag,)) - return tag, value - - #======================================================================== - # accessing secrets - #======================================================================== - - @property - def has_secrets(self): - """whether at least one application secret is present""" - return self.default_tag is not None - - def get_secret(self, tag): - """ - resolve a secret tag to the secret (as bytes). - throws a KeyError if not found. - """ - secrets = self._secrets - if not secrets: - raise KeyError("no application secrets configured") - try: - return secrets[tag] - except KeyError: - raise suppress_cause(KeyError("unknown secret tag: %r" % (tag,))) - - #======================================================================== - # encrypted key helpers -- used internally by TOTP - #======================================================================== - - @staticmethod - def _cipher_aes_key(value, secret, salt, cost, decrypt=False): - """ - Internal helper for :meth:`encrypt_key` -- - handles lowlevel encryption/decryption. - - Algorithm details: - - This function uses PBKDF2-HMAC-SHA256 to generate a 32-byte AES key - and a 16-byte IV from the application secret & random salt. 
- It then uses AES-256-CTR to encrypt/decrypt the TOTP key. - - CTR mode was chosen over CBC because the main attack scenario here - is that the attacker has stolen the database, and is trying to decrypt a TOTP key - (the plaintext value here). To make it hard for them, we want every password - to decrypt to a potentially valid key -- thus need to avoid any authentication - or padding oracle attacks. While some random padding construction could be devised - to make this work for CBC mode, a stream cipher mode is just plain simpler. - OFB/CFB modes would also work here, but seeing as they have malleability - and cyclic issues (though remote and barely relevant here), - CTR was picked as the best overall choice. - """ - # make sure backend AES support is available - if _cg_ciphers is None: - raise RuntimeError("TOTP encryption requires 'cryptography' package " - "(https://cryptography.io)") - - # use pbkdf2 to derive both key (32 bytes) & iv (16 bytes) - # NOTE: this requires 2 sha256 blocks to be calculated. - keyiv = pbkdf2_hmac("sha256", secret, salt=salt, rounds=(1 << cost), keylen=48) - - # use AES-256-CTR to encrypt/decrypt input value - cipher = _cg_ciphers.Cipher(_cg_ciphers.algorithms.AES(keyiv[:32]), - _cg_ciphers.modes.CTR(keyiv[32:]), - _cg_default_backend()) - ctx = cipher.decryptor() if decrypt else cipher.encryptor() - return ctx.update(value) + ctx.finalize() - - def encrypt_key(self, key): - """ - Helper used to encrypt TOTP keys for storage. - - :param key: - TOTP key to encrypt, as raw bytes. - - :returns: - dict containing encrypted TOTP key & configuration parameters. - this format should be treated as opaque, and potentially subject - to change, though it is designed to be easily serialized/deserialized - (e.g. via JSON). - - .. note:: - - This function requires installation of the external - `cryptography `_ package. - - To give some algorithm details: This function uses AES-256-CTR to encrypt - the provided data. 
It takes the application secret and randomly generated salt, - and uses PBKDF2-HMAC-SHA256 to combine them and generate the AES key & IV. - """ - if not key: - raise ValueError("no key provided") - salt = getrandbytes(rng, self.salt_size) - cost = self.encrypt_cost - tag = self.default_tag - if not tag: - raise TypeError("no application secrets configured, can't encrypt OTP key") - ckey = self._cipher_aes_key(key, self.get_secret(tag), salt, cost) - # XXX: switch to base64? - return dict(v=1, c=cost, t=tag, s=b32encode(salt), k=b32encode(ckey)) - - def decrypt_key(self, enckey): - """ - Helper used to decrypt TOTP keys from storage format. - Consults configured secrets to decrypt key. - - :param source: - source object, as returned by :meth:`encrypt_key`. - - :returns: - ``(key, needs_recrypt)`` -- - - **key** will be the decrypted key, as bytes. - - **needs_recrypt** will be a boolean flag indicating - whether encryption cost or default tag is too old, - and henace that key needs re-encrypting before storing. - - .. note:: - - This function requires installation of the external - `cryptography `_ package. 
- """ - if not isinstance(enckey, dict): - raise TypeError("'enckey' must be dictionary") - version = enckey.get("v", None) - needs_recrypt = False - if version == 1: - _cipher_key = self._cipher_aes_key - else: - raise ValueError("missing / unrecognized 'enckey' version: %r" % (version,)) - tag = enckey['t'] - cost = enckey['c'] - key = _cipher_key( - value=b32decode(enckey['k']), - secret=self.get_secret(tag), - salt=b32decode(enckey['s']), - cost=cost, - ) - if cost != self.encrypt_cost or tag != self.default_tag: - needs_recrypt = True - return key, needs_recrypt - - #============================================================================= - # eoc - #============================================================================= - -#============================================================================= -# TOTP class -#============================================================================= - -#: helper to convert HOTP counter to bytes -_pack_uint64 = struct.Struct(">Q").pack - -#: helper to extract value from HOTP digest -_unpack_uint32 = struct.Struct(">I").unpack - -#: dummy bytes used as temp key for .using() method -_DUMMY_KEY = b"\x00" * 16 - -class TOTP(object): - """ - Helper for generating and verifying TOTP codes. - - Given a secret key and set of configuration options, this object - offers methods for token generation, token validation, and serialization. - It can also be used to track important persistent TOTP state, - such as the last counter used. - - This class accepts the following options - (only **key** and **format** may be specified as positional arguments). - - :arg str key: - The secret key to use. By default, should be encoded as - a base32 string (see **format** for other encodings). - - Exactly one of **key** or ``new=True`` must be specified. - - :arg str format: - The encoding used by the **key** parameter. May be one of: - ``"base32"`` (base32-encoded string), - ``"hex"`` (hexadecimal string), or ``"raw"`` (raw bytes). 
- Defaults to ``"base32"``. - - :param bool new: - If ``True``, a new key will be generated using :class:`random.SystemRandom`. - - Exactly one ``new=True`` or **key** must be specified. - - :param str label: - Label to associate with this token when generating a URI. - Displayed to user by most OTP client applications (e.g. Google Authenticator), - and typically has format such as ``"John Smith"`` or ``"jsmith@webservice.example.org"``. - Defaults to ``None``. - See :meth:`to_uri` for details. - - :param str issuer: - String identifying the token issuer (e.g. the domain name of your service). - Used internally by some OTP client applications (e.g. Google Authenticator) to distinguish entries - which otherwise have the same label. - Optional but strongly recommended if you're rendering to a URI. - Defaults to ``None``. - See :meth:`to_uri` for details. - - :param int size: - Number of bytes when generating new keys. Defaults to size of hash algorithm (e.g. 20 for SHA1). - - .. warning:: - - Overriding the default values for ``digits``, ``period``, or ``alg`` may - cause problems with some OTP client programs (such as Google Authenticator), - which may have these defaults hardcoded. - - :param int digits: - The number of digits in the generated / accepted tokens. Defaults to ``6``. - Must be in range [6 .. 10]. - - .. rst-class:: inline-title - .. caution:: - Due to a limitation of the HOTP algorithm, the 10th digit can only take on values 0 .. 2, - and thus offers very little extra security. - - :param str alg: - Name of hash algorithm to use. Defaults to ``"sha1"``. - ``"sha256"`` and ``"sha512"`` are also accepted, per :rfc:`6238`. - - :param int period: - The time-step period to use, in integer seconds. Defaults to ``30``. - - .. - See the passlib documentation for a full list of attributes & methods. 
- """ - #============================================================================= - # class attrs - #============================================================================= - - #: minimum number of bytes to allow in key, enforced by passlib. - # XXX: see if spec says anything relevant to this. - _min_key_size = 10 - - #: minimum & current serialization version (may be set independently by subclasses) - min_json_version = json_version = 1 - - #: AppWallet that this class will use for encrypting/decrypting keys. - #: (can be overwritten via the :meth:`TOTP.using()` constructor) - wallet = None - - #: function to get system time in seconds, as needed by :meth:`generate` and :meth:`verify`. - #: defaults to :func:`time.time`, but can be overridden on a per-instance basis. - now = _time.time - - #============================================================================= - # instance attrs - #============================================================================= - - #--------------------------------------------------------------------------- - # configuration attrs - #--------------------------------------------------------------------------- - - #: [private] secret key as raw :class:`!bytes` - #: see .key property for public access. - _key = None - - #: [private] cached copy of encrypted secret, - #: so .to_json() doesn't have to re-encrypt on each call. - _encrypted_key = None - - #: [private] cached copy of keyed HMAC function, - #: so ._generate() doesn't have to rebuild this each time - #: ._find_match() invokes it. - _keyed_hmac = None - - #: number of digits in the generated tokens. - digits = 6 - - #: name of hash algorithm in use (e.g. ``"sha1"``) - alg = "sha1" - - #: default label for :meth:`to_uri` - label = None - - #: default issuer for :meth:`to_uri` - issuer = None - - #: number of seconds per counter step. - #: *(TOTP uses an internal time-derived counter which - #: increments by 1 every* :attr:`!period` *seconds)*. 
- period = 30 - - #--------------------------------------------------------------------------- - # state attrs - #--------------------------------------------------------------------------- - - #: Flag set by deserialization methods to indicate the object needs to be re-serialized. - #: This can be for a number of reasons -- encoded using deprecated format, - #: or encrypted using a deprecated key or too few rounds. - changed = False - - #============================================================================= - # prototype construction - #============================================================================= - @classmethod - def using(cls, digits=None, alg=None, period=None, - issuer=None, wallet=None, now=None, **kwds): - """ - Dynamically create subtype of :class:`!TOTP` class - which has the specified defaults set. - - :parameters: **digits, alg, period, issuer**: - - All these options are the same as in the :class:`TOTP` constructor, - and the resulting class will use any values you specify here - as the default for all TOTP instances it creates. - - :param wallet: - Optional :class:`AppWallet` that will be used for encrypting/decrypting keys. - - :param secrets, secrets_path, encrypt_cost: - - If specified, these options will be passed to the :class:`AppWallet` constructor, - allowing you to directly specify the secret keys that should be used - to encrypt & decrypt stored keys. - - :returns: - subclass of :class:`!TOTP`. - - This method is useful for creating a TOTP class configured - to use your application's secrets for encrypting & decrypting - keys, as well as create new keys using it's desired configuration defaults. 
- - As an example:: - - >>> # your application can create a custom class when it initializes - >>> from passlib.totp import TOTP, generate_secret - >>> TotpFactory = TOTP.using(secrets={"1": generate_secret()}) - - >>> # subsequent TOTP objects created from this factory - >>> # will use the specified secrets to encrypt their keys... - >>> totp = TotpFactory.new() - >>> totp.to_dict() - {'enckey': {'c': 14, - 'k': 'H77SYXWORDPGVOQTFRR2HFUB3C45XXI7', - 's': 'G5DOQPIHIBUM2OOHHADQ', - 't': '1', - 'v': 1}, - 'type': 'totp', - 'v': 1} - - .. seealso:: :ref:`totp-creation` and :ref:`totp-storing-instances` tutorials for a usage example - """ - # XXX: could add support for setting default match 'window' and 'reuse' policy - - # :param now: - # Optional callable that should return current time for generator to use. - # Default to :func:`time.time`. This optional is generally not needed, - # and is mainly present for examples & unit-testing. - - subcls = type("TOTP", (cls,), {}) - - def norm_param(attr, value): - """ - helper which uses constructor to validate parameter value. - it returns corresponding attribute, so we use normalized value. - """ - # NOTE: this creates *subclass* instance, - # so normalization takes into account any custom params - # already stored. - kwds = dict(key=_DUMMY_KEY, format="raw") - kwds[attr] = value - obj = subcls(**kwds) - return getattr(obj, attr) - - if digits is not None: - subcls.digits = norm_param("digits", digits) - - if alg is not None: - subcls.alg = norm_param("alg", alg) - - if period is not None: - subcls.period = norm_param("period", period) - - # XXX: add default size as configurable parameter? 
- - if issuer is not None: - subcls.issuer = norm_param("issuer", issuer) - - if kwds: - subcls.wallet = AppWallet(**kwds) - if wallet: - raise TypeError("'wallet' and 'secrets' keywords are mutually exclusive") - elif wallet is not None: - if not isinstance(wallet, AppWallet): - raise exc.ExpectedTypeError(wallet, AppWallet, "wallet") - subcls.wallet = wallet - - if now is not None: - assert isinstance(now(), num_types) and now() >= 0, \ - "now() function must return non-negative int/float" - subcls.now = staticmethod(now) - - return subcls - - #============================================================================= - # init - #============================================================================= - - @classmethod - def new(cls, **kwds): - """ - convenience alias for creating new TOTP key, same as ``TOTP(new=True)`` - """ - return cls(new=True, **kwds) - - def __init__(self, key=None, format="base32", - # keyword only... - new=False, digits=None, alg=None, size=None, period=None, - label=None, issuer=None, changed=False, - **kwds): - super(TOTP, self).__init__(**kwds) - if changed: - self.changed = changed - - # validate & normalize alg - info = lookup_hash(alg or self.alg) - self.alg = info.name - digest_size = info.digest_size - if digest_size < 4: - raise RuntimeError("%r hash digest too small" % alg) - - # parse or generate new key - if new: - # generate new key - if key: - raise TypeError("'key' and 'new=True' are mutually exclusive") - if size is None: - # default to digest size, per RFC 6238 Section 5.1 - size = digest_size - elif size > digest_size: - # not forbidden by spec, but would just be wasted bytes. - # maybe just warn about this? 
- raise ValueError("'size' should be less than digest size " - "(%d)" % digest_size) - self.key = getrandbytes(rng, size) - elif not key: - raise TypeError("must specify either an existing 'key', or 'new=True'") - elif format == "encrypted": - # NOTE: this handles decrypting & setting '.key' - self.encrypted_key = key - elif key: - # use existing key, encoded using specified - self.key = _decode_bytes(key, format) - - # enforce min key size - if len(self.key) < self._min_key_size: - # only making this fatal for new=True, - # so that existing (but ridiculously small) keys can still be used. - msg = "for security purposes, secret key must be >= %d bytes" % self._min_key_size - if new: - raise ValueError(msg) - else: - warn(msg, exc.PasslibSecurityWarning, stacklevel=1) - - # validate digits - if digits is None: - digits = self.digits - if not isinstance(digits, int_types): - raise TypeError("digits must be an integer, not a %r" % type(digits)) - if digits < 6 or digits > 10: - raise ValueError("digits must in range(6,11)") - self.digits = digits - - # validate label - if label: - self._check_label(label) - self.label = label - - # validate issuer - if issuer: - self._check_issuer(issuer) - self.issuer = issuer - - # init period - if period is not None: - self._check_serial(period, "period", minval=1) - self.period = period - - #============================================================================= - # helpers to verify value types & ranges - #============================================================================= - - @staticmethod - def _check_serial(value, param, minval=0): - """ - check that serial value (e.g. 
'counter') is non-negative integer - """ - if not isinstance(value, int_types): - raise exc.ExpectedTypeError(value, "int", param) - if value < minval: - raise ValueError("%s must be >= %d" % (param, minval)) - - @staticmethod - def _check_label(label): - """ - check that label doesn't contain chars forbidden by KeyURI spec - """ - if label and ":" in label: - raise ValueError("label may not contain ':'") - - @staticmethod - def _check_issuer(issuer): - """ - check that issuer doesn't contain chars forbidden by KeyURI spec - """ - if issuer and ":" in issuer: - raise ValueError("issuer may not contain ':'") - - #============================================================================= - # key attributes - #============================================================================= - - #------------------------------------------------------------------ - # raw key - #------------------------------------------------------------------ - @property - def key(self): - """ - secret key as raw bytes - """ - return self._key - - @key.setter - def key(self, value): - # set key - if not isinstance(value, bytes): - raise exc.ExpectedTypeError(value, bytes, "key") - self._key = value - - # clear cached properties derived from key - self._encrypted_key = self._keyed_hmac = None - - #------------------------------------------------------------------ - # encrypted key - #------------------------------------------------------------------ - @property - def encrypted_key(self): - """ - secret key, encrypted using application secret. - this match the output of :meth:`AppWallet.encrypt_key`, - and should be treated as an opaque json serializable object. 
- """ - enckey = self._encrypted_key - if enckey is None: - wallet = self.wallet - if not wallet: - raise TypeError("no application secrets present, can't encrypt TOTP key") - enckey = self._encrypted_key = wallet.encrypt_key(self.key) - return enckey - - @encrypted_key.setter - def encrypted_key(self, value): - wallet = self.wallet - if not wallet: - raise TypeError("no application secrets present, can't decrypt TOTP key") - self.key, needs_recrypt = wallet.decrypt_key(value) - if needs_recrypt: - # mark as changed so it gets re-encrypted & written to db - self.changed = True - else: - # cache encrypted key for re-use - self._encrypted_key = value - - #------------------------------------------------------------------ - # pretty-printed / encoded key helpers - #------------------------------------------------------------------ - - @property - def hex_key(self): - """ - secret key encoded as hexadecimal string - """ - return bascii_to_str(base64.b16encode(self.key)).lower() - - @property - def base32_key(self): - """ - secret key encoded as base32 string - """ - return b32encode(self.key) - - def pretty_key(self, format="base32", sep="-"): - """ - pretty-print the secret key. - - This is mainly useful for situations where the user cannot get the qrcode to work, - and must enter the key manually into their TOTP client. It tries to format - the key in a manner that is easier for humans to read. - - :param format: - format to output secret key. ``"hex"`` and ``"base32"`` are both accepted. - - :param sep: - separator to insert to break up key visually. - can be any of ``"-"`` (the default), ``" "``, or ``False`` (no separator). - - :return: - key as native string. 
- - Usage example:: - - >>> t = TOTP('s3jdvb7qd2r7jpxx') - >>> t.pretty_key() - 'S3JD-VB7Q-D2R7-JPXX' - """ - if format == "hex" or format == "base16": - key = self.hex_key - elif format == "base32": - key = self.base32_key - else: - raise ValueError("unknown byte-encoding format: %r" % (format,)) - if sep: - key = group_string(key, sep) - return key - - #============================================================================= - # time & token parsing - #============================================================================= - - @classmethod - def normalize_time(cls, time): - """ - Normalize time value to unix epoch seconds. - - :arg time: - Can be ``None``, :class:`!datetime`, - or unix epoch timestamp as :class:`!float` or :class:`!int`. - If ``None``, uses current system time. - Naive datetimes are treated as UTC. - - :returns: - unix epoch timestamp as :class:`int`. - """ - if isinstance(time, int_types): - return time - elif isinstance(time, float): - return int(time) - elif time is None: - return int(cls.now()) - elif hasattr(time, "utctimetuple"): - # coerce datetime to UTC timestamp - # NOTE: utctimetuple() assumes naive datetimes are in UTC - # NOTE: we explicitly *don't* want microseconds. - return calendar.timegm(time.utctimetuple()) - else: - raise exc.ExpectedTypeError(time, "int, float, or datetime", "time") - - def _time_to_counter(self, time): - """ - convert timestamp to HOTP counter using :attr:`period`. - """ - return time // self.period - - def _counter_to_time(self, counter): - """ - convert HOTP counter to timestamp using :attr:`period`. - """ - return counter * self.period - - @hybrid_method - def normalize_token(self_or_cls, token): - """ - Normalize OTP token representation: - strips whitespace, converts integers to a zero-padded string, - validates token content & number of digits. 
- - This is a hybrid method -- it can be called at the class level, - as ``TOTP.normalize_token()``, or the instance level as ``TOTP().normalize_token()``. - It will normalize to the instance-specific number of :attr:`~TOTP.digits`, - or use the class default. - - :arg token: - token as ascii bytes, unicode, or an integer. - - :raises ValueError: - if token has wrong number of digits, or contains non-numeric characters. - - :returns: - token as :class:`!unicode` string, containing only digits 0-9. - """ - digits = self_or_cls.digits - if isinstance(token, int_types): - token = u("%0*d") % (digits, token) - else: - token = to_unicode(token, param="token") - token = _clean_re.sub(u(""), token) - if not token.isdigit(): - raise MalformedTokenError("Token must contain only the digits 0-9") - if len(token) != digits: - raise MalformedTokenError("Token must have exactly %d digits" % digits) - return token - - #============================================================================= - # token generation - #============================================================================= - -# # debug helper -# def generate_range(self, size, time=None): -# counter = self._time_to_counter(time) - (size + 1) // 2 -# end = counter + size -# while counter <= end: -# token = self._generate(counter) -# yield TotpToken(self, token, counter) -# counter += 1 - - def generate(self, time=None): - """ - Generate token for specified time - (uses current time if none specified). - - :arg time: - Can be ``None``, a :class:`!datetime`, - or class:`!float` / :class:`!int` unix epoch timestamp. - If ``None`` (the default), uses current system time. - Naive datetimes are treated as UTC. - - :returns: - - A :class:`TotpToken` instance, which can be treated - as a sequence of ``(token, expire_time)`` -- see that class - for more details. - - Usage example:: - - >>> # generate a new token, wrapped in a TotpToken instance... 
- >>> otp = TOTP('s3jdvb7qd2r7jpxx') - >>> otp.generate(1419622739) - - - >>> # when you just need the token... - >>> otp.generate(1419622739).token - '897212' - """ - time = self.normalize_time(time) - counter = self._time_to_counter(time) - if counter < 0: - raise ValueError("timestamp must be >= 0") - token = self._generate(counter) - return TotpToken(self, token, counter) - - def _generate(self, counter): - """ - base implementation of HOTP token generation algorithm. - - :arg counter: HOTP counter, as non-negative integer - :returns: token as unicode string - """ - # generate digest - assert isinstance(counter, int_types), "counter must be integer" - assert counter >= 0, "counter must be non-negative" - keyed_hmac = self._keyed_hmac - if keyed_hmac is None: - keyed_hmac = self._keyed_hmac = compile_hmac(self.alg, self.key) - digest = keyed_hmac(_pack_uint64(counter)) - digest_size = keyed_hmac.digest_info.digest_size - assert len(digest) == digest_size, "digest_size: sanity check failed" - - # derive 31-bit token value - assert digest_size >= 20, "digest_size: sanity check 2 failed" # otherwise 0xF+4 will run off end of hash. - offset = byte_elem_value(digest[-1]) & 0xF - value = _unpack_uint32(digest[offset:offset+4])[0] & 0x7fffffff - - # render to decimal string, return last chars - # NOTE: the 10'th digit is not as secure, as it can only take on values 0-2, not 0-9, - # due to 31-bit mask on int ">I". But some servers / clients use it :| - # if 31-bit mask removed (which breaks spec), would only get values 0-4. 
- digits = self.digits - assert 0 < digits < 11, "digits: sanity check failed" - return (u("%0*d") % (digits, value))[-digits:] - - #============================================================================= - # token verification - #============================================================================= - - @classmethod - def verify(cls, token, source, **kwds): - r""" - Convenience wrapper around :meth:`TOTP.from_source` and :meth:`TOTP.match`. - - This parses a TOTP key & configuration from the specified source, - and tries and match the token. - It's designed to parallel the :meth:`passlib.ifc.PasswordHash.verify` method. - - :param token: - Token string to match. - - :param source: - Serialized TOTP key. - Can be anything accepted by :meth:`TOTP.from_source`. - - :param \*\*kwds: - All additional keywords passed to :meth:`TOTP.match`. - - :return: - A :class:`TotpMatch` instance, or raises a :exc:`TokenError`. - """ - return cls.from_source(source).match(token, **kwds) - - def match(self, token, time=None, window=30, skew=0, last_counter=None): - """ - Match TOTP token against specified timestamp. - Searches within a window before & after the provided time, - in order to account for transmission delay and small amounts of skew in the client's clock. - - :arg token: - Token to validate. - may be integer or string (whitespace and hyphens are ignored). - - :param time: - Unix epoch timestamp, can be any of :class:`!float`, :class:`!int`, or :class:`!datetime`. - if ``None`` (the default), uses current system time. - *this should correspond to the time the token was received from the client*. - - :param int window: - How far backward and forward in time to search for a match. - Measured in seconds. Defaults to ``30``. Typically only useful if set - to multiples of :attr:`period`. - - :param int skew: - Adjust timestamp by specified value, to account for excessive - client clock skew. Measured in seconds. Defaults to ``0``. 
- - Negative skew (the common case) indicates transmission delay, - and/or that the client clock is running behind the server. - - Positive skew indicates the client clock is running ahead of the server - (and by enough that it cancels out any negative skew added by - the transmission delay). - - You should ensure the server clock uses a reliable time source such as NTP, - so that only the client clock's inaccuracy needs to be accounted for. - - This is an advanced parameter that should usually be left at ``0``; - The **window** parameter is usually enough to account - for any observed transmission delay. - - :param last_counter: - Optional value of last counter value that was successfully used. - If specified, verify will never search earlier counters, - no matter how large the window is. - - Useful when client has previously authenticated, - and thus should never provide a token older than previously - verified value. - - :raises ~passlib.exc.TokenError: - - If the token is malformed, fails to match, or has already been used. - - :returns TotpMatch: - - Returns a :class:`TotpMatch` instance on successful match. - Can be treated as tuple of ``(counter, time)``. - Raises error if token is malformed / can't be verified. - - Usage example:: - - >>> totp = TOTP('s3jdvb7qd2r7jpxx') - - >>> # valid token for this time period - >>> totp.match('897212', 1419622729) - - - >>> # token from counter step 30 sec ago (within allowed window) - >>> totp.match('000492', 1419622729) - - - >>> # invalid token -- token from 60 sec ago (outside of window) - >>> totp.match('760389', 1419622729) - Traceback: - ... 
- InvalidTokenError: Token did not match - """ - time = self.normalize_time(time) - self._check_serial(window, "window") - - client_time = time + skew - if last_counter is None: - last_counter = -1 - start = max(last_counter, self._time_to_counter(client_time - window)) - end = self._time_to_counter(client_time + window) + 1 - # XXX: could pass 'expected = _time_to_counter(client_time + TRANSMISSION_DELAY)' - # to the _find_match() method, would help if window set to very large value. - - counter = self._find_match(token, start, end) - assert counter >= last_counter, "sanity check failed: counter went backward" - - if counter == last_counter: - raise UsedTokenError(expire_time=(last_counter + 1) * self.period) - - # NOTE: By returning match tied to


\d{2}):(?P\d{2}):(?P\d{2})(?P\.\d+)?' -OFFSET_PATTERN = r'(?P[+-]\d{2}):(?P\d{2})' -DATETIME_PATTERN = DATE_PATTERN + '[T ]' + TIME_PATTERN - - -class Time(SimpleModel): - """Just that, Time. No time zone support. - - Native type is :class:`datetime.time`. - """ - - __type_name__ = 'time' - Value = datetime.time - - class Attributes(SimpleModel.Attributes): - """Customizable attributes of the :class:`spyne.model.primitive.Time` - type.""" - - gt = None # minExclusive - """The time should be greater than this time.""" - - ge = datetime.time(0, 0, 0, 0) # minInclusive - """The time should be greater than or equal to this time.""" - - lt = None # maxExclusive - """The time should be lower than this time.""" - - le = datetime.time(23, 59, 59, 999999) # maxInclusive - """The time should be lower than or equal to this time.""" - - pattern = None - """A regular expression that matches the whole time. See here for more - info: http://www.regular-expressions.info/xml.html""" - - time_format = None - """Time format fed to the ``strftime`` function. 
See: - http://docs.python.org/library/datetime.html?highlight=strftime#strftime-strptime-behavior - Ignored by protocols like SOAP which have their own ideas about how - Date objects should be serialized.""" - - @staticmethod - def is_default(cls): - return ( SimpleModel.is_default(cls) - and cls.Attributes.gt == Time.Attributes.gt - and cls.Attributes.ge == Time.Attributes.ge - and cls.Attributes.lt == Time.Attributes.lt - and cls.Attributes.le == Time.Attributes.le - and cls.Attributes.pattern == Time.Attributes.pattern - ) - - @staticmethod - def validate_native(cls, value): - return SimpleModel.validate_native(cls, value) and ( - value is None or ( - (cls.Attributes.gt is None or value > cls.Attributes.gt) - and value >= cls.Attributes.ge - and (cls.Attributes.lt is None or value < cls.Attributes.lt) - and value <= cls.Attributes.le - )) - -_min_dt = datetime.datetime.min.replace(tzinfo=spyne.LOCAL_TZ) -_max_dt = datetime.datetime.max.replace(tzinfo=spyne.LOCAL_TZ) - - -class DateTime(SimpleModel): - """A compact way to represent dates and times together. Supports time zones. - Working with timezones is a bit quirky -- Spyne works very hard to have - all datetimes with time zones internally and only strips them when - explicitly requested with ``timezone=False``\\. See - :attr:`DateTime.Attributes.as_timezone` for more information. - - Native type is :class:`datetime.datetime`. - """ - - __type_name__ = 'dateTime' - Value = datetime.datetime - - _local_re = re.compile(DATETIME_PATTERN) - _utc_re = re.compile(DATETIME_PATTERN + 'Z') - _offset_re = re.compile(DATETIME_PATTERN + OFFSET_PATTERN) - - class Attributes(SimpleModel.Attributes): - """Customizable attributes of the :class:`spyne.model.primitive.DateTime` - type.""" - - gt = None # minExclusive - """The datetime should be greater than this datetime. It must always - have a timezone.""" - - ge = _min_dt # minInclusive - """The datetime should be greater than or equal to this datetime. 
It - must always have a timezone.""" - - lt = None # maxExclusive - """The datetime should be lower than this datetime. It must always have - a timezone.""" - - le = _max_dt # maxInclusive - """The datetime should be lower than or equal to this datetime. It must - always have a timezone.""" - - pattern = None - """A regular expression that matches the whole datetime. See here for - more info: http://www.regular-expressions.info/xml.html""" - - dt_format = None - """DateTime format fed to the ``strftime`` function. See: - http://docs.python.org/library/datetime.html?highlight=strftime#strftime-strptime-behavior - Ignored by protocols like SOAP which have their own ideas about how - DateTime objects should be serialized.""" - - out_format = None - """DateTime format fed to the ``strftime`` function only when - serializing. See: - http://docs.python.org/library/datetime.html?highlight=strftime#strftime-strptime-behavior - Ignored by protocols like SOAP which have their own ideas about how - DateTime objects should be serialized.""" - - string_format = None - """A regular python string formatting string. %s will contain the date - string. See here for more info: - http://docs.python.org/library/stdtypes.html#string-formatting""" - - as_timezone = None - """When not None, converts: - - Outgoing values to the given time zone (by calling - ``.astimezone()``). - - Incoming values without tzinfo to the given time zone by calling - ``.replace(tzinfo=)`` and values with tzinfo to the - given timezone by calling ``.astimezone()``. - - Either None or a return value of pytz.timezone() - - When this is None and a datetime with tzinfo=None comes in, it's - converted to spyne.LOCAL_TZ which defaults to ``pytz.utc``. You can use - `tzlocal `_ to set it to local - time right after ``import spyne``. - """ - - timezone = True - """If False, time zone info is stripped before serialization. 
Also makes - sqlalchemy schema generator emit 'timestamp without timezone'.""" - - serialize_as = None - """One of (None, 'sec', 'sec_float', 'msec', 'msec_float', 'usec')""" - - # TODO: Move this to ModelBase and make it work with all types in all - # protocols. - parser = None - """Callable for string parser. It must accept exactly four arguments: - `protocol, cls, string` and must return a `datetime.datetime` object. - If this is not None, all other parsing configurations (e.g. - `date_format`) are ignored. - """ - - @staticmethod - def is_default(cls): - return ( SimpleModel.is_default(cls) - and cls.Attributes.gt == DateTime.Attributes.gt - and cls.Attributes.ge == DateTime.Attributes.ge - and cls.Attributes.lt == DateTime.Attributes.lt - and cls.Attributes.le == DateTime.Attributes.le - and cls.Attributes.pattern == DateTime.Attributes.pattern - ) - - @staticmethod - def validate_native(cls, value): - if isinstance(value, datetime.datetime) and value.tzinfo is None: - value = value.replace(tzinfo=spyne.LOCAL_TZ) - return SimpleModel.validate_native(cls, value) and ( - value is None or ( - # min_dt is also a valid value if gt is intact. - (cls.Attributes.gt is None or value > cls.Attributes.gt) - and value >= cls.Attributes.ge - # max_dt is also a valid value if lt is intact. - and (cls.Attributes.lt is None or value < cls.Attributes.lt) - and value <= cls.Attributes.le - )) - - -class Date(DateTime): - """Just that, Date. No time zone support. - - Native type is :class:`datetime.date`. 
- """ - - __type_name__ = 'date' - - _offset_re = re.compile(DATE_PATTERN + '(' + OFFSET_PATTERN + '|Z)') - Value = datetime.date - - class Attributes(DateTime.Attributes): - """Customizable attributes of the :class:`spyne.model.primitive.Date` - type.""" - - gt = None # minExclusive - """The date should be greater than this date.""" - - ge = datetime.date(1, 1, 1) # minInclusive - """The date should be greater than or equal to this date.""" - - lt = None # maxExclusive - """The date should be lower than this date.""" - - le = datetime.date(datetime.MAXYEAR, 12, 31) # maxInclusive - """The date should be lower than or equal to this date.""" - - date_format = None - """Date format fed to the ``strftime`` function. See: - http://docs.python.org/library/datetime.html?highlight=strftime#strftime-strptime-behavior - Ignored by protocols like SOAP which have their own ideas about how - Date objects should be serialized.""" - - pattern = None - """A regular expression that matches the whole date. See here for more - info: http://www.regular-expressions.info/xml.html""" - - - @staticmethod - def is_default(cls): - return ( SimpleModel.is_default(cls) - and cls.Attributes.gt == Date.Attributes.gt - and cls.Attributes.ge == Date.Attributes.ge - and cls.Attributes.lt == Date.Attributes.lt - and cls.Attributes.le == Date.Attributes.le - and cls.Attributes.pattern == Date.Attributes.pattern - ) - - -# this object tries to follow ISO 8601 standard. 
-class Duration(SimpleModel): - """Native type is :class:`datetime.timedelta`.""" - - __type_name__ = 'duration' - Value = datetime.timedelta - - -NATIVE_MAP.update({ - datetime.datetime: DateTime, - datetime.time: Time, - datetime.date: Date, - datetime.timedelta: Duration, -}) diff --git a/libs_crutch/contrib/spyne/model/primitive/network.py b/libs_crutch/contrib/spyne/model/primitive/network.py deleted file mode 100644 index dfbc292..0000000 --- a/libs_crutch/contrib/spyne/model/primitive/network.py +++ /dev/null @@ -1,157 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from spyne.model._base import SimpleModel -from spyne.model.primitive._base import re_match_with_span -from spyne.model.primitive.string import Unicode - - -_PATT_MAC = "([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})" - - -def _validate_string(cls, value): - return ( SimpleModel.validate_string(cls, value) - and (value is None or ( - cls.Attributes.min_len <= len(value) <= cls.Attributes.max_len - and re_match_with_span(cls.Attributes, value) - ))) - -_mac_validate = { - None: _validate_string, - # TODO: add int serialization -} - - -_MacBase = Unicode(max_len=17, min_len=17, pattern=_PATT_MAC) -class MacAddress(_MacBase): - """Unicode subclass for a MAC address.""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'addr_mac' - - class Attributes(_MacBase.Attributes): - serialize_as = None - - @staticmethod - def validate_string(cls, value): - return _mac_validate[cls.Attributes.serialize_as](cls, value) - - @staticmethod - def validate_native(cls, value): - return SimpleModel.validate_native(cls, value) - - -_PATT_IPV4_FRAG = r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])" -_PATT_IPV4 = r"(%(P4)s\.){3,3}%(P4)s" % {'P4': _PATT_IPV4_FRAG} - - -_ipv4_validate = { - None: _validate_string, - # TODO: add int serialization -} - - -_Ipv4Base = Unicode(15, pattern=_PATT_IPV4) -class Ipv4Address(_Ipv4Base): - """Unicode subclass for an IPv4 address.""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'addr_ipv4' - - class Attributes(_Ipv4Base.Attributes): - serialize_as = None - - @staticmethod - def validate_string(cls, value): - return _ipv4_validate[cls.Attributes.serialize_as](cls, value) - - @staticmethod - def validate_native(cls, value): - return SimpleModel.validate_native(cls, value) - - -# http://stackoverflow.com/a/1934546 
-_PATT_IPV6_FRAG = "[0-9a-fA-F]{1,4}" -_PATT_IPV6 = ("(" - "(%(P6)s:){7,7}%(P6)s|" # 1:2:3:4:5:6:7:8 - "(%(P6)s:){1,7}:|" # 1:: 1:2:3:4:5:6:7:: - "(%(P6)s:){1,6}:%(P6)s|" # 1::8 1:2:3:4:5:6::8 1:2:3:4:5:6::8 - "(%(P6)s:){1,5}(:%(P6)s){1,2}|" # 1::7:8 1:2:3:4:5::7:8 1:2:3:4:5::8 - "(%(P6)s:){1,4}(:%(P6)s){1,3}|" # 1::6:7:8 1:2:3:4::6:7:8 1:2:3:4::8 - "(%(P6)s:){1,3}(:%(P6)s){1,4}|" # 1::5:6:7:8 1:2:3::5:6:7:8 1:2:3::8 - "(%(P6)s:){1,2}(:%(P6)s){1,5}|" # 1::4:5:6:7:8 1:2::4:5:6:7:8 1:2::8 - "%(P6)s:((:%(P6)s){1,6})|" # 1::3:4:5:6:7:8 1::3:4:5:6:7:8 1::8 - ":((:%(P6)s){1,7}|:)|" # ::2:3:4:5:6:7:8 ::2:3:4:5:6:7:8 ::8 :: - "fe80:(:%(P6)s){0,4}%%[0-9a-zA-Z]{1,}|" # fe80::7:8%eth0 fe80::7:8%1 (link-local IPv6 addresses with zone index) - "::(ffff(:0{1,4}){0,1}:){0,1}%(A4)s|" # ::255.255.255.255 ::ffff:255.255.255.255 ::ffff:0:255.255.255.255 (IPv4-mapped IPv6 addresses and IPv4-translated addresses) - "(%(P6)s:){1,4}:%(A4)s" # 2001:db8:3:4::192.0.2.33 64:ff9b::192.0.2.33 (IPv4-Embedded IPv6 Address) -")") % {'P6': _PATT_IPV6_FRAG, 'A4': _PATT_IPV4} - - -_ipv6_validate = { - None: _validate_string, - # TODO: add int serialization -} - - -_Ipv6Base = Unicode(45, pattern=_PATT_IPV6) -class Ipv6Address(_Ipv6Base): - """Unicode subclass for an IPv6 address.""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'addr_ipv6' - - class Attributes(_Ipv6Base.Attributes): - serialize_as = None - - @staticmethod - def validate_string(cls, value): - return _ipv6_validate[cls.Attributes.serialize_as](cls, value) - - @staticmethod - def validate_native(cls, value): - return SimpleModel.validate_native(cls, value) - - -_PATT_IPV4V6 = "(%s|%s)" % (_PATT_IPV4, _PATT_IPV6) - - -_ip_validate = { - None: _validate_string, - # TODO: add int serialization -} - - -_IpAddressBase = Unicode(45, pattern=_PATT_IPV4V6) -class IpAddress(_IpAddressBase): - """Unicode subclass for an IPv4 or IPv6 address.""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'addr_ip' - - class 
Attributes(_IpAddressBase.Attributes): - serialize_as = None - - @staticmethod - def validate_string(cls, value): - return _ip_validate[cls.Attributes.serialize_as](cls, value) - - @staticmethod - def validate_native(cls, value): - return SimpleModel.validate_native(cls, value) diff --git a/libs_crutch/contrib/spyne/model/primitive/number.py b/libs_crutch/contrib/spyne/model/primitive/number.py deleted file mode 100644 index e10c08b..0000000 --- a/libs_crutch/contrib/spyne/model/primitive/number.py +++ /dev/null @@ -1,417 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import math -import decimal -import platform -from _warnings import warn - -from spyne.model import SimpleModel -from spyne.model.primitive import NATIVE_MAP -from spyne.util import six - - -class NumberLimitsWarning(Warning): - pass - - -class Decimal(SimpleModel): - """The primitive that corresponds to the native python Decimal. - - This is also the base class for denoting numbers. - - Note that it is your responsibility to make sure that the scale and - precision constraints set in this type is consistent with the values in the - context of the decimal package. See the :func:`decimal.getcontext` - documentation for more information. 
- """ - - __type_name__ = 'decimal' - - Value = decimal.Decimal - # contrary to popular belief, Decimal hates float. - - class Attributes(SimpleModel.Attributes): - """Customizable attributes of the :class:`spyne.model.primitive.Decimal` - type.""" - - gt = decimal.Decimal('-inf') # minExclusive - """The value should be greater than this number.""" - - ge = decimal.Decimal('-inf') # minInclusive - """The value should be greater than or equal to this number.""" - - lt = decimal.Decimal('inf') # maxExclusive - """The value should be lower than this number.""" - - le = decimal.Decimal('inf') # maxInclusive - """The value should be lower than or equal to this number.""" - - max_str_len = 1024 - """The maximum length of string to be attempted to convert to number.""" - - format = None - """A regular python string formatting string. See here: - http://docs.python.org/2/library/stdtypes.html#string-formatting""" - - str_format = None - """A regular python string formatting string used by invoking its - ``format()`` function. See here: - http://docs.python.org/2/library/string.html#format-string-syntax""" - - pattern = None - """A regular expression that matches the whole field. 
See here for more - info: http://www.regular-expressions.info/xml.html""" - - total_digits = decimal.Decimal('inf') - """Maximum number of digits.""" - - fraction_digits = decimal.Decimal('inf') - """Maximum number of digits after the decimal separator.""" - - min_bound = None - """Hardware limit that determines the lowest value this type can - store.""" - - max_bound = None - """Hardware limit that determines the highest value this type can - store.""" - - def __new__(cls, *args, **kwargs): - assert len(args) <= 2 - - if len(args) >= 1 and args[0] is not None: - kwargs['total_digits'] = args[0] - kwargs['fraction_digits'] = 0 - if len(args) == 2 and args[1] is not None: - kwargs['fraction_digits'] = args[1] - - retval = SimpleModel.__new__(cls, **kwargs) - - return retval - - @classmethod - def _s_customize(cls, **kwargs): - td = kwargs.get('total_digits', None) - fd = kwargs.get('fraction_digits', None) - if td is not None and fd is not None: - assert td > 0, "'total_digits' must be positive." - assert fd <= td, \ - "'total_digits' must be greater than" \ - " or equal to 'fraction_digits'." \ - " %r ! 
<= %r" % (fd, td) - - msl = kwargs.get('max_str_len', None) - if msl is None: - kwargs['max_str_len'] = cls.Attributes.total_digits + 2 - # + 1 for decimal separator - # + 1 for negative sign - - else: - kwargs['max_str_len'] = msl - - minb = cls.Attributes.min_bound - maxb = cls.Attributes.max_bound - ge = kwargs.get("ge", None) - gt = kwargs.get("gt", None) - le = kwargs.get("le", None) - lt = kwargs.get("lt", None) - - if minb is not None: - if ge is not None and ge < minb: - warn("'Greater than or equal value' %d smaller than min_bound %d" - % (ge, minb), NumberLimitsWarning) - - if gt is not None and gt < minb: - warn("'Greater than' value %d smaller than min_bound %d" - % (gt, minb), NumberLimitsWarning) - - if le is not None and le < minb: - raise ValueError( - "'Little than or equal' value %d smaller than min_bound %d" - % (le, minb)) - - if lt is not None and lt <= minb: - raise ValueError( - "'Little than' value %d smaller than min_bound %d" - % (lt, minb)) - - if maxb is not None: - if le is not None and le > maxb: - warn("'Little than or equal' value %d greater than max_bound %d" - % (le, maxb), NumberLimitsWarning) - - if lt is not None and lt > maxb: - warn("'Little than' value %d greater than max_bound %d" - % (lt, maxb), NumberLimitsWarning) - - if ge is not None and ge > maxb: - raise ValueError( - "'Greater than or equal' value %d greater than max_bound %d" - % (ge, maxb)) - - if gt is not None and gt >= maxb: - raise ValueError( - "'Greater than' value %d greater than max_bound %d" - % (gt, maxb)) - - return super(Decimal, cls)._s_customize(**kwargs) - - @staticmethod - def is_default(cls): - return ( SimpleModel.is_default(cls) - and cls.Attributes.gt == Decimal.Attributes.gt - and cls.Attributes.ge == Decimal.Attributes.ge - and cls.Attributes.lt == Decimal.Attributes.lt - and cls.Attributes.le == Decimal.Attributes.le - and cls.Attributes.total_digits == - Decimal.Attributes.total_digits - and cls.Attributes.fraction_digits == - 
Decimal.Attributes.fraction_digits - ) - - @staticmethod - def validate_string(cls, value): - return SimpleModel.validate_string(cls, value) and ( - value is None or (len(value) <= cls.Attributes.max_str_len) - ) - - @staticmethod - def validate_native(cls, value): - return SimpleModel.validate_native(cls, value) and ( - value is None or ( - value > cls.Attributes.gt and - value >= cls.Attributes.ge and - value < cls.Attributes.lt and - value <= cls.Attributes.le - )) - - -class Double(Decimal): - """This is serialized as the python ``float``. So this type comes with its - gotchas. Unless you really know what you're doing, you should use a - :class:`Decimal` with a pre-defined number of integer and decimal digits. - - .. NOTE:: - This class is not compatible with :class:`spyne.model.Decimal`. You can - get strange results if you're using a `decimal.Decimal` instance for a - field denoted as `Double` or `Float` and vice versa. Make sure you only - return instances of types compatible with designated types. - """ - - __type_name__ = 'double' - Value = float - - if platform.python_version_tuple()[:2] == ('2','6'): - class Attributes(Decimal.Attributes): - """Customizable attributes of the :class:`spyne.model.primitive.Double` - type. 
This class is only here for Python 2.6: See this bug report - for more info: http://bugs.python.org/issue2531 - """ - - gt = float('-inf') # minExclusive - """The value should be greater than this number.""" - - ge = float('-inf') # minInclusive - """The value should be greater than or equal to this number.""" - - lt = float('inf') # maxExclusive - """The value should be lower than this number.""" - - le = float('inf') # maxInclusive - """The value should be lower than or equal to this number.""" - - @staticmethod - def is_default(cls): - return ( SimpleModel.is_default(cls) - and cls.Attributes.gt == Double.Attributes.gt - and cls.Attributes.ge == Double.Attributes.ge - and cls.Attributes.lt == Double.Attributes.lt - and cls.Attributes.le == Double.Attributes.le - ) - - -class Float(Double): - """Synonym for Double (as far as python side of things are concerned). - It's here for compatibility reasons.""" - - __type_name__ = 'float' - - -class Integer(Decimal): - """The arbitrary-size signed integer.""" - - __type_name__ = 'integer' - Value = int - - @staticmethod - def validate_native(cls, value): - return ( Decimal.validate_native(cls, value) - and (value is None or int(value) == value) - ) - - -class UnsignedInteger(Integer): - """The arbitrary-size unsigned integer, also known as nonNegativeInteger.""" - - __type_name__ = 'nonNegativeInteger' - - @staticmethod - def validate_native(cls, value): - return ( Integer.validate_native(cls, value) - and (value is None or value >= 0) - ) - - -NonNegativeInteger = UnsignedInteger -"""The arbitrary-size unsigned integer, alias for UnsignedInteger.""" - - -class PositiveInteger(NonNegativeInteger): - - """The arbitrary-size positive integer (natural number).""" - - __type_name__ = 'positiveInteger' - - @staticmethod - def validate_native(cls, value): - return (Integer.validate_native(cls, value) - and (value is None or value > 0)) - - -def TBoundedInteger(num_bits, type_name): - _min_b = -(0x8<<(num_bits-4)) # 0x8 is 4 
bits. - _max_b = (0x8<<(num_bits-4)) - 1 # -1? c'est la vie - - class _BoundedInteger(Integer): - __type_name__ = type_name - - class Attributes(Integer.Attributes): - max_str_len = math.ceil(math.log(2**num_bits, 10)) - min_bound = _min_b - max_bound = _max_b - - @staticmethod - def validate_native(cls, value): - return ( - Integer.validate_native(cls, value) - and (value is None or (_min_b <= value <= _max_b)) - ) - - return _BoundedInteger - - -def TBoundedUnsignedInteger(num_bits, type_name): - _min_b = 0 - _max_b = 2 ** num_bits - 1 # -1? c'est la vie ;) - - class _BoundedUnsignedInteger(UnsignedInteger): - __type_name__ = type_name - - class Attributes(UnsignedInteger.Attributes): - max_str_len = math.ceil(math.log(2**num_bits, 10)) - min_bound = _min_b - max_bound = _max_b - - @staticmethod - def validate_native(cls, value): - return ( - UnsignedInteger.validate_native(cls, value) - and (value is None or (_min_b <= value < _max_b)) - ) - - return _BoundedUnsignedInteger - - -Integer64 = TBoundedInteger(64, 'long') -"""The 64-bit signed integer, also known as ``long``.""" - -Long = Integer64 -"""The 64-bit signed integer, alias for :class:`Integer64`.""" - - -Integer32 = TBoundedInteger(32, 'int') -"""The 64-bit signed integer, also known as ``int``.""" - -Int = Integer32 -"""The 32-bit signed integer, alias for :class:`Integer32`.""" - - -Integer16 = TBoundedInteger(16, 'short') -"""The 16-bit signed integer, also known as ``short``.""" - -Short = Integer16 -"""The 16-bit signed integer, alias for :class:`Integer16`.""" - - -Integer8 = TBoundedInteger(8, 'byte') -"""The 8-bit signed integer, also known as ``byte``.""" - -Byte = Integer8 -"""The 8-bit signed integer, alias for :class:`Integer8`.""" - - -UnsignedInteger64 = TBoundedUnsignedInteger(64, 'unsignedLong') -"""The 64-bit unsigned integer, also known as ``unsignedLong``.""" - -UnsignedLong = UnsignedInteger64 -"""The 64-bit unsigned integer, alias for :class:`UnsignedInteger64`.""" - - 
-UnsignedInteger32 = TBoundedUnsignedInteger(32, 'unsignedInt') -"""The 64-bit unsigned integer, also known as ``unsignedInt``.""" - -UnsignedInt = UnsignedInteger32 -"""The 32-bit unsigned integer, alias for :class:`UnsignedInteger32`.""" - - -UnsignedInteger16 = TBoundedUnsignedInteger(16, 'unsignedShort') -"""The 16-bit unsigned integer, also known as ``unsignedShort``.""" - -UnsignedShort = UnsignedInteger16 -"""The 16-bit unsigned integer, alias for :class:`UnsignedInteger16`.""" - - -UnsignedInteger8 = TBoundedUnsignedInteger(8, 'unsignedByte') -"""The 8-bit unsigned integer, also known as ``unsignedByte``.""" - -UnsignedByte = UnsignedInteger8 -"""The 8-bit unsigned integer, alias for :class:`UnsignedInteger8`.""" - - -NATIVE_MAP.update({ - float: Double, - decimal.Decimal: Decimal, -}) - - -if not six.PY2: - NATIVE_MAP.update({ - int: Integer, - }) - -else: - NATIVE_MAP.update({ - long: Integer, - }) - - if isinstance(0x80000000, long): # 32-bit architecture - NATIVE_MAP[int] = Integer32 - else: # not 32-bit (so most probably 64-bit) architecture - NATIVE_MAP[int] = Integer64 diff --git a/libs_crutch/contrib/spyne/model/primitive/spatial.py b/libs_crutch/contrib/spyne/model/primitive/spatial.py deleted file mode 100644 index cf3ada5..0000000 --- a/libs_crutch/contrib/spyne/model/primitive/spatial.py +++ /dev/null @@ -1,263 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -# -# FIXME: Supports e.g. -# MULTIPOINT (10 40, 40 30, 20 20, 30 10) -# -# but not: -# MULTIPOINT ((10 40), (40 30), (20 20), (30 10)) -# -from spyne.model import SimpleModel -from spyne.model.primitive.string import Unicode - - -FLOAT_PATTERN = r'-?[0-9]+\.?[0-9]*(e-?[0-9]+)?' - - -_rinse_and_repeat = r'\s*\(%s\s*(,\s*%s)*\)\s*' -def _get_one_point_pattern(dim): - return ' +'.join([FLOAT_PATTERN] * dim) - -def _get_point_pattern(dim): - return r'POINT\s*\(%s\)' % _get_one_point_pattern(dim) - -def _get_one_multipoint_pattern(dim): - one_point = _get_one_point_pattern(dim) - return _rinse_and_repeat % (one_point, one_point) - -def _get_multipoint_pattern(dim): - return r'MULTIPOINT%s' % _get_one_multipoint_pattern(dim) - - -def _get_one_line_pattern(dim): - one_point = _get_one_point_pattern(dim) - return _rinse_and_repeat % (one_point, one_point) - -def _get_linestring_pattern(dim): - return r'LINESTRING%s' % _get_one_line_pattern(dim) - -def _get_one_multilinestring_pattern(dim): - one_line = _get_one_line_pattern(dim) - return _rinse_and_repeat % (one_line, one_line) - -def _get_multilinestring_pattern(dim): - return r'MULTILINESTRING%s' % _get_one_multilinestring_pattern(dim) - - -def _get_one_polygon_pattern(dim): - one_line = _get_one_line_pattern(dim) - return _rinse_and_repeat % (one_line, one_line) - -def _get_polygon_pattern(dim): - return r'POLYGON%s' % _get_one_polygon_pattern(dim) - -def _get_one_multipolygon_pattern(dim): - one_line = _get_one_polygon_pattern(dim) - return _rinse_and_repeat % (one_line, one_line) - -def _get_multipolygon_pattern(dim): - return r'MULTIPOLYGON%s' % _get_one_multipolygon_pattern(dim) - - -class Point(Unicode): - """A point type whose native format is a WKT string. 
You can use - :func:`shapely.wkt.loads` to get a proper point type. - - It's a subclass of the :class:`Unicode` type, so regular Unicode constraints - apply. The only additional parameter is the number of dimensions. - - :param dim: Number of dimensons. - """ - - __type_name__ = None - - class Attributes(Unicode.Attributes): - dim = None - - @staticmethod - def Value(x, y, prec=15): - if isinstance(x, str) or isinstance(y, str): - assert isinstance(x, str) - assert isinstance(y, str) - return 'POINT(%s %s)' % (x, y) - - return ('POINT(%%3.%(prec)sf %%3.%(prec)sf)' % {'prec': prec}) % (x,y) - - def __new__(cls, dim=None, **kwargs): - assert dim in (None, 2, 3) - if dim is not None: - kwargs['dim'] = dim - kwargs['pattern'] = _get_point_pattern(dim) - kwargs['type_name'] = 'point%dd' % dim - - retval = SimpleModel.__new__(cls, **kwargs) - retval.__namespace__ = 'http://spyne.io/schema' - retval.__extends__ = Unicode - retval.__orig__ = Unicode - return retval - - -class Line(Unicode): - """A line type whose native format is a WKT string. You can use - :func:`shapely.wkt.loads` to get a proper line type. - - It's a subclass of the :class:`Unicode` type, so regular Unicode constraints - apply. The only additional parameter is the number of dimensions. - - :param dim: Number of dimensons. - """ - - __type_name__ = None - - class Attributes(Unicode.Attributes): - dim = None - - def __new__(cls, dim=None, **kwargs): - assert dim in (None, 2, 3) - if dim is not None: - kwargs['dim'] = dim - kwargs['pattern'] = _get_linestring_pattern(dim) - kwargs['type_name'] = 'line%dd' % dim - - retval = SimpleModel.__new__(cls, **kwargs) - retval.__namespace__ = 'http://spyne.io/schema' - retval.__extends__ = Unicode - retval.__orig__ = Unicode - return retval - -LineString = Line - - -class Polygon(Unicode): - """A polygon type whose native format is a WKT string. You can use - :func:`shapely.wkt.loads` to get a proper polygon type. 
- - It's a subclass of the :class:`Unicode` type, so regular Unicode constraints - apply. The only additional parameter is the number of dimensions. - - :param dim: Number of dimensons. - """ - __type_name__ = None - - class Attributes(Unicode.Attributes): - dim = None - - def __new__(cls, dim=None, **kwargs): - assert dim in (None, 2, 3) - if dim is not None: - kwargs['dim'] = dim - kwargs['pattern'] = _get_polygon_pattern(dim) - kwargs['type_name'] = 'polygon%dd' % dim - - retval = SimpleModel.__new__(cls, **kwargs) - retval.__namespace__ = 'http://spyne.io/schema' - retval.__extends__ = Unicode - retval.__orig__ = Unicode - return retval - - -class MultiPoint(Unicode): - """A MultiPoint type whose native format is a WKT string. You can use - :func:`shapely.wkt.loads` to get a proper MultiPoint type. - - It's a subclass of the :class:`Unicode` type, so regular Unicode constraints - apply. The only additional parameter is the number of dimensions. - - :param dim: Number of dimensons. - """ - - __type_name__ = None - - class Attributes(Unicode.Attributes): - dim = None - - def __new__(cls, dim=None, **kwargs): - assert dim in (None, 2, 3) - if dim is not None: - kwargs['dim'] = dim - kwargs['pattern'] = _get_multipoint_pattern(dim) - kwargs['type_name'] = 'multiPoint%dd' % dim - - retval = SimpleModel.__new__(cls, **kwargs) - retval.__namespace__ = 'http://spyne.io/schema' - retval.__extends__ = Unicode - retval.__orig__ = Unicode - return retval - - -class MultiLine(Unicode): - """A MultiLine type whose native format is a WKT string. You can use - :func:`shapely.wkt.loads` to get a proper MultiLine type. - - It's a subclass of the :class:`Unicode` type, so regular Unicode constraints - apply. The only additional parameter is the number of dimensions. - - :param dim: Number of dimensons. 
- """ - - __type_name__ = None - - class Attributes(Unicode.Attributes): - dim = None - - def __new__(cls, dim=None, **kwargs): - assert dim in (None, 2, 3) - if dim is not None: - kwargs['dim'] = dim - kwargs['pattern'] = _get_multilinestring_pattern(dim) - kwargs['type_name'] = 'multiLine%dd' % dim - - retval = SimpleModel.__new__(cls, **kwargs) - retval.__namespace__ = 'http://spyne.io/schema' - retval.__extends__ = Unicode - retval.__orig__ = Unicode - return retval - -MultiLineString = MultiLine - - -class MultiPolygon(Unicode): - """A MultiPolygon type whose native format is a WKT string. You can use - :func:`shapely.wkt.loads` to get a proper MultiPolygon type. - - It's a subclass of the :class:`Unicode` type, so regular Unicode constraints - apply. The only additional parameter is the number of dimensions. - - :param dim: Number of dimensons. - """ - - __type_name__ = None - - class Attributes(Unicode.Attributes): - dim = None - - def __new__(cls, dim=None, **kwargs): - assert dim in (None, 2, 3) - if dim is not None: - kwargs['dim'] = dim - kwargs['pattern'] = _get_multipolygon_pattern(dim) - kwargs['type_name'] = 'multipolygon%dd' % dim - - retval = SimpleModel.__new__(cls, **kwargs) - retval.__namespace__ = 'http://spyne.io/schema' - retval.__extends__ = Unicode - retval.__orig__ = Unicode - return retval - diff --git a/libs_crutch/contrib/spyne/model/primitive/string.py b/libs_crutch/contrib/spyne/model/primitive/string.py deleted file mode 100644 index bce51ae..0000000 --- a/libs_crutch/contrib/spyne/model/primitive/string.py +++ /dev/null @@ -1,307 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import unicode_literals - -import decimal -import uuid - -from spyne.model.primitive import NATIVE_MAP -from spyne.util import six -from spyne.model._base import SimpleModel -from spyne.model.primitive._base import re_match_with_span - - -UUID_PATTERN = "%(x)s{8}-%(x)s{4}-%(x)s{4}-%(x)s{4}-%(x)s{12}" % \ - {'x': '[a-fA-F0-9]'} - -LTREE_PATTERN = r"\w+(\.\w+)*" - -# Actual ltree max size is 65536 but it's advised to keep it under 2048. -LTREE_OPTIMAL_SIZE = 2048 -LTREE_MAXIMUM_SIZE = 65536 - - -def _gen_mime_type_pattern(strict, with_params): - ows = "[ \\t]*" # Optional WhiteSpace - token = "[0-9A-Za-z!#$%&'*+.^_`|~-]+" - quotedString = "\"(?:[^\"\\\\]|\\.)*\"" - if strict: - main_type = "(" \ - "application|audio|font|example|image|message|model|multipart" \ - "|text|video|x-(?:" + token + ")" \ - ")" - - else: - main_type = token - - param = token + "=" + "(?:" + token + "|" + quotedString + ");?" 
+ ows - params = ";" + ows + param + "(" + param + ")*" - - if not with_params: - return main_type + "/" + "(" + token + ")" - else: - return main_type + "/" + "(" + token + ")" + params - - -MIME_TYPE_PATTERN_STRICT = \ - _gen_mime_type_pattern(strict=True, with_params=False) -MIME_TYPE_PATTERN_PERMISSIVE = \ - _gen_mime_type_pattern(strict=False, with_params=False) - -MEDIA_TYPE_PATTERN_STRICT = \ - _gen_mime_type_pattern(strict=True, with_params=True) -MEDIA_TYPE_PATTERN_PERMISSIVE = \ - _gen_mime_type_pattern(strict=False, with_params=True) - - -class Unicode(SimpleModel): - """The type to represent human-readable data. Its native format is `unicode` - or `str` with given encoding. - """ - - __type_name__ = 'string' - Value = six.text_type - - class Attributes(SimpleModel.Attributes): - """Customizable attributes of the :class:`spyne.model.primitive.Unicode` - type.""" - - min_len = 0 - """Minimum length of string. Can be set to any positive integer""" - - max_len = decimal.Decimal('inf') - """Maximum length of string. Can be set to ``decimal.Decimal('inf')`` to - accept strings of arbitrary length. You may also need to adjust - :const:`spyne.server.wsgi.MAX_CONTENT_LENGTH`.""" - - pattern = None - """A regular expression that matches the whole string. See here for more - info: http://www.regular-expressions.info/xml.html""" - - unicode_pattern = None - """Same as ``pattern``, but, will be compiled with ``re.UNICODE``. - See: https://docs.python.org/2/library/re.html#re.UNICODE""" - - encoding = None - """The encoding of binary data this class may have to deal with.""" - - unicode_errors = 'strict' - """The argument to the ``unicode`` builtin; one of 'strict', 'replace' - or 'ignore'.""" - - format = None - """A regular python string formatting string. 
See here: - http://docs.python.org/library/stdtypes.html#string-formatting""" - - cast = None - """Type override callable for casting non-unicode input to unicode.""" - - def __new__(cls, *args, **kwargs): - assert len(args) <= 1 - - if len(args) == 1: - kwargs['max_len'] = args[0] - - retval = SimpleModel.__new__(cls, ** kwargs) - - return retval - - @staticmethod - def is_default(cls): - return ( SimpleModel.is_default(cls) - and cls.Attributes.min_len == Unicode.Attributes.min_len - and cls.Attributes.max_len == Unicode.Attributes.max_len - and cls.Attributes.pattern == Unicode.Attributes.pattern - ) - - @staticmethod - def validate_string(cls, value): - return ( SimpleModel.validate_string(cls, value) - and (value is None or ( - cls.Attributes.min_len <= len(value) <= cls.Attributes.max_len - ))) - - @staticmethod - def validate_native(cls, value): - return (SimpleModel.validate_native(cls, value) - and (value is None or ( - re_match_with_span(cls.Attributes, value) - ))) - - -class String(Unicode): - pass - -if not six.PY2: - String = Unicode - - -class AnyUri(Unicode): - """A special kind of String type designed to hold an uri.""" - - __type_name__ = 'anyURI' - - class Attributes(String.Attributes): - text = None - """The text shown in link.""" - - anchor_class = None - """The class of the generated tag.""" - - class Value(object): - """A special object that is just a better way of carrying the - information carried with a link. - - :param href: The uri string. - :param text: The text data that goes with the link. This is a - ``str`` or a ``unicode`` instance. - :param content: The structured data that goes with the link. This is an - `lxml.etree.Element` instance. 
- """ - - def __init__(self, href, text=None, content=None): - self.href = href - self.text = text - self.content = content - - def __repr__(self): - return "Uri(href={0!r}, text={1!r}, content={2!r})" \ - .format(self.href, self.text, self.content) - - -class ImageUri(AnyUri): - """A special kind of String that holds the uri of an image.""" - - -def _uuid_validate_string(cls, value): - return ( SimpleModel.validate_string(cls, value) - and (value is None or ( - cls.Attributes.min_len <= len(value) <= cls.Attributes.max_len - and re_match_with_span(cls.Attributes, value) - ))) - - -def _Tuuid_validate(key): - from uuid import UUID - - def _uvalid(cls, v): - try: - UUID(**{key:v}) - except ValueError: - return False - return True - return _uvalid - - -_uuid_validate = { - None: _uuid_validate_string, - 'hex': _Tuuid_validate('hex'), - 'urn': _Tuuid_validate('urn'), - six.binary_type: _Tuuid_validate('bytes'), - 'bytes': _Tuuid_validate('bytes'), - 'bytes_le': _Tuuid_validate('bytes_le'), - 'fields': _Tuuid_validate('fields'), - int: _Tuuid_validate('int'), - 'int': _Tuuid_validate('int'), -} - - -class Uuid(Unicode(pattern=UUID_PATTERN)): - """Unicode subclass for Universially-Unique Identifiers.""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'uuid' - Value = uuid.UUID - - class Attributes(Unicode(pattern=UUID_PATTERN).Attributes): - serialize_as = None - - @staticmethod - def validate_string(cls, value): - return _uuid_validate[cls.Attributes.serialize_as](cls, value) - - @staticmethod - def validate_native(cls, value): - return SimpleModel.validate_native(cls, value) - - -class Ltree(Unicode(LTREE_OPTIMAL_SIZE, unicode_pattern=LTREE_PATTERN)): - """A special kind of String type designed to hold the Ltree type from - Postgresql.""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'ltreeString' - - -class LtreeLarge(Unicode(LTREE_MAXIMUM_SIZE, unicode_pattern=LTREE_PATTERN)): - """A special kind of String type designed to hold the 
Ltree type from - Postgresql.""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'largeLtreeString' - - -class MimeTypeStrict(Unicode(unicode_pattern=MIME_TYPE_PATTERN_STRICT)): - """A special kind of String type designed to hold a mime type as defined - by IANA.""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'strictMimeTypeString' - - -class MimeType(Unicode(unicode_pattern=MIME_TYPE_PATTERN_PERMISSIVE)): - """A special kind of String type designed to hold a forward-compatible - mime type that can have any string as main type.""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'mimeTypeString' - - -class MediaTypeStrict(Unicode(unicode_pattern=MEDIA_TYPE_PATTERN_STRICT)): - """A special kind of String type designed to hold a mime type as defined - by IANA followed by arbitrary parameters. - - See: https://tools.ietf.org/html/rfc7231#section-3.1.1.1""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'strictMediaTypeString' - - -class MediaType(Unicode(unicode_pattern=MEDIA_TYPE_PATTERN_PERMISSIVE)): - """A special kind of String type designed to hold a forward-compatible - media type that can have any string as main type. A media type is - essentially a mime type plus parameters. - - See: https://tools.ietf.org/html/rfc7231#section-3.1.1.1""" - - __namespace__ = 'http://spyne.io/schema' - __type_name__ = 'mediaTypeString' - - -if not six.PY2: - NATIVE_MAP.update({ - str: Unicode, - }) - -else: - NATIVE_MAP.update({ - str: String, - unicode: Unicode, - }) diff --git a/libs_crutch/contrib/spyne/model/primitive/xml.py b/libs_crutch/contrib/spyne/model/primitive/xml.py deleted file mode 100644 index d60494a..0000000 --- a/libs_crutch/contrib/spyne/model/primitive/xml.py +++ /dev/null @@ -1,195 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import re - -from spyne.const.xml import PATT_NMTOKEN -from spyne.model.primitive.string import Unicode - - -RE_BaseChar = re.compile( - u"[\u0041-\u005A]|[\u0061-\u007A]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|" - u"[\u00F8-\u00FF]|[\u0100-\u0131]|[\u0134-\u013E]|[\u0141-\u0148]|" - u"[\u014A-\u017E]|[\u0180-\u01C3]|[\u01CD-\u01F0]|[\u01F4-\u01F5]|" - u"[\u01FA-\u0217]|[\u0250-\u02A8]|[\u02BB-\u02C1]|\u0386|[\u0388-\u038A]|" - u"\u038C|[\u038E-\u03A1]|[\u03A3-\u03CE]|[\u03D0-\u03D6]|" - u"\u03DA|\u03DC|\u03DE|\u03E0|[\u03E2-\u03F3]|[\u0401-\u040C]|" - u"[\u040E-\u044F]|[\u0451-\u045C]|[\u045E-\u0481]|[\u0490-\u04C4]|" - u"[\u04C7-\u04C8]|[\u04CB-\u04CC]|[\u04D0-\u04EB]|[\u04EE-\u04F5]|" - u"[\u04F8-\u04F9]|[\u0531-\u0556]|\u0559|[\u0561-\u0586]|[\u05D0-\u05EA]|" - u"[\u05F0-\u05F2]|[\u0621-\u063A]|[\u0641-\u064A]|[\u0671-\u06B7]|" - u"[\u06BA-\u06BE]|[\u06C0-\u06CE]|[\u06D0-\u06D3]|\u06D5|[\u06E5-\u06E6]|" - u"[\u0905-\u0939]|\u093D|[\u0958-\u0961]|[\u0985-\u098C]|[\u098F-\u0990]|" - u"[\u0993-\u09A8]|[\u09AA-\u09B0]|\u09B2|[\u09B6-\u09B9]|[\u09DC-\u09DD]|" - u"[\u09DF-\u09E1]|[\u09F0-\u09F1]|[\u0A05-\u0A0A]|[\u0A0F-\u0A10]|" - u"[\u0A13-\u0A28]|[\u0A2A-\u0A30]|[\u0A32-\u0A33]|[\u0A35-\u0A36]|" - 
u"[\u0A38-\u0A39]|[\u0A59-\u0A5C]|\u0A5E|[\u0A72-\u0A74]|[\u0A85-\u0A8B]|" - u"\u0A8D|[\u0A8F-\u0A91]|[\u0A93-\u0AA8]|[\u0AAA-\u0AB0]|[\u0AB2-\u0AB3]|" - u"[\u0AB5-\u0AB9]|\u0ABD|\u0AE0|[\u0B05-\u0B0C]|[\u0B0F-\u0B10]|" - u"[\u0B13-\u0B28]|[\u0B2A-\u0B30]|[\u0B32-\u0B33]|[\u0B36-\u0B39]|\u0B3D|" - u"[\u0B5C-\u0B5D]|[\u0B5F-\u0B61]|[\u0B85-\u0B8A]|[\u0B8E-\u0B90]|" - u"[\u0B92-\u0B95]|[\u0B99-\u0B9A]|\u0B9C|[\u0B9E-\u0B9F]|[\u0BA3-\u0BA4]|" - u"[\u0BA8-\u0BAA]|[\u0BAE-\u0BB5]|[\u0BB7-\u0BB9]|[\u0C05-\u0C0C]|" - u"[\u0C0E-\u0C10]|[\u0C12-\u0C28]|[\u0C2A-\u0C33]|[\u0C35-\u0C39]|" - u"[\u0C60-\u0C61]|[\u0C85-\u0C8C]|[\u0C8E-\u0C90]|[\u0C92-\u0CA8]|" - u"[\u0CAA-\u0CB3]|[\u0CB5-\u0CB9]|\u0CDE|[\u0CE0-\u0CE1]|[\u0D05-\u0D0C]|" - u"[\u0D0E-\u0D10]|[\u0D12-\u0D28]|[\u0D2A-\u0D39]|[\u0D60-\u0D61]|" - u"[\u0E01-\u0E2E]|\u0E30|[\u0E32-\u0E33]|[\u0E40-\u0E45]|[\u0E81-\u0E82]|" - u"\u0E84|[\u0E87-\u0E88]|\u0E8A|\u0E8D|[\u0E94-\u0E97]|[\u0E99-\u0E9F]|" - u"[\u0EA1-\u0EA3]|\u0EA5|\u0EA7|[\u0EAA-\u0EAB]|[\u0EAD-\u0EAE]|\u0EB0|" - u"[\u0EB2-\u0EB3]|\u0EBD|[\u0EC0-\u0EC4]|[\u0F40-\u0F47]|[\u0F49-\u0F69]|" - u"[\u10A0-\u10C5]|[\u10D0-\u10F6]|\u1100|[\u1102-\u1103]|[\u1105-\u1107]|" - u"\u1109|[\u110B-\u110C]|[\u110E-\u1112]|\u113C|\u113E|\u1140|\u114C|" - u"\u114E|\u1150|[\u1154-\u1155]|\u1159|[\u115F-\u1161]|\u1163|\u1165|" - u"\u1167|\u1169|[\u116D-\u116E]|[\u1172-\u1173]|\u1175|\u119E|\u11A8|" - u"\u11AB|[\u11AE-\u11AF]|[\u11B7-\u11B8]|\u11BA|[\u11BC-\u11C2]|\u11EB|" - u"\u11F0|\u11F9|[\u1E00-\u1E9B]|[\u1EA0-\u1EF9]|[\u1F00-\u1F15]|" - u"[\u1F18-\u1F1D]|[\u1F20-\u1F45]|[\u1F48-\u1F4D]|[\u1F50-\u1F57]|\u1F59|" - u"\u1F5B|\u1F5D|[\u1F5F-\u1F7D]|[\u1F80-\u1FB4]|[\u1FB6-\u1FBC]|\u1FBE|" - u"[\u1FC2-\u1FC4]|[\u1FC6-\u1FCC]|[\u1FD0-\u1FD3]|[\u1FD6-\u1FDB]|" - u"[\u1FE0-\u1FEC]|[\u1FF2-\u1FF4]|[\u1FF6-\u1FFC]|\u2126|[\u212A-\u212B]|" - u"\u212E|[\u2180-\u2182]|[\u3041-\u3094]|[\u30A1-\u30FA]|[\u3105-\u312C]|" - u"[\uAC00-\uD7A3]", flags=re.UNICODE) - - -RE_Ideographic = 
re.compile(u"[\u4E00-\u9FA5]|\u3007|[\u3021-\u3029]", - flags=re.UNICODE) - -RE_CombiningChar= re.compile( - u"[\u0300-\u0345]|[\u0360-\u0361]|[\u0483-\u0486]|[\u0591-\u05A1]|" - u"[\u05A3-\u05B9]|[\u05BB-\u05BD]|\u05BF|[\u05C1-\u05C2]|\u05C4|" - u"[\u064B-\u0652]|\u0670|[\u06D6-\u06DC]|[\u06DD-\u06DF]|[\u06E0-\u06E4]|" - u"[\u06E7-\u06E8]|[\u06EA-\u06ED]|[\u0901-\u0903]|\u093C|[\u093E-\u094C]|" - u"\u094D|[\u0951-\u0954]|[\u0962-\u0963]|[\u0981-\u0983]|\u09BC|\u09BE|" - u"\u09BF|[\u09C0-\u09C4]|[\u09C7-\u09C8]|[\u09CB-\u09CD]|\u09D7|" - u"[\u09E2-\u09E3]|\u0A02|\u0A3C|\u0A3E|\u0A3F|[\u0A40-\u0A42]|" - u"[\u0A47-\u0A48]|[\u0A4B-\u0A4D]|[\u0A70-\u0A71]|[\u0A81-\u0A83]|\u0ABC|" - u"[\u0ABE-\u0AC5]|[\u0AC7-\u0AC9]|[\u0ACB-\u0ACD]|[\u0B01-\u0B03]|\u0B3C|" - u"[\u0B3E-\u0B43]|[\u0B47-\u0B48]|[\u0B4B-\u0B4D]|[\u0B56-\u0B57]|" - u"[\u0B82-\u0B83]|[\u0BBE-\u0BC2]|[\u0BC6-\u0BC8]|[\u0BCA-\u0BCD]|\u0BD7|" - u"[\u0C01-\u0C03]|[\u0C3E-\u0C44]|[\u0C46-\u0C48]|[\u0C4A-\u0C4D]|" - u"[\u0C55-\u0C56]|[\u0C82-\u0C83]|[\u0CBE-\u0CC4]|[\u0CC6-\u0CC8]|" - u"[\u0CCA-\u0CCD]|[\u0CD5-\u0CD6]|[\u0D02-\u0D03]|[\u0D3E-\u0D43]|" - u"[\u0D46-\u0D48]|[\u0D4A-\u0D4D]|\u0D57|\u0E31|[\u0E34-\u0E3A]|" - u"[\u0E47-\u0E4E]|\u0EB1|[\u0EB4-\u0EB9]|[\u0EBB-\u0EBC]|[\u0EC8-\u0ECD]|" - u"[\u0F18-\u0F19]|\u0F35|\u0F37|\u0F39|\u0F3E|\u0F3F|[\u0F71-\u0F84]|" - u"[\u0F86-\u0F8B]|[\u0F90-\u0F95]|\u0F97|[\u0F99-\u0FAD]|[\u0FB1-\u0FB7]|" - u"\u0FB9|[\u20D0-\u20DC]|\u20E1|[\u302A-\u302F]|\u3099|\u309A", - flags=re.UNICODE) - - -RE_Digit = re.compile( - u"[\u0030-\u0039]|[\u0660-\u0669]|[\u06F0-\u06F9]|[\u0966-\u096F]|" - u"[\u09E6-\u09EF]|[\u0A66-\u0A6F]|[\u0AE6-\u0AEF]|[\u0B66-\u0B6F]|" - u"[\u0BE7-\u0BEF]|[\u0C66-\u0C6F]|[\u0CE6-\u0CEF]|[\u0D66-\u0D6F]|" - u"[\u0E50-\u0E59]|[\u0ED0-\u0ED9]|[\u0F20-\u0F29]", flags=re.UNICODE) - - -RE_Extender = re.compile( - u"\u00B7|\u02D0|\u02D1|\u0387|\u0640|\u0E46|\u0EC6|\u3005|[\u3031-\u3035]|" - u"[\u309D-\u309E]|[\u30FC-\u30FE]", flags=re.UNICODE) - - -RE_Letter = 
re.compile(u'|'.join((RE_BaseChar.pattern, RE_Ideographic.pattern)), - flags=re.UNICODE) - - -RE_NameChar = re.compile(u'|'.join(( - RE_Letter.pattern, RE_Digit.pattern, '.', '-', '_', ':', - RE_CombiningChar.pattern, RE_Extender.pattern, - )), flags=re.UNICODE) - - -RE_NCNameChar = re.compile(u'|'.join(( - RE_Letter.pattern, RE_Digit.pattern, '.', '-', '_', # <= no column - RE_CombiningChar.pattern, RE_Extender.pattern, - )), flags=re.UNICODE) - - -class NormalizedString(Unicode): - __type_name__ = 'normalizedString' - __extends__ = Unicode - - class Attributes(Unicode.Attributes): - white_space = "replace" - - -class Token(NormalizedString): - __type_name__ = 'token' - - class Attributes(Unicode.Attributes): - white_space = "collapse" - -# https://www.w3.org/TR/2000/WD-xml-2e-20000814#NT-Name -class Name(Token): - __type_name__ = 'Name' - - class Attributes(Unicode.Attributes): - # https://www.w3.org/TR/2000/WD-xml-2e-20000814#NT-Name - pattern = '(%s)(%s)*' % ( - u'|'.join((RE_Letter.pattern, '_', ':')), - RE_NameChar.pattern - ) - - -# https://www.w3.org/TR/1999/REC-xml-names-19990114/#NT-NCName -class NCName(Name): - __type_name__ = 'NCName' - class Attributes(Unicode.Attributes): - pattern = "(%s|_)%s*" % (RE_Letter.pattern, RE_NCNameChar.pattern) - - -# https://www.w3.org/TR/1999/REC-xml-names-19990114/#NT-QName -class QName(Token): - __type_name__ = "QName" - - class Attributes(Unicode.Attributes): - """ - QName = (PrefixedName | UnprefixedName) - PrefixedName ::= Prefix ':' LocalPart - UnprefixedName ::= LocalPart - Prefix ::= NCName - LocalPart ::= NCName - - i.e. 
- - QName = (NCName:)?NCName - """ - pattern = "(%s:)?(%s)" % ( - NCName.Attributes.pattern, - NCName.Attributes.pattern, - ) - - -class NMToken(Unicode): - __type_name__ = 'NMTOKEN' - - class Attributes(Unicode.Attributes): - unicode_pattern = PATT_NMTOKEN - - -class ID(NCName): - __type_name__ = 'ID' - - -class Language(Token): - __type_name__ = 'language' - - class Attributes(Unicode.Attributes): - pattern = '[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*' diff --git a/libs_crutch/contrib/spyne/model/relational.py b/libs_crutch/contrib/spyne/model/relational.py deleted file mode 100644 index 4f2ab67..0000000 --- a/libs_crutch/contrib/spyne/model/relational.py +++ /dev/null @@ -1,46 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from spyne.model.complex import ComplexModel -from spyne.model.primitive import Unicode - - -class FileData(ComplexModel): - _type_info = [ - ('name', Unicode), - ('type', Unicode), - ('path', Unicode), - ] - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - self._data = data - - @property - def handle(self): - return self._handle - - @handle.setter - def handle(self, handle): - self._handle = handle - diff --git a/libs_crutch/contrib/spyne/model/table.py b/libs_crutch/contrib/spyne/model/table.py deleted file mode 100644 index 82cdfe4..0000000 --- a/libs_crutch/contrib/spyne/model/table.py +++ /dev/null @@ -1,227 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""This module is DEPRECATED. 
Create your own TableModel using -:func:`spyne.model.complex.TTableModel` - -Here's an example way of using the :class:`spyne.model.table.TableModel`: :: - - class User(TableModel, DeclarativeBase): - __namespace__ = 'spyne.examples.user_manager' - __tablename__ = 'spyne_user' - - user_id = Column(sqlalchemy.Integer, primary_key=True) - user_name = Column(sqlalchemy.String(256)) - first_name = Column(sqlalchemy.String(256)) - last_name = Column(sqlalchemy.String(256)) - -Defined this way, SQLAlchemy objects are regular Spyne objects that can be -used anywhere the regular Spyne types go. The definition for the `User` object -is quite similar to vanilla SQLAlchemy declarative syntax, save for two -elements: - -#. The object also bases on :class:`spyne.model.table.TableModel`, which - bridges SQLAlchemy and Spyne types. -#. It has a namespace declaration, which is just so the service looks good - on wsdl. - -The SQLAlchemy integration is far from perfect at the moment: - -* SQL constraints are not reflected to the interface document. -* It's not possible to define additional constraints for the Spyne schema. -* Object attributes defined by mechanisms other than Column and limited - uses of `relationship` (no string arguments) are not supported. - -If you need any of the above features, you need to separate the Spyne and -SQLAlchemy object definitions. - -Spyne makes it easy (to an extent) with the following syntax: :: - - class AlternativeUser(TableModel, DeclarativeBase): - __namespace__ = 'spyne.examples.user_manager' - __table__ = User.__table__ - -Here, The AlternativeUser object is automatically populated using columns from -the table definition. -""" - -import warnings -warnings.warn("%r module is deprecated. 
Please switch to " - "spyne.model.complex.TTableModel.\nHere's where the import " - "comes from:" % __name__) -import traceback -traceback.print_stack() - - -import logging -logger = logging.getLogger(__name__) - -import sqlalchemy - -from spyne.util.six import add_metaclass - -from sqlalchemy import Column -from sqlalchemy.orm import RelationshipProperty -from sqlalchemy.ext.declarative import DeclarativeMeta -from sqlalchemy.dialects.postgresql import UUID - -from spyne.model import primitive -from spyne.model import binary -from spyne.model import complex -from spyne.model.complex import Array -from spyne.model.complex import TypeInfo -from spyne.model.complex import ComplexModelBase -from spyne.model.complex import ComplexModelMeta - - -_type_map = { - sqlalchemy.Text: primitive.String, - sqlalchemy.String: primitive.String, - sqlalchemy.Unicode: primitive.String, - sqlalchemy.UnicodeText: primitive.String, - - sqlalchemy.Float: primitive.Float, - sqlalchemy.Numeric: primitive.Decimal, - sqlalchemy.BigInteger: primitive.Integer, - sqlalchemy.Integer: primitive.Integer, - sqlalchemy.SmallInteger: primitive.Integer, - - sqlalchemy.Binary: binary.ByteArray, - sqlalchemy.LargeBinary: binary.ByteArray, - sqlalchemy.Boolean: primitive.Boolean, - sqlalchemy.DateTime: primitive.DateTime, - sqlalchemy.Date: primitive.Date, - sqlalchemy.Time: primitive.Time, - - sqlalchemy.orm.relation: complex.Array, - - UUID: primitive.String(pattern="%(x)s{8}-" - "%(x)s{4}-" - "%(x)s{4}-" - "%(x)s{4}-" - "%(x)s{12}" % {'x': '[a-fA-F0-9]'}, - name='uuid') -} - - -def _process_item(v): - """This function maps sqlalchemy types to spyne types.""" - - rpc_type = None - if isinstance(v, Column): - if isinstance(v.type, sqlalchemy.Enum): - if v.type.convert_unicode: - rpc_type = primitive.Unicode(values=v.type.enums) - else: - rpc_type = primitive.String(values=v.type.enums) - - elif v.type in _type_map: - rpc_type = _type_map[v.type] - - elif type(v.type) in _type_map: - rpc_type = 
_type_map[type(v.type)] - - else: - raise Exception("soap_type was not found. maybe _type_map needs a " - "new entry. %r" % v) - - elif isinstance(v, RelationshipProperty): - v.enable_typechecks = False - # FIXME: Distinguish between *ToMany and *ToOne relationship. - # rpc_type = v.argument - rpc_type = Array(v.argument) - - return rpc_type - - -def _is_interesting(k, v): - if k.startswith('__'): - return False - - if isinstance(v, Column): - return True - - if isinstance(v, RelationshipProperty): - if getattr(v.argument, '_type_info', None) is None: - logger.warning("the argument to relationship should be a reference " - "to the real column, not a string.") - return False - - else: - return True - - -class TableModelMeta(DeclarativeMeta, ComplexModelMeta): - """This class uses the information in class definition dictionary to build - the _type_info dictionary that spyne relies on. It otherwise leaves - SQLAlchemy and its information alone. - """ - - def __new__(cls, cls_name, cls_bases, cls_dict): - if cls_dict.get("__type_name__", None) is None: - cls_dict["__type_name__"] = cls_name - - if cls_dict.get("_type_info", None) is None: - cls_dict["_type_info"] = _type_info = TypeInfo() - - def check_mixin_inheritance(bases): - for b in bases: - check_mixin_inheritance(b.__bases__) - - for k, v in vars(b).items(): - if _is_interesting(k, v): - _type_info[k] = _process_item(v) - - check_mixin_inheritance(cls_bases) - - def check_same_table_inheritance(bases): - for b in bases: - check_same_table_inheritance(b.__bases__) - - table = getattr(b, '__table__', None) - - if not (table is None): - for c in table.c: - _type_info[c.name] = _process_item(c) - - check_same_table_inheritance(cls_bases) - - # include from table - table = cls_dict.get('__table__', None) - if not (table is None): - for c in table.c: - _type_info[c.name] = _process_item(c) - - # own attributes - for k, v in cls_dict.items(): - if _is_interesting(k, v): - _type_info[k] = _process_item(v) - - return 
super(TableModelMeta, cls).__new__(cls, cls_name, cls_bases, cls_dict) - - -@add_metaclass(TableModelMeta) -class TableModel(ComplexModelBase): - """The main base class for complex types shared by both SQLAlchemy and - spyne. Classes that inherit from this class should also inherit from - an sqlalchemy.declarative base class. See the :ref:`manual-sqlalchemy` - section for more info. - """ - - _decl_class_registry = {} diff --git a/libs_crutch/contrib/spyne/protocol/__init__.py b/libs_crutch/contrib/spyne/protocol/__init__.py deleted file mode 100644 index a98a9b6..0000000 --- a/libs_crutch/contrib/spyne/protocol/__init__.py +++ /dev/null @@ -1,44 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol`` package contains the -:class:`spyne.protocol.ProtocolBase`` abstract base class. Every protocol -implementation is a subclass of ``ProtocolBase``. 
-""" - -from spyne.protocol._base import ProtocolMixin -from spyne.protocol._inbase import InProtocolBase -from spyne.protocol._outbase import OutProtocolBase - - -class ProtocolBase(InProtocolBase, OutProtocolBase): - def __init__(self, app=None, validator=None, mime_type=None, - ignore_uncap=False, ignore_wrappers=False, binary_encoding=None, - string_encoding='utf8'): - - InProtocolBase.__init__(self, app=app, validator=validator, - mime_type=mime_type, ignore_wrappers=ignore_wrappers, - binary_encoding=binary_encoding) - - OutProtocolBase.__init__(self, app=app, mime_type=mime_type, - ignore_wrappers=ignore_wrappers, ignore_uncap=ignore_uncap, - binary_encoding=binary_encoding) - - self.default_string_encoding = string_encoding - self.ignore_empty_faultactor = True diff --git a/libs_crutch/contrib/spyne/protocol/_base.py b/libs_crutch/contrib/spyne/protocol/_base.py deleted file mode 100644 index e593350..0000000 --- a/libs_crutch/contrib/spyne/protocol/_base.py +++ /dev/null @@ -1,422 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import logging -logger = logging.getLogger(__name__) - -from datetime import datetime -from weakref import WeakKeyDictionary - -from spyne import ProtocolContext, EventManager -from spyne.const import DEFAULT_LOCALE -from spyne.model import Array -from spyne.error import ResourceNotFoundError -from spyne.util import DefaultAttrDict -from spyne.util.six import string_types - - -_MISSING = type("_MISSING", (object,), {})() - - -class ProtocolMixin(object): - mime_type = 'application/octet-stream' - - SOFT_VALIDATION = type("Soft", (object,), {}) - REQUEST = type("Request", (object,), {}) - RESPONSE = type("Response", (object,), {}) - - type = set() - """Set that contains keywords about a protocol.""" - - default_binary_encoding = None - """Default encoding for binary data. It could be e.g. base64.""" - - default_string_encoding = None - """Default encoding for text content. It could be e.g. 
UTF-8.""" - - type_attrs = {} - """Default customizations to be passed to underlying classes.""" - - def __init__(self, app=None, mime_type=None, ignore_wrappers=None, - binary_encoding=None, string_encoding=None): - self.__app = None - self.set_app(app) - - self.ignore_wrappers = ignore_wrappers - self.event_manager = EventManager(self) - self.binary_encoding = binary_encoding - if self.binary_encoding is None: - self.binary_encoding = self.default_binary_encoding - - self.string_encoding = string_encoding - if self.string_encoding is None: - self.string_encoding = self.default_string_encoding - - if mime_type is not None: - self.mime_type = mime_type - - self._attrcache = WeakKeyDictionary() - self._sortcache = WeakKeyDictionary() - - def _cast(self, cls_attrs, inst): - if cls_attrs.parser is not None: - return cls_attrs.parser(inst) - return inst - - _parse = _cast - - def _sanitize(self, cls_attrs, inst): - if cls_attrs.sanitizer is not None: - return cls_attrs.sanitizer(inst) - return inst - - def _datetime_from_sec(self, cls, value): - try: - return datetime.fromtimestamp(value) - except TypeError: - logger.error("Invalid value %r", value) - raise - - def _datetime_from_sec_float(self, cls, value): - try: - return datetime.fromtimestamp(value) - except TypeError: - logger.error("Invalid value %r", value) - raise - - def _datetime_from_msec(self, cls, value): - try: - return datetime.fromtimestamp(value // 1000) - except TypeError: - logger.error("Invalid value %r", value) - raise - - def _datetime_from_msec_float(self, cls, value): - try: - return datetime.fromtimestamp(value / 1000) - except TypeError: - logger.error("Invalid value %r", value) - raise - - def _datetime_from_usec(self, cls, value): - try: - return datetime.fromtimestamp(value / 1e6) - except TypeError: - logger.error("Invalid value %r", value) - raise - - def _get_datetime_format(self, cls_attrs): - # FIXME: this should be dt_format, all other aliases are to be - # deprecated - dt_format = 
cls_attrs.datetime_format - if dt_format is None: - dt_format = cls_attrs.dt_format - if dt_format is None: - dt_format = cls_attrs.date_format - if dt_format is None: - dt_format = cls_attrs.out_format - if dt_format is None: - dt_format = cls_attrs.format - - return dt_format - - def _get_date_format(self, cls_attrs): - date_format = cls_attrs.date_format - if date_format is None: - date_format = cls_attrs.format - - return date_format - - def _get_time_format(self, cls_attrs): - time_format = cls_attrs.time_format - if time_format is None: - time_format = cls_attrs.format - - return time_format - - @property - def app(self): - return self.__app - - @staticmethod - def strip_wrappers(cls, inst): - ti = getattr(cls, '_type_info', {}) - - while len(ti) == 1 and cls.Attributes._wrapper: - # Wrappers are auto-generated objects that have exactly one - # child type. - key, = ti.keys() - if not issubclass(cls, Array): - inst = getattr(inst, key, None) - cls, = ti.values() - ti = getattr(cls, '_type_info', {}) - - return cls, inst - - def set_app(self, value): - assert self.__app is None, "One protocol instance should belong to one " \ - "application instance. 
It currently belongs " \ - "to: %r" % self.__app - self.__app = value - - @staticmethod - def issubclass(sub, cls): - suborig = getattr(sub, '__orig__', None) - clsorig = getattr(cls, '__orig__', None) - return issubclass(sub if suborig is None else suborig, - cls if clsorig is None else clsorig) - - def get_cls_attrs(self, cls): - #DEBUG - #this one is getting way to spammy - #logger.debug("%r attrcache size: %d", self, len(self._attrcache)) - attr = self._attrcache.get(cls, None) - if attr is not None: - return attr - - self._attrcache[cls] = attr = DefaultAttrDict([ - (k, getattr(cls.Attributes, k)) - for k in dir(cls.Attributes) + META_ATTR - if not k.startswith('__')]) - - if cls.Attributes.prot_attrs: - cls_attrs = cls.Attributes.prot_attrs.get(self.__class__, {}) - # logger.debug("%r cls attr %r", cls, cls_attrs) - attr.update(cls_attrs) - - inst_attrs = cls.Attributes.prot_attrs.get(self, {}) - # logger.debug("%r inst attr %r", cls, cls_attrs) - attr.update(inst_attrs) - - return attr - - def get_context(self, parent, transport): - return ProtocolContext(parent, transport) - - def generate_method_contexts(self, ctx): - """Generates MethodContext instances for every callable assigned to the - given method handle. - - The first element in the returned list is always the primary method - context whereas the rest are all auxiliary method contexts. - """ - - call_handles = self.get_call_handles(ctx) - if len(call_handles) == 0: - raise ResourceNotFoundError(ctx.method_request_string) - - retval = [] - for d in call_handles: - assert d is not None - - c = ctx.copy() - c.descriptor = d - - retval.append(c) - - return retval - - def get_call_handles(self, ctx): - """Method to be overriden to perform any sort of custom method mapping - using any data in the method context. Returns a list of contexts. - Can return multiple contexts if a method_request_string matches more - than one function. (This is called the fanout mode.) 
- """ - - name = ctx.method_request_string - if not name.startswith(u"{"): - name = u'{%s}%s' % (self.app.interface.get_tns(), name) - - call_handles = self.app.interface.service_method_map.get(name, []) - - return call_handles - - def get_polymorphic_target(self, cls, inst): - """If the protocol is polymorphic, extract what's returned by the user - code. - """ - - if not self.polymorphic: - #DEBUG - #way too spammy - #logger.debug("PMORPH Skipped: %r is NOT polymorphic", self) - return cls, False - - orig_cls = cls.__orig__ or cls - - if inst.__class__ is orig_cls: - logger.debug("PMORPH Skipped: Instance class %r is the same as " - "designated base class", inst.__class__) - return cls, False - - if not isinstance(inst, orig_cls): - logger.debug("PMORPH Skipped: Instance class %r is not a subclass " - "of designated base class %r", inst.__class__, orig_cls) - return cls, False - - cls_attr = self.get_cls_attrs(cls) - polymap_cls = cls_attr.polymap.get(inst.__class__, None) - - if polymap_cls is not None: - logger.debug("PMORPH OK: cls switch with polymap: %r => %r", - cls, polymap_cls) - return polymap_cls, True - - else: - logger.debug("PMORPH OK: cls switch without polymap: %r => %r", - cls, inst.__class__) - return inst.__class__, True - - @staticmethod - def trc_verbose(cls, locale, default): - """Translate a class. 
- - :param cls: class - :param locale: locale string - :param default: default string if no translation found - :returns: translated string - """ - - if locale is None: - locale = DEFAULT_LOCALE - _log_locale = "default locale '%s'" - else: - _log_locale = "given locale '%s'" - - if cls.Attributes.translations is None: - retval = default - _log_tr = "translated to '%s' without any translations at all with" - - else: - retval = cls.Attributes.translations.get(locale, _MISSING) - if retval is _MISSING: - retval = default - _log_tr = "translated to '%s': No translation for" - else: - _log_tr = "translated to '%s' with" - - logger.debug(' '.join(("%r ", _log_tr, _log_locale)), - cls, retval, locale) - - return retval - - @staticmethod - def trc(cls, locale, default): - """Translate a class. - - :param cls: class - :param locale: locale string - :param default: default string if no translation found - :returns: translated string - """ - - if locale is None: - locale = DEFAULT_LOCALE - if cls.Attributes.translations is not None: - return cls.Attributes.translations.get(locale, default) - return default - - @staticmethod - def trd_verbose(trdict, locale, default): - """Translate from a translations dict. 
- - :param trdict: translation dict - :param locale: locale string - :param default: default string if no translation found - :returns: translated string - """ - - if locale is None: - locale = DEFAULT_LOCALE - _log_locale = "default locale '%s'" - else: - _log_locale = "given locale '%s'" - - if trdict is None: - retval = default - _log_tr = "translated to '%s' without any translations at all with" - - elif isinstance(trdict, string_types): - retval = trdict - _log_tr = "translated to '%s' regardless of" - - else: - retval = trdict.get(locale, _MISSING) - if retval is _MISSING: - retval = default - _log_tr = "translated to '%s': No translation for" - else: - _log_tr = "translated to '%s' with" - - logger.debug(' '.join(("%r ", _log_tr, _log_locale)), - trdict, retval, locale) - - return retval - - @staticmethod - def trd(trdict, locale, default): - """Translate from a translations dict. - - :param trdict: translation dict - :param locale: locale string - :param default: default string if no translation found - :returns: translated string - """ - - if locale is None: - locale = DEFAULT_LOCALE - if trdict is None: - return default - if isinstance(trdict, string_types): - return trdict - - return trdict.get(locale, default) - - def sort_fields(self, cls=None, items=None): - logger.debug("%r sortcache size: %d", self, len(self._sortcache)) - retval = self._sortcache.get(cls, None) - if retval is not None: - return retval - - if items is None: - items = list(cls.get_flat_type_info(cls).items()) - - indexes = {} - for k, v in items: - order = self.get_cls_attrs(v).order - if order is not None: - if order < 0: - indexes[k] = len(items) + order - else: - indexes[k] = order - - for k, v in items: - order = self.get_cls_attrs(v).order - if order is None: - indexes[k] = len(indexes) - - items.sort(key=lambda x: indexes[x[0]]) - self._sortcache[cls] = items - - return items - - -META_ATTR = ['nullable', 'default_factory'] diff --git 
a/libs_crutch/contrib/spyne/protocol/_inbase.py b/libs_crutch/contrib/spyne/protocol/_inbase.py deleted file mode 100644 index c124aaa..0000000 --- a/libs_crutch/contrib/spyne/protocol/_inbase.py +++ /dev/null @@ -1,716 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import logging -logger = logging.getLogger(__name__) - -import re -import pytz -import uuid - -from math import modf -from time import strptime, mktime -from datetime import timedelta, time, datetime, date -from decimal import Decimal as D, InvalidOperation - -from pytz import FixedOffset - -try: - from lxml import etree - from lxml import html -except ImportError: - etree = None - html = None - -from spyne.protocol._base import ProtocolMixin -from spyne.model import ModelBase, XmlAttribute, Array, Null, \ - ByteArray, File, ComplexModelBase, AnyXml, AnyHtml, Unicode, String, \ - Decimal, Double, Integer, Time, DateTime, Uuid, Date, Duration, Boolean, Any - -from spyne.error import ValidationError - -from spyne.model.binary import binary_decoding_handlers, BINARY_ENCODING_USE_DEFAULT - -from spyne.util import six -from spyne.model.enum import EnumBase -from spyne.model.primitive.datetime import TIME_PATTERN, DATE_PATTERN - 
-from spyne.util.cdict import cdict - - -_date_re = re.compile(DATE_PATTERN) -_time_re = re.compile(TIME_PATTERN) -_duration_re = re.compile( - r'(?P-?)' - r'P' - r'(?:(?P\d+)Y)?' - r'(?:(?P\d+)M)?' - r'(?:(?P\d+)D)?' - r'(?:T(?:(?P\d+)H)?' - r'(?:(?P\d+)M)?' - r'(?:(?P\d+(.\d+)?)S)?)?' - ) - - -class InProtocolBase(ProtocolMixin): - """This is the abstract base class for all input protocol implementations. - Child classes can implement only the required subset of the public methods. - - An output protocol must implement :func:`serialize` and - :func:`create_out_string`. - - An input protocol must implement :func:`create_in_document`, - :func:`decompose_incoming_envelope` and :func:`deserialize`. - - The ProtocolBase class supports the following events: - - * ``before_deserialize``: - Called before the deserialization operation is attempted. - - * ``after_deserialize``: - Called after the deserialization operation is finished. - - The arguments the constructor takes are as follows: - - :param app: The application this protocol belongs to. - :param mime_type: The mime_type this protocol should set for transports - that support this. This is a quick way to override the mime_type by - default instead of subclassing the releavant protocol implementation. 
- """ - - def __init__(self, app=None, validator=None, mime_type=None, - ignore_wrappers=False, binary_encoding=None, string_encoding=None): - - self.validator = None - - super(InProtocolBase, self).__init__(app=app, mime_type=mime_type, - ignore_wrappers=ignore_wrappers, - binary_encoding=binary_encoding, string_encoding=string_encoding) - - self.message = None - self.validator = None - self.set_validator(validator) - - if mime_type is not None: - self.mime_type = mime_type - - fsh = { - Any: self.any_from_bytes, - Null: self.null_from_bytes, - File: self.file_from_bytes, - Array: self.array_from_bytes, - Double: self.double_from_bytes, - String: self.string_from_bytes, - AnyXml: self.any_xml_from_bytes, - Boolean: self.boolean_from_bytes, - Integer: self.integer_from_bytes, - Unicode: self.unicode_from_bytes, - AnyHtml: self.any_html_from_bytes, - ByteArray: self.byte_array_from_bytes, - EnumBase: self.enum_base_from_bytes, - ModelBase: self.model_base_from_bytes, - XmlAttribute: self.xmlattribute_from_bytes, - ComplexModelBase: self.complex_model_base_from_bytes - } - - self._from_bytes_handlers = cdict(fsh) - self._from_unicode_handlers = cdict(fsh) - - self._from_bytes_handlers[Date] = self.date_from_bytes - self._from_bytes_handlers[Time] = self.time_from_bytes - self._from_bytes_handlers[Uuid] = self.uuid_from_bytes - self._from_bytes_handlers[Decimal] = self.decimal_from_bytes - self._from_bytes_handlers[DateTime] = self.datetime_from_bytes - self._from_bytes_handlers[Duration] = self.duration_from_bytes - - self._from_unicode_handlers[Date] = self.date_from_unicode - self._from_unicode_handlers[Uuid] = self.uuid_from_unicode - self._from_unicode_handlers[Time] = self.time_from_unicode - self._from_unicode_handlers[Decimal] = self.decimal_from_unicode - self._from_unicode_handlers[DateTime] = self.datetime_from_unicode - self._from_unicode_handlers[Duration] = self.duration_from_unicode - - - self._datetime_dsmap = { - None: self._datetime_from_unicode, - 
'sec': self._datetime_from_sec, - 'sec_float': self._datetime_from_sec_float, - 'msec': self._datetime_from_msec, - 'msec_float': self._datetime_from_msec_float, - 'usec': self._datetime_from_usec, - } - - def _datetime_from_sec(self, cls, value): - try: - return datetime.fromtimestamp(value) - except TypeError: - logger.error("Invalid value %r", value) - raise - - def _datetime_from_sec_float(self, cls, value): - try: - return datetime.fromtimestamp(value) - except TypeError: - logger.error("Invalid value %r", value) - raise - - def _datetime_from_msec(self, cls, value): - try: - return datetime.fromtimestamp(value // 1000) - except TypeError: - logger.error("Invalid value %r", value) - raise - - def _datetime_from_msec_float(self, cls, value): - try: - return datetime.fromtimestamp(value / 1000) - except TypeError: - logger.error("Invalid value %r", value) - raise - - def _datetime_from_usec(self, cls, value): - try: - return datetime.fromtimestamp(value / 1e6) - except TypeError: - logger.error("Invalid value %r", value) - raise - - def create_in_document(self, ctx, in_string_encoding=None): - """Uses ``ctx.in_string`` to set ``ctx.in_document``.""" - - def decompose_incoming_envelope(self, ctx, message): - """Sets the ``ctx.method_request_string``, ``ctx.in_body_doc``, - ``ctx.in_header_doc`` and ``ctx.service`` properties of the ctx object, - if applicable. - """ - - def deserialize(self, ctx, message): - """Takes a MethodContext instance and a string containing ONE document - instance in the ``ctx.in_string`` attribute. - - Returns the corresponding native python object in the ctx.in_object - attribute. - """ - - def validate_document(self, payload): - """Method to be overriden to perform any sort of custom input - validation on the parsed input document. 
- """ - - def set_validator(self, validator): - """You must override this function if you want your protocol to support - validation.""" - - assert validator is None - - self.validator = None - - def from_bytes(self, class_, string, *args, **kwargs): - if string is None: - return None - - if isinstance(string, six.string_types) and \ - len(string) == 0 and class_.Attributes.empty_is_none: - return None - - handler = self._from_bytes_handlers[class_] - return handler(class_, string, *args, **kwargs) - - def from_unicode(self, class_, string, *args, **kwargs): - if string is None: - return None - #if not six.PY2: - # assert isinstance(string, str), \ - # "Invalid type passed to `from_unicode`: {}".format( - # (class_, type(string), string)) - - cls_attrs = self.get_cls_attrs(class_) - - if isinstance(string, six.string_types) and len(string) == 0 and \ - cls_attrs.empty_is_none: - return None - - handler = self._from_unicode_handlers[class_] - return handler(class_, string, *args, **kwargs) - - def null_from_bytes(self, cls, value): - return None - - def any_from_bytes(self, cls, value): - return value - - def any_xml_from_bytes(self, cls, string): - try: - return etree.fromstring(string) - except etree.XMLSyntaxError as e: - raise ValidationError(string, "%%r: %r" % e) - - def any_html_from_bytes(self, cls, string): - try: - return html.fromstring(string) - except etree.ParserError as e: - if e.args[0] == "Document is empty": - pass - else: - raise - - def uuid_from_unicode(self, cls, string, suggested_encoding=None): - attr = self.get_cls_attrs(cls) - ser_as = attr.serialize_as - encoding = attr.encoding - - if encoding is None: - encoding = suggested_encoding - - retval = string - - if ser_as in ('bytes', 'bytes_le'): - retval, = binary_decoding_handlers[encoding](string) - - try: - retval = _uuid_deserialize[ser_as](retval) - except ValueError as e: - raise ValidationError(e) - - return retval - - def uuid_from_bytes(self, cls, string, suggested_encoding=None, 
**_): - attr = self.get_cls_attrs(cls) - ser_as = attr.serialize_as - encoding = attr.encoding - - if encoding is None: - encoding = suggested_encoding - - retval = string - - if ser_as in ('bytes', 'bytes_le'): - retval, = binary_decoding_handlers[encoding](string) - elif isinstance(string, six.binary_type): - retval = string.decode('ascii') - - try: - retval = _uuid_deserialize[ser_as](retval) - except ValueError as e: - raise ValidationError(e) - - return retval - - def unicode_from_bytes(self, cls, value): - retval = value - - if isinstance(value, six.binary_type): - cls_attrs = self.get_cls_attrs(cls) - - if cls_attrs.encoding is not None: - retval = six.text_type(value, cls_attrs.encoding, - errors=cls_attrs.unicode_errors) - - elif self.string_encoding is not None: - retval = six.text_type(value, self.string_encoding, - errors=cls_attrs.unicode_errors) - - else: - retval = six.text_type(value, errors=cls_attrs.unicode_errors) - - return retval - - def string_from_bytes(self, cls, value): - retval = value - cls_attrs = self.get_cls_attrs(cls) - if isinstance(value, six.text_type): - if cls_attrs.encoding is None: - raise Exception("You need to define a source encoding for " - "decoding incoming unicode values.") - else: - retval = value.encode(cls_attrs.encoding) - - return retval - - def decimal_from_unicode(self, cls, string): - cls_attrs = self.get_cls_attrs(cls) - if cls_attrs.max_str_len is not None and len(string) > \ - cls_attrs.max_str_len: - raise ValidationError(string, "Decimal %%r longer than %d " - "characters" % cls_attrs.max_str_len) - - try: - return D(string) - except InvalidOperation as e: - raise ValidationError(string, "%%r: %r" % e) - - def decimal_from_bytes(self, cls, string): - return self.decimal_from_unicode(cls, - string.decode(self.default_string_encoding)) - - def double_from_bytes(self, cls, string): - try: - return float(string) - except (TypeError, ValueError) as e: - raise ValidationError(string, "%%r: %r" % e) - - def 
integer_from_bytes(self, cls, string): - cls_attrs = self.get_cls_attrs(cls) - - if isinstance(string, (six.text_type, six.binary_type)) and \ - cls_attrs.max_str_len is not None and \ - len(string) > cls_attrs.max_str_len: - raise ValidationError(string, - "Integer %%r longer than %d characters" - % cls_attrs.max_str_len) - - try: - return int(string) - except ValueError: - raise ValidationError(string, "Could not cast %r to integer") - - def time_from_unicode(self, cls, string): - """Expects ISO formatted times.""" - - match = _time_re.match(string) - if match is None: - raise ValidationError(string, "%%r does not match regex %r " % - _time_re.pattern) - - fields = match.groupdict(0) - microsec = fields.get('sec_frac') - if microsec is None or microsec == 0: - microsec = 0 - else: - microsec = min(999999, int(round(float(microsec) * 1e6))) - - return time(int(fields['hr']), int(fields['min']), - int(fields['sec']), microsec) - - def time_from_bytes(self, cls, string): - if isinstance(string, six.binary_type): - string = string.decode(self.default_string_encoding) - - return self.time_from_unicode(cls, string) - - def date_from_unicode_iso(self, cls, string): - """This is used by protocols like SOAP who need ISO8601-formatted dates - no matter what. 
- """ - - try: - return date(*(strptime(string, u'%Y-%m-%d')[0:3])) - - except ValueError: - match = cls._offset_re.match(string) - - if match: - year = int(match.group('year')) - month = int(match.group('month')) - day = int(match.group('day')) - - return date(year, month, day) - - raise ValidationError(string) - - def enum_base_from_bytes(self, cls, value): - if self.validator is self.SOFT_VALIDATION and not ( - cls.validate_string(cls, value)): - raise ValidationError(value) - return getattr(cls, value) - - def model_base_from_bytes(self, cls, value): - return cls.from_bytes(value) - - def datetime_from_unicode_iso(self, cls, string): - astz = self.get_cls_attrs(cls).as_timezone - - match = cls._utc_re.match(string) - if match: - tz = pytz.utc - retval = _parse_datetime_iso_match(match, tz=tz) - if astz is not None: - retval = retval.astimezone(astz) - return retval - - if match is None: - match = cls._offset_re.match(string) - if match: - tz_hr, tz_min = [int(match.group(x)) - for x in ("tz_hr", "tz_min")] - tz = FixedOffset(tz_hr * 60 + tz_min, {}) - retval = _parse_datetime_iso_match(match, tz=tz) - if astz is not None: - retval = retval.astimezone(astz) - return retval - - if match is None: - match = cls._local_re.match(string) - if match: - retval = _parse_datetime_iso_match(match) - if astz: - retval = retval.replace(tzinfo=astz) - return retval - - raise ValidationError(string) - - def datetime_from_unicode(self, cls, string): - serialize_as = self.get_cls_attrs(cls).serialize_as - return self._datetime_dsmap[serialize_as](cls, string) - - def datetime_from_bytes(self, cls, string): - if isinstance(string, six.binary_type): - string = string.decode(self.default_string_encoding) - - serialize_as = self.get_cls_attrs(cls).serialize_as - return self._datetime_dsmap[serialize_as](cls, string) - - def date_from_bytes(self, cls, string): - if isinstance(string, six.binary_type): - string = string.decode(self.default_string_encoding) - - date_format = 
self._get_date_format(self.get_cls_attrs(cls)) - try: - if date_format is not None: - dt = datetime.strptime(string, date_format) - return date(dt.year, dt.month, dt.day) - - return self.date_from_unicode_iso(cls, string) - - except ValueError as e: - match = cls._offset_re.match(string) - if match: - return date(int(match.group('year')), - int(match.group('month')), int(match.group('day'))) - else: - raise ValidationError(string, - "%%r: %s" % repr(e).replace("%", "%%")) - - def date_from_unicode(self, cls, string): - date_format = self._get_date_format(self.get_cls_attrs(cls)) - try: - if date_format is not None: - dt = datetime.strptime(string, date_format) - return date(dt.year, dt.month, dt.day) - - return self.date_from_unicode_iso(cls, string) - - except ValueError as e: - match = cls._offset_re.match(string) - if match: - return date(int(match.group('year')), - int(match.group('month')), int(match.group('day'))) - else: - # the message from ValueError is quite nice already - raise ValidationError(e.message, "%s") - - def duration_from_unicode(self, cls, string): - duration = _duration_re.match(string).groupdict(0) - if duration is None: - raise ValidationError(string, - "Time data '%%s' does not match regex '%s'" % - (_duration_re.pattern,)) - - days = int(duration['days']) - days += int(duration['months']) * 30 - days += int(duration['years']) * 365 - hours = int(duration['hours']) - minutes = int(duration['minutes']) - seconds = float(duration['seconds']) - f, i = modf(seconds) - seconds = i - microseconds = int(1e6 * f) - - delta = timedelta(days=days, hours=hours, minutes=minutes, - seconds=seconds, microseconds=microseconds) - - if duration['sign'] == "-": - delta *= -1 - - return delta - - def duration_from_bytes(self, cls, string): - if isinstance(string, six.binary_type): - string = string.decode(self.default_string_encoding) - - return self.duration_from_unicode(cls, string) - - def boolean_from_bytes(self, cls, string): - return string.lower() in 
('true', '1') - - def byte_array_from_bytes(self, cls, value, suggested_encoding=None): - encoding = self.get_cls_attrs(cls).encoding - if encoding is BINARY_ENCODING_USE_DEFAULT: - encoding = suggested_encoding - return binary_decoding_handlers[encoding](value) - - def file_from_bytes(self, cls, value, suggested_encoding=None): - encoding = self.get_cls_attrs(cls).encoding - if encoding is BINARY_ENCODING_USE_DEFAULT: - encoding = suggested_encoding - - return File.Value(data=binary_decoding_handlers[encoding](value)) - - def complex_model_base_from_bytes(self, cls, string, **_): - raise TypeError("Only primitives can be deserialized from string.") - - def array_from_bytes(self, cls, string, **_): - if self.get_cls_attrs(cls).serialize_as != 'sd-list': - raise TypeError("Only primitives can be deserialized from string.") - - # sd-list being space-delimited list. - retval = [] - inner_type, = cls._type_info.values() - for s in string.split(): - retval.append(self.from_bytes(inner_type, s)) - - return retval - - def xmlattribute_from_bytes(self, cls, value): - return self.from_bytes(cls.type, value) - - def _datetime_from_unicode(self, cls, string): - cls_attrs = self.get_cls_attrs(cls) - - # get parser - parser = cls_attrs.parser - - # get date_format - dt_format = cls_attrs.dt_format - if dt_format is None: - dt_format = cls_attrs.date_format - if dt_format is None: - dt_format = cls_attrs.out_format - if dt_format is None: - dt_format = cls_attrs.format - - # parse the string - if parser is not None: - retval = parser(self, cls, string) - - elif dt_format is not None: - if six.PY2: - # FIXME: perhaps it should encode to string's encoding instead - # of utf8 all the time - if isinstance(dt_format, six.text_type): - dt_format = dt_format.encode('utf8') - if isinstance(string, six.text_type): - string = string.encode('utf8') - - retval = datetime.strptime(string, dt_format) - - astz = cls_attrs.as_timezone - if astz: - retval = 
retval.astimezone(cls_attrs.as_time_zone) - - else: - retval = self.datetime_from_unicode_iso(cls, string) - - return retval - - -_uuid_deserialize = { - None: lambda s: uuid.UUID(s), - 'hex': lambda s: uuid.UUID(hex=s), - 'urn': lambda s: uuid.UUID(hex=s), - 'bytes': lambda s: uuid.UUID(bytes=s), - 'bytes_le': lambda s: uuid.UUID(bytes_le=s), - 'fields': lambda s: uuid.UUID(fields=s), - 'int': lambda s: uuid.UUID(int=s), - ('int', int): lambda s: uuid.UUID(int=s), - ('int', str): lambda s: uuid.UUID(int=int(s)), -} - -if six.PY2: - _uuid_deserialize[('int', long)] = _uuid_deserialize[('int', int)] - - -def _parse_datetime_iso_match(date_match, tz=None): - fields = date_match.groupdict() - - year = int(fields.get('year')) - month = int(fields.get('month')) - day = int(fields.get('day')) - hour = int(fields.get('hr')) - minute = int(fields.get('min')) - second = int(fields.get('sec')) - usecond = fields.get("sec_frac") - if usecond is None: - usecond = 0 - else: - # we only get the most significant 6 digits because that's what - # datetime can handle. 
- usecond = min(999999, int(round(float(usecond) * 1e6))) - - return datetime(year, month, day, hour, minute, second, usecond, tz) - - -_dt_sec = lambda cls, val: \ - int(mktime(val.timetuple())) -_dt_sec_float = lambda cls, val: \ - mktime(val.timetuple()) + (val.microsecond / 1e6) - -_dt_msec = lambda cls, val: \ - int(mktime(val.timetuple())) * 1000 + (val.microsecond // 1000) -_dt_msec_float = lambda cls, val: \ - mktime(val.timetuple()) * 1000 + (val.microsecond / 1000.0) - -_dt_usec = lambda cls, val: \ - int(mktime(val.timetuple())) * 1000000 + val.microsecond - -_datetime_smap = { - 'sec': _dt_sec, - 'secs': _dt_sec, - 'second': _dt_sec, - 'seconds': _dt_sec, - - 'sec_float': _dt_sec_float, - 'secs_float': _dt_sec_float, - 'second_float': _dt_sec_float, - 'seconds_float': _dt_sec_float, - - 'msec': _dt_msec, - 'msecs': _dt_msec, - 'msecond': _dt_msec, - 'mseconds': _dt_msec, - 'millisecond': _dt_msec, - 'milliseconds': _dt_msec, - - 'msec_float': _dt_msec_float, - 'msecs_float': _dt_msec_float, - 'msecond_float': _dt_msec_float, - 'mseconds_float': _dt_msec_float, - 'millisecond_float': _dt_msec_float, - 'milliseconds_float': _dt_msec_float, - - 'usec': _dt_usec, - 'usecs': _dt_usec, - 'usecond': _dt_usec, - 'useconds': _dt_usec, - 'microsecond': _dt_usec, - 'microseconds': _dt_usec, -} - - -def _file_to_iter(f): - try: - data = f.read(65536) - while len(data) > 0: - yield data - data = f.read(65536) - - finally: - f.close() diff --git a/libs_crutch/contrib/spyne/protocol/_outbase.py b/libs_crutch/contrib/spyne/protocol/_outbase.py deleted file mode 100644 index f3ab858..0000000 --- a/libs_crutch/contrib/spyne/protocol/_outbase.py +++ /dev/null @@ -1,904 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function, unicode_literals - -import logging -logger = logging.getLogger(__name__) - -import re -import uuid -import errno - -from os.path import isabs, join, abspath -from collections import deque -from datetime import datetime -from decimal import Decimal as D -from mmap import mmap, ACCESS_READ -from time import mktime, strftime - -try: - from lxml import etree - from lxml import html -except ImportError: - etree = None - html = None - -from spyne.protocol._base import ProtocolMixin -from spyne.model import ModelBase, XmlAttribute, SimpleModel, Null, \ - ByteArray, File, ComplexModelBase, AnyXml, AnyHtml, Unicode, Decimal, \ - Double, Integer, Time, DateTime, Uuid, Duration, Boolean, AnyDict, \ - AnyUri, PushBase, Date -from spyne.model.relational import FileData - -from spyne.const.http import HTTP_400, HTTP_401, HTTP_404, HTTP_405, HTTP_413, \ - HTTP_500 -from spyne.error import Fault, InternalError, ResourceNotFoundError, \ - RequestTooLongError, RequestNotAllowed, InvalidCredentialsError -from spyne.model.binary import binary_encoding_handlers, \ - BINARY_ENCODING_USE_DEFAULT - -from spyne.util import six -from spyne.util.cdict import cdict - - -class OutProtocolBase(ProtocolMixin): - """This is the 
abstract base class for all out protocol implementations. - Child classes can implement only the required subset of the public methods. - - An output protocol must implement :func:`serialize` and - :func:`create_out_string`. - - The OutProtocolBase class supports the following events: - - * ``before_serialize``: - Called before after the serialization operation is attempted. - - * ``after_serialize``: - Called after the serialization operation is finished. - - The arguments the constructor takes are as follows: - - :param app: The application this protocol belongs to. - :param mime_type: The mime_type this protocol should set for transports - that support this. This is a quick way to override the mime_type by - default instead of subclassing the releavant protocol implementation. - :param ignore_uncap: Silently ignore cases when the protocol is not capable - of serializing return values instead of raising a TypeError. - """ - - def __init__(self, app=None, mime_type=None, ignore_uncap=False, - ignore_wrappers=False, binary_encoding=None, string_encoding=None): - - super(OutProtocolBase, self).__init__(app=app, mime_type=mime_type, - ignore_wrappers=ignore_wrappers, - binary_encoding=binary_encoding, string_encoding=string_encoding) - - self.ignore_uncap = ignore_uncap - self.message = None - - if mime_type is not None: - self.mime_type = mime_type - - self._to_bytes_handlers = cdict({ - ModelBase: self.model_base_to_bytes, - File: self.file_to_bytes, - Time: self.time_to_bytes, - Uuid: self.uuid_to_bytes, - Null: self.null_to_bytes, - Date: self.date_to_bytes, - Double: self.double_to_bytes, - AnyXml: self.any_xml_to_bytes, - Unicode: self.unicode_to_bytes, - Boolean: self.boolean_to_bytes, - Decimal: self.decimal_to_bytes, - Integer: self.integer_to_bytes, - AnyHtml: self.any_html_to_bytes, - DateTime: self.datetime_to_bytes, - Duration: self.duration_to_bytes, - ByteArray: self.byte_array_to_bytes, - XmlAttribute: self.xmlattribute_to_bytes, - ComplexModelBase: 
self.complex_model_base_to_bytes, - }) - - self._to_unicode_handlers = cdict({ - ModelBase: self.model_base_to_unicode, - File: self.file_to_unicode, - Time: self.time_to_unicode, - Date: self.date_to_unicode, - Uuid: self.uuid_to_unicode, - Null: self.null_to_unicode, - Double: self.double_to_unicode, - AnyXml: self.any_xml_to_unicode, - AnyUri: self.any_uri_to_unicode, - AnyDict: self.any_dict_to_unicode, - AnyHtml: self.any_html_to_unicode, - Unicode: self.unicode_to_unicode, - Boolean: self.boolean_to_unicode, - Decimal: self.decimal_to_unicode, - Integer: self.integer_to_unicode, - # FIXME: Would we need a to_unicode for localized dates? - DateTime: self.datetime_to_unicode, - Duration: self.duration_to_unicode, - ByteArray: self.byte_array_to_unicode, - XmlAttribute: self.xmlattribute_to_unicode, - ComplexModelBase: self.complex_model_base_to_unicode, - }) - - self._to_bytes_iterable_handlers = cdict({ - File: self.file_to_bytes_iterable, - ByteArray: self.byte_array_to_bytes_iterable, - ModelBase: self.model_base_to_bytes_iterable, - SimpleModel: self.simple_model_to_bytes_iterable, - ComplexModelBase: self.complex_model_to_bytes_iterable, - }) - - - def serialize(self, ctx, message): - """Serializes ``ctx.out_object``. - - If ctx.out_stream is not None, ``ctx.out_document`` and - ``ctx.out_string`` are skipped and the response is written directly to - ``ctx.out_stream``. - - :param ctx: :class:`MethodContext` instance. - :param message: One of ``(ProtocolBase.REQUEST, ProtocolBase.RESPONSE)``. - """ - - def create_out_string(self, ctx, out_string_encoding=None): - """Uses ctx.out_document to set ctx.out_string""" - - def fault_to_http_response_code(self, fault): - """Special function to convert native Python exceptions to Http response - codes. 
- """ - - if isinstance(fault, RequestTooLongError): - return HTTP_413 - - if isinstance(fault, ResourceNotFoundError): - return HTTP_404 - - if isinstance(fault, RequestNotAllowed): - return HTTP_405 - - if isinstance(fault, InvalidCredentialsError): - return HTTP_401 - - if isinstance(fault, Fault) and (fault.faultcode.startswith('Client.') - or fault.faultcode == 'Client'): - return HTTP_400 - - return HTTP_500 - - def set_validator(self, validator): - """You must override this function if you want your protocol to support - validation.""" - - assert validator is None - - self.validator = None - - def to_bytes(self, cls, value, *args, **kwargs): - if value is None: - return None - - handler = self._to_bytes_handlers[cls] - retval = handler(cls, value, *args, **kwargs) - - # enable this only for testing. we're not as strict for performance - # reasons - # assert isinstance(retval, six.binary_type), \ - # "AssertionError: %r %r %r handler: %r" % \ - # (type(retval), six.binary_type, retval, handler) - return retval - - def to_unicode(self, cls, value, *args, **kwargs): - if value is None: - return None - - handler = self._to_unicode_handlers[cls] - retval = handler(cls, value, *args, **kwargs) - # enable this only for testing. 
we're not as strict for performance - # reasons as well as not to take the joy of dealing with duck typing - # from the user - # assert isinstance(retval, six.text_type), \ - # "AssertionError: %r %r handler: %r" % \ - # (type(retval), retval, handler) - - return retval - - def to_bytes_iterable(self, cls, value): - if value is None: - return [] - - if isinstance(value, PushBase): - return value - - handler = self._to_bytes_iterable_handlers[cls] - return handler(cls, value) - - def null_to_bytes(self, cls, value, **_): - return b"" - - def null_to_unicode(self, cls, value, **_): - return u"" - - def any_xml_to_bytes(self, cls, value, **_): - return etree.tostring(value) - - def any_xml_to_unicode(self, cls, value, **_): - return etree.tostring(value, encoding='unicode') - - def any_dict_to_unicode(self, cls, value, **_): - return repr(value) - - def any_html_to_bytes(self, cls, value, **_): - return html.tostring(value) - - def any_html_to_unicode(self, cls, value, **_): - return html.tostring(value, encoding='unicode') - - def uuid_to_bytes(self, cls, value, suggested_encoding=None, **_): - ser_as = self.get_cls_attrs(cls).serialize_as - retval = self.uuid_to_unicode(cls, value, - suggested_encoding=suggested_encoding, **_) - - if ser_as in ('bytes', 'bytes_le', 'fields', 'int', six.binary_type): - return retval - - return retval.encode('ascii') - - def uuid_to_unicode(self, cls, value, suggested_encoding=None, **_): - attr = self.get_cls_attrs(cls) - ser_as = attr.serialize_as - encoding = attr.encoding - - if encoding is None: - encoding = suggested_encoding - - retval = _uuid_serialize[ser_as](value) - if ser_as in ('bytes', 'bytes_le'): - retval = binary_encoding_handlers[encoding]((retval,)) - return retval - - def unicode_to_bytes(self, cls, value, **_): - retval = value - - cls_attrs = self.get_cls_attrs(cls) - - if isinstance(value, six.text_type): - if cls_attrs.encoding is not None: - retval = value.encode(cls_attrs.encoding) - elif 
self.default_string_encoding is not None: - retval = value.encode(self.default_string_encoding) - elif not six.PY2: - logger.warning("You need to set either an encoding for %r " - "or a default_string_encoding for %r", cls, self) - - if cls_attrs.str_format is not None: - return cls_attrs.str_format.format(value) - elif cls_attrs.format is not None: - return cls_attrs.format % retval - - return retval - - def any_uri_to_unicode(self, cls, value, **_): - return self.unicode_to_unicode(cls, value, **_) - - def unicode_to_unicode(self, cls, value, **_): # :))) - cls_attrs = self.get_cls_attrs(cls) - - retval = value - #retval = str(value) - #DEBUG - #print("SPYNE DEBUG2") - - #print(retval) - #print(type(retval)) - #print(cls) - if isinstance(value, six.binary_type): - if cls_attrs.encoding is not None: - retval = value.decode(cls_attrs.encoding) - - if self.default_string_encoding is not None: - retval = value.decode(self.default_string_encoding) - - elif not six.PY2: - logger.warning("You need to set either an encoding for %r " - "or a default_string_encoding for %r", cls, self) - - if cls_attrs.str_format is not None: - return cls_attrs.str_format.format(value) - elif cls_attrs.format is not None: - return cls_attrs.format % retval - - return retval - - def decimal_to_bytes(self, cls, value, **_): - return self.decimal_to_unicode(cls, value, **_).encode('utf8') - - def decimal_to_unicode(self, cls, value, **_): - D(value) # sanity check - cls_attrs = self.get_cls_attrs(cls) - - if cls_attrs.str_format is not None: - return cls_attrs.str_format.format(value) - elif cls_attrs.format is not None: - return cls_attrs.format % value - - return str(value) - - def double_to_bytes(self, cls, value, **_): - return self.double_to_unicode(cls, value, **_).encode('utf8') - - def double_to_unicode(self, cls, value, **_): - float(value) # sanity check - cls_attrs = self.get_cls_attrs(cls) - - if cls_attrs.str_format is not None: - return cls_attrs.str_format.format(value) - elif 
cls_attrs.format is not None: - return cls_attrs.format % value - - return repr(value) - - def integer_to_bytes(self, cls, value, **_): - return self.integer_to_unicode(cls, value, **_).encode('utf8') - - def integer_to_unicode(self, cls, value, **_): - int(value) # sanity check - cls_attrs = self.get_cls_attrs(cls) - - if cls_attrs.str_format is not None: - return cls_attrs.str_format.format(value) - elif cls_attrs.format is not None: - return cls_attrs.format % value - - return str(value) - - def time_to_bytes(self, cls, value, **kwargs): - return self.time_to_unicode(cls, value, **kwargs) - - def time_to_unicode(self, cls, value, **_): - """Returns ISO formatted times.""" - if isinstance(value, datetime): - value = value.time() - return value.isoformat() - - def date_to_bytes(self, cls, val, **_): - return self.date_to_unicode(cls, val, **_).encode("utf8") - - def date_to_unicode(self, cls, val, **_): - if isinstance(val, datetime): - val = val.date() - - sa = self.get_cls_attrs(cls).serialize_as - - if sa is None or sa in (str, 'str'): - return self._date_to_bytes(cls, val) - - return _datetime_smap[sa](cls, val) - - def datetime_to_bytes(self, cls, val, **_): - retval = self.datetime_to_unicode(cls, val, **_) - sa = self.get_cls_attrs(cls).serialize_as - if sa is None or sa in (six.text_type, str, 'str'): - return retval.encode('ascii') - return retval - - def datetime_to_unicode(self, cls, val, **_): - sa = self.get_cls_attrs(cls).serialize_as - - if sa is None or sa in (six.text_type, str, 'str'): - return self._datetime_to_unicode(cls, val) - - return _datetime_smap[sa](cls, val) - - def duration_to_bytes(self, cls, value, **_): - return self.duration_to_unicode(cls, value, **_).encode("utf8") - - def duration_to_unicode(self, cls, value, **_): - if value.days < 0: - value = -value - negative = True - else: - negative = False - - tot_sec = int(value.total_seconds()) - seconds = value.seconds % 60 - minutes = value.seconds // 60 - hours = minutes // 60 - 
minutes %= 60 - seconds = float(seconds) - useconds = value.microseconds - - retval = deque() - if negative: - retval.append("-P") - else: - retval.append("P") - if value.days != 0: - retval.append("%iD" % value.days) - - if tot_sec != 0 and tot_sec % 86400 == 0 and useconds == 0: - return ''.join(retval) - - retval.append('T') - - if hours > 0: - retval.append("%iH" % hours) - - if minutes > 0: - retval.append("%iM" % minutes) - - if seconds > 0 or useconds > 0: - retval.append("%i" % seconds) - if useconds > 0: - retval.append(".%i" % useconds) - retval.append("S") - - if len(retval) == 2: - retval.append('0S') - - return ''.join(retval) - - def boolean_to_bytes(self, cls, value, **_): - return str(bool(value)).lower().encode('ascii') - - def boolean_to_unicode(self, cls, value, **_): - return str(bool(value)).lower() - - def byte_array_to_bytes(self, cls, value, suggested_encoding=None, **_): - cls_attrs = self.get_cls_attrs(cls) - - encoding = cls_attrs.encoding - if encoding is BINARY_ENCODING_USE_DEFAULT: - if suggested_encoding is None: - encoding = self.binary_encoding - else: - encoding = suggested_encoding - - if encoding is None and isinstance(value, (list, tuple)) \ - and len(value) == 1 and isinstance(value[0], mmap): - return value[0] - - encoder = binary_encoding_handlers[encoding] - logger.debug("Using binary encoder %r for encoding %r", - encoder, encoding) - retval = encoder(value) - if encoding is not None and isinstance(retval, six.text_type): - retval = retval.encode('ascii') - - return retval - - def byte_array_to_unicode(self, cls, value, suggested_encoding=None, **_): - encoding = self.get_cls_attrs(cls).encoding - if encoding is BINARY_ENCODING_USE_DEFAULT: - if suggested_encoding is None: - encoding = self.binary_encoding - else: - encoding = suggested_encoding - - if encoding is None: - raise ValueError("Arbitrary binary data can't be serialized to " - "unicode") - - retval = binary_encoding_handlers[encoding](value) - if not 
isinstance(retval, six.text_type): - retval = retval.decode('ascii') - - return retval - - def byte_array_to_bytes_iterable(self, cls, value, **_): - return value - - def file_to_bytes(self, cls, value, suggested_encoding=None): - """ - :param cls: A :class:`spyne.model.File` subclass - :param value: Either a sequence of byte chunks or a - :class:`spyne.model.File.Value` instance. - """ - - encoding = self.get_cls_attrs(cls).encoding - if encoding is BINARY_ENCODING_USE_DEFAULT: - if suggested_encoding is None: - encoding = self.binary_encoding - else: - encoding = suggested_encoding - - if isinstance(value, File.Value): - if value.data is not None: - return binary_encoding_handlers[encoding](value.data) - - if value.handle is not None: - # maybe we should have used the sweeping except: here. - if hasattr(value.handle, 'fileno'): - if six.PY2: - fileno = value.handle.fileno() - data = (mmap(fileno, 0, access=ACCESS_READ),) - else: - import io - try: - fileno = value.handle.fileno() - data = mmap(fileno, 0, access=ACCESS_READ) - except io.UnsupportedOperation: - data = (value.handle.read(),) - else: - data = (value.handle.read(),) - - return binary_encoding_handlers[encoding](data) - - if value.path is not None: - handle = open(value.path, 'rb') - fileno = handle.fileno() - data = mmap(fileno, 0, access=ACCESS_READ) - - return binary_encoding_handlers[encoding](data) - - assert False, "Unhandled file type" - - if isinstance(value, FileData): - try: - return binary_encoding_handlers[encoding](value.data) - except Exception as e: - logger.error("Error encoding value to binary. Error: %r, Value: %r", - e, value) - raise - - try: - return binary_encoding_handlers[encoding](value) - except Exception as e: - logger.error("Error encoding value to binary. 
Error: %r, Value: %r", - e, value) - raise - - def file_to_unicode(self, cls, value, suggested_encoding=None): - """ - :param cls: A :class:`spyne.model.File` subclass - :param value: Either a sequence of byte chunks or a - :class:`spyne.model.File.Value` instance. - """ - - cls_attrs = self.get_cls_attrs(cls) - encoding = cls_attrs.encoding - if encoding is BINARY_ENCODING_USE_DEFAULT: - encoding = suggested_encoding - - if encoding is None and cls_attrs.mode is File.TEXT: - raise ValueError("Arbitrary binary data can't be serialized to " - "unicode.") - - retval = self.file_to_bytes(cls, value, suggested_encoding) - if not isinstance(retval, six.text_type): - retval = retval.decode('ascii') - return retval - - def file_to_bytes_iterable(self, cls, value, **_): - if value.data is not None: - if isinstance(value.data, (list, tuple)) and \ - isinstance(value.data[0], mmap): - return _file_to_iter(value.data[0]) - return iter(value.data) - - if value.handle is not None: - f = value.handle - f.seek(0) - return _file_to_iter(f) - - assert value.path is not None, "You need to write data to " \ - "persistent storage first if you want to read it back." 
- - try: - path = value.path - if not isabs(value.path): - path = join(value.store, value.path) - assert abspath(path).startswith(value.store), \ - "No relative paths are allowed" - return _file_to_iter(open(path, 'rb')) - - except IOError as e: - if e.errno == errno.ENOENT: - raise ResourceNotFoundError(value.path) - else: - raise InternalError("Error accessing requested file") - - def simple_model_to_bytes_iterable(self, cls, value, **kwargs): - retval = self.to_bytes(cls, value, **kwargs) - if retval is None: - return (b'',) - return (retval,) - - def complex_model_to_bytes_iterable(self, cls, value, **_): - if self.ignore_uncap: - return tuple() - raise TypeError("This protocol can only serialize primitives.") - - def complex_model_base_to_bytes(self, cls, value, **_): - raise TypeError("Only primitives can be serialized to string.") - - def complex_model_base_to_unicode(self, cls, value, **_): - raise TypeError("Only primitives can be serialized to string.") - - def xmlattribute_to_bytes(self, cls, string, **kwargs): - return self.to_bytes(cls.type, string, **kwargs) - - def xmlattribute_to_unicode(self, cls, string, **kwargs): - return self.to_unicode(cls.type, string, **kwargs) - - def model_base_to_bytes_iterable(self, cls, value, **kwargs): - return cls.to_bytes_iterable(value, **kwargs) - - def model_base_to_bytes(self, cls, value, **kwargs): - return cls.to_bytes(value, **kwargs) - - def model_base_to_unicode(self, cls, value, **kwargs): - return cls.to_unicode(value, **kwargs) - - def _datetime_to_unicode(self, cls, value, **_): - """Returns ISO formatted datetimes.""" - - cls_attrs = self.get_cls_attrs(cls) - - if cls_attrs.as_timezone is not None and value.tzinfo is not None: - value = value.astimezone(cls_attrs.as_timezone) - - if not cls_attrs.timezone: - value = value.replace(tzinfo=None) - - dt_format = self._get_datetime_format(cls_attrs) - - if dt_format is None: - retval = value.isoformat() - - elif six.PY2 and isinstance(dt_format, unicode): - 
retval = self.strftime(value, dt_format.encode('utf8')).decode('utf8') - - else: - retval = self.strftime(value, dt_format) - - # FIXME: must deprecate string_format, this should have been str_format - str_format = cls_attrs.string_format - if str_format is None: - str_format = cls_attrs.str_format - if str_format is not None: - return str_format.format(value) - - # FIXME: must deprecate interp_format, this should have been just format - interp_format = cls_attrs.interp_format - if interp_format is not None: - return interp_format.format(value) - - return retval - - def _date_to_bytes(self, cls, value, **_): - cls_attrs = self.get_cls_attrs(cls) - - date_format = cls_attrs.date_format - if date_format is None: - retval = value.isoformat() - - elif six.PY2 and isinstance(date_format, unicode): - date_format = date_format.encode('utf8') - retval = self.strftime(value, date_format).decode('utf8') - - else: - retval = self.strftime(value, date_format) - - str_format = cls_attrs.str_format - if str_format is not None: - return str_format.format(value) - - format = cls_attrs.format - if format is not None: - return format.format(value) - - return retval - - # Format a datetime through its full proleptic Gregorian date range. - # http://code.activestate.com/recipes/ - # 306860-proleptic-gregorian-dates-and-strftime-before-1900/ - # http://stackoverflow.com/a/32206673 - # - # >>> strftime(datetime.date(1850, 8, 2), "%Y/%M/%d was a %A") - # '1850/00/02 was a Friday' - # >>> - - - # remove the unsupposed "%s" command. But don't - # do it if there's an even number of %s before the s - # because those are all escaped. Can't simply - # remove the s because the result of - # %sY - # should be %Y if %s isn't supported, not the - # 4 digit year. 
- _illegal_s = re.compile(r"((^|[^%])(%%)*%s)") - - @staticmethod - def _findall_datetime(text, substr): - # Also finds overlaps - sites = [] - i = 0 - while 1: - j = text.find(substr, i) - if j == -1: - break - sites.append(j) - i=j+1 - return sites - - # Every 28 years the calendar repeats, except through century leap - # years where it's 6 years. But only if you're using the Gregorian - # calendar. ;) - - @classmethod - def strftime(cls, dt, fmt): - if cls._illegal_s.search(fmt): - raise TypeError("This strftime implementation does not handle %s") - if dt.year > 1900: - return dt.strftime(fmt) - - year = dt.year - # For every non-leap year century, advance by - # 6 years to get into the 28-year repeat cycle - delta = 2000 - year - off = 6*(delta // 100 + delta // 400) - year += off - - # Move to around the year 2000 - year += ((2000 - year) // 28) * 28 - timetuple = dt.timetuple() - s1 = strftime(fmt, (year,) + timetuple[1:]) - sites1 = cls._findall_datetime(s1, str(year)) - - s2 = strftime(fmt, (year+28,) + timetuple[1:]) - sites2 = cls._findall_datetime(s2, str(year+28)) - - sites = [] - for site in sites1: - if site in sites2: - sites.append(site) - - s = s1 - syear = "%4d" % (dt.year,) - for site in sites: - s = s[:site] + syear + s[site+4:] - return s - - -_uuid_serialize = { - None: str, - str: str, - 'str': str, - - 'hex': lambda u: u.hex, - 'urn': lambda u: u.urn, - 'bytes': lambda u: u.bytes, - 'bytes_le': lambda u: u.bytes_le, - 'fields': lambda u: u.fields, - - int: lambda u: u.int, - 'int': lambda u: u.int, -} - -_uuid_deserialize = { - None: uuid.UUID, - str: uuid.UUID, - 'str': uuid.UUID, - - 'hex': lambda s: uuid.UUID(hex=s), - 'urn': lambda s: uuid.UUID(hex=s), - 'bytes': lambda s: uuid.UUID(bytes=s), - 'bytes_le': lambda s: uuid.UUID(bytes_le=s), - 'fields': lambda s: uuid.UUID(fields=s), - - int: lambda s: uuid.UUID(int=s), - 'int': lambda s: uuid.UUID(int=s), - - (int, int): lambda s: uuid.UUID(int=s), - ('int', int): lambda s: 
uuid.UUID(int=s), - - (int, str): lambda s: uuid.UUID(int=int(s)), - ('int', str): lambda s: uuid.UUID(int=int(s)), -} - -if six.PY2: - _uuid_deserialize[('int', long)] = _uuid_deserialize[('int', int)] - _uuid_deserialize[(int, long)] = _uuid_deserialize[('int', int)] - - -def _parse_datetime_iso_match(date_match, tz=None): - fields = date_match.groupdict() - - year = int(fields.get('year')) - month = int(fields.get('month')) - day = int(fields.get('day')) - hour = int(fields.get('hr')) - minute = int(fields.get('min')) - second = int(fields.get('sec')) - usecond = fields.get("sec_frac") - if usecond is None: - usecond = 0 - else: - # we only get the most significant 6 digits because that's what - # datetime can handle. - usecond = int(round(float(usecond) * 1e6)) - - return datetime(year, month, day, hour, minute, second, usecond, tz) - - -_dt_sec = lambda cls, val: \ - int(mktime(val.timetuple())) -_dt_sec_float = lambda cls, val: \ - mktime(val.timetuple()) + (val.microsecond / 1e6) - -_dt_msec = lambda cls, val: \ - int(mktime(val.timetuple())) * 1000 + (val.microsecond // 1000) -_dt_msec_float = lambda cls, val: \ - mktime(val.timetuple()) * 1000 + (val.microsecond / 1000.0) - -_dt_usec = lambda cls, val: \ - int(mktime(val.timetuple())) * 1000000 + val.microsecond - -_datetime_smap = { - 'sec': _dt_sec, - 'secs': _dt_sec, - 'second': _dt_sec, - 'seconds': _dt_sec, - - 'sec_float': _dt_sec_float, - 'secs_float': _dt_sec_float, - 'second_float': _dt_sec_float, - 'seconds_float': _dt_sec_float, - - 'msec': _dt_msec, - 'msecs': _dt_msec, - 'msecond': _dt_msec, - 'mseconds': _dt_msec, - 'millisecond': _dt_msec, - 'milliseconds': _dt_msec, - - 'msec_float': _dt_msec_float, - 'msecs_float': _dt_msec_float, - 'msecond_float': _dt_msec_float, - 'mseconds_float': _dt_msec_float, - 'millisecond_float': _dt_msec_float, - 'milliseconds_float': _dt_msec_float, - - 'usec': _dt_usec, - 'usecs': _dt_usec, - 'usecond': _dt_usec, - 'useconds': _dt_usec, - 'microsecond': 
_dt_usec, - 'microseconds': _dt_usec, -} - - -def _file_to_iter(f): - try: - data = f.read(8192) - while len(data) > 0: - yield data - data = f.read(8192) - - finally: - f.close() - - -META_ATTR = ['nullable', 'default_factory'] diff --git a/libs_crutch/contrib/spyne/protocol/cloth/__init__.py b/libs_crutch/contrib/spyne/protocol/cloth/__init__.py deleted file mode 100644 index 2bf9b93..0000000 --- a/libs_crutch/contrib/spyne/protocol/cloth/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol.cloth`` package contains an EXPERIMENTAL protocol -for clothing otherwise boring data. -""" - -from spyne.protocol.cloth._base import XmlCloth -# huge hack to have the last line of microformat.py execute -import spyne.protocol.html diff --git a/libs_crutch/contrib/spyne/protocol/cloth/_base.py b/libs_crutch/contrib/spyne/protocol/cloth/_base.py deleted file mode 100644 index 4ea7fe6..0000000 --- a/libs_crutch/contrib/spyne/protocol/cloth/_base.py +++ /dev/null @@ -1,326 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import logging -logger = logging.getLogger(__name__) - -from inspect import isgenerator - -from lxml import etree -from lxml.etree import LxmlSyntaxError - -from spyne import ProtocolContext, BODY_STYLE_WRAPPED, ByteArray, File, Array -from spyne.util import Break, coroutine -from spyne.protocol import ProtocolMixin - -from spyne.protocol.cloth.to_parent import ToParentMixin -from spyne.protocol.cloth.to_cloth import ToClothMixin -from spyne.util.six import BytesIO -from spyne.util.color import R, B -from spyne.util.tlist import tlist - - -class XmlClothProtocolContext(ProtocolContext): - def __init__(self, parent, transport, type=None): - super(XmlClothProtocolContext, self).__init__(parent, transport, type) - - self.inst_stack = tlist([], tuple) - self.prot_stack = tlist([], ProtocolMixin) - self.doctype_written = False - - -class XmlCloth(ToParentMixin, ToClothMixin): - mime_type = 'text/xml' - HtmlMicroFormat = None - - def __init__(self, app=None, encoding='utf8', doctype=None, - mime_type=None, ignore_uncap=False, ignore_wrappers=False, - cloth=None, cloth_parser=None, polymorphic=True, - strip_comments=True, use_ns=None, skip_root_tag=False): - - super(XmlCloth, self).__init__(app=app, 
mime_type=mime_type, - ignore_uncap=ignore_uncap, ignore_wrappers=ignore_wrappers, - polymorphic=polymorphic) - - self._init_cloth(cloth, cloth_parser, strip_comments) - self.developer_mode = False - self.encoding = encoding - self.default_method = 'xml' - self.doctype = doctype - self.use_ns = use_ns - self.skip_root_tag = skip_root_tag - - def get_context(self, parent, transport): - return XmlClothProtocolContext(parent, transport) - - def serialize(self, ctx, message): - """Uses ``ctx.out_object``, ``ctx.out_header`` or ``ctx.out_error`` to - set ``ctx.out_body_doc``, ``ctx.out_header_doc`` and - ``ctx.out_document`` as an ``lxml.etree._Element instance``. - - Not meant to be overridden. - """ - - assert message in (self.REQUEST, self.RESPONSE) - - self.event_manager.fire_event('before_serialize', ctx) - - if ctx.out_stream is None: - ctx.out_stream = BytesIO() - logger.debug("%r %d", ctx.out_stream, id(ctx.out_stream)) - - if ctx.out_error is not None: - # All errors at this point must be Fault subclasses. - inst = ctx.out_error - cls = inst.__class__ - name = cls.get_type_name() - - if self.developer_mode: - # FIXME: the eff is this? 
- ctx.out_object = (inst,) - - retval = self._incgen(ctx, cls, inst, name) - else: - with self.docfile(ctx.out_stream, encoding=self.encoding) as xf: - retval = self.to_parent(ctx, cls, inst, xf, name) - - else: - assert message is self.RESPONSE - result_class = ctx.descriptor.out_message - - name = result_class.get_type_name() - if ctx.descriptor.body_style == BODY_STYLE_WRAPPED: - if self.ignore_wrappers: - result_inst = ctx.out_object[0] - while result_class.Attributes._wrapper and \ - len(result_class._type_info) == 1: - result_class, = result_class._type_info.values() - - else: - result_inst = result_class() - - for i, attr_name in enumerate( - result_class._type_info.keys()): - setattr(result_inst, attr_name, ctx.out_object[i]) - - else: - result_inst, = ctx.out_object - - retval = self._incgen(ctx, result_class, result_inst, name) - - self.event_manager.fire_event('after_serialize', ctx) - - return retval - - def create_out_string(self, ctx, charset=None): - """Sets an iterable of string fragments to ctx.out_string if the output - is a StringIO object, which means we're run by a sync framework. Async - frameworks have the out_stream write directly to the output stream so - out_string should not be used. - """ - - if isinstance(ctx.out_stream, BytesIO): - ctx.out_string = [ctx.out_stream.getvalue()] - - @coroutine - def _incgen(self, ctx, cls, inst, name): - """Entry point to the (stack of) XmlCloth-based protocols. - - Not supposed to be overridden. 
- """ - - if name is None: - name = cls.get_type_name() - - try: - with self.docfile(ctx.out_stream, encoding=self.encoding) as xf: - ctx.outprot_ctx.doctype_written = False - ctx.protocol.prot_stack = tlist([], ProtocolMixin) - ret = self.subserialize(ctx, cls, inst, xf, name) - - if isgenerator(ret): # Poor man's yield from - try: - while True: - sv2 = (yield) - ret.send(sv2) - - except Break as b: - try: - ret.throw(b) - except StopIteration: - pass - - except LxmlSyntaxError as e: - if e.msg == 'no content written': - pass - else: - raise - - def docfile(self, *args, **kwargs): - logger.debug("Starting file with %r %r", args, kwargs) - return etree.xmlfile(*args, **kwargs) - - def _get_doctype(self, cloth): - if self.doctype is not None: - return self.doctype - - if cloth is not None: - return cloth.getroottree().docinfo.doctype - - if self._root_cloth is not None: - return self._root_cloth.getroottree().docinfo.doctype - - if self._cloth is not None: - return self._cloth.getroottree().docinfo.doctype - - def write_doctype(self, ctx, parent, cloth=None): - dt = self._get_doctype(cloth) - if dt is None: - return - - parent.write_doctype(dt) - ctx.outprot_ctx.doctype_written = True - logger.debug("Doctype written as: '%s'", dt) - - @staticmethod - def get_class_cloth(cls): - return cls.Attributes._xml_cloth - - @staticmethod - def get_class_root_cloth(cls): - return cls.Attributes._xml_root_cloth - - def check_class_cloths(self, ctx, cls, inst, parent, name, **kwargs): - c = self.get_class_root_cloth(cls) - eltstack = getattr(ctx.protocol, 'eltstack', []) - if c is not None and len(eltstack) == 0 and not (eltstack[-1] is c): - if not ctx.outprot_ctx.doctype_written: - self.write_doctype(ctx, parent, c) - - logger.debug("to object root cloth") - return True, self.to_root_cloth(ctx, cls, inst, c, parent, name, - **kwargs) - c = self.get_class_cloth(cls) - if c is not None: - if not ctx.outprot_ctx.doctype_written: - self.write_doctype(ctx, parent, c) - - 
logger.debug("to object cloth") - return True, self.to_parent_cloth(ctx, cls, inst, c, parent, name, - **kwargs) - return False, None - - @coroutine - def subserialize(self, ctx, cls, inst, parent, name='', **kwargs): - """Bridge between multiple XmlCloth-based protocols. - - Not supposed to be overridden. - """ - - pstack = ctx.protocol.prot_stack - pstack.append(self) - logger.debug("%s push prot %r. newlen: %d", R("%"), self, len(pstack)) - - have_cloth = False - - cls_cloth = self.get_class_cloth(cls) - if cls_cloth is not None: - logger.debug("to object cloth for %s", cls.get_type_name()) - ret = self.to_parent_cloth(ctx, cls, inst, cls_cloth, parent, name) - - elif self._root_cloth is not None: - logger.debug("to root cloth for %s", cls.get_type_name()) - ret = self.to_root_cloth(ctx, cls, inst, self._root_cloth, - parent, name) - have_cloth = True - - elif self._cloth is not None: - logger.debug("to parent protocol cloth for %s", cls.get_type_name()) - ret = self.to_parent_cloth(ctx, cls, inst, self._cloth, parent, - name) - have_cloth = True - - else: - logger.debug("to parent for %s", cls.get_type_name()) - ret = self.start_to_parent(ctx, cls, inst, parent, name, **kwargs) - - if isgenerator(ret): # Poor man's yield from - try: - while True: - sv2 = (yield) - ret.send(sv2) - - except Break as b: - try: - ret.throw(b) - except StopIteration: - pass - finally: - self._finalize_protocol(ctx, parent, have_cloth) - else: - self._finalize_protocol(ctx, parent, have_cloth) - - pstack.pop() - logger.debug("%s pop prot %r. 
newlen: %d", B("%"), self, len(pstack)) - - def _finalize_protocol(self, ctx, parent, have_cloth): - if have_cloth: - self._close_cloth(ctx, parent) - return - - if len(ctx.protocol.prot_stack) == 1 and len(ctx.protocol.eltstack) > 0: - self._close_cloth(ctx, parent) - return - - @staticmethod - def _gen_tagname(ns, name): - if ns is not None: - name = "{%s}%s" % (ns, name) - return name - - def _gen_attrib_dict(self, inst, fti): - attrs = {} - - for field_name, field_type in fti.attrs.items(): - ns = field_type._ns - if ns is None: - ns = field_type.Attributes.sub_ns - - sub_name = field_type.Attributes.sub_name - if sub_name is None: - sub_name = field_name - - val = getattr(inst, field_name, None) - sub_name = self._gen_tagname(ns, sub_name) - - if issubclass(field_type.type, (ByteArray, File)): - valstr = self.to_unicode(field_type.type, val, - self.binary_encoding) - else: - valstr = self.to_unicode(field_type.type, val) - - if valstr is not None: - attrs[sub_name] = valstr - - return attrs - - def decompose_incoming_envelope(self, ctx, message): - raise NotImplementedError("This is an output-only protocol.") diff --git a/libs_crutch/contrib/spyne/protocol/cloth/to_cloth.py b/libs_crutch/contrib/spyne/protocol/cloth/to_cloth.py deleted file mode 100644 index 6dd59cd..0000000 --- a/libs_crutch/contrib/spyne/protocol/cloth/to_cloth.py +++ /dev/null @@ -1,865 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -from __future__ import print_function - -import logging -logger_c = logging.getLogger("%s.cloth" % __name__) -logger_s = logging.getLogger("%s.serializer" % __name__) - -from lxml import html, etree -from copy import deepcopy -from inspect import isgenerator - -from spyne.util import Break, coroutine -from spyne.util.oset import oset -from spyne.util.six import string_types -from spyne.util.color import R, B -from spyne.model import Array, AnyXml, AnyHtml, ModelBase, ComplexModelBase, \ - PushBase, XmlAttribute, AnyUri, XmlData, Any - -from spyne.protocol import OutProtocolBase -from spyne.util.cdict import cdict - -_revancestors = lambda elt: list(reversed(tuple(elt.iterancestors()))) - -_NODATA = type("_NODATA", (object,), {}) - - -def _prevsibls(elt, strip_comments, since=None): - return reversed(list(_prevsibls_since(elt, strip_comments, since))) - - -def _prevsibls_since(elt, strip_comments, since): - if since is elt: - return - - for prevsibl in elt.itersiblings(preceding=True): - if prevsibl is since: - break - - if strip_comments and isinstance(elt, etree.CommentBase): - if elt.text.startswith('[if ') and elt.text.endswith('[endif]'): - pass - else: - continue - - yield prevsibl - - -def _set_identifier_prefix(obj, prefix, mrpc_id='mrpc', id_attr='id', - data_tag='data', data_attr='data', attr_attr='attr', - root_attr='root', tagbag_attr='tagbag'): - obj.ID_PREFIX = prefix - - obj.MRPC_ID = '{}{}'.format(prefix, mrpc_id) - obj.ID_ATTR_NAME = '{}{}'.format(prefix, id_attr) - obj.DATA_TAG_NAME = '{}{}'.format(prefix, data_tag) - obj.DATA_ATTR_NAME = '{}{}'.format(prefix, data_attr) - obj.ATTR_ATTR_NAME = '{}{}'.format(prefix, attr_attr) - obj.ROOT_ATTR_NAME = '{}{}'.format(prefix, root_attr) - obj.TAGBAG_ATTR_NAME = '{}{}'.format(prefix, 
tagbag_attr) - # FIXME: get rid of this. We don't want logic creep inside cloths - obj.WRITE_CONTENTS_WHEN_NOT_NONE = '{}write-contents'.format(prefix) - - obj.SPYNE_ATTRS = { - obj.ID_ATTR_NAME, - obj.DATA_ATTR_NAME, - obj.ATTR_ATTR_NAME, - obj.ROOT_ATTR_NAME, - obj.TAGBAG_ATTR_NAME, - obj.WRITE_CONTENTS_WHEN_NOT_NONE, - } - - -class ClothParserMixin(object): - ID_PREFIX = 'spyne-' - - # these are here for documentation purposes. The are all reinitialized with - # the call ta _set_identifier_prefix below the class definition - ID_ATTR_NAME = 'spyne-id' - DATA_TAG_NAME = 'spyne-data' - DATA_ATTR_NAME = 'spyne-data' - ATTR_ATTR_NAME = 'spyne-attr' - ROOT_ATTR_NAME = 'spyne-root' - TAGBAG_ATTR_NAME = 'spyne-tagbag' - WRITE_CONTENTS_WHEN_NOT_NONE = 'spyne-write-contents' - - def set_identifier_prefix(self, what): - _set_identifier_prefix(self, what) - return self - - @classmethod - def from_xml_cloth(cls, cloth, strip_comments=True): - retval = cls() - retval._init_cloth(cloth, cloth_parser=etree.XMLParser(), - strip_comments=strip_comments) - return retval - - @classmethod - def from_html_cloth(cls, cloth, strip_comments=True): - retval = cls() - retval._init_cloth(cloth, cloth_parser=html.HTMLParser(), - strip_comments=strip_comments) - return retval - - @staticmethod - def _strip_comments(root): - for elt in root.iter(): - if isinstance(elt, etree.CommentBase): - if elt.getparent() is not None: - if elt.text.startswith('[if ') \ - and elt.text.endswith('[endif]'): - pass - else: - elt.getparent().remove(elt) - - def _parse_file(self, file_name, cloth_parser): - cloth = etree.parse(file_name, parser=cloth_parser) - return cloth.getroot() - - def _init_cloth(self, cloth, cloth_parser, strip_comments): - """Called from XmlCloth.__init__ in order to not break the dunder init - signature consistency""" - - self._cloth = None - self._root_cloth = None - self.strip_comments = strip_comments - - self._mrpc_cloth = self._root_cloth = None - - if cloth is None: - return - - 
if isinstance(cloth, string_types): - cloth = self._parse_file(cloth, cloth_parser) - - if strip_comments: - self._strip_comments(cloth) - - q = "//*[@%s]" % self.ROOT_ATTR_NAME - elts = cloth.xpath(q) - if len(elts) > 0: - logger_c.debug("Using %r as root cloth.", cloth) - self._root_cloth = elts[0] - else: - logger_c.debug("Using %r as plain cloth.", cloth) - self._cloth = cloth - - self._mrpc_cloth = self._pop_elt(cloth, 'mrpc_entry') - - def _pop_elt(self, elt, what): - query = '//*[@%s="%s"]' % (self.ID_ATTR_NAME, what) - retval = elt.xpath(query) - if len(retval) > 1: - raise ValueError("more than one element found for query %r" % query) - - elif len(retval) == 1: - retval = retval[0] - next(retval.iterancestors()).remove(retval) - return retval - - -_set_identifier_prefix(ClothParserMixin, ClothParserMixin.ID_PREFIX) - - -class ToClothMixin(OutProtocolBase, ClothParserMixin): - def __init__(self, app=None, mime_type=None, ignore_uncap=False, - ignore_wrappers=False, polymorphic=True): - super(ToClothMixin, self).__init__(app=app, mime_type=mime_type, - ignore_uncap=ignore_uncap, ignore_wrappers=ignore_wrappers) - - self.polymorphic = polymorphic - self.rendering_handlers = cdict({ - ModelBase: self.model_base_to_cloth, - AnyXml: self.xml_to_cloth, - Any: self.any_to_cloth, - AnyHtml: self.html_to_cloth, - AnyUri: self.any_uri_to_cloth, - ComplexModelBase: self.complex_to_cloth, - }) - - def _get_elts(self, elt, tag_id=None): - if tag_id is None: - return elt.xpath('.//*[@*[starts-with(name(), "%s")]]' % - self.ID_PREFIX) - return elt.xpath('.//*[@*[starts-with(name(), "%s")]="%s"]' % ( - self.ID_PREFIX, tag_id)) - - def _get_outmost_elts(self, tmpl, tag_id=None): - ids = set() - - # we assume xpath() returns elements in top to bottom (or outside to - # inside) order. 
- for elt in self._get_elts(tmpl, tag_id): - if elt is tmpl: # FIXME: kill this - logger_c.debug("Don't send myself") - continue # don't send myself - - if len(set((id(e) for e in elt.iterancestors())) & ids) > 0: - logger_c.debug("Don't send grandchildren") - continue # don't send grandchildren - - if id(elt) in ids: # FIXME: this check should be safe to remove - logger_c.debug("Don't send what's already been sent") - continue # don't send what's already been sent - - if self.ID_ATTR_NAME in elt.attrib: - # Prevent primitive attrs like spyne-attr from interfering - # with elt descent - ids.add(id(elt)) - - yield elt - - def _get_clean_elt(self, elt, what): - query = '//*[@%s="%s"]' % (self.ID_ATTR_NAME, what) - retval = elt.xpath(query) - if len(retval) > 1: - raise ValueError("more than one element found for query %r" % query) - - elif len(retval) == 1: - retval = retval[0] - del retval.attrib[self.ID_ATTR_NAME] - return retval - - def _get_elts_by_id(self, elt, what): - retval = elt.xpath('//*[@id="%s"]' % what) - logger_c.debug("id=%r got %r", what, retval) - return retval - - def _is_tagbag(self, elt): - return self.TAGBAG_ATTR_NAME in elt.attrib - - @staticmethod - def _methods(ctx, cls, inst): - while cls.Attributes._wrapper and len(cls._type_info) > 0: - cls, = cls._type_info.values() - - if cls.Attributes.methods is not None: - for k, v in cls.Attributes.methods.items(): - is_shown = True - if v.when is not None: - is_shown = v.when(inst, ctx) - - if is_shown: - yield k, v - - def _actions_to_cloth(self, ctx, cls, inst, template): - if self._mrpc_cloth is None: - logger_c.warning("missing 'mrpc_template'") - return - - for elt in self._get_elts(template, self.MRPC_ID): - for k, v in self._methods(ctx, cls, inst): - href = v.in_message.get_type_name() - text = v.translate(ctx.locale, v.in_message.get_type_name()) - - mrpc_template = deepcopy(self._mrpc_cloth) - anchor = self._get_clean_elt(mrpc_template, 'mrpc_link') - anchor.attrib['href'] = href - - 
text_elt = self._get_clean_elt(mrpc_template, 'mrpc_text') - if text_elt is not None: - text_elt.text = text - else: - anchor.text = text - - elt.append(mrpc_template) - # mutable default ok because readonly - def _enter_cloth(self, ctx, cloth, parent, attrib={}, skip=False, - method=None, skip_dupe=False): - """Enters the given tag in the document by using the shortest path from - current tag. - - 1. Moves up the tree by writing all tags so that the set of ancestors - of the current tag are a subset of the ancestors of the parent tag - 2. Writes all tags until hitting a direct ancestor, enters it, and - keeps writing previous siblings of ancestor tags and entering - ancestor tags until hitting the target tag. - 3. Enters the target tag and returns - - There is no _exit_cloth because exiting from tags is done - automatically with subsequent calls to _enter_cloth and finally to - _close_cloth. - - :param ctx: A MethodContext instance - :param cloth: The target cloth -- an ``lxml.etree._Element`` instance. - :param parent: The target stream -- typically an - ``lxml.etree._IncrementalFileWriter`` instance. - :param attrib: A dict of additional attributes for the target cloth. - :param skip: When True, the target tag is actually not entered. - Typically used for XmlData and friends. - :param method: One of ``(None, 'html', 'xml')``. When not ``None``, - overrides the output method of lxml. - :param skip_dupe: When ``False`` (the default) if this function is - called repeatedly for the same tag, the tag is exited and reentered. - This typically happens for types with ``max_occurs`` > 1 - (eg. arrays). 
- """ - - logger_c.debug("entering %s %r nsmap=%r attrib=%r skip=%s method=%s", - cloth.tag, cloth.attrib, cloth.nsmap, attrib, skip, method) - - if not ctx.outprot_ctx.doctype_written: - self.write_doctype(ctx, parent, cloth) - - tags = ctx.protocol.tags - rootstack = ctx.protocol.rootstack - assert isinstance(rootstack, oset) - - eltstack = ctx.protocol.eltstack - ctxstack = ctx.protocol.ctxstack - - cureltstack = eltstack[rootstack.back] - curctxstack = ctxstack[rootstack.back] - - if skip_dupe and len(cureltstack) > 0 and cureltstack[-1] is cloth: - return - - cloth_root = cloth.getroottree().getroot() - if not cloth_root in rootstack: - rootstack.add(cloth_root) - cureltstack = eltstack[rootstack.back] - curctxstack = ctxstack[rootstack.back] - - assert rootstack.back == cloth_root - - while rootstack.back != cloth_root: - self._close_cloth(ctx, parent) - - last_elt = None - if len(cureltstack) > 0: - last_elt = cureltstack[-1] - - ancestors = _revancestors(cloth) - - # move up in tag stack until the ancestors of both - # source and target tags match - while ancestors[:len(cureltstack)] != cureltstack: - elt = cureltstack.pop() - elt_ctx = curctxstack.pop() - - last_elt = elt - if elt_ctx is not None: - self.event_manager.fire_event(("before_exit", elt), ctx, parent) - elt_ctx.__exit__(None, None, None) - logger_c.debug("\texit norm %s %s", elt.tag, elt.attrib) - if elt.tail is not None: - parent.write(elt.tail) - - # unless we're at the same level as the relevant ancestor of the - # target node - if ancestors[:len(cureltstack)] != cureltstack: - # write following siblings before closing parent node - for sibl in elt.itersiblings(preceding=False): - logger_c.debug("\twrite exit sibl %s %r %d", - sibl.tag, sibl.attrib, id(sibl)) - parent.write(sibl) - - # write remaining ancestors of the target node. 
- for anc in ancestors[len(cureltstack):]: - # write previous siblings of ancestors (if any) - prevsibls = _prevsibls(anc, self.strip_comments, since=last_elt) - for elt in prevsibls: - if id(elt) in tags: - logger_c.debug("\tskip anc prevsibl %s %r", - elt.tag, elt.attrib) - continue - - logger_c.debug("\twrite anc prevsibl %s %r 0x%x", - elt.tag, elt.attrib, id(elt)) - parent.write(elt) - - # enter the ancestor node - kwargs = {} - if len(cureltstack) == 0: - # if this is the first node ever, initialize namespaces as well - kwargs['nsmap'] = anc.nsmap - - anc_ctx = parent.element(anc.tag, anc.attrib, **kwargs) - anc_ctx.__enter__() - logger_c.debug("\tenter norm %s %r 0x%x method: %r", anc.tag, - anc.attrib, id(anc), method) - if anc.text is not None: - parent.write(anc.text) - - rootstack.add(anc.getroottree().getroot()) - cureltstack = eltstack[rootstack.back] - curctxstack = ctxstack[rootstack.back] - cureltstack.append(anc) - curctxstack.append(anc_ctx) - - # now that at the same level as the target node, - # write its previous siblings - prevsibls = _prevsibls(cloth, self.strip_comments, since=last_elt) - for elt in prevsibls: - if elt is last_elt: - continue - - if id(elt) in tags: - logger_c.debug("\tskip cloth prevsibl %s %r", - elt.tag, elt.attrib) - continue - - logger_c.debug("\twrite cloth prevsibl %s %r", elt.tag, elt.attrib) - parent.write(elt) - - skip = skip or (cloth.tag == self.DATA_TAG_NAME) - - if skip: - tags.add(id(cloth)) - if method is not None: - curtag = parent.method(method) - curtag.__enter__() - else: - curtag = None - - else: - # finally, enter the target node. 
- cloth_attrib = dict([(k, v) for k, v in cloth.attrib.items() - if not k in self.SPYNE_ATTRS]) - - cloth_attrib.update(attrib) - - self.event_manager.fire_event(("before_entry", cloth), ctx, - parent, cloth_attrib) - - kwargs = {} - if len(cureltstack) == 0: - # if this is the first node ever, initialize namespaces as well - kwargs['nsmap'] = cloth.nsmap - if method is not None: - kwargs['method'] = method - curtag = parent.element(cloth.tag, cloth_attrib, **kwargs) - curtag.__enter__() - if cloth.text is not None: - parent.write(cloth.text) - - rootstack.add(cloth.getroottree().getroot()) - cureltstack = eltstack[rootstack.back] - curctxstack = ctxstack[rootstack.back] - - cureltstack.append(cloth) - curctxstack.append(curtag) - - logger_c.debug("") - - def _close_cloth(self, ctx, parent): - rootstack = ctx.protocol.rootstack - close_until = rootstack.back - cureltstack = ctx.protocol.eltstack[close_until] - curctxstack = ctx.protocol.ctxstack[close_until] - - for elt, elt_ctx in reversed(tuple(zip(cureltstack, curctxstack))): - if elt_ctx is not None: - self.event_manager.fire_event(("before_exit", elt), ctx, parent) - elt_ctx.__exit__(None, None, None) - logger_c.debug("exit %s close", elt.tag) - if elt.tail is not None: - parent.write(elt.tail) - - for sibl in elt.itersiblings(preceding=False): - logger_c.debug("write %s nextsibl", sibl.tag) - parent.write(sibl) - if sibl.tail is not None: - parent.write(sibl.tail) - - if elt is close_until: - logger_c.debug("closed until %r, breaking out", close_until) - break - - del ctx.protocol.eltstack[close_until] - del ctx.protocol.ctxstack[close_until] - - if len(rootstack) > 0: - rootstack.pop() - - @coroutine - def to_parent_cloth(self, ctx, cls, inst, cloth, parent, name, - from_arr=False, **kwargs): - cls_cloth = self.get_class_cloth(cls) - if cls_cloth is not None: - logger_c.debug("%r to object cloth", cls) - cloth = cls_cloth - ctx.protocol[self].rootstack.add(cloth) - - ret = self.to_cloth(ctx, cls, inst, 
cloth, parent, '') - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as e: - try: - ret.throw(e) - except (Break, StopIteration, GeneratorExit): - pass - - @coroutine - def to_root_cloth(self, ctx, cls, inst, cloth, parent, name): - if len(ctx.protocol.eltstack) > 0: - ctx.protocol[self].rootstack.add(cloth) - - cls_attrs = self.get_cls_attrs(cls) - self._enter_cloth(ctx, cloth, parent, method=cls_attrs.method) - - ret = self.start_to_parent(ctx, cls, inst, parent, name) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as e: - try: - ret.throw(e) - except (Break, StopIteration, GeneratorExit): - pass - - # TODO: Maybe DRY this with to_parent? - @coroutine - def to_cloth(self, ctx, cls, inst, cloth, parent, name=None, - from_arr=False, as_attr=False, as_data=False, **kwargs): - - prot_name = self.__class__.__name__ - - if issubclass(cls, XmlAttribute): - cls = cls.type - as_attr = True - - elif issubclass(cls, XmlData): - cls = cls.type - as_data = True - - pushed = False - if cloth is None: - logger_c.debug("No cloth fround, switching to to_parent...") - ret = self.to_parent(ctx, cls, inst, parent, name, **kwargs) - - else: - cls, _ = self.get_polymorphic_target(cls, inst) - cls_attrs = self.get_cls_attrs(cls) - - inst = self._sanitize(cls_attrs, inst) - - # if instance is None, use the default factory to generate one - _df = cls_attrs.default_factory - if inst is None and callable(_df): - inst = _df() - - # if instance is still None, use the default value - if inst is None: - inst = cls_attrs.default - - # if there's a subprotocol, switch to it - subprot = cls_attrs.prot - if subprot is not None and not (subprot is self): - # we can't do this because subprotocols don't accept cloths. - # so we need to enter the cloth, which make it too late to - # set attributes. 
- assert not as_attr, "No subprot supported for fields " \ - "to be serialized as attributes, use type casting with " \ - "customized serializers in the current protocol instead." - - self._enter_cloth(ctx, cloth, parent, - method=cls_attrs.method, skip=as_data) - - ret = subprot.subserialize(ctx, cls, inst, parent, name, - as_attr=as_attr, as_data=as_data, **kwargs) - - # if there is no subprotocol, try rendering the value - else: - ret = None - - # try rendering the null value - if inst is None: - if cls_attrs.min_occurs > 0: - attrs = {} - if as_attr: - # FIXME: test needed - attrs[name] = '' - - self._enter_cloth(ctx, cloth, parent, attrib=attrs, - method=cls_attrs.method) - identifier = "%s.%s" % (prot_name, "null_to_cloth") - logger_s.debug("Writing '%s' using %s type: %s.", name, - identifier, cls.get_type_name()) - parent.write(cloth) - - else: - logger_s.debug("Skipping '%s' type: %s because empty.", - name, cls.get_type_name()) - self._enter_cloth(ctx, cloth, parent, skip=True, - method=cls_attrs.method) - - elif as_data: - # we only support XmlData of a primitive.,. is this a - # problem? - ret = self.to_unicode(cls, inst) - if ret is not None: - parent.write(ret) - - elif as_attr: - sub_name = cls_attrs.sub_name - if sub_name is None: - sub_name = name - attrs = {sub_name: self.to_unicode(cls, inst)} - - self._enter_cloth(ctx, cloth, parent, attrib=attrs, - method=cls_attrs.method) - - else: - # push the instance at hand to instance stack. this makes it - # easier for protocols to make decisions based on parents of - # instances at hand. 
- pushed = True - logger_c.debug("%s %r pushed %r %r", R("#"), self, cls, inst) - ctx.outprot_ctx.inst_stack.append((cls, inst, from_arr)) - - # try rendering the array value - if not from_arr and cls.Attributes.max_occurs > 1: - ret = self.array_to_cloth(ctx, cls, inst, cloth, parent, - as_attr=as_attr, name=name) - else: - # try rendering anything else - handler = self.rendering_handlers[cls] - - # disabled for performance reasons - # identifier = "%s.%s" % (prot_name, handler.__name__) - # from spyne.util.web import log_repr - # logger_s.debug("Writing %s using %s for %s. Inst: %r", - # name, identifier, cls.get_type_name(), - # log_repr(inst, cls, from_array=from_arr)) - - ret = handler(ctx, cls, inst, cloth, parent, name=name, - as_attr=as_attr) - - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as e: - try: - ret.throw(e) - except (Break, StopIteration, GeneratorExit): - pass - finally: - if pushed: - logger_c.debug("%s %r popped %r %r", B("#"), - self, cls, inst) - ctx.outprot_ctx.inst_stack.pop() - - else: - if pushed: - logger_c.debug("%s %r popped %r %r", B("#"), self, cls, inst) - ctx.outprot_ctx.inst_stack.pop() - - def model_base_to_cloth(self, ctx, cls, inst, cloth, parent, name, - **kwargs): - cls_attrs = self.get_cls_attrs(cls) - self._enter_cloth(ctx, cloth, parent, method=cls_attrs.method) - - # FIXME: Does it make sense to do this in other types? 
- if self.WRITE_CONTENTS_WHEN_NOT_NONE in cloth.attrib: - logger_c.debug("Writing contents for %r", cloth) - for c in cloth: - parent.write(c) - - else: - parent.write(self.to_unicode(cls, inst)) - - def xml_to_cloth(self, ctx, cls, inst, cloth, parent, name, **_): - cls_attrs = self.get_cls_attrs(cls) - self._enter_cloth(ctx, cloth, parent, method=cls_attrs.method) - if isinstance(inst, string_types): - inst = etree.fromstring(inst) - parent.write(inst) - - def any_to_cloth(self, ctx, cls, inst, cloth, parent, name, **_): - cls_attrs = self.get_cls_attrs(cls) - self._enter_cloth(ctx, cloth, parent, method=cls_attrs.method) - parent.write(inst) - - def html_to_cloth(self, ctx, cls, inst, cloth, parent, name, **_): - cls_attrs = self.get_cls_attrs(cls) - self._enter_cloth(ctx, cloth, parent, method=cls_attrs.method) - if isinstance(inst, string_types): - inst = html.fromstring(inst) - parent.write(inst) - - def any_uri_to_cloth(self, ctx, cls, inst, cloth, parent, name, **kwargs): - cls_attrs = self.get_cls_attrs(cls) - self._enter_cloth(ctx, cloth, parent, method=cls_attrs.method) - self.any_uri_to_parent(ctx, cls, inst, parent, name, **kwargs) - - @coroutine - def complex_to_cloth(self, ctx, cls, inst, cloth, parent, name=None, - as_attr=False, **kwargs): - fti = cls.get_flat_type_info(cls) - cls_attrs = self.get_cls_attrs(cls) - - # It's actually an odict but that's irrelevant here. - fti_check = dict(fti.items()) - elt_check = set() - - attrib = self._gen_attrib_dict(inst, fti) - self._enter_cloth(ctx, cloth, parent, attrib=attrib, - method=cls_attrs.method) - - for elt in self._get_elts(cloth, self.MRPC_ID): - self._actions_to_cloth(ctx, cls, inst, elt) - - if self._is_tagbag(cloth): - logger_c.debug("%r(%r) IS a tagbag", cloth, cloth.attrib) - elts = self._get_elts(cloth) - else: - logger_c.debug("%r(%r) is NOT a tagbag", cloth, cloth.attrib) - elts = self._get_outmost_elts(cloth) - - # Check for xmldata after entering the cloth. 
- as_data_field = cloth.attrib.get(self.DATA_ATTR_NAME, None) - if as_data_field is not None: - self._process_field(ctx, cls, inst, parent, cloth, fti, - as_data_field, as_attr, True, fti_check, elt_check, **kwargs) - - for elt in elts: - for k_attr, as_attr, as_data in ((self.ID_ATTR_NAME, False, False), - (self.ATTR_ATTR_NAME, True, False), - (self.DATA_ATTR_NAME, False, True)): - field_name = elt.attrib.get(k_attr, None) - if field_name is None: - continue - - if elt.tag == self.DATA_TAG_NAME: - as_data = True - - ret = self._process_field(ctx, cls, inst, parent, elt, fti, - field_name, as_attr=as_attr, as_data=as_data, - fti_check=fti_check, elt_check=elt_check, **kwargs) - - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as e: - try: - ret.throw(e) - except StopIteration: - pass - finally: - # cf below - if not (as_attr or as_data): - break - else: - # this is here so that attribute on complex model doesn't get - # mixed with in-line attr inside complex model. if an element - # has spyne-id, all other attrs are ignored and are processed - # by the object's serializer not its parent. 
- if not (as_attr or as_data): - break - - if len(fti_check) > 0: - logger_s.debug("No element found for the following fields: %r", - list(fti_check.keys())) - if len(elt_check) > 0: - logger_s.debug("No field found for element the following " - "elements: %r", list(elt_check)) - - def _process_field(self, ctx, cls, inst, parent, - elt, fti, field_name, as_attr, as_data, fti_check, elt_check, - **kwargs): - field_type = fti.get(field_name, None) - fti_check.pop(field_name, None) - - if field_type is None: - logger_c.warning("elt id %r not in %r", field_name, cls) - elt_check.add(field_name) - self._enter_cloth(ctx, elt, parent, skip=True) - return - - cls_attrs = self.get_cls_attrs(field_type) - if cls_attrs.exc: - logger_c.debug("Skipping elt id %r because " - "it was excluded", field_name) - return - - sub_name = cls_attrs.sub_name - if sub_name is None: - sub_name = field_name - - if issubclass(cls, Array): - # if cls is an array, inst should already be a sequence type - # (eg list), so there's no point in doing a getattr -- we will - # unwrap it and serialize it in the next round of to_cloth call. 
- val = inst - else: - val = getattr(inst, field_name, None) - - if as_data: - self._enter_cloth(ctx, elt, parent, skip=True, skip_dupe=True, - method=cls_attrs.method) - - return self.to_cloth(ctx, field_type, val, elt, parent, - name=sub_name, as_attr=as_attr, as_data=as_data, **kwargs) - - @coroutine - def array_to_cloth(self, ctx, cls, inst, cloth, parent, name=None, - **kwargs): - if isinstance(inst, PushBase): - while True: - sv = (yield) - ret = self.to_cloth(ctx, cls, sv, cloth, parent, - name=name, from_arr=True, **kwargs) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as e: - try: - ret.throw(e) - except StopIteration: - pass - - else: - sv = _NODATA - - for sv in inst: - was_empty = False - - ret = self.to_cloth(ctx, cls, sv, cloth, parent, - from_arr=True, name=name, **kwargs) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as e: - try: - ret.throw(e) - except StopIteration: - pass - - if sv is _NODATA: - # FIXME: what if min_occurs >= 1? - # fake entering the cloth to prevent it from being flushed as - # parent or sibling of another node later. - self._enter_cloth(ctx, cloth, parent, skip=True) diff --git a/libs_crutch/contrib/spyne/protocol/cloth/to_parent.py b/libs_crutch/contrib/spyne/protocol/cloth/to_parent.py deleted file mode 100644 index e8b974c..0000000 --- a/libs_crutch/contrib/spyne/protocol/cloth/to_parent.py +++ /dev/null @@ -1,522 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import logging -logger = logging.getLogger(__name__) - -from inspect import isgenerator -from spyne.util.six.moves.collections_abc import Iterable - -from lxml import etree, html -from lxml.builder import E - -from spyne.const.xml import NS_XSI, NS_SOAP11_ENV, SOAP11_ENV -from spyne.model import PushBase, ComplexModelBase, AnyXml, Fault, AnyDict, \ - AnyHtml, ModelBase, ByteArray, XmlData, Any, AnyUri, ImageUri, XmlAttribute - -from spyne.model.enum import EnumBase -from spyne.protocol import OutProtocolBase -from spyne.protocol.xml import SchemaValidationError -from spyne.util import coroutine, Break, six -from spyne.util.cdict import cdict -from spyne.util.etreeconv import dict_to_etree -from spyne.util.color import R, B - -from spyne.util.six import string_types - - -class ToParentMixin(OutProtocolBase): - def __init__(self, app=None, mime_type=None, ignore_uncap=False, - ignore_wrappers=False, polymorphic=True): - super(ToParentMixin, self).__init__(app=app, mime_type=mime_type, - ignore_uncap=ignore_uncap, ignore_wrappers=ignore_wrappers) - - self.polymorphic = polymorphic - self.use_global_null_handler = True - - self.serialization_handlers = cdict({ - ModelBase: self.model_base_to_parent, - - AnyXml: self.any_xml_to_parent, - AnyUri: self.any_uri_to_parent, - ImageUri: self.imageuri_to_parent, - AnyDict: self.any_dict_to_parent, - AnyHtml: self.any_html_to_parent, - Any: self.any_to_parent, - - Fault: self.fault_to_parent, - EnumBase: self.enum_to_parent, - ByteArray: self.byte_array_to_parent, - ComplexModelBase: self.complex_to_parent, - SchemaValidationError: self.schema_validation_error_to_parent, - }) - - def start_to_parent(self, 
ctx, cls, inst, parent, name, **kwargs): - """This is what subserialize calls""" - - # if no doctype was written, write it - if not ctx.outprot_ctx.doctype_written: - self.write_doctype(ctx, parent) - - return self.to_parent(ctx, cls, inst, parent, name, **kwargs) - - @staticmethod - def get_subprot(ctx, cls_attrs, nosubprot=False): - subprot = cls_attrs.prot - if subprot is not None and not nosubprot and not \ - (subprot in ctx.protocol.prot_stack): - return subprot - return None - - def to_subprot(self, ctx, cls, inst, parent, name, subprot, **kwargs): - return subprot.subserialize(ctx, cls, inst, parent, name, **kwargs) - - @coroutine - def to_parent(self, ctx, cls, inst, parent, name, nosubprot=False, **kwargs): - pushed = False - has_cloth = False - - prot_name = self.__class__.__name__ - - cls, switched = self.get_polymorphic_target(cls, inst) - cls_attrs = self.get_cls_attrs(cls) - if cls_attrs.out_type: - logger.debug("out_type from %r to %r", cls, cls_attrs.out_type) - cls = cls_attrs.out_type - cls_attrs = self.get_cls_attrs(cls) - - inst = self._sanitize(cls_attrs, inst) - - # if there is a subprotocol, switch to it - subprot = self.get_subprot(ctx, cls_attrs, nosubprot) - if subprot is not None: - logger.debug("Subprot from %r to %r", self, subprot) - ret = self.to_subprot(ctx, cls, inst, parent, name, subprot, - **kwargs) - else: - # if there is a class cloth, switch to it - has_cloth, cor_handle = self.check_class_cloths(ctx, cls, inst, - parent, name, **kwargs) - if has_cloth: - ret = cor_handle - - else: - # if instance is None, use the default factory to generate one - _df = cls_attrs.default_factory - if inst is None and callable(_df): - inst = _df() - - # if instance is still None, use the default value - if inst is None: - inst = cls_attrs.default - - # if instance is still None, use the global null handler to - # serialize it - if inst is None and self.use_global_null_handler: - identifier = prot_name + '.null_to_parent' - logger.debug("Writing 
%s using %s for %s.", name, - identifier, cls.get_type_name()) - self.null_to_parent(ctx, cls, inst, parent, name, **kwargs) - - return - - # if requested, ignore wrappers - if self.ignore_wrappers and issubclass(cls, ComplexModelBase): - cls, inst = self.strip_wrappers(cls, inst) - - # if cls is an iterable of values and it's not being iterated - # on, do it - from_arr = kwargs.get('from_arr', False) - # we need cls.Attributes here because we need the ACTUAL attrs - # that were set by the Array.__new__ - if not from_arr and cls.Attributes.max_occurs > 1: - ret = self.array_to_parent(ctx, cls, inst, parent, name, - **kwargs) - else: - # fetch the serializer for the class at hand - try: - handler = self.serialization_handlers[cls] - - except KeyError: - # if this protocol uncapable of serializing this class - if self.ignore_uncap: - logger.debug("Ignore uncap %r", name) - return # ignore it if requested - - # raise the error otherwise - logger.error("%r is missing handler for " - "%r for field %r", self, cls, name) - raise - - # push the instance at hand to instance stack. this makes it - # easier for protocols to make decisions based on parents - # of instances at hand. - ctx.outprot_ctx.inst_stack.append( (cls, inst, from_arr) ) - pushed = True - logger.debug("%s %r pushed %r using %r", - R("$"), self, cls, handler) - - # disabled for performance reasons - # from spyne.util.web import log_repr - # identifier = "%s.%s" % (prot_name, handler.__name__) - # log_str = log_repr(inst, cls, - # from_array=kwargs.get('from_arr', None)) - # logger.debug("Writing %s using %s for %s. Inst: %r", name, - # identifier, cls.get_type_name(), log_str) - - # finally, serialize the value. 
ret is the coroutine handle - ret = handler(ctx, cls, inst, parent, name, **kwargs) - - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - - except Break as e: - try: - ret.throw(e) - - except (Break, StopIteration, GeneratorExit): - pass - - finally: - if has_cloth: - self._close_cloth(ctx, parent) - - if pushed: - logger.debug("%s %r popped %r %r", B("$"), self, cls, - inst) - ctx.outprot_ctx.inst_stack.pop() - - else: - if has_cloth: - self._close_cloth(ctx, parent) - - if pushed: - logger.debug("%s %r popped %r %r", B("$"), self, cls, inst) - ctx.outprot_ctx.inst_stack.pop() - - @coroutine - def array_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - if inst is None: - inst = () - - ser_subprot = self.get_subprot(ctx, self.get_cls_attrs(cls)) - - # FIXME: it's sad that this function has the same code twice. - - if isinstance(inst, PushBase): - # this will be popped by pusher_try_close - ctx.pusher_stack.append(inst) - - i = 0 - - try: - while True: - sv = (yield) - - # disabled because to_parent is supposed to take care of this - #ctx.protocol.inst_stack.append((cls, sv, True)) - kwargs['from_arr'] = True - kwargs['array_index'] = i - - if ser_subprot is not None: - ser_subprot.column_table_before_row(ctx, cls, inst, - parent, name, **kwargs) - - ret = self.to_parent(ctx, cls, sv, parent, name, **kwargs) - - i += 1 - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - - except Break as e: - try: - ret.throw(e) - except StopIteration: - pass - - finally: - # disabled because to_parent is supposed to take care of this - #popped_val = ctx.protocol.inst_stack.pop() - #assert popped_val is sv - - if ser_subprot is not None: - ser_subprot.column_table_before_row(ctx, cls, - inst, parent, name, **kwargs) - else: - # disabled because to_parent is supposed to take care of this - #popped_val = ctx.protocol.inst_stack.pop() - #assert popped_val is sv - - if ser_subprot is not None: - 
ser_subprot.column_table_after_row(ctx, cls, inst, - parent, name, **kwargs) - - except Break: - # pusher is done with pushing - pass - - else: - assert isinstance(inst, Iterable), ("%r is not iterable" % (inst,)) - - for i, sv in enumerate(inst): - # disabled because to_parent is supposed to take care of this - #ctx.protocol.inst_stack.append((cls, sv, True) - kwargs['from_arr'] = True - kwargs['array_index'] = i - - if ser_subprot is not None: - ser_subprot.column_table_before_row(ctx, cls, inst, parent, - name, **kwargs) - - ret = self.to_parent(ctx, cls, sv, parent, name, **kwargs) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - - except Break as e: - try: - ret.throw(e) - except StopIteration: - pass - - finally: - # disabled because to_parent is supposed to take care of this - #popped_val = ctx.protocol.inst_stack.pop() - #assert popped_val is sv - - if ser_subprot is not None: - ser_subprot.column_table_after_row(ctx, cls, inst, - parent, name, **kwargs) - - else: - # disabled because to_parent is supposed to take care of this - #popped_val = ctx.protocol.inst_stack.pop() - #assert popped_val is sv - - if ser_subprot is not None: - ser_subprot.column_table_after_row(ctx, cls, inst, - parent, name, **kwargs) - - def not_supported(self, ctx, cls, *args, **kwargs): - if not self.ignore_uncap: - raise NotImplementedError("Serializing %r not supported!" 
% cls) - - def any_uri_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - self.model_base_to_parent(ctx, cls, inst, parent, name, **kwargs) - - def imageuri_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - self.model_base_to_parent(ctx, cls, inst, parent, name, **kwargs) - - def byte_array_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - parent.write(E(name, self.to_unicode(cls, inst, self.binary_encoding))) - - def model_base_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - parent.write(E(name, self.to_unicode(cls, inst))) - - def null_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - parent.write(E(name, **{'{%s}nil' % NS_XSI: 'true'})) - - def enum_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - self.model_base_to_parent(ctx, cls, str(inst), parent, name) - - def any_xml_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - if isinstance(inst, string_types): - inst = etree.fromstring(inst) - - parent.write(E(name, inst)) - - def any_html_to_unicode(self, cls, inst, **_): - if isinstance(inst, (str, six.text_type)): - inst = html.fromstring(inst) - - return inst - - def any_html_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - cls_attrs = self.get_cls_attrs(cls) - - if cls_attrs.as_string: - if not (isinstance(inst, str) or isinstance(inst, six.text_type)): - inst = html.tostring(inst) - - else: - if isinstance(inst, str) or isinstance(inst, six.text_type): - inst = html.fromstring(inst) - - parent.write(E(name, inst)) - - def any_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - parent.write(E(name, inst)) - - def any_dict_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - elt = E(name) - dict_to_etree(inst, elt) - parent.write(E(name, elt)) - - def _gen_sub_name(self, cls, cls_attrs, k, use_ns=None): - if self.use_ns is not None and use_ns is None: - use_ns = self.use_ns - - sub_ns = cls_attrs.sub_ns - if sub_ns is None: - sub_ns = cls.get_namespace() - - sub_name = 
cls_attrs.sub_name - if sub_name is None: - sub_name = k - - if use_ns: - name = "{%s}%s" % (sub_ns, sub_name) - else: - name = sub_name - - return name - - @coroutine - def _write_members(self, ctx, cls, inst, parent, use_ns=None, **kwargs): - if self.use_ns is not None and use_ns is None: - use_ns = self.use_ns - - for k, v in self.sort_fields(cls): - attr = self.get_cls_attrs(v) - if attr.exc: - prot_name = self.__class__.__name__ - logger.debug("%s: excluded for %s.", k, prot_name) - continue - - if issubclass(v, XmlAttribute): - continue - - try: # e.g. SqlAlchemy could throw NoSuchColumnError - subvalue = getattr(inst, k, None) - except: - subvalue = None - - # This is a tight loop, so enable this only when necessary. - # logger.debug("get %r(%r) from %r: %r" % (k, v, inst, subvalue)) - - sub_name = self._gen_sub_name(cls, attr, k, use_ns) - - if issubclass(v, XmlData): - subvalstr = self.to_unicode(v.type, subvalue) - if subvalstr is not None: - parent.write(subvalstr) - continue - - if subvalue is not None or attr.min_occurs > 0: - ret = self.to_parent(ctx, v, subvalue, parent, sub_name, - use_ns=use_ns, **kwargs) - if ret is not None: - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as b: - try: - ret.throw(b) - except StopIteration: - pass - - @coroutine - def _complex_to_parent_do(self, ctx, cls, inst, parent, **kwargs): - # parent.write(u"\u200c") # zero-width non-joiner - parent.write(" ") # FIXME: to force empty tags to be sent as - # instead of - ret = self._write_members(ctx, cls, inst, parent, **kwargs) - if ret is not None: - try: - while True: - sv2 = (yield) # may throw Break - ret.send(sv2) - - except Break: - try: - ret.throw(Break()) - except StopIteration: - pass - - def complex_to_parent(self, ctx, cls, inst, parent, name, - from_arr=False, use_ns=None, **kwargs): - if not from_arr: - inst = cls.get_serialization_instance(inst) - - attrib = self._gen_attrib_dict(inst, cls.get_flat_type_info(cls)) - - if 
self.skip_root_tag: - self._complex_to_parent_do(ctx, cls, inst, parent, - from_arr=from_arr, **kwargs) - - else: - if name is None or name == '': - name = self._gen_sub_name(cls, self.get_cls_attrs(cls), - cls.get_type_name(), use_ns) - logger.debug("name is empty, long live name: %s, cls: %r", - name, cls) - - with parent.element(name, attrib=attrib): - self._complex_to_parent_do(ctx, cls, inst, parent, - from_arr=from_arr, **kwargs) - - def fault_to_parent(self, ctx, cls, inst, parent, name): - PREF_SOAP_ENV = ctx.app.interface.prefmap[NS_SOAP11_ENV] - tag_name = SOAP11_ENV("Fault") - - with parent.element(tag_name): - parent.write( - E("faultcode", '%s:%s' % (PREF_SOAP_ENV, inst.faultcode)), - E("faultstring", inst.faultstring), - E("faultactor", inst.faultactor), - ) - - if isinstance(inst.detail, etree._Element): - parent.write(E.detail(inst.detail)) - - # add other nonstandard fault subelements with get_members_etree - self._write_members(ctx, cls, inst, parent) - # no need to track the returned generator because we expect no - # PushBase instance here. - - def schema_validation_error_to_parent(self, ctx, cls, inst, parent, **_): - PREF_SOAP_ENV = ctx.app.interface.prefmap[NS_SOAP11_ENV] - tag_name = SOAP11_ENV("Fault") - - with parent.element(tag_name): - parent.write( - E("faultcode", '%s:%s' % (PREF_SOAP_ENV, inst.faultcode)), - # HACK: Does anyone know a better way of injecting raw xml entities? - E("faultstring", html.fromstring(inst.faultstring).text), - E("faultactor", inst.faultactor), - ) - - if isinstance(inst.detail, etree._Element): - parent.write(E.detail(inst.detail)) - - # add other nonstandard fault subelements with get_members_etree - self._write_members(ctx, cls, inst, parent) - # no need to track the returned generator because we expect no - # PushBase instance here. 
diff --git a/libs_crutch/contrib/spyne/protocol/csv.py b/libs_crutch/contrib/spyne/protocol/csv.py deleted file mode 100644 index 915ca88..0000000 --- a/libs_crutch/contrib/spyne/protocol/csv.py +++ /dev/null @@ -1,136 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol.csv`` package contains the Csv output protocol. - -This protocol is here merely for illustration purposes. While it is in a -somewhat working state, it is not that easy to use. Expect a revamp in the -coming versions. 
-""" - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -import csv - -from spyne import ComplexModelBase -from spyne.util import six -from spyne.protocol.dictdoc import HierDictDocument - -if six.PY2: - from StringIO import StringIO -else: - from io import StringIO - - -def _complex_to_csv(prot, ctx): - cls, = ctx.descriptor.out_message._type_info.values() - - queue = StringIO() - - serializer, = cls._type_info.values() - - if issubclass(serializer, ComplexModelBase): - type_info = serializer.get_flat_type_info(serializer) - keys = [k for k, _ in prot.sort_fields(serializer)] - - else: - type_info = {serializer.get_type_name(): serializer} - keys = list(type_info.keys()) - - if ctx.out_error is not None: - writer = csv.writer(queue, dialect=csv.excel) - writer.writerow(['Error in generating the document']) - if ctx.out_error is not None: - for r in ctx.out_error.to_bytes_iterable(ctx.out_error): - writer.writerow([r]) - - yield queue.getvalue() - queue.truncate(0) - - else: - writer = csv.DictWriter(queue, dialect=csv.excel, fieldnames=keys) - if prot.header: - titles = {} - for k in keys: - v = type_info[k] - titles[k] = prot.trc(v, ctx.locale, k) - - writer.writerow(titles) - - yield queue.getvalue() - queue.truncate(0) - - if ctx.out_object[0] is not None: - for v in ctx.out_object[0]: - d = prot._to_dict_value(serializer, v, set()) - if six.PY2: - for k in d: - if isinstance(d[k], unicode): - d[k] = d[k].encode('utf8') - - writer.writerow(d) - yval = queue.getvalue() - yield yval - queue.truncate(0) - - -class Csv(HierDictDocument): - mime_type = 'text/csv' - text_based = True - - type = set(HierDictDocument.type) - type.add('csv') - - def __init__(self, app=None, validator=None, mime_type=None, - ignore_uncap=False, ignore_wrappers=True, complex_as=dict, - ordered=False, polymorphic=False, header=True): - - super(Csv, self).__init__(app=app, validator=validator, - mime_type=mime_type, ignore_uncap=ignore_uncap, 
- ignore_wrappers=ignore_wrappers, complex_as=complex_as, - ordered=ordered, polymorphic=polymorphic) - - self.header = header - - def create_in_document(self, ctx): - raise NotImplementedError() - - def serialize(self, ctx, message): - assert message in (self.RESPONSE, ) - - if ctx.out_object is None: - ctx.out_object = [] - - assert len(ctx.descriptor.out_message._type_info) == 1, \ - "CSV Serializer supports functions with exactly one return type: " \ - "%r" % ctx.descriptor.out_message._type_info - - def create_out_string(self, ctx): - ctx.out_string = _complex_to_csv(self, ctx) - if 'http' in ctx.transport.type: - ctx.transport.resp_headers['Content-Disposition'] = ( - 'attachment; filename=%s.csv;' % ctx.descriptor.name) - - def any_uri_to_unicode(self, cls, value, **_): - if isinstance(value, cls.Value): - value = value.text - return super(Csv, self).any_uri_to_unicode(cls, value, **_) diff --git a/libs_crutch/contrib/spyne/protocol/dictdoc/__init__.py b/libs_crutch/contrib/spyne/protocol/dictdoc/__init__.py deleted file mode 100644 index c0cee4d..0000000 --- a/libs_crutch/contrib/spyne/protocol/dictdoc/__init__.py +++ /dev/null @@ -1,137 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol.dictdoc`` module contains an abstract -protocol that deals with hierarchical and flat dicts as {in,out}_documents. - -Flattening -========== - -Plain HTTP does not support hierarchical key-value stores. Spyne makes plain -HTTP fake hierarchical dicts with two small hacks. - -Let's look at the following object hierarchy: :: - - class Inner(ComplexModel): - c = Integer - d = Array(Integer) - - class Outer(ComplexModel): - a = Integer - b = Inner - -For example, the ``Outer(a=1, b=Inner(c=2))`` object would correspond to the -following hierarchichal dict representation: :: - - {'a': 1, 'b': { 'c': 2 }} - -Here's what we do to deserialize the above object structure from a flat dict: - -1. Object hierarchies are flattened. e.g. the flat representation of the above - dict is: ``{'a': 1, 'b.c': 2}``. -2. Arrays of objects are sent using variables with array indexes in square - brackets. So the request with the following query object: :: - - {'a': 1, 'b.d[0]': 1, 'b.d[1]': 2}} - - ... corresponds to: :: - - {'a': 1, 'b': { 'd': [1,2] }} - - If we had: :: - - class Inner(ComplexModel): - c = Integer - - class Outer(ComplexModel): - a = Integer - b = Array(SomeObject) - - Or the following object: :: - - {'a': 1, 'b[0].c': 1, 'b[1].c': 2}} - - ... would correspond to: :: - - {'a': 1, 'b': [{ 'c': 1}, {'c': 2}]} - - ... which would deserialize as: :: - - Outer(a=1, b=[Inner(c=1), Inner(c=2)]) - -These hacks are both slower to process and bulkier on wire, so use class -hierarchies with HTTP only when performance is not that much of a concern. - -Cookies -======= - -Cookie headers are parsed and fields within HTTP requests are assigned to -fields in the ``in_header`` class, if defined. 
- -It's also possible to get the ``Cookie`` header intact by defining an -``in_header`` object with a field named ``Cookie`` (case sensitive). - -As an example, let's assume the following HTTP request: :: - - GET / HTTP/1.0 - Cookie: v1=4;v2=8 - (...) - -The keys ``v1`` and ``v2`` are passed to the instance of the ``in_header`` -class if it has fields named ``v1`` or ``v2``\\. - -Wrappers -======== - -Wrapper objects are an artifact of the Xml world, which don't really make sense -in other protocols. Let's look at the following object: :: - - v = Permission(application='app', feature='f1'), - -Here's how it would be serialized to XML: :: - - - app - f1 - - -With ``ignore_wrappers=True`` (which is the default) This gets serialized to -dict as follows: :: - - { - "application": "app", - "feature": "f1" - } - -When ``ignore_wrappers=False``, the same value/type combination would result in -the following dict: :: - - {"Permission": { - { - "application": "app", - "feature": "f1" - } - }, - -This could come in handy in case you don't know what type to expect. -""" - -from spyne.protocol.dictdoc._base import DictDocument -from spyne.protocol.dictdoc.hier import HierDictDocument -from spyne.protocol.dictdoc.simple import SimpleDictDocument diff --git a/libs_crutch/contrib/spyne/protocol/dictdoc/_base.py b/libs_crutch/contrib/spyne/protocol/dictdoc/_base.py deleted file mode 100644 index a10b121..0000000 --- a/libs_crutch/contrib/spyne/protocol/dictdoc/_base.py +++ /dev/null @@ -1,147 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logger = logging.getLogger(__name__) - -import re -RE_HTTP_ARRAY_INDEX = re.compile("\\[([0-9]+)\\]") - -from spyne.error import ValidationError - -from spyne.model import Fault, Array, AnyXml, AnyHtml, Uuid, DateTime, Date, \ - Time, Duration - -from spyne.protocol import ProtocolBase - - -class DictDocument(ProtocolBase): - """An abstract protocol that can use hierarchical or flat dicts as input - and output documents. - - Implement ``serialize()``, ``deserialize()``, ``create_in_document()`` and - ``create_out_string()`` to use this. - """ - - # flags to be used in tests - _decimal_as_string = False - _huge_numbers_as_string = False - text_based = False - - def __init__(self, app=None, validator=None, mime_type=None, - ignore_uncap=False, ignore_wrappers=True, complex_as=dict, - ordered=False, polymorphic=False, key_encoding=None): - - super(DictDocument, self).__init__(app, validator, mime_type, - ignore_uncap, ignore_wrappers) - - self.key_encoding = key_encoding - self.polymorphic = polymorphic - self.complex_as = complex_as - self.ordered = ordered - if ordered: - raise NotImplementedError('ordered=True') - - self.stringified_types = (DateTime, Date, Time, Uuid, Duration, - AnyXml, AnyHtml) - - def set_validator(self, validator): - """Sets the validator for the protocol. 
- - :param validator: one of ('soft', None) - """ - - if validator == 'soft' or validator is self.SOFT_VALIDATION: - self.validator = self.SOFT_VALIDATION - elif validator is None: - self.validator = None - else: - raise ValueError(validator) - - def decompose_incoming_envelope(self, ctx, message): - """Sets ``ctx.in_body_doc``, ``ctx.in_header_doc`` and - ``ctx.method_request_string`` using ``ctx.in_document``. - """ - - assert message in (ProtocolBase.REQUEST, ProtocolBase.RESPONSE) - - # set ctx.in_header - ctx.transport.in_header_doc = None # use an rpc protocol if you want headers. - - doc = ctx.in_document - - ctx.in_header_doc = None - ctx.in_body_doc = doc - - if message is ProtocolBase.REQUEST: - #logger.debug('\theader : %r', ctx.in_header_doc) - #logger.debug('\tbody : %r', ctx.in_body_doc) - - if not isinstance(doc, dict) or len(doc) != 1: - raise ValidationError(doc, - "Need a dictionary with exactly one key as method name.") - - if len(doc) == 0: - raise Fault("Client", "Empty request") - - ctx.method_request_string = self.gen_method_request_string(ctx) - - def gen_method_request_string(self, ctx): - """Uses information in context object to return a method_request_string. - - Returns a string in the form of "{namespaces}method name". 
- """ - - mrs, = ctx.in_body_doc.keys() - return '{%s}%s' % (self.app.interface.get_tns(), mrs) - - def deserialize(self, ctx, message): - raise NotImplementedError() - - def serialize(self, ctx, message): - raise NotImplementedError() - - def create_in_document(self, ctx, in_string_encoding=None): - raise NotImplementedError() - - def create_out_string(self, ctx, out_string_encoding='utf8'): - raise NotImplementedError() - - def _check_freq_dict(self, cls, d, fti=None): - if fti is None: - fti = cls.get_flat_type_info(cls) - - for k, v in fti.items(): - val = d[k] - - attrs = self.get_cls_attrs(v) - min_o, max_o = attrs.min_occurs, attrs.max_occurs - - if issubclass(v, Array) and v.Attributes.max_occurs == 1: - v, = v._type_info.values() - attrs = self.get_cls_attrs(v) - min_o, max_o = attrs.min_occurs, attrs.max_occurs - - if val < min_o: - raise ValidationError("%r.%s" % (cls, k), - '%%s member must occur at least %d times.' % min_o) - - elif val > max_o: - raise ValidationError("%r.%s" % (cls, k), - '%%s member must occur at most %d times.' % max_o) diff --git a/libs_crutch/contrib/spyne/protocol/dictdoc/hier.py b/libs_crutch/contrib/spyne/protocol/dictdoc/hier.py deleted file mode 100644 index c0ea199..0000000 --- a/libs_crutch/contrib/spyne/protocol/dictdoc/hier.py +++ /dev/null @@ -1,569 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import logging -logger = logging.getLogger(__name__) - -import re -RE_HTTP_ARRAY_INDEX = re.compile("\\[([0-9]+)\\]") - -from mmap import mmap -from collections import defaultdict -from spyne.util.six.moves.collections_abc import Iterable as AbcIterable - -from spyne.util import six -from spyne.error import ValidationError -from spyne.error import ResourceNotFoundError - -from spyne.model import ByteArray, File, Fault, ComplexModelBase, Array, Any, \ - AnyDict, Uuid, Unicode - -from spyne.protocol.dictdoc import DictDocument - - -class HierDictDocument(DictDocument): - """This protocol contains logic for protocols that serialize and deserialize - hierarchical dictionaries. Examples include: Json, MessagePack and Yaml. - - Implement ``create_in_document()`` and ``create_out_string()`` to use this. 
- """ - - VALID_UNICODE_SOURCES = (six.text_type, six.binary_type, memoryview, - mmap, bytearray) - - from_serstr = DictDocument.from_unicode - to_serstr = DictDocument.to_unicode - - def get_class_name(self, cls): - class_name = cls.get_type_name() - if not six.PY2: - if isinstance(class_name, bytes): - class_name = class_name.decode('utf8') - - return class_name - - def get_complex_as(self, attr): - if attr.complex_as is None: - return self.complex_as - return attr.complex_as - - def deserialize(self, ctx, message): - assert message in (self.REQUEST, self.RESPONSE) - - self.event_manager.fire_event('before_deserialize', ctx) - - if ctx.descriptor is None: - raise ResourceNotFoundError(ctx.method_request_string) - - # instantiate the result message - if message is self.REQUEST: - body_class = ctx.descriptor.in_message - elif message is self.RESPONSE: - body_class = ctx.descriptor.out_message - else: - raise ValueError(message) # should be impossible - - if body_class: - # assign raw result to its wrapper, result_message - doc = ctx.in_body_doc - - class_name = self.get_class_name(body_class) - if self.ignore_wrappers: - doc = doc.get(class_name, None) - - result_message = self._doc_to_object(ctx, body_class, doc, - self.validator) - ctx.in_object = result_message - - else: - ctx.in_object = [] - - self.event_manager.fire_event('after_deserialize', ctx) - - def _fault_to_doc(self, inst, cls=None): - if cls is None: - cls = Fault - - if self.complex_as is list: - return [cls.to_list(inst.__class__, inst, self)] - - elif self.complex_as is tuple: - fault_as_list = [Fault.to_list(inst.__class__, inst, self)] - return tuple(fault_as_list) - - else: - return [Fault.to_dict(inst.__class__, inst, self)] - - def serialize(self, ctx, message): - assert message in (self.REQUEST, self.RESPONSE) - - self.event_manager.fire_event('before_serialize', ctx) - - if ctx.out_error is not None: - ctx.out_document = self._fault_to_doc(ctx.out_error) - return - - # get the result 
message - if message is self.REQUEST: - out_type = ctx.descriptor.in_message - - elif message is self.RESPONSE: - out_type = ctx.descriptor.out_message - - else: - assert False - - if out_type is None: - return - - # assign raw result to its wrapper, result_message - if ctx.descriptor.is_out_bare(): - out_instance, = ctx.out_object - - else: - out_type_info = out_type.get_flat_type_info(out_type) - - # instantiate the result message - out_instance = out_type() - - for i, (k, v) in enumerate(out_type_info.items()): - attrs = self.get_cls_attrs(v) - out_instance._safe_set(k, ctx.out_object[i], v, attrs) - - ctx.out_document = self._object_to_doc(out_type, out_instance, set()), - - logger.debug("Retval: %r", ctx.out_document) - self.event_manager.fire_event('after_serialize', ctx) - - def validate(self, key, cls, inst): - if inst is None and self.get_cls_attrs(cls).nullable: - pass - - elif issubclass(cls, Unicode) and not isinstance(inst, - self.VALID_UNICODE_SOURCES): - raise ValidationError([key, inst]) - - def _from_dict_value(self, ctx, key, cls, inst, validator): - if validator is self.SOFT_VALIDATION: - self.validate(key, cls, inst) - - cls_attrs = self.get_cls_attrs(cls) - complex_as = self.get_complex_as(cls_attrs) - if complex_as is list or complex_as is tuple: - check_complex_as = (list, tuple) - else: - check_complex_as = complex_as - - # get native type - if issubclass(cls, File): - if isinstance(inst, check_complex_as): - cls = cls_attrs.type or cls - inst = self._parse(cls_attrs, inst) - retval = self._doc_to_object(ctx, cls, inst, validator) - - else: - retval = self.from_serstr(cls, inst, self.binary_encoding) - - else: - inst = self._parse(cls_attrs, inst) - - if issubclass(cls, (Any, AnyDict)): - retval = inst - - elif issubclass(cls, ComplexModelBase): - retval = self._doc_to_object(ctx, cls, inst, validator) - - else: - if cls_attrs.empty_is_none and inst in (u'', b''): - inst = None - - if (validator is self.SOFT_VALIDATION - and isinstance(inst, 
six.string_types) - and not cls.validate_string(cls, inst)): - raise ValidationError([key, inst]) - - if issubclass(cls, (ByteArray, Uuid)): - retval = self.from_serstr(cls, inst, self.binary_encoding) - - elif issubclass(cls, Unicode): - if isinstance(inst, bytearray): - retval = six.text_type(inst, - encoding=cls_attrs.encoding or 'ascii', - errors=cls_attrs.unicode_errors) - - elif isinstance(inst, memoryview): - # FIXME: memoryview needs a .decode() function to avoid - # needless copying here - retval = inst.tobytes().decode( - cls_attrs.encoding or 'ascii', - errors=cls_attrs.unicode_errors) - - elif isinstance(inst, mmap): - # FIXME: mmap needs a .decode() function to avoid - # needless copying here - retval = mmap[:].decode(cls_attrs.encoding, - errors=cls_attrs.unicode_errors) - - elif isinstance(inst, six.binary_type): - retval = self.unicode_from_bytes(cls, inst) - - else: - retval = inst - - else: - retval = self.from_serstr(cls, inst) - - # validate native type - if validator is self.SOFT_VALIDATION: - if not cls.validate_native(cls, retval): - raise ValidationError([key, retval]) - - return retval - - def _doc_to_object(self, ctx, cls, doc, validator=None): - if doc is None: - return [] - - if issubclass(cls, Any): - doc = self._cast(self.get_cls_attrs(cls), doc) - return doc - - if issubclass(cls, Array): - doc = self._cast(self.get_cls_attrs(cls), doc) - retval = [] - (serializer,) = cls._type_info.values() - - if not isinstance(doc, AbcIterable): - raise ValidationError(doc) - - for i, child in enumerate(doc): - retval.append(self._from_dict_value(ctx, i, serializer, child, - validator)) - - return retval - - cls_attrs = self.get_cls_attrs(cls) - if not self.ignore_wrappers and not cls_attrs.not_wrapped: - if not isinstance(doc, dict): - raise ValidationError(doc, "Wrapper documents must be dicts") - - if len(doc) == 0: - return None - - if len(doc) > 1: - raise ValidationError(doc, "There can be only one entry in a " - "wrapper dict") - - 
subclasses = cls.get_subclasses() - (class_name, doc), = doc.items() - if not six.PY2 and isinstance(class_name, bytes): - class_name = class_name.decode('utf8') - - if cls.get_type_name() != class_name and subclasses is not None \ - and len(subclasses) > 0: - for subcls in subclasses: - if subcls.get_type_name() == class_name: - break - else: - raise ValidationError(class_name, - "Class name %%r is not registered as a subclass of %r" % - cls.get_type_name()) - - if not self.issubclass(subcls, cls): - raise ValidationError(class_name, - "Class name %%r is not a subclass of %r" % - cls.get_type_name()) - cls = subcls - - inst = cls.get_deserialization_instance(ctx) - - # get all class attributes, including the ones coming from - # parent classes. - flat_type_info = cls.get_flat_type_info(cls) - if flat_type_info is None: - logger.critical("No flat_type_info found for type %r", cls) - raise TypeError(cls) - - # this is for validating cls.Attributes.{min,max}_occurs - frequencies = defaultdict(int) - - try: - items = doc.items() - except AttributeError: - # Input is not a dict, so we assume it's a sequence that we can pair - # with the incoming sequence with field names. - # TODO: cache this - try: - items = zip([k for k, v in flat_type_info.items() - if not self.get_cls_attrs(v).exc], doc) - except TypeError as e: - logger.error("Invalid document %r for %r", doc, cls) - raise ValidationError(doc) - - # parse input to set incoming data to related attributes. 
- for k, v in items: - if self.key_encoding is not None and isinstance(k, bytes): - try: - k = k.decode(self.key_encoding) - except UnicodeDecodeError: - raise ValidationError(k) - - member = flat_type_info.get(k, None) - if member is None: - member, k = flat_type_info.alt.get(k, (None, k)) - if member is None: - continue - - member_attrs = self.get_cls_attrs(member) - - if member_attrs.exc: - continue - - mo = member_attrs.max_occurs - if mo > 1: - subinst = getattr(inst, k, None) - if subinst is None: - subinst = [] - - for a in v: - subinst.append( - self._from_dict_value(ctx, k, member, a, validator)) - - else: - subinst = self._from_dict_value(ctx, k, member, v, validator) - - inst._safe_set(k, subinst, member, member_attrs) - - frequencies[k] += 1 - - attrs = self.get_cls_attrs(cls) - if validator is self.SOFT_VALIDATION and attrs.validate_freq: - self._check_freq_dict(cls, frequencies, flat_type_info) - - return inst - - def _object_to_doc(self, cls, inst, tags=None): - if inst is None: - return None - - if tags is None: - tags = set() - - retval = None - - if isinstance(inst, Fault): - retval = None - inst_id = id(inst) - if not (inst_id in tags): - retval = self._fault_to_doc(inst, cls) - tags.add(inst_id) - return retval - - cls_attrs = self.get_cls_attrs(cls) - if cls_attrs.exc: - return - - cls_orig = None - if cls_attrs.out_type is not None: - cls_orig = cls - cls = cls_attrs.out_type - # remember to do this if cls_attrs are needed below - # (currently cls_attrs is not used so we don't do this) - # cls_attrs = self.get_cls_attrs(cls) - - elif cls_attrs.type is not None: - cls_orig = cls - cls = cls_attrs.type - # remember to do this if cls_attrs are needed below - # (currently cls_attrs is not used so we don't do this) - # cls_attrs = self.get_cls_attrs(cls) - - if self.ignore_wrappers: - ti = getattr(cls, '_type_info', {}) - - while cls.Attributes._wrapper and len(ti) == 1: - # Wrappers are auto-generated objects that have exactly one - # child type. 
- key, = ti.keys() - if not issubclass(cls, Array): - inst = getattr(inst, key, None) - cls, = ti.values() - ti = getattr(cls, '_type_info', {}) - - # transform the results into a dict: - if cls.Attributes.max_occurs > 1: - if inst is not None: - retval = [] - - for subinst in inst: - if id(subinst) in tags: - # even when there is ONE already-serialized instance, - # we throw the whole thing away. - logger.debug("Throwing the whole array away because " - "found %d", id(subinst)) - - # this is DANGEROUS - #logger.debug("Said array: %r", inst) - - return None - - retval.append(self._to_dict_value(cls, subinst, tags, - cls_orig=cls_orig or cls)) - - else: - retval = self._to_dict_value(cls, inst, tags, - cls_orig=cls_orig or cls) - - return retval - - def _get_member_pairs(self, cls, inst, tags): - old_len = len(tags) - tags = tags | {id(inst)} - assert len(tags) > old_len, ("Offending instance: %r" % inst) - - for k, v in self.sort_fields(cls): - subattr = self.get_cls_attrs(v) - - if subattr.exc: - continue - - try: - subinst = getattr(inst, k, None) - - # to guard against e.g. 
sqlalchemy throwing NoSuchColumnError - except Exception as e: - logger.error("Error getting %r: %r" % (k, e)) - subinst = None - - if subinst is None: - subinst = subattr.default - else: - if id(subinst) in tags: - continue - - logger.debug("%s%r%r", " " * len(tags), k, v) - val = self._object_to_doc(v, subinst, tags) - min_o = subattr.min_occurs - - complex_as = self.get_complex_as(subattr) - if val is not None or min_o > 0 or complex_as is list: - sub_name = subattr.sub_name - if sub_name is None: - sub_name = k - - yield (sub_name, val) - - def _to_dict_value(self, cls, inst, tags, cls_orig=None): - if cls_orig is None: - cls_orig = cls - cls, switched = self.get_polymorphic_target(cls, inst) - cls_attrs = self.get_cls_attrs(cls) - - inst = self._sanitize(cls_attrs, inst) - - if issubclass(cls_orig, File): - cls_orig_attrs = self.get_cls_attrs(cls_orig) - if not isinstance(inst, cls_orig_attrs.type): - return self.to_serstr(cls_orig, inst, self.binary_encoding) - - retval = self._complex_to_doc(cls_orig_attrs.type, inst, tags) - complex_as = self.get_complex_as(cls_orig_attrs) - - if complex_as is dict and not self.ignore_wrappers: - retval = next(iter(retval.values())) - - return retval - - if issubclass(cls, (Any, AnyDict)): - return inst - - if issubclass(cls, Array): - st, = cls._type_info.values() - return self._object_to_doc(st, inst, tags) - - if issubclass(cls, ComplexModelBase): - return self._complex_to_doc(cls, inst, tags) - - if issubclass(cls, (ByteArray, Uuid)): - return self.to_serstr(cls, inst, self.binary_encoding) - - return self.to_serstr(cls, inst) - - def _complex_to_doc(self, cls, inst, tags): - cls_attrs = self.get_cls_attrs(cls) - sf = cls_attrs.simple_field - if sf is not None: - # we want this to throw when sf does not exist - subcls = cls.get_flat_type_info(cls)[sf] - - subinst = getattr(inst, sf, None) - - logger.debug("Render complex object %s to the value %r of its " - "field '%s'", cls.get_type_name(), subinst, sf) - - return 
self.to_unicode(subcls, subinst) - - cls_attr = self.get_cls_attrs(cls) - complex_as = self.get_complex_as(cls_attr) - if complex_as is list or \ - getattr(cls.Attributes, 'serialize_as', False) is list: - return list(self._complex_to_list(cls, inst, tags)) - return self._complex_to_dict(cls, inst, tags) - - def _complex_to_dict(self, cls, inst, tags): - inst = cls.get_serialization_instance(inst) - cls_attr = self.get_cls_attrs(cls) - complex_as = self.get_complex_as(cls_attr) - - if self.key_encoding is None: - d = complex_as(self._get_member_pairs(cls, inst, tags)) - - if (self.ignore_wrappers or cls_attr.not_wrapped) \ - and not bool(cls_attr.wrapper): - return d - - else: - if isinstance(cls_attr.wrapper, - (six.text_type, six.binary_type)): - return {cls_attr.wrapper: d} - else: - return {cls.get_type_name(): d} - else: - d = complex_as( (k.encode(self.key_encoding), v) for k, v in - self._get_member_pairs(cls, inst, tags) ) - - if (self.ignore_wrappers or cls_attr.not_wrapped) \ - and not bool(cls_attr.wrapper): - return d - - else: - if isinstance(cls_attr.wrapper, six.text_type): - return {cls_attr.wrapper.encode(self.key_encoding): d} - elif isinstance(cls_attr.wrapper, six.binary_type): - return {cls_attr.wrapper: d} - else: - return {cls.get_type_name().encode(self.key_encoding): d} - - def _complex_to_list(self, cls, inst, tags): - inst = cls.get_serialization_instance(inst) - - for k, v in self._get_member_pairs(cls, inst, tags): - yield v diff --git a/libs_crutch/contrib/spyne/protocol/dictdoc/simple.py b/libs_crutch/contrib/spyne/protocol/dictdoc/simple.py deleted file mode 100644 index 6b608bd..0000000 --- a/libs_crutch/contrib/spyne/protocol/dictdoc/simple.py +++ /dev/null @@ -1,404 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logger = logging.getLogger(__name__) - -import re -RE_HTTP_ARRAY_INDEX = re.compile("\\[([0-9]+)\\]") - -from collections import deque -from collections import defaultdict - -from spyne.util import six -from spyne.error import ValidationError - -from spyne.model import ByteArray, String, File, ComplexModelBase, Array, \ - SimpleModel, Any, AnyDict, Unicode - -from spyne.protocol.dictdoc import DictDocument - - -def _s2cmi(m, nidx): - """ - Sparse to contiguous mapping inserter. - - >>> m1={3:0, 4:1, 7:2} - >>> _s2cmi(m1, 5); m1 - 1 - {3: 0, 4: 1, 5: 2, 7: 3} - >>> _s2cmi(m1, 0); m1 - 0 - {0: 0, 3: 1, 4: 2, 5: 3, 7: 4} - >>> _s2cmi(m1, 8); m1 - 4 - {0: 0, 3: 1, 4: 2, 5: 3, 7: 4, 8: 5} - """ - nv = -1 - for i, v in m.items(): - if i >= nidx: - m[i] += 1 - elif v > nv: - nv = v - m[nidx] = nv + 1 - return nv + 1 - - -def _fill(inst_class, frequencies): - """This function initializes the frequencies dict with null values. If this - is not done, it won't be possible to catch missing elements when validating - the incoming document. 
- """ - - ctype_info = inst_class.get_flat_type_info(inst_class) - cfreq_key = inst_class, 0 - - for k, v in ctype_info.items(): - if v.Attributes.min_occurs > 0: - frequencies[cfreq_key][k] = 0 - - -class SimpleDictDocument(DictDocument): - """This protocol contains logic for protocols that serialize and deserialize - flat dictionaries. The only example as of now is Http. - """ - - def __init__(self, app=None, validator=None, mime_type=None, - ignore_uncap=False, ignore_wrappers=True, complex_as=dict, - ordered=False, hier_delim='.', strict_arrays=False): - super(SimpleDictDocument, self).__init__(app=app, validator=validator, - mime_type=mime_type, ignore_uncap=ignore_uncap, - ignore_wrappers=ignore_wrappers, complex_as=complex_as, - ordered=ordered) - - self.hier_delim = hier_delim - self.strict_arrays = strict_arrays - - def _to_native_values(self, cls, member, orig_k, k, v, req_enc, validator): - value = [] - - for v2 in v: - # some wsgi implementations pass unicode strings, some pass str - # strings. we get unicode here when we can and should. 
- if v2 is not None and req_enc is not None \ - and not issubclass(member.type, String) \ - and issubclass(member.type, Unicode) \ - and not isinstance(v2, six.text_type): - try: - v2 = v2.decode(req_enc) - except UnicodeDecodeError as e: - raise ValidationError(v2, "%r while decoding %%r" % e) - - # validate raw data (before deserialization) - try: - if (validator is self.SOFT_VALIDATION and not - member.type.validate_string(member.type, v2)): - raise ValidationError([orig_k, v2]) - - except TypeError: - raise ValidationError([orig_k, v2]) - - cls_attrs = self.get_cls_attrs(member.type) - v2 = self._parse(cls_attrs, v2) - - # deserialize to native type - if issubclass(member.type, File): - if isinstance(v2, File.Value): - native_v2 = v2 - else: - native_v2 = self.from_unicode(member.type, v2, - self.binary_encoding) - - elif issubclass(member.type, ByteArray): - native_v2 = self.from_unicode(member.type, v2, - self.binary_encoding) - else: - try: - native_v2 = self.from_unicode(member.type, v2) - except ValidationError as e: - ns = "%s.%s" % (cls.get_namespace(), cls.get_type_name()) - raise ValidationError(e.faultstring, - "Validation failed for %s.%s: %%s" % (ns, k)) - - # validate native data (after deserialization) - native_v2 = self._sanitize(cls_attrs, native_v2) - if validator is self.SOFT_VALIDATION: - if not member.type.validate_native(member.type, native_v2): - raise ValidationError([orig_k, v2]) - - value.append(native_v2) - - return value - - def simple_dict_to_object(self, ctx, doc, cls, validator=None, req_enc=None): - """Converts a flat dict to a native python object. - - See :func:`spyne.model.complex.ComplexModelBase.get_flat_type_info`. - """ - - if issubclass(cls, (Any, AnyDict)): - return doc - - if not issubclass(cls, ComplexModelBase): - raise NotImplementedError("Interestingly, deserializing non complex" - " types is not yet implemented. You can" - " use a ComplexModel to wrap that field." 
- " Otherwise, patches are welcome.") - - # this is for validating cls.Attributes.{min,max}_occurs - frequencies = defaultdict(lambda: defaultdict(int)) - if validator is self.SOFT_VALIDATION: - _fill(cls, frequencies) - - if issubclass(cls, Array): - # we need the wrapper object instance here as it's a root object - retval = cls.get_serialization_instance([]) - else: - retval = cls.get_deserialization_instance(ctx) - - simple_type_info = cls.get_simple_type_info_with_prot(cls, self, - hier_delim=self.hier_delim) - - logger.debug("Simple type info key: %r", simple_type_info.keys()) - - idxmap = defaultdict(dict) - for orig_k, v in sorted(doc.items(), key=lambda _k: _k[0]): - k = RE_HTTP_ARRAY_INDEX.sub("", orig_k) - - member = simple_type_info.get(k, None) - if member is None: - logger.debug("\tdiscarding field %r" % k) - continue - - if member.can_be_empty: - if v != ['empty']: # maybe raise a ValidationError instead? - # 'empty' is the only valid value at this point after all - continue - - assert issubclass(member.type, ComplexModelBase) - - if issubclass(member.type, Array): - value = [] - - elif self.get_cls_attrs(member.type).max_occurs > 1: - value = [] - - else: - value = [member.type.get_deserialization_instance(ctx)] - # do we have to ignore later assignments? they're illegal - # but not harmful. - else: - # extract native values from the list of strings in the flat dict - # entries. - value = self._to_native_values(cls, member, orig_k, k, v, - req_enc, validator) - - - # assign the native value to the relevant class in the nested object - # structure. 
- cinst = retval - ctype_info = cls.get_flat_type_info(cls) - ccls_attr = self.get_cls_attrs(cls) - value = self._cast(ccls_attr, value) - - idx, nidx = 0, 0 - pkey = member.path[0] - cfreq_key = cls, idx - - indexes = deque(RE_HTTP_ARRAY_INDEX.findall(orig_k)) - for pkey in member.path[:-1]: - nidx = 0 - ncls, ninst = ctype_info[pkey], getattr(cinst, pkey, None) - nattrs = self.get_cls_attrs(ncls) - if issubclass(ncls, Array): - ncls, = ncls._type_info.values() - - ncls_attrs = self.get_cls_attrs(ncls) - mo = ncls_attrs.max_occurs - if mo > 1: - if len(indexes) == 0: - nidx = 0 - else: - nidx = int(indexes.popleft()) - - if ninst is None: - ninst = [] - cinst._safe_set(pkey, ninst, ncls, nattrs) - - if self.strict_arrays: - if len(ninst) == 0: - newval = ncls.get_deserialization_instance(ctx) - ninst.append(newval) - frequencies[cfreq_key][pkey] += 1 - - if nidx > len(ninst): - raise ValidationError(orig_k, - "%%r Invalid array index %d." % idx) - if nidx == len(ninst): - ninst.append(ncls.get_deserialization_instance(ctx)) - frequencies[cfreq_key][pkey] += 1 - - cinst = ninst[nidx] - - else: - _m = idxmap[id(ninst)] - cidx = _m.get(nidx, None) - if cidx is None: - cidx = _s2cmi(_m, nidx) - newval = ncls.get_deserialization_instance(ctx) - ninst.insert(cidx, newval) - frequencies[cfreq_key][pkey] += 1 - cinst = ninst[cidx] - - assert cinst is not None, ninst - - else: - if ninst is None: - ninst = ncls.get_deserialization_instance(ctx) - cinst._safe_set(pkey, ninst, ncls, nattrs) - frequencies[cfreq_key][pkey] += 1 - - cinst = ninst - - cfreq_key = cfreq_key + (ncls, nidx) - idx = nidx - ctype_info = ncls.get_flat_type_info(ncls) - - frequencies[cfreq_key][member.path[-1]] += len(value) - - member_attrs = self.get_cls_attrs(member.type) - if member_attrs.max_occurs > 1: - _v = getattr(cinst, member.path[-1], None) - is_set = True - if _v is None: - is_set = cinst._safe_set(member.path[-1], value, - member.type, member_attrs) - else: - _v.extend(value) - - set_skip 
= 'set ' if is_set else 'SKIP' - logger.debug("\t%s arr %r(%r) = %r" % - (set_skip, member.path, pkey, value)) - - else: - is_set = cinst._safe_set(member.path[-1], value[0], - member.type, member_attrs) - - set_skip = 'set ' if is_set else 'SKIP' - logger.debug("\t%s val %r(%r) = %r" % - (set_skip, member.path, pkey, value[0])) - - if validator is self.SOFT_VALIDATION: - logger.debug("\tvalidate_freq: \n%r", frequencies) - for k, d in frequencies.items(): - for i, path_cls in enumerate(k[:-1:2]): - attrs = self.get_cls_attrs(path_cls) - if not attrs.validate_freq: - logger.debug("\t\tskip validate_freq: %r", k[:i*2]) - break - else: - path_cls = k[-2] - logger.debug("\t\tdo validate_freq: %r", k) - self._check_freq_dict(path_cls, d) - - if issubclass(cls, Array): - # unwrap the request object - array_name, = cls._type_info.keys() - retval = getattr(retval, array_name) - - return retval - - def object_to_simple_dict(self, cls, inst, retval=None, - prefix=None, subinst_eater=lambda prot, v, t: v, tags=None): - """Converts a native python object to a flat dict. - - See :func:`spyne.model.complex.ComplexModelBase.get_flat_type_info`. 
- """ - - if retval is None: - retval = {} - - if prefix is None: - prefix = [] - - if inst is None and self.get_cls_attrs(cls).min_occurs == 0: - return retval - - if tags is None: - tags = set([id(inst)]) - else: - if id(inst) in tags: - return retval - - if issubclass(cls, ComplexModelBase): - fti = cls.get_flat_type_info(cls) - - for k, v in fti.items(): - new_prefix = list(prefix) - cls_attrs = self.get_cls_attrs(v) - sub_name = cls_attrs.sub_name - if sub_name is None: - sub_name = k - new_prefix.append(sub_name) - subinst = getattr(inst, k, None) - - if (issubclass(v, Array) or v.Attributes.max_occurs > 1) and \ - subinst is not None: - if issubclass(v, Array): - subtype, = v._type_info.values() - else: - subtype = v - - # for simple types, the same key is repeated with multiple - # values - if issubclass(subtype, SimpleModel): - key = self.hier_delim.join(new_prefix) - l = [] - for ssv in subinst: - l.append(subinst_eater(self, ssv, subtype)) - retval[key] = l - - else: - # for complex types, brackets are used for each value. 
- last_prefix = new_prefix[-1] - i = -1 - for i, ssv in enumerate(subinst): - new_prefix[-1] = '%s[%d]' % (last_prefix, i) - self.object_to_simple_dict(subtype, ssv, - retval, new_prefix, - subinst_eater=subinst_eater, tags=tags) - - if i == -1: - key = self.hier_delim.join(new_prefix) - retval[key] = 'empty' - - else: - self.object_to_simple_dict(v, subinst, retval, new_prefix, - subinst_eater=subinst_eater, tags=tags) - - else: - key = self.hier_delim.join(prefix) - - if key in retval: - raise ValueError("%r.%s conflicts with previous value %r" % - (cls, key, retval[key])) - - retval[key] = subinst_eater(self, inst, cls) - - return retval diff --git a/libs_crutch/contrib/spyne/protocol/html/__init__.py b/libs_crutch/contrib/spyne/protocol/html/__init__.py deleted file mode 100644 index 43b6c5c..0000000 --- a/libs_crutch/contrib/spyne/protocol/html/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -""" -This package contains some basic html output protocols. 
-""" - -from spyne.protocol.html._base import HtmlBase -from spyne.protocol.html._base import HtmlCloth -from spyne.protocol.html._base import parse_html_fragment_file -from spyne.protocol.html.table import HtmlColumnTable -from spyne.protocol.html.table import HtmlRowTable -from spyne.protocol.html.microformat import HtmlMicroFormat -from spyne.protocol.html.addtl import PrettyFormat -from spyne.protocol.html.addtl import BooleanListProtocol - - -# FIXME: REMOVE ME -def translate(cls, locale, default): - retval = None - if cls.Attributes.translations is not None: - retval = cls.Attributes.translations.get(locale, None) - if retval is None: - return default - return retval diff --git a/libs_crutch/contrib/spyne/protocol/html/_base.py b/libs_crutch/contrib/spyne/protocol/html/_base.py deleted file mode 100644 index 142118e..0000000 --- a/libs_crutch/contrib/spyne/protocol/html/_base.py +++ /dev/null @@ -1,251 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logger = logging.getLogger(__name__) - -from collections import defaultdict - -from lxml import etree, html -from lxml.html.builder import E - -from spyne.util import coroutine, Break, six -from spyne.util.oset import oset -from spyne.util.etreeconv import dict_to_etree - -from spyne.protocol.cloth import XmlCloth -from spyne.protocol.cloth._base import XmlClothProtocolContext - - -def parse_html_fragment_file(T_FILES): - elt = html.fromstring(open(T_FILES).read()) - elt.getparent().remove(elt) - return elt - - -class HtmlClothProtocolContext(XmlClothProtocolContext): - def __init__(self, parent, transport, type=None): - super(HtmlClothProtocolContext, self).__init__(parent, transport, type) - - self.assets = [] - self.eltstack = defaultdict(list) - self.ctxstack = defaultdict(list) - self.rootstack = oset() - self.tags = set() - self.objcache = dict() - - # these are supposed to be for neurons.base.screen.ScreenBase subclasses - self.screen = None - self.prev_view = None - self.next_view = None - - -class HtmlCloth(XmlCloth): - mime_type = 'text/html; charset=UTF-8' - - def __init__(self, app=None, encoding='utf8', - mime_type=None, ignore_uncap=False, ignore_wrappers=False, - cloth=None, cloth_parser=None, polymorphic=True, - strip_comments=True, hier_delim='.', doctype=None): - - super(HtmlCloth, self).__init__(app=app, encoding=encoding, - mime_type=mime_type, ignore_uncap=ignore_uncap, - ignore_wrappers=ignore_wrappers, cloth=cloth, - cloth_parser=cloth_parser, polymorphic=polymorphic, - strip_comments=strip_comments) - - self.hier_delim = hier_delim - self.doctype = doctype - self.default_method = 'html' - - def _parse_file(self, file_name, cloth_parser): - if cloth_parser is None: - cloth_parser = html.HTMLParser() - - 
cloth = html.parse(file_name, parser=cloth_parser) - return cloth.getroot() - - def docfile(self, *args, **kwargs): - logger.debug("Starting file with %r %r", args, kwargs) - return etree.htmlfile(*args, **kwargs) - - def get_context(self, parent, transport): - return HtmlClothProtocolContext(parent, transport) - - @staticmethod - def get_class_cloth(cls): - return cls.Attributes._html_cloth - - @staticmethod - def get_class_root_cloth(cls): - return cls.Attributes._html_root_cloth - - def dict_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - parent.write(repr(inst)) - - @staticmethod - def add_html_attr(attr_name, attr_dict, class_name): - if attr_name in attr_dict: - attr_dict[attr_name] = ' '.join( - (attr_dict.get('class', ''), class_name)) - else: - attr_dict[attr_name] = class_name - - @staticmethod - def add_style(attr_dict, data): - style = attr_dict.get('style', None) - - if style is not None: - attr_dict['style'] = ';'.join(style, data) - - else: - attr_dict['style'] = data - - @staticmethod - def selsafe(s): - return s.replace('[', '').replace(']', '').replace('.', '__') - - @coroutine - def complex_to_parent(self, ctx, cls, inst, parent, name, use_ns=False, - **kwargs): - inst = cls.get_serialization_instance(inst) - - # TODO: Put xml attributes as well in the below element() call. - with parent.element(name): - ret = self._write_members(ctx, cls, inst, parent, use_ns=False, - **kwargs) - if ret is not None: - try: - while True: - sv2 = (yield) # may throw Break - ret.send(sv2) - - except Break: - try: - ret.throw(Break()) - except StopIteration: - pass - - def gen_anchor(self, cls, inst, name, anchor_class=None): - assert name is not None - cls_attrs = self.get_cls_attrs(cls) - - href = getattr(inst, 'href', None) - if href is None: # this is not a AnyUri.Value instance. 
- href = inst - - content = None - text = cls_attrs.text - - else: - content = getattr(inst, 'content', None) - text = getattr(inst, 'text', None) - if text is None: - text = cls_attrs.text - - if anchor_class is None: - anchor_class = cls_attrs.anchor_class - - if text is None: - text = name - - retval = E.a(text) - - if href is not None: - retval.attrib['href'] = href - - if anchor_class is not None: - retval.attrib['class'] = anchor_class - - if content is not None: - retval.append(content) - - return retval - - def any_uri_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - retval = self.gen_anchor(cls, inst, name) - parent.write(retval) - - def imageuri_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - # with ImageUri, content is ignored. - href = getattr(inst, 'href', None) - if href is None: # this is not a AnyUri.Value instance. - href = inst - text = getattr(cls.Attributes, 'text', None) - - else: - text = getattr(inst, 'text', None) - if text is None: - text = getattr(cls.Attributes, 'text', None) - - retval = E.img(src=href) - if text is not None: - retval.attrib['alt'] = text - - parent.write(retval) - - def byte_array_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - ret = self.to_unicode(cls, inst, self.binary_encoding) - - if ret is not None: - parent.write(ret) - - def model_base_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - ret = self.to_unicode(cls, inst) - - if ret is not None: - parent.write(ret) - - def null_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - pass - - def any_xml_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - if isinstance(inst, (six.text_type, six.binary_type)): - inst = etree.fromstring(inst) - - parent.write(inst) - - def any_html_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - cls_attrs = self.get_cls_attrs(cls) - - if cls_attrs.as_string: - if not (isinstance(inst, str) or isinstance(inst, six.text_type)): - inst = html.tostring(inst) - - else: - if 
isinstance(inst, str) or isinstance(inst, six.text_type): - inst = html.fromstring(inst) - - parent.write(inst) - - def any_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - parent.write(inst) - - def any_dict_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - elt = E('foo') - dict_to_etree(inst, elt) - - parent.write(elt[0]) - - def fault_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - self.complex_to_parent(ctx, cls, inst, parent, name, **kwargs) - - -# FIXME: Deprecated -HtmlBase = HtmlCloth diff --git a/libs_crutch/contrib/spyne/protocol/html/addtl.py b/libs_crutch/contrib/spyne/protocol/html/addtl.py deleted file mode 100644 index c89af8a..0000000 --- a/libs_crutch/contrib/spyne/protocol/html/addtl.py +++ /dev/null @@ -1,52 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from lxml.builder import E -from pprint import pformat - -from spyne import Boolean -from spyne.protocol.html import HtmlBase - - -class PrettyFormat(HtmlBase): - def to_parent(self, ctx, cls, inst, parent, name, **kwargs): - parent.write(E.pre(pformat(inst))) - - -class BooleanListProtocol(HtmlBase): - def __init__(self, nothing=None): - super(BooleanListProtocol, self).__init__() - - self.nothing = nothing - - def to_parent(self, ctx, cls, inst, parent, name, nosubprot=False, **kwargs): - if inst is None: - return - - wrote_nothing = True - for k, v in cls.get_flat_type_info(cls).items(): - if not issubclass(v, Boolean): - continue - - if getattr(inst, k, False): - parent.write(E.p(self.trc(cls, ctx.locale, k))) - wrote_nothing = False - - if wrote_nothing and self.nothing is not None: - parent.write(E.p(self.nothing)) diff --git a/libs_crutch/contrib/spyne/protocol/html/microformat.py b/libs_crutch/contrib/spyne/protocol/html/microformat.py deleted file mode 100644 index b78e1e1..0000000 --- a/libs_crutch/contrib/spyne/protocol/html/microformat.py +++ /dev/null @@ -1,197 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from inspect import isgenerator - -from lxml.html.builder import E - -from spyne.util import six, coroutine, Break -from spyne.util.cdict import cdict - -from spyne.model import Array, AnyHtml, ComplexModelBase, ByteArray, \ - ModelBase, PushBase, ImageUri, AnyUri - -from spyne.protocol.html import HtmlBase - - -class HtmlMicroFormat(HtmlBase): - def __init__(self, app=None, ignore_uncap=False, ignore_wrappers=False, - cloth=None, cloth_parser=None, polymorphic=True, - doctype="", - root_tag='div', child_tag='div', field_name_attr='class', - field_name_tag=None, field_name_class='field_name', - before_first_root=None): - """Protocol that returns the response object according to the "html - microformat" specification. See - https://en.wikipedia.org/wiki/Microformats for more info. - - The simple flavour is like the XmlDocument protocol, but returns data in -
or tags. - - :param app: A spyne.application.Application instance. - :param root_tag: The type of the root tag that encapsulates the return - data. - :param child_tag: The type of the tag that encapsulates the fields of - the returned object. - :param field_name_attr: The name of the attribute that will contain the - field names of the complex object children. - """ - - super(HtmlMicroFormat, self).__init__(app=app, - ignore_uncap=ignore_uncap, ignore_wrappers=ignore_wrappers, - cloth=cloth, cloth_parser=cloth_parser, polymorphic=polymorphic, - hier_delim=None, doctype=doctype) - - if six.PY2: - text_type = basestring - else: - text_type = str - - assert isinstance(root_tag, text_type) - assert isinstance(child_tag, text_type) - assert isinstance(field_name_attr, text_type) - assert field_name_tag is None or isinstance(field_name_tag, text_type) - - self.root_tag = root_tag - self.child_tag = child_tag - self.field_name_attr = field_name_attr - self.field_name_tag = field_name_tag - if field_name_tag is not None: - self.field_name_tag = E(field_name_tag) - self._field_name_class = field_name_class - if before_first_root is not None: - self.event_manager.add_listener("before_first_root", - before_first_root) - - self.serialization_handlers = cdict({ - Array: self.array_to_parent, - AnyUri: self.any_uri_to_parent, - AnyHtml: self.any_html_to_parent, - ImageUri: self.imageuri_to_parent, - ByteArray: self.not_supported, - ModelBase: self.model_base_to_parent, - ComplexModelBase: self.complex_model_to_parent, - }) - - def anyuri_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - retval = self.gen_anchor(cls, inst, parent) - retval.attrib[self.field_name_attr] = name - parent.write(retval) - - def model_base_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - retval = E(self.child_tag, **{self.field_name_attr: name}) - data_str = self.to_unicode(cls, inst) - - if self.field_name_tag is not None: - field_name = cls.Attributes.translations.get( name) - 
field_name_tag = self.field_name_tag(field_name, - **{'class':self._field_name_class}) - field_name_tag.tail = data_str - retval.append(field_name_tag) - - else: - retval.text = data_str - - parent.write(retval) - - def start_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - """This is what subserialize calls""" - - # if no doctype was written, write it - if not getattr(ctx.outprot_ctx, 'doctype_written', False): - if len(ctx.protocol.prot_stack) == 1: - if self.doctype is not None: - parent.write_doctype(self.doctype) - - # set this to true as no doctype can be written after this - # stage anyway. - ctx.outprot_ctx.doctype_written = True - - return self.to_parent(ctx, cls, inst, parent, name, **kwargs) - - @coroutine - def complex_model_to_parent(self, ctx, cls, inst, parent, name, - use_ns=False, **kwargs): - attrs = {self.field_name_attr: name} - - if not getattr(ctx.protocol, 'before_first_root', False): - self.event_manager.fire_event("before_first_root", - ctx, cls, inst, parent, name, **kwargs) - ctx.protocol.before_first_root = True - - with parent.element(self.root_tag, attrs): - ret = self._write_members(ctx, cls, inst, parent, use_ns=False, - **kwargs) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as e: - try: - ret.throw(e) - except StopIteration: - pass - - @coroutine - def array_to_parent(self, ctx, cls, inst, parent, name, from_arr=False, **kwargs): - attrs = {self.field_name_attr: name} - - if issubclass(cls, Array): - cls, = cls._type_info.values() - - name = cls.get_type_name() - with parent.element(self.root_tag, attrs): - if isinstance(inst, PushBase): - while True: - sv = (yield) - ret = self.to_parent(ctx, cls, sv, parent, name, - from_arr=True, **kwargs) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as e: - try: - ret.throw(e) - except StopIteration: - pass - - else: - for sv in inst: - ret = self.to_parent(ctx, cls, sv, parent, name, - 
from_arr=True, **kwargs) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as e: - try: - ret.throw(e) - except StopIteration: - pass - - def null_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - return [ E(self.child_tag, **{self.field_name_attr: name}) ] - -# FIXME: yuck. -from spyne.protocol.cloth import XmlCloth -XmlCloth.HtmlMicroFormat = HtmlMicroFormat diff --git a/libs_crutch/contrib/spyne/protocol/html/table/__init__.py b/libs_crutch/contrib/spyne/protocol/html/table/__init__.py deleted file mode 100644 index 0c52d57..0000000 --- a/libs_crutch/contrib/spyne/protocol/html/table/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ - -from spyne.protocol.html.table._base import HtmlTableBase -from spyne.protocol.html.table.row import HtmlRowTable -from spyne.protocol.html.table.column import HtmlColumnTable diff --git a/libs_crutch/contrib/spyne/protocol/html/table/_base.py b/libs_crutch/contrib/spyne/protocol/html/table/_base.py deleted file mode 100644 index 3471419..0000000 --- a/libs_crutch/contrib/spyne/protocol/html/table/_base.py +++ /dev/null @@ -1,69 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -from spyne.protocol.html import HtmlBase - - -class HtmlTableBase(HtmlBase): - def __init__(self, app=None, ignore_uncap=False, ignore_wrappers=True, - cloth=None, cloth_parser=None, header=True, table_name_attr='class', - table_name=None, table_class=None, border=0, row_class=None, - field_name_attr='class', field_type_name_attr='class', - cell_class=None, header_cell_class=None, polymorphic=True, - hier_delim='.', doctype=None, link_gen=None, mrpc_delim_text='|', - table_width=None): - - super(HtmlTableBase, self).__init__(app=app, - ignore_uncap=ignore_uncap, ignore_wrappers=ignore_wrappers, - cloth=cloth, cloth_parser=cloth_parser, polymorphic=polymorphic, - hier_delim=hier_delim, doctype=doctype) - - self.header = header - self.table_name_attr = table_name_attr - self.table_name = table_name - self.field_name_attr = field_name_attr - self.field_type_name_attr = field_type_name_attr - self.border = border - self.row_class = row_class - self.cell_class = cell_class - self.header_cell_class = header_cell_class - self.link_gen = link_gen - self.table_class = table_class - self.table_width = table_width - self.mrpc_delim_text = mrpc_delim_text - - def null_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - pass - - def add_field_attrs(self, attr_dict, name, cls): - if self.field_name_attr: - self.add_html_attr(self.field_name_attr, attr_dict, name) - - if self.field_type_name_attr: - types = set() - c = cls - while c is not None: - if c.Attributes._explicit_type_name or c.__extends__ is None: - types.add(c.get_type_name()) - - c = c.__extends__ - - self.add_html_attr(self.field_type_name_attr, attr_dict, - ' '.join(types)) diff --git a/libs_crutch/contrib/spyne/protocol/html/table/column.py 
b/libs_crutch/contrib/spyne/protocol/html/table/column.py deleted file mode 100644 index 36b5f50..0000000 --- a/libs_crutch/contrib/spyne/protocol/html/table/column.py +++ /dev/null @@ -1,336 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import logging -logger = logging.getLogger(__name__) - -from inspect import isgenerator - -from lxml.html.builder import E - -from spyne import ModelBase, ComplexModelBase, Array -from spyne.util import coroutine, Break, urlencode -from spyne.util.oset import oset -from spyne.protocol.html.table import HtmlTableBase - - -class HtmlColumnTableRowProtocol(object): - def column_table_gen_header(self, ctx, cls, parent, name, **kwargs): - return False - - def column_table_before_row(self, ctx, cls, inst, parent, name, **kwargs): - pass - - def column_table_after_row(self, ctx, cls, inst, parent, name, **kwargs): - pass - - - -class HtmlColumnTable(HtmlTableBase, HtmlColumnTableRowProtocol): - """Protocol that returns the response object as a html table. - - Returns one record per table row in a table that has as many columns as - field names, just like a regular spreadsheet. 
- - This is not quite unlike the HtmlMicroFormatprotocol, but returns data - as a html table using the tag. - - Generally used to serialize Array()'s of ComplexModel objects. If an - array has prot=HtmlColumnTable, its serializer (what's inside the Array( )) - must implement HtmlColumnTableRowProtocol interface. - - :param app: A spyne.application.Application instance. - :param header: Boolean value to determine whether to show field - names in the beginning of the table or not. Defaults to True. Set to - False to skip headers. - :param table_name_attr: The name of the attribute that will contain the - response name of the complex object in the table tag. Set to None to - disable. - :param table_name: When not none, overrides what goes in `table_name_attr`. - :param table_class: When not none, specifies what goes in `class` attribute - in the `
` tag. Table name gets appended when - `table_name_attr == 'class'` - :param field_name_attr: The name of the attribute that will contain the - field names of the complex object children for every table cell. Set - to None to disable. - :param row_class: value that goes inside the - :param cell_class: value that goes inside the tags are generated before exiting the
- :param header_cell_class: value that goes inside the - :param mrpc_delim_text: The text that goes between mrpc calls. - """ - - def __init__(self, *args, **kwargs): - before_table = kwargs.pop('before_table', None) - - super(HtmlColumnTable, self).__init__(*args, **kwargs) - - self.serialization_handlers.update({ - ModelBase: self.model_base_to_parent, - ComplexModelBase: self.complex_model_to_parent, - Array: self.array_to_parent, - }) - - if before_table is not None: - self.event_manager.add_listener("before_table", before_table) - - def model_base_to_parent(self, ctx, cls, inst, parent, name, - from_arr=False, **kwargs): - inst_str = '' - if inst is not None: - inst_str = self.to_unicode(cls, inst) - - if from_arr: - td_attrs = {} - - self.add_field_attrs(td_attrs, name, cls) - - parent.write(E.tr(E.td(inst_str, **td_attrs))) - - else: - parent.write(inst_str) - - @coroutine - def _gen_row(self, ctx, cls, inst, parent, name, from_arr=False, - array_index=None, **kwargs): - - # because HtmlForm* protocols don't use the global null handler, it's - # possible for null values to reach here. - if inst is None: - return - - logger.debug("Generate row for %r", cls) - - mrpc_delim_elt = '' - if self.mrpc_delim_text is not None: - mrpc_delim_elt = E.span(self.mrpc_delim_text, - **{'class': 'mrpc-delimiter'}) - mrpc_delim_elt.tail = ' ' - - with parent.element('tr'): - for k, v in self.sort_fields(cls): - cls_attr = self.get_cls_attrs(v) - if cls_attr.exc: - logger.debug("\tExclude table cell %r type %r for %r", - k, v, cls) - continue - - try: - sub_value = getattr(inst, k, None) - except: # e.g. 
SQLAlchemy could throw NoSuchColumnError - sub_value = None - - sub_name = cls_attr.sub_name - if sub_name is None: - sub_name = k - - if self.hier_delim is not None: - if array_index is None: - sub_name = "%s%s%s" % (name, self.hier_delim, sub_name) - else: - sub_name = "%s[%d]%s%s" % (name, array_index, - self.hier_delim, sub_name) - - logger.debug("\tGenerate table cell %r type %r for %r", - sub_name, v, cls) - - td_attrs = {} - - self.add_field_attrs(td_attrs, cls_attr.sub_name or k, v) - - if cls_attr.hidden: - self.add_style(td_attrs, 'display:None') - - with parent.element('td', td_attrs): - ret = self.to_parent(ctx, v, sub_value, parent, sub_name, - from_arr=from_arr, array_index=array_index, **kwargs) - - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - - except Break as b: - try: - ret.throw(b) - except StopIteration: - pass - - m = cls.Attributes.methods - if m is not None and len(m) > 0: - td_attrs = {'class': 'mrpc-cell'} - - with parent.element('td', td_attrs): - first = True - - for mn, md in self._methods(ctx, cls, inst): - if first: - first = False - elif mrpc_delim_elt is not None: - parent.write(" ") - parent.write(mrpc_delim_elt) - - pd = {} - for k, v in self.sort_fields(cls): - if getattr(v.Attributes, 'primary_key', None): - r = self.to_unicode(v, getattr(inst, k, None)) - if r is not None: - pd[k] = r - - params = urlencode(pd) - - mdid2key = ctx.app.interface.method_descriptor_id_to_key - href = mdid2key[id(md)].rsplit("}", 1)[-1] - text = md.translate(ctx.locale, - md.in_message.get_type_name()) - parent.write(E.a( - text, - href="%s?%s" % (href, params), - **{'class': 'mrpc-operation'} - )) - - logger.debug("Generate row for %r done.", cls) - self.extend_data_row(ctx, cls, inst, parent, name, - array_index=array_index, **kwargs) - - def _gen_thead(self, ctx, cls, parent, name): - logger.debug("Generate header for %r", cls) - - with parent.element('thead'): - with parent.element('tr'): - if issubclass(cls, 
ComplexModelBase): - fti = self.sort_fields(cls) - for k, v in fti: - cls_attr = self.get_cls_attrs(v) - if cls_attr.exc: - continue - - th_attrs = {} - self.add_field_attrs(th_attrs, k, cls) - - if cls_attr.hidden: - self.add_style(th_attrs, 'display:None') - - header_name = self.trc(v, ctx.locale, k) - parent.write(E.th(header_name, **th_attrs)) - - m = cls.Attributes.methods - if m is not None and len(m) > 0: - th_attrs = {'class': 'mrpc-cell'} - parent.write(E.th(**th_attrs)) - - else: - th_attrs = {} - self.add_field_attrs(th_attrs, name, cls) - - header_name = self.trc(cls, ctx.locale, name) - - parent.write(E.th(header_name, **th_attrs)) - - self.extend_header_row(ctx, cls, parent, name) - - @coroutine - def _gen_table(self, ctx, cls, inst, parent, name, gen_rows, **kwargs): - logger.debug("Generate table for %r", cls) - cls_attrs = self.get_cls_attrs(cls) - - attrib = {} - table_class = oset() - if self.table_class is not None: - table_class.add(self.table_class) - - if self.table_name_attr is not None: - tn = (self.table_name - if self.table_name is not None else cls.get_type_name()) - - if self.table_name_attr == 'class': - table_class.add(tn) - else: - attrib[self.table_name_attr] = tn - - attrib['class'] = ' '.join(table_class) - if self.table_width is not None: - attrib['width'] = self.table_width - - self.event_manager.fire_event('before_table', ctx, cls, inst, parent, - name, prot=self, **kwargs) - - with parent.element('table', attrib): - write_header = self.header - if cls_attrs.header is False: - write_header = cls_attrs.header - - if write_header: - ret = False - - subprot = self.get_subprot(ctx, cls_attrs) - if subprot is not None: - ret = subprot.column_table_gen_header(ctx, cls, parent, - name) - if not ret: - self._gen_thead(ctx, cls, parent, name) - - with parent.element('tbody'): - ret = gen_rows(ctx, cls, inst, parent, name, **kwargs) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as b: - try: - 
ret.throw(b) - except StopIteration: - pass - - self.extend_table(ctx, cls, parent, name, **kwargs) - - def complex_model_to_parent(self, ctx, cls, inst, parent, name, - from_arr=False, **kwargs): - # If this is direct child of an array, table is already set up in - # array_to_parent. - if from_arr: - return self._gen_row(ctx, cls, inst, parent, name, **kwargs) - else: - return self.wrap_table(ctx, cls, inst, parent, name, self._gen_row, - **kwargs) - - def array_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - return self.wrap_table(ctx, cls, inst, parent, name, - super(HtmlColumnTable, self).array_to_parent, **kwargs) - - def wrap_table(self, ctx, cls, inst, parent, name, gen_rows, **kwargs): - return self._gen_table(ctx, cls, inst, parent, name, gen_rows, **kwargs) - - def extend_table(self, ctx, cls, parent, name, **kwargs): - """This is called as the last operation during the table body generation - after all the
tag - which in turn is inside a tag.""" - - def extend_data_row(self, ctx, cls, inst, parent, name, **kwargs): - """This is called as the last operation during the row generation - after all the tag which - in turn is inside a tag.""" - - def extend_header_row(self, ctx, cls, parent, name, **kwargs): - """This is called once as the last operation during the table header - generation after all the - tag which in turn is inside a tag.""" diff --git a/libs_crutch/contrib/spyne/protocol/html/table/row.py b/libs_crutch/contrib/spyne/protocol/html/table/row.py deleted file mode 100644 index 5a33e1a..0000000 --- a/libs_crutch/contrib/spyne/protocol/html/table/row.py +++ /dev/null @@ -1,216 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import logging -logger = logging.getLogger(__name__) - -from inspect import isgenerator - -from lxml.html.builder import E - -from spyne import ModelBase, ByteArray, ComplexModelBase, Array, AnyUri, \ - ImageUri -from spyne.util import coroutine, Break -from spyne.util.cdict import cdict -from spyne.protocol.html.table import HtmlTableBase - - -class HtmlRowTable(HtmlTableBase): - """Protocol that returns the response object as a html table. 
- - The simple flavour is like the HtmlMicroFormatprotocol, but returns data - as a html table using the
tags are generated before exiting the
tags are generated before exiting the
tag. - - Returns one record per table in a table with two columns. - - :param app: A spyne.application.Application instance. - :param header: Boolean value to determine whether to show field - names in the beginning of the table or not. Defaults to True. Set to - False to skip headers. - :param table_name_attr: The name of the attribute that will contain the - response name of the complex object in the table tag. Set to None to - disable. - :param table_name: When not none, overrides what goes in `table_name_attr`. - :param table_class: When not none, specifies what goes in `class` attribute - in the `
` tag. Table name gets appended when - `table_name_attr == 'class'` - :param field_name_attr: The name of the attribute that will contain the - field names of the complex object children for every table cell. Set - to None to disable. - :param row_class: value that goes inside the - :param cell_class: value that goes inside the
- :param header_cell_class: value that goes inside the - """ - - def __init__(self, *args, **kwargs): - super(HtmlRowTable, self).__init__(*args, **kwargs) - - self.serialization_handlers = cdict({ - ModelBase: self.model_base_to_parent, - AnyUri: self.any_uri_to_parent, - ImageUri: self.imageuri_to_parent, - ByteArray: self.not_supported, - ComplexModelBase: self.complex_model_to_parent, - Array: self.array_to_parent, - }) - - def model_base_to_parent(self, ctx, cls, inst, parent, name, from_arr=False, - **kwargs): - if from_arr: - td_attrib = {} - if False and self.field_name_attr: - td_attrib[self.field_name_attr] = name - - parent.write(E.tr(E.td(self.to_unicode(cls, inst), **td_attrib))) - else: - parent.write(self.to_unicode(cls, inst)) - - @coroutine - def complex_model_to_parent(self, ctx, cls, inst, parent, name, - from_arr=False, **kwargs): - attrib = {} - if self.table_name_attr is not None: - attrib[self.table_name_attr] = cls.get_type_name() - if self.table_width is not None: - attrib['width'] = self.table_width - - with parent.element('table', attrib): - with parent.element('tbody'): - for k, v in self.sort_fields(cls): - sub_attrs = self.get_cls_attrs(v) - if sub_attrs.exc: - logger.debug("\tExclude table cell %r type %r for %r", - k, v, cls) - continue - try: - sub_value = getattr(inst, k, None) - except: # e.g. 
SQLAlchemy could throw NoSuchColumnError - sub_value = None - - sub_name = v.Attributes.sub_name - if sub_name is None: - sub_name = k - - tr_attrs = {} - if self.row_class is not None: - self.add_html_attr('class', tr_attrs, self.row_class) - - with parent.element('tr', tr_attrs): - th_attrs = {} - - if self.header_cell_class is not None: - self.add_html_attr('class', th_attrs, - self.header_cell_class) - - self.add_field_attrs(th_attrs, sub_name, v) - - if sub_attrs.hidden: - self.add_style(th_attrs, 'display:None') - - if self.header: - parent.write(E.th( - self.trc(v, ctx.locale, sub_name), - **th_attrs - )) - - td_attrs = {} - if self.cell_class is not None: - self.add_html_attr('class', td_attrs, - self.cell_class) - - self.add_field_attrs(td_attrs, sub_name, v) - - if sub_attrs.hidden: - self.add_style(td_attrs, 'display:None') - - with parent.element('td', td_attrs): - ret = self.to_parent(ctx, v, sub_value, parent, - sub_name, **kwargs) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as b: - try: - ret.throw(b) - except StopIteration: - pass - - @coroutine - def array_to_parent(self, ctx, cls, inst, parent, name, **kwargs): - with parent.element('div'): - if issubclass(cls, ComplexModelBase): - ret = super(HtmlRowTable, self).array_to_parent( - ctx, cls, inst, parent, name, **kwargs) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as b: - try: - ret.throw(b) - except StopIteration: - pass - else: - table_attrib = {} - if self.table_name_attr: - table_attrib = {self.table_name_attr: name} - if self.table_width is not None: - table_attrib['width'] = self.table_width - - with parent.element('table', table_attrib): - tr_attrib = {} - if self.row_class is not None: - tr_attrib['class'] = self.row_class - with parent.element('tr', tr_attrib): - if self.header: - parent.write(E.th(self.trc(cls, ctx.locale, - cls.get_type_name()))) - td_attrs = {} - - if self.cell_class is not 
None: - self.add_html_attr('class', td_attrs, - self.cell_class) - - self.add_field_attrs(td_attrs, name, cls) - - cls_attrs = self.get_cls_attrs(cls) - - if cls_attrs.hidden: - self.add_style(td_attrs, 'display:None') - - with parent.element('td', td_attrs): - with parent.element('table'): - ret = super(HtmlRowTable, self) \ - .array_to_parent(ctx, cls, inst, parent, - name, **kwargs) - if isgenerator(ret): - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as b: - try: - ret.throw(b) - except StopIteration: - pass diff --git a/libs_crutch/contrib/spyne/protocol/http.py b/libs_crutch/contrib/spyne/protocol/http.py deleted file mode 100644 index 62f130e..0000000 --- a/libs_crutch/contrib/spyne/protocol/http.py +++ /dev/null @@ -1,475 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol.http`` module contains the HttpRpc protocol -implementation. - -This module is EXPERIMENTAL. You may not recognize the code here next time you -look at it. 
-""" - -import logging -logger = logging.getLogger(__name__) - -import re -import pytz -import tempfile - -from spyne import BODY_STYLE_WRAPPED, MethodDescriptor, PushBase -from spyne.util import six, coroutine, Break -from spyne.util.six import string_types, BytesIO -from spyne.error import ResourceNotFoundError -from spyne.model.binary import BINARY_ENCODING_URLSAFE_BASE64, File -from spyne.model.primitive import DateTime -from spyne.protocol.dictdoc import SimpleDictDocument - - -TEMPORARY_DIR = None -STREAM_READ_BLOCK_SIZE = 0x4000 -SWAP_DATA_TO_FILE_THRESHOLD = 512 * 1024 - - - -_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") -_QuotePatt = re.compile(r"[\\].") -_nulljoin = ''.join - -def _unquote_cookie(str): - """Handle double quotes and escaping in cookie values. - This method is copied verbatim from the Python 3.5 standard - library (http.cookies._unquote) so we don't have to depend on - non-public interfaces. - """ - # If there aren't any doublequotes, - # then there can't be any special characters. See RFC 2109. - if str is None or len(str) < 2: - return str - if str[0] != '"' or str[-1] != '"': - return str - - # We have to assume that we must decode this string. - # Down to work. - - # Remove the "s - str = str[1:-1] - - # Check for special sequences. Examples: - # \012 --> \n - # \" --> " - # - i = 0 - n = len(str) - res = [] - while 0 <= i < n: - o_match = _OctalPatt.search(str, i) - q_match = _QuotePatt.search(str, i) - if not o_match and not q_match: # Neither matched - res.append(str[i:]) - break - # else: - j = k = -1 - if o_match: - j = o_match.start(0) - if q_match: - k = q_match.start(0) - if q_match and (not o_match or k < j): # QuotePatt matched - res.append(str[i:k]) - res.append(str[k+1]) - i = k + 2 - else: # OctalPatt matched - res.append(str[i:j]) - res.append(chr(int(str[j+1:j+4], 8))) - i = j + 4 - return _nulljoin(res) - - -def _parse_cookie(cookie): - """Parse a ``Cookie`` HTTP header into a dict of name/value pairs. 
- This function attempts to mimic browser cookie parsing behavior; - it specifically does not follow any of the cookie-related RFCs - (because browsers don't either). - The algorithm used is identical to that used by Django version 1.9.10. - """ - cookiedict = {} - for chunk in cookie.split(str(';')): - if str('=') in chunk: - key, val = chunk.split(str('='), 1) - else: - # Assume an empty name per - # https://bugzilla.mozilla.org/show_bug.cgi?id=169091 - key, val = str(''), chunk - key, val = key.strip(), val.strip() - if key or val: - # unquote using Python's algorithm. - cookiedict[key] = _unquote_cookie(val) - return cookiedict - - -def get_stream_factory(dir=None, delete=True): - def stream_factory(total_content_length, filename, content_type, - content_length=None): - if total_content_length >= SWAP_DATA_TO_FILE_THRESHOLD or \ - delete == False: - if delete == False: - # You need python >= 2.6 for this. - retval = tempfile.NamedTemporaryFile('wb+', dir=dir, - delete=delete) - else: - retval = tempfile.NamedTemporaryFile('wb+', dir=dir) - else: - retval = BytesIO() - - return retval - - return stream_factory - -_weekday = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] -_month = ['w00t', "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", - "Oct", "Nov", "Dec"] - -def _header_to_bytes(prot, val, cls): - if issubclass(cls, DateTime): - if val.tzinfo is not None: - val = val.astimezone(pytz.utc) - else: - val = val.replace(tzinfo=pytz.utc) - - return "%s, %02d %s %04d %02d:%02d:%02d GMT" % ( - _weekday[val.weekday()], val.day, _month[val.month], - val.year, val.hour, val.minute, val.second) - else: - # because wsgi_ref wants header values in unicode. - return prot.to_unicode(cls, val) - - -class HttpRpc(SimpleDictDocument): - """The so-called HttpRpc protocol implementation. It only works with Http - (wsgi and twisted) transports. - - :param app: An :class:'spyne.application.Application` instance. - :param validator: Validation method to use. 
One of (None, 'soft') - :param mime_type: Default mime type to set. Default is - 'application/octet-stream' - :param tmp_dir: Temporary directory to store partial file uploads. Default - is to use the OS default. - :param tmp_delete_on_close: The ``delete`` argument to the - :class:`tempfile.NamedTemporaryFile`. - See: http://docs.python.org/2/library/tempfile.html#tempfile.NamedTemporaryFile. - :param ignore_uncap: As HttpRpc can't serialize complex models, it throws a - server exception when the return type of the user function is Complex. - Passing ``True`` to this argument prevents that by ignoring the return - value. - """ - - mime_type = 'text/plain' - default_binary_encoding = BINARY_ENCODING_URLSAFE_BASE64 - default_string_encoding = 'UTF-8' - - type = set(SimpleDictDocument.type) - type.add('http') - - def __init__(self, app=None, validator=None, mime_type=None, - tmp_dir=None, tmp_delete_on_close=True, ignore_uncap=False, - parse_cookie=True, hier_delim=".", strict_arrays=False): - super(HttpRpc, self).__init__(app, validator, mime_type, - ignore_uncap=ignore_uncap, hier_delim=hier_delim, - strict_arrays=strict_arrays) - - self.tmp_dir = tmp_dir - self.tmp_delete_on_close = tmp_delete_on_close - self.parse_cookie = parse_cookie - - def get_tmp_delete_on_close(self): - return self.__tmp_delete_on_close - - def set_tmp_delete_on_close(self, val): - self.__tmp_delete_on_close = val - self.stream_factory = get_stream_factory(self.tmp_dir, - self.__tmp_delete_on_close) - - tmp_delete_on_close = property(get_tmp_delete_on_close, - set_tmp_delete_on_close) - - def set_validator(self, validator): - if validator == 'soft' or validator is self.SOFT_VALIDATION: - self.validator = self.SOFT_VALIDATION - elif validator is None: - self.validator = None - else: - raise ValueError(validator) - - def create_in_document(self, ctx, in_string_encoding=None): - assert ctx.transport.type.endswith('http'), \ - ("This protocol only works with an http transport, not %r, (in %r)" 
- % (ctx.transport.type, ctx.transport)) - - ctx.in_document = ctx.transport.req - ctx.transport.request_encoding = in_string_encoding - - def decompose_incoming_envelope(self, ctx, message): - assert message == SimpleDictDocument.REQUEST - - ctx.transport.itself.decompose_incoming_envelope(self, ctx, message) - - if self.parse_cookie: - cookies = ctx.in_header_doc.get('cookie', None) - if cookies is None: - cookies = ctx.in_header_doc.get('Cookie', None) - - if cookies is not None: - for cookie_string in cookies: - logger.debug("Loading cookie string %r", cookie_string) - cookie = _parse_cookie(cookie_string) - for k, v in cookie.items(): - l = ctx.in_header_doc.get(k, []) - l.append(v) - ctx.in_header_doc[k] = l - - logger.debug('\theader : %r' % (ctx.in_header_doc)) - logger.debug('\tbody : %r' % (ctx.in_body_doc)) - - def deserialize(self, ctx, message): - assert message in (self.REQUEST,) - - self.event_manager.fire_event('before_deserialize', ctx) - - if ctx.descriptor is None: - raise ResourceNotFoundError(ctx.method_request_string) - - req_enc = getattr(ctx.transport, 'request_encoding', None) - if req_enc is None: - req_enc = ctx.in_protocol.default_string_encoding - - if ctx.descriptor.in_header is not None: - # HttpRpc supports only one header class - in_header_class = ctx.descriptor.in_header[0] - ctx.in_header = self.simple_dict_to_object(ctx, ctx.in_header_doc, - in_header_class, self.validator, req_enc=req_enc) - - if ctx.descriptor.in_message is not None: - ctx.in_object = self.simple_dict_to_object(ctx, ctx.in_body_doc, - ctx.descriptor.in_message, self.validator, req_enc=req_enc) - - self.event_manager.fire_event('after_deserialize', ctx) - - def serialize(self, ctx, message): - retval = None - - assert message in (self.RESPONSE,) - - if ctx.out_document is not None: - return - - if ctx.out_error is not None: - ctx.transport.mime_type = 'text/plain' - ctx.out_document = ctx.out_error.to_bytes_iterable(ctx.out_error) - - else: - retval = 
self._handle_rpc(ctx) - - self.event_manager.fire_event('serialize', ctx) - - return retval - - @coroutine - def _handle_rpc_nonempty(self, ctx): - result_class = ctx.descriptor.out_message - - out_class = None - out_object = None - - if ctx.descriptor.body_style is BODY_STYLE_WRAPPED: - fti = result_class.get_flat_type_info(result_class) - - if len(fti) > 1 and not self.ignore_uncap: - raise TypeError("HttpRpc protocol can only serialize " - "functions with a single return type.") - - if len(fti) == 1: - out_class, = fti.values() - out_object, = ctx.out_object - - else: - out_class = result_class - out_object, = ctx.out_object - - if out_class is not None: - if issubclass(out_class, File) and not \ - isinstance(out_object, (list, tuple, string_types)) \ - and out_object.type is not None: - ctx.transport.set_mime_type(str(out_object.type)) - - ret = self.to_bytes_iterable(out_class, out_object) - - if not isinstance(ret, PushBase): - ctx.out_document = ret - - else: - ctx.transport.itself.set_out_document_push(ctx) - while True: - sv = yield - ctx.out_document.send(sv) - - def _handle_rpc(self, ctx): - retval = None - - # assign raw result to its wrapper, result_message - if ctx.out_object is None or len(ctx.out_object) < 1: - ctx.out_document = [''] - - else: - retval = self._handle_rpc_nonempty(ctx) - - header_class = ctx.descriptor.out_header - if header_class is not None: - # HttpRpc supports only one header class - header_class = header_class[0] - - # header - if ctx.out_header is not None: - out_header = ctx.out_header - if isinstance(ctx.out_header, (list, tuple)): - out_header = ctx.out_header[0] - - ctx.out_header_doc = self.object_to_simple_dict(header_class, - out_header, subinst_eater=_header_to_bytes) - - return retval - - def create_out_string(self, ctx, out_string_encoding='utf8'): - if ctx.out_string is not None: - return - - ctx.out_string = ctx.out_document - - def boolean_from_bytes(self, cls, string): - return string.lower() in ('true', '1', 
'checked', 'on') - - def integer_from_bytes(self, cls, string): - if string == '': - return None - - return super(HttpRpc, self).integer_from_bytes(cls, string) - - -_fragment_pattern_re = re.compile('<([A-Za-z0-9_]+)>') -_full_pattern_re = re.compile('{([A-Za-z0-9_]+)}') - - -class HttpPattern(object): - """Experimental. Stay away. - - :param address: Address pattern - :param verb: HTTP Verb pattern - :param host: HTTP "Host:" header pattern - """ - - @staticmethod - def _compile_url_pattern(pattern): - """where <> placeholders don't contain slashes.""" - - if pattern is None: - return None - pattern = _fragment_pattern_re.sub(r'(?P<\1>[^/]*)', pattern) - pattern = _full_pattern_re.sub(r'(?P<\1>[^/]*)', pattern) - return re.compile(pattern) - - @staticmethod - def _compile_host_pattern(pattern): - """where <> placeholders don't contain dots.""" - - if pattern is None: - return None - pattern = _fragment_pattern_re.sub(r'(?P<\1>[^\.]*)', pattern) - pattern = _full_pattern_re.sub(r'(?P<\1>.*)', pattern) - return re.compile(pattern) - - @staticmethod - def _compile_verb_pattern(pattern): - """where <> placeholders are same as {} ones.""" - - if pattern is None: - return None - pattern = _fragment_pattern_re.sub(r'(?P<\1>.*)', pattern) - pattern = _full_pattern_re.sub(r'(?P<\1>.*)', pattern) - return re.compile(pattern) - - def __init__(self, address=None, verb=None, host=None, endpoint=None): - self.address = address - self.host = host - self.verb = verb - - self.endpoint = endpoint - if self.endpoint is not None: - assert isinstance(self.endpoint, MethodDescriptor) - - def hello(self, descriptor): - if self.address is None: - self.address = descriptor.name - - @property - def address(self): - return self.__address - - @address.setter - def address(self, what): - if what is not None and not what.startswith('/'): - what = '/' + what - - self.__address = what - self.address_re = self._compile_url_pattern(what) - - @property - def host(self): - return self.__host - - 
@host.setter - def host(self, what): - self.__host = what - self.host_re = self._compile_host_pattern(what) - - @property - def verb(self): - return self.__verb - - @verb.setter - def verb(self, what): - self.__verb = what - self.verb_re = self._compile_verb_pattern(what) - - def as_werkzeug_rule(self): - from werkzeug.routing import Rule - from spyne.util.invregexp import invregexp - - methods = None - if self.verb is not None: - methods = invregexp(self.verb) - - host = self.host - if host is None: - host = '<__ignored>' # for some reason, this is necessary when - # host_matching is enabled. - - return Rule(self.address, host=host, endpoint=self.endpoint.name, - methods=methods) - - def __repr__(self): - return "HttpPattern(address=%r, host=%r, verb=%r, endpoint=%r)" % ( - self.address, self.host, self.verb, - None if self.endpoint is None else self.endpoint.name) diff --git a/libs_crutch/contrib/spyne/protocol/json.py b/libs_crutch/contrib/spyne/protocol/json.py deleted file mode 100644 index 16acb11..0000000 --- a/libs_crutch/contrib/spyne/protocol/json.py +++ /dev/null @@ -1,425 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol.json`` package contains the Json-related protocols. 
-Currently, only :class:`spyne.protocol.json.JsonDocument` is supported. - -Initially released in 2.8.0-rc. - -Missing Types -============= - -The JSON standard does not define every type that Spyne supports. These include -Date/Time types as well as arbitrary-length integers and arbitrary-precision -decimals. Integers are parsed to ``int``\s or ``long``\s seamlessly but -``Decimal``\s are only parsed correctly when they come off as strings. - -While it's possible to e.g. (de)serialize floats to ``Decimal``\s by adding -hooks to ``parse_float`` [#]_ (and convert later as necessary), such -customizations apply to the whole incoming document which pretty much messes up -``AnyDict`` serialization and deserialization. - -It also wasn't possible to work with ``object_pairs_hook`` as Spyne's parsing -is always "from outside to inside" whereas ``object_pairs_hook`` is passed -``dict``\s basically in any order "from inside to outside". - -.. [#] http://docs.python.org/2/library/json.html#json.loads -""" - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -from itertools import chain -from spyne.util import six - - -try: - import simplejson as json - from simplejson.decoder import JSONDecodeError -except ImportError: - import json - JSONDecodeError = ValueError - -from spyne.error import ValidationError -from spyne.error import ResourceNotFoundError - -from spyne.model.binary import BINARY_ENCODING_BASE64 -from spyne.model.primitive import Date -from spyne.model.primitive import Time -from spyne.model.primitive import DateTime -from spyne.model.primitive import Double -from spyne.model.primitive import Integer -from spyne.model.primitive import Boolean -from spyne.model.fault import Fault -from spyne.protocol.dictdoc import HierDictDocument - - -# TODO: use this as default -class JsonEncoder(json.JSONEncoder): - def default(self, o): - try: - return super(JsonEncoder, self).default(o) - - except TypeError as e: - # if json 
can't serialize it, it's possibly a generator. If not, - # additional hacks are welcome :) - if logger.level == logging.DEBUG: - logger.exception(e) - return list(o) - - -NON_NUMBER_TYPES = tuple({list, dict, six.text_type, six.binary_type}) - - -class JsonDocument(HierDictDocument): - """An implementation of the json protocol that uses simplejson package when - available, json package otherwise. - - :param ignore_wrappers: Does not serialize wrapper objects. - :param complex_as: One of (list, dict). When list, the complex objects are - serialized to a list of values instead of a dict of key/value pairs. - """ - - mime_type = 'application/json' - text_based = True - - type = set(HierDictDocument.type) - type.add('json') - - default_binary_encoding = BINARY_ENCODING_BASE64 - - # flags used just for tests - _decimal_as_string = True - - def __init__(self, app=None, validator=None, mime_type=None, - ignore_uncap=False, - # DictDocument specific - ignore_wrappers=True, complex_as=dict, ordered=False, - default_string_encoding=None, polymorphic=False, - **kwargs): - - super(JsonDocument, self).__init__(app, validator, mime_type, ignore_uncap, - ignore_wrappers, complex_as, ordered, polymorphic) - - # this is needed when we're overriding a regular instance attribute - # with a property. 
- self.__message = HierDictDocument.__getattribute__(self, 'message') - - self._from_unicode_handlers[Double] = self._ret_number - self._from_unicode_handlers[Boolean] = self._ret_bool - self._from_unicode_handlers[Integer] = self._ret_number - - self._to_unicode_handlers[Double] = self._ret - self._to_unicode_handlers[Boolean] = self._ret - self._to_unicode_handlers[Integer] = self._ret - - self.default_string_encoding = default_string_encoding - self.kwargs = kwargs - - def _ret(self, cls, value): - return value - - def _ret_number(self, cls, value): - if isinstance(value, NON_NUMBER_TYPES): - raise ValidationError(value) - if value in (True, False): - return int(value) - return value - - def _ret_bool(self, cls, value): - if value is None or value in (True, False): - return value - raise ValidationError(value) - - def validate(self, key, cls, val): - super(JsonDocument, self).validate(key, cls, val) - - if issubclass(cls, (DateTime, Date, Time)) and not ( - isinstance(val, six.string_types) and - cls.validate_string(cls, val)): - raise ValidationError(key, val) - - @property - def message(self): - return self.__message - - @message.setter - def message(self, val): - if val is self.RESPONSE and not ('cls' in self.kwargs): - self.kwargs['cls'] = JsonEncoder - self.__message = val - - def create_in_document(self, ctx, in_string_encoding=None): - """Sets ``ctx.in_document`` using ``ctx.in_string``.""" - - try: - in_string = b''.join(ctx.in_string) - if not isinstance(in_string, six.text_type): - if in_string_encoding is None: - in_string_encoding = self.default_string_encoding - if in_string_encoding is not None: - in_string = in_string.decode(in_string_encoding) - ctx.in_document = json.loads(in_string, **self.kwargs) - - except JSONDecodeError as e: - raise Fault('Client.JsonDecodeError', repr(e)) - - def create_out_string(self, ctx, out_string_encoding='utf8'): - """Sets ``ctx.out_string`` using ``ctx.out_document``.""" - if out_string_encoding is None: - 
ctx.out_string = (json.dumps(o, **self.kwargs) - for o in ctx.out_document) - else: - ctx.out_string = ( - json.dumps(o, **self.kwargs).encode(out_string_encoding) - for o in ctx.out_document) - - -# Continuation of http://stackoverflow.com/a/24184379/1520211 -class HybridHttpJsonDocument(JsonDocument): - """This protocol lets you have the method name as the last fragment in the - request url. Eg. instead of sending a HTTP POST request to - - http://api.endpoint/json/ - - containing: :: - - { - "method_name": { - "arg1" : 42, - "arg2" : "foo" - } - } - - you will have to send the request to - - http://api.endpoint/json/method_name - - containing: :: - - { - "arg1" : 42, - "arg2" : "foo" - } - - Part of request data comes from HTTP and part of it comes from Json, hence - the name. - """ - - def create_in_document(self, ctx, in_string_encoding=None): - super(HybridHttpJsonDocument, self).create_in_document(ctx) - - url_fragment = ctx.transport.get_path().split('/')[-1] - - ctx.in_document = {url_fragment: ctx.in_document} - - - -class JsonP(JsonDocument): - """The JsonP protocol puts the reponse document inside a designated - javascript function call. The input protocol is identical to the - JsonDocument protocol. - - :param callback_name: The name of the function call that will wrapp all - response documents. - - For other arguents, see :class:`spyne.protocol.json.JsonDocument`. 
- """ - - type = set(HierDictDocument.type) - type.add('jsonp') - - def __init__(self, callback_name, *args, **kwargs): - super(JsonP, self).__init__(*args, **kwargs) - self.callback_name = callback_name - - def create_out_string(self, ctx, out_string_encoding='utf8'): - super(JsonP, self).create_out_string(ctx, - out_string_encoding=out_string_encoding) - - if out_string_encoding is None: - ctx.out_string = chain( - (self.callback_name, '('), - ctx.out_string, - (');',), - ) - else: - ctx.out_string = chain( - [self.callback_name.encode(out_string_encoding), b'('], - ctx.out_string, - [b');'], - ) - - -class _SpyneJsonRpc1(JsonDocument): - version = 1 - VERSION = 'ver' - BODY = 'body' - HEAD = 'head' - FAULT = 'fault' - - def decompose_incoming_envelope(self, ctx, message=JsonDocument.REQUEST): - indoc = ctx.in_document - if not isinstance(indoc, dict): - raise ValidationError(indoc, "Invalid Request") - - ver = indoc.get(self.VERSION) - if ver is None: - raise ValidationError(ver, "Unknown Version") - - body = indoc.get(self.BODY) - err = indoc.get(self.FAULT) - if body is None and err is None: - raise ValidationError((body, err), "Request data not found") - - ctx.protocol.error = False - if err is not None: - ctx.in_body_doc = err - ctx.protocol.error = True - else: - if not isinstance(body, dict): - raise ValidationError(body, "Request body not found") - if not len(body) == 1: - raise ValidationError(body, "Need len(body) == 1") - - ctx.in_header_doc = indoc.get(self.HEAD) - if not isinstance(ctx.in_header_doc, list): - ctx.in_header_doc = [ctx.in_header_doc] - - (ctx.method_request_string,ctx.in_body_doc), = body.items() - - def deserialize(self, ctx, message): - assert message in (self.REQUEST, self.RESPONSE) - - self.event_manager.fire_event('before_deserialize', ctx) - - if ctx.descriptor is None: - raise ResourceNotFoundError(ctx.method_request_string) - - if ctx.protocol.error: - ctx.in_object = None - ctx.in_error = self._doc_to_object(ctx, Fault, 
ctx.in_body_doc) - - else: - if message is self.REQUEST: - header_class = ctx.descriptor.in_header - body_class = ctx.descriptor.in_message - - elif message is self.RESPONSE: - header_class = ctx.descriptor.out_header - body_class = ctx.descriptor.out_message - - # decode header objects - if (ctx.in_header_doc is not None and header_class is not None): - headers = [None] * len(header_class) - for i, (header_doc, head_class) in enumerate( - zip(ctx.in_header_doc, header_class)): - if header_doc is not None and i < len(header_doc): - headers[i] = self._doc_to_object(ctx, head_class, - header_doc) - - if len(headers) == 1: - ctx.in_header = headers[0] - else: - ctx.in_header = headers - # decode method arguments - if ctx.in_body_doc is None: - ctx.in_object = [None] * len(body_class._type_info) - else: - ctx.in_object = self._doc_to_object(ctx, body_class, - ctx.in_body_doc) - - self.event_manager.fire_event('after_deserialize', ctx) - - def serialize(self, ctx, message): - assert message in (self.REQUEST, self.RESPONSE) - - self.event_manager.fire_event('before_serialize', ctx) - - ctx.out_document = { - "ver": self.version, - } - if ctx.out_error is not None: - ctx.out_document[self.FAULT] = Fault.to_dict(Fault, - ctx.out_error, self) - - else: - if message is self.REQUEST: - header_message_class = ctx.descriptor.in_header - body_message_class = ctx.descriptor.in_message - - elif message is self.RESPONSE: - header_message_class = ctx.descriptor.out_header - body_message_class = ctx.descriptor.out_message - - # assign raw result to its wrapper, result_message - out_type_info = body_message_class._type_info - out_object = body_message_class() - bm_attrs = self.get_cls_attrs(body_message_class) - - keys = iter(out_type_info) - values = iter(ctx.out_object) - while True: - try: - k = next(keys) - except StopIteration: - break - try: - v = next(values) - except StopIteration: - v = None - - out_object._safe_set(k, v, body_message_class, bm_attrs) - - 
ctx.out_document[self.BODY] = ctx.out_body_doc = \ - self._object_to_doc(body_message_class, out_object) - - # header - if ctx.out_header is not None and header_message_class is not None: - if isinstance(ctx.out_header, (list, tuple)): - out_headers = ctx.out_header - else: - out_headers = (ctx.out_header,) - - ctx.out_header_doc = out_header_doc = [] - - for header_class, out_header in zip(header_message_class, - out_headers): - out_header_doc.append(self._object_to_doc(header_class, - out_header)) - - if len(out_header_doc) > 1: - ctx.out_document[self.HEAD] = out_header_doc - else: - ctx.out_document[self.HEAD] = out_header_doc[0] - - self.event_manager.fire_event('after_serialize', ctx) - - -_json_rpc_flavors = { - 'spyne': _SpyneJsonRpc1 -} - -def JsonRpc(flavour, *args, **kwargs): - assert flavour in _json_rpc_flavors, "Unknown JsonRpc flavor. " \ - "Accepted ones are: %r" % tuple(_json_rpc_flavors) - - return _json_rpc_flavors[flavour](*args, **kwargs) diff --git a/libs_crutch/contrib/spyne/protocol/msgpack.py b/libs_crutch/contrib/spyne/protocol/msgpack.py deleted file mode 100644 index 71563fa..0000000 --- a/libs_crutch/contrib/spyne/protocol/msgpack.py +++ /dev/null @@ -1,361 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol.msgpack`` module contains implementations for protocols -that use MessagePack as serializer. - -Initially released in 2.8.0-rc. - -""" - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -import msgpack - -from spyne import ValidationError -from spyne.util import six -from spyne.model.fault import Fault -from spyne.model.primitive import Double -from spyne.model.primitive import Boolean -from spyne.model.primitive import Integer -from spyne.protocol.dictdoc import HierDictDocument - - -class MessagePackDecodeError(Fault): - CODE = "Client.MessagePackDecodeError" - - def __init__(self, data=None): - super(MessagePackDecodeError, self) \ - .__init__(self.CODE, data) - - -NON_NUMBER_TYPES = tuple({list, dict, six.text_type, six.binary_type}) - - -class MessagePackDocument(HierDictDocument): - """An integration class for the msgpack protocol.""" - - mime_type = 'application/x-msgpack' - text_based = False - - type = set(HierDictDocument.type) - type.add('msgpack') - - default_string_encoding = 'UTF-8' - from_serstr = HierDictDocument.from_bytes - to_serstr = HierDictDocument.to_bytes - - # flags to be used in tests - _decimal_as_string = True - _huge_numbers_as_string = True - - def __init__(self, app=None, validator=None, mime_type=None, - ignore_uncap=False, - # DictDocument specific - ignore_wrappers=True, - complex_as=dict, - ordered=False, - polymorphic=False, - key_encoding='utf8', - # MessagePackDocument specific - mw_packer=msgpack.Unpacker, - mw_unpacker=msgpack.Unpacker, - use_list=False, - raw=False, - use_bin_type=True, - **kwargs): - - super(MessagePackDocument, self).__init__(app, validator, mime_type, - ignore_uncap, ignore_wrappers, complex_as, ordered, 
polymorphic, - key_encoding) - - self.mw_packer = mw_packer - self.mw_unpacker = mw_unpacker - - # unpacker - if not raw: - self.from_serstr = self.from_unicode - - if use_bin_type: - self.from_serstr = self.from_unicode - - self.kwargs = kwargs - if raw != False: - kwargs['raw'] = raw - - if use_list != True: - kwargs['use_list'] = use_list - - if use_bin_type != True: - kwargs['use_bin_type'] = use_bin_type - - self._from_bytes_handlers[Double] = self._ret_number - self._from_bytes_handlers[Boolean] = self._ret_bool - self._from_bytes_handlers[Integer] = self.integer_from_bytes - - self._from_unicode_handlers[Double] = self._ret_number - self._from_unicode_handlers[Boolean] = self._ret_bool - self._from_unicode_handlers[Integer] = self.integer_from_bytes - - self._to_bytes_handlers[Double] = self._ret_number - self._to_bytes_handlers[Boolean] = self._ret_bool - self._to_bytes_handlers[Integer] = self.integer_to_bytes - - self._to_unicode_handlers[Double] = self._ret_number - self._to_unicode_handlers[Boolean] = self._ret_bool - self._to_unicode_handlers[Integer] = self.integer_to_bytes - - def _ret(self, _, value): - return value - - def _ret_number(self, _, value): - if isinstance(value, NON_NUMBER_TYPES): - raise ValidationError(value) - if value in (True, False): - return int(value) - return value - - def _ret_bool(self, _, value): - if value is None or value in (True, False): - return value - raise ValidationError(value) - - def get_class_name(self, cls): - class_name = cls.get_type_name() - if not six.PY2: - if not isinstance(class_name, bytes): - class_name = class_name.encode(self.default_string_encoding) - - return class_name - - def create_in_document(self, ctx, in_string_encoding=None): - """Sets ``ctx.in_document``, using ``ctx.in_string``. - - :param ctx: The MethodContext object - :param in_string_encoding: MessagePack is a binary protocol. So this - argument is ignored. 
- """ - - # handle mmap objects from in ctx.in_string as returned by - # TwistedWebResource.handle_rpc. - if isinstance(ctx.in_string, (list, tuple)) \ - and len(ctx.in_string) == 1 \ - and isinstance(ctx.in_string[0], memoryview): - unpacker = self.mw_unpacker(**self.kwargs) - unpacker.feed(ctx.in_string[0]) - ctx.in_document = next(x for x in unpacker) - - else: - try: - ctx.in_document = msgpack.unpackb(b''.join(ctx.in_string)) - except ValueError as e: - raise MessagePackDecodeError(' '.join(e.args)) - - def gen_method_request_string(self, ctx): - """Uses information in context object to return a method_request_string. - - Returns a string in the form of "{namespaces}method name". - """ - - mrs, = ctx.in_body_doc.keys() - if not six.PY2 and isinstance(mrs, bytes): - mrs = mrs.decode(self.key_encoding) - - return '{%s}%s' % (self.app.interface.get_tns(), mrs) - - def create_out_string(self, ctx, out_string_encoding='utf8'): - ctx.out_string = (msgpack.packb(o) for o in ctx.out_document) - - def integer_from_bytes(self, cls, value): - if isinstance(value, (six.text_type, six.binary_type)): - return super(MessagePackDocument, self) \ - .integer_from_bytes(cls, value) - return value - - def integer_to_bytes(self, cls, value, **_): - # if it's inside the range msgpack can deal with - if -1<<63 <= value < 1<<64: - return value - else: - return super(MessagePackDocument, self).integer_to_bytes(cls, value) - - -class MessagePackRpc(MessagePackDocument): - """An integration class for the msgpack-rpc protocol.""" - - mime_type = 'application/x-msgpack' - - MSGPACK_REQUEST = 0 - MSGPACK_RESPONSE = 1 - MSGPACK_NOTIFY = 2 - MSGPACK_ERROR = 3 - - def create_out_string(self, ctx, out_string_encoding='utf8'): - ctx.out_string = (msgpack.packb(o) for o in ctx.out_document) - - def create_in_document(self, ctx, in_string_encoding=None): - """Sets ``ctx.in_document``, using ``ctx.in_string``. 
- - :param ctx: The MethodContext object - :param in_string_encoding: MessagePack is a binary protocol. So this - argument is ignored. - """ - - # TODO: Use feed api - try: - ctx.in_document = msgpack.unpackb(b''.join(ctx.in_string), - **self.kwargs) - - except ValueError as e: - raise MessagePackDecodeError(''.join(e.args)) - - try: - len(ctx.in_document) - except TypeError: - raise MessagePackDecodeError("Input must be a sequence.") - - if not (3 <= len(ctx.in_document) <= 4): - raise MessagePackDecodeError("Length of input iterable must be " - "either 3 or 4") - - def decompose_incoming_envelope(self, ctx, message): - # FIXME: For example: {0: 0, 1: 0, 2: "some_call", 3: [1,2,3]} will also - # work. Is this a problem? - - # FIXME: Msgid is ignored. Is this a problem? - msgparams = [] - if len(ctx.in_document) == 3: - msgtype, msgid, msgname_or_error = ctx.in_document - - else: - msgtype, msgid, msgname_or_error, msgparams = ctx.in_document - - if not six.PY2: - if isinstance(msgname_or_error, bytes): - msgname_or_error = msgname_or_error.decode( - self.default_string_encoding) - - if msgtype == MessagePackRpc.MSGPACK_REQUEST: - assert message == MessagePackRpc.REQUEST - - elif msgtype == MessagePackRpc.MSGPACK_RESPONSE: - assert message == MessagePackRpc.RESPONSE - - elif msgtype == MessagePackRpc.MSGPACK_NOTIFY: - raise NotImplementedError() - - else: - raise MessagePackDecodeError("Unknown message type %r" % msgtype) - - ctx.method_request_string = '{%s}%s' % (self.app.interface.get_tns(), - msgname_or_error) - - # MessagePackRpc does not seem to have Header support - ctx.in_header_doc = None - - if isinstance(msgname_or_error, dict) and msgname_or_error: - # we got an error - ctx.in_error = msgname_or_error - else: - ctx.in_body_doc = msgparams - - # logger.debug('\theader : %r', ctx.in_header_doc) - # logger.debug('\tbody : %r', ctx.in_body_doc) - - def deserialize(self, ctx, message): - assert message in (self.REQUEST, self.RESPONSE) - - 
self.event_manager.fire_event('before_deserialize', ctx) - - if ctx.descriptor is None: - raise Fault("Client", "Method %r not found." % - ctx.method_request_string) - - # instantiate the result message - if message is self.REQUEST: - body_class = ctx.descriptor.in_message - elif message is self.RESPONSE: - body_class = ctx.descriptor.out_message - else: - raise Exception("what?") - - if ctx.in_error: - ctx.in_error = Fault(**ctx.in_error) - - elif body_class: - ctx.in_object = self._doc_to_object(ctx, - body_class, ctx.in_body_doc, self.validator) - - else: - ctx.in_object = [] - - self.event_manager.fire_event('after_deserialize', ctx) - - def serialize(self, ctx, message): - assert message in (self.REQUEST, self.RESPONSE) - - self.event_manager.fire_event('before_serialize', ctx) - - if ctx.out_error is not None: - ctx.out_document = [ - [MessagePackRpc.MSGPACK_ERROR, 0, - Fault.to_dict(ctx.out_error.__class__, ctx.out_error)] - ] - return - - # get the result message - if message is self.REQUEST: - out_type = ctx.descriptor.in_message - msgtype = MessagePackRpc.MSGPACK_REQUEST - method_name_or_error = ctx.descriptor.operation_name - - elif message is self.RESPONSE: - out_type = ctx.descriptor.out_message - msgtype = MessagePackRpc.MSGPACK_RESPONSE - method_name_or_error = None - - else: - raise Exception("what?") - - if out_type is None: - return - - out_type_info = out_type._type_info - - # instantiate the result message - out_instance = out_type() - - # assign raw result to its wrapper, result_message - for i, (k, v) in enumerate(out_type_info.items()): - attrs = self.get_cls_attrs(v) - out_instance._safe_set(k, ctx.out_object[i], v, attrs) - - # transform the results into a dict: - if out_type.Attributes.max_occurs > 1: - params = (self._to_dict_value(out_type, inst, set()) - for inst in out_instance) - else: - params = self._to_dict_value(out_type, out_instance, set()) - - ctx.out_document = [[msgtype, 0, method_name_or_error, params]] - - 
self.event_manager.fire_event('after_serialize', ctx) diff --git a/libs_crutch/contrib/spyne/protocol/soap/__init__.py b/libs_crutch/contrib/spyne/protocol/soap/__init__.py deleted file mode 100644 index 754461f..0000000 --- a/libs_crutch/contrib/spyne/protocol/soap/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol.soap`` package contains an implementation of a subset -of the Soap 1.1 standard and awaits for volunteers for implementing the -brand new Soap 1.2 standard. - -Patches are welcome. -""" - -from spyne.protocol.soap.soap11 import Soap11 -from spyne.protocol.soap.soap11 import _from_soap -from spyne.protocol.soap.soap11 import _parse_xml_string -from spyne.protocol.soap.soap12 import Soap12 \ No newline at end of file diff --git a/libs_crutch/contrib/spyne/protocol/soap/mime.py b/libs_crutch/contrib/spyne/protocol/soap/mime.py deleted file mode 100644 index d4aaa1d..0000000 --- a/libs_crutch/contrib/spyne/protocol/soap/mime.py +++ /dev/null @@ -1,315 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol.soap.mime`` module contains additional logic for using -optimized encodings for binary when encapsulating Soap 1.1 messages in Http. - -The functionality in this code seems to work at first glance but is not well -tested. - -Testcases and preferably improvements are most welcome. -""" - -from __future__ import print_function, unicode_literals - -import logging -logger = logging.getLogger(__name__) - -import re - -from base64 import b64encode -from itertools import chain - -from lxml import etree - -from email import generator -from email.mime.multipart import MIMEMultipart -from email.mime.application import MIMEApplication -from email.encoders import encode_7or8bit - -from spyne import ValidationError -from spyne.util import six -from spyne.model.binary import ByteArray, File -from spyne.const.xml import NS_XOP - -if six.PY2: - from email import message_from_string as message_from_bytes -else: - from email import message_from_bytes - - -XPATH_NSDICT = dict(xop=NS_XOP) - - -def _join_attachment(ns_soap_env, href_id, envelope, payload, prefix=True): - """Places the data from an attachment back into a SOAP message, replacing - its xop:Include element or href. 
- - Returns a tuple of length 2 with the new message and the number of - replacements made - - :param id: content-id or content-location of attachment - :param prefix: Set this to true if id is content-id or false if it is - content-location. It prefixes a "cid:" to the href value. - :param envelope: soap envelope string to be operated on - :param payload: attachment data - """ - - # grab the XML element of the message in the SOAP body - soaptree = etree.fromstring(envelope) - soapbody = soaptree.find("{%s}Body" % ns_soap_env) - - if soapbody is None: - raise ValidationError(None, "SOAP Body tag not found") - - message = None - for child in list(soapbody): - if child.tag != "{%s}Fault" % ns_soap_env: - message = child - break - - idprefix = '' - - if prefix: - idprefix = "cid:" - href_id = "%s%s" % (idprefix, href_id,) - - num = 0 - xpath = ".//xop:Include[@href=\"{}\"]".format(href_id) - - for num, node in enumerate(message.xpath(xpath, namespaces=XPATH_NSDICT)): - parent = node.getparent() - parent.remove(node) - parent.text = payload - - return etree.tostring(soaptree), num - - -def collapse_swa(ctx, content_type, ns_soap_env): - """ - Translates an SwA multipart/related message into an application/soap+xml - message. - - Returns the 'appication/soap+xml' version of the given HTTP body. 
- - References: - SwA http://www.w3.org/TR/SOAP-attachments - XOP http://www.w3.org/TR/xop10/ - MTOM http://www.w3.org/TR/soap12-mtom/ - http://www.w3.org/Submission/soap11mtom10/ - - :param content_type: value of the Content-Type header field, parsed by - cgi.parse_header() function - :param ctx: request context - """ - - envelope = ctx.in_string - # convert multipart messages back to pure SOAP - mime_type, content_data = content_type - if not six.PY2: - assert isinstance(mime_type, six.text_type) - - if u'multipart/related' not in mime_type: - return envelope - - charset = content_data.get('charset', None) - if charset is None: - charset = 'ascii' - - boundary = content_data.get('boundary', None) - if boundary is None: - raise ValidationError(None, u"Missing 'boundary' value from " - u"Content-Type header") - - envelope = list(envelope) - - # What an ugly hack... - request = MIMEMultipart('related', boundary=boundary) - msg_string = re.sub(r"\n\n.*", '', request.as_string()) - msg_string = chain( - (msg_string.encode(charset), generator.NL.encode('ascii')), - (e for e in envelope), - ) - - msg_string = b''.join(msg_string) - msg = message_from_bytes(msg_string) # our message - - soapmsg = None - root = msg.get_param('start') - - # walk through sections, reconstructing pure SOAP - for part in msg.walk(): - # skip the multipart container section - if part.get_content_maintype() == 'multipart': - continue - - # detect main soap section - if (part.get('Content-ID') and part.get('Content-ID') == root) or \ - (root is None and part == msg.get_payload()[0]): - soapmsg = part.get_payload() - continue - - # binary packages - cte = part.get("Content-Transfer-Encoding") - - if cte != 'base64': - payload = b64encode(part.get_payload(decode=True)) - else: - payload = part.get_payload() - - cid = part.get("Content-ID").strip("<>") - cloc = part.get("Content-Location") - numreplaces = None - - # Check for Content-ID and make replacement - if cid: - soapmsg, numreplaces = 
_join_attachment( - ns_soap_env, cid, soapmsg, payload) - - # Check for Content-Location and make replacement - if cloc and not cid and not numreplaces: - soapmsg, numreplaces = _join_attachment( - ns_soap_env, cloc, soapmsg, payload, - False) - - if soapmsg is None: - raise ValidationError(None, "Invalid MtoM request") - - return (soapmsg,) - - -def apply_mtom(headers, envelope, params, paramvals): - """Apply MTOM to a SOAP envelope, separating attachments into a - MIME multipart message. - - Returns a tuple of length 2 with dictionary of headers and string of body - that can be sent with HTTPConnection - - References: - XOP http://www.w3.org/TR/xop10/ - MTOM http://www.w3.org/TR/soap12-mtom/ - http://www.w3.org/Submission/soap11mtom10/ - - :param headers Headers dictionary of the SOAP message that would - originally be sent. - :param envelope Iterable containing SOAP envelope string that would have - originally been sent. - :param params params attribute from the Message object used for the SOAP - :param paramvals values of the params, passed to Message.to_parent - """ - - # grab the XML element of the message in the SOAP body - envelope = ''.join(envelope) - - soaptree = etree.fromstring(envelope) - soapbody = soaptree.find("{%s}Body" % _ns_soap_env) - - message = None - for child in list(soapbody): - if child.tag == ("{%s}Fault" % _ns_soap_env): - return headers, envelope - else: - message = child - break - - # Get additional parameters from original Content-Type - ctarray = [] - for n, v in headers.items(): - if n.lower() == 'content-type': - ctarray = v.split(';') - break - - roottype = ctarray[0].strip() - rootparams = {} - for ctparam in ctarray[1:]: - n, v = ctparam.strip().split('=') - rootparams[n] = v.strip("\"'") - - # Set up initial MIME parts. - mtompkg = MIMEMultipart('related', boundary='?//<><>spyne_MIME_boundary<>') - rootpkg = MIMEApplication(envelope, 'xop+xml', encode_7or8bit) - - # Set up multipart headers. 
- del mtompkg['mime-version'] - mtompkg.set_param('start-info', roottype) - mtompkg.set_param('start', '') - if 'SOAPAction' in headers: - mtompkg.add_header('SOAPAction', headers.get('SOAPAction')) - - # Set up root SOAP part headers. - del rootpkg['mime-version'] - - rootpkg.add_header('Content-ID', '') - - for n, v in rootparams.items(): - rootpkg.set_param(n, v) - - rootpkg.set_param('type', roottype) - - mtompkg.attach(rootpkg) - - # Extract attachments from SOAP envelope. - for i in range(len(params)): - name, typ = params[i] - - if issubclass(typ, (ByteArray, File)): - id = "SpyneAttachment_%s" % (len(mtompkg.get_payload()), ) - - param = message[i] - param.text = "" - - incl = etree.SubElement(param, "{%s}Include" % _ns_xop) - incl.attrib["href"] = "cid:%s" % id - - if paramvals[i].fileName and not paramvals[i].data: - paramvals[i].load_from_file() - - if issubclass(type, File): - data = paramvals[i].data - else: - data = ''.join(paramvals[i]) - - attachment = MIMEApplication(data, _encoder=encode_7or8bit) - - del attachment['mime-version'] - - attachment.add_header('Content-ID', '<%s>' % (id, )) - mtompkg.attach(attachment) - - # Update SOAP envelope. 
- rootpkg.set_payload(etree.tostring(soaptree)) - - # extract body string from MIMEMultipart message - bound = '--%s' % (mtompkg.get_boundary(), ) - marray = mtompkg.as_string().split(bound) - mtombody = bound - mtombody += bound.join(marray[1:]) - - # set Content-Length - mtompkg.add_header("Content-Length", str(len(mtombody))) - - # extract dictionary of headers from MIMEMultipart message - mtomheaders = {} - for name, value in mtompkg.items(): - mtomheaders[name] = value - - if len(mtompkg.get_payload()) <= 1: - return headers, envelope - - return mtomheaders, [mtombody] diff --git a/libs_crutch/contrib/spyne/protocol/soap/soap11.py b/libs_crutch/contrib/spyne/protocol/soap/soap11.py deleted file mode 100644 index 75e1add..0000000 --- a/libs_crutch/contrib/spyne/protocol/soap/soap11.py +++ /dev/null @@ -1,379 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol.soap.soap11`` module contains the implementation of a -subset of the Soap 1.1 standard. - -Except the binary optimizations (MtoM, attachments, etc) that are beta quality, -this protocol is production quality. - -One must specifically enable the debug output for the Xml protocol to see the -actual document exchange. 
That's because the xml formatting code is run only -when explicitly enabled due to performance reasons. :: - - logging.getLogger('spyne.protocol.xml').setLevel(logging.DEBUG) - -Initially released in soaplib-0.8.0. - -Logs valid documents to %r and invalid documents to %r. -""" % (__name__, __name__ + ".invalid") - -import logging -logger = logging.getLogger(__name__) -logger_invalid = logging.getLogger(__name__ + ".invalid") - -import cgi - -from itertools import chain - -import spyne.const.xml as ns - -from lxml import etree -from lxml.etree import XMLSyntaxError -from lxml.etree import XMLParser - -from spyne import BODY_STYLE_WRAPPED -from spyne.util import six -from spyne.const.xml import DEFAULT_NS -from spyne.const.http import HTTP_405, HTTP_500 -from spyne.error import RequestNotAllowed -from spyne.model.fault import Fault -from spyne.model.primitive import Date, Time, DateTime -from spyne.protocol.xml import XmlDocument -from spyne.protocol.soap.mime import collapse_swa -from spyne.server.http import HttpTransportContext - - -def _from_soap(in_envelope_xml, xmlids=None, **kwargs): - """Parses the xml string into the header and payload. - """ - ns_soap = kwargs.pop('ns', ns.NS_SOAP11_ENV) - - if xmlids: - resolve_hrefs(in_envelope_xml, xmlids) - - if in_envelope_xml.tag != '{%s}Envelope' % ns_soap: - raise Fault('Client.SoapError', 'No {%s}Envelope element was found!' 
% - ns_soap) - - header_envelope = in_envelope_xml.xpath('e:Header', - namespaces={'e': ns_soap}) - body_envelope = in_envelope_xml.xpath('e:Body', - namespaces={'e': ns_soap}) - - if len(header_envelope) == 0 and len(body_envelope) == 0: - raise Fault('Client.SoapError', 'Soap envelope is empty!') - - header = None - if len(header_envelope) > 0: - header = header_envelope[0].getchildren() - - body = None - if len(body_envelope) > 0 and len(body_envelope[0]) > 0: - body = body_envelope[0][0] - - return header, body - - -def _parse_xml_string(xml_string, parser, charset=None): - xml_string = iter(xml_string) - chunk = next(xml_string) - if isinstance(chunk, six.binary_type): - string = b''.join(chain( (chunk,), xml_string )) - else: - string = ''.join(chain( (chunk,), xml_string )) - - if charset: - string = string.decode(charset) - - try: - try: - root, xmlids = etree.XMLID(string, parser) - - except ValueError as e: - logger.debug('ValueError: Deserializing from unicode strings with ' - 'encoding declaration is not supported by lxml.') - root, xmlids = etree.XMLID(string.encode(charset), parser) - - except XMLSyntaxError as e: - logger_invalid.error("%r in string %r", e, string) - raise Fault('Client.XMLSyntaxError', str(e)) - - return root, xmlids - - -# see http://www.w3.org/TR/2000/NOTE-SOAP-20000508/ -# section 5.2.1 for an example of how the id and href attributes are used. 
-def resolve_hrefs(element, xmlids): - for e in element: - if e.get('id'): - continue # don't need to resolve this element - - elif e.get('href'): - resolved_element = xmlids[e.get('href').replace('#', '')] - if resolved_element is None: - continue - resolve_hrefs(resolved_element, xmlids) - - # copies the attributes - [e.set(k, v) for k, v in resolved_element.items()] - - # copies the children - [e.append(child) for child in resolved_element.getchildren()] - - # copies the text - e.text = resolved_element.text - - else: - resolve_hrefs(e, xmlids) - - return element - - -class Soap11(XmlDocument): - """The base implementation of a subset of the Soap 1.1 standard. The - document is available here: http://www.w3.org/TR/soap11/ - - :param app: The owner application instance. - :param validator: One of (None, 'soft', 'lxml', 'schema', - ProtocolBase.SOFT_VALIDATION, XmlDocument.SCHEMA_VALIDATION). - Both ``'lxml'`` and ``'schema'`` values are equivalent to - ``XmlDocument.SCHEMA_VALIDATION``. - :param xml_declaration: Whether to add xml_declaration to the responses - Default is 'True'. - :param cleanup_namespaces: Whether to add clean up namespace declarations - in the response document. Default is 'True'. - :param encoding: The suggested string encoding for the returned xml - documents. The transport can override this. - :param pretty_print: When ``True``, returns the document in a pretty-printed - format. - """ - - mime_type = 'text/xml; charset=utf-8' - - type = set(XmlDocument.type) - type.update(('soap', 'soap11')) - - ns_soap_env = ns.NS_SOAP11_ENV - ns_soap_enc = ns.NS_SOAP11_ENC - - def __init__(self, *args, **kwargs): - super(Soap11, self).__init__(*args, **kwargs) - - # SOAP requires DateTime strings to be in iso format. The following - # lines make sure custom datetime formatting via - # DateTime(dt_format="...") (or similar) is bypassed. 
- self._to_unicode_handlers[Time] = lambda cls, value: value.isoformat() - self._to_unicode_handlers[DateTime] = lambda cls, value: value.isoformat() - - self._from_unicode_handlers[Date] = self.date_from_unicode_iso - self._from_unicode_handlers[DateTime] = self.datetime_from_unicode_iso - - def create_in_document(self, ctx, charset=None): - if isinstance(ctx.transport, HttpTransportContext): - # according to the soap-via-http standard, soap requests must only - # work with proper POST requests. - content_type = ctx.transport.get_request_content_type() - http_verb = ctx.transport.get_request_method() - if content_type is None or http_verb != "POST": - ctx.transport.resp_code = HTTP_405 - raise RequestNotAllowed( - "You must issue a POST request with the Content-Type " - "header properly set.") - - content_type = cgi.parse_header(content_type) - ctx.in_string = collapse_swa(ctx, content_type, self.ns_soap_env) - - ctx.in_document = _parse_xml_string(ctx.in_string, - XMLParser(**self.parser_kwargs), - charset) - - def decompose_incoming_envelope(self, ctx, message=XmlDocument.REQUEST): - envelope_xml, xmlids = ctx.in_document - header_document, body_document = _from_soap(envelope_xml, xmlids, - ns=self.ns_soap_env) - - ctx.in_document = envelope_xml - - if body_document.tag == '{%s}Fault' % self.ns_soap_env: - ctx.in_body_doc = body_document - - else: - ctx.in_header_doc = header_document - ctx.in_body_doc = body_document - ctx.method_request_string = ctx.in_body_doc.tag - self.validate_body(ctx, message) - - def deserialize(self, ctx, message): - """Takes a MethodContext instance and a string containing ONE soap - message. - Returns the corresponding native python object - - Not meant to be overridden. 
- """ - - assert message in (self.REQUEST, self.RESPONSE) - - self.event_manager.fire_event('before_deserialize', ctx) - - if ctx.in_body_doc.tag == "{%s}Fault" % self.ns_soap_env: - ctx.in_object = None - ctx.in_error = self.from_element(ctx, Fault, ctx.in_body_doc) - - else: - if message is self.REQUEST: - header_class = ctx.descriptor.in_header - body_class = ctx.descriptor.in_message - - elif message is self.RESPONSE: - header_class = ctx.descriptor.out_header - body_class = ctx.descriptor.out_message - - # decode header objects - # header elements are returned in header_class order which need not match the incoming XML - if (ctx.in_header_doc is not None and header_class is not None): - headers = [None] * len(header_class) - in_header_dict = dict( [(element.tag, element) - for element in ctx.in_header_doc]) - for i, head_class in enumerate(header_class): - if i < len(header_class): - nsval = "{%s}%s" % (head_class.__namespace__, - head_class.__type_name__) - header_doc = in_header_dict.get(nsval, None) - if header_doc is not None: - headers[i] = self.from_element(ctx, head_class, - header_doc) - - if len(headers) == 1: - ctx.in_header = headers[0] - else: - ctx.in_header = headers - - # decode method arguments - if ctx.in_body_doc is None: - ctx.in_object = [None] * len(body_class._type_info) - else: - ctx.in_object = self.from_element(ctx, body_class, - ctx.in_body_doc) - - self.event_manager.fire_event('after_deserialize', ctx) - - def serialize(self, ctx, message): - """Uses ctx.out_object, ctx.out_header or ctx.out_error to set - ctx.out_body_doc, ctx.out_header_doc and ctx.out_document as an - lxml.etree._Element instance. - - Not meant to be overridden. 
- """ - - assert message in (self.REQUEST, self.RESPONSE) - - self.event_manager.fire_event('before_serialize', ctx) - - # construct the soap response, and serialize it - nsmap = self.app.interface.nsmap - ctx.out_document = etree.Element('{%s}Envelope' % self.ns_soap_env, - nsmap=nsmap) - if ctx.out_error is not None: - # FIXME: There's no way to alter soap response headers for the user. - ctx.out_body_doc = out_body_doc = etree.SubElement(ctx.out_document, - '{%s}Body' % self.ns_soap_env, nsmap=nsmap) - self.to_parent(ctx, ctx.out_error.__class__, ctx.out_error, - out_body_doc, self.app.interface.get_tns()) - - else: - if message is self.REQUEST: - header_message_class = ctx.descriptor.in_header - body_message_class = ctx.descriptor.in_message - - elif message is self.RESPONSE: - header_message_class = ctx.descriptor.out_header - body_message_class = ctx.descriptor.out_message - - # body - ctx.out_body_doc = out_body_doc = etree.Element( - '{%s}Body' % self.ns_soap_env) - - # assign raw result to its wrapper, result_message - if ctx.descriptor.body_style is BODY_STYLE_WRAPPED: - out_type_info = body_message_class._type_info - out_object = body_message_class() - bm_attrs = self.get_cls_attrs(body_message_class) - - keys = iter(out_type_info) - values = iter(ctx.out_object) - while True: - try: - k = next(keys) - except StopIteration: - break - try: - v = next(values) - except StopIteration: - v = None - - out_object._safe_set(k, v, body_message_class, bm_attrs) - - self.to_parent(ctx, body_message_class, out_object, - out_body_doc, body_message_class.get_namespace()) - - else: - out_object = ctx.out_object[0] - - sub_ns = body_message_class.Attributes.sub_ns - if sub_ns is None: - sub_ns = body_message_class.get_namespace() - if sub_ns is DEFAULT_NS: - sub_ns = self.app.interface.get_tns() - - sub_name = body_message_class.Attributes.sub_name - if sub_name is None: - sub_name = body_message_class.get_type_name() - - self.to_parent(ctx, body_message_class, 
out_object, out_body_doc, - sub_ns, sub_name) - - # header - if ctx.out_header is not None and header_message_class is not None: - ctx.out_header_doc = soap_header_elt = etree.SubElement( - ctx.out_document, '{%s}Header' % self.ns_soap_env) - - if isinstance(ctx.out_header, (list, tuple)): - out_headers = ctx.out_header - else: - out_headers = (ctx.out_header,) - - for header_class, out_header in zip(header_message_class, - out_headers): - self.to_parent(ctx, - header_class, out_header, - soap_header_elt, - header_class.get_namespace(), - header_class.get_type_name(), - ) - - ctx.out_document.append(ctx.out_body_doc) - - if self.cleanup_namespaces: - etree.cleanup_namespaces(ctx.out_document) - - self.event_manager.fire_event('after_serialize', ctx) - - def fault_to_http_response_code(self, fault): - return HTTP_500 diff --git a/libs_crutch/contrib/spyne/protocol/soap/soap12.py b/libs_crutch/contrib/spyne/protocol/soap/soap12.py deleted file mode 100644 index 69858a1..0000000 --- a/libs_crutch/contrib/spyne/protocol/soap/soap12.py +++ /dev/null @@ -1,152 +0,0 @@ -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -""" -The ``spyne.protoco.soap.soap12`` module contains the implementation of a -subset of the Soap 1.2 standard. 
- -This modules is EXPERIMENTAL. -More info can be found at: https://www.w3.org/TR/soap12-part1/ -""" - -import logging - -from lxml.builder import E - -from spyne.protocol.soap.soap11 import Soap11 -from spyne.protocol.xml import _append -from spyne.util.six import string_types -from spyne.util.etreeconv import root_dict_to_etree -from spyne.const.xml import NS_SOAP12_ENV, NS_XML, PREFMAP - - -logger = logging.getLogger(__name__) -logger_invalid = logging.getLogger(__name__ + ".invalid") - - -class Soap12(Soap11): - """ - The base implementation of a subset of the Soap 1.2 standard. The - document is available here: http://www.w3.org/TR/soap12/ - """ - mime_type = 'application/soap+xml; charset=utf-8' - - soap_env = PREFMAP[NS_SOAP12_ENV] - ns_soap_env = NS_SOAP12_ENV - - type = set(Soap11.type) - type.discard('soap11') - type.update(('soap', 'soap12')) - - def generate_subcode(self, value, subcode=None): - subcode_node = E("{%s}Subcode" % self.ns_soap_env) - subcode_node.append(E("{%s}Value" % self.ns_soap_env, value)) - if subcode: - subcode_node.append(subcode) - return subcode_node - - def gen_fault_codes(self, faultstring): - faultstrings = faultstring.split('.') - value = faultstrings.pop(0) - if value == 'Client': - value = '%s:Sender' % self.soap_env - elif value == 'Server': - value = '%s:Receiver' % self.soap_env - else: - raise TypeError('Wrong fault code, got', type(faultstring)) - - return value, faultstrings - - def generate_faultcode(self, element): - nsmap = element.nsmap - faultcode = [] - faultcode.append(element.find('soap:Code/soap:Value', namespaces=nsmap).text) - subcode = element.find('soap:Code/soap:Subcode', namespaces=nsmap) - while subcode is not None: - faultcode.append(subcode.find('soap:Value', namespaces=nsmap).text) - subcode = subcode.find('soap:Subcode', namespaces=nsmap) - - return '.'.join(faultcode) - - def fault_to_parent(self, ctx, cls, inst, parent, ns, **_): - reason = E("{%s}Reason" % self.ns_soap_env) - 
reason.append(E("{%s}Text" % self.ns_soap_env, inst.faultstring, - **{'{%s}lang' % NS_XML: inst.lang})) - - subelts = [ - reason, - E("{%s}Role" % self.ns_soap_env, inst.faultactor), - ] - - return self._fault_to_parent_impl(ctx, cls, inst, parent, ns, subelts) - - def _fault_to_parent_impl(self, ctx, cls, inst, parent, ns, subelts, **_): - tag_name = "{%s}Fault" % self.ns_soap_env - - if isinstance(inst.faultcode, string_types): - value, faultcodes = self.gen_fault_codes(inst.faultcode) - - code = E("{%s}Code" % self.ns_soap_env) - code.append(E("{%s}Value" % self.ns_soap_env, value)) - - child_subcode = False - for value in faultcodes[::-1]: - if child_subcode: - child_subcode = self.generate_subcode(value, child_subcode) - else: - child_subcode = self.generate_subcode(value) - if child_subcode != 0: - code.append(child_subcode) - - _append(subelts, code) - - if isinstance(inst.detail, dict): - _append(subelts, E('{%s}Detail' % self.ns_soap_env, root_dict_to_etree(inst.detail))) - - elif inst.detail is None: - pass - - else: - raise TypeError('Fault detail Must be dict, got', type(inst.detail)) - - return self.gen_members_parent(ctx, cls, inst, parent, tag_name, - subelts, add_type=False) - - def schema_validation_error_to_parent(self, ctx, cls, inst, parent, ns, **_): - subelts = [ - E("{%s}Reason" % self.soap_env, inst.faultstring), - E("{%s}Role" % self.soap_env, inst.faultactor), - ] - - return self._fault_to_parent_impl(ctx, cls, inst, parent, ns, subelts) - - def fault_from_element(self, ctx, cls, element): - nsmap = element.nsmap - - code = self.generate_faultcode(element) - reason = element.find("soap:Reason/soap:Text", namespaces=nsmap).text.strip() - role = element.find("soap:Role", namespaces=nsmap) - node = element.find("soap:Node", namespaces=nsmap) - detail = element.find("soap:Detail", namespaces=nsmap) - faultactor = '' - if role is not None: - faultactor += role.text.strip() - if node is not None: - faultactor += node.text.strip() - return 
cls(faultcode=code, faultstring=reason, - faultactor=faultactor, detail=detail) diff --git a/libs_crutch/contrib/spyne/protocol/xml.py b/libs_crutch/contrib/spyne/protocol/xml.py deleted file mode 100644 index f993032..0000000 --- a/libs_crutch/contrib/spyne/protocol/xml.py +++ /dev/null @@ -1,1160 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -"""The ``spyne.protocol.xml`` module contains an xml-based protocol that -serializes python objects to xml using Xml Schema conventions. - -Logs valid documents to ``'spyne.protocol.xml'`` and invalid documents to -``spyne.protocol.xml.invalid``. Use the usual ``logging.getLogger()`` and -friends to configure how these get logged. - -Warning! You can get a lot of crap in the 'invalid' logger. You're not advised -to turn it on for a production system. 
-""" - - -import logging -logger = logging.getLogger('spyne.protocol.xml') -logger_invalid = logging.getLogger('spyne.protocol.xml.invalid') - -from inspect import isgenerator -from collections import defaultdict - -from lxml import etree -from lxml import html -from lxml.builder import E -from lxml.etree import XMLSyntaxError -from lxml.etree import XMLParser - -from spyne import BODY_STYLE_WRAPPED - -from spyne.util import Break, coroutine -from spyne.util.six import text_type, string_types -from spyne.util.cdict import cdict -from spyne.util.etreeconv import etree_to_dict, dict_to_etree,\ - root_dict_to_etree -from spyne.const.xml import XSI, NS_SOAP11_ENC - -from spyne.error import Fault -from spyne.error import ValidationError -from spyne.const.ansi_color import LIGHT_GREEN -from spyne.const.ansi_color import LIGHT_RED -from spyne.const.ansi_color import END_COLOR -from spyne.const.xml import NS_SOAP11_ENV -from spyne.const.xml import PREFMAP, DEFAULT_NS - -from spyne.model import Any, ModelBase, Array, Iterable, ComplexModelBase, \ - AnyHtml, AnyXml, AnyDict, Unicode, PushBase, File, ByteArray, XmlData, \ - XmlAttribute -from spyne.model.binary import BINARY_ENCODING_BASE64 -from spyne.model.enum import EnumBase - -from spyne.protocol import ProtocolBase - -from spyne.util import six - -if six.PY2: - STR_TYPES = (str, unicode) -else: - STR_TYPES = (str, bytes) - - -NIL_ATTR = {XSI('nil'): 'true'} -XSI_TYPE = XSI('type') - - -def _append(parent, child_elt): - if hasattr(parent, 'append'): - parent.append(child_elt) - else: - parent.write(child_elt) - - -def _gen_tagname(ns, name): - if ns is not None: - name = "{%s}%s" % (ns, name) - return name - - -class SchemaValidationError(Fault): - """Raised when the input stream could not be validated by the Xml Schema.""" - - CODE = 'Client.SchemaValidationError' - - def __init__(self, faultstring): - super(SchemaValidationError, self).__init__(self.CODE, faultstring) - - -class SubXmlBase(ProtocolBase): - def 
subserialize(self, ctx, cls, inst, parent, ns=None, name=None): - return self.to_parent(ctx, cls, inst, parent, name) - - def to_parent(self, ctx, cls, inst, parent, ns, *args, **kwargs): - """Serializes inst to an Element instance and appends it to the 'parent'. - - :param self: The protocol that will be used to serialize the given - value. - :param cls: The type of the value that's going to determine how to - pack the given value. - :param inst: The value to be set for the 'text' element of the newly - created SubElement - :param parent: The parent Element to which the new child will be - appended. - :param ns: The target namespace of the new SubElement, used with - 'name' to set the tag. - :param name: The tag name of the new SubElement, 'retval' by default. - """ - raise NotImplementedError() - - -class XmlDocument(SubXmlBase): - """The Xml input and output protocol, using the information from the Xml - Schema generated by Spyne types. - - See the following material for more (much much more!) information. - - * http://www.w3.org/TR/xmlschema-0/ - * http://www.w3.org/TR/xmlschema-1/ - * http://www.w3.org/TR/xmlschema-2/ - - Receiving Xml from untrusted sources is a dodgy security dance as the Xml - attack surface is /huge/. - - Spyne's ```lxml.etree.XMLParser``` instance has ```resolve_pis```, - ```load_dtd```, ```resolve_entities```, ```dtd_validation```, - ```huge_tree``` Defaults to ``False`` - - Having ```resolve_entities``` disabled will prevent the 'lxml' validation - for documents with custom xml entities defined in the DTD. See the example - in examples/xml/validation_error to play with the settings that work best - for you. Please note that enabling ```resolve_entities``` is a security - hazard that can lead to disclosure of sensitive information. - - See https://pypi.python.org/pypi/defusedxml for a pragmatic overview of - Xml security in Python world. - - :param app: The owner application instance. 
- - :param validator: One of (None, 'soft', 'lxml', 'schema', - ProtocolBase.SOFT_VALIDATION, XmlDocument.SCHEMA_VALIDATION). - Both ``'lxml'`` and ``'schema'`` values are equivalent to - ``XmlDocument.SCHEMA_VALIDATION``. - - Defaults to ``None``. - - :param replace_null_with_default: If ``False``, does not replace incoming - explicit null values with denoted default values. This is against Xml - Schema standard but consistent with other Spyne protocol - implementations. Set this to False if you want cross-protocol - compatibility. - - Defaults to ``True``. - - Relevant quote from xml schema primer - (http://www.w3.org/TR/xmlschema-0/): - - .. - When a value is present and is null The schema processor treats - defaulted elements slightly differently. When an element is declared - with a default value, the value of the element is whatever value - appears as the element's content in the instance document; if the - element appears without any content, the schema processor provides - the element with a value equal to that of the default attribute. - However, if the element does not appear in the instance document, - the schema processor does not provide the element at all. In - summary, the differences between element and attribute defaults can - be stated as: Default attribute values apply when attributes are - missing, and default element values apply when elements are empty. - - :param xml_declaration: Whether to add xml_declaration to the responses - - Defaults to ``True``. - - :param cleanup_namespaces: Whether to add clean up namespace declarations - in the response document. - - Defaults to ``True``. - - :param encoding: The suggested string encoding for the returned xml - documents. The transport can override this. - - Defaults to ``None``. - - :param pretty_print: When ``True``, returns the document in a pretty-printed - format. - - Defaults to ``False``. 
- - :param parse_xsi_type: Set to ``False`` to disable parsing of ``xsi:type`` - attribute, effectively disabling polymorphism. - - Defaults to ``True``. - - The following are passed straight to the ``XMLParser()`` instance from - lxml. Docs are also plagiarized from the lxml documentation. Please note - that some of the defaults are different to make parsing safer by default. - - :param attribute_defaults: read the DTD (if referenced by the document) and - add the default attributes from it. - - Defaults to ``False`` - - :param dtd_validation: validate while parsing (if a DTD was referenced). - - Defaults to ``False`` - - :param load_dtd: load and parse the DTD while parsing (no validation is - performed). - - Defaults to ``False``. - - :param no_network: prevent network access when looking up external - documents. - - Defaults to ``True``. - - :param ns_clean: try to clean up redundant namespace declarations. - Please note that this is for incoming documents. - See ``cleanup_namespaces`` parameter for output documents. - - Defaults to ``False``. - - :param recover: try hard to parse through broken Xml. - - Defaults to ``False``. - - :param remove_blank_text: discard blank text nodes between tags, also known - as ignorable whitespace. This is best used together with a DTD or schema - (which tells data and noise apart), otherwise a heuristic will be - applied. - - Defaults to ``False``. - - :param remove_pis: When ``True`` xml parser discards processing - instructions. - - Defaults to ``True``. - - :param strip_cdata: replace CDATA sections by normal text content. - - Defaults to ``True`` - - :param resolve_entities: replace entities by their text value. - - Defaults to ``False``. - - :param huge_tree: disable security restrictions and support very deep trees - and very long text content. (only affects libxml2 2.7+) - - Defaults to ``False``. - - :param compact: use compact storage for short text content. - - Defaults to ``True``. 
- - """ - - SCHEMA_VALIDATION = type("Schema", (object,), {}) - - mime_type = 'text/xml' - default_binary_encoding = BINARY_ENCODING_BASE64 - - type = set(ProtocolBase.type) - type.add('xml') - - soap_env = PREFMAP[NS_SOAP11_ENV] - ns_soap_env = NS_SOAP11_ENV - ns_soap_enc = NS_SOAP11_ENC - - def __init__(self, app=None, validator=None, - replace_null_with_default=True, - xml_declaration=True, - cleanup_namespaces=True, encoding=None, pretty_print=False, - attribute_defaults=False, - dtd_validation=False, - load_dtd=False, - no_network=True, - ns_clean=False, - recover=False, - remove_blank_text=False, - remove_pis=True, - strip_cdata=True, - resolve_entities=False, - huge_tree=False, - compact=True, - binary_encoding=None, - parse_xsi_type=True, - polymorphic=False, - ): - - super(XmlDocument, self).__init__(app, validator, - binary_encoding=binary_encoding) - - self.validation_schema = None - self.xml_declaration = xml_declaration - self.cleanup_namespaces = cleanup_namespaces - self.replace_null_with_default = replace_null_with_default - - if encoding is None: - self.encoding = 'UTF-8' - else: - self.encoding = encoding - - self.polymorphic = polymorphic - self.pretty_print = pretty_print - self.parse_xsi_type = parse_xsi_type - - self.serialization_handlers = cdict({ - Any: self.any_to_parent, - Fault: self.fault_to_parent, - EnumBase: self.enum_to_parent, - AnyXml: self.any_xml_to_parent, - XmlData: self.xmldata_to_parent, - AnyDict: self.any_dict_to_parent, - AnyHtml: self.any_html_to_parent, - ModelBase: self.modelbase_to_parent, - ByteArray: self.byte_array_to_parent, - ComplexModelBase: self.complex_to_parent, - XmlAttribute: self.xmlattribute_to_parent, - SchemaValidationError: self.schema_validation_error_to_parent, - }) - - self.deserialization_handlers = cdict({ - AnyHtml: self.html_from_element, - AnyXml: self.xml_from_element, - Any: self.xml_from_element, - Array: self.array_from_element, - Fault: self.fault_from_element, - AnyDict: 
self.dict_from_element, - EnumBase: self.enum_from_element, - ModelBase: self.base_from_element, - Unicode: self.unicode_from_element, - Iterable: self.iterable_from_element, - ByteArray: self.byte_array_from_element, - ComplexModelBase: self.complex_from_element, - }) - - self.parser_kwargs = dict( - attribute_defaults=attribute_defaults, - dtd_validation=dtd_validation, - load_dtd=load_dtd, - no_network=no_network, - ns_clean=ns_clean, - recover=recover, - remove_blank_text=remove_blank_text, - remove_comments=True, - remove_pis=remove_pis, - strip_cdata=strip_cdata, - resolve_entities=resolve_entities, - huge_tree=huge_tree, - compact=compact, - encoding=encoding, - ) - - def set_validator(self, validator): - if validator in ('lxml', 'schema') or \ - validator is self.SCHEMA_VALIDATION: - self.validate_document = self.__validate_lxml - self.validator = self.SCHEMA_VALIDATION - - elif validator == 'soft' or validator is self.SOFT_VALIDATION: - self.validator = self.SOFT_VALIDATION - - elif validator is None: - pass - - else: - raise ValueError(validator) - - self.validation_schema = None - - def validate_body(self, ctx, message): - """Sets ctx.method_request_string and calls :func:`generate_contexts` - for validation.""" - - assert message in (self.REQUEST, self.RESPONSE), message - - line_header = LIGHT_RED + "Error:" + END_COLOR - try: - self.validate_document(ctx.in_body_doc) - if message is self.REQUEST: - line_header = LIGHT_GREEN + "Method request string:" + END_COLOR - else: - line_header = LIGHT_RED + "Response:" + END_COLOR - finally: - if logger.level == logging.DEBUG: - logger.debug("%s %s" % (line_header, ctx.method_request_string)) - logger.debug(etree.tostring(ctx.in_document, pretty_print=True)) - - def set_app(self, value): - ProtocolBase.set_app(self, value) - - self.validation_schema = None - - if self.validator is self.SCHEMA_VALIDATION and value is not None: - from spyne.interface.xml_schema import XmlSchema - - xml_schema = 
XmlSchema(value.interface) - xml_schema.build_validation_schema() - - self.validation_schema = xml_schema.validation_schema - - def __validate_lxml(self, payload): - ret = self.validation_schema.validate(payload) - - logger.debug("Validated ? %r" % ret) - if ret == False: - error_text = text_type(self.validation_schema.error_log.last_error) - raise SchemaValidationError(error_text.encode('ascii', - 'xmlcharrefreplace')) - - def create_in_document(self, ctx, charset=None): - """Uses the iterable of string fragments in ``ctx.in_string`` to set - ``ctx.in_document``.""" - - string = b''.join(ctx.in_string) - try: - try: - ctx.in_document = etree.fromstring(string, - parser=XMLParser(**self.parser_kwargs)) - - except ValueError: - logger.debug('ValueError: Deserializing from unicode strings ' - 'with encoding declaration is not supported by ' - 'lxml.') - ctx.in_document = etree.fromstring(string.decode(charset), - self.parser) - except XMLSyntaxError as e: - logger_invalid.error("%r in string %r", e, string) - raise Fault('Client.XMLSyntaxError', str(e)) - - def decompose_incoming_envelope(self, ctx, message): - assert message in (self.REQUEST, self.RESPONSE) - - ctx.in_header_doc = None # If you need header support, you should use Soap - ctx.in_body_doc = ctx.in_document - ctx.method_request_string = ctx.in_body_doc.tag - self.validate_body(ctx, message) - - def from_element(self, ctx, cls, element): - cls_attrs = self.get_cls_attrs(cls) - - if bool(element.get(XSI('nil'))): - if self.validator is self.SOFT_VALIDATION and not \ - cls_attrs.nillable: - raise ValidationError(None) - - if self.replace_null_with_default: - return cls_attrs.default - - return None - - # if present, use the xsi:type="ns0:ObjectName" - # attribute to instantiate subclass objects - if self.parse_xsi_type: - xsi_type = element.get(XSI_TYPE, None) - if xsi_type is not None: - if ":" in xsi_type: - prefix, objtype = xsi_type.split(':', 1) - else: - prefix, objtype = None, xsi_type - - ns = 
element.nsmap.get(prefix) - if ns is not None: - classkey = "{%s}%s" % (ns, objtype) - - else: - logger.error("xsi:type namespace prefix " - "'%s' in '%s' not found in %r", - ns, xsi_type, element.nsmap) - - raise ValidationError(xsi_type) - - newclass = ctx.app.interface.classes.get(classkey, None) - if newclass is None: - logger.error("xsi:type '%s' interpreted as class key '%s' " - "is not recognized", xsi_type, classkey) - raise ValidationError(xsi_type) - - cls = newclass - logger.debug("xsi:type '%s' overrides %r to %r", xsi_type, - cls, newclass) - - handler = self.deserialization_handlers[cls] - return handler(ctx, cls, element) - - def to_parent(self, ctx, cls, inst, parent, ns, *args, **kwargs): - cls, add_type = self.get_polymorphic_target(cls, inst) - cls_attrs = self.get_cls_attrs(cls) - - subprot = cls_attrs.prot - if subprot is not None and isinstance(subprot, SubXmlBase): - return subprot.subserialize(ctx, cls, inst, parent, ns, - *args, **kwargs) - - handler = self.serialization_handlers[cls] - - if inst is None: - inst = cls_attrs.default - - if inst is None: - return self.null_to_parent(ctx, cls, inst, parent, ns, - *args, **kwargs) - - if cls_attrs.exc: - return - kwargs['add_type'] = add_type - return handler(ctx, cls, inst, parent, ns, *args, **kwargs) - - def deserialize(self, ctx, message): - """Takes a MethodContext instance and a string containing ONE root xml - tag. - - Returns the corresponding native python object. - - Not meant to be overridden. - """ - - assert message in (self.REQUEST, self.RESPONSE) - - self.event_manager.fire_event('before_deserialize', ctx) - - if ctx.descriptor is None: - if ctx.in_error is None: - raise Fault("Client", "Method %r not found." 
% - ctx.method_request_string) - else: - raise ctx.in_error - - if message is self.REQUEST: - body_class = ctx.descriptor.in_message - elif message is self.RESPONSE: - body_class = ctx.descriptor.out_message - - # decode method arguments - if ctx.in_body_doc is None: - ctx.in_object = [None] * len(body_class._type_info) - else: - ctx.in_object = self.from_element(ctx, body_class, ctx.in_body_doc) - - if logger.level == logging.DEBUG and message is self.REQUEST: - line_header = '%sRequest%s' % (LIGHT_GREEN, END_COLOR) - - outdoc_str = None - if ctx.out_document is not None: - outdoc_str = etree.tostring(ctx.out_document, - xml_declaration=self.xml_declaration, pretty_print=True) - - logger.debug("%s %s" % (line_header, outdoc_str)) - - self.event_manager.fire_event('after_deserialize', ctx) - - def serialize(self, ctx, message): - """Uses ``ctx.out_object``, ``ctx.out_header`` or ``ctx.out_error`` to - set ``ctx.out_body_doc``, ``ctx.out_header_doc`` and - ``ctx.out_document`` as an ``lxml.etree._Element instance``. - - Not meant to be overridden. 
- """ - - assert message in (self.REQUEST, self.RESPONSE) - - self.event_manager.fire_event('before_serialize', ctx) - - if ctx.out_error is not None: - tmp_elt = etree.Element('punk') - retval = self.to_parent(ctx, ctx.out_error.__class__, ctx.out_error, - tmp_elt, self.app.interface.get_tns()) - ctx.out_document = tmp_elt[0] - - else: - if message is self.REQUEST: - result_message_class = ctx.descriptor.in_message - elif message is self.RESPONSE: - result_message_class = ctx.descriptor.out_message - - # assign raw result to its wrapper, result_message - if ctx.descriptor.body_style == BODY_STYLE_WRAPPED: - result_inst = result_message_class() - - for i, (k, v) in enumerate( - result_message_class._type_info.items()): - attrs = self.get_cls_attrs(v) - result_inst._safe_set(k, ctx.out_object[i], v, attrs) - - else: - result_inst = ctx.out_object - - if ctx.out_stream is None: - tmp_elt = etree.Element('punk') - retval = self.to_parent(ctx, result_message_class, - result_inst, tmp_elt, self.app.interface.get_tns()) - ctx.out_document = tmp_elt[0] - - else: - retval = self.incgen(ctx, result_message_class, - result_inst, self.app.interface.get_tns()) - - if self.cleanup_namespaces and ctx.out_document is not None: - etree.cleanup_namespaces(ctx.out_document) - - self.event_manager.fire_event('after_serialize', ctx) - - return retval - - def create_out_string(self, ctx, charset=None): - """Sets an iterable of string fragments to ctx.out_string""" - - if charset is None: - charset = self.encoding - - ctx.out_string = [etree.tostring(ctx.out_document, - encoding=charset, - pretty_print=self.pretty_print, - xml_declaration=self.xml_declaration)] - - if logger.level == logging.DEBUG: - logger.debug('%sResponse%s %s' % (LIGHT_RED, END_COLOR, - etree.tostring(ctx.out_document, - pretty_print=True, encoding='UTF-8'))) - - @coroutine - def incgen(self, ctx, cls, inst, ns, name=None): - if name is None: - name = cls.get_type_name() - with etree.xmlfile(ctx.out_stream) as xf: - 
ret = self.to_parent(ctx, cls, inst, xf, ns, name) - if isgenerator(ret): - try: - while True: - y = (yield) # may throw Break - ret.send(y) - - except Break: - try: - ret.throw(Break()) - except StopIteration: - pass - - if hasattr(ctx.out_stream, 'finish'): - ctx.out_stream.finish() - - def _gen_tag(self, cls, ns, name, add_type=False, **_): - if ns is not None: - name = "{%s}%s" % (ns, name) - - retval = E(name) - if add_type: - retval.attrib[XSI_TYPE] = cls.get_type_name_ns(self.app.interface) - - return retval - - def byte_array_to_parent(self, ctx, cls, inst, parent, ns, name='retval', - **kwargs): - elt = self._gen_tag(cls, ns, name, **kwargs) - elt.text = self.to_unicode(cls, inst, self.binary_encoding) - _append(parent, elt) - - def modelbase_to_parent(self, ctx, cls, inst, parent, ns, name='retval', - **kwargs): - elt = self._gen_tag(cls, ns, name, **kwargs) - elt.text = self.to_unicode(cls, inst) - _append(parent, elt) - - def null_to_parent(self, ctx, cls, inst, parent, ns, name='retval', - **kwargs): - if issubclass(cls, XmlAttribute): - return - - elif issubclass(cls, XmlData): - parent.attrib.update(NIL_ATTR) - - else: - elt = self._gen_tag(cls, ns, name, **kwargs) - elt.attrib.update(NIL_ATTR) - _append(parent, elt) - - def null_from_element(self, ctx, cls, element): - return None - - def xmldata_to_parent(self, ctx, cls, inst, parent, ns, name, - add_type=False, **_): - cls_attrs = self.get_cls_attrs(cls) - - ns = cls._ns - if ns is None: - ns = cls_attrs.sub_ns - - name = _gen_tagname(ns, name) - - if add_type: - parent.attrib[XSI_TYPE] = cls.get_type_name_ns(self.app.interface) - - cls.marshall(self, name, inst, parent) - - def xmlattribute_to_parent(self, ctx, cls, inst, parent, ns, name, **_): - ns = cls._ns - cls_attrs = self.get_cls_attrs(cls) - if ns is None: - ns = cls_attrs.sub_ns - - name = _gen_tagname(ns, name) - - if inst is not None: - if issubclass(cls.type, (ByteArray, File)): - parent.set(name, self.to_unicode(cls.type, inst, - 
self.binary_encoding)) - else: - parent.set(name, self.to_unicode(cls.type, inst)) - - @coroutine - def gen_members_parent(self, ctx, cls, inst, parent, tag_name, subelts, - add_type): - attrib = {} - if add_type: - tnn = cls.get_type_name_ns(self.app.interface) - if tnn != None: - attrib[XSI_TYPE] = tnn - else: - # this only happens on incomplete interface states for eg. - # get_object_as_xml where the full init is not performed for - # perf reasons - attrib[XSI_TYPE] = cls.get_type_name() - - if isinstance(parent, etree._Element): - elt = etree.SubElement(parent, tag_name, attrib=attrib) - elt.extend(subelts) - ret = self._get_members_etree(ctx, cls, inst, elt) - - if isgenerator(ret): - try: - while True: - y = (yield) # may throw Break - ret.send(y) - - except Break: - try: - ret.throw(Break()) - except StopIteration: - pass - - else: - with parent.element(tag_name, attrib=attrib): - for e in subelts: - parent.write(e) - ret = self._get_members_etree(ctx, cls, inst, parent) - if isgenerator(ret): - try: - while True: - y = (yield) - ret.send(y) - - except Break: - try: - ret.throw(Break()) - except StopIteration: - pass - - @coroutine - def _get_members_etree(self, ctx, cls, inst, parent): - try: - parent_cls = getattr(cls, '__extends__', None) - if not (parent_cls is None): - ret = self._get_members_etree(ctx, parent_cls, inst, parent) - if ret is not None: - try: - while True: - sv2 = (yield) # may throw Break - ret.send(sv2) - - except Break: - try: - ret.throw(Break()) - except StopIteration: - pass - - for k, v in cls._type_info.items(): - sub_cls_attrs = self.get_cls_attrs(v) - if sub_cls_attrs.exc: - continue - try: - subvalue = getattr(inst, k, None) - except: # e.g. SqlAlchemy could throw NoSuchColumnError - subvalue = None - - # This is a tight loop, so enable this only when necessary. 
- # logger.debug("get %r(%r) from %r: %r" % (k, v, inst, subvalue)) - - sub_ns = v.Attributes.sub_ns - if sub_ns is None: - sub_ns = cls.get_namespace() - - sub_name = v.Attributes.sub_name - if sub_name is None: - sub_name = k - - mo = v.Attributes.max_occurs - if subvalue is not None and mo > 1: - if isinstance(subvalue, PushBase): - while True: - sv = (yield) - ret = self.to_parent(ctx, v, sv, parent, sub_ns, - sub_name) - if ret is not None: - try: - while True: - sv2 = (yield) # may throw Break - ret.send(sv2) - - except Break: - try: - ret.throw(Break()) - except StopIteration: - pass - - else: - for sv in subvalue: - ret = self.to_parent(ctx, v, sv, parent, sub_ns, - sub_name) - - if ret is not None: - try: - while True: - sv2 = (yield) # may throw Break - ret.send(sv2) - - except Break: - try: - ret.throw(Break()) - except StopIteration: - pass - - # Don't include empty values for - # non-nillable optional attributes. - elif subvalue is not None or v.Attributes.min_occurs > 0: - ret = self.to_parent(ctx, v, subvalue, parent, sub_ns, - sub_name) - if ret is not None: - try: - while True: - sv2 = (yield) - ret.send(sv2) - except Break as b: - try: - ret.throw(b) - except StopIteration: - pass - - except Break: - pass - - def complex_to_parent(self, ctx, cls, inst, parent, ns, name=None, - add_type=False, **_): - cls_attrs = self.get_cls_attrs(cls) - - sub_name = cls_attrs.sub_name - if sub_name is not None: - name = sub_name - if name is None: - name = cls.get_type_name() - - sub_ns = cls_attrs.sub_ns - if not sub_ns in (None, DEFAULT_NS): - ns = sub_ns - - tag_name = _gen_tagname(ns, name) - - inst = cls.get_serialization_instance(inst) - - return self.gen_members_parent(ctx, cls, inst, parent, tag_name, [], - add_type) - - def _fault_to_parent_impl(self, ctx, cls, inst, parent, ns, subelts, **_): - tag_name = "{%s}Fault" % self.ns_soap_env - - # Accepting raw lxml objects as detail is DEPRECATED. It's also not - # documented. 
It's kept for backwards-compatibility purposes. - if isinstance(inst.detail, string_types + (etree._Element,)): - _append(subelts, E('detail', inst.detail)) - - elif isinstance(inst.detail, dict): - if len(inst.detail) > 0: - _append(subelts, root_dict_to_etree({'detail':inst.detail})) - - elif inst.detail is None: - pass - - else: - raise TypeError('Fault detail Must be dict, got', type(inst.detail)) - - # add other nonstandard fault subelements with get_members_etree - return self.gen_members_parent(ctx, cls, inst, parent, tag_name, - subelts, add_type=False) - - def fault_to_parent(self, ctx, cls, inst, parent, ns, *args, **kwargs): - subelts = [ - E("faultcode", '%s:%s' % (self.soap_env, inst.faultcode)), - E("faultstring", inst.faultstring), - E("faultactor", inst.faultactor), - ] - - return self._fault_to_parent_impl(ctx, cls, inst, parent, ns, subelts) - - def schema_validation_error_to_parent(self, ctx, cls, inst, parent, ns,**_): - subelts = [ - E("faultcode", '%s:%s' % (self.soap_env, inst.faultcode)), - # HACK: Does anyone know a better way of injecting raw xml entities? 
- E("faultstring", html.fromstring(inst.faultstring).text), - E("faultactor", inst.faultactor), - ] - if inst.detail != None: - _append(subelts, E('detail', inst.detail)) - - # add other nonstandard fault subelements with get_members_etree - return self._fault_to_parent_impl(ctx, cls, inst, parent, ns, subelts) - - def enum_to_parent(self, ctx, cls, inst, parent, ns, name='retval', **kwargs): - self.modelbase_to_parent(ctx, cls, str(inst), parent, ns, name, **kwargs) - - def any_xml_to_parent(self, ctx, cls, inst, parent, ns, name, **_): - if isinstance(inst, STR_TYPES): - inst = etree.fromstring(inst) - - _append(parent, E(_gen_tagname(ns, name), inst)) - - def any_to_parent(self, ctx, cls, inst, parent, ns, name, **_): - _append(parent, E(_gen_tagname(ns, name), inst)) - - def any_html_to_parent(self, ctx, cls, inst, parent, ns, name, **_): - if isinstance(inst, string_types) and len(inst) > 0: - inst = html.fromstring(inst) - - _append(parent, E(_gen_tagname(ns, name), inst)) - - def any_dict_to_parent(self, ctx, cls, inst, parent, ns, name, **_): - elt = E(_gen_tagname(ns, name)) - dict_to_etree(inst, elt) - - _append(parent, elt) - - def complex_from_element(self, ctx, cls, elt): - inst = cls.get_deserialization_instance(ctx) - - flat_type_info = cls.get_flat_type_info(cls) - - # this is for validating cls.Attributes.{min,max}_occurs - frequencies = defaultdict(int) - cls_attrs = self.get_cls_attrs(cls) - - if cls_attrs._xml_tag_body_as is not None: - for xtba_key, xtba_type in cls_attrs._xml_tag_body_as: - xtba_attrs = self.get_cls_attrs(xtba_type.type) - if issubclass(xtba_type.type, (ByteArray, File)): - value = self.from_unicode(xtba_type.type, elt.text, - self.binary_encoding) - else: - value = self.from_unicode(xtba_type.type, elt.text) - - inst._safe_set(xtba_key, value, xtba_type.type, xtba_attrs) - - # parse input to set incoming data to related attributes. 
- for c in elt: - if isinstance(c, etree._Comment): - continue - - key = c.tag.split('}', 1)[-1] - frequencies[key] += 1 - - member = flat_type_info.get(key, None) - if member is None: - member, key = cls._type_info_alt.get(key, (None, key)) - if member is None: - member, key = cls._type_info_alt.get(c.tag, (None, key)) - if member is None: - continue - - member_attrs = self.get_cls_attrs(member) - mo = member_attrs.max_occurs - if mo > 1: - value = getattr(inst, key, None) - if value is None: - value = [] - - value.append(self.from_element(ctx, member, c)) - - else: - value = self.from_element(ctx, member, c) - - inst._safe_set(key, value, member, member_attrs) - - for key, value_str in c.attrib.items(): - submember = flat_type_info.get(key, None) - - if submember is None: - submember, key = cls._type_info_alt.get(key, (None, key)) - if submember is None: - continue - - submember_attrs = self.get_cls_attrs(submember) - mo = submember_attrs.max_occurs - if mo > 1: - value = getattr(inst, key, None) - if value is None: - value = [] - - value.append(self.from_unicode(submember.type, value_str)) - - else: - value = self.from_unicode(submember.type, value_str) - - inst._safe_set(key, value, submember.type, submember_attrs) - - for key, value_str in elt.attrib.items(): - member = flat_type_info.get(key, None) - if member is None: - member, key = cls._type_info_alt.get(key, (None, key)) - if member is None: - continue - - if not issubclass(member, XmlAttribute): - continue - - if issubclass(member.type, (ByteArray, File)): - value = self.from_unicode(member.type, value_str, - self.binary_encoding) - else: - value = self.from_unicode(member.type, value_str) - - member_attrs = self.get_cls_attrs(member.type) - inst._safe_set(key, value, member.type, member_attrs) - - if self.validator is self.SOFT_VALIDATION: - for key, c in flat_type_info.items(): - val = frequencies.get(key, 0) - attr = self.get_cls_attrs(c) - if val < attr.min_occurs or val > attr.max_occurs: - raise 
Fault('Client.ValidationError', '%r member does not ' - 'respect frequency constraints.' % key) - - return inst - - def array_from_element(self, ctx, cls, element): - retval = [ ] - (serializer,) = cls._type_info.values() - - for child in element.getchildren(): - retval.append(self.from_element(ctx, serializer, child)) - - return retval - - def iterable_from_element(self, ctx, cls, element): - (serializer,) = cls._type_info.values() - - for child in element.getchildren(): - yield self.from_element(ctx, serializer, child) - - def enum_from_element(self, ctx, cls, element): - if self.validator is self.SOFT_VALIDATION and not ( - cls.validate_string(cls, element.text)): - raise ValidationError(element.text) - return getattr(cls, element.text) - - def fault_from_element(self, ctx, cls, element): - code = element.find('faultcode').text - string = element.find('faultstring').text - factor = element.find('faultactor') - if factor is not None: - factor = factor.text - detail = element.find('detail') - - return cls(faultcode=code, faultstring=string, faultactor=factor, - detail=detail) - - def xml_from_element(self, ctx, cls, element): - children = element.getchildren() - retval = None - - if children: - retval = element.getchildren()[0] - - return retval - - def html_from_element(self, ctx, cls, element): - children = element.getchildren() - retval = None - - if len(children) == 1: - retval = children[0] - # this is actually a workaround to a case that should never exist -- - # anyXml types should only have one child tag. 
- elif len(children) > 1: - retval = E.html(*children) - - return retval - - def dict_from_element(self, ctx, cls, element): - children = element.getchildren() - if children: - return etree_to_dict(element) - - return None - - def unicode_from_element(self, ctx, cls, element): - if self.validator is self.SOFT_VALIDATION and not ( - cls.validate_string(cls, element.text)): - raise ValidationError(element.text) - - s = element.text - if s is None: - s = '' - - retval = self.from_unicode(cls, s) - - if self.validator is self.SOFT_VALIDATION and not ( - cls.validate_native(cls, retval)): - raise ValidationError(retval) - - return retval - - def base_from_element(self, ctx, cls, element): - if self.validator is self.SOFT_VALIDATION and not ( - cls.validate_string(cls, element.text)): - raise ValidationError(element.text) - - retval = self.from_unicode(cls, element.text) - - if self.validator is self.SOFT_VALIDATION and not ( - cls.validate_native(cls, retval)): - raise ValidationError(retval) - - return retval - - def byte_array_from_element(self, ctx, cls, element): - if self.validator is self.SOFT_VALIDATION and not ( - cls.validate_string(cls, element.text)): - raise ValidationError(element.text) - - retval = self.from_unicode(cls, element.text, self.binary_encoding) - - if self.validator is self.SOFT_VALIDATION and not ( - cls.validate_native(cls, retval)): - raise ValidationError(retval) - - return retval diff --git a/libs_crutch/contrib/spyne/protocol/yaml.py b/libs_crutch/contrib/spyne/protocol/yaml.py deleted file mode 100644 index 1a9a46a..0000000 --- a/libs_crutch/contrib/spyne/protocol/yaml.py +++ /dev/null @@ -1,187 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.protocol.yaml`` package contains the Yaml-related protocols. -Currently, only :class:`spyne.protocol.yaml.YamlDocument` is supported. - -Initially released in 2.10.0-rc. -""" - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -from spyne import ValidationError -from spyne.util import six -from spyne.model.binary import BINARY_ENCODING_BASE64 -from spyne.model.primitive import Boolean -from spyne.model.primitive import Integer -from spyne.model.primitive import Double -from spyne.model.fault import Fault -from spyne.protocol.dictdoc import HierDictDocument - -import yaml - -from yaml.parser import ParserError -try: - from yaml import CLoader as Loader - from yaml import CDumper as Dumper - from yaml import CSafeLoader as SafeLoader - from yaml import CSafeDumper as SafeDumper - -except ImportError: - from yaml import Loader - from yaml import Dumper - from yaml import SafeLoader - from yaml import SafeDumper - - -NON_NUMBER_TYPES = tuple({list, dict, six.text_type, six.binary_type}) - - -class YamlDocument(HierDictDocument): - """An implementation of the Yaml protocol that uses the PyYaml package. - See ProtocolBase ctor docstring for its arguments. Yaml-specific arguments - follow: - - :param safe: Use ``safe_dump`` instead of ``dump`` and ``safe_load`` instead - of ``load``. 
This is not a security feature, search for 'safe_dump' in - http://www.pyyaml.org/wiki/PyYAMLDocumentation - :param kwargs: See the yaml documentation in ``load, ``safe_load``, ``dump`` - or ``safe_dump`` depending on whether you use yaml as an input or output - protocol. - - For the output case, Spyne sets ``default_flow_style=False`` and - ``indent=4`` by default. - """ - - mime_type = 'text/yaml' - - type = set(HierDictDocument.type) - type.add('yaml') - - text_based = True - - default_binary_encoding = BINARY_ENCODING_BASE64 - - # for test classes - _decimal_as_string = True - - def __init__(self, app=None, validator=None, mime_type=None, - ignore_uncap=False, - # DictDocument specific - ignore_wrappers=True, - complex_as=dict, - ordered=False, - polymorphic=False, - # YamlDocument specific - safe=True, - encoding='UTF-8', - allow_unicode=True, - **kwargs): - - super(YamlDocument, self).__init__(app, validator, mime_type, - ignore_uncap, ignore_wrappers, complex_as, ordered, polymorphic) - - self._from_unicode_handlers[Double] = self._ret_number - self._from_unicode_handlers[Boolean] = self._ret_bool - self._from_unicode_handlers[Integer] = self._ret_number - - self._to_unicode_handlers[Double] = self._ret - self._to_unicode_handlers[Boolean] = self._ret - self._to_unicode_handlers[Integer] = self._ret - - loader = Loader - dumper = Dumper - if safe: - loader = SafeLoader - dumper = SafeDumper - - self.in_kwargs = dict(kwargs) - self.out_kwargs = dict(kwargs) - - self.in_kwargs['Loader'] = loader - self.out_kwargs['Dumper'] = dumper - - loader.add_constructor('tag:yaml.org,2002:python/unicode', - _unicode_loader) - - self.out_kwargs['encoding'] = encoding - self.out_kwargs['allow_unicode'] = allow_unicode - - if not 'indent' in self.out_kwargs: - self.out_kwargs['indent'] = 4 - - if not 'default_flow_style' in self.out_kwargs: - self.out_kwargs['default_flow_style'] = False - - def _ret(self, _, value): - return value - - def _ret_number(self, _, value): - if 
isinstance(value, NON_NUMBER_TYPES): - raise ValidationError(value) - if value in (True, False): - return int(value) - return value - - def _ret_bool(self, _, value): - if value is None or value in (True, False): - return value - raise ValidationError(value) - - def create_in_document(self, ctx, in_string_encoding=None): - """Sets ``ctx.in_document`` using ``ctx.in_string``.""" - - if in_string_encoding is None: - in_string_encoding = 'UTF-8' - - try: - try: - s = b''.join(ctx.in_string).decode(in_string_encoding) - except TypeError: - s = ''.join(ctx.in_string) - - ctx.in_document = yaml.load(s, **self.in_kwargs) - - except ParserError as e: - raise Fault('Client.YamlDecodeError', repr(e)) - - def create_out_string(self, ctx, out_string_encoding='utf8'): - """Sets ``ctx.out_string`` using ``ctx.out_document``.""" - - ctx.out_string = (yaml.dump(o, **self.out_kwargs) - for o in ctx.out_document) - if six.PY2 and out_string_encoding is not None: - ctx.out_string = ( - yaml.dump(o, **self.out_kwargs).encode(out_string_encoding) - for o in ctx.out_document) - - -def _unicode_loader(loader, node): - return node.value - - -def _decimal_to_bytes(): - pass - - -def _decimal_from_bytes(): - pass diff --git a/libs_crutch/contrib/spyne/server/__init__.py b/libs_crutch/contrib/spyne/server/__init__.py deleted file mode 100644 index 5ab6584..0000000 --- a/libs_crutch/contrib/spyne/server/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.server`` package contains the server transports.""" - -from spyne.server._base import ServerBase -from spyne.server.null import NullServer diff --git a/libs_crutch/contrib/spyne/server/_base.py b/libs_crutch/contrib/spyne/server/_base.py deleted file mode 100644 index 5f27140..0000000 --- a/libs_crutch/contrib/spyne/server/_base.py +++ /dev/null @@ -1,260 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logger = logging.getLogger(__name__) - -from inspect import isgenerator - -from spyne import EventManager -from spyne.auxproc import process_contexts -from spyne.interface import AllYourInterfaceDocuments -from spyne.model import Fault, PushBase -from spyne.protocol import ProtocolBase -from spyne.util import Break, coroutine - - -class ServerBase(object): - """This class is the abstract base class for all server transport - implementations. 
Unlike the client transports, this class does not define - a pure-virtual method that needs to be implemented by all base classes. - - If there needs to be a call to start the main loop, it's called - ``serve_forever()`` by convention. - """ - - transport = None - """The transport type, which is a URI string to its definition by - convention.""" - - def __init__(self, app): - self.app = app - self.app.transport = self.transport # FIXME: this is weird - self.appinit() - - self.event_manager = EventManager(self) - self.doc = AllYourInterfaceDocuments(app.interface) - - def appinit(self): - self.app.reinitialize(self) - - def generate_contexts(self, ctx, in_string_charset=None): - """Calls create_in_document and decompose_incoming_envelope to get - method_request string in order to generate contexts. - """ - try: - # sets ctx.in_document - self.app.in_protocol.create_in_document(ctx, in_string_charset) - - # sets ctx.in_body_doc, ctx.in_header_doc and - # ctx.method_request_string - self.app.in_protocol.decompose_incoming_envelope(ctx, - ProtocolBase.REQUEST) - - # returns a list of contexts. multiple contexts can be returned - # when the requested method also has bound auxiliary methods. 
- retval = self.app.in_protocol.generate_method_contexts(ctx) - - except Fault as e: - ctx.in_object = None - ctx.in_error = e - ctx.out_error = e - - retval = (ctx,) - - ctx.fire_event('method_exception_object') - - return retval - - def get_in_object(self, ctx): - """Uses the ``ctx.in_string`` to set ``ctx.in_body_doc``, which in turn - is used to set ``ctx.in_object``.""" - - try: - # sets ctx.in_object and ctx.in_header - self.app.in_protocol.deserialize(ctx, - message=self.app.in_protocol.REQUEST) - - except Fault as e: - logger.exception(e) - logger.debug("Failed document is: %s", ctx.in_document) - - ctx.in_object = None - ctx.in_error = e - ctx.out_error = e - - ctx.fire_event('method_exception_object') - - def get_out_object(self, ctx): - """Calls the matched user function by passing it the ``ctx.in_object`` - to set ``ctx.out_object``.""" - - if ctx.in_error is None: - # event firing is done in the spyne.application.Application - self.app.process_request(ctx) - else: - raise ctx.in_error - - def convert_pull_to_push(self, ctx, gen): - oobj, = ctx.out_object - if oobj is None: - gen.throw(Break()) - - elif isinstance(oobj, PushBase): - pass - - elif len(ctx.pusher_stack) > 0: - oobj = ctx.pusher_stack[-1] - assert isinstance(oobj, PushBase) - - else: - raise ValueError("%r is not a PushBase instance" % oobj) - - retval = self.init_interim_push(oobj, ctx, gen) - return self.pusher_try_close(ctx, oobj, retval) - - def get_out_string_pull(self, ctx): - """Uses the ``ctx.out_object`` to set ``ctx.out_document`` and later - ``ctx.out_string``.""" - - # This means the user wanted to override the way Spyne generates the - # outgoing byte stream. So we leave it alone. 
- if ctx.out_string is not None: - return - - if ctx.out_document is None: - ret = ctx.out_protocol.serialize(ctx, message=ProtocolBase.RESPONSE) - - if isgenerator(ret) and ctx.out_object is not None and \ - len(ctx.out_object) == 1: - if len(ctx.pusher_stack) > 0: - # we suspend request processing here because there now - # seems to be a PushBase waiting for input. - return self.convert_pull_to_push(ctx, ret) - - self.finalize_context(ctx) - - def finalize_context(self, ctx): - if ctx.out_error is None: - ctx.fire_event('method_return_document') - else: - ctx.fire_event('method_exception_document') - - ctx.out_protocol.create_out_string(ctx) - - if ctx.out_error is None: - ctx.fire_event('method_return_string') - else: - ctx.fire_event('method_exception_string') - - if ctx.out_string is None: - ctx.out_string = (b'',) - - # for backwards compatibility - get_out_string = get_out_string_pull - - @coroutine - def get_out_string_push(self, ctx): - """Uses the ``ctx.out_object`` to directly set ``ctx.out_string``.""" - - ret = ctx.out_protocol.serialize(ctx, message=ProtocolBase.RESPONSE) - if isgenerator(ret): - try: - while True: - y = (yield) - ret.send(y) - - except Break: - try: - ret.throw(Break()) - except StopIteration: - pass - - self.finalize_context(ctx) - - def serve_forever(self): - """Implement your event loop here, if needed.""" - - raise NotImplementedError() - - def init_interim_push(self, ret, p_ctx, gen): - assert isinstance(ret, PushBase) - assert p_ctx.out_stream is not None - - # we don't add interim pushers to the stack because we don't know - # where to find them in the out_object's hierarchy. whatever finds - # one in the serialization pipeline has to push it to pusher_stack so - # the machinery in ServerBase can initialize them using this function. 
- - # fire events - p_ctx.fire_event('method_return_push') - - def _cb_push_finish(): - process_contexts(self, (), p_ctx) - - return self.pusher_init(p_ctx, gen, _cb_push_finish, ret, interim=True) - - def pusher_init(self, p_ctx, gen, _cb_push_finish, pusher, interim): - return pusher.init(p_ctx, gen, _cb_push_finish, None, interim) - - def pusher_try_close(self, ctx, pusher, _): - logger.debug("Closing pusher with ret=%r", pusher) - - pusher.close() - - popped = ctx.pusher_stack.pop() - assert popped is pusher - - def init_root_push(self, ret, p_ctx, others): - assert isinstance(ret, PushBase) - - if ret in p_ctx.pusher_stack: - logger.warning('PushBase reinit avoided.') - return - - p_ctx.pusher_stack.append(ret) - - # fire events - p_ctx.fire_event('method_return_push') - - # start push serialization - gen = self.get_out_string_push(p_ctx) - - assert isgenerator(gen), "It looks like this protocol is not " \ - "async-compliant yet." - - def _cb_push_finish(): - process_contexts(self, others, p_ctx) - - retval = self.pusher_init(p_ctx, gen, _cb_push_finish, ret, - interim=False) - - self.pusher_try_close(p_ctx, ret, retval) - - return retval - - @staticmethod - def set_out_document_push(ctx): - ctx.out_document = _write() - ctx.out_document.send(None) - - -def _write(): - v = yield - yield v diff --git a/libs_crutch/contrib/spyne/server/django.py b/libs_crutch/contrib/spyne/server/django.py deleted file mode 100644 index a611057..0000000 --- a/libs_crutch/contrib/spyne/server/django.py +++ /dev/null @@ -1,390 +0,0 @@ -# encoding: utf-8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.server.django`` module contains a Django-compatible Http -transport. It's a thin wrapper around -:class:`spyne.server.wsgi.WsgiApplication`. -""" - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -from functools import update_wrapper - -from spyne import Address -from spyne.application import get_fault_string_from_exception, Application -from spyne.auxproc import process_contexts -from spyne.interface import AllYourInterfaceDocuments -from spyne.model.fault import Fault -from spyne.protocol.soap import Soap11 -from spyne.protocol.http import HttpRpc -from spyne.server.http import HttpBase, HttpMethodContext, HttpTransportContext -from spyne.server.wsgi import WsgiApplication -from spyne.util import _bytes_join -from spyne.util.address import address_parser - -from django.http import HttpResponse, HttpResponseNotAllowed, Http404 -from django.views.decorators.csrf import csrf_exempt - -try: - from django.http import StreamingHttpResponse -except ImportError as _import_error: - _local_import_error = _import_error - def StreamingHttpResponse(*args, **kwargs): - raise _local_import_error - - -class DjangoApplication(WsgiApplication): - """You should use this for regular RPC.""" - - HttpResponseObject = HttpResponse - - # noinspection PyMethodOverriding - # because this is VERY similar to a Wsgi app - # but not that much. 
- def __call__(self, request): - retval = self.HttpResponseObject() - - def start_response(status, headers): - # Status is one of spyne.const.http - status, reason = status.split(' ', 1) - - retval.status_code = int(status) - for header, value in headers: - retval[header] = value - - environ = request.META.copy() - - # FIXME: No idea what these two did. - # They were commented out to fix compatibility issues with - # Django-1.2.x - # See http://github.com/arskom/spyne/issues/222. - - # If you don't override wsgi.input django and spyne will read - # the same buffer twice. If django read whole buffer spyne - # would hang waiting for extra request data. Use DjangoServer instead - # of monkeypatching wsgi.inpu. - - #environ['wsgi.input'] = request - #environ['wsgi.multithread'] = False - - response = WsgiApplication.__call__(self, environ, start_response) - self.set_response(retval, response) - - return retval - - def set_response(self, retval, response): - retval.content = _bytes_join(response, b"") - - -class StreamingDjangoApplication(DjangoApplication): - """You should use this when you're generating HUGE data as response. - - New in Django 1.5. 
- """ - - HttpResponseObject = StreamingHttpResponse - - def set_response(self, retval, response): - retval.streaming_content = response - - -class DjangoHttpTransportContext(HttpTransportContext): - def get_path(self): - return self.req.path - - def get_request_method(self): - return self.req.method - - def get_request_content_type(self): - return self.req.META['CONTENT_TYPE'] - - def get_path_and_qs(self): - return self.req.get_full_path() - - def get_cookie(self, key): - return self.req.COOKIES[key] - - def get_peer(self): - addr, port = address_parser.get_ip(self.req.META),\ - address_parser.get_port(self.req.META) - - if address_parser.is_valid_ipv4(addr, port): - return Address(type=Address.TCP4, host=addr, port=port) - - if address_parser.is_valid_ipv6(addr, port): - return Address(type=Address.TCP6, host=addr, port=port) - - -class DjangoHttpMethodContext(HttpMethodContext): - HttpTransportContext = DjangoHttpTransportContext - - -class DjangoServer(HttpBase): - """Server talking in Django request/response objects.""" - - def __init__(self, app, chunked=False, cache_wsdl=True): - super(DjangoServer, self).__init__(app, chunked=chunked) - self._wsdl = None - self._cache_wsdl = cache_wsdl - - def handle_rpc(self, request, *args, **kwargs): - """Handle rpc request. - - :params request: Django HttpRequest instance. - :returns: HttpResponse instance. 
- - """ - contexts = self.get_contexts(request) - p_ctx, others = contexts[0], contexts[1:] - - # TODO: Rate limiting - p_ctx.active = True - - if p_ctx.in_error: - return self.handle_error(p_ctx, others, p_ctx.in_error) - - self.get_in_object(p_ctx) - if p_ctx.in_error: - logger.error(p_ctx.in_error) - return self.handle_error(p_ctx, others, p_ctx.in_error) - - self.get_out_object(p_ctx) - if p_ctx.out_error: - return self.handle_error(p_ctx, others, p_ctx.out_error) - - try: - self.get_out_string(p_ctx) - - except Exception as e: - logger.exception(e) - p_ctx.out_error = Fault('Server', - get_fault_string_from_exception(e)) - return self.handle_error(p_ctx, others, p_ctx.out_error) - - have_protocol_headers = (isinstance(p_ctx.out_protocol, HttpRpc) and - p_ctx.out_header_doc is not None) - - if have_protocol_headers: - p_ctx.transport.resp_headers.update(p_ctx.out_header_doc) - - if p_ctx.descriptor and p_ctx.descriptor.mtom: - raise NotImplementedError - - if self.chunked: - response = StreamingHttpResponse(p_ctx.out_string) - else: - response = HttpResponse(b''.join(p_ctx.out_string)) - - return self.response(response, p_ctx, others) - - def handle_wsdl(self, request, *args, **kwargs): - """Return services WSDL.""" - ctx = HttpMethodContext(self, request, - 'text/xml; charset=utf-8') - - if self.doc.wsdl11 is None: - raise Http404('WSDL is not available') - - if self._wsdl is None: - # Interface document building is not thread safe so we don't use - # server interface document shared between threads. Instead we - # create and build interface documents in current thread. This - # section can be safely repeated in another concurrent thread. 
- doc = AllYourInterfaceDocuments(self.app.interface) - doc.wsdl11.build_interface_document(request.build_absolute_uri()) - wsdl = doc.wsdl11.get_interface_document() - - if self._cache_wsdl: - self._wsdl = wsdl - else: - wsdl = self._wsdl - - ctx.transport.wsdl = wsdl - - response = HttpResponse(ctx.transport.wsdl) - return self.response(response, ctx, ()) - - def handle_error(self, p_ctx, others, error): - """Serialize errors to an iterable of strings and return them. - - :param p_ctx: Primary (non-aux) context. - :param others: List if auxiliary contexts (can be empty). - :param error: One of ctx.{in,out}_error. - """ - - if p_ctx.transport.resp_code is None: - p_ctx.transport.resp_code = \ - p_ctx.out_protocol.fault_to_http_response_code(error) - - self.get_out_string(p_ctx) - resp = HttpResponse(b''.join(p_ctx.out_string)) - return self.response(resp, p_ctx, others, error) - - def get_contexts(self, request): - """Generate contexts for rpc request. - - :param request: Django HttpRequest instance. - :returns: generated contexts - """ - - initial_ctx = DjangoHttpMethodContext(self, request, - self.app.out_protocol.mime_type) - - initial_ctx.in_string = [request.body] - return self.generate_contexts(initial_ctx) - - def response(self, response, p_ctx, others, error=None): - """Populate response with transport headers and finalize it. - - :param response: Django HttpResponse. - :param p_ctx: Primary (non-aux) context. - :param others: List if auxiliary contexts (can be empty). - :param error: One of ctx.{in,out}_error. - :returns: Django HttpResponse - - """ - for h, v in p_ctx.transport.resp_headers.items(): - if v is not None: - response[h] = v - - if p_ctx.transport.resp_code: - response.status_code = int(p_ctx.transport.resp_code[:3]) - - try: - process_contexts(self, others, p_ctx, error=error) - except Exception as e: - # Report but ignore any exceptions from auxiliary methods. 
- logger.exception(e) - - p_ctx.close() - - return response - - -class DjangoView(object): - """Represent spyne service as Django class based view.""" - - application = None - server = None - services = () - tns = 'spyne.application' - name = 'Application' - in_protocol = Soap11(validator='lxml') - out_protocol = Soap11() - interface = None - chunked = False - cache_wsdl = True - - http_method_names = ['get', 'post', 'put', 'patch', 'delete', 'head', - 'options', 'trace'] - - def __init__(self, server, **kwargs): - self.server = server - - for key, value in kwargs.items(): - setattr(self, key, value) - - @classmethod - def as_view(cls, **initkwargs): - """Register application, server and create new view. - - :returns: callable view function - """ - - # sanitize keyword arguments - for key in initkwargs: - if key in cls.http_method_names: - raise TypeError("You tried to pass in the %s method name as a " - "keyword argument to %s(). Don't do that." - % (key, cls.__name__)) - if not hasattr(cls, key): - raise TypeError("%s() received an invalid keyword %r. as_view " - "only accepts arguments that are already " - "attributes of the class." 
% (cls.__name__, - key)) - - def get(key): - value = initkwargs.get(key) - return value if value is not None else getattr(cls, key) - - application = get('application') or Application( - services=get('services'), - tns=get('tns'), - name=get('name'), - in_protocol=get('in_protocol'), - out_protocol=get('out_protocol'), - ) - server = get('server') or DjangoServer(application, - chunked=get('chunked'), - cache_wsdl=get('cache_wsdl')) - - def view(request, *args, **kwargs): - self = cls(server=server, **initkwargs) - if hasattr(self, 'get') and not hasattr(self, 'head'): - self.head = self.get - self.request = request - self.args = args - self.kwargs = kwargs - return self.dispatch(request, *args, **kwargs) - - # take name and docstring from class - update_wrapper(view, cls, updated=()) - - # and possible attributes set by decorators - # like csrf_exempt from dispatch - update_wrapper(view, cls.dispatch, assigned=()) - return view - - @csrf_exempt - def dispatch(self, request, *args, **kwargs): - # Try to dispatch to the right method; if a method doesn't exist, - # defer to the error handler. Also defer to the error handler if the - # request method isn't on the approved list. 
- if request.method.lower() in self.http_method_names: - handler = getattr(self, request.method.lower(), - self.http_method_not_allowed) - else: - handler = self.http_method_not_allowed - return handler(request, *args, **kwargs) - - def get(self, request, *args, **kwargs): - return self.server.handle_wsdl(request, *args, **kwargs) - - def post(self, request, *args, **kwargs): - return self.server.handle_rpc(request, *args, **kwargs) - - def http_method_not_allowed(self, request, *args, **kwargs): - logger.warning('Method Not Allowed (%s): %s', request.method, - request.path, extra={'status_code': 405, 'request': - self.request}) - return HttpResponseNotAllowed(self._allowed_methods()) - - def options(self, request, *args, **kwargs): - """Handle responding to requests for the OPTIONS HTTP verb.""" - - response = HttpResponse() - response['Allow'] = ', '.join(self._allowed_methods()) - response['Content-Length'] = '0' - return response - - def _allowed_methods(self): - return [m.upper() for m in self.http_method_names if hasattr(self, m)] diff --git a/libs_crutch/contrib/spyne/server/http.py b/libs_crutch/contrib/spyne/server/http.py deleted file mode 100644 index 6c621b2..0000000 --- a/libs_crutch/contrib/spyne/server/http.py +++ /dev/null @@ -1,326 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from collections import defaultdict - -from email import utils -from email.utils import encode_rfc2231 -from email.message import tspecials - -from spyne import TransportContext, MethodDescriptor, MethodContext, Redirect -from spyne.server import ServerBase -from spyne.protocol.http import HttpPattern -from spyne.const.http import gen_body_redirect, HTTP_301, HTTP_302, HTTP_303, \ - HTTP_307 - - -class HttpRedirect(Redirect): - def __init__(self, ctx, location, orig_exc=None, code=HTTP_302): - super(HttpRedirect, self) \ - .__init__(ctx, location, orig_exc=orig_exc) - - self.ctx = ctx - self.location = location - self.orig_exc = orig_exc - self.code = code - - def do_redirect(self): - if not isinstance(self.ctx.transport, HttpTransportContext): - if self.orig_exc is not None: - raise self.orig_exc - raise TypeError(self.ctx.transport) - - self.ctx.transport.respond(self.code, location=self.location) - -# -# Plagiarized HttpTransport.add_header() and _formatparam() function from -# Python 2.7 stdlib. -# -# Copyright (C) 2001-2007 Python Software Foundation -# Author: Barry Warsaw -# Contact: email-sig@python.org -# -def _formatparam(param, value=None, quote=True): - """Convenience function to format and return a key=value pair. - - This will quote the value if needed or if quote is true. If value is a - three tuple (charset, language, value), it will be encoded according - to RFC2231 rules. If it contains non-ascii characters it will likewise - be encoded according to RFC2231 rules, using the utf-8 charset and - a null language. - """ - if value is None or len(value) == 0: - return param - - # A tuple is used for RFC 2231 encoded parameter values where items - # are (charset, language, value). charset is a string, not a Charset - # instance. 
RFC 2231 encoded values are never quoted, per RFC. - if isinstance(value, tuple): - # Encode as per RFC 2231 - param += '*' - value = encode_rfc2231(value[2], value[0], value[1]) - return '%s=%s' % (param, value) - - try: - value.encode('ascii') - - except UnicodeEncodeError: - param += '*' - value = encode_rfc2231(value, 'utf-8', '') - return '%s=%s' % (param, value) - - # BAW: Please check this. I think that if quote is set it should - # force quoting even if not necessary. - if quote or tspecials.search(value): - return '%s="%s"' % (param, utils.quote(value)) - - return '%s=%s' % (param, value) - - -class HttpTransportContext(TransportContext): - """The abstract base class that is used in the transport attribute of the - :class:`HttpMethodContext` class and its subclasses.""" - - def __init__(self, parent, transport, request, content_type): - super(HttpTransportContext, self).__init__(parent, transport, 'http') - - self.req = request - """HTTP Request. This is transport-specific""" - - self.resp_headers = {} - """HTTP Response headers.""" - - self.mime_type = content_type - - self.resp_code = None - """HTTP Response code.""" - - self.wsdl = None - """The WSDL document that is being returned. Only relevant when handling - WSDL requests.""" - - self.wsdl_error = None - """The error when handling WSDL requests.""" - - def get_mime_type(self): - return self.resp_headers.get('Content-Type', None) - - def set_mime_type(self, what): - self.resp_headers['Content-Type'] = what - - def respond(self, resp_code, **kwargs): - self.resp_code = resp_code - if resp_code in (HTTP_301, HTTP_302, HTTP_303, HTTP_307): - l = kwargs.pop('location') - self.resp_headers['Location'] = l - self.parent.out_string = [gen_body_redirect(resp_code, l)] - self.mime_type = 'text/html' - - else: - # So that deserialization is skipped. 
- self.parent.out_string = [] - - def get_path(self): - raise NotImplementedError() - - def get_request_method(self): - raise NotImplementedError() - - def get_request_content_type(self): - raise NotImplementedError() - - def get_path_and_qs(self): - raise NotImplementedError() - - def get_cookie(self, key): - raise NotImplementedError() - - def get_peer(self): - raise NotImplementedError() - - @staticmethod - def gen_header(_value, **kwargs): - parts = [] - - for k, v in kwargs.items(): - if v is None: - parts.append(k.replace('_', '-')) - - else: - parts.append(_formatparam(k.replace('_', '-'), v)) - - if _value is not None: - parts.insert(0, _value) - - return '; '.join(parts) - - def add_header(self, _name, _value, **kwargs): - """Extended header setting. - - name is the header field to add. keyword arguments can be used to set - additional parameters for the header field, with underscores converted - to dashes. Normally the parameter will be added as key="value" unless - value is None, in which case only the key will be added. If a - parameter value contains non-ASCII characters it can be specified as a - three-tuple of (charset, language, value), in which case it will be - encoded according to RFC2231 rules. Otherwise it will be encoded using - the utf-8 charset and a language of ''. - - Examples: - - msg.add_header('content-disposition', 'attachment', filename='bud.gif') - msg.add_header('content-disposition', 'attachment', - filename=('utf-8', '', Fußballer.ppt')) - msg.add_header('content-disposition', 'attachment', - filename='Fußballer.ppt')) - """ - - self.resp_headers[_name] = self.gen_header(_value, **kwargs) - - mime_type = property( - lambda self: self.get_mime_type(), - lambda self, what: self.set_mime_type(what), - ) - """Provides an easy way to set outgoing mime type. Synonym for - `content_type`""" - - content_type = mime_type - """Provides an easy way to set outgoing mime type. 
Synonym for - `mime_type`""" - - -class HttpMethodContext(MethodContext): - """The Http-Specific method context. Http-Specific information is stored in - the transport attribute using the :class:`HttpTransportContext` class. - """ - - # because ctor signatures differ between TransportContext and - # HttpTransportContext, we needed a new variable - TransportContext = None - HttpTransportContext = HttpTransportContext - - def __init__(self, transport, req_env, content_type): - super(HttpMethodContext, self).__init__(transport, MethodContext.SERVER) - - self.transport = self.HttpTransportContext(self, transport, - req_env, content_type) - """Holds the HTTP-specific information""" - - def set_out_protocol(self, what): - self._out_protocol = what - if self._out_protocol.app is None: - self._out_protocol.set_app(self.app) - if isinstance(self.transport, HttpTransportContext): - self.transport.set_mime_type(what.mime_type) - - out_protocol = property(MethodContext.get_out_protocol, set_out_protocol) - """Assigning an out protocol overrides the mime type of the transport.""" - - -class HttpBase(ServerBase): - transport = 'http://schemas.xmlsoap.org/soap/http' - - def __init__(self, app, chunked=False, - max_content_length=2 * 1024 * 1024, - block_length=8 * 1024): - super(HttpBase, self).__init__(app) - - self.chunked = chunked - self.max_content_length = max_content_length - self.block_length = block_length - - self._http_patterns = set() - - for k, v in self.app.interface.service_method_map.items(): - # p_ stands for primary - p_method_descriptor = v[0] - for patt in p_method_descriptor.patterns: - if isinstance(patt, HttpPattern): - self._http_patterns.add(patt) - - # this makes sure similar addresses with patterns are evaluated after - # addresses with wildcards, which puts the more specific addresses to - # the front. 
- self._http_patterns = list(reversed(sorted(self._http_patterns, - key=lambda x: (x.address, x.host) ))) - - def match_pattern(self, ctx, method='', path='', host=''): - """Sets ctx.method_request_string if there's a match. It's O(n) which - means you should keep your number of patterns as low as possible. - - :param ctx: A MethodContext instance - :param method: The verb in the HTTP Request (GET, POST, etc.) - :param host: The contents of the ``Host:`` header - :param path: Path but not the arguments. (i.e. stuff before '?', if it's - there) - """ - - if not path.startswith('/'): - path = '/{}'.format(path) - - params = defaultdict(list) - for patt in self._http_patterns: - assert isinstance(patt, HttpPattern) - - if patt.verb is not None: - match = patt.verb_re.match(method) - if match is None: - continue - if not (match.span() == (0, len(method))): - continue - - for k,v in match.groupdict().items(): - params[k].append(v) - - if patt.host is not None: - match = patt.host_re.match(host) - if match is None: - continue - if not (match.span() == (0, len(host))): - continue - - for k, v in match.groupdict().items(): - params[k].append(v) - - if patt.address is None: - if path.split('/')[-1] != patt.endpoint.name: - continue - - else: - match = patt.address_re.match(path) - if match is None: - continue - - if not (match.span() == (0, len(path))): - continue - - for k,v in match.groupdict().items(): - params[k].append(v) - - d = patt.endpoint - assert isinstance(d, MethodDescriptor) - ctx.method_request_string = d.name - - break - - return params - - @property - def has_patterns(self): - return len(self._http_patterns) > 0 diff --git a/libs_crutch/contrib/spyne/server/msgpack.py b/libs_crutch/contrib/spyne/server/msgpack.py deleted file mode 100644 index 1c0fd4d..0000000 --- a/libs_crutch/contrib/spyne/server/msgpack.py +++ /dev/null @@ -1,217 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -import msgpack - -from mmap import mmap -from collections import OrderedDict - -from spyne import MethodContext, TransportContext, Address -from spyne.auxproc import process_contexts -from spyne.error import ValidationError, InternalError -from spyne.server import ServerBase -from spyne.util.six import binary_type - -try: - from twisted.internet.defer import Deferred -except ImportError as e: - def Deferred(*_, **__): raise e - - -MSGPACK_SHELL_OVERHEAD = 10 - - -def _process_v1_msg(prot, msg): - header = None - body = msg[1] - if not isinstance(body, (binary_type, mmap, memoryview)): - raise ValidationError(body, "Body must be a bytestream.") - - if len(msg) > 2: - header = msg[2] - if not isinstance(header, dict): - raise ValidationError(header, "Header must be a dict.") - for k, v in header.items(): - header[k] = msgpack.unpackb(v) - - ctx = MessagePackMethodContext(prot, MessagePackMethodContext.SERVER) - ctx.in_string = [body] - ctx.transport.in_header = header - - return ctx - - -class MessagePackTransportContext(TransportContext): - def __init__(self, parent, transport): - super(MessagePackTransportContext, self).__init__(parent, 
transport) - - self.in_header = None - self.protocol = None - self.inreq_queue = OrderedDict() - self.request_len = None - - def get_peer(self): - if self.protocol is not None: - peer = self.protocol.transport.getPeer() - return Address.from_twisted_address(peer) - - -class MessagePackOobMethodContext(object): - __slots__ = 'd' - - def __init__(self): - if Deferred is not None: - self.d = Deferred() - else: - self.d = None - - def close(self): - if self.d is not None and not self.d.called: - self.d.cancel() - - -class MessagePackMethodContext(MethodContext): - TransportContext = MessagePackTransportContext - - def __init__(self, transport, way): - self.oob_ctx = None - - super(MessagePackMethodContext, self).__init__(transport, way) - - def close(self): - super(MessagePackMethodContext, self).close() - if self.transport is not None: - self.transport.protocol = None - self.transport = None - - if self.oob_ctx is not None: - self.oob_ctx.close() - - -class MessagePackTransportBase(ServerBase): - # These are all placeholders that need to be overridden in subclasses - OUT_RESPONSE_NO_ERROR = None - OUT_RESPONSE_CLIENT_ERROR = None - OUT_RESPONSE_SERVER_ERROR = None - - IN_REQUEST = None - - def __init__(self, app): - super(MessagePackTransportBase, self).__init__(app) - - self._version_map = { - self.IN_REQUEST: _process_v1_msg - } - - def produce_contexts(self, msg): - """Produce contexts based on incoming message. - - :param msg: Parsed request in this format: `[IN_REQUEST, body, header]` - """ - - if not isinstance(msg, (list, tuple)): - logger.debug("Incoming request: %r", msg) - raise ValidationError(msg, "Request must be a list") - - if not len(msg) >= 2: - logger.debug("Incoming request: %r", msg) - raise ValidationError(len(msg), "Request must have at least two " - "elements. It has %r") - - if not isinstance(msg[0], int): - logger.debug("Incoming request: %r", msg) - raise ValidationError(msg[0], "Request version must be an integer. 
" - "It was %r") - - processor = self._version_map.get(msg[0], None) - if processor is None: - logger.debug("Invalid incoming request: %r", msg) - raise ValidationError(msg[0], "Unknown request type %r") - - msglen = len(msg[1]) - # shellen = len(msgpack.packb(msg)) - # logger.debug("Shell size: %d, message size: %d, diff: %d", - # shellen, msglen, shellen - msglen) - # some approx. msgpack overhead based on observations of what's above. - msglen += MSGPACK_SHELL_OVERHEAD - - initial_ctx = processor(self, msg) - contexts = self.generate_contexts(initial_ctx) - - p_ctx, others = contexts[0], contexts[1:] - p_ctx.transport.request_len = msglen - - return p_ctx, others - - def process_contexts(self, contexts): - p_ctx, others = contexts[0], contexts[1:] - - if p_ctx.in_error: - return self.handle_error(p_ctx, others, p_ctx.in_error) - - self.get_in_object(p_ctx) - if p_ctx.in_error: - logger.error(p_ctx.in_error) - return self.handle_error(p_ctx, others, p_ctx.in_error) - - self.get_out_object(p_ctx) - if p_ctx.out_error: - return self.handle_error(p_ctx, others, p_ctx.out_error) - - try: - self.get_out_string(p_ctx) - - except Exception as e: - logger.exception(e) - contexts.out_error = InternalError("Serialization Error.") - return self.handle_error(contexts, others, contexts.out_error) - - def handle_error(self, p_ctx, others, error): - self.get_out_string(p_ctx) - - try: - process_contexts(self, others, p_ctx, error=error) - except Exception as e: - # Report but ignore any exceptions from auxiliary methods. - logger.exception(e) - - def handle_transport_error(self, error): - return msgpack.dumps(str(error)) - - def pack(self, ctx): - ctx.out_string = msgpack.packb([self.OUT_RESPONSE_NO_ERROR, - b''.join(ctx.out_string)]), - - -class MessagePackServerBase(MessagePackTransportBase): - """Contains the transport protocol logic but not the transport itself. 
- - Subclasses should implement logic to move bitstreams in and out of this - class.""" - - OUT_RESPONSE_NO_ERROR = 0 - OUT_RESPONSE_CLIENT_ERROR = 1 - OUT_RESPONSE_SERVER_ERROR = 2 - - IN_REQUEST = 1 diff --git a/libs_crutch/contrib/spyne/server/null.py b/libs_crutch/contrib/spyne/server/null.py deleted file mode 100644 index 2279bd3..0000000 --- a/libs_crutch/contrib/spyne/server/null.py +++ /dev/null @@ -1,228 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.server.null`` module contains the NullServer class and its helper -objects. - -The name comes from the "null modem connection". Look it up. 
-""" - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -from spyne import MethodContext, BODY_STYLE_BARE, ComplexModelBase, \ - BODY_STYLE_EMPTY, BODY_STYLE_OUT_BARE, BODY_STYLE_EMPTY_OUT_BARE - -from spyne.client import Factory -from spyne.const.ansi_color import LIGHT_RED -from spyne.const.ansi_color import LIGHT_BLUE -from spyne.const.ansi_color import END_COLOR -from spyne.server import ServerBase - - -_big_header = ('=' * 40) + LIGHT_RED -_big_footer = END_COLOR + ('=' * 40) -_small_header = ('-' * 20) + LIGHT_BLUE -_small_footer = END_COLOR + ('-' * 20) - - -class NullServer(ServerBase): - """A server that doesn't support any transport at all -- it's implemented - to test services without having to run a server. - - Note that: - 1) ``**kwargs`` overwrite ``*args``. - 2) You can do: :: - - logging.getLogger('spyne.server.null').setLevel(logging.CRITICAL) - - to hide context delimiters in logs. - """ - - transport = 'noconn://null.spyne' - MethodContext = MethodContext - - def __init__(self, app, ostr=False, locale='C', appinit=True): - self.do_appinit = appinit - - super(NullServer, self).__init__(app) - - self.service = _FunctionProxy(self, self.app, is_async=False) - self.is_async = _FunctionProxy(self, self.app, is_async=True) - self.factory = Factory(self.app) - self.ostr = ostr - self.locale = locale - self.url = "http://spyne.io/null" - - def appinit(self): - if self.do_appinit: - super(NullServer, self).appinit() - - def get_wsdl(self): - return self.app.get_interface_document(self.url) - - def set_options(self, **kwargs): - self.service.in_header = kwargs.get('soapheaders', - self.service.in_header) - - def get_services(self): - return self.app.interface.service_method_map - - -class _FunctionProxy(object): - def __init__(self, server, app, is_async): - self._app = app - self._server = server - self.in_header = None - self.is_async = is_async - - def __getattr__(self, key): - return 
_FunctionCall(self._app, self._server, key, self.in_header, - self._server.ostr, self._server.locale, self.is_async) - - def __getitem__(self, key): - return self.__getattr__(key) - - -class _FunctionCall(object): - def __init__(self, app, server, key, in_header, ostr, locale, async_): - self.app = app - - self._key = key - self._server = server - self._in_header = in_header - self._ostr = ostr - self._locale = locale - self._async = async_ - - def __call__(self, *args, **kwargs): - initial_ctx = self._server.MethodContext(self, MethodContext.SERVER) - initial_ctx.method_request_string = self._key - initial_ctx.in_header = self._in_header - initial_ctx.transport.type = NullServer.transport - initial_ctx.locale = self._locale - - contexts = self.app.in_protocol.generate_method_contexts(initial_ctx) - - retval = None - logger.warning("%s start request %s" % (_big_header, _big_footer)) - - if self._async: - from twisted.internet.defer import Deferred - - for cnt, ctx in enumerate(contexts): - # this reconstruction is quite costly. I wonder whether it's a - # problem though. 
- - _type_info = ctx.descriptor.in_message._type_info - ctx.in_object = [None] * len(_type_info) - for i in range(len(args)): - ctx.in_object[i] = args[i] - - for i, k in enumerate(_type_info.keys()): - val = kwargs.get(k, None) - if val is not None: - ctx.in_object[i] = val - - if ctx.descriptor.body_style == BODY_STYLE_BARE: - ctx.in_object = ctx.descriptor.in_message \ - .get_serialization_instance(ctx.in_object) - - if cnt == 0: - p_ctx = ctx - else: - ctx.descriptor.aux.initialize_context(ctx, p_ctx, error=None) - - # do logging.getLogger('spyne.server.null').setLevel(logging.CRITICAL) - # to hide the following - logger.warning("%s start context %s" % (_small_header, - _small_footer)) - logger.info("%r.%r" % (ctx.service_class, - ctx.descriptor.function)) - try: - self.app.process_request(ctx) - finally: - logger.warning("%s end context %s" % (_small_header, - _small_footer)) - - if cnt == 0: - if self._async and isinstance(ctx.out_object[0], Deferred): - retval = ctx.out_object[0] - retval.addCallback(_cb_async, ctx, cnt, self) - - else: - retval = _cb_sync(ctx, cnt, self) - - if not self._async: - p_ctx.close() - - logger.warning("%s end request %s" % (_big_header, _big_footer)) - - return retval - - -def _cb_async(ret, ctx, cnt, fc): - if issubclass(ctx.descriptor.out_message, ComplexModelBase): - if len(ctx.descriptor.out_message._type_info) == 0: - ctx.out_object = [None] - - elif len(ctx.descriptor.out_message._type_info) == 1: - ctx.out_object = [ret] - - else: - ctx.out_object = ret - - else: - ctx.out_object = [ret] - - return _cb_sync(ctx, cnt, fc) - - -def _cb_sync(ctx, cnt, fc): - retval = None - - if ctx.out_error: - raise ctx.out_error - - else: - if ctx.descriptor.is_out_bare(): - retval = ctx.out_object[0] - - elif ctx.descriptor.body_style is BODY_STYLE_EMPTY: - retval = None - - elif len(ctx.descriptor.out_message._type_info) == 0: - retval = None - - elif len(ctx.descriptor.out_message._type_info) == 1: - retval = ctx.out_object[0] - - else: 
- retval = ctx.out_object - - if cnt == 0 and fc._ostr: - fc._server.get_out_string(ctx) - retval = ctx.out_string - - if cnt > 0: - ctx.close() - - return retval diff --git a/libs_crutch/contrib/spyne/server/pyramid.py b/libs_crutch/contrib/spyne/server/pyramid.py deleted file mode 100644 index 7319da2..0000000 --- a/libs_crutch/contrib/spyne/server/pyramid.py +++ /dev/null @@ -1,58 +0,0 @@ -# encoding: utf-8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.server.pyramid`` module contains a Pyramid-compatible Http -transport. It's a thin wrapper around -:class:`spyne.server.wsgi.WsgiApplication`. -""" - -from __future__ import absolute_import - -from pyramid.response import Response -from spyne.server.wsgi import WsgiApplication - - -class PyramidApplication(WsgiApplication): - """Pyramid View Wrapper. 
Use this for regular RPC""" - - def __call__(self, request): - retval = Response() - - def start_response(status, headers): - status, reason = status.split(' ', 1) - - retval.status_int = int(status) - for header, value in headers: - retval.headers[header] = value - - response = WsgiApplication.__call__(self, request.environ, - start_response) - retval.body = b"".join(response) - - return retval - - def set_response(self, retval, response): - retval.body = b"".join(response) - - -class StreamingPyramidApplication(WsgiApplication): - """You should use this when you're generating HUGE data as response.""" - - def set_response(self, retval, response): - retval.app_iter = response diff --git a/libs_crutch/contrib/spyne/server/twisted/__init__.py b/libs_crutch/contrib/spyne/server/twisted/__init__.py deleted file mode 100644 index c22dcaa..0000000 --- a/libs_crutch/contrib/spyne/server/twisted/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -def log_and_let_go(err, logger): - logger.error(err.getTraceback()) - return err - - -from spyne.server.twisted.http import TwistedWebResource -from spyne.server.twisted.websocket import TwistedWebSocketResource diff --git a/libs_crutch/contrib/spyne/server/twisted/_base.py b/libs_crutch/contrib/spyne/server/twisted/_base.py deleted file mode 100644 index 98a386c..0000000 --- a/libs_crutch/contrib/spyne/server/twisted/_base.py +++ /dev/null @@ -1,79 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from twisted.internet.defer import Deferred -from twisted.internet.interfaces import IPullProducer -from twisted.web.iweb import UNKNOWN_LENGTH - -from zope.interface import implementer - - -@implementer(IPullProducer) -class Producer(object): - deferred = None - - def __init__(self, body, consumer): - """:param body: an iterable of strings""" - - # check to see if we can determine the length - try: - len(body) # iterator? 
- self.length = sum([len(fragment) for fragment in body]) - self.body = iter(body) - - except TypeError: - self.length = UNKNOWN_LENGTH - self.body = body - - self.deferred = Deferred() - - self.consumer = consumer - - def resumeProducing(self): - try: - chunk = next(self.body) - - except StopIteration as e: - self.consumer.unregisterProducer() - if self.deferred is not None: - self.deferred.callback(self.consumer) - self.deferred = None - return - - self.consumer.write(chunk) - - def pauseProducing(self): - pass - - def stopProducing(self): - if self.deferred is not None: - self.deferred.errback( - Exception("Consumer asked us to stop producing")) - self.deferred = None - - -from spyne import Address -_TYPE_MAP = {'TCP': Address.TCP4, 'TCP6': Address.TCP6, - 'UDP': Address.UDP4, 'UDP6': Address.UDP6} - -def _address_from_twisted_address(peer): - return Address( - type=_TYPE_MAP.get(peer.type, None), host=peer.host, port=peer.port) - -Address.from_twisted_address = staticmethod(_address_from_twisted_address) diff --git a/libs_crutch/contrib/spyne/server/twisted/http.py b/libs_crutch/contrib/spyne/server/twisted/http.py deleted file mode 100644 index 0c76176..0000000 --- a/libs_crutch/contrib/spyne/server/twisted/http.py +++ /dev/null @@ -1,790 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.server.twisted`` module contains a server transport compatible -with the Twisted event loop. It uses the TwistedWebResource object as transport. - -Also see the twisted examples in the examples directory of the source -distribution. - -If you want to have a hard-coded URL in the wsdl document, this is how to do -it: :: - - resource = TwistedWebResource(...) - resource.http_transport.doc.wsdl11.build_interface_document("http://example.com") - -This is not strictly necessary. If you don't do this, Spyne will get the URL -from the first request, build the wsdl on-the-fly and cache it as a string in -memory for later requests. However, if you want to make sure you only have this -url on the WSDL, this is how to do it. Note that if your client takes the -information in wsdl seriously, all requests will go to the designated url above -which can make testing a bit difficult. Use in moderation. 
-""" - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -import re -import cgi -import gzip -import shutil -import threading - -from os import fstat -from mmap import mmap -from inspect import isclass -from collections import namedtuple -from tempfile import TemporaryFile - -from twisted.web import static -from twisted.web.server import NOT_DONE_YET, Request -from twisted.web.resource import Resource, NoResource, ForbiddenResource -from twisted.web.static import getTypeAndEncoding -from twisted.python.log import err -from twisted.python.failure import Failure -from twisted.internet import reactor -from twisted.internet.task import deferLater -from twisted.internet.defer import Deferred -from twisted.internet.threads import deferToThread - -from spyne import Redirect, Address -from spyne.application import logger_server -from spyne.application import get_fault_string_from_exception - -from spyne.util import six -from spyne.error import InternalError -from spyne.auxproc import process_contexts -from spyne.const.ansi_color import LIGHT_GREEN -from spyne.const.ansi_color import END_COLOR -from spyne.const.http import HTTP_404, HTTP_200 - -from spyne.model import PushBase, File, ComplexModelBase -from spyne.model.fault import Fault - -from spyne.protocol.http import HttpRpc - -from spyne.server.http import HttpBase -from spyne.server.http import HttpMethodContext -from spyne.server.http import HttpTransportContext -from spyne.server.twisted._base import Producer -from spyne.server.twisted import log_and_let_go - -from spyne.util.address import address_parser -from spyne.util.six import text_type, string_types -from spyne.util.six.moves.urllib.parse import unquote - -if not six.PY2: - from urllib.request import unquote_to_bytes - - -def _render_file(file, request): - """ - Begin sending the contents of this L{File} (or a subset of the - contents, based on the 'range' header) to the given request. 
- """ - file.restat(False) - - if file.type is None: - file.type, file.encoding = getTypeAndEncoding(file.basename(), - file.contentTypes, - file.contentEncodings, - file.defaultType) - - if not file.exists(): - return file.childNotFound.render(request) - - if file.isdir(): - return file.redirect(request) - - request.setHeader('accept-ranges', 'bytes') - - try: - fileForReading = file.openForReading() - except IOError as e: - import errno - - if e[0] == errno.EACCES: - return ForbiddenResource().render(request) - else: - raise - - #if request.setLastModified(file.getmtime()) is CACHED: - # return '' - - producer = file.makeProducer(request, fileForReading) - - if request.method == 'HEAD': - return '' - - producer.start() - # and make sure the connection doesn't get closed - return NOT_DONE_YET - - -def _set_response_headers(request, headers): - retval = [] - - for k, v in headers.items(): - if isinstance(v, (list, tuple)): - request.responseHeaders.setRawHeaders(k, v) - else: - request.responseHeaders.setRawHeaders(k, [v]) - - return retval - - -def _reconstruct_url(request): - # HTTP "Hosts" header only supports ascii - - server_name = request.getHeader(b"x-forwarded-host") - server_port = request.getHeader(b"x-forwarded-port") - if server_port is not None: - try: - server_port = int(server_port) - except Exception as e: - logger.debug("Ignoring exception: %r for value %r", e, server_port) - server_port = None - - is_secure = request.getHeader(b"x-forwarded-proto") - if is_secure is not None: - is_secure = is_secure == 'https' - - if server_name is None: - server_name = request.getRequestHostname().decode('ascii') - if server_port is None: - server_port = request.getHost().port - if is_secure is None: - is_secure = bool(request.isSecure()) - - if (is_secure, server_port) not in ((True, 443), (False, 80)): - server_name = '%s:%d' % (server_name, server_port) - - if is_secure: - url_scheme = 'https' - else: - url_scheme = 'http' - - uri = _decode_path(request.uri) - 
return ''.join([url_scheme, "://", server_name, uri]) - - -class _Transformer(object): - def __init__(self, req): - self.req = req - - def get(self, key, default): - key = key.lower() - if key.startswith((b'http_', b'http-')): - key = key[5:] - - retval = self.req.getHeader(key) - if retval is None: - retval = default - return retval - - -class TwistedHttpTransportContext(HttpTransportContext): - def set_mime_type(self, what): - if isinstance(what, text_type): - what = what.encode('ascii', errors='replace') - super(TwistedHttpTransportContext, self).set_mime_type(what) - self.req.setHeader('Content-Type', what) - - def get_cookie(self, key): - return self.req.getCookie(key) - - def get_path(self): - return self.req.URLPath().path - - def get_path_and_qs(self): - return self.req.uri - - def get_request_method(self): - return self.req.method - - def get_request_content_type(self): - return self.req.getHeader("Content-Type") - - def get_peer(self): - peer = Address.from_twisted_address(self.req.transport.getPeer()) - addr = address_parser.get_ip(_Transformer(self.req)) - - if addr is None: - return peer - - if address_parser.is_valid_ipv4(addr): - return Address(type=Address.TCP4, host=addr, port=0) - - if address_parser.is_valid_ipv6(addr): - return Address(type=Address.TCP6, host=addr, port=0) - - -class TwistedHttpMethodContext(HttpMethodContext): - HttpTransportContext = TwistedHttpTransportContext - - -def _decode_path(fragment): - if six.PY2: - return unquote(fragment).decode('utf8') - - return unquote_to_bytes(fragment).decode('utf8') - - -class TwistedHttpTransport(HttpBase): - def __init__(self, app, chunked=False, max_content_length=2 * 1024 * 1024, - block_length=8 * 1024): - super(TwistedHttpTransport, self).__init__(app, chunked=chunked, - max_content_length=max_content_length, block_length=block_length) - - self.reactor_thread = None - def _cb(): - self.reactor_thread = threading.current_thread() - - deferLater(reactor, 0, _cb) - - def pusher_init(self, 
p_ctx, gen, _cb_push_finish, pusher, interim): - if pusher.orig_thread != self.reactor_thread: - return deferToThread(super(TwistedHttpTransport, self).pusher_init, - p_ctx, gen, _cb_push_finish, pusher, interim) - - return super(TwistedHttpTransport, self).pusher_init( - p_ctx, gen, _cb_push_finish, pusher, interim) - - @staticmethod - def set_out_document_push(ctx): - class _ISwearImAGenerator(object): - def send(self, data): - if not data: return - ctx.out_stream.write(data) - - ctx.out_document = _ISwearImAGenerator() - - def pusher_try_close(self, ctx, pusher, retval): - # the whole point of this function is to call ctx.out_stream.finish() - # when a *root* pusher has no more data to send. interim pushers don't - # have to close anything. - if isinstance(retval, Deferred): - def _eb_push_close(f): - assert isinstance(f, Failure) - - logger.error(f.getTraceback()) - - subretval = super(TwistedHttpTransport, self) \ - .pusher_try_close(ctx, pusher, retval) - - if not pusher.interim: - ctx.out_stream.finish() - - return subretval - - def _cb_push_close(r): - def _eb_inner(f): - if not pusher.interim: - ctx.out_stream.finish() - - return f - - if not isinstance(r, Deferred): - retval = super(TwistedHttpTransport, self) \ - .pusher_try_close(ctx, pusher, r) - if not pusher.interim: - ctx.out_stream.finish() - - return retval - - return r \ - .addCallback(_cb_push_close) \ - .addErrback(_eb_inner) \ - .addErrback(log_and_let_go, logger) - - return retval \ - .addCallback(_cb_push_close) \ - .addErrback(_eb_push_close) \ - .addErrback(log_and_let_go, logger) - - super(TwistedHttpTransport, self).pusher_try_close(ctx, pusher, retval) - - if not pusher.interim: - retval = ctx.out_stream.finish() - - return retval - - def decompose_incoming_envelope(self, prot, ctx, message): - """This function is only called by the HttpRpc protocol to have the - twisted web's Request object is parsed into ``ctx.in_body_doc`` and - ``ctx.in_header_doc``. 
- """ - - request = ctx.in_document - assert isinstance(request, Request) - - ctx.in_header_doc = dict(request.requestHeaders.getAllRawHeaders()) - ctx.in_body_doc = request.args - - for fi in ctx.transport.file_info: - assert isinstance(fi, _FileInfo) - if fi.file_name is None: - continue - - l = ctx.in_body_doc.get(fi.field_name, None) - if l is None: - l = ctx.in_body_doc[fi.field_name] = [] - - l.append( - File.Value(name=fi.file_name, type=fi.file_type, data=fi.data) - ) - - # this is a huge hack because twisted seems to take the slashes in urls - # too seriously. - postpath = getattr(request, 'realpostpath', None) - if postpath is None: - postpath = request.path - - if postpath is not None: - postpath = _decode_path(postpath) - - params = self.match_pattern(ctx, request.method, postpath, - request.getHeader(b'Host')) - - if ctx.method_request_string is None: # no pattern match - ctx.method_request_string = u'{%s}%s' % ( - self.app.interface.get_tns(), - _decode_path(request.path.rsplit(b'/', 1)[-1])) - - logger.debug(u"%sMethod name: %r%s" % (LIGHT_GREEN, - ctx.method_request_string, END_COLOR)) - - for k, v in params.items(): - val = ctx.in_body_doc.get(k, []) - val.extend(v) - ctx.in_body_doc[k] = val - - r = {} - for k, v in ctx.in_body_doc.items(): - l = [] - for v2 in v: - if isinstance(v2, string_types): - l.append(unquote(v2)) - else: - l.append(v2) - r[k] = l - ctx.in_body_doc = r - - # This is consistent with what server.wsgi does. - if request.method in ('POST', 'PUT', 'PATCH'): - for k, v in ctx.in_body_doc.items(): - if v == ['']: - ctx.in_body_doc[k] = [None] - - -FIELD_NAME_RE = re.compile(r'name="([^"]+)"') -FILE_NAME_RE = re.compile(r'filename="([^"]+)"') -_FileInfo = namedtuple("_FileInfo", "field_name file_name file_type data") - - -def _get_file_info(ctx): - """We need this hack because twisted doesn't offer a way to get file name - from Content-Disposition header. 
- """ - - retval = [] - - request = ctx.transport.req - headers = request.getAllHeaders() - content_type = headers.get('content-type', None) - if content_type is None: - return retval - - content = request.content - - content_encoding = headers.get('content-encoding', None) - if content_encoding == b'gzip': - request.content.seek(0) - content = TemporaryFile() - with gzip.GzipFile(fileobj=request.content) as ifstr: - shutil.copyfileobj(ifstr, content) - content.seek(0) - - img = cgi.FieldStorage( - fp=content, - headers=ctx.in_header_doc, - environ={ - 'REQUEST_METHOD': request.method, - 'CONTENT_TYPE': content_type, - } - ) - - try: - keys = img.keys() - except TypeError: - return retval - - for k in keys: - fields = img[k] - - if isinstance(fields, cgi.FieldStorage): - fields = (fields,) - - for field in fields: - file_type = field.type - file_name = field.disposition_options.get('filename', None) - if file_name is not None: - retval.append(_FileInfo(k, file_name, file_type, - [mmap(field.file.fileno(), 0)])) - - return retval - - -def _has_fd(istr): - if not hasattr(istr, 'fileno'): - return False - try: - istr.fileno() - except IOError: - return False - else: - return True - - -def get_twisted_child_with_default(res, path, request): - # this hack is necessary because twisted takes the slash character in - # http requests too seriously. i.e. it insists that a leaf node can only - # handle the last path fragment. 
- if res.prepath is None: - request.realprepath = b'/' + b'/'.join(request.prepath) - else: - if not res.prepath.startswith('/'): - request.realprepath = b'/' + res.prepath - else: - request.realprepath = res.prepath - - if path in res.children: - retval = res.children[path] - else: - retval = res.getChild(path, request) - - if isinstance(retval, NoResource): - retval = res - else: - request.realpostpath = request.path[ - len(path) + (0 if path.startswith(b'/') else 1):] - - return retval - - -class TwistedWebResource(Resource): - """A server transport that exposes the application as a twisted web - Resource. - """ - - def __init__(self, app, chunked=False, max_content_length=2 * 1024 * 1024, - block_length=8 * 1024, prepath=None): - Resource.__init__(self) - self.app = app - - self.http_transport = TwistedHttpTransport(app, chunked, - max_content_length, block_length) - self._wsdl = None - self.prepath = prepath - - def getChildWithDefault(self, path, request): - return get_twisted_child_with_default(self, path, request) - - def render(self, request): - if request.method == b'GET' and ( - request.uri.endswith(b'.wsdl') or request.uri.endswith(b'?wsdl')): - return self.__handle_wsdl_request(request) - return self.handle_rpc(request) - - def handle_rpc_error(self, p_ctx, others, error, request): - logger.error(error) - resp_code = p_ctx.transport.resp_code - # If user code set its own response code, don't touch it. - if resp_code is None: - resp_code = p_ctx.out_protocol.fault_to_http_response_code(error) - - request.setResponseCode(int(resp_code[:3])) - _set_response_headers(request, p_ctx.transport.resp_headers) - - # In case user code set its own out_* attributes before failing. 
- p_ctx.out_document = None - p_ctx.out_string = None - - p_ctx.out_object = error - self.http_transport.get_out_string(p_ctx) - - retval = b''.join(p_ctx.out_string) - - p_ctx.close() - - process_contexts(self.http_transport, others, p_ctx, error=error) - - return retval - - def handle_rpc(self, request): - initial_ctx = TwistedHttpMethodContext(self.http_transport, request, - self.http_transport.app.out_protocol.mime_type) - - if _has_fd(request.content): - f = request.content - - # it's best to avoid empty mappings. - if fstat(f.fileno()).st_size == 0: - initial_ctx.in_string = [''] - else: - initial_ctx.in_string = [mmap(f.fileno(), 0)] - else: - request.content.seek(0) - initial_ctx.in_string = [request.content.read()] - - initial_ctx.transport.file_info = _get_file_info(initial_ctx) - - contexts = self.http_transport.generate_contexts(initial_ctx) - p_ctx, others = contexts[0], contexts[1:] - - p_ctx.active = True - p_ctx.out_stream = request - # TODO: Rate limiting - p_ctx.active = True - - if p_ctx.in_error: - return self.handle_rpc_error(p_ctx, others, p_ctx.in_error, request) - - else: - self.http_transport.get_in_object(p_ctx) - - if p_ctx.in_error: - return self.handle_rpc_error(p_ctx, others, p_ctx.in_error, - request) - - self.http_transport.get_out_object(p_ctx) - if p_ctx.out_error: - return self.handle_rpc_error(p_ctx, others, p_ctx.out_error, - request) - - ret = p_ctx.out_object[0] - retval = NOT_DONE_YET - if isinstance(ret, Deferred): - ret.addCallback(_cb_deferred, request, p_ctx, others, resource=self) - ret.addErrback(_eb_deferred, request, p_ctx, others, resource=self) - ret.addErrback(log_and_let_go, logger) - - elif isinstance(ret, PushBase): - self.http_transport.init_root_push(ret, p_ctx, others) - - else: - try: - retval = _cb_deferred(p_ctx.out_object, request, p_ctx, others, - self, cb=False) - except Exception as e: - logger_server.exception(e) - try: - _eb_deferred(Failure(), request, p_ctx, others, - resource=self) - except 
Exception as e: - logger_server.exception(e) - - return retval - - def __handle_wsdl_request(self, request): - # disabled for performance reasons. - # logger.debug("WSDL request headers: %r", - # list(request.requestHeaders.getAllRawHeaders())) - - ctx = TwistedHttpMethodContext(self.http_transport, request, - "text/xml; charset=utf-8") - url = _reconstruct_url(request) - - if self.http_transport.doc.wsdl11 is None: - return HTTP_404 - - if self._wsdl is None: - self._wsdl = self.http_transport.doc.wsdl11.get_interface_document() - - ctx.transport.wsdl = self._wsdl - _set_response_headers(request, ctx.transport.resp_headers) - - try: - if self._wsdl is None: - self.http_transport.doc.wsdl11.build_interface_document(url) - ctx.transport.wsdl = self._wsdl = \ - self.http_transport.doc.wsdl11.get_interface_document() - - assert ctx.transport.wsdl is not None - - self.http_transport.event_manager.fire_event('wsdl', ctx) - - return ctx.transport.wsdl - - except Exception as e: - ctx.transport.wsdl_error = e - self.http_transport.event_manager.fire_event('wsdl_exception', ctx) - raise - - finally: - ctx.close() - - -def _cb_request_finished(retval, request, p_ctx): - request.finish() - p_ctx.close() - - -def _eb_request_finished(retval, request, p_ctx): - err(request) - p_ctx.close() - request.finish() - - -def _cb_deferred(ret, request, p_ctx, others, resource, cb=True): - ### set response headers - resp_code = p_ctx.transport.resp_code - - # If user code set its own response code, don't touch it. 
- if resp_code is None: - resp_code = HTTP_200 - request.setResponseCode(int(resp_code[:3])) - - _set_response_headers(request, p_ctx.transport.resp_headers) - - ### normalize response data - om = p_ctx.descriptor.out_message - single_class = None - if cb: - if p_ctx.descriptor.is_out_bare(): - p_ctx.out_object = [ret] - - elif (not issubclass(om, ComplexModelBase)) or len(om._type_info) <= 1: - p_ctx.out_object = [ret] - if len(om._type_info) == 1: - single_class, = om._type_info.values() - else: - p_ctx.out_object = ret - else: - p_ctx.out_object = ret - - ### start response - retval = NOT_DONE_YET - - if isinstance(ret, PushBase): - resource.http_transport.init_root_push(ret, p_ctx, others) - - elif ((isclass(om) and issubclass(om, File)) or - (isclass(single_class) and issubclass(single_class, File))) and \ - isinstance(p_ctx.out_protocol, HttpRpc) and \ - getattr(ret, 'abspath', None) is not None: - - file = static.File(ret.abspath, - defaultType=str(ret.type) or 'application/octet-stream') - retval = _render_file(file, request) - if retval != NOT_DONE_YET and cb: - request.write(retval) - request.finish() - p_ctx.close() - else: - def _close_only_context(ret): - p_ctx.close() - - request.notifyFinish() \ - .addCallback(_close_only_context) \ - .addErrback(_eb_request_finished, request, p_ctx) \ - .addErrback(log_and_let_go, logger) - - else: - ret = resource.http_transport.get_out_string(p_ctx) - - if not isinstance(ret, Deferred): - producer = Producer(p_ctx.out_string, request) - producer.deferred \ - .addCallback(_cb_request_finished, request, p_ctx) \ - .addErrback(_eb_request_finished, request, p_ctx) \ - .addErrback(log_and_let_go, logger) - - try: - request.registerProducer(producer, False) - except Exception as e: - logger_server.exception(e) - try: - _eb_deferred(Failure(), request, p_ctx, others, resource) - except Exception as e: - logger_server.exception(e) - raise - - else: - def _cb(ret): - if isinstance(ret, Deferred): - return ret \ - 
.addCallback(_cb) \ - .addErrback(_eb_request_finished, request, p_ctx) \ - .addErrback(log_and_let_go, logger) - else: - return _cb_request_finished(ret, request, p_ctx) - - ret \ - .addCallback(_cb) \ - .addErrback(_eb_request_finished, request, p_ctx) \ - .addErrback(log_and_let_go, logger) - - process_contexts(resource.http_transport, others, p_ctx) - - return retval - - -def _eb_deferred(ret, request, p_ctx, others, resource): - # DRY this with what's in Application.process_request - if ret.check(Redirect): - try: - ret.value.do_redirect() - - # Now that the processing is switched to the outgoing message, - # point ctx.protocol to ctx.out_protocol - p_ctx.protocol = p_ctx.outprot_ctx - - _cb_deferred(None, request, p_ctx, others, resource, cb=False) - - p_ctx.fire_event('method_redirect') - - except Exception as e: - logger_server.exception(e) - p_ctx.out_error = Fault('Server', get_fault_string_from_exception(e)) - - p_ctx.fire_event('method_redirect_exception') - - elif ret.check(Fault): - p_ctx.out_error = ret.value - - ret = resource.handle_rpc_error(p_ctx, others, p_ctx.out_error, request) - - p_ctx.fire_event('method_exception_object') - - request.write(ret) - - else: - p_ctx.out_error = InternalError(ret.value) - logger.error(ret.getTraceback()) - - ret = resource.handle_rpc_error(p_ctx, others, p_ctx.out_error, request) - - p_ctx.fire_event('method_exception_object') - - request.write(ret) - - request.finish() diff --git a/libs_crutch/contrib/spyne/server/twisted/msgpack.py b/libs_crutch/contrib/spyne/server/twisted/msgpack.py deleted file mode 100644 index 8d548d7..0000000 --- a/libs_crutch/contrib/spyne/server/twisted/msgpack.py +++ /dev/null @@ -1,477 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -import io - -import msgpack - -from time import time -from hashlib import md5 -from collections import deque, OrderedDict -from itertools import chain - -from twisted.internet import reactor -from twisted.internet.task import deferLater -from twisted.internet.defer import Deferred, CancelledError -from twisted.internet.protocol import Protocol, Factory, connectionDone, \ - ClientFactory -from twisted.python.failure import Failure - -from spyne import EventManager, Address, ServerBase, Fault -from spyne.auxproc import process_contexts -from spyne.error import InternalError -from spyne.server.twisted import log_and_let_go - - -class TwistedMessagePackProtocolFactory(Factory): - IDLE_TIMEOUT_SEC = None - - def __init__(self, tpt): - assert isinstance(tpt, ServerBase) - - self.tpt = tpt - self.event_manager = EventManager(self) - - def buildProtocol(self, address): - retval = TwistedMessagePackProtocol(self.tpt, factory=self) - - if self.IDLE_TIMEOUT_SEC is not None: - retval.IDLE_TIMEOUT_SEC = self.IDLE_TIMEOUT_SEC - - return retval - -TwistedMessagePackProtocolServerFactory = TwistedMessagePackProtocolFactory - - -class 
TwistedMessagePackProtocolClientFactory(ClientFactory): - def __init__(self, tpt, max_buffer_size=2 * 1024 * 1024): - assert isinstance(tpt, ServerBase), \ - "%r is not a ServerBase instance" % tpt - - self.tpt = tpt - self.max_buffer_size = max_buffer_size - self.event_manager = EventManager(self) - - def buildProtocol(self, address): - return TwistedMessagePackProtocol(self.tpt, - max_buffer_size=self.max_buffer_size, factory=self) - - -def _cha(*args): - return args - - -class TwistedMessagePackProtocol(Protocol): - IDLE_TIMEOUT_SEC = 0 - IDLE_TIMEOUT_MSG = 'idle timeout' - MAX_INACTIVE_CONTEXTS = float('inf') - - def __init__(self, tpt, max_buffer_size=2 * 1024 * 1024, out_chunk_size=0, - out_chunk_delay_sec=1, max_in_queue_size=0, factory=None): - """Twisted protocol implementation for Spyne's MessagePack transport. - - :param tpt: Spyne transport. It's an app-wide instance. - :param max_buffer_size: Max. encoded message size. - :param out_chunk_size: Split - :param factory: Twisted protocol factory - - Supported events: - * ``outresp_flushed(ctx, ctxid, data)`` - Called right after response data is flushed to the socket. 
- * ctx: Always None - * ctxid: Integer equal to ``id(ctx)`` - * data: Flushed bytes object - - """ - - from spyne.server.msgpack import MessagePackTransportBase - assert isinstance(tpt, MessagePackTransportBase), \ - "Expected {!r} got {!r}".format(MessagePackTransportBase, type(tpt)) - - self.spyne_tpt = tpt - self._buffer = msgpack.Unpacker(raw=True, - max_buffer_size=max_buffer_size) - self.out_chunk_size = out_chunk_size - self.out_chunk_delay_sec = out_chunk_delay_sec - self.max_in_queue_size = max_in_queue_size - self.factory = factory - - self.sessid = '' - self._delaying = None - self.sent_bytes = 0 - self.recv_bytes = 0 - self.idle_timer = None - self.out_chunks = deque() - self.inreq_queue = OrderedDict() - self.inactive_queue = list() - self.disconnecting = False # FIXME: should we use this to raise an - # invalid connection state exception ? - - @staticmethod - def gen_chunks(l, n): - """Yield successive n-sized chunks from l.""" - if isinstance(l, io.BufferedIOBase): - while True: - data = l.read(n) - if not data: - break - yield data - l.close() - - else: - for i in range(0, len(l), n): - yield l[i:i+n] - - def gen_sessid(self, *args): - """It's up to you to use this in a subclass.""" - - retval = _cha( - Address.from_twisted_address(self.transport.getPeer()), - time(), - *args - ) - - return md5(repr(retval).encode('utf8')).hexdigest() - - def connectionMade(self): - logger.debug("%08x connection made", id(self)) - self.sessid = '' - self._delaying = None - self.sent_bytes = 0 - self.recv_bytes = 0 - self.idle_timer = None - self.out_chunks = deque() - self.inreq_queue = OrderedDict() - self.inactive_queue = list() - self.active_queue = dict() - self.disconnecting = False # FIXME: should we use this to raise an - # invalid connection state exception ? 
- - self._reset_idle_timer() - if self.factory is not None: - self.factory.event_manager.fire_event("connection_made", self) - - def connectionLost(self, reason=connectionDone): - if reason is connectionDone: - logger.debug("%08x connection done", id(self)) - else: - logger.debug("%08x connection lost: %s", id(self), reason) - self.disconnecting = False - if self.factory is not None: - self.factory.event_manager.fire_event("connection_lost", self) - self._cancel_idle_timer() - - def _cancel_idle_timer(self): - if self.idle_timer is not None: - if not self.idle_timer.called: - # FIXME: Workaround for a bug in Twisted 18.9.0 when - # DelayedCall.debug == True - try: - self.idle_timer.cancel() - except AttributeError: - del self.idle_timer.func - del self.idle_timer.args - del self.idle_timer.kw - - self.idle_timer = None - - def dataReceived(self, data): - self._buffer.feed(data) - self.recv_bytes += len(data) - - self._reset_idle_timer() - - for msg in self._buffer: - self.process_incoming_message(msg) - - if self.disconnecting: - return - - def _reset_idle_timer(self): - if self.idle_timer is not None: - t = self.idle_timer - self.idle_timer = None - if not t.called: - t.cancel() - - if self.IDLE_TIMEOUT_SEC is not None and self.IDLE_TIMEOUT_SEC > 0: - self.idle_timer = deferLater(reactor, self.IDLE_TIMEOUT_SEC, - self.loseConnection, self.IDLE_TIMEOUT_MSG) \ - .addErrback(self._err_idle_cancelled) \ - .addErrback(self._err_idle_cancelled_unknown_error) - - def _err_idle_cancelled(self, err): - err.trap(CancelledError) - - # do nothing. 
- - def _err_idle_cancelled_unknown_error(self, err): - logger.error("Sessid %s error cancelling idle timer: %s", - self.sessid, err.getTraceback()) - self.idle_timer = None - - def loseConnection(self, reason=None): - self.disconnecting = True - self.idle_timer = None - logger.debug("Aborting connection because %s", reason) - self.transport.abortConnection() - - def process_incoming_message(self, msg, oob=None): - p_ctx, others = self.spyne_tpt.produce_contexts(msg) - p_ctx.oob_ctx = oob - p_ctx.transport.remote_addr = Address.from_twisted_address( - self.transport.getPeer()) - p_ctx.transport.protocol = self - p_ctx.transport.sessid = self.sessid - - self.inactive_queue.append((p_ctx, others)) - self.process_inactive() - - @property - def num_active_contexts(self): - return len(self.inreq_queue) - - @property - def num_inactive_contexts(self): - return len(self.inactive_queue) - - def process_inactive(self): - peer = self.transport.getPeer() - addr_str = Address.from_twisted_address(peer) - - if self.max_in_queue_size == 0: - while self.num_inactive_contexts > 0: - p_ctx, others = self.inactive_queue.pop() - self.active_queue[id(p_ctx)] = p_ctx - - self.inreq_queue[id(p_ctx)] = None - self.process_contexts(p_ctx, others) - - else: - while self.num_active_contexts < self.max_in_queue_size and \ - self.num_inactive_contexts > 0: - p_ctx, others = self.inactive_queue.pop() - self.active_queue[id(p_ctx)] = p_ctx - - self.inreq_queue[id(p_ctx)] = None - self.process_contexts(p_ctx, others) - - if self.num_active_contexts > self.MAX_INACTIVE_CONTEXTS: - logger.error("%s Too many inactive contexts. 
" - "Closing connection.", addr_str) - self.loseConnection("Too many inactive contexts") - - logger.debug("%s active %d inactive %d", addr_str, - self.num_active_contexts, self.num_inactive_contexts) - - def enqueue_outresp_data(self, ctxid, data): - assert self.inreq_queue[ctxid] is None - self.inreq_queue[ctxid] = data - - for k, v in list(self.inreq_queue.items()): - if v is None: - break - - self.out_write(v) - self.spyne_tpt.event_manager.fire_event('outresp_flushed', - None, k, v) - del self.inreq_queue[k] - self.active_queue[k].close() - del self.active_queue[k] - - self.process_inactive() - - def out_write(self, reqdata): - if self.out_chunk_size == 0: - if isinstance(reqdata, io.BufferedIOBase): - nbytes = reqdata.tell() - reqdata.seek(0) - self.transport.write(reqdata.read()) - else: - nbytes = len(reqdata) - self.transport.write(reqdata) - - self.sent_bytes += nbytes - - else: - if isinstance(reqdata, io.BufferedIOBase): - reqdata.seek(0) - - chunks = self.gen_chunks(reqdata, self.out_chunk_size) - self.out_chunks.append(chunks) - deferLater(reactor, 0, self._write_single_chunk) - - def _wait_for_next_chunk(self): - return deferLater(reactor, self.out_chunk_delay_sec, - self._write_single_chunk) - - def _write_single_chunk(self): - try: - chunk = next(chain.from_iterable(self.out_chunks)) - except StopIteration: - chunk = None - self.out_chunks.clear() - - if chunk is None: - self._delaying = None - - logger.debug("%s no more chunks...", self.sessid) - - else: - self.transport.write(chunk) - self.sent_bytes += len(chunk) - - if self.connected and not self.disconnecting: - self._delaying = self._wait_for_next_chunk() - - logger.debug("%s One chunk of %d bytes written. 
Delaying " - "before next chunk write...", self.sessid, len(chunk)) - - else: - logger.debug("%s Disconnection detected, discarding " - "remaining chunks", self.sessid) - self.out_chunks.clear() - - def handle_error(self, p_ctx, others, exc): - self.spyne_tpt.get_out_string(p_ctx) - - if isinstance(exc, InternalError): - error = self.spyne_tpt.OUT_RESPONSE_SERVER_ERROR - else: - error = self.spyne_tpt.OUT_RESPONSE_CLIENT_ERROR - - data = p_ctx.out_document[0] - if isinstance(data, dict): - data = list(data.values()) - - out_object = (error, msgpack.packb(data),) - if p_ctx.oob_ctx is not None: - p_ctx.oob_ctx.d.callback(out_object) - return - - out_string = msgpack.packb(out_object) - p_ctx.transport.resp_length = len(out_string) - self.enqueue_outresp_data(id(p_ctx), out_string) - - try: - process_contexts(self, others, p_ctx, error=error) - - except Exception as e: - # Report but ignore any exceptions from auxiliary methods. - logger.error("Exception ignored from auxiliary method: %r", e) - logger.exception(e) - - def _register_callbacks(self, d, p_ctx, others): - return d \ - .addCallback(self._cb_deferred, p_ctx, others) \ - .addErrback(self._eb_deferred, p_ctx, others) \ - .addErrback(log_and_let_go, logger) - - def process_contexts(self, p_ctx, others): - if p_ctx.in_error: - self.handle_error(p_ctx, others, p_ctx.in_error) - return - - self.spyne_tpt.get_in_object(p_ctx) - if p_ctx.in_error: - logger.error(p_ctx.in_error) - self.handle_error(p_ctx, others, p_ctx.in_error) - return - - self.spyne_tpt.get_out_object(p_ctx) - if p_ctx.out_error: - self.handle_error(p_ctx, others, p_ctx.out_error) - return - - ret = p_ctx.out_object - if isinstance(ret, Deferred): - self._register_callbacks(ret, p_ctx, others) - - else: - ret = p_ctx.out_object[0] - if isinstance(ret, Deferred): - self._register_callbacks(ret, p_ctx, others) - - else: - self._cb_deferred(p_ctx.out_object, p_ctx, others, nowrap=True) - - def _eb_deferred(self, fail, p_ctx, others): - assert 
isinstance(fail, Failure) - - if isinstance(fail.value, Fault): - p_ctx.out_error = fail.value - - else: - p_ctx.out_error = InternalError(fail.value) - if not getattr(fail, 'logged', False): - logger.error(fail.getTraceback()) - - try: - self.handle_error(p_ctx, others, p_ctx.out_error) - - except Exception as e: - logger.exception(e) - raise - - def _cb_deferred(self, ret, p_ctx, others, nowrap=False): - # this means callback is not invoked directly instead of as part of a - # deferred chain - if not nowrap: - # if there is one return value or the output is bare (which means - # there can't be anything other than 1 return value case) use the - # enclosing list. otherwise, the return value is a tuple anyway, so - # leave it be. - if p_ctx.descriptor.is_out_bare(): - p_ctx.out_object = [ret] - - else: - if len(p_ctx.descriptor.out_message._type_info) > 1: - p_ctx.out_object = ret - else: - p_ctx.out_object = [ret] - - if p_ctx.oob_ctx is not None: - assert isinstance(p_ctx.oob_ctx.d, Deferred) - - p_ctx.oob_ctx.d.callback(p_ctx.out_object) - return - - try: - self.spyne_tpt.get_out_string(p_ctx) - self.spyne_tpt.pack(p_ctx) - - out_string = b''.join(p_ctx.out_string) - p_ctx.transport.resp_length = len(out_string) - - self.enqueue_outresp_data(id(p_ctx), out_string) - - except Exception as e: - logger.exception(e) - logger.error("%r", p_ctx) - self.handle_error(p_ctx, others, InternalError(e)) - - finally: - p_ctx.close() - - process_contexts(self.spyne_tpt, others, p_ctx) diff --git a/libs_crutch/contrib/spyne/server/twisted/websocket.py b/libs_crutch/contrib/spyne/server/twisted/websocket.py deleted file mode 100644 index 2e74cab..0000000 --- a/libs_crutch/contrib/spyne/server/twisted/websocket.py +++ /dev/null @@ -1,255 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.server.twisted`` module contains a server transport compatible -with the Twisted event loop. It uses the TwistedWebResource object as transport. - -Also see the twisted examples in the examples directory of the source -distribution. - -If you want to have a hard-coded URL in the wsdl document, this is how to do -it: :: - - resource = TwistedWebResource(...) - resource.http_transport.doc.wsdl11.build_interface_document("http://example.com") - -This is not strictly necessary -- if you don't do this, Spyne will get the -URL from the first request, build the wsdl on-the-fly and cache it as a -string in memory for later requests. However, if you want to make sure -you only have this url on the WSDL, this is how to do it. Note that if -your client takes the information in wsdl seriously, all requests will go -to the designated url above which can make testing a bit difficult. Use -in moderation. - -This module is EXPERIMENTAL. Your mileage may vary. Patches are welcome. 
-""" - - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -from twisted.internet.defer import Deferred -from twisted.internet.protocol import Factory - -# FIXME: Switch to: -# from twisted.web.websockets import WebSocketsProtocol -# from twisted.web.websockets import WebSocketsResource -# from twisted.web.websockets import CONTROLS - -from spyne.util._twisted_ws import WebSocketsProtocol -from spyne.util._twisted_ws import WebSocketsResource -from spyne.util._twisted_ws import CONTROLS - - -from spyne import MethodContext, TransportContext, Address -from spyne.auxproc import process_contexts -from spyne.model import PushBase -from spyne.model.complex import ComplexModel -from spyne.model.fault import Fault -from spyne.server import ServerBase - - -class WebSocketTransportContext(TransportContext): - def __init__(self, parent, transport, type, client_handle): - TransportContext.__init__(self, parent, transport, type) - - self.client_handle = client_handle - """TwistedWebSocketProtocol instance.""" - - self.parent = parent - """Parent Context""" - - def get_peer(self): - if self.client_handle is not None: - peer = self.client_handle.transport.getPeer() - return Address.from_twisted_address(peer) - - -class WebSocketMethodContext(MethodContext): - def __init__(self, transport, client_handle): - MethodContext.__init__(self, transport, MethodContext.SERVER) - - self.transport = WebSocketTransportContext(self, transport, 'ws', - client_handle) - - -class TwistedWebSocketProtocol(WebSocketsProtocol): - """A protocol that parses and generates messages in a WebSocket stream.""" - - def __init__(self, transport, bookkeep=False, _clients=None): - self._spyne_transport = transport - self._clients = _clients - self.__app_id = id(self) - if bookkeep: - self.connectionMade = self._connectionMade - self.connectionLost = self._connectionLost - - @property - def app_id(self): - return self.__app_id - - @app_id.setter - def 
app_id(self, what): - entry = self._clients.get(self.__app_id, None) - - if entry: - del self._clients[self.__app_id] - self._clients[what] = entry - - self.__app_id = what - - def _connectionMade(self): - WebSocketsProtocol.connectionMade(self) - - self._clients[self.app_id] = self - - def _connectionLost(self, reason): - del self._clients[id(self)] - - - def frameReceived(self, opcode, data, fin): - tpt = self._spyne_transport - - initial_ctx = WebSocketMethodContext(tpt, client_handle=self) - initial_ctx.in_string = [data] - - contexts = tpt.generate_contexts(initial_ctx) - p_ctx, others = contexts[0], contexts[1:] - - if p_ctx.in_error: - p_ctx.out_object = p_ctx.in_error - - else: - tpt.get_in_object(p_ctx) - - if p_ctx.in_error: - p_ctx.out_object = p_ctx.in_error - - else: - tpt.get_out_object(p_ctx) - if p_ctx.out_error: - p_ctx.out_object = p_ctx.out_error - - def _cb_deferred(retval, cb=True): - if cb and len(p_ctx.descriptor.out_message._type_info) <= 1: - p_ctx.out_object = [retval] - else: - p_ctx.out_object = retval - - tpt.get_out_string(p_ctx) - self.sendFrame(opcode, ''.join(p_ctx.out_string), fin) - p_ctx.close() - process_contexts(tpt, others, p_ctx) - - def _eb_deferred(err): - p_ctx.out_error = err.value - if not issubclass(err.type, Fault): - logger.error(err.getTraceback()) - - tpt.get_out_string(p_ctx) - self.sendFrame(opcode, ''.join(p_ctx.out_string), fin) - p_ctx.close() - - ret = p_ctx.out_object - if isinstance(ret, (list, tuple)): - ret = ret[0] - - if isinstance(ret, Deferred): - ret.addCallback(_cb_deferred) - ret.addErrback(_eb_deferred) - - elif isinstance(ret, PushBase): - raise NotImplementedError() - - else: - _cb_deferred(p_ctx.out_object, cb=False) - - -class TwistedWebSocketFactory(Factory): - def __init__(self, app, bookkeep=False, _clients=None): - self.app = app - self.transport = ServerBase(app) - self.bookkeep = bookkeep - self._clients = _clients - if _clients is None: - self._clients = {} - - def buildProtocol(self, 
addr): - return TwistedWebSocketProtocol(self.transport, self.bookkeep, - self._clients) - -class _Fake(object): - pass - - -def _FakeWrap(cls): - class _Ret(ComplexModel): - _type_info = {"ugh ": cls} - - return _Ret - - -class _FakeCtx(object): - def __init__(self, obj, cls): - self.out_object = obj - self.out_error = None - self.descriptor = _Fake() - self.descriptor.out_message = cls - - -class InvalidRequestError(Exception): - pass - - -class TwistedWebSocketResource(WebSocketsResource): - def __init__(self, app, bookkeep=False, clients=None): - self.app = app - self.clients = clients - if clients is None: - self.clients = {} - - if bookkeep: - self.propagate = self.do_propagate - - WebSocketsResource.__init__(self, TwistedWebSocketFactory(app, - bookkeep, self.clients)) - - def propagate(self): - raise InvalidRequestError("You must enable bookkeeping to have " - "message propagation work.") - - def get_doc(self, obj, cls=None): - if cls is None: - cls = obj.__class__ - - op = self.app.out_protocol - ctx = _FakeCtx(obj, cls) - op.serialize(ctx, op.RESPONSE) - op.create_out_string(ctx) - - return ''.join(ctx.out_string) - - def do_propagate(self, obj, cls=None): - doc = self.get_doc(obj, cls) - - for c in self.clients.itervalues(): - print('sending to', c) - c.sendFrame(CONTROLS.TEXT, doc, True) diff --git a/libs_crutch/contrib/spyne/server/wsgi.py b/libs_crutch/contrib/spyne/server/wsgi.py deleted file mode 100644 index 9928c98..0000000 --- a/libs_crutch/contrib/spyne/server/wsgi.py +++ /dev/null @@ -1,624 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -""" -A server that uses http as transport via wsgi. It doesn't contain any server -logic. -""" - -import logging -logger = logging.getLogger(__name__) - -import cgi -import threading - -from inspect import isgenerator -from itertools import chain - -from spyne import Address -from spyne.util.six.moves.http_cookies import SimpleCookie -from spyne.util.six.moves.urllib.parse import unquote, quote - -from spyne import File, Fault -from spyne.application import get_fault_string_from_exception -from spyne.auxproc import process_contexts -from spyne.error import RequestTooLongError -from spyne.protocol.http import HttpRpc -from spyne.server.http import HttpBase, HttpMethodContext, HttpTransportContext -from spyne.util.odict import odict -from spyne.util.address import address_parser - -from spyne.const.ansi_color import LIGHT_GREEN -from spyne.const.ansi_color import END_COLOR -from spyne.const.http import HTTP_200 -from spyne.const.http import HTTP_404 -from spyne.const.http import HTTP_500 - - -try: - from spyne.protocol.soap.mime import apply_mtom -except ImportError as _import_error_1: - _local_import_error_1 = _import_error_1 # python 3 workaround - def apply_mtom(*args, **kwargs): - raise _local_import_error_1 - -try: - from werkzeug.formparser import parse_form_data -except ImportError as _import_error_2: - _local_import_error_2 = _import_error_2 # python 3 workaround - def parse_form_data(*args, **kwargs): - raise _local_import_error_2 - - -def _reconstruct_url(environ, protocol=True, 
server_name=True, path=True, - query_string=True): - """Rebuilds the calling url from values found in the - environment. - - This algorithm was found via PEP 333, the wsgi spec and - contributed by Ian Bicking. - """ - - url = '' - if protocol: - url = environ['wsgi.url_scheme'] + '://' - - if server_name: - if environ.get('HTTP_HOST'): - url += environ['HTTP_HOST'] - - else: - url += environ['SERVER_NAME'] - - if environ['wsgi.url_scheme'] == 'https': - if environ['SERVER_PORT'] != '443': - url += ':' + environ['SERVER_PORT'] - - else: - if environ['SERVER_PORT'] != '80': - url += ':' + environ['SERVER_PORT'] - - if path: - if (quote(environ.get('SCRIPT_NAME', '')) == '/' and - quote(environ.get('PATH_INFO', ''))[0] == '/'): - #skip this if it is only a slash - pass - - elif quote(environ.get('SCRIPT_NAME', ''))[0:2] == '//': - url += quote(environ.get('SCRIPT_NAME', ''))[1:] - - else: - url += quote(environ.get('SCRIPT_NAME', '')) - - url += quote(environ.get('PATH_INFO', '')) - - if query_string: - if environ.get('QUERY_STRING'): - url += '?' 
+ environ['QUERY_STRING'] - - return url - -def _parse_qs(qs): - pairs = (s2 for s1 in qs.split('&') for s2 in s1.split(';')) - retval = odict() - - for name_value in pairs: - if name_value is None or len(name_value) == 0: - continue - nv = name_value.split('=', 1) - - if len(nv) != 2: - # Handle case of a control-name with no equal sign - nv.append(None) - - name = unquote(nv[0].replace('+', ' ')) - - value = None - if nv[1] is not None: - value = unquote(nv[1].replace('+', ' ')) - - l = retval.get(name, None) - if l is None: - l = retval[name] = [] - l.append(value) - - return retval - - -def _get_http_headers(req_env): - retval = {} - - for k, v in req_env.items(): - if k.startswith("HTTP_"): - key = k[5:].lower() - val = [v] - retval[key]= val - logger.debug("Add http header %r = %r", key, val) - - return retval - - -def _gen_http_headers(headers): - retval = [] - - for k,v in headers.items(): - if isinstance(v, (list, tuple)): - for v2 in v: - retval.append((k, v2)) - else: - retval.append((k, v)) - - return retval - - -class WsgiTransportContext(HttpTransportContext): - """The class that is used in the transport attribute of the - :class:`WsgiMethodContext` class.""" - - def __init__(self, parent, transport, req_env, content_type): - super(WsgiTransportContext, self).__init__(parent, transport, - req_env, content_type) - - self.req_env = self.req - """WSGI Request environment""" - - self.req_method = req_env.get('REQUEST_METHOD', None) - """HTTP Request verb, as a convenience to users.""" - - self.headers = _get_http_headers(self.req_env) - - def get_path(self): - return self.req_env['PATH_INFO'] - - def get_path_and_qs(self): - retval = quote(self.req_env.get('PATH_INFO', '')) - qs = self.req_env.get('QUERY_STRING', None) - if qs is not None: - retval += '?' 
+ qs - return retval - - def get_cookie(self, key): - cookie_string = self.req_env.get('HTTP_COOKIE', None) - if cookie_string is None: - return - - cookie = SimpleCookie() - cookie.load(cookie_string) - - return cookie.get(key, None).value - - def get_request_method(self): - return self.req['REQUEST_METHOD'].upper() - - def get_request_content_type(self): - return self.req.get("CONTENT_TYPE", None) - - def get_peer(self): - addr, port = address_parser.get_ip(self.req),\ - address_parser.get_port(self.req) - - if address_parser.is_valid_ipv4(addr): - return Address(type=Address.TCP4, host=addr, port=port) - - if address_parser.is_valid_ipv6(addr): - return Address(type=Address.TCP6, host=addr, port=port) - - -class WsgiMethodContext(HttpMethodContext): - """The WSGI-Specific method context. WSGI-Specific information is stored in - the transport attribute using the :class:`WsgiTransportContext` class. - """ - - TransportContext = None - HttpTransportContext = WsgiTransportContext - - -class WsgiApplication(HttpBase): - """A `PEP-3333 `_ - compliant callable class. - - If you want to have a hard-coded URL in the wsdl document, this is how to do - it: :: - - wsgi_app = WsgiApplication(...) - wsgi_app.doc.wsdl11.build_interface_document("http://example.com") - - This is not strictly necessary -- if you don't do this, Spyne will get the - URL from the first request, build the wsdl on-the-fly and cache it as a - string in memory for later requests. However, if you want to make sure - you only have this url on the WSDL, this is how to do it. Note that if - your client takes the information in the Wsdl document seriously (not all - do), all requests will go to the designated url above even when you get the - Wsdl from another location, which can make testing a bit difficult. Use in - moderation. - - Supported events: - * ``wsdl`` - Called right before the wsdl data is returned to the client. 
- - * ``wsdl_exception`` - Called right after an exception is thrown during wsdl generation. - The exception object is stored in ctx.transport.wsdl_error - attribute. - - * ``wsgi_call`` - Called first when the incoming http request is identified as a rpc - request. - - * ``wsgi_return`` - Called right before the output stream is returned to the WSGI - handler. - - * ``wsgi_exception`` - Called right before returning the exception to the client. - - * ``wsgi_close`` - Called after the whole data has been returned to the client. It's - called both from success and error cases. - """ - - def __init__(self, app, chunked=True, max_content_length=2 * 1024 * 1024, - block_length=8 * 1024): - super(WsgiApplication, self).__init__(app, chunked, max_content_length, - block_length) - - self._mtx_build_interface_document = threading.Lock() - - self._wsdl = None - if self.doc.wsdl11 is not None: - self._wsdl = self.doc.wsdl11.get_interface_document() - - def __call__(self, req_env, start_response, wsgi_url=None): - """This method conforms to the WSGI spec for callable wsgi applications - (PEP 333). It looks in environ['wsgi.input'] for a fully formed rpc - message envelope, will deserialize the request parameters and call the - method on the object returned by the get_handler() method. - """ - - url = wsgi_url - if url is None: - url = _reconstruct_url(req_env).split('.wsdl')[0] - - if self.is_wsdl_request(req_env): - # Format the url for location - url = url.split('?')[0].split('.wsdl')[0] - return self.handle_wsdl_request(req_env, start_response, url) - - else: - return self.handle_rpc(req_env, start_response) - - def is_wsdl_request(self, req_env): - # Get the wsdl for the service. 
Assume path_info matches pattern: - # /stuff/stuff/stuff/serviceName.wsdl or - # /stuff/stuff/stuff/serviceName/?wsdl - - return ( - req_env['REQUEST_METHOD'].upper() == 'GET' - and ( - ( - 'QUERY_STRING' in req_env - and req_env['QUERY_STRING'].split('=')[0].lower() == 'wsdl' - ) - or req_env['PATH_INFO'].endswith('.wsdl') - ) - ) - - def handle_wsdl_request(self, req_env, start_response, url): - ctx = WsgiMethodContext(self, req_env, 'text/xml; charset=utf-8') - - if self.doc.wsdl11 is None: - start_response(HTTP_404, - _gen_http_headers(ctx.transport.resp_headers)) - return [HTTP_404] - - if self._wsdl is None: - self._wsdl = self.doc.wsdl11.get_interface_document() - - ctx.transport.wsdl = self._wsdl - - if ctx.transport.wsdl is None: - try: - self._mtx_build_interface_document.acquire() - - ctx.transport.wsdl = self._wsdl - - if ctx.transport.wsdl is None: - self.doc.wsdl11.build_interface_document(url) - ctx.transport.wsdl = self._wsdl = \ - self.doc.wsdl11.get_interface_document() - - except Exception as e: - logger.exception(e) - ctx.transport.wsdl_error = e - - self.event_manager.fire_event('wsdl_exception', ctx) - - start_response(HTTP_500, - _gen_http_headers(ctx.transport.resp_headers)) - - return [HTTP_500] - - finally: - self._mtx_build_interface_document.release() - - self.event_manager.fire_event('wsdl', ctx) - - ctx.transport.resp_headers['Content-Length'] = \ - str(len(ctx.transport.wsdl)) - start_response(HTTP_200, _gen_http_headers(ctx.transport.resp_headers)) - - retval = ctx.transport.wsdl - return [retval] - - def handle_error(self, p_ctx, others, error, start_response): - """Serialize errors to an iterable of strings and return them. - - :param p_ctx: Primary (non-aux) context. - :param others: List if auxiliary contexts (can be empty). - :param error: One of ctx.{in,out}_error. - :param start_response: See the WSGI spec for more info. 
- """ - - if p_ctx.transport.resp_code is None: - p_ctx.transport.resp_code = \ - p_ctx.out_protocol.fault_to_http_response_code(error) - - self.get_out_string(p_ctx) - - # consume the generator to get the length - p_ctx.out_string = list(p_ctx.out_string) - - p_ctx.transport.resp_headers['Content-Length'] = \ - str(sum((len(s) for s in p_ctx.out_string))) - self.event_manager.fire_event('wsgi_exception', p_ctx) - - start_response(p_ctx.transport.resp_code, - _gen_http_headers(p_ctx.transport.resp_headers)) - - try: - process_contexts(self, others, p_ctx, error=error) - except Exception as e: - # Report but ignore any exceptions from auxiliary methods. - logger.exception(e) - - return chain(p_ctx.out_string, self.__finalize(p_ctx)) - - def handle_rpc(self, req_env, start_response): - initial_ctx = WsgiMethodContext(self, req_env, - self.app.out_protocol.mime_type) - - self.event_manager.fire_event('wsgi_call', initial_ctx) - initial_ctx.in_string, in_string_charset = \ - self.__reconstruct_wsgi_request(req_env) - - contexts = self.generate_contexts(initial_ctx, in_string_charset) - p_ctx, others = contexts[0], contexts[1:] - - # TODO: rate limiting - p_ctx.active = True - - if p_ctx.in_error: - return self.handle_error(p_ctx, others, p_ctx.in_error, - start_response) - - self.get_in_object(p_ctx) - if p_ctx.in_error: - logger.error(p_ctx.in_error) - return self.handle_error(p_ctx, others, p_ctx.in_error, - start_response) - - self.get_out_object(p_ctx) - if p_ctx.out_error: - return self.handle_error(p_ctx, others, p_ctx.out_error, - start_response) - - assert p_ctx.out_object is not None - g = next(iter(p_ctx.out_object)) - is_generator = len(p_ctx.out_object) == 1 and isgenerator(g) - - # if the out_object is a generator function, this hack makes the user - # code run until first yield, which lets it set response headers and - # whatnot before calling start_response. 
It's important to run this - # here before serialization as the user function can also set output - # protocol. Is there a better way? - if is_generator: - first_obj = next(g) - p_ctx.out_object = ( chain((first_obj,), g), ) - - if p_ctx.transport.resp_code is None: - p_ctx.transport.resp_code = HTTP_200 - - try: - self.get_out_string(p_ctx) - - except Exception as e: - logger.exception(e) - p_ctx.out_error = Fault('Server', get_fault_string_from_exception(e)) - return self.handle_error(p_ctx, others, p_ctx.out_error, - start_response) - - - if isinstance(p_ctx.out_protocol, HttpRpc) and \ - p_ctx.out_header_doc is not None: - p_ctx.transport.resp_headers.update(p_ctx.out_header_doc) - - if p_ctx.descriptor and p_ctx.descriptor.mtom: - # when there is more than one return type, the result is - # encapsulated inside a list. when there's just one, the result - # is returned in a non-encapsulated form. the apply_mtom always - # expects the objects to be inside an iterable, hence the - # following test. - out_type_info = p_ctx.descriptor.out_message._type_info - if len(out_type_info) == 1: - p_ctx.out_object = [p_ctx.out_object] - - p_ctx.transport.resp_headers, p_ctx.out_string = apply_mtom( - p_ctx.transport.resp_headers, p_ctx.out_string, - p_ctx.descriptor.out_message._type_info.values(), - p_ctx.out_object, - ) - - self.event_manager.fire_event('wsgi_return', p_ctx) - - if self.chunked: - # the user has not set a content-length, so we delete it as the - # input is just an iterable. 
- if 'Content-Length' in p_ctx.transport.resp_headers: - del p_ctx.transport.resp_headers['Content-Length'] - else: - p_ctx.out_string = [''.join(p_ctx.out_string)] - - try: - len(p_ctx.out_string) - - p_ctx.transport.resp_headers['Content-Length'] = \ - str(sum([len(a) for a in p_ctx.out_string])) - except TypeError: - pass - - start_response(p_ctx.transport.resp_code, - _gen_http_headers(p_ctx.transport.resp_headers)) - - retval = chain(p_ctx.out_string, self.__finalize(p_ctx)) - - try: - process_contexts(self, others, p_ctx, error=None) - except Exception as e: - # Report but ignore any exceptions from auxiliary methods. - logger.exception(e) - - return retval - - def __finalize(self, p_ctx): - p_ctx.close() - self.event_manager.fire_event('wsgi_close', p_ctx) - - return () - - def __reconstruct_wsgi_request(self, http_env): - """Reconstruct http payload using information in the http header.""" - - content_type = http_env.get("CONTENT_TYPE") - charset = None - if content_type is not None: - # fyi, here's what the parse_header function returns: - # >>> import cgi; cgi.parse_header("text/xml; charset=utf-8") - # ('text/xml', {'charset': 'utf-8'}) - content_type = cgi.parse_header(content_type) - charset = content_type[1].get('charset', None) - - return self.__wsgi_input_to_iterable(http_env), charset - - def __wsgi_input_to_iterable(self, http_env): - istream = http_env.get('wsgi.input') - - length = str(http_env.get('CONTENT_LENGTH', self.max_content_length)) - if len(length) == 0: - length = 0 - else: - length = int(length) - - if length > self.max_content_length: - raise RequestTooLongError() - bytes_read = 0 - - while bytes_read < length: - bytes_to_read = min(self.block_length, length - bytes_read) - - if bytes_to_read + bytes_read > self.max_content_length: - raise RequestTooLongError() - - data = istream.read(bytes_to_read) - if data is None or len(data) == 0: - break - - bytes_read += len(data) - - yield data - - def decompose_incoming_envelope(self, prot, 
ctx, message): - """This function is only called by the HttpRpc protocol to have the wsgi - environment parsed into ``ctx.in_body_doc`` and ``ctx.in_header_doc``. - """ - - params = {} - wsgi_env = ctx.in_document - - if self.has_patterns: - # http://legacy.python.org/dev/peps/pep-0333/#url-reconstruction - domain = wsgi_env.get('HTTP_HOST', None) - if domain is None: - domain = wsgi_env['SERVER_NAME'] - else: - domain = domain.partition(':')[0] # strip port info - - params = self.match_pattern(ctx, - wsgi_env.get('REQUEST_METHOD', ''), - wsgi_env.get('PATH_INFO', ''), - domain, - ) - - if ctx.method_request_string is None: - ctx.method_request_string = '{%s}%s' % ( - prot.app.interface.get_tns(), - wsgi_env['PATH_INFO'].split('/')[-1]) - - logger.debug("%sMethod name: %r%s" % (LIGHT_GREEN, - ctx.method_request_string, END_COLOR)) - - ctx.in_header_doc = ctx.transport.headers - ctx.in_body_doc = _parse_qs(wsgi_env['QUERY_STRING']) - - for k, v in params.items(): - if k in ctx.in_body_doc: - ctx.in_body_doc[k].extend(v) - else: - ctx.in_body_doc[k] = list(v) - - verb = wsgi_env['REQUEST_METHOD'].upper() - if verb in ('POST', 'PUT', 'PATCH'): - stream, form, files = parse_form_data(wsgi_env, - stream_factory=prot.stream_factory) - - for k, v in form.lists(): - val = ctx.in_body_doc.get(k, []) - val.extend(v) - ctx.in_body_doc[k] = val - - for k, v in files.items(): - val = ctx.in_body_doc.get(k, []) - - mime_type = v.headers.get('Content-Type', - 'application/octet-stream') - - path = getattr(v.stream, 'name', None) - if path is None: - val.append(File.Value(name=v.filename, type=mime_type, - data=[v.stream.getvalue()])) - else: - v.stream.seek(0) - val.append(File.Value(name=v.filename, type=mime_type, - path=path, handle=v.stream)) - - ctx.in_body_doc[k] = val - - for k, v in ctx.in_body_doc.items(): - if v == ['']: - ctx.in_body_doc[k] = [None] diff --git a/libs_crutch/contrib/spyne/server/zeromq.py b/libs_crutch/contrib/spyne/server/zeromq.py deleted file mode 
100644 index f897d55..0000000 --- a/libs_crutch/contrib/spyne/server/zeromq.py +++ /dev/null @@ -1,164 +0,0 @@ -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The ``spyne.server.zeromq`` module contains a server implementation that -uses ZeroMQ (zmq.REP) as transport. 
-""" -import threading - -import zmq - -from spyne.auxproc import process_contexts -from spyne.context import MethodContext -from spyne.server import ServerBase - - -class ZmqMethodContext(MethodContext): - def __init__(self, app): - super(ZmqMethodContext, self).__init__(app, MethodContext.SERVER) - self.transport.type = 'zmq' - - -class ZeroMQServer(ServerBase): - """The ZeroMQ server transport.""" - transport = 'http://rfc.zeromq.org/' - - def __init__(self, app, app_url, wsdl_url=None, ctx=None, socket=None): - if ctx and socket and ctx is not socket.context: - raise ValueError("ctx should be the same as socket.context") - super(ZeroMQServer, self).__init__(app) - - self.app_url = app_url - self.wsdl_url = wsdl_url - - if ctx: - self.ctx = ctx - elif socket: - self.ctx = socket.context - else: - self.ctx = zmq.Context() - - if socket: - self.zmq_socket = socket - else: - self.zmq_socket = self.ctx.socket(zmq.REP) - self.zmq_socket.bind(app_url) - - def __handle_wsdl_request(self): - return self.app.get_interface_document(self.url) - - # FIXME: Add suport for binary-only transports - def generate_contexts(self, ctx, in_string_charset='utf8'): - return super(ZeroMQServer, self).generate_contexts(ctx, - in_string_charset=in_string_charset) - - def serve_forever(self): - """Runs the ZeroMQ server.""" - - while True: - error = None - - initial_ctx = ZmqMethodContext(self) - initial_ctx.in_string = [self.zmq_socket.recv()] - - contexts = self.generate_contexts(initial_ctx) - p_ctx, others = contexts[0], contexts[1:] - - # TODO: Rate limiting - p_ctx.active = True - - if p_ctx.in_error: - p_ctx.out_object = p_ctx.in_error - error = p_ctx.in_error - - else: - self.get_in_object(p_ctx) - - if p_ctx.in_error: - p_ctx.out_object = p_ctx.in_error - error = p_ctx.in_error - else: - self.get_out_object(p_ctx) - if p_ctx.out_error: - p_ctx.out_object = p_ctx.out_error - error = p_ctx.out_error - - self.get_out_string(p_ctx) - - process_contexts(self, others, error) - - 
self.zmq_socket.send(b''.join(p_ctx.out_string)) - - p_ctx.close() - - -class ZeroMQThreadPoolServer(object): - """Create a ZeroMQ server transport with several background workers, - allowing asynchronous calls. - - More details on the pattern http://zguide.zeromq.org/page:all#Shared-Queue-DEALER-and-ROUTER-sockets""" - - def __init__(self, app, app_url, pool_size, wsdl_url=None, ctx=None, socket=None): - if ctx and socket and ctx is not socket.context: - raise ValueError("ctx should be the same as socket.context") - - self.app = app - - if ctx: - self.ctx = ctx - elif socket: - self.ctx = socket.context - else: - self.ctx = zmq.Context() - - if socket: - self.frontend = socket - else: - self.frontend = self.ctx.socket(zmq.ROUTER) - self.frontend.bind(app_url) - - be_url = 'inproc://{tns}.{name}'.format(tns=self.app.tns, name=self.app.name) - self.pool = [] - self.background_jobs = [] - for i in range(pool_size): - worker, job = self.create_worker(i, be_url) - self.pool.append(worker) - self.background_jobs.append(job) - - self.backend = self.ctx.socket(zmq.DEALER) - self.backend.bind(be_url) - - def create_worker(self, i, be_url): - socket = self.ctx.socket(zmq.REP) - socket.connect(be_url) - worker = ZeroMQServer(self.app, be_url, socket=socket) - job = threading.Thread(target=worker.serve_forever) - job.daemon = True - return worker, job - - def serve_forever(self): - """Runs the ZeroMQ server.""" - - for job in self.background_jobs: - job.start() - - zmq.device(zmq.QUEUE, self.frontend, self.backend) - - # We never get here... - self.frontend.close() - self.backend.close() diff --git a/libs_crutch/contrib/spyne/service.py b/libs_crutch/contrib/spyne/service.py deleted file mode 100644 index 5b02c3a..0000000 --- a/libs_crutch/contrib/spyne/service.py +++ /dev/null @@ -1,251 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -""" -This module contains the :class:`Service` class and its helper objects. -""" - -import logging -logger = logging.getLogger(__name__) - -from spyne.util.six.moves.collections_abc import Sequence - -from spyne.evmgr import EventManager -from spyne.util import six -from spyne.util.oset import oset - - -class ServiceBaseMeta(type): - """Adds event managers.""" - - def __init__(self, cls_name, cls_bases, cls_dict): - super(ServiceBaseMeta, self).__init__(cls_name, cls_bases, cls_dict) - - self.public_methods = {} - self.event_manager = EventManager(self, - self.__get_base_event_handlers(cls_bases)) - - def __get_base_event_handlers(self, cls_bases): - handlers = {} - - for base in cls_bases: - evmgr = getattr(base, 'event_manager', None) - if evmgr is None: - continue - - for k, v in evmgr.handlers.items(): - handler = handlers.get(k, oset()) - for h in v: - handler.add(h) - handlers[k] = handler - - return handlers - -class ServiceMeta(ServiceBaseMeta): - """Creates the :class:`spyne.MethodDescriptor` objects by iterating over - tagged methods. 
- """ - - def __init__(self, cls_name, cls_bases, cls_dict): - super(ServiceMeta, self).__init__(cls_name, cls_bases, cls_dict) - - self.__has_aux_methods = self.__aux__ is not None - has_nonaux_methods = None - - for k, v in cls_dict.items(): - if not hasattr(v, '_is_rpc'): - continue - - descriptor = v(_default_function_name=k, _service_class=self) - - # these two lines are needed for staticmethod wrapping to work - setattr(self, k, staticmethod(descriptor.function)) - descriptor.reset_function(getattr(self, k)) - - try: - getattr(self, k).descriptor = descriptor - except AttributeError: - pass - # FIXME: this fails with builtins. Temporary hack while we - # investigate whether we really need this or not - - self.public_methods[k] = descriptor - if descriptor.aux is None and self.__aux__ is None: - has_nonaux_methods = True - else: - self.__has_aux_methods = True - - if self.__has_aux_methods and has_nonaux_methods: - raise Exception("You can't mix primary and " - "auxiliary methods in a single service definition.") - - def is_auxiliary(self): - return self.__has_aux_methods - - -# FIXME: To be renamed to ServiceBase in Spyne 3 -@six.add_metaclass(ServiceBaseMeta) -class ServiceBaseBase(object): - __in_header__ = None - """The incoming header object that the methods under this service definition - accept.""" - - __out_header__ = None - """The outgoing header object that the methods under this service definition - accept.""" - - __service_name__ = None - """The name of this service definition as exposed in the interface document. - Defaults to the class name.""" - - __service_module__ = None - """This is used for internal idenfitication of the service class, - to override the ``__module__`` attribute.""" - - __port_types__ = () - """WSDL-Specific portType mappings""" - - __aux__ = None - """The auxiliary method type. When set, the ``aux`` property of every method - defined under this service is set to this value. 
The _aux flag in the @srpc - decorator overrides this.""" - - @classmethod - def get_service_class_name(cls): - return cls.__name__ - - @classmethod - def get_service_name(cls): - if cls.__service_name__ is None: - return cls.__name__ - else: - return cls.__service_name__ - - @classmethod - def get_service_module(cls): - if cls.__service_module__ is None: - return cls.__module__ - else: - return cls.__service_module__ - - @classmethod - def get_internal_key(cls): - return "%s.%s" % (cls.get_service_module(), cls.get_service_name()) - - @classmethod - def get_port_types(cls): - return cls.__port_types__ - - @classmethod - def _has_callbacks(cls): - """Determines if this service definition has callback methods or not.""" - - for method in cls.public_methods.values(): - if method.is_callback: - return True - - return False - - @classmethod - def get_context(cls): - """Returns a user defined context. Override this in your ServiceBase - subclass to customize context generation.""" - return None - - @classmethod - def call_wrapper(cls, ctx, args=None): - """Called in place of the original method call. You can override this to - do your own exception handling. - - :param ctx: The method context. - - The overriding function must call this function by convention. - """ - - if ctx.function is not None: - if args is None: - args = ctx.in_object - - assert not isinstance(args, six.string_types) - - # python3 wants a proper sequence as *args - if not isinstance(args, Sequence): - args = tuple(args) - - if not ctx.descriptor.no_ctx: - args = (ctx,) + tuple(args) - - return ctx.function(*args) - - @classmethod - def initialize(cls, app): - pass - - -@six.add_metaclass(ServiceMeta) -class Service(ServiceBaseBase): - """The ``Service`` class is the base class for all service definitions. - - The convention is to have public methods defined under a subclass of this - class along with common properties of public methods like header classes or - auxiliary processors. 
The :func:`spyne.decorator.srpc` decorator or its - wrappers should be used to flag public methods. - - This class is designed to be subclassed just once. You're supposed to - combine Service subclasses in order to get the public method mix you - want. - - It is a natural abstract base class, because it's of no use without any - method definitions, hence the 'Base' suffix in the name. - - This class supports the following events: - * ``method_call`` - Called right before the service method is executed - - * ``method_return_object`` - Called right after the service method is executed - - * ``method_exception_object`` - Called when an exception occurred in a service method, before the - exception is serialized. - - * ``method_accept_document`` - Called by the transport right after the incoming stream is parsed to - the incoming protocol's document type. - - * ``method_return_document`` - Called by the transport right after the outgoing object is - serialized to the outgoing protocol's document type. - - * ``method_exception_document`` - Called by the transport right before the outgoing exception object - is serialized to the outgoing protocol's document type. - - * ``method_return_string`` - Called by the transport right before passing the return string to - the client. - - * ``method_exception_string`` - Called by the transport right before passing the exception string to - the client. - """ - - -# FIXME: To be deleted in Spyne 3 -ServiceBase = Service diff --git a/libs_crutch/contrib/spyne/store/__init__.py b/libs_crutch/contrib/spyne/store/__init__.py deleted file mode 100644 index c597ee3..0000000 --- a/libs_crutch/contrib/spyne/store/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""This is the spyne storage package.""" diff --git a/libs_crutch/contrib/spyne/store/relational/__init__.py b/libs_crutch/contrib/spyne/store/relational/__init__.py deleted file mode 100644 index 642d3f3..0000000 --- a/libs_crutch/contrib/spyne/store/relational/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""A Postgresql serializer for Spyne objects. - -Uses SQLAlchemy for mapping objects to relations. 
-""" - -from spyne.store.relational._base import add_column -from spyne.store.relational._base import gen_sqla_info -from spyne.store.relational._base import gen_spyne_info -from spyne.store.relational._base import get_pk_columns - -from spyne.store.relational.document import PGXml, PGObjectXml, PGHtml, \ - PGJson, PGJsonB, PGObjectJson, PGFileJson -from spyne.store.relational.simple import PGLTree, PGLQuery, PGLTxtQuery -from spyne.store.relational.spatial import PGGeometry - -from spyne.store.relational import override diff --git a/libs_crutch/contrib/spyne/store/relational/_base.py b/libs_crutch/contrib/spyne/store/relational/_base.py deleted file mode 100644 index 8c2178f..0000000 --- a/libs_crutch/contrib/spyne/store/relational/_base.py +++ /dev/null @@ -1,1203 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -from __future__ import absolute_import, print_function - -import logging -logger = logging.getLogger(__name__) - -import sqlalchemy - -try: - import simplejson as json -except ImportError: - import json - -from os.path import isabs -from inspect import isclass - -from sqlalchemy import event -from sqlalchemy.schema import Column -from sqlalchemy.schema import Index -from sqlalchemy.schema import Table -from sqlalchemy.schema import ForeignKey -from sqlalchemy.orm import _mapper_registry - -from sqlalchemy.dialects.postgresql import FLOAT -from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION -from sqlalchemy.dialects.postgresql.base import PGUuid, PGInet - -from sqlalchemy.orm import relationship -from sqlalchemy.orm import mapper -from sqlalchemy.ext.associationproxy import association_proxy - -from spyne.store.relational.simple import PGLTree -from spyne.store.relational.document import PGXml, PGObjectXml, PGObjectJson, \ - PGFileJson, PGJsonB, PGHtml, PGJson -from spyne.store.relational.spatial import PGGeometry - -# internal types -from spyne.model.enum import EnumBase -from spyne.model.complex import XmlModifier - -# Config types -from spyne.model import xml as c_xml -from spyne.model import json as c_json -from spyne.model import jsonb as c_jsonb -from spyne.model import table as c_table -from spyne.model import msgpack as c_msgpack -from spyne.model.binary import HybridFileStore - -# public types -from spyne.model import SimpleModel, Enum, Array, ComplexModelBase, \ - Any, AnyDict, AnyXml, AnyHtml, \ - Date, Time, DateTime, Duration, \ - ByteArray, String, Unicode, Uuid, Boolean, \ - Point, Line, Polygon, MultiPoint, MultiLine, MultiPolygon, \ - Float, Double, Decimal, \ - Integer, Integer8, Integer16, Integer32, Integer64, \ - 
UnsignedInteger, UnsignedInteger8, UnsignedInteger16, UnsignedInteger32, \ - UnsignedInteger64, \ - Ipv6Address, Ipv4Address, IpAddress, \ - File, Ltree - -from spyne.util import sanitize_args - - -# Inheritance type constants. -class _SINGLE: - pass - -class _JOINED: - pass - - -_sq2sp_type_map = { - # we map float => double because sqla doesn't - # distinguish between floats and doubles. - sqlalchemy.Float: Double, - sqlalchemy.FLOAT: Double, - - sqlalchemy.Numeric: Decimal, - sqlalchemy.NUMERIC: Decimal, - - sqlalchemy.BigInteger: Integer64, - sqlalchemy.BIGINT: Integer64, - - sqlalchemy.Integer: Integer32, - sqlalchemy.INTEGER: Integer32, - - sqlalchemy.SmallInteger: Integer16, - sqlalchemy.SMALLINT: Integer16, - - sqlalchemy.Binary: ByteArray, - sqlalchemy.LargeBinary: ByteArray, - - sqlalchemy.Boolean: Boolean, - sqlalchemy.BOOLEAN: Boolean, - - sqlalchemy.DateTime: DateTime, - sqlalchemy.TIMESTAMP: DateTime, - sqlalchemy.dialects.postgresql.base.TIMESTAMP: DateTime, - sqlalchemy.DATETIME: DateTime, - sqlalchemy.dialects.postgresql.base.INTERVAL: Duration, - - sqlalchemy.Date: Date, - sqlalchemy.DATE: Date, - - sqlalchemy.Time: Time, - sqlalchemy.TIME: Time, - - PGUuid: Uuid, - PGLTree: Ltree, - PGInet: IpAddress, -} - - -# this needs to be called whenever a new column is instantiated. 
-def _sp_attrs_to_sqla_constraints(cls, subcls, col_kwargs=None, col=None): - # cls is the parent class of v - if subcls.Attributes.nullable == False and cls.__extends__ is None: - if col is None: - col_kwargs['nullable'] = False - else: - col.nullable = False - - if subcls.Attributes.db_default is not None: - if col is None: - col_kwargs['default'] = subcls.Attributes.db_default - else: - col.default = subcls.Attributes.db_default - - -def _get_sqlalchemy_type(cls): - db_type = cls.Attributes.db_type - if db_type is not None: - return db_type - - # must be above Unicode, because Ltree is Unicode's subclass - if issubclass(cls, Ltree): - return PGLTree - - # must be above Unicode, because Ip*Address is Unicode's subclass - if issubclass(cls, (IpAddress, Ipv4Address, Ipv6Address)): - return PGInet - - # must be above Unicode, because Uuid is Unicode's subclass - if issubclass(cls, Uuid): - return PGUuid(as_uuid=True) - - # must be above Unicode, because Point is Unicode's subclass - if issubclass(cls, Point): - return PGGeometry("POINT", dimension=cls.Attributes.dim) - - # must be above Unicode, because Line is Unicode's subclass - if issubclass(cls, Line): - return PGGeometry("LINESTRING", dimension=cls.Attributes.dim) - - # must be above Unicode, because Polygon is Unicode's subclass - if issubclass(cls, Polygon): - return PGGeometry("POLYGON", dimension=cls.Attributes.dim) - - # must be above Unicode, because MultiPoint is Unicode's subclass - if issubclass(cls, MultiPoint): - return PGGeometry("MULTIPOINT", dimension=cls.Attributes.dim) - - # must be above Unicode, because MultiLine is Unicode's subclass - if issubclass(cls, MultiLine): - return PGGeometry("MULTILINESTRING", dimension=cls.Attributes.dim) - - # must be above Unicode, because MultiPolygon is Unicode's subclass - if issubclass(cls, MultiPolygon): - return PGGeometry("MULTIPOLYGON", dimension=cls.Attributes.dim) - - # must be above Unicode, because String is Unicode's subclass - if issubclass(cls, 
String): - if cls.Attributes.max_len == String.Attributes.max_len: # Default is arbitrary-length - return sqlalchemy.Text - else: - return sqlalchemy.String(cls.Attributes.max_len) - - if issubclass(cls, Unicode): - if cls.Attributes.max_len == Unicode.Attributes.max_len: # Default is arbitrary-length - return sqlalchemy.UnicodeText - else: - return sqlalchemy.Unicode(cls.Attributes.max_len) - - if issubclass(cls, EnumBase): - return sqlalchemy.Enum(*cls.__values__, name=cls.__type_name__) - - if issubclass(cls, AnyXml): - return PGXml - - if issubclass(cls, AnyHtml): - return PGHtml - - if issubclass(cls, (Any, AnyDict)): - sa = cls.Attributes.store_as - if sa is None: - return None - if isinstance(sa, c_json): - return PGJson - if isinstance(sa, c_jsonb): - return PGJsonB - raise NotImplementedError(dict(cls=cls, store_as=sa)) - - if issubclass(cls, ByteArray): - return sqlalchemy.LargeBinary - - if issubclass(cls, (Integer64, UnsignedInteger64)): - return sqlalchemy.BigInteger - - if issubclass(cls, (Integer32, UnsignedInteger32)): - return sqlalchemy.Integer - - if issubclass(cls, (Integer16, UnsignedInteger16)): - return sqlalchemy.SmallInteger - - if issubclass(cls, (Integer8, UnsignedInteger8)): - return sqlalchemy.SmallInteger - - if issubclass(cls, Float): - return FLOAT - - if issubclass(cls, Double): - return DOUBLE_PRECISION - - if issubclass(cls, (Integer, UnsignedInteger)): - return sqlalchemy.DECIMAL - - if issubclass(cls, Decimal): - return sqlalchemy.DECIMAL - - if issubclass(cls, Boolean): - if cls.Attributes.store_as is bool: - return sqlalchemy.Boolean - if cls.Attributes.store_as is int: - return sqlalchemy.SmallInteger - - raise ValueError("Boolean.store_as has invalid value %r" % - cls.Attributes.store_as) - - if issubclass(cls, Date): - return sqlalchemy.Date - - if issubclass(cls, DateTime): - if cls.Attributes.timezone is None: - if cls.Attributes.as_timezone is None: - return sqlalchemy.DateTime(timezone=True) - else: - return 
sqlalchemy.DateTime(timezone=False) - else: - return sqlalchemy.DateTime(timezone=cls.Attributes.timezone) - - if issubclass(cls, Time): - return sqlalchemy.Time - - if issubclass(cls, Duration): - return sqlalchemy.dialects.postgresql.base.INTERVAL - - if issubclass(cls, XmlModifier): - retval = _get_sqlalchemy_type(cls.type) - return retval - - -def _get_col_o2o(parent, subname, subcls, fk_col_name, deferrable=None, - initially=None, ondelete=None, onupdate=None): - """Gets key and child type and returns a column that points to the primary - key of the child. - """ - - assert subcls.Attributes.table_name is not None, \ - "%r has no table name." % subcls - - col_args, col_kwargs = sanitize_args(subcls.Attributes.sqla_column_args) - _sp_attrs_to_sqla_constraints(parent, subcls, col_kwargs) - - # get pkeys from child class - pk_column, = get_pk_columns(subcls) # FIXME: Support multi-col keys - - pk_key, pk_spyne_type = pk_column - pk_sqla_type = _get_sqlalchemy_type(pk_spyne_type) - - # generate a fk to it from the current object (cls) - if 'name' in col_kwargs: - colname = col_kwargs.pop('name') - else: - colname = subname - - if fk_col_name is None: - fk_col_name = colname + "_" + pk_key - - assert fk_col_name != colname, \ - "The column name for the foreign key must be different from the " \ - "column name for the object itself." - - fk = ForeignKey( - '%s.%s' % (subcls.Attributes.table_name, pk_key), - use_alter=True, - name='%s_%s_fkey' % (subcls.Attributes.table_name, fk_col_name), - deferrable=deferrable, initially=initially, - ondelete=ondelete, onupdate=onupdate, - ) - - return Column(fk_col_name, pk_sqla_type, fk, **col_kwargs) - - -def _get_col_o2m(cls, fk_col_name, deferrable=None, initially=None, - ondelete=None, onupdate=None): - """Gets the parent class and returns a column that points to the primary key - of the parent. - """ - - assert cls.Attributes.table_name is not None, "%r has no table name." 
% cls - col_args, col_kwargs = sanitize_args(cls.Attributes.sqla_column_args) - - # get pkeys from current class - pk_column, = get_pk_columns(cls) # FIXME: Support multi-col keys - - pk_key, pk_spyne_type = pk_column - pk_sqla_type = _get_sqlalchemy_type(pk_spyne_type) - - # generate a fk from child to the current class - if fk_col_name is None: - fk_col_name = '_'.join([cls.Attributes.table_name, pk_key]) - - # we jump through all these hoops because we must instantiate the Column - # only after we're sure that it doesn't already exist and also because - # tinkering with functors is always fun :) - yield [(fk_col_name, pk_sqla_type)] - - fk = ForeignKey('%s.%s' % (cls.Attributes.table_name, pk_key), - deferrable=deferrable, initially=initially, - ondelete=ondelete, onupdate=onupdate) - col = Column(fk_col_name, pk_sqla_type, fk, **col_kwargs) - - yield col - - -def _get_cols_m2m(cls, k, child, fk_left_col_name, fk_right_col_name, - fk_left_deferrable, fk_left_initially, - fk_right_deferrable, fk_right_initially, - fk_left_ondelete, fk_left_onupdate, - fk_right_ondelete, fk_right_onupdate): - """Gets the parent and child classes and returns foreign keys to both - tables. 
These columns can be used to create a relation table.""" - - col_info, left_col = _get_col_o2m(cls, fk_left_col_name, - ondelete=fk_left_ondelete, onupdate=fk_left_onupdate, - deferrable=fk_left_deferrable, initially=fk_left_initially) - right_col = _get_col_o2o(cls, k, child, fk_right_col_name, - ondelete=fk_right_ondelete, onupdate=fk_right_onupdate, - deferrable=fk_right_deferrable, initially=fk_right_initially) - left_col.primary_key = right_col.primary_key = True - return left_col, right_col - - -class _FakeTable(object): - def __init__(self, name): - self.name = name - self.c = {} - self.columns = [] - self.indexes = [] - - def append_column(self, col): - self.columns.append(col) - self.c[col.name] = col - - -def _gen_index_info(table, col, k, v): - """ - :param table: sqla table - :param col: sqla col - :param k: field name (not necessarily == k) - :param v: spyne type - """ - - unique = v.Attributes.unique - index = v.Attributes.index - if unique and not index: - index = True - - try: - index_name, index_method = index - - except (TypeError, ValueError): - index_name = "%s_%s%s" % (table.name, k, '_unique' if unique else '') - index_method = index - - if index in (False, None): - return - - if index is True: - index_args = (index_name, col), dict(unique=unique) - else: - index_args = (index_name, col), dict(unique=unique, - postgresql_using=index_method) - - if isinstance(table, _FakeTable): - table.indexes.append(index_args) - - else: - indexes = dict([(idx.name, idx) for idx in col.table.indexes]) - existing_idx = indexes.get(index_name, None) - if existing_idx is None: - Index(*index_args[0], **index_args[1]) - - else: - assert existing_idx.unique == unique, \ - "Uniqueness flag differ between existing and current values. 
" \ - "Existing: {!r}, New: {!r}".format(existing_idx.unique, unique) - - existing_val = existing_idx.kwargs.get('postgresql_using') - - assert existing_val == index_method, \ - "Indexing methods differ between existing and current index " \ - "directives. Existing: {!r}, New: {!r}".format( - existing_val, index_method) - -def _check_inheritance(cls, cls_bases): - table_name = cls.Attributes.table_name - - inc = [] - inheritance = None - base_class = getattr(cls, '__extends__', None) - - if base_class is None: - for b in cls_bases: - if getattr(b, '_type_info', None) is not None and b.__mixin__: - base_class = b - - if base_class is not None: - base_table_name = base_class.Attributes.table_name - if base_table_name is not None: - if base_table_name == table_name: - inheritance = _SINGLE - else: - inheritance = _JOINED - raise NotImplementedError("Joined table inheritance is not yet " - "implemented.") - - # check whether the base classes are already mapped - base_mapper = None - if base_class is not None: - base_mapper = base_class.Attributes.sqla_mapper - - if base_mapper is None: - for b in cls_bases: - bm = _mapper_registry.get(b, None) - if bm is not None: - assert base_mapper is None, "There can be only one base mapper." - base_mapper = bm - inheritance = _SINGLE - - return inheritance, base_class, base_mapper, inc - - -def _check_table(cls): - table_name = cls.Attributes.table_name - metadata = cls.Attributes.sqla_metadata - - # check whether the object already has a table - table = None - if table_name in metadata.tables: - table = metadata.tables[table_name] - else: - # We need FakeTable because table_args can contain all sorts of stuff - # that can require a fully-constructed table, and we don't have that - # information here yet. 
- table = _FakeTable(table_name) - - return table - - -def _add_simple_type(cls, props, table, subname, subcls, sqla_type): - col_args, col_kwargs = sanitize_args(subcls.Attributes.sqla_column_args) - _sp_attrs_to_sqla_constraints(cls, subcls, col_kwargs) - - mp = getattr(subcls.Attributes, 'mapper_property', None) - - if 'name' in col_kwargs: - colname = col_kwargs.pop('name') - else: - colname = subname - - if not subcls.Attributes.exc_db: - if colname in table.c: - col = table.c[colname] - - else: - col = Column(colname, sqla_type, *col_args, **col_kwargs) - table.append_column(col) - _gen_index_info(table, col, subname, subcls) - - if not subcls.Attributes.exc_mapper: - props[subname] = col - - elif mp is not None: - props[subname] = mp - - -def _gen_array_m2m(cls, props, subname, arrser, storage): - """Generates a relational many-to-many array. - - :param cls: The class that owns the field - :param props: SQLAlchemy Mapper properties - :param subname: Field name - :param arrser: Array serializer, ie the __orig__ of the class inside the - Array object - :param storage: The storage configuration object passed to the store_as - attribute. 
- """ - - metadata = cls.Attributes.sqla_metadata - - col_own, col_child = _get_cols_m2m(cls, subname, arrser, - storage.left, storage.right, - storage.fk_left_deferrable, storage.fk_left_initially, - storage.fk_right_deferrable, storage.fk_right_initially, - storage.fk_left_ondelete, storage.fk_left_onupdate, - storage.fk_right_ondelete, storage.fk_right_onupdate) - - storage.left = col_own.key - storage.right = col_child.key - - # noinspection PySimplifyBooleanCheck because literal True means - # "generate table name automatically" here - if storage.multi is True: - rel_table_name = '_'.join([cls.Attributes.table_name, subname]) - else: - rel_table_name = storage.multi - - if rel_table_name in metadata.tables: - rel_t = metadata.tables[rel_table_name] - - col_own_existing = rel_t.c.get(col_own.key, None) - assert col_own_existing is not None - if col_own_existing is not None: - assert col_own.type.__class__ == col_own_existing.type.__class__ - - col_child_existing = rel_t.c.get(col_child.key, None) - if col_child_existing is None: - rel_t.append_column(col_child) - - else: - assert col_child.type.__class__ == col_child_existing.type.__class__ - - else: - rel_t = Table(rel_table_name, metadata, *(col_own, col_child)) - - own_t = cls.Attributes.sqla_table - - rel_kwargs = dict( - lazy=storage.lazy, - backref=storage.backref, - cascade=storage.cascade, - order_by=storage.order_by, - back_populates=storage.back_populates, - ) - - if storage.explicit_join: - # Specify primaryjoin and secondaryjoin when requested. - # There are special cases when sqlalchemy can't figure it out by itself. - # this is where we help it when we can. 
- # e.g.: http://sqlalchemy.readthedocs.org/en/rel_1_0/orm/join_conditions.html#self-referential-many-to-many-relationship - - assert own_t is not None and len(get_pk_columns(cls)) > 0 - - # FIXME: support more than one pk - (col_pk_key, _), = get_pk_columns(cls) - col_pk = own_t.c[col_pk_key] - - rel_kwargs.update(dict( - secondary=rel_t, - primaryjoin=(col_pk == rel_t.c[col_own.key]), - secondaryjoin=(col_pk == rel_t.c[col_child.key]), - )) - - if storage.single_parent is not None: - rel_kwargs['single_parent'] = storage.single_parent - - props[subname] = relationship(arrser, **rel_kwargs) - - else: - rel_kwargs.update(dict( - secondary=rel_t, - )) - - if storage.single_parent is not None: - rel_kwargs['single_parent'] = storage.single_parent - - props[subname] = relationship(arrser, **rel_kwargs) - - -def _gen_array_simple(cls, props, subname, arrser_cust, storage): - """Generate an array of simple objects. - - :param cls: The class that owns this field - :param props: SQLAlchemy Mapper properties - :param subname: Field name - :param arrser_cust: Array serializer, ie the class itself inside the Array - object - :param storage: The storage configuration object passed to the store_as - """ - - table_name = cls.Attributes.table_name - metadata = cls.Attributes.sqla_metadata - - # get left (fk) column info - _gen_col = _get_col_o2m(cls, storage.left, - ondelete=storage.fk_left_ondelete, onupdate=storage.fk_left_onupdate, - deferrable=storage.fk_left_deferrable, - initially=storage.fk_left_initially) - - col_info = next(_gen_col) # gets the column name - # FIXME: Add support for multi-column primary keys. 
- storage.left, child_left_col_type = col_info[0] - child_left_col_name = storage.left - - # get right(data) column info - child_right_col_type = _get_sqlalchemy_type(arrser_cust) - child_right_col_name = storage.right # this is the data column - if child_right_col_name is None: - child_right_col_name = subname - - # get table name - child_table_name = arrser_cust.Attributes.table_name - if child_table_name is None: - child_table_name = '_'.join([table_name, subname]) - - if child_table_name in metadata.tables: - child_t = metadata.tables[child_table_name] - - # if we have the table, make sure have the right column (data column) - assert child_right_col_type.__class__ is \ - child_t.c[child_right_col_name].type.__class__, "%s.%s: %r != %r" % \ - (cls, child_right_col_name, child_right_col_type.__class__, - child_t.c[child_right_col_name].type.__class__) - - if child_left_col_name in child_t.c: - assert child_left_col_type is \ - child_t.c[child_left_col_name].type.__class__, "%r != %r" % \ - (child_left_col_type, - child_t.c[child_left_col_name].type.__class__) - else: - # Table exists but our own foreign key doesn't. - child_left_col = next(_gen_col) - _sp_attrs_to_sqla_constraints(cls, arrser_cust, col=child_left_col) - child_t.append_column(child_left_col) - - else: - # table does not exist, generate table - child_right_col = Column(child_right_col_name, child_right_col_type) - _sp_attrs_to_sqla_constraints(cls, arrser_cust, col=child_right_col) - - child_left_col = next(_gen_col) - _sp_attrs_to_sqla_constraints(cls, arrser_cust, col=child_left_col) - - child_t = Table(child_table_name , metadata, - Column('id', sqlalchemy.Integer, primary_key=True), - child_left_col, - child_right_col, - ) - _gen_index_info(child_t, child_right_col, child_right_col_name, - arrser_cust) - - # generate temporary class for association proxy - cls_name = ''.join(x.capitalize() or '_' for x in - child_table_name.split('_')) - # generates camelcase class name. 
- - def _i(self, *args): - setattr(self, child_right_col_name, args[0]) - - cls_ = type("_" + cls_name, (object,), {'__init__': _i}) - mapper(cls_, child_t) - props["_" + subname] = relationship(cls_) - - # generate association proxy - setattr(cls, subname, - association_proxy("_" + subname, child_right_col_name)) - - -def _gen_array_o2m(cls, props, subname, arrser, arrser_cust, storage): - _gen_col = _get_col_o2m(cls, storage.right, - ondelete=storage.fk_right_ondelete, onupdate=storage.fk_right_onupdate, - deferrable=storage.fk_right_deferrable, - initially=storage.fk_right_initially) - - col_info = next(_gen_col) # gets the column name - storage.right, col_type = col_info[0] # FIXME: Add support for multi-column primary keys. - - assert storage.left is None, \ - "'left' is ignored in one-to-many relationships " \ - "with complex types (because they already have a " \ - "table). You probably meant to use 'right'." - - child_t = arrser.__table__ - - if storage.right in child_t.c: - # TODO: This branch MUST be tested. - new_col_type = child_t.c[storage.right].type.__class__ - assert col_type is child_t.c[storage.right].type.__class__, \ - "Existing column type %r disagrees with new column type %r" % \ - (col_type, new_col_type) - - # if the column is already there, the decision about whether - # it should be in child's mapper or not should also have been - # made. - # - # so, not adding the child column to to child mapper - # here. 
- col = child_t.c[storage.right] - - else: - col = next(_gen_col) - - _sp_attrs_to_sqla_constraints(cls, arrser_cust, col=col) - - child_t.append_column(col) - arrser.__mapper__.add_property(col.name, col) - - - rel_kwargs = dict( - lazy=storage.lazy, - backref=storage.backref, - cascade=storage.cascade, - order_by=storage.order_by, - foreign_keys=[col], - back_populates=storage.back_populates, - ) - - if storage.single_parent is not None: - rel_kwargs['single_parent'] = storage.single_parent - - props[subname] = relationship(arrser, **rel_kwargs) - - -def _is_array(v): - return v.Attributes.max_occurs > 1 or issubclass(v, Array) - - -def _add_array_to_complex(cls, props, subname, subcls, storage): - arrser_cust = subcls - if issubclass(subcls, Array): - arrser_cust, = subcls._type_info.values() - - arrser = arrser_cust - if arrser_cust.__orig__ is not None: - arrser = arrser_cust.__orig__ - - if storage.multi != False: # many to many - _gen_array_m2m(cls, props, subname, arrser, storage) - - elif issubclass(arrser, SimpleModel): # one to many simple type - _gen_array_simple(cls, props, subname, arrser_cust, storage) - - else: # one to many complex type - _gen_array_o2m(cls, props, subname, arrser, arrser_cust, storage) - - -def _add_simple_type_to_complex(cls, props, table, subname, subcls, storage, - col_kwargs): - # v has the Attribute values we need whereas real_v is what the - # user instantiates (thus what sqlalchemy needs) - if subcls.__orig__ is None: # vanilla class - real_v = subcls - else: # customized class - real_v = subcls.__orig__ - - assert not getattr(storage, 'multi', False), \ - 'Storing a single element-type using a relation table is pointless.' 
- - assert storage.right is None, \ - "'right' is ignored in a one-to-one relationship" - - col = _get_col_o2o(cls, subname, subcls, storage.left, - ondelete=storage.fk_left_ondelete, onupdate=storage.fk_left_onupdate, - deferrable=storage.fk_left_deferrable, - initially=storage.fk_left_initially) - - storage.left = col.name - - if col.name in table.c: - col = table.c[col.name] - if col_kwargs.get('nullable') is False: - col.nullable = False - else: - table.append_column(col) - - rel_kwargs = dict( - lazy=storage.lazy, - backref=storage.backref, - order_by=storage.order_by, - back_populates=storage.back_populates, - ) - - if storage.single_parent is not None: - rel_kwargs['single_parent'] = storage.single_parent - - if real_v is (cls.__orig__ or cls): - (pk_col_name, pk_col_type), = get_pk_columns(cls) - rel_kwargs['remote_side'] = [table.c[pk_col_name]] - - rel = relationship(real_v, uselist=False, foreign_keys=[col], - **rel_kwargs) - - _gen_index_info(table, col, subname, subcls) - - props[subname] = rel - props[col.name] = col - - -def _add_complex_type_as_table(cls, props, table, subname, subcls, storage, - col_args, col_kwargs): - # add one to many relation - if _is_array(subcls): - _add_array_to_complex(cls, props, subname, subcls, storage) - - # add one to one relation - else: - _add_simple_type_to_complex(cls, props, table, subname, subcls, - storage, col_kwargs) - - -def _add_complex_type_as_xml(cls, props, table, subname, subcls, storage, - col_args, col_kwargs): - if 'name' in col_kwargs: - colname = col_kwargs.pop('name') - else: - colname = subname - - if colname in table.c: - col = table.c[colname] - else: - t = PGObjectXml(subcls, storage.root_tag, storage.no_ns, - storage.pretty_print) - col = Column(colname, t, **col_kwargs) - - props[subname] = col - if not subname in table.c: - table.append_column(col) - - -def _add_complex_type_as_json(cls, props, table, subname, subcls, storage, - col_args, col_kwargs, dbt): - if 'name' in col_kwargs: - 
colname = col_kwargs.pop('name') - else: - colname = subname - - if colname in table.c: - col = table.c[colname] - - else: - t = PGObjectJson(subcls, ignore_wrappers=storage.ignore_wrappers, - complex_as=storage.complex_as, dbt=dbt) - col = Column(colname, t, **col_kwargs) - - props[subname] = col - if not subname in table.c: - table.append_column(col) - - -def _add_complex_type(cls, props, table, subname, subcls): - if issubclass(subcls, File): - return _add_file_type(cls, props, table, subname, subcls) - - storage = getattr(subcls.Attributes, 'store_as', None) - col_args, col_kwargs = sanitize_args(subcls.Attributes.sqla_column_args) - _sp_attrs_to_sqla_constraints(cls, subcls, col_kwargs) - - if isinstance(storage, c_table): - return _add_complex_type_as_table(cls, props, table, subname, subcls, - storage, col_args, col_kwargs) - if isinstance(storage, c_xml): - return _add_complex_type_as_xml(cls, props, table, subname, subcls, - storage, col_args, col_kwargs) - if isinstance(storage, c_json): - return _add_complex_type_as_json(cls, props, table, subname, subcls, - storage, col_args, col_kwargs, 'json') - if isinstance(storage, c_jsonb): - return _add_complex_type_as_json(cls, props, table, subname, subcls, - storage, col_args, col_kwargs, 'jsonb') - if isinstance(storage, c_msgpack): - raise NotImplementedError(c_msgpack) - - if storage is None: - return - - raise ValueError(storage) - - -def _convert_fake_table(cls, table): - metadata = cls.Attributes.sqla_metadata - table_name = cls.Attributes.table_name - - _table = table - table_args, table_kwargs = sanitize_args(cls.Attributes.sqla_table_args) - table = Table(table_name, metadata, - *(tuple(table.columns) + table_args), **table_kwargs) - - for index_args, index_kwargs in _table.indexes: - Index(*index_args, **index_kwargs) - - return table - - -def _gen_mapper(cls, props, table, cls_bases): - """Generate SQLAlchemy mapper from Spyne definition data. - - :param cls: La Class. 
- :param props: Dict of properties for SQLAlchemt'y Mapper call. - :param table: A Table instance. Not a `_FakeTable` or anything. - :param cls_bases: Sequence of class bases. - """ - - inheritance, base_class, base_mapper, inc = _check_inheritance(cls, cls_bases) - mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args) - - _props = mapper_kwargs.get('properties', None) - if _props is None: - mapper_kwargs['properties'] = props - else: - props.update(_props) - mapper_kwargs['properties'] = props - - po = mapper_kwargs.get('polymorphic_on', None) - if po is not None: - if not isinstance(po, Column): - mapper_kwargs['polymorphic_on'] = table.c[po] - else: - logger.warning("Deleted invalid 'polymorphic_on' value %r for %r.", - po, cls) - del mapper_kwargs['polymorphic_on'] - - if base_mapper is not None: - mapper_kwargs['inherits'] = base_mapper - - if inheritance is not _SINGLE: - mapper_args = (table,) + mapper_args - - cls_mapper = mapper(cls, *mapper_args, **mapper_kwargs) - - def on_load(target, context): - d = target.__dict__ - - for k, v in cls.get_flat_type_info(cls).items(): - if not k in d: - if isclass(v) and issubclass(v, ComplexModelBase): - pass - else: - d[k] = None - - event.listen(cls, 'load', on_load) - - return cls_mapper - - -def _add_file_type(cls, props, table, subname, subcls): - storage = getattr(subcls.Attributes, 'store_as', None) - col_args, col_kwargs = sanitize_args(subcls.Attributes.sqla_column_args) - _sp_attrs_to_sqla_constraints(cls, subcls, col_kwargs) - - if isinstance(storage, HybridFileStore): - if subname in table.c: - col = table.c[subname] - - else: - assert isabs(storage.store) - #FIXME: Add support for storage markers from spyne.model.complex - if storage.db_format == 'json': - t = PGFileJson(storage.store, storage.type) - - elif storage.db_format == 'jsonb': - t = PGFileJson(storage.store, storage.type, dbt='jsonb') - - else: - raise NotImplementedError(storage.db_format) - - col = Column(subname, t, 
**col_kwargs) - - props[subname] = col - if not subname in table.c: - table.append_column(col) - - else: - raise NotImplementedError(storage) - - -def add_column(cls, subname, subcls): - """Add field to the given Spyne object also mapped as a SQLAlchemy object - to a SQLAlchemy table - - :param cls: The class to add the column to. - :param subname: The column name - :param subcls: The column type, a ModelBase subclass. - """ - - table = cls.__table__ - mapper_props = {} - - # Add to table - sqla_type = _get_sqlalchemy_type(subcls) - if sqla_type is None: # complex model - _add_complex_type(cls, mapper_props, table, subname, subcls) - else: - _add_simple_type(cls, mapper_props, table, subname, subcls, sqla_type) - - # Add to mapper - sqla_mapper = cls.Attributes.sqla_mapper - for subname, subcls in mapper_props.items(): - if not sqla_mapper.has_property(subname): - sqla_mapper.add_property(subname, subcls) - - -def _parent_mapper_has_property(cls, cls_bases, k): - if len(cls_bases) == 0 and cls.__orig__ is cls_bases[0]: - return False - - for b in cls_bases: - if not hasattr(b, 'Attributes'): - continue - - mapper = b.Attributes.sqla_mapper - if mapper is not None and mapper.has_property(k): - # print(" Skipping mapping field", "%s.%s" % (cls.__name__, k), - # "because parent mapper from", b.__name__, "already has it") - return True - - # print("NOT skipping mapping field", "%s.%s" % (cls.__name__, k)) - return False - - -def gen_sqla_info(cls, cls_bases=()): - """Return SQLAlchemy table object corresponding to the passed Spyne object. - Also maps given class to the returned table. 
- """ - - table = _check_table(cls) - mapper_props = {} - - ancestors = cls.ancestors() - if len(ancestors) > 0: - anc_mapper = ancestors[0].Attributes.sqla_mapper - if anc_mapper is None: - # no mapper in parent, use all fields - fields = cls.get_flat_type_info(cls).items() - - elif anc_mapper.concrete: - # there is mapper in parent and it's concrete, so use all fields - fields = cls.get_flat_type_info(cls).items() - - else: - # there is a mapper in parent and it's not concrete, so parent - # columns are already mapped, so use only own fields. - fields = cls._type_info.items() - - else: - # when no parents, use all fields anyway. - assert set(cls._type_info.items()) == \ - set(cls.get_flat_type_info(cls).items()) - - fields = cls.get_flat_type_info(cls).items() - - for k, v in fields: - if _parent_mapper_has_property(cls, cls_bases, k): - continue - - t = _get_sqlalchemy_type(v) - - if t is None: # complex model - p = getattr(v.Attributes, 'store_as', None) - if p is None: - logger.debug("Skipping %s.%s.%s: %r, store_as: %r" % ( - cls.get_namespace(), - cls.get_type_name(), k, v, p)) - else: - _add_complex_type(cls, mapper_props, table, k, v) - else: - _add_simple_type(cls, mapper_props, table, k, v, t) - - if isinstance(table, _FakeTable): - table = _convert_fake_table(cls, table) - - cls_mapper = _gen_mapper(cls, mapper_props, table, cls_bases) - - cls.__tablename__ = cls.Attributes.table_name - cls.Attributes.sqla_mapper = cls.__mapper__ = cls_mapper - cls.Attributes.sqla_table = cls.__table__ = table - - return table - - -def _get_spyne_type(v): - """Map sqlalchemy types to spyne types.""" - - cust = {} - if v.primary_key: - cust['primary_key'] = True - - if not v.nullable: - cust['nullable'] = False - cust['min_occurs'] = 1 - - if isinstance(v.type, sqlalchemy.Enum): - if v.type.convert_unicode: - return Unicode(values=v.type.enums, **cust) - else: - cust['type_name'] = v.type.name - return Enum(*v.type.enums, **cust) - - if isinstance(v.type, 
(sqlalchemy.UnicodeText, sqlalchemy.Text)): - return Unicode(**cust) - - if isinstance(v.type, (sqlalchemy.Unicode, sqlalchemy.String, - sqlalchemy.VARCHAR)): - return Unicode(v.type.length, **cust) - - if isinstance(v.type, sqlalchemy.Numeric): - return Decimal(v.type.precision, v.type.scale, **cust) - - if isinstance(v.type, PGXml): - if len(cust) > 0: - return AnyXml(**cust) - else: - return AnyXml - - if isinstance(v.type, PGHtml): - if len(cust) > 0: - return AnyHtml(**cust) - else: - return AnyHtml - - if type(v.type) in _sq2sp_type_map: - retval = _sq2sp_type_map[type(v.type)] - if len(cust) > 0: - return retval.customize(**cust) - else: - return retval - - if isinstance(v.type, (PGObjectJson, PGObjectXml)): - retval = v.type.cls - if len(cust) > 0: - return retval.customize(**cust) - else: - return retval - - if isinstance(v.type, PGFileJson): - retval = v.FileData - if len(cust) > 0: - return v.FileData.customize(**cust) - else: - return retval - - raise Exception("Spyne type was not found. Probably _sq2sp_type_map " - "needs a new entry. 
%r" % v) - - -def gen_spyne_info(cls): - table = cls.Attributes.sqla_table - _type_info = cls._type_info - mapper_args, mapper_kwargs = sanitize_args(cls.Attributes.sqla_mapper_args) - - if len(_type_info) == 0: - for c in table.c: - _type_info[c.name] = _get_spyne_type(c) - else: - mapper_kwargs['include_properties'] = _type_info.keys() - - # Map the table to the object - cls_mapper = mapper(cls, table, *mapper_args, **mapper_kwargs) - - cls.Attributes.table_name = cls.__tablename__ = table.name - cls.Attributes.sqla_mapper = cls.__mapper__ = cls_mapper - - -def get_pk_columns(cls): - """Return primary key fields of a Spyne object.""" - - retval = [] - for k, v in cls.get_flat_type_info(cls).items(): - if v.Attributes.sqla_column_args is not None and \ - v.Attributes.sqla_column_args[-1].get('primary_key', False): - retval.append((k, v)) - - return tuple(retval) if len(retval) > 0 else None diff --git a/libs_crutch/contrib/spyne/store/relational/document.py b/libs_crutch/contrib/spyne/store/relational/document.py deleted file mode 100644 index 0383f7a..0000000 --- a/libs_crutch/contrib/spyne/store/relational/document.py +++ /dev/null @@ -1,353 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logger = logging.getLogger(__name__) - -import os -import json -import shutil - -import sqlalchemy.dialects - -from uuid import uuid1 -from mmap import mmap, ACCESS_READ -from contextlib import closing -from os.path import join, abspath, dirname, basename, isfile - -try: - from lxml import etree - from lxml import html - from spyne.util.xml import get_object_as_xml, get_xml_as_object - -except ImportError as _import_error: - etree = None - html = None - - _local_import_error = _import_error - def get_object_as_xml(*_, **__): - raise _local_import_error - def get_xml_as_object(*_, **__): - raise _local_import_error - -from sqlalchemy.sql.type_api import UserDefinedType - -from spyne import ValidationError -from spyne.model.relational import FileData - -from spyne.util import six -from spyne.util.six import binary_type, text_type, BytesIO, StringIO -from spyne.util.fileproxy import SeekableFileProxy - - -class PGXml(UserDefinedType): - def __init__(self, pretty_print=False, xml_declaration=False, - encoding='UTF-8'): - super(PGXml, self).__init__() - self.xml_declaration = xml_declaration - self.pretty_print = pretty_print - self.encoding = encoding - - def get_col_spec(self, **_): - return "xml" - - def bind_processor(self, dialect): - def process(value): - if value is None or \ - isinstance(value, (six.text_type, six.binary_type)): - return value - - if six.PY2: - return etree.tostring(value, pretty_print=self.pretty_print, - encoding=self.encoding, xml_declaration=False) - - return etree.tostring(value, pretty_print=self.pretty_print, - encoding="unicode", xml_declaration=False) - - return process - - def result_processor(self, dialect, col_type): - def process(value): - if value is not None: - return etree.fromstring(value) - 
else: - return value - return process - -sqlalchemy.dialects.postgresql.base.ischema_names['xml'] = PGXml - - -class PGHtml(UserDefinedType): - def __init__(self, pretty_print=False, encoding='UTF-8'): - super(PGHtml, self).__init__() - - self.pretty_print = pretty_print - self.encoding = encoding - - def get_col_spec(self, **_): - return "text" - - def bind_processor(self, dialect): - def process(value): - if isinstance(value, (six.text_type, six.binary_type)) \ - or value is None: - return value - else: - return html.tostring(value, pretty_print=self.pretty_print, - encoding=self.encoding) - return process - - def result_processor(self, dialect, col_type): - def process(value): - if value is not None and len(value) > 0: - return html.fromstring(value) - else: - return None - return process - - -class PGJson(UserDefinedType): - def __init__(self, encoding='UTF-8'): - self.encoding = encoding - - def get_col_spec(self, **_): - return "json" - - def bind_processor(self, dialect): - def process(value): - if isinstance(value, (text_type, binary_type)) or value is None: - return value - else: - if six.PY2: - return json.dumps(value, encoding=self.encoding) - else: - return json.dumps(value) - return process - - def result_processor(self, dialect, col_type): - def process(value): - if isinstance(value, (text_type, binary_type)): - return json.loads(value) - else: - return value - return process - -sqlalchemy.dialects.postgresql.base.ischema_names['json'] = PGJson - - -class PGJsonB(PGJson): - def get_col_spec(self, **_): - return "jsonb" - - -sqlalchemy.dialects.postgresql.base.ischema_names['jsonb'] = PGJsonB - - -class PGObjectXml(UserDefinedType): - def __init__(self, cls, root_tag_name=None, no_namespace=False, - pretty_print=False): - self.cls = cls - self.root_tag_name = root_tag_name - self.no_namespace = no_namespace - self.pretty_print = pretty_print - - def get_col_spec(self, **_): - return "xml" - - def bind_processor(self, dialect): - def process(value): - 
if value is not None: - return etree.tostring(get_object_as_xml(value, self.cls, - self.root_tag_name, self.no_namespace), encoding='utf8', - pretty_print=self.pretty_print, xml_declaration=False) - return process - - def result_processor(self, dialect, col_type): - def process(value): - if value is not None: - return get_xml_as_object(etree.fromstring(value), self.cls) - return process - - -class PGObjectJson(UserDefinedType): - def __init__(self, cls, ignore_wrappers=True, complex_as=dict, dbt='json', - encoding='utf8'): - self.cls = cls - self.ignore_wrappers = ignore_wrappers - self.complex_as = complex_as - self.dbt = dbt - self.encoding = encoding - - from spyne.util.dictdoc import get_dict_as_object - from spyne.util.dictdoc import get_object_as_json - self.get_object_as_json = get_object_as_json - self.get_dict_as_object = get_dict_as_object - - def get_col_spec(self, **_): - return self.dbt - - def bind_processor(self, dialect): - def process(value): - if value is not None: - try: - return self.get_object_as_json(value, self.cls, - ignore_wrappers=self.ignore_wrappers, - complex_as=self.complex_as, - ).decode(self.encoding) - - except Exception as e: - logger.debug("Failed to serialize %r to json: %r", value, e) - raise - - return process - - def result_processor(self, dialect, col_type): - from spyne.util.dictdoc import JsonDocument - - def process(value): - if value is None: - return None - - if isinstance(value, six.binary_type): - value = value.decode(self.encoding) - - if isinstance(value, six.text_type): - return self.get_dict_as_object(json.loads(value), self.cls, - ignore_wrappers=self.ignore_wrappers, - complex_as=self.complex_as, - protocol=JsonDocument, - ) - - return self.get_dict_as_object(value, self.cls, - ignore_wrappers=self.ignore_wrappers, - complex_as=self.complex_as, - protocol=JsonDocument, - ) - - return process - - -class PGFileJson(PGObjectJson): - def __init__(self, store, type=None, dbt='json'): - if type is None: - type = 
FileData - - super(PGFileJson, self).__init__(type, ignore_wrappers=True, - complex_as=list, dbt=dbt) - self.store = store - - def bind_processor(self, dialect): - def process(value): - if value is not None: - if value.data is not None: - value.path = uuid1().hex - fp = join(self.store, value.path) - if not abspath(fp).startswith(self.store): - raise ValidationError(value.path, "Path %r contains " - "relative path operators (e.g. '..')") - - with open(fp, 'wb') as file: - for d in value.data: - file.write(d) - - elif value.handle is not None: - value.path = uuid1().hex - fp = join(self.store, value.path) - if not abspath(fp).startswith(self.store): - raise ValidationError(value.path, "Path %r contains " - "relative path operators (e.g. '..')") - - if isinstance(value.handle, (StringIO, BytesIO)): - with open(fp, 'wb') as out_file: - out_file.write(value.handle.getvalue()) - else: - with closing(mmap(value.handle.fileno(), 0, - access=ACCESS_READ)) as data: - with open(fp, 'wb') as out_file: - out_file.write(data) - - elif value.path is not None: - in_file_path = value.path - - if not isfile(in_file_path): - logger.error("File path in %r not found" % value) - - if dirname(abspath(in_file_path)) != self.store: - dest = join(self.store, uuid1().get_hex()) - - if value.move: - shutil.move(in_file_path, dest) - logger.debug("move '%s' => '%s'", - in_file_path, dest) - - else: - shutil.copy(in_file_path, dest) - logger.debug("copy '%s' => '%s'", - in_file_path, dest) - - value.path = basename(dest) - value.abspath = dest - - else: - raise ValueError("Invalid file object passed in. 
All of " - ".data, .handle and .path are None.") - - value.store = self.store - value.abspath = join(self.store, value.path) - - return self.get_object_as_json(value, self.cls, - ignore_wrappers=self.ignore_wrappers, - complex_as=self.complex_as, - ) - - return process - - def result_processor(self, dialect, col_type): - def process(value): - if value is None: - return None - - if isinstance(value, six.text_type): - value = json.loads(value) - - elif isinstance(value, six.binary_type): - value = json.loads(value.decode('utf8')) - - retval = self.get_dict_as_object(value, self.cls, - ignore_wrappers=self.ignore_wrappers, - complex_as=self.complex_as) - - retval.store = self.store - retval.abspath = path = join(self.store, retval.path) - retval.handle = None - retval.data = [b''] - - if not os.access(path, os.R_OK): - logger.error("File %r is not readable", path) - return retval - - h = retval.handle = SeekableFileProxy(open(path, 'rb')) - if os.fstat(retval.handle.fileno()).st_size > 0: - h.mmap = mmap(h.fileno(), 0, access=ACCESS_READ) - retval.data = (h.mmap,) - # FIXME: Where do we close this mmap? - - return retval - - return process diff --git a/libs_crutch/contrib/spyne/store/relational/override.py b/libs_crutch/contrib/spyne/store/relational/override.py deleted file mode 100644 index e04f2fb..0000000 --- a/libs_crutch/contrib/spyne/store/relational/override.py +++ /dev/null @@ -1,98 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -from sqlalchemy.ext.compiler import compiles - -from sqlalchemy.dialects.postgresql import INET -from spyne.store.relational import PGXml, PGJson, PGHtml, PGJsonB, \ - PGObjectJson, PGFileJson - - -@compiles(PGXml) -def compile_xml(type_, compiler, **kw): - return "xml" - - -@compiles(PGHtml) -def compile_html(type_, compiler, **kw): - return "text" - - -@compiles(PGJson) -def compile_json(type_, compiler, **kw): - return type_.get_col_spec() - - -@compiles(PGJsonB) -def compile_jsonb(type_, compiler, **kw): - return type_.get_col_spec() - - -@compiles(PGObjectJson) -def compile_ojson(type_, compiler, **kw): - return type_.get_col_spec() - - -@compiles(PGFileJson) -def compile_fjson(type_, compiler, **kw): - return type_.get_col_spec() - - -@compiles(INET) -def compile_inet(type_, compiler, **kw): - return "inet" - - - -@compiles(PGXml, "firebird") -def compile_xml_firebird(type_, compiler, **kw): - return "blob" - - -@compiles(PGHtml, "firebird") -def compile_html_firebird(type_, compiler, **kw): - return "blob" - - -@compiles(PGJson, "firebird") -def compile_json_firebird(type_, compiler, **kw): - return "blob" - - -@compiles(PGJsonB, "firebird") -def compile_jsonb_firebird(type_, compiler, **kw): - return "blob" - - -@compiles(PGObjectJson, "firebird") -def compile_ojson_firebird(type_, compiler, **kw): - return "blob" - - -@compiles(PGFileJson, "firebird") -def compile_fjson_firebird(type_, compiler, **kw): - return "blob" - - -@compiles(INET, "firebird") -def compile_inet_firebird(type_, compiler, **kw): - # http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/netinet_in.h.html - # INET6_ADDRSTRLEN - return "varchar(45)" diff --git a/libs_crutch/contrib/spyne/store/relational/simple.py 
b/libs_crutch/contrib/spyne/store/relational/simple.py deleted file mode 100644 index 0b2aeba..0000000 --- a/libs_crutch/contrib/spyne/store/relational/simple.py +++ /dev/null @@ -1,96 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from sqlalchemy import sql -from sqlalchemy.ext.compiler import compiles -from sqlalchemy.dialects.postgresql.base import ischema_names, PGTypeCompiler -from sqlalchemy.dialects.postgresql import ARRAY as PGArray, UUID as PGUuid - -from sqlalchemy.sql.sqltypes import Concatenable -from sqlalchemy.sql.type_api import UserDefinedType - - -@compiles(PGUuid, "sqlite") -def compile_uuid_sqlite(type_, compiler, **kw): - return "BLOB" - - - -class PGLTree(Concatenable, UserDefinedType): - """Postgresql `ltree` type.""" - - class Comparator(Concatenable.Comparator): - def ancestor_of(self, other): - if isinstance(other, list): - return self.op('@>')(sql.cast(other, PGArray(PGLTree))) - else: - return self.op('@>')(other) - - def descendant_of(self, other): - if isinstance(other, list): - return self.op('<@')(sql.cast(other, PGArray(PGLTree))) - else: - return self.op('<@')(other) - - def lquery(self, other): - if isinstance(other, list): - return self.op('?')(sql.cast(other, PGArray(PGLQuery))) - else: - 
return self.op('~')(other) - - def ltxtquery(self, other): - return self.op('@')(other) - - comparator_factory = Comparator - - __visit_name__ = 'LTREE' - - -class PGLQuery(UserDefinedType): - """Postresql `lquery` type.""" - - __visit_name__ = 'LQUERY' - - -class PGLTxtQuery(UserDefinedType): - """Postresql `ltxtquery` type.""" - - __visit_name__ = 'LTXTQUERY' - - -ischema_names['ltree'] = PGLTree -ischema_names['lquery'] = PGLQuery -ischema_names['ltxtquery'] = PGLTxtQuery - - -def visit_LTREE(self, type_, **kw): - return 'LTREE' - - -def visit_LQUERY(self, type_, **kw): - return 'LQUERY' - - -def visit_LTXTQUERY(self, type_, **kw): - return 'LTXTQUERY' - - -PGTypeCompiler.visit_LTREE = visit_LTREE -PGTypeCompiler.visit_LQUERY = visit_LQUERY -PGTypeCompiler.visit_LTXTQUERY = visit_LTXTQUERY diff --git a/libs_crutch/contrib/spyne/store/relational/spatial.py b/libs_crutch/contrib/spyne/store/relational/spatial.py deleted file mode 100644 index 152362b..0000000 --- a/libs_crutch/contrib/spyne/store/relational/spatial.py +++ /dev/null @@ -1,84 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from sqlalchemy import sql -from sqlalchemy.ext.compiler import compiles -from sqlalchemy.sql.type_api import UserDefinedType - - -class PGGeometry(UserDefinedType): - """Geometry type for Postgis 2""" - - class PlainWkt: - pass - - class PlainWkb: - pass - - def __init__(self, geometry_type='GEOMETRY', srid=4326, dimension=2, - format='wkt'): - self.geometry_type = geometry_type.upper() - self.name = 'geometry' - self.srid = int(srid) - self.dimension = dimension - self.format = format - - if self.format == 'wkt': - self.format = PGGeometry.PlainWkt - elif self.format == 'wkb': - self.format = PGGeometry.PlainWkb - - def get_col_spec(self): - return '%s(%s,%d)' % (self.name, self.geometry_type, self.srid) - - def column_expression(self, col): - if self.format is PGGeometry.PlainWkb: - return sql.func.ST_AsBinary(col, type_=self) - if self.format is PGGeometry.PlainWkt: - return sql.func.ST_AsText(col, type_=self) - - def result_processor(self, dialect, coltype): - if self.format is PGGeometry.PlainWkt: - def process(value): - if value is not None: - return value - - if self.format is PGGeometry.PlainWkb: - def process(value): - if value is not None: - return sql.func.ST_AsBinary(value, self.srid) - - return process - - def bind_expression(self, bindvalue): - if self.format is PGGeometry.PlainWkt: - return sql.func.ST_GeomFromText(bindvalue, self.srid) - - -Geometry = PGGeometry - - -@compiles(PGGeometry) -def compile_geometry(type_, compiler, **kw): - return '%s(%s,%d)' % (type_.name, type_.geometry_type, type_.srid) - - -@compiles(PGGeometry, "sqlite") -def compile_geometry_sqlite(type_, compiler, **kw): - return "BLOB" diff --git a/libs_crutch/contrib/spyne/store/relational/util.py b/libs_crutch/contrib/spyne/store/relational/util.py deleted 
file mode 100644 index e30633f..0000000 --- a/libs_crutch/contrib/spyne/store/relational/util.py +++ /dev/null @@ -1,279 +0,0 @@ -# -# retrieved from https://github.com/kvesteri/sqlalchemy-utils -# commit 99e1ea0eb288bc50ddb4a4aed5c50772d915ca73 -# -# Copyright (c) 2012, Konsta Vesterinen -# -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# * The names of the contributors may not be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT, -# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-# - -import os -import cProfile - -from copy import copy - -from sqlalchemy.engine import Dialect, create_engine -from sqlalchemy.engine.url import make_url -from sqlalchemy.exc import OperationalError, ProgrammingError -from sqlalchemy.orm import object_session -from sqlalchemy.orm.exc import UnmappedInstanceError - - -def get_bind(obj): - """ - Return the bind for given SQLAlchemy Engine / Connection / declarative - model object. - :param obj: SQLAlchemy Engine / Connection / declarative model object - :: - from sqlalchemy_utils import get_bind - get_bind(session) # Connection object - get_bind(user) - """ - if hasattr(obj, 'bind'): - conn = obj.bind - else: - try: - conn = object_session(obj).bind - except UnmappedInstanceError: - conn = obj - - if not hasattr(conn, 'execute'): - raise TypeError( - 'This method accepts only Session, Engine, Connection and ' - 'declarative model objects.' - ) - return conn - - -def quote(mixed, ident): - """ - Conditionally quote an identifier. - :: - from sqlalchemy_utils import quote - engine = create_engine('sqlite:///:memory:') - quote(engine, 'order') - # '"order"' - quote(engine, 'some_other_identifier') - # 'some_other_identifier' - :param mixed: SQLAlchemy Session / Connection / Engine / Dialect object. - :param ident: identifier to conditionally quote - """ - if isinstance(mixed, Dialect): - dialect = mixed - else: - dialect = get_bind(mixed).dialect - return dialect.preparer(dialect).quote(ident) - - -def database_exists(url): - """Check if a database exists. - - :param url: A SQLAlchemy engine URL. - - Performs backend-specific testing to quickly determine if a database - exists on the server. :: - - database_exists('postgresql://postgres@localhost/name') #=> False - create_database('postgresql://postgres@localhost/name') - database_exists('postgresql://postgres@localhost/name') #=> True - - Supports checking against a constructed URL as well. 
:: - - engine = create_engine('postgresql://postgres@localhost/name') - database_exists(engine.url) #=> False - create_database(engine.url) - database_exists(engine.url) #=> True - - """ - - url = copy(make_url(url)) - database = url.database - if url.drivername.startswith('postgres'): - url.database = 'postgres' - elif not url.drivername.startswith('sqlite'): - url.database = None - - engine = create_engine(url) - - if engine.dialect.name == 'postgresql': - text = "SELECT 1 FROM pg_database WHERE datname='%s'" % database - return bool(engine.execute(text).scalar()) - - elif engine.dialect.name == 'mysql': - text = ("SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA " - "WHERE SCHEMA_NAME = '%s'" % database) - return bool(engine.execute(text).scalar()) - - elif engine.dialect.name == 'sqlite': - return database == ':memory:' or os.path.exists(database) - - else: - text = 'SELECT 1' - try: - url.database = database - engine = create_engine(url) - engine.execute(text) - return True - - except (ProgrammingError, OperationalError): - return False - - -def create_database(url, encoding='utf8', psql_template='template1'): - """Issue the appropriate CREATE DATABASE statement. - - :param url: A SQLAlchemy engine URL. - :param encoding: The encoding to create the database as. - :param psql_template: - The name of the template from which to create the new database, - only supported by PostgreSQL driver. As per Postgresql docs, defaults to - "template1". - - To create a database, you can pass a simple URL that would have - been passed to ``create_engine``. :: - - create_database('postgresql://postgres@localhost/name') - - You may also pass the url from an existing engine. :: - - create_database(engine.url) - - Has full support for mysql, postgres, and sqlite. In theory, - other database engines should be supported. 
- """ - - url = copy(make_url(url)) - - database = url.database - - if url.drivername.startswith('postgres'): - url.database = 'postgres' - elif not url.drivername.startswith('sqlite'): - url.database = None - - engine = create_engine(url) - - if engine.dialect.name == 'postgresql': - if engine.driver == 'psycopg2': - from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT - engine.raw_connection().set_isolation_level( - ISOLATION_LEVEL_AUTOCOMMIT - ) - - text = "CREATE DATABASE {0} ENCODING '{1}' TEMPLATE {2}".format( - quote(engine, database), - encoding, - quote(engine, psql_template) - ) - engine.execute(text) - - elif engine.dialect.name == 'mysql': - text = "CREATE DATABASE {0} CHARACTER SET = '{1}'".format( - quote(engine, database), - encoding - ) - engine.execute(text) - - elif engine.dialect.name == 'sqlite' and database != ':memory:': - open(database, 'w').close() - - else: - text = 'CREATE DATABASE {0}'.format(quote(engine, database)) - engine.execute(text) - - -def drop_database(url): - """Issue the appropriate DROP DATABASE statement. - - :param url: A SQLAlchemy engine URL. - - Works similar to the :ref:`create_database` method in that both url text - and a constructed url are accepted. :: - - drop_database('postgresql://postgres@localhost/name') - drop_database(engine.url) - - """ - - url = copy(make_url(url)) - - database = url.database - - if url.drivername.startswith('postgresql'): - url.database = 'template1' - elif not url.drivername.startswith('sqlite'): - url.database = None - - engine = create_engine(url) - - if engine.dialect.name == 'sqlite' and url.database != ':memory:': - os.remove(url.database) - - elif engine.dialect.name == 'postgresql' and engine.driver == 'psycopg2': - from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT - engine.raw_connection().set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) - - # Disconnect all users from the database we are dropping. 
- version = list( - map( - int, - engine.execute('SHOW server_version').first()[0].split('.') - ) - ) - pid_column = ( - 'pid' if (version[0] >= 9 and version[1] >= 2) else 'procpid' - ) - text = ''' - SELECT pg_terminate_backend(pg_stat_activity.%(pid_column)s) - FROM pg_stat_activity - WHERE pg_stat_activity.datname = '%(database)s' - AND %(pid_column)s <> pg_backend_pid(); - ''' % {'pid_column': pid_column, 'database': database} - engine.execute(text) - - # Drop the database. - text = 'DROP DATABASE {0}'.format(quote(engine, database)) - engine.execute(text) - - else: - text = 'DROP DATABASE {0}'.format(quote(engine, database)) - engine.execute(text) - - -# https://zapier.com/engineering/profiling-python-boss/ -def do_cprofile(func): - def profiled_func(*args, **kwargs): - profile = cProfile.Profile() - try: - profile.enable() - result = func(*args, **kwargs) - profile.disable() - return result - finally: - profile.print_stats(sort='time') - - return profiled_func diff --git a/libs_crutch/contrib/spyne/test/__init__.py b/libs_crutch/contrib/spyne/test/__init__.py deleted file mode 100644 index 51fda37..0000000 --- a/libs_crutch/contrib/spyne/test/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -class FakeApp(object): - transport = 'transport' - tns = 'tns' - name = 'name' - services = [] - classes = () - -import logging -logging.basicConfig(level=logging.DEBUG) -logging.getLogger('spyne.util.appreg').setLevel(logging.INFO) - -from spyne.context import FakeContext diff --git a/libs_crutch/contrib/spyne/test/interface/__init__.py b/libs_crutch/contrib/spyne/test/interface/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/contrib/spyne/test/interface/test_interface.py b/libs_crutch/contrib/spyne/test/interface/test_interface.py deleted file mode 100644 index a47c7c5..0000000 --- a/libs_crutch/contrib/spyne/test/interface/test_interface.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -import unittest - -from spyne import Application, Service, rpc -from spyne.model import Array, ComplexModel, AnyXml, UnsignedLong, \ - UnsignedInteger16, Integer, DateTime, Unicode -from spyne.protocol.http import HttpRpc -from spyne.protocol.soap import Soap11 - - -class TestInterface(unittest.TestCase): - def test_imports(self): - import logging - logging.basicConfig(level=logging.DEBUG) - - class KeyValuePair(ComplexModel): - __namespace__ = "1" - key = Unicode - value = Unicode - - class Something(ComplexModel): - __namespace__ = "2" - d = DateTime - i = Integer - - class SomethingElse(ComplexModel): - __namespace__ = "3" - a = AnyXml - b = UnsignedLong - s = Something - - class BetterSomething(Something): - __namespace__ = "4" - k = UnsignedInteger16 - - class Service1(Service): - @rpc(SomethingElse, _returns=Array(KeyValuePair)) - def some_call(ctx, sth): - pass - - class Service2(Service): - @rpc(BetterSomething, _returns=Array(KeyValuePair)) - def some_other_call(ctx, sth): - pass - - application = Application([Service1, Service2], - in_protocol=HttpRpc(), - out_protocol=Soap11(), - name='Service', tns='target_namespace' - ) - - imports = application.interface.imports - tns = application.interface.get_tns() - smm = application.interface.service_method_map - print(imports) - - assert imports[tns] == set(['1', '3', '4']) - assert imports['3'] == set(['2']) - assert imports['4'] == set(['2']) - - assert smm['{%s}some_call' % tns] - assert smm['{%s}some_call' % tns][0].service_class == Service1 - assert smm['{%s}some_call' % tns][0].function == Service1.some_call - - assert smm['{%s}some_other_call' % tns] - assert smm['{%s}some_other_call' % tns][0].service_class == Service2 - assert smm['{%s}some_other_call' % tns][0].function == 
Service2.some_other_call - - def test_custom_primitive_in_array(self): - RequestStatus = Unicode(values=['new', 'processed'], zonta='bonta') - - class DataRequest(ComplexModel): - status = Array(RequestStatus) - - class HelloWorldService(Service): - @rpc(DataRequest) - def some_call(ctx, dgrntcl): - pass - - Application([HelloWorldService], 'spyne.examples.hello.soap', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11()) - - # test passes if instantiating Application doesn't fail - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/interface/test_wsgi.py b/libs_crutch/contrib/spyne/test/interface/test_wsgi.py deleted file mode 100644 index e27f3c8..0000000 --- a/libs_crutch/contrib/spyne/test/interface/test_wsgi.py +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import unittest - -from spyne.util import six -from spyne.util.six import StringIO - -from spyne.protocol.soap.soap11 import Soap11 -from spyne.server.wsgi import WsgiApplication -from spyne.application import Application -from spyne.model.primitive import Unicode -from spyne.decorator import rpc -from spyne.const.xml import WSDL11 -from spyne.service import Service - - -def start_response(code, headers): - print(code, headers) - - -class Test(unittest.TestCase): - def setUp(self): - class SomeService(Service): - @rpc(Unicode) - def some_call(ctx, some_str): - print(some_str) - - - app = Application([SomeService], "some_tns", in_protocol=Soap11(), - out_protocol=Soap11()) - self.wsgi_app = WsgiApplication(app) - - def test_document_built(self): - self.h = 0 - - def on_wsdl_document_built(doc): - self.h += 1 - - self.wsgi_app.doc.wsdl11.event_manager.add_listener( - "wsdl_document_built", on_wsdl_document_built) - self.wsgi_app.doc.wsdl11.build_interface_document("http://some_url/") - - assert self.h == 1 - - def test_document_manipulation(self): - def on_wsdl_document_built(doc): - doc.root_elt.tag = 'ehe' - - self.wsgi_app.doc.wsdl11.event_manager.add_listener( - "wsdl_document_built", on_wsdl_document_built) - self.wsgi_app.doc.wsdl11.build_interface_document("http://some_url/") - d = self.wsgi_app.doc.wsdl11.get_interface_document() - - from lxml import etree - - assert etree.fromstring(d).tag == 'ehe' - - def test_wsgi(self): - retval = b''.join(self.wsgi_app({ - 'PATH_INFO': '/', - 'QUERY_STRING': 'wsdl', - 'SERVER_NAME': 'localhost', - 'SERVER_PORT': '7000', - 'REQUEST_METHOD': 'GET', - 'wsgi.url_scheme': 'http', - 'wsgi.input': StringIO(), - }, start_response)) - - from lxml import etree - - assert etree.fromstring(retval).tag == 
WSDL11('definitions') - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/interface/test_xml_schema.py b/libs_crutch/contrib/spyne/test/interface/test_xml_schema.py deleted file mode 100644 index 4163539..0000000 --- a/libs_crutch/contrib/spyne/test/interface/test_xml_schema.py +++ /dev/null @@ -1,551 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -import unittest - -from pprint import pprint -from lxml import etree - -from spyne import Application -from spyne import rpc -from spyne.const import xml as ns -from spyne.const.xml import NS_XSD -from spyne.model import ByteArray -from spyne.model import ComplexModel -from spyne.model import XmlAttribute -from spyne.model import XmlData -from spyne.model import AnyXml -from spyne.model import Integer -from spyne.model import Mandatory as M -from spyne.model import Unicode -from spyne.model import Uuid -from spyne.model import Boolean -from spyne.protocol.soap import Soap11, Soap12 -from spyne.service import Service -from spyne.util.xml import get_schema_documents -from spyne.util.xml import parse_schema_element -from spyne.util.xml import parse_schema_string - -from spyne.interface.xml_schema import 
XmlSchema -from spyne.interface.xml_schema.genpy import CodeGenerator - - -class TestXmlSchema(unittest.TestCase): - def test_choice_tag(self): - class SomeObject(ComplexModel): - __namespace__ = "badass_ns" - - one = Integer(xml_choice_group="numbers") - two = Integer(xml_choice_group="numbers") - punk = Unicode - - class KickassService(Service): - @rpc(_returns=SomeObject) - def wooo(ctx): - return SomeObject() - - Application([KickassService], - tns='kickass.ns', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11() - ) - - docs = get_schema_documents([SomeObject]) - doc = docs['tns'] - print(etree.tostring(doc, pretty_print=True)) - assert len(doc.xpath('/xs:schema/xs:complexType[@name="SomeObject"]' - '/xs:sequence/xs:element[@name="punk"]', - namespaces={'xs': NS_XSD})) > 0 - assert len(doc.xpath('/xs:schema/xs:complexType[@name="SomeObject"]' - '/xs:sequence/xs:choice/xs:element[@name="one"]', - namespaces={'xs': NS_XSD})) > 0 - - def test_customized_class_with_empty_subclass(self): - class SummaryStatsOfDouble(ComplexModel): - _type_info = [('Min', XmlAttribute(Integer, use='required')), - ('Max', XmlAttribute(Integer, use='required')), - ('Avg', XmlAttribute(Integer, use='required'))] - - class SummaryStats(SummaryStatsOfDouble): - ''' this is an empty base class ''' - - class Payload(ComplexModel): - _type_info = [('Stat1', SummaryStats.customize(nillable=False)), - ('Stat2', SummaryStats), - ('Stat3', SummaryStats), - ('Dummy', Unicode)] - - class JackedUpService(Service): - @rpc(_returns=Payload) - def GetPayload(ctx): - return Payload() - - Application([JackedUpService], - tns='kickass.ns', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11() - ) - - # if no exceptions while building the schema, no problem. 
- # see: https://github.com/arskom/spyne/issues/226 - - - def test_namespaced_xml_attribute(self): - class Release(ComplexModel): - __namespace__ = "http://usefulinc.com/ns/doap#" - - _type_info = [ - ('about', XmlAttribute(Unicode, - ns="http://www.w3.org/1999/02/22-rdf-syntax-ns#")), - ] - - class Project(ComplexModel): - __namespace__ = "http://usefulinc.com/ns/doap#" - - _type_info = [ - ('about', XmlAttribute(Unicode, - ns="http://www.w3.org/1999/02/22-rdf-syntax-ns#")), - ('release', Release.customize(max_occurs=float('inf'))), - ] - - class RdfService(Service): - @rpc(Unicode, Unicode, _returns=Project) - def some_call(ctx, a, b): - pass - - Application([RdfService], - tns='spynepi', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11() - ) - - # if no exceptions while building the schema, no problem. - - def test_customized_simple_type_in_xml_attribute(self): - class Product(ComplexModel): - __namespace__ = 'some_ns' - - id = XmlAttribute(Uuid) - edition = Unicode - - class SomeService(Service): - @rpc(Product, _returns=Product) - def echo_product(ctx, product): - logging.info('edition_id: %r', product.edition_id) - return product - - Application([SomeService], tns='some_ns', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11() - ) - - # if no exceptions while building the schema, no problem. 
- - def test_binary_encodings(self): - class Product(ComplexModel): - __namespace__ = 'some_ns' - - hex = ByteArray(encoding='hex') - base64_1 = ByteArray(encoding='base64') - base64_2 = ByteArray - - class SomeService(Service): - @rpc(Product, _returns=Product) - def echo_product(ctx, product): - logging.info('edition_id: %r', product.edition_id) - return product - - app = Application([SomeService], - tns='some_ns', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11() - ) - - _ns = {'xs': NS_XSD} - pref_xs = ns.PREFMAP[NS_XSD] - xs = XmlSchema(app.interface) - xs.build_interface_document() - elt = xs.get_interface_document()['tns'].xpath( - '//xs:complexType[@name="Product"]', - namespaces=_ns)[0] - - assert elt.xpath('//xs:element[@name="base64_1"]/@type', - namespaces=_ns)[0] == '%s:base64Binary' % pref_xs - assert elt.xpath('//xs:element[@name="base64_2"]/@type', - namespaces=_ns)[0] == '%s:base64Binary' % pref_xs - assert elt.xpath('//xs:element[@name="hex"]/@type', - namespaces=_ns)[0] == '%s:hexBinary' % pref_xs - - def test_multilevel_customized_simple_type(self): - class ExampleService(Service): - __tns__ = 'http://xml.company.com/ns/example/' - - @rpc(M(Uuid), _returns=Unicode) - def say_my_uuid(ctx, uuid): - return 'Your UUID: %s' % uuid - - Application([ExampleService], - tns='kickass.ns', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11() - ) - - # if no exceptions while building the schema, no problem. 
- # see: http://stackoverflow.com/questions/16042132/cannot-use-mandatory-uuid-or-other-pattern-related-must-be-type-as-rpc-argumen - - def test_any_tag(self): - logging.basicConfig(level=logging.DEBUG) - - class SomeType(ComplexModel): - __namespace__ = "zo" - - anything = AnyXml(schema_tag='{%s}any' % NS_XSD, namespace='##other', - process_contents='lax') - - docs = get_schema_documents([SomeType]) - print(etree.tostring(docs['tns'], pretty_print=True)) - _any = docs['tns'].xpath('//xsd:any', namespaces={'xsd': NS_XSD}) - - assert len(_any) == 1 - assert _any[0].attrib['namespace'] == '##other' - assert _any[0].attrib['processContents'] == 'lax' - - def _build_xml_data_test_schema(self, custom_root): - tns = 'kickass.ns' - - class ProductEdition(ComplexModel): - __namespace__ = tns - id = XmlAttribute(Uuid) - if custom_root: - name = XmlData(Uuid) - else: - name = XmlData(Unicode) - - class Product(ComplexModel): - __namespace__ = tns - id = XmlAttribute(Uuid) - edition = ProductEdition - - class ExampleService(Service): - @rpc(Product, _returns=Product) - def say_my_uuid(ctx, product): - pass - - app = Application([ExampleService], - tns='kickass.ns', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11() - ) - - schema = XmlSchema(app.interface) - schema.build_interface_document() - schema.build_validation_schema() - - doc = schema.get_interface_document()['tns'] - print(etree.tostring(doc, pretty_print=True)) - return schema - - def test_xml_data_schema_doc(self): - schema = self._build_xml_data_test_schema(custom_root=False) - - assert len(schema.get_interface_document()['tns'].xpath( - '/xs:schema/xs:complexType[@name="ProductEdition"]' - '/xs:simpleContent/xs:extension/xs:attribute[@name="id"]' - ,namespaces={'xs': NS_XSD})) == 1 - - def _test_xml_data_validation(self): - schema = self._build_xml_data_test_schema(custom_root=False) - - assert schema.validation_schema.validate(etree.fromstring(""" - - punk - - """)), 
schema.validation_schema.error_log.last_error - - def _test_xml_data_validation_custom_root(self): - schema = self._build_xml_data_test_schema(custom_root=True) - - assert schema.validation_schema.validate(etree.fromstring(""" - - - 00000000-0000-0000-0000-000000000002 - - - """)), schema.validation_schema.error_log.last_error - - - def test_subs(self): - from lxml import etree - from spyne.util.xml import get_schema_documents - xpath = lambda o, x: o.xpath(x, namespaces={"xs": NS_XSD}) - - m = { - "s0": "aa", - "s2": "cc", - "s3": "dd", - } - - class C(ComplexModel): - __namespace__ = "aa" - a = Integer - b = Integer(sub_name="bb") - c = Integer(sub_ns="cc") - d = Integer(sub_ns="dd", sub_name="dd") - - elt = get_schema_documents([C], "aa")['tns'] - print(etree.tostring(elt, pretty_print=True)) - - seq, = xpath(elt, "xs:complexType/xs:sequence") - - assert len(seq) == 4 - assert len(xpath(seq, 'xs:element[@name="a"]')) == 1 - assert len(xpath(seq, 'xs:element[@name="bb"]')) == 1 - - # FIXME: this doesn't feel right. - # check the spec to see whether it should it be prefixed. 
- # - #assert len(xpath(seq, 'xs:element[@name="{cc}c"]')) == 1 - #assert len(xpath(seq, 'xs:element[@name="{dd}dd"]')) == 1 - - def test_mandatory(self): - xpath = lambda o, x: o.xpath(x, namespaces={"xs": NS_XSD}) - - class C(ComplexModel): - __namespace__ = "aa" - foo = XmlAttribute(M(Unicode)) - - elt = get_schema_documents([C])['tns'] - print(etree.tostring(elt, pretty_print=True)) - foo, = xpath(elt, 'xs:complexType/xs:attribute[@name="foo"]') - attrs = foo.attrib - assert 'use' in attrs and attrs['use'] == 'required' - - def test_annotation(self): - tns = 'some_ns' - doc = "Some Doc" - - class SomeClass(ComplexModel): - __namespace__ = tns - some_attr = Unicode(doc=doc) - - schema = get_schema_documents([SomeClass], tns)['tns'] - print(etree.tostring(schema, pretty_print=True)) - assert schema.xpath("//xs:documentation/text()", - namespaces={'xs': NS_XSD}) == [doc] - - -class TestParseOwnXmlSchema(unittest.TestCase): - def test_simple(self): - tns = 'some_ns' - class SomeGuy(ComplexModel): - __namespace__ = 'some_ns' - - id = Integer - - schema = get_schema_documents([SomeGuy], tns)['tns'] - print(etree.tostring(schema, pretty_print=True)) - - objects = parse_schema_element(schema) - pprint(objects[tns].types) - - NewGuy = objects[tns].types["SomeGuy"] - assert NewGuy.get_type_name() == SomeGuy.get_type_name() - assert NewGuy.get_namespace() == SomeGuy.get_namespace() - assert dict(NewGuy._type_info) == dict(SomeGuy._type_info) - - def test_customized_unicode(self): - tns = 'some_ns' - class SomeGuy(ComplexModel): - __namespace__ = tns - name = Unicode(max_len=10, pattern="a", min_len=5, default="aa") - - schema = get_schema_documents([SomeGuy], tns)['tns'] - print(etree.tostring(schema, pretty_print=True)) - - objects = parse_schema_element(schema) - pprint(objects[tns].types) - - NewGuy = objects['some_ns'].types["SomeGuy"] - assert NewGuy._type_info['name'].Attributes.max_len == 10 - assert NewGuy._type_info['name'].Attributes.min_len == 5 - assert 
NewGuy._type_info['name'].Attributes.pattern == "a" - assert NewGuy._type_info['name'].Attributes.default == "aa" - - def test_boolean_default(self): - tns = 'some_ns' - class SomeGuy(ComplexModel): - __namespace__ = tns - bald = Boolean(default=True) - - schema = get_schema_documents([SomeGuy], tns)['tns'] - print(etree.tostring(schema, pretty_print=True)) - - objects = parse_schema_element(schema) - pprint(objects[tns].types) - - NewGuy = objects['some_ns'].types["SomeGuy"] - assert NewGuy._type_info['bald'].Attributes.default == True - - def test_boolean_attribute_default(self): - tns = 'some_ns' - class SomeGuy(ComplexModel): - __namespace__ = tns - - bald = XmlAttribute(Boolean(default=True)) - - schema = get_schema_documents([SomeGuy], tns)['tns'] - print(etree.tostring(schema, pretty_print=True)) - - objects = parse_schema_element(schema) - pprint(objects[tns].types) - - NewGuy = objects['some_ns'].types["SomeGuy"] - assert NewGuy._type_info['bald'].Attributes.default == True - - def test_attribute(self): - tns = 'some_ns' - class SomeGuy(ComplexModel): - __namespace__ = tns - - name = XmlAttribute(Unicode) - - schema = get_schema_documents([SomeGuy], tns)['tns'] - print(etree.tostring(schema, pretty_print=True)) - - objects = parse_schema_element(schema) - pprint(objects) - pprint(objects[tns].types) - - NewGuy = objects['some_ns'].types["SomeGuy"] - assert NewGuy._type_info['name'].type is Unicode - - def test_attribute_with_customized_type(self): - tns = 'some_ns' - class SomeGuy(ComplexModel): - __namespace__ = tns - - name = XmlAttribute(Unicode(default="aa")) - - schema = get_schema_documents([SomeGuy], tns)['tns'] - print(etree.tostring(schema, pretty_print=True)) - - objects = parse_schema_element(schema) - pprint(objects[tns].types) - - NewGuy = objects['some_ns'].types["SomeGuy"] - assert NewGuy._type_info['name'].type.__orig__ is Unicode - assert NewGuy._type_info['name'].type.Attributes.default == "aa" - - def test_inherited_attribute(self): - 
class DeviceEntity(ComplexModel): - token = XmlAttribute(Unicode, use='required') - - class DigitalInput(DeviceEntity): - IdleState = XmlAttribute(Unicode) - - class SomeService(Service): - @rpc(_returns=DigitalInput, _body_style='bare') - def GetDigitalInput(ctx): - return DigitalInput() - - Application([SomeService], 'some_tns', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11()) - - def test_simple_type_explicit_customization(self): - class Header(ComplexModel): - test = Boolean(min_occurs=0, nillable=False) - pw = Unicode.customize(min_occurs=0, nillable=False, min_len=6) - - class Params(ComplexModel): - sendHeader = Header.customize(nillable=False, min_occurs=1) - - class DummyService(Service): - @rpc(Params, _returns=Unicode) - def loadServices(ctx, serviceParams): - return '42' - - Application([DummyService], - tns='dummy', - name='DummyService', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11() - ) - # if instantiation doesn't fail, test is green. - - -class TestParseForeignXmlSchema(unittest.TestCase): - def test_simple_content(self): - tns = 'some_ns' - - schema = """ - - - - - - - -""" - - objects = parse_schema_string(schema) - pprint(objects[tns].types) - - NewGuy = objects[tns].types['SomeGuy'] - ti = NewGuy._type_info - pprint(dict(ti)) - assert issubclass(ti['_data'], XmlData) - assert ti['_data'].type is Unicode - - assert issubclass(ti['attr'], XmlAttribute) - assert ti['attr'].type is Unicode - - -class TestCodeGeneration(unittest.TestCase): - def _get_schema(self, *args): - schema_doc = get_schema_documents(args)['tns'] - return parse_schema_element(schema_doc) - - def test_simple(self): - ns = 'some_ns' - - class SomeObject(ComplexModel): - __namespace__ = ns - _type_info = [ - ('i', Integer), - ('s', Unicode), - ] - - s = self._get_schema(SomeObject)[ns] - code = CodeGenerator().genpy(ns, s) - - # FIXME: Properly parse it - assert """class SomeObject(_ComplexBase): - _type_info = [ - ('i', Integer), - ('s', 
Unicode), - ]""" in code - - -if __name__ == '__main__': - unittest.main() - diff --git a/libs_crutch/contrib/spyne/test/interface/wsdl/__init__.py b/libs_crutch/contrib/spyne/test/interface/wsdl/__init__.py deleted file mode 100644 index d23660d..0000000 --- a/libs_crutch/contrib/spyne/test/interface/wsdl/__init__.py +++ /dev/null @@ -1,68 +0,0 @@ - -# -# spyne - Copyright (C) spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from spyne.application import Application -from spyne.interface.wsdl import Wsdl11 -from spyne.protocol.soap import Soap11 -import spyne.const.xml as ns - -def build_app(service_list, tns, name): - app = Application(service_list, tns, name=name, - in_protocol=Soap11(), out_protocol=Soap11()) - app.transport = 'http://schemas.xmlsoap.org/soap/http' - return app - -class AppTestWrapper(): - def __init__(self, application): - - self.url = 'http:/localhost:7789/wsdl' - self.service_string = ns.WSDL11('service') - self.port_string = ns.WSDL11('port') - self.soap_binding_string = ns.WSDL11_SOAP('binding') - self.operation_string = ns.WSDL11('operation') - self.port_type_string = ns.WSDL11('portType') - self.binding_string = ns.WSDL11('binding') - - self.app = application - self.interface_doc = Wsdl11(self.app.interface) - 
self.interface_doc.build_interface_document(self.url) - self.wsdl = self.interface_doc.get_interface_document() - - def get_service_list(self): - return self.interface_doc.root_elt.findall(self.service_string) - - def get_port_list(self, service): - from lxml import etree - print((etree.tostring(service, pretty_print=True))) - return service.findall(self.port_string) - - def get_soap_bindings(self, binding): - return binding.findall(self.soap_binding_string) - - def get_port_types(self): - return self.interface_doc.root_elt.findall(self.port_type_string) - - def get_port_operations(self, port_type): - return port_type.findall(self.operation_string) - - def get_bindings(self): - return self.interface_doc.root_elt.findall(self.binding_string) - - def get_binding_operations(self, binding): - return [o for o in binding.iterfind(self.operation_string)] diff --git a/libs_crutch/contrib/spyne/test/interface/wsdl/defult_services.py b/libs_crutch/contrib/spyne/test/interface/wsdl/defult_services.py deleted file mode 100644 index 5d54a7f..0000000 --- a/libs_crutch/contrib/spyne/test/interface/wsdl/defult_services.py +++ /dev/null @@ -1,49 +0,0 @@ - -# -# spyne - Copyright (C) spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -from spyne.model.primitive import String -from spyne.service import Service -from spyne.decorator import rpc - - -def TDefaultPortService(): - class DefaultPortService(Service): - @rpc(String, _returns=String) - def echo_default_port_service(self, string): - return string - - return DefaultPortService - - -def TDefaultPortServiceMultipleMethods(): - class DefaultPortServiceMultipleMethods(Service): - @rpc(String, _returns=String) - def echo_one(self, string): - return string - - @rpc(String, _returns=String) - def echo_two(self, string): - return string - - @rpc(String, _returns=String) - def echo_three(self, string): - return string - - return DefaultPortServiceMultipleMethods diff --git a/libs_crutch/contrib/spyne/test/interface/wsdl/port_service_services.py b/libs_crutch/contrib/spyne/test/interface/wsdl/port_service_services.py deleted file mode 100644 index 365bb62..0000000 --- a/libs_crutch/contrib/spyne/test/interface/wsdl/port_service_services.py +++ /dev/null @@ -1,127 +0,0 @@ - -# -# spyne - Copyright (C) spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from spyne.model.primitive import String -from spyne.service import Service -from spyne.decorator import rpc - -def TS1(): - class S1(Service): - name = 'S1Fools' - __namespace__ = 'Hippity' - - @rpc(String, _returns=String) - def echo_string_s1(self, string): - return string - return S1 - -def TS2(): - class S2(Service): - name = 'S2Fools' - __namespace__ = 'Hoppity' - - @rpc(String, _returns=String) - def bobs(self, string): - return string - - return S2 - -def TS3(): - class S3(Service): - name = 'S3Fools' - __namespace__ = 'Hoppity' - __service_name__ = 'BlahService' - __port_types__ = ['bobhope', 'larry'] - - @rpc(String, _returns=String) - def echo(self, string): - return string - - @rpc(String, _port_type='bobhope', _returns=String) - def echo_bob_hope(self, string): - return 'Bob Hope' - - return S3 - -def TMissingRPCPortService(): - class MissingRPCPortService(Service): - name = 'MissingRPCPortService' - __namespace__ = 'MissingRPCPortService' - __service_name__ = 'MissingRPCPortService' - __port_types__ = ['existing'] - - @rpc(String, _returns=String) - def raise_exception(self, string): - return string - return MissingRPCPortService - -def TBadRPCPortService(): - class BadRPCPortService(Service): - name = 'MissingRPCPortService' - __namespace__ = 'MissingRPCPortService' - __service_name__ = 'MissingRPCPortService' - __port_types__ = ['existing'] - - @rpc(String, _port_type='existingss', _returns=String) - def raise_exception(self, string): - return string - - return BadRPCPortService - -def TMissingServicePortService(): - class MissingServicePortService(Service): - name = 'MissingRPCPortService' - __namespace__ = 'MissingRPCPortService' - __service_name__ = 'MissingRPCPortService' - __port_types__ = ['existing'] - - @rpc(String, 
_port_type='existingss', _returns=String) - def raise_exception(self, string): - return string - - return MissingServicePortService - -def TSinglePortService(): - class SinglePortService(Service): - name = 'SinglePort' - __service_name__ = 'SinglePortService_ServiceInterface' - __namespace__ = 'SinglePortNS' - __port_types__ = ['FirstPortType'] - - @rpc(String, _port_type='FirstPortType', _returns=String) - def echo_default_port_service(self, string): - return string - - return SinglePortService - -def TDoublePortService(): - class DoublePortService(Service): - name = 'DoublePort' - __namespace__ = 'DoublePort' - __port_types__ = ['FirstPort', 'SecondPort'] - - @rpc(String, _port_type='FirstPort', _returns=String) - def echo_first_port(self, string): - return string - - @rpc(String, _port_type='SecondPort', _returns=String) - def echo_second_port(self, string): - return string - - return DoublePortService diff --git a/libs_crutch/contrib/spyne/test/interface/wsdl/test_bindings.py b/libs_crutch/contrib/spyne/test/interface/wsdl/test_bindings.py deleted file mode 100644 index e82bcf9..0000000 --- a/libs_crutch/contrib/spyne/test/interface/wsdl/test_bindings.py +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/env python -#encoding: utf8 -# -# spyne - Copyright (C) spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) - -import unittest - -import spyne.const.xml as ns - - -from spyne.interface.wsdl.wsdl11 import Wsdl11 -from . import build_app -from .port_service_services import TS1 -from .port_service_services import TSinglePortService -from .port_service_services import TDoublePortService - -class TestWSDLBindingBehavior(unittest.TestCase): - def setUp(self): - self.transport = 'http://schemas.xmlsoap.org/soap/http' - self.url = 'http:/localhost:7789/wsdl' - self.port_type_string = ns.WSDL11('portType') - self.service_string = ns.WSDL11('service') - self.binding_string = ns.WSDL11('binding') - self.operation_string = ns.WSDL11('operation') - self.port_string = ns.WSDL11('port') - - def test_binding_simple(self): - sa = build_app([TS1()], 'S1Port', 'TestServiceName') - - interface_doc = Wsdl11(sa.interface) - interface_doc.build_interface_document(self.url) - - - services = interface_doc.root_elt.xpath( - '/wsdl:definitions/wsdl:service', - namespaces = { - 'wsdl':'http://schemas.xmlsoap.org/wsdl/' }) - self.assertEqual(len(services), 1) - - portTypes = interface_doc.root_elt.xpath( - '/wsdl:definitions/wsdl:portType', - namespaces = { - 'wsdl':'http://schemas.xmlsoap.org/wsdl/' }) - self.assertEqual(len(portTypes), 1) - - ports = interface_doc.root_elt.xpath( - '/wsdl:definitions/wsdl:service[@name="%s"]/wsdl:port' % - "S1", - namespaces = { - 'wsdl':'http://schemas.xmlsoap.org/wsdl/' }) - self.assertEqual(len(ports), 1) - - - def test_binding_multiple(self): - SinglePortService, DoublePortService = TSinglePortService(), TDoublePortService() - - sa = build_app( - [SinglePortService, DoublePortService], - 'MultiServiceTns', - 'AppName' - ) - interface_doc = Wsdl11(sa.interface) - 
interface_doc.build_interface_document(self.url) - - - # 2 Service, - # First has 1 port - # Second has 2 - - # => need 2 service, 3 port and 3 bindings - - services = interface_doc.root_elt.xpath( - '/wsdl:definitions/wsdl:service', - namespaces = { - 'wsdl':'http://schemas.xmlsoap.org/wsdl/' }) - self.assertEqual(len(services), 2) - - portTypes = interface_doc.root_elt.xpath( - '/wsdl:definitions/wsdl:portType', - namespaces = { - 'wsdl':'http://schemas.xmlsoap.org/wsdl/' }) - self.assertEqual(len(portTypes), 3) - - - bindings = interface_doc.root_elt.xpath( - '/wsdl:definitions/wsdl:binding', - namespaces = { - 'wsdl':'http://schemas.xmlsoap.org/wsdl/' }) - - self.assertEqual(len(bindings), 3) - - ports = interface_doc.root_elt.xpath( - '/wsdl:definitions/wsdl:service[@name="%s"]/wsdl:port' % - SinglePortService.__service_name__, - namespaces = { - 'wsdl':'http://schemas.xmlsoap.org/wsdl/' }) - self.assertEqual(len(ports), 1) - - ports = interface_doc.root_elt.xpath( - '/wsdl:definitions/wsdl:service[@name="%s"]/wsdl:port' % - "DoublePortService", - namespaces = { - 'wsdl':'http://schemas.xmlsoap.org/wsdl/' }) - self.assertEqual(len(ports), 2) - - # checking name and type - #service SinglePortService - for srv in (SinglePortService, DoublePortService): - for port in srv.__port_types__: - bindings = interface_doc.root_elt.xpath( - '/wsdl:definitions/wsdl:binding[@name="%s"]' % - port, - namespaces = { - 'wsdl':'http://schemas.xmlsoap.org/wsdl/' }) - self.assertEqual(bindings[0].get('type'), "tns:%s" % port) diff --git a/libs_crutch/contrib/spyne/test/interface/wsdl/test_default_wsdl.py b/libs_crutch/contrib/spyne/test/interface/wsdl/test_default_wsdl.py deleted file mode 100644 index c81046e..0000000 --- a/libs_crutch/contrib/spyne/test/interface/wsdl/test_default_wsdl.py +++ /dev/null @@ -1,248 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -import logging -logging.basicConfig(level=logging.DEBUG) - -import unittest - -from lxml import etree - -from spyne.application import Application - -from spyne.test.interface.wsdl import AppTestWrapper -from spyne.test.interface.wsdl import build_app -from spyne.test.interface.wsdl.defult_services import TDefaultPortService -from spyne.test.interface.wsdl.defult_services import \ - TDefaultPortServiceMultipleMethods - -from spyne.const import REQUEST_SUFFIX -from spyne.const import RESPONSE_SUFFIX -from spyne.const import ARRAY_SUFFIX - -from spyne.decorator import srpc -from spyne.service import Service -from spyne.interface.wsdl import Wsdl11 -from spyne.model.complex import Array -from spyne.model.primitive import String - -ns = { - 'wsdl': 'http://schemas.xmlsoap.org/wsdl/', - 'xs': 'http://www.w3.org/2001/XMLSchema', -} - - -class TestDefaultWSDLBehavior(unittest.TestCase): - def _default_service(self, app_wrapper, service_name): - self.assertEqual(1, len(app_wrapper.get_service_list())) - - services = app_wrapper.get_service_list() - service = services[0] - - # the default behavior requires that there be only a single service - self.assertEqual(1, len(services)) - self.assertEqual(service_name, service.get('name')) - - # Test 
the default service has the correct number of ports - # the default behavior requires that there be only a single port - ports = app_wrapper.get_port_list(service) - self.assertEqual(len(ports), 1) - - def _default_port_type(self, app_wrapper, portType_name, op_count): - # Verify the portType Count - portTypes = app_wrapper.get_port_types() - - # there should be only one portType - self.assertEqual(1, len(portTypes)) - - # Verify the portType name - portType = portTypes[0] - # Check the name of the port - self.assertEqual(portType_name, portType.get('name')) - - # verify that the portType definition has the correct - # number of operations - ops = app_wrapper.get_port_operations(portType) - self.assertEqual(op_count, len(ops)) - - def _default_binding(self, wrapper, binding_name, opp_count): - # the default behavior is only single binding - bindings = wrapper.get_bindings() - self.assertEqual(1, len(bindings)) - - # check for the correct binding name - binding = bindings[0] - name = binding.get('name') - self.assertEqual(binding_name, name) - - # Test that the default service contains the soap binding - sb = wrapper.get_soap_bindings(binding) - self.assertEqual(1, len(sb)) - - # verify the correct number of operations - ops = wrapper.get_binding_operations(binding) - self.assertEqual(opp_count, len(ops)) - - def _default_binding_methods(self, wrapper, op_count, op_names): - binding = wrapper.get_bindings()[0] - operations = wrapper.get_binding_operations(binding) - - # Check the number of operations bound to the port - self.assertEqual(op_count, len(operations)) - - # Check the operation names are correct - for op in operations: - self.assertTrue(op.get('name') in op_names) - - def test_default_port_type(self): - # Test the default port is created - # Test the default port has the correct name - app = build_app( - [TDefaultPortService()], - 'DefaultPortTest', - 'DefaultPortName' - ) - - wrapper = AppTestWrapper(app) - self._default_port_type(wrapper, 
'DefaultPortName', 1) - - def test_default_port_type_multiple(self): - app = build_app( - [TDefaultPortServiceMultipleMethods()], - 'DefaultServiceTns', - 'MultipleDefaultPortServiceApp' - ) - - wrapper = AppTestWrapper(app) - - self._default_port_type(wrapper, "MultipleDefaultPortServiceApp", 3) - - def test_default_binding(self): - app = build_app( - [TDefaultPortService()], - 'DefaultPortTest', - 'DefaultBindingName' - ) - - wrapper = AppTestWrapper(app) - - self._default_binding(wrapper, "DefaultBindingName", 1) - - def test_default_binding_multiple(self): - app = build_app( - [TDefaultPortServiceMultipleMethods()], - 'DefaultPortTest', - 'MultipleDefaultBindingNameApp' - ) - - wrapper = AppTestWrapper(app) - - self._default_binding(wrapper, 'MultipleDefaultBindingNameApp', 3) - - def test_default_binding_methods(self): - app = build_app( - [TDefaultPortService()], - 'DefaultPortTest', - 'DefaultPortMethods' - ) - - wrapper = AppTestWrapper(app) - - self._default_binding_methods( - wrapper, - 1, - ['echo_default_port_service'] - ) - - def test_bare_simple(self): - class SomeService(Service): - @srpc(String, _returns=String, _body_style='bare') - def whatever(ss): - return ss - - app = Application([SomeService], tns='tns') - app.transport = 'None' - - wsdl = Wsdl11(app.interface) - wsdl.build_interface_document('url') - wsdl = etree.fromstring(wsdl.get_interface_document()) - - schema = wsdl.xpath( - '/wsdl:definitions/wsdl:types/xs:schema[@targetNamespace="tns"]', - namespaces=ns, - ) - assert len(schema) == 1 - - print(etree.tostring(wsdl, pretty_print=True)) - - elts = schema[0].xpath( - 'xs:element[@name="whatever%s"]' % REQUEST_SUFFIX, namespaces=ns) - assert len(elts) > 0 - assert elts[0].attrib['type'] == 'xs:string' - - elts = schema[0].xpath( - 'xs:element[@name="whatever%s"]' % RESPONSE_SUFFIX, namespaces=ns) - assert len(elts) > 0 - assert elts[0].attrib['type'] == 'xs:string' - - def test_bare_with_conflicting_types(self): - class 
SomeService(Service): - @srpc(Array(String), _returns=Array(String)) - def whatever(sa): - return sa - - @srpc(Array(String), _returns=Array(String), _body_style='bare') - def whatever_bare(sa): - return sa - - app = Application([SomeService], tns='tns') - app.transport = 'None' - - wsdl = Wsdl11(app.interface) - wsdl.build_interface_document('url') - wsdl = etree.fromstring(wsdl.get_interface_document()) - schema, = wsdl.xpath( - '/wsdl:definitions/wsdl:types/xs:schema[@targetNamespace="tns"]', - namespaces=ns, - ) - - print(etree.tostring(schema, pretty_print=True)) - - assert len(schema.xpath( - 'xs:complexType[@name="string%s"]' % ARRAY_SUFFIX, - namespaces=ns)) > 0 - - elts = schema.xpath( - 'xs:element[@name="whatever_bare%s"]' % REQUEST_SUFFIX, - namespaces=ns) - - assert len(elts) > 0 - assert elts[0].attrib['type'] == 'tns:string%s' % ARRAY_SUFFIX - - elts = schema.xpath( - 'xs:element[@name="whatever_bare%s"]' % RESPONSE_SUFFIX, - namespaces=ns) - - assert len(elts) > 0 - assert elts[0].attrib['type'] == 'tns:string%s' % ARRAY_SUFFIX - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/interface/wsdl/test_op_req_suffix.py b/libs_crutch/contrib/spyne/test/interface/wsdl/test_op_req_suffix.py deleted file mode 100644 index 141395b..0000000 --- a/libs_crutch/contrib/spyne/test/interface/wsdl/test_op_req_suffix.py +++ /dev/null @@ -1,287 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 - -import unittest - -from webtest import TestApp as _TestApp # avoid confusing py.test - -from spyne.application import Application -from spyne.decorator import srpc -from spyne.service import Service -from spyne.model.primitive import Integer, Unicode -from spyne.model.complex import Iterable -from spyne.protocol.soap import Soap11 -from spyne.protocol.http import HttpRpc -from spyne.protocol.json import JsonDocument -from spyne.server.wsgi import WsgiApplication - -from spyne.const.xml import PREFMAP, NS_WSDL11_SOAP - -def strip_whitespace(string): - return ''.join(string.split()) - - -class TestOperationRequestSuffix(unittest.TestCase): - """ - test different protocols with REQUEST_SUFFIX and _operation_name - _in_message_name is a concern, will test that as well - """ - - default_function_name = 'echo' - - # output is not affected, will use soap output for all tests - result_body = ''' - - - - Echo, test - Echo, test - - - ''' - - def get_function_names(self, suffix, _operation_name=None, - _in_message_name=None): - """This tests the logic of how names are produced. - Its logic should match expected behavior of the decorator. 
- returns operation name, in message name, service name depending on - args""" - function_name = self.default_function_name - - if _operation_name is None: - operation_name = function_name - else: - operation_name = _operation_name - - if _in_message_name is None: - request_name = operation_name + suffix - else: - request_name = _in_message_name - - return function_name, operation_name, request_name - - def get_app(self, in_protocol, suffix, _operation_name=None, - _in_message_name=None): - """setup testapp dependent on suffix and _in_message_name""" - - import spyne.const - spyne.const.REQUEST_SUFFIX = suffix - - class EchoService(Service): - - srpc_kparams = {'_returns': Iterable(Unicode)} - if _in_message_name: - srpc_kparams['_in_message_name'] = _in_message_name - if _operation_name: - srpc_kparams['_operation_name'] = _operation_name - - @srpc(Unicode, Integer, **srpc_kparams) - def echo(string, times): - for i in range(times): - yield 'Echo, %s' % string - - application = Application([EchoService], - tns='spyne.examples.echo', - in_protocol=in_protocol, - out_protocol=Soap11() - ) - app = WsgiApplication(application) - - testapp = _TestApp(app) - - # so that it doesn't interfere with other tests. 
- spyne.const.REQUEST_SUFFIX = '' - - return testapp - - def assert_response_ok(self, resp): - """check the default response""" - self.assertEqual(resp.status_int, 200, resp) - self.assertTrue( - strip_whitespace(self.result_body) in strip_whitespace(str(resp)), - '{0} not in {1}'.format(self.result_body, resp)) - - ### application error tests ### - def assert_application_error(self, suffix, _operation_name=None, - _in_message_name=None): - self.assertRaises(ValueError, - self.get_app, Soap11(validator='lxml'), suffix, - _operation_name, _in_message_name) - - def test_assert_application_error(self): - """check error when op namd and in name are both used""" - self.assert_application_error(suffix='', - _operation_name='TestOperationName', - _in_message_name='TestMessageName') - - ### soap tests ### - def assert_soap_ok(self, suffix, _operation_name=None, - _in_message_name=None): - """helper to test soap requests""" - - # setup - app = self.get_app(Soap11(validator='lxml'), suffix, _operation_name, - _in_message_name) - - function_name, operation_name, request_name = self.get_function_names( - suffix, _operation_name, _in_message_name) - - soap_input_body = """ - - - - - test - 2 - - - """.format(request_name) - - # check wsdl - wsdl = app.get('/?wsdl') - self.assertEqual(wsdl.status_int, 200, wsdl) - self.assertTrue(request_name in wsdl, - '{0} not found in wsdl'.format(request_name)) - - soap_strings = [ - ''.format(request_name), - ''.format(request_name), - ] - for soap_string in soap_strings: - self.assertTrue(soap_string in wsdl, - '{0} not in {1}'.format(soap_string, wsdl)) - if request_name != operation_name: - wrong_string = '= 2.5") diff --git a/libs_crutch/contrib/spyne/test/interop/server/httprpc_pod_basic.py b/libs_crutch/contrib/spyne/test/interop/server/httprpc_pod_basic.py deleted file mode 100644 index bcf657f..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/httprpc_pod_basic.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python -# -# 
spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""pod being plain old data""" - -import logging - - -logging.basicConfig(level=logging.DEBUG) -logger = logging.getLogger('spyne.protocol.xml') -logger.setLevel(logging.DEBUG) - -from spyne.test.interop.server import get_open_port -from spyne.application import Application -from spyne.test.interop.server._service import services -from spyne.protocol.http import HttpRpc -from spyne.server.wsgi import WsgiApplication - -httprpc_soap_application = Application(services, - 'spyne.test.interop.server.httprpc.pod', in_protocol=HttpRpc(), - out_protocol=HttpRpc()) -host = "127.0.0.1" -port = [0] - - -def main(): - try: - from wsgiref.simple_server import make_server - from wsgiref.validate import validator - if port[0] == 0: - port[0] = get_open_port() - - wsgi_application = WsgiApplication(httprpc_soap_application) - server = make_server(host, port[0], validator(wsgi_application)) - - logger.info('Starting interop server at %s:%s.' 
% ('0.0.0.0', port[0])) - logger.info('WSDL is at: /?wsdl') - server.serve_forever() - - except ImportError: - print("Error: example server code requires Python >= 2.5") - - -if __name__ == '__main__': - main() diff --git a/libs_crutch/contrib/spyne/test/interop/server/httprpc_pod_basic_twisted.py b/libs_crutch/contrib/spyne/test/interop/server/httprpc_pod_basic_twisted.py deleted file mode 100644 index 6e824f4..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/httprpc_pod_basic_twisted.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""pod being plain old data""" - -import logging -logging.basicConfig(level=logging.DEBUG) -logger = logging.getLogger('spyne.protocol.xml') -logger.setLevel(logging.DEBUG) - -from spyne.test.interop.server import get_open_port -from spyne.application import Application -from spyne.test.interop.server._service import services -from spyne.protocol.http import HttpRpc -from spyne.server.twisted import TwistedWebResource - -httprpc_soap_application = Application(services, - 'spyne.test.interop.server.httprpc.pod', - in_protocol=HttpRpc(), out_protocol=HttpRpc()) - -host = '127.0.0.1' -port = [0] - -def main(argv): - from twisted.web.server import Site - from twisted.internet import reactor - from twisted.python import log - observer = log.PythonLoggingObserver('twisted') - log.startLoggingWithObserver(observer.emit, setStdout=False) - - if port[0] == 0: - port[0] = get_open_port() - - wr = TwistedWebResource(httprpc_soap_application) - site = Site(wr) - - reactor.listenTCP(port[0], site) - logging.info("listening on: %s:%d" % (host,port[0])) - - return reactor.run() - - -if __name__ == '__main__': - import sys - sys.exit(main(sys.argv)) diff --git a/libs_crutch/contrib/spyne/test/interop/server/msgpackrpc_http_basic.py b/libs_crutch/contrib/spyne/test/interop/server/msgpackrpc_http_basic.py deleted file mode 100644 index d19e869..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/msgpackrpc_http_basic.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) -logging.getLogger('spyne.protocol.msgpack').setLevel(logging.DEBUG) -logger = logging.getLogger('spyne.test.interop.server.msgpackrpc_http_basic') - -from spyne.test.interop.server import get_open_port -from spyne.server.wsgi import WsgiApplication -from spyne.test.interop.server._service import services -from spyne.application import Application -from spyne.protocol.msgpack import MessagePackRpc - -msgpackrpc_application = Application(services, 'spyne.test.interop.server', - in_protocol=MessagePackRpc(validator='soft'), - out_protocol=MessagePackRpc()) - -host = '127.0.0.1' -port = [0] - -def main(): - try: - from wsgiref.simple_server import make_server - from wsgiref.validate import validator - if port[0] == 0: - port[0] = get_open_port() - - wsgi_application = WsgiApplication(msgpackrpc_application) - server = make_server(host, port[0], validator(wsgi_application)) - - logger.info('Starting interop server at %s:%s.' 
% (host, port[0])) - logger.info('WSDL is at: /?wsdl') - server.serve_forever() - - except ImportError: - print("Error: example server code requires Python >= 2.5") - -if __name__ == '__main__': - main() diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap11/__init__.py b/libs_crutch/contrib/spyne/test/interop/server/soap11/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap11/httprpc_soap_basic.py b/libs_crutch/contrib/spyne/test/interop/server/soap11/httprpc_soap_basic.py deleted file mode 100644 index 3969598..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/soap11/httprpc_soap_basic.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) -logger = logging.getLogger('spyne.protocol.xml') -logger.setLevel(logging.DEBUG) - -from spyne.application import Application -from spyne.test.interop.server._service import services -from spyne.protocol.http import HttpRpc -from spyne.protocol.soap import Soap11 -from spyne.server.wsgi import WsgiApplication -from spyne.test.interop.server import get_open_port - - -httprpc_soap_application = Application(services, - 'spyne.test.interop.server.httprpc.soap', in_protocol=HttpRpc(), out_protocol=Soap11()) - - -host = '127.0.0.1' -port = [0] - - -if __name__ == '__main__': - try: - from wsgiref.simple_server import make_server - from wsgiref.validate import validator - if port[0] == 0: - port[0] = get_open_port() - - wsgi_application = WsgiApplication(httprpc_soap_application) - server = make_server(host, port[0], validator(wsgi_application)) - - logger.info('Starting interop server at %s:%s.' % ('0.0.0.0', port[0])) - logger.info('WSDL is at: /?wsdl') - server.serve_forever() - - except ImportError: - print("Error: example server code requires Python >= 2.5") diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_http_basic.py b/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_http_basic.py deleted file mode 100644 index 65a8605..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_http_basic.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging - -logging.basicConfig(level=logging.DEBUG) -logging.getLogger('spyne.protocol.xml').setLevel(logging.DEBUG) -logger = logging.getLogger('spyne.test.interop.server.soap_http_basic') - - -from spyne.test.interop.server import get_open_port -from spyne.server.wsgi import WsgiApplication -from spyne.test.interop.server._service import services -from spyne.application import Application -from spyne.protocol.soap import Soap11 - - -soap11_application = Application(services, 'spyne.test.interop.server', - in_protocol=Soap11(validator='lxml', cleanup_namespaces=True), - out_protocol=Soap11()) - - -host = '127.0.0.1' -port = [0] - - -def main(): - try: - from wsgiref.simple_server import make_server - from wsgiref.validate import validator - if port[0] == 0: - port[0] = get_open_port() - - wsgi_application = WsgiApplication(soap11_application) - server = make_server(host, port[0], validator(wsgi_application)) - - logger.info('Starting interop server at %s:%s.' 
% ('0.0.0.0', port[0])) - logger.info('WSDL is at: /?wsdl') - server.serve_forever() - - except ImportError: - print("Error: example server code requires Python >= 2.5") - - -if __name__ == '__main__': - main() diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_http_basic_twisted.py b/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_http_basic_twisted.py deleted file mode 100644 index b9d502b..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_http_basic_twisted.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) -logger = logging.getLogger('spyne.wsgi') -logger.setLevel(logging.DEBUG) - -from spyne.test.interop.server import get_open_port -from spyne.test.interop.server.soap_http_basic import soap11_application -from spyne.server.twisted import TwistedWebResource - - -host = '127.0.0.1' -port = [0] - - -def main(argv): - from twisted.web.server import Site - from twisted.internet import reactor - from twisted.python import log - - observer = log.PythonLoggingObserver('twisted') - log.startLoggingWithObserver(observer.emit, setStdout=False) - - wr = TwistedWebResource(soap11_application) - site = Site(wr) - - if port[0] == 0: - port[0] = get_open_port() - reactor.listenTCP(port[0], site) - logging.info("listening on: %s:%d" % (host,port[0])) - - return reactor.run() - -if __name__ == '__main__': - import sys - sys.exit(main(sys.argv)) diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_http_static.py b/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_http_static.py deleted file mode 100644 index 0e8e26e..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_http_static.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) -logger = logging.getLogger('spyne.wsgi') -logger.setLevel(logging.DEBUG) - -import os - -from spyne.test.interop.server import get_open_port -from spyne.test.interop.server.soap_http_basic import soap11_application -from spyne.server.twisted import TwistedWebResource - - -host = '127.0.0.1' -port = [0] -url = 'app' - - -def main(argv): - from twisted.python import log - from twisted.web.server import Site - from twisted.web.static import File - from twisted.internet import reactor - from twisted.python import log - - observer = log.PythonLoggingObserver('twisted') - log.startLoggingWithObserver(observer.emit, setStdout=False) - - static_dir = os.path.abspath('.') - logging.info("registering static folder %r on /" % static_dir) - root = File(static_dir) - - wr = TwistedWebResource(soap11_application) - logging.info("registering %r on /%s" % (wr, url)) - root.putChild(url, wr) - - site = Site(root) - - if port[0] == 0: - port[0] = get_open_port() - reactor.listenTCP(port[0], site) - logging.info("listening on: %s:%d" % (host,port)) - - return reactor.run() - - -if __name__ == '__main__': - import sys - sys.exit(main(sys.argv)) diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_zeromq.py b/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_zeromq.py deleted file mode 100644 index 5cb7f4d..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/soap11/soap_zeromq.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging - -from spyne.test.interop.server import get_open_port -from spyne.test.interop.server.soap11.soap_http_basic import soap11_application - -from spyne.server.zeromq import ZeroMQServer - - -host = '127.0.0.1' -port = [0] - - -def main(): - if port[0] == 0: - port[0] = get_open_port() - url = "tcp://%s:%d" % (host, port[0]) - - logging.basicConfig(level=logging.DEBUG) - logging.getLogger('spyne.protocol.xml').setLevel(logging.DEBUG) - - server = ZeroMQServer(soap11_application, url) - logging.info("************************") - logging.info("Use Ctrl+\\ to exit if Ctrl-C does not work.") - logging.info("See the 'I can't Ctrl-C my Python/Ruby application. Help!' 
" - "question in http://www.zeromq.org/area:faq for more info.") - logging.info("listening on %r" % url) - logging.info("************************") - - server.serve_forever() - - -if __name__ == '__main__': - main() \ No newline at end of file diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap12/__init__.py b/libs_crutch/contrib/spyne/test/interop/server/soap12/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap12/httprpc_soap_basic.py b/libs_crutch/contrib/spyne/test/interop/server/soap12/httprpc_soap_basic.py deleted file mode 100644 index c3bc5b1..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/soap12/httprpc_soap_basic.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) -logger = logging.getLogger('spyne.protocol.xml') -logger.setLevel(logging.DEBUG) - -from spyne.application import Application -from spyne.test.interop.server._service import services -from spyne.protocol.http import HttpRpc -from spyne.protocol.soap import Soap12 -from spyne.server.wsgi import WsgiApplication - -httprpc_soap_application = Application(services, - 'spyne.test.interop.server.httprpc.soap', in_protocol=HttpRpc(), out_protocol=Soap12()) - -host = '127.0.0.1' -port = 9753 - -if __name__ == '__main__': - try: - from wsgiref.simple_server import make_server - from wsgiref.validate import validator - - wsgi_application = WsgiApplication(httprpc_soap_application) - server = make_server(host, port, validator(wsgi_application)) - - logger.info('Starting interop server at %s:%s.' % ('0.0.0.0', 9753)) - logger.info('WSDL is at: /?wsdl') - server.serve_forever() - - except ImportError: - print("Error: example server code requires Python >= 2.5") diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_http_basic.py b/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_http_basic.py deleted file mode 100644 index 27fe6f0..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_http_basic.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging - -logging.basicConfig(level=logging.DEBUG) -logging.getLogger('spyne.protocol.xml').setLevel(logging.DEBUG) -logger = logging.getLogger('spyne.test.interop.server.soap_http_basic') - -from spyne.server.wsgi import WsgiApplication -from spyne.test.interop.server._service import services -from spyne.application import Application -from spyne.protocol.soap import Soap12 - - -soap12_application = Application(services, 'spyne.test.interop.server', - in_protocol=Soap12(validator='lxml', cleanup_namespaces=True), - out_protocol=Soap12()) - -host = '127.0.0.1' -port = 9754 - -def main(): - try: - from wsgiref.simple_server import make_server - from wsgiref.validate import validator - - wsgi_application = WsgiApplication(soap12_application) - server = make_server(host, port, validator(wsgi_application)) - - logger.info('Starting interop server at %s:%s.' % ('0.0.0.0', 9754)) - logger.info('WSDL is at: /?wsdl') - server.serve_forever() - - except ImportError: - print("Error: example server code requires Python >= 2.5") - -if __name__ == '__main__': - main() diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_http_basic_twisted.py b/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_http_basic_twisted.py deleted file mode 100644 index d115e2f..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_http_basic_twisted.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) -logger = logging.getLogger('spyne.wsgi') -logger.setLevel(logging.DEBUG) - -from spyne.test.interop.server.soap12.soap_http_basic import soap12_application -from spyne.server.twisted import TwistedWebResource - -host = '127.0.0.1' -port = 9755 - -def main(argv): - from twisted.web.server import Site - from twisted.internet import reactor - from twisted.python import log - - observer = log.PythonLoggingObserver('twisted') - log.startLoggingWithObserver(observer.emit, setStdout=False) - - wr = TwistedWebResource(soap12_application) - site = Site(wr) - - reactor.listenTCP(port, site) - logging.info("listening on: %s:%d" % (host,port)) - - return reactor.run() - -if __name__ == '__main__': - import sys - sys.exit(main(sys.argv)) diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_http_static.py b/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_http_static.py deleted file mode 100644 index 6cc742f..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_http_static.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) -logger = logging.getLogger('spyne.wsgi') -logger.setLevel(logging.DEBUG) - -import os - -from spyne.test.interop.server.soap12.soap_http_basic import soap12_application -from spyne.server.twisted import TwistedWebResource - -host = '127.0.0.1' -port = 9756 -url = 'app' - -def main(argv): - from twisted.python import log - from twisted.web.server import Site - from twisted.web.static import File - from twisted.internet import reactor - from twisted.python import log - - observer = log.PythonLoggingObserver('twisted') - log.startLoggingWithObserver(observer.emit, setStdout=False) - - static_dir = os.path.abspath('.') - logging.info("registering static folder %r on /" % static_dir) - root = File(static_dir) - - wr = TwistedWebResource(soap12_application) - logging.info("registering %r on /%s" % (wr, url)) - root.putChild(url, wr) - - site = Site(root) - - reactor.listenTCP(port, site) - logging.info("listening on: %s:%d" % (host,port)) - - return reactor.run() - - -if __name__ == '__main__': - import sys - sys.exit(main(sys.argv)) diff --git a/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_zeromq.py b/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_zeromq.py 
deleted file mode 100644 index 62c2b73..0000000 --- a/libs_crutch/contrib/spyne/test/interop/server/soap12/soap_zeromq.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging - -from spyne.test.interop.server.soap12.soap_http_basic import soap12_application - -from spyne.server.zeromq import ZeroMQServer - -host = '127.0.0.1' -port = 55555 - -def main(): - url = "tcp://%s:%d" % (host,port) - - logging.basicConfig(level=logging.DEBUG) - logging.getLogger('spyne.protocol.xml').setLevel(logging.DEBUG) - - server = ZeroMQServer(soap12_application, url) - logging.info("************************") - logging.info("Use Ctrl+\\ to exit if Ctrl-C does not work.") - logging.info("See the 'I can't Ctrl-C my Python/Ruby application. Help!' 
" - "question in http://www.zeromq.org/area:faq for more info.") - logging.info("listening on %r" % url) - logging.info("************************") - - server.serve_forever() - -if __name__ == '__main__': - main() \ No newline at end of file diff --git a/libs_crutch/contrib/spyne/test/interop/test_django.py b/libs_crutch/contrib/spyne/test/interop/test_django.py deleted file mode 100644 index b5df8b5..0000000 --- a/libs_crutch/contrib/spyne/test/interop/test_django.py +++ /dev/null @@ -1,305 +0,0 @@ -# coding: utf-8 -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -from __future__ import absolute_import - -import datetime -import re -from django.core.exceptions import ImproperlyConfigured -from django.test import TestCase, TransactionTestCase, Client - -from spyne.client.django import DjangoTestClient -from spyne.model.fault import Fault -from spyne.model.complex import ComplexModelBase -from spyne.util.django import (DjangoComplexModel, DjangoComplexModelMeta, - email_re) -from spyne.util.six import add_metaclass - -from rpctest.core.models import (FieldContainer, RelatedFieldContainer, - UserProfile as DjUserProfile) -from rpctest.core.views import app, hello_world_service, Container - - -class SpyneTestCase(TransactionTestCase): - def setUp(self): - self.client = DjangoTestClient('/hello_world/', hello_world_service.app) - - def _test_say_hello(self): - resp = self.client.service.say_hello('Joe', 5) - list_resp = list(resp) - self.assertEqual(len(list_resp), 5) - self.assertEqual(list_resp, ['Hello, Joe'] * 5) - - -class DjangoViewTestCase(TestCase): - def test_say_hello(self): - client = DjangoTestClient('/say_hello/', app) - resp = client.service.say_hello('Joe', 5) - list_resp = list(resp) - self.assertEqual(len(list_resp), 5) - self.assertEqual(list_resp, ['Hello, Joe'] * 5) - - def test_response_encoding(self): - client = DjangoTestClient('/say_hello/', app) - response = client.service.say_hello.get_django_response('Joe', 5) - self.assertTrue('Content-Type' in response) - self.assertTrue(response['Content-Type'].startswith('text/xml')) - - def test_error(self): - client = Client() - response = client.post('/say_hello/', {}) - self.assertContains(response, 'faultstring', status_code=500) - - def test_cached_wsdl(self): - """Test if wsdl is cached.""" - client = Client() - response = 
client.get('/say_hello/') - self.assertContains(response, - 'location="http://testserver/say_hello/"') - response = client.get('/say_hello/', HTTP_HOST='newtestserver') - self.assertNotContains(response, - 'location="http://newtestserver/say_hello/"') - - def test_not_cached_wsdl(self): - """Test if wsdl is not cached.""" - client = Client() - response = client.get('/say_hello_not_cached/') - self.assertContains( - response, 'location="http://testserver/say_hello_not_cached/"') - response = client.get('/say_hello_not_cached/', - HTTP_HOST='newtestserver') - - self.assertContains( - response, 'location="http://newtestserver/say_hello_not_cached/"') - - -class ModelTestCase(TestCase): - - """Test mapping between django and spyne models.""" - - def setUp(self): - self.client = DjangoTestClient('/api/', app) - - def test_exclude(self): - """Test if excluded field is not mapped.""" - type_info = Container.get_flat_type_info(Container) - self.assertIn('id', type_info) - self.assertNotIn('excluded_field', type_info) - - def test_pk_mapping(self): - """Test if primary key is mapped as optional but not nillable.""" - type_info = Container.get_flat_type_info(Container) - pk_field = type_info['id'] - self.assertEqual(pk_field.Attributes.min_occurs, 0) - self.assertFalse(pk_field.Attributes.nullable) - - def test_regex_pattern_mapping(self): - """Test if regex pattern is mapped from django model.""" - type_info = Container.get_flat_type_info(Container) - email_field = type_info['email_field'] - self.assertEqual(email_field.__name__, 'Unicode') - self.assertIsNotNone(email_field.Attributes.pattern) - self.assertEqual(email_field.Attributes.min_occurs, 1) - self.assertFalse(email_field.Attributes.nullable) - - def test_blank_field(self): - """Test if blank fields are optional but not null.""" - type_info = Container.get_flat_type_info(Container) - blank_field = type_info['blank_field'] - self.assertEqual(blank_field.__name__, 'NormalizedString') - 
self.assertEqual(blank_field.Attributes.min_occurs, 0) - self.assertFalse(blank_field.Attributes.nullable) - - def test_blank_as_dict(self): - """Test if blank field is omitted in as_dict representation.""" - container = Container() - container_dict = container.as_dict() - self.assertNotIn('blank_field', container_dict) - - def test_length_validators_field(self): - """Test if length validators are correctly mapped.""" - type_info = Container.get_flat_type_info(Container) - length_validators_field = type_info['length_validators_field'] - self.assertEqual(length_validators_field.__name__, 'NormalizedString') - self.assertEqual(length_validators_field.Attributes.min_occurs, 1) - self.assertTrue(length_validators_field.Attributes.nullable) - self.assertEqual(length_validators_field.Attributes.min_len, 3) - self.assertEqual(length_validators_field.Attributes.max_len, 10) - - def test_get_container(self): - """Test mapping from Django model to spyne model.""" - get_container = lambda: self.client.service.get_container(2) - self.assertRaises(Fault, get_container) - container = FieldContainer.objects.create(slug_field='container') - FieldContainer.objects.create(slug_field='container2', - foreign_key=container, - one_to_one_field=container, - email_field='email@example.com', - char_field='yo') - c = get_container() - self.assertIsInstance(c, Container) - - def test_create_container(self): - """Test complex input to create Django model.""" - related_container = RelatedFieldContainer(id='related') - new_container = FieldContainer(slug_field='container', - date_field=datetime.date.today(), - datetime_field=datetime.datetime.now(), - email_field='email@example.com', - time_field=datetime.time(), - custom_foreign_key=related_container, - custom_one_to_one_field=related_container) - create_container = (lambda: self.client.service.create_container( - new_container)) - c = create_container() - - self.assertIsInstance(c, Container) - self.assertEqual(c.custom_one_to_one_field_id, 
'related') - self.assertEqual(c.custom_foreign_key_id, 'related') - self.assertRaises(Fault, create_container) - - def test_create_container_unicode(self): - """Test complex unicode input to create Django model.""" - new_container = FieldContainer( - char_field=u'спайн', - text_field=u'спайн', - slug_field='spyne', - email_field='email@example.com', - date_field=datetime.date.today(), - datetime_field=datetime.datetime.now(), - time_field=datetime.time() - ) - create_container = (lambda: self.client.service.create_container( - new_container)) - c = create_container() - self.assertIsInstance(c, Container) - self.assertRaises(Fault, create_container) - - def test_optional_relation_fields(self): - """Test if optional_relations flag makes fields optional.""" - class UserProfile(DjangoComplexModel): - class Attributes(DjangoComplexModel.Attributes): - django_model = DjUserProfile - - self.assertFalse(UserProfile._type_info['user_id'].Attributes.nullable) - - class UserProfile(DjangoComplexModel): - class Attributes(DjangoComplexModel.Attributes): - django_model = DjUserProfile - django_optional_relations = True - - self.assertEqual( - UserProfile._type_info['user_id'].Attributes.min_occurs, 0) - - def test_abstract_custom_djangomodel(self): - """Test if can create custom DjangoComplexModel.""" - @add_metaclass(DjangoComplexModelMeta) - class OrderedDjangoComplexModel(ComplexModelBase): - __abstract__ = True - - class Attributes(ComplexModelBase.Attributes): - declare_order = 'declared' - - class OrderedFieldContainer(OrderedDjangoComplexModel): - class Attributes(OrderedDjangoComplexModel.Attributes): - django_model = FieldContainer - - field_container = OrderedFieldContainer() - type_info_fields = field_container._type_info.keys() - django_field_names = [field.get_attname() for field in - FieldContainer._meta.fields] - # file field is not mapped - django_field_names.remove('file_field') - # check if ordering is the same as defined in Django model - 
self.assertEqual(type_info_fields, django_field_names) - - def test_nonabstract_custom_djangomodel(self): - """Test if can't create non abstract custom model.""" - with self.assertRaises( - ImproperlyConfigured, msg='Can create non abstract custom model' - ): - @add_metaclass(DjangoComplexModelMeta) - class CustomNotAbstractDjangoComplexModel(ComplexModelBase): - class Attributes(ComplexModelBase.Attributes): - declare_order = 'declared' - - -# in XmlSchema ^ and $ are set implicitly -python_email_re = '^' + email_re.pattern + '$' - - -class EmailRegexTestCase(TestCase): - - """Tests for email_re.""" - - def test_empty(self): - """Empty string is invalid email.""" - self.assertIsNone(re.match(python_email_re, '')) - - def test_valid(self): - """Test valid email.""" - self.assertIsNotNone( - re.match(python_email_re, 'valid.email@example.com') - ) - - def test_valid_single_letter_domain(self): - """Test valid email.""" - self.assertIsNotNone(re.match(python_email_re, 'valid.email@e.x.com')) - - def test_invalid(self): - """Test invalid email.""" - self.assertIsNone(re.match(python_email_re, '@example.com')) - - def test_invalid_tld(self): - """Test if email from Top Level Domain is invalid.""" - self.assertIsNone(re.match(python_email_re, 'babushka@email')) - self.assertIsNone(re.match(python_email_re, 'babushka@domain.email-')) - - -class DjangoServiceTestCase(TestCase): - - """Tests for Django specific service.""" - - def test_handle_does_not_exist(self): - """Test if Django service handles `ObjectDoesNotExist` exceptions.""" - client = DjangoTestClient('/api/', app) - with self.assertRaisesRegexp(Fault, 'Client.FieldContainerNotFound'): - client.service.raise_does_not_exist() - - def test_handle_validation_error(self): - """Test if Django service handles `ValidationError` exceptions.""" - client = DjangoTestClient('/api/', app) - with self.assertRaisesRegexp(Fault, 'Client.ValidationError'): - client.service.raise_validation_error() - - -class 
FromUnicodeAssertionTestCase(TestCase): - - def test_from_unicode_does_not_assert(self): - client = Client() - url = '/synchro/1/' - msg = b"""TestModel - 2015-09-23T13:54:51.796366+00:00 - """ - hdrs = {'SOAPAction': b'"sync"', 'Content-Type': 'text/xml; charset=utf-8'} - client.post(url, msg, 'text/xml', True, **hdrs) diff --git a/libs_crutch/contrib/spyne/test/interop/test_httprpc.py b/libs_crutch/contrib/spyne/test/interop/test_httprpc.py deleted file mode 100644 index 4d72bc4..0000000 --- a/libs_crutch/contrib/spyne/test/interop/test_httprpc.py +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import unittest - -import time - -import pytz - -from datetime import datetime - -from spyne.test.interop._test_soap_client_base import server_started -from spyne.util import thread, urlencode, urlopen, Request, HTTPError - - -_server_started = False - - -class TestHttpRpc(unittest.TestCase): - def setUp(self): - global _server_started - from spyne.test.interop.server.httprpc_pod_basic import main, port - - if not _server_started: - def run_server(): - main() - - thread.start_new_thread(run_server, ()) - - # FIXME: Does anybody have a better idea? 
- time.sleep(2) - - _server_started = True - - self.base_url = 'http://localhost:%d' % port[0] - - def test_404(self): - url = '%s/404' % self.base_url - try: - data = urlopen(url).read() - except HTTPError as e: - assert e.code == 404 - - def test_413(self): - url = self.base_url - try: - data = Request(url,("foo"*3*1024*1024)) - except HTTPError as e: - assert e.code == 413 - - def test_500(self): - url = '%s/python_exception' % self.base_url - try: - data = urlopen(url).read() - except HTTPError as e: - assert e.code == 500 - - def test_500_2(self): - url = '%s/soap_exception' % self.base_url - try: - data = urlopen(url).read() - except HTTPError as e: - assert e.code == 500 - - def test_echo_string(self): - url = '%s/echo_string?s=punk' % self.base_url - data = urlopen(url).read() - - assert data == b'punk' - - def test_echo_integer(self): - url = '%s/echo_integer?i=444' % self.base_url - data = urlopen(url).read() - - assert data == b'444' - - def test_echo_datetime(self): - dt = datetime.now(pytz.utc).isoformat().encode('ascii') - params = urlencode({ - 'dt': dt, - }) - - print(params) - url = '%s/echo_datetime?%s' % (self.base_url, str(params)) - data = urlopen(url).read() - - assert dt == data - - def test_echo_datetime_tz(self): - dt = datetime.now(pytz.utc).isoformat().encode('ascii') - params = urlencode({ - 'dt': dt, - }) - - print(params) - url = '%s/echo_datetime?%s' % (self.base_url, str(params)) - data = urlopen(url).read() - - assert dt == data - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/interop/test_msgpackrpc_client_http.py b/libs_crutch/contrib/spyne/test/interop/test_msgpackrpc_client_http.py deleted file mode 100644 index ab39ff6..0000000 --- a/libs_crutch/contrib/spyne/test/interop/test_msgpackrpc_client_http.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import unittest - -from spyne.client.http import HttpClient -from spyne.test.interop._test_soap_client_base import SpyneClientTestBase -from spyne.test.interop.server.msgpackrpc_http_basic import msgpackrpc_application, port -from spyne.util.etreeconv import root_dict_to_etree - -class TestSpyneHttpClient(SpyneClientTestBase, unittest.TestCase): - def setUp(self): - SpyneClientTestBase.setUp(self, 'msgpack_rpc_http') - - self.client = HttpClient('http://localhost:%d/' % port[0], - msgpackrpc_application) - self.ns = "spyne.test.interop.server" - - @unittest.skip("MessagePackRpc does not support header") - def test_echo_in_header(self): - pass - - @unittest.skip("MessagePackRpc does not support header") - def test_send_out_header(self): - pass - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/interop/test_pyramid.py b/libs_crutch/contrib/spyne/test/interop/test_pyramid.py deleted file mode 100644 index 2dd3b04..0000000 --- a/libs_crutch/contrib/spyne/test/interop/test_pyramid.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding: utf-8 -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# -import unittest -from wsgiref.util import setup_testing_defaults -from wsgiref.validate import validator - -from lxml import etree -from pyramid import testing -from pyramid.config import Configurator -from pyramid.request import Request - -from spyne.protocol.soap import Soap11 -from spyne.service import Service -from spyne.decorator import srpc -from spyne import Application -from spyne.model import Unicode, Integer, Iterable -from spyne.server.pyramid import PyramidApplication - - -class SpyneIntegrationTest(unittest.TestCase): - """Tests for integration of Spyne into Pyramid view callable""" - class HelloWorldService(Service): - @srpc(Unicode, Integer, _returns=Iterable(Unicode)) - def say_hello(name, times): - for i in range(times): - yield 'Hello, %s' % name - - def setUp(self): - request = testing.DummyRequest() - self.config = testing.setUp(request=request) - - def tearDown(self): - testing.tearDown() - - def testGetWsdl(self): - """Simple test for serving of WSDL by spyne through pyramid route""" - application = PyramidApplication( - Application([self.HelloWorldService], - tns='spyne.examples.hello', - in_protocol=Soap11(validator='lxml'), - out_protocol=Soap11())) - - config = Configurator(settings={'debug_all': True}) - 
config.add_route('home', '/') - config.add_view(application, route_name='home') - wsgi_app = validator(config.make_wsgi_app()) - - env = { - 'SCRIPT_NAME': '', - 'REQUEST_METHOD': 'GET', - 'PATH_INFO': '/', - 'QUERY_STRING': 'wsdl', - } - setup_testing_defaults(env) - - request = Request(env) - resp = request.get_response(wsgi_app) - self.assert_(resp.status.startswith("200 ")) - node = etree.XML(resp.body) # will throw exception if non well formed - diff --git a/libs_crutch/contrib/spyne/test/interop/test_soap_client_http.py b/libs_crutch/contrib/spyne/test/interop/test_soap_client_http.py deleted file mode 100644 index b1e88e2..0000000 --- a/libs_crutch/contrib/spyne/test/interop/test_soap_client_http.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import unittest - -from spyne.client.http import HttpClient -from spyne.test.interop._test_soap_client_base import SpyneClientTestBase, \ - server_started -from spyne.test.interop.server.soap11.soap_http_basic import soap11_application - -class TestSpyneHttpClient(SpyneClientTestBase, unittest.TestCase): - def setUp(self): - SpyneClientTestBase.setUp(self, 'http') - - port, = server_started.keys() - - self.client = HttpClient('http://localhost:%d/' % port, - soap11_application) - self.ns = "spyne.test.interop.server" - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/interop/test_soap_client_http_twisted.py b/libs_crutch/contrib/spyne/test/interop/test_soap_client_http_twisted.py deleted file mode 100644 index 84022e9..0000000 --- a/libs_crutch/contrib/spyne/test/interop/test_soap_client_http_twisted.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from twisted.trial import unittest -from spyne.test.interop._test_soap_client_base import run_server, server_started - -from spyne.client.twisted import TwistedHttpClient -from spyne.test.interop.server.soap11.soap_http_basic import soap11_application - -class TestSpyneHttpClient(unittest.TestCase): - def setUp(self): - run_server('http') - - port, = server_started.keys() - - self.ns = b"spyne.test.interop.server._service" - self.client = TwistedHttpClient(b'http://localhost:%d/' % port, - soap11_application) - - def test_echo_boolean(self): - def eb(ret): - raise ret - - def cb(ret): - assert ret == True - - return self.client.service.echo_boolean(True).addCallbacks(cb, eb) - - def test_python_exception(self): - def eb(ret): - print(ret) - - def cb(ret): - assert False, "must fail: %r" % ret - - return self.client.service.python_exception().addCallbacks(cb, eb) - - def test_soap_exception(self): - def eb(ret): - print(type(ret)) - - def cb(ret): - assert False, "must fail: %r" % ret - - return self.client.service.soap_exception().addCallbacks(cb, eb) - - def test_documented_exception(self): - def eb(ret): - print(ret) - - def cb(ret): - assert False, "must fail: %r" % ret - - return self.client.service.python_exception().addCallbacks(cb, eb) diff --git a/libs_crutch/contrib/spyne/test/interop/test_soap_client_zeromq.py b/libs_crutch/contrib/spyne/test/interop/test_soap_client_zeromq.py deleted file mode 100644 index 944c2b0..0000000 --- a/libs_crutch/contrib/spyne/test/interop/test_soap_client_zeromq.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import unittest - -from spyne.client.zeromq import ZeroMQClient - -from spyne.test.interop._test_soap_client_base import SpyneClientTestBase, \ - server_started -from spyne.test.interop.server.soap11.soap_http_basic import soap11_application - - -class TestSpyneZmqClient(SpyneClientTestBase, unittest.TestCase): - def setUp(self): - SpyneClientTestBase.setUp(self, 'zeromq') - - port, = server_started.keys() - - self.client = ZeroMQClient('tcp://localhost:%d' % port, - soap11_application) - self.ns = "spyne.test.interop.server._service" - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/interop/test_suds.py b/libs_crutch/contrib/spyne/test/interop/test_suds.py deleted file mode 100644 index 87fdb44..0000000 --- a/libs_crutch/contrib/spyne/test/interop/test_suds.py +++ /dev/null @@ -1,428 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -suds_logger = logging.getLogger('suds') -suds_logger.setLevel(logging.INFO) - -import unittest - -from datetime import datetime -from base64 import b64encode, b64decode - -from suds.sax.parser import Parser -from suds.client import Client -from suds.plugin import MessagePlugin -from suds import WebFault - -from spyne.util import six - -from spyne.test.interop._test_soap_client_base import SpyneClientTestBase, \ - server_started - - -class LastReceivedPlugin(MessagePlugin): - def received(self, context): - sax = Parser() - self.reply = sax.parse(string=context.reply) - - -class TestSuds(SpyneClientTestBase, unittest.TestCase): - def setUp(self): - SpyneClientTestBase.setUp(self, 'http') - - port, = server_started.keys() - - self.client = Client("http://localhost:%d/?wsdl" % port, cache=None, - plugins=[LastReceivedPlugin()]) - self.ns = "spyne.test.interop.server" - - def test_echo_datetime(self): - val = datetime.now() - ret = self.client.service.echo_datetime(val) - - assert val == ret - - def test_echo_datetime_with_invalid_format(self): - val = datetime.now() - ret = self.client.service.echo_datetime_with_invalid_format(val) - - assert val == ret - - def test_echo_date(self): - val = datetime.now().date() - ret = self.client.service.echo_date(val) - - assert val == ret - - def test_echo_date_with_invalid_format(self): - val = datetime.now().date() - ret = self.client.service.echo_date_with_invalid_format(val) - - assert val == ret - - def test_echo_time(self): - val = 
datetime.now().time() - ret = self.client.service.echo_time(val) - - assert val == ret - - def test_echo_time_with_invalid_format(self): - val = datetime.now().time() - ret = self.client.service.echo_time_with_invalid_format(val) - - assert val == ret - - def test_echo_simple_boolean_array(self): - val = [False, False, False, True] - ret = self.client.service.echo_simple_boolean_array(val) - - assert val == ret - - def test_echo_boolean(self): - val = True - ret = self.client.service.echo_boolean(val) - self.assertEqual(val, ret) - - val = False - ret = self.client.service.echo_boolean(val) - self.assertEqual(val, ret) - - def test_enum(self): - DaysOfWeekEnum = self.client.factory.create("DaysOfWeekEnum") - - val = DaysOfWeekEnum.Monday - ret = self.client.service.echo_enum(val) - - assert val == ret - - def test_bytearray(self): - val = b"\x00\x01\x02\x03\x04" - # suds doesn't support base64 encoding, so we do it manually - ret = self.client.service.echo_bytearray(b64encode(val).decode()) - - assert val == b64decode(ret) - - def test_validation(self): - non_nillable_class = self.client.factory.create( - "{hunk.sunk}NonNillableClass") - non_nillable_class.i = 6 - non_nillable_class.s = None - - try: - self.client.service.non_nillable(non_nillable_class) - except WebFault as e: - pass - else: - raise Exception("must fail") - - def test_echo_integer_array(self): - ia = self.client.factory.create('integerArray') - ia.integer.extend([1, 2, 3, 4, 5]) - self.client.service.echo_integer_array(ia) - - def test_echo_in_header(self): - in_header = self.client.factory.create('InHeader') - in_header.s = 'a' - in_header.i = 3 - - self.client.set_options(soapheaders=in_header) - ret = self.client.service.echo_in_header() - self.client.set_options(soapheaders=None) - - print(ret) - - self.assertEqual(in_header.s, ret.s) - self.assertEqual(in_header.i, ret.i) - - def test_echo_in_complex_header(self): - in_header = self.client.factory.create('InHeader') - in_header.s = 'a' - 
in_header.i = 3 - in_trace_header = self.client.factory.create('InTraceHeader') - in_trace_header.client = 'suds' - in_trace_header.callDate = datetime(year=2000, month=1, day=1, hour=0, - minute=0, second=0, microsecond=0) - - self.client.set_options(soapheaders=(in_header, in_trace_header)) - ret = self.client.service.echo_in_complex_header() - self.client.set_options(soapheaders=None) - - print(ret) - - self.assertEqual(in_header.s, ret[0].s) - self.assertEqual(in_header.i, ret[0].i) - self.assertEqual(in_trace_header.client, ret[1].client) - self.assertEqual(in_trace_header.callDate, ret[1].callDate) - - def test_send_out_header(self): - out_header = self.client.factory.create('OutHeader') - out_header.dt = datetime(year=2000, month=1, day=1) - out_header.f = 3.141592653 - - ret = self.client.service.send_out_header() - - self.assertTrue(isinstance(ret, type(out_header))) - self.assertEqual(ret.dt, out_header.dt) - self.assertEqual(ret.f, out_header.f) - - def test_send_out_complex_header(self): - out_header = self.client.factory.create('OutHeader') - out_header.dt = datetime(year=2000, month=1, day=1) - out_header.f = 3.141592653 - out_trace_header = self.client.factory.create('OutTraceHeader') - out_trace_header.receiptDate = datetime(year=2000, month=1, day=1, - hour=1, minute=1, second=1, microsecond=1) - out_trace_header.returnDate = datetime(year=2000, month=1, day=1, - hour=1, minute=1, second=1, microsecond=100) - - ret = self.client.service.send_out_complex_header() - - self.assertTrue(isinstance(ret[0], type(out_header))) - self.assertEqual(ret[0].dt, out_header.dt) - self.assertEqual(ret[0].f, out_header.f) - self.assertTrue(isinstance(ret[1], type(out_trace_header))) - self.assertEqual(ret[1].receiptDate, out_trace_header.receiptDate) - self.assertEqual(ret[1].returnDate, out_trace_header.returnDate) - # Control the reply soap header (in an unelegant way but this is the - # only way with suds) - soapheaders = 
self.client.options.plugins[0].reply.getChild("Envelope").getChild("Header") - soap_out_header = soapheaders.getChild('OutHeader') - self.assertEqual('T'.join((out_header.dt.date().isoformat(), - out_header.dt.time().isoformat())), - soap_out_header.getChild('dt').getText()) - self.assertEqual(six.text_type(out_header.f), soap_out_header.getChild('f').getText()) - soap_out_trace_header = soapheaders.getChild('OutTraceHeader') - self.assertEqual('T'.join((out_trace_header.receiptDate.date().isoformat(), - out_trace_header.receiptDate.time().isoformat())), - soap_out_trace_header.getChild('receiptDate').getText()) - self.assertEqual('T'.join((out_trace_header.returnDate.date().isoformat(), - out_trace_header.returnDate.time().isoformat())), - soap_out_trace_header.getChild('returnDate').getText()) - - def test_echo_string(self): - test_string = "OK" - ret = self.client.service.echo_string(test_string) - - self.assertEqual(ret, test_string) - - def __get_xml_test_val(self): - return { - "test_sub": { - "test_subsub1": { - "test_subsubsub1": ["subsubsub1 value"] - }, - "test_subsub2": ["subsub2 value 1", "subsub2 value 2"], - "test_subsub3": [ - { - "test_subsub3sub1": ["subsub3sub1 value"] - }, - { - "test_subsub3sub2": ["subsub3sub2 value"] - }, - ], - "test_subsub4": [], - "test_subsub5": ["x"], - } - } - - - def test_echo_simple_class(self): - val = self.client.factory.create("{spyne.test.interop.server}SimpleClass") - - val.i = 45 - val.s = "asd" - - ret = self.client.service.echo_simple_class(val) - - assert ret.i == val.i - assert ret.s == val.s - - def test_echo_class_with_self_reference(self): - val = self.client.factory.create("{spyne.test.interop.server}ClassWithSelfReference") - - val.i = 45 - val.sr = self.client.factory.create("{spyne.test.interop.server}ClassWithSelfReference") - val.sr.i = 50 - val.sr.sr = None - - ret = self.client.service.echo_class_with_self_reference(val) - - assert ret.i == val.i - assert ret.sr.i == val.sr.i - - def 
test_echo_nested_class(self): - val = self.client.factory.create("{punk.tunk}NestedClass"); - - val.i = 45 - val.s = "asd" - val.f = 12.34 - val.ai = self.client.factory.create("integerArray") - val.ai.integer.extend([1, 2, 3, 45, 5, 3, 2, 1, 4]) - - val.simple = self.client.factory.create("{spyne.test.interop.server}SimpleClassArray") - - val.simple.SimpleClass.append(self.client.factory.create("{spyne.test.interop.server}SimpleClass")) - val.simple.SimpleClass.append(self.client.factory.create("{spyne.test.interop.server}SimpleClass")) - - val.simple.SimpleClass[0].i = 45 - val.simple.SimpleClass[0].s = "asd" - val.simple.SimpleClass[1].i = 12 - val.simple.SimpleClass[1].s = "qwe" - - val.other = self.client.factory.create("{spyne.test.interop.server}OtherClass"); - val.other.dt = datetime.now() - val.other.d = 123.456 - val.other.b = True - - ret = self.client.service.echo_nested_class(val) - - self.assertEqual(ret.i, val.i) - self.assertEqual(ret.ai[0], val.ai[0]) - self.assertEqual(ret.simple.SimpleClass[0].s, val.simple.SimpleClass[0].s) - self.assertEqual(ret.other.dt, val.other.dt) - - def test_huge_number(self): - self.assertEqual(self.client.service.huge_number(), 2 ** int(1e5)) - - def test_long_string(self): - self.assertEqual(self.client.service.long_string(), - ('0123456789abcdef' * 16384)) - - def test_empty(self): - self.client.service.test_empty() - - def test_echo_extension_class(self): - val = self.client.factory.create("{bar}ExtensionClass") - - val.i = 45 - val.s = "asd" - val.f = 12.34 - - val.simple = self.client.factory.create("{spyne.test.interop.server}SimpleClassArray") - - val.simple.SimpleClass.append(self.client.factory.create("{spyne.test.interop.server}SimpleClass")) - val.simple.SimpleClass.append(self.client.factory.create("{spyne.test.interop.server}SimpleClass")) - - val.simple.SimpleClass[0].i = 45 - val.simple.SimpleClass[0].s = "asd" - val.simple.SimpleClass[1].i = 12 - val.simple.SimpleClass[1].s = "qwe" - - val.other = 
self.client.factory.create("{spyne.test.interop.server}OtherClass"); - val.other.dt = datetime.now() - val.other.d = 123.456 - val.other.b = True - - val.p = self.client.factory.create("{hunk.sunk}NonNillableClass"); - val.p.dt = datetime(2010, 6, 2) - val.p.i = 123 - val.p.s = "punk" - - val.l = datetime(2010, 7, 2) - val.q = 5 - - ret = self.client.service.echo_extension_class(val) - print(ret) - - self.assertEqual(ret.i, val.i) - self.assertEqual(ret.s, val.s) - self.assertEqual(ret.f, val.f) - self.assertEqual(ret.simple.SimpleClass[0].i, val.simple.SimpleClass[0].i) - self.assertEqual(ret.other.dt, val.other.dt) - self.assertEqual(ret.p.s, val.p.s) - - - def test_python_exception(self): - try: - self.client.service.python_exception() - raise Exception("must fail") - except WebFault as e: - pass - - def test_soap_exception(self): - try: - self.client.service.soap_exception() - raise Exception("must fail") - except WebFault as e: - pass - - def test_complex_return(self): - ret = self.client.service.complex_return() - - self.assertEqual(ret.resultCode, 1) - self.assertEqual(ret.resultDescription, "Test") - self.assertEqual(ret.transactionId, 123) - self.assertEqual(ret.roles.RoleEnum[0], "MEMBER") - - def test_return_invalid_data(self): - try: - self.client.service.return_invalid_data() - raise Exception("must fail") - except: - pass - - def test_custom_messages(self): - ret = self.client.service.custom_messages("test") - - assert ret == 'test' - - def test_echo_simple_bare(self): - ret = self.client.service.echo_simple_bare("test") - - assert ret == 'test' - - # - # This test is disabled because suds does not create the right request - # object. Opening the first tag below is wrong. 
- # - # - # - # - # - # - # abc - # def - # - # - # - # - # - # The right request looks like this: - # - # - # abc - # def - # - # - def _test_echo_complex_bare(self): - val = ['abc','def'] - ia = self.client.factory.create('stringArray') - ia.string.extend(val) - ret = self.client.service.echo_complex_bare(ia) - - assert ret == val - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/interop/test_wsi.py b/libs_crutch/contrib/spyne/test/interop/test_wsi.py deleted file mode 100644 index cb0aefe..0000000 --- a/libs_crutch/contrib/spyne/test/interop/test_wsi.py +++ /dev/null @@ -1,161 +0,0 @@ -#!/usr/bin/env python -# -# WS-I interoperability test http://www.ws-i.org/deliverables/workinggroup.aspx?wg=testingtools -# latest download: http://www.ws-i.org/Testing/Tools/2005/06/WSI_Test_Java_Final_1.1.zip -# -# Before launching this test, you should download the zip file and unpack it in this -# directory this should create the wsi-test-tools directory. 
-# -# Adapted from http://thestewscope.wordpress.com/2008/08/19/ruby-wrapper-for-ws-i-analyzer-tools/ -# from Luca Dariz -# - -import os -import string -from lxml import etree - -CONFIG_FILE = 'config.xml' -SPYNE_TEST_NS = 'spyne.test.interop.server' -SPYNE_TEST_PORT = 'Application' -SPYNE_REPORT_FILE = 'wsi-report-spyne.xml' - -WSI_ANALYZER_CONFIG_TEMPLATE=string.Template(""" - - - false - - - - - - ${ASSERTIONS_FILE} - - - - ${PORT_NAME} - - - ${PORT_NAME} - - - ${PORT_NAME} - - - ${PORT_NAME} - - - ${PORT_NAME} - - - ${PORT_NAME} - - - ${PORT_NAME} - - ${WSDL_URI} - - -""") - -#This must be changed to point to the physical root of the wsi-installation -WSI_HOME_TAG = "WSI_HOME" -WSI_HOME_VAL = "wsi-test-tools" -WSI_JAVA_HOME_TAG = "WSI_JAVA_HOME" -WSI_JAVA_HOME_VAL = WSI_HOME_VAL+"/java" -WSI_JAVA_OPTS_TAG = "WSI_JAVA_OPTS" -WSI_JAVA_OPTS_VAL = " -Dorg.xml.sax.driver=org.apache.xerces.parsers.SAXParser" -WSI_TEST_ASSERTIONS_FILE = WSI_HOME_VAL+"/common/profiles/SSBP10_BP11_TAD.xml" -WSI_STYLESHEET_FILE = WSI_HOME_VAL+"/common/xsl/report.xsl" -WSI_EXECUTION_COMMAND = "java ${WSI_JAVA_OPTS} -Dwsi.home=${WSI_HOME} -cp ${WSI_CP}\ - org.wsi.test.analyzer.BasicProfileAnalyzer -config " - -WSIClasspath=[ - WSI_JAVA_HOME_VAL+"/lib/wsi-test-tools.jar", - WSI_JAVA_HOME_VAL+"/lib", - WSI_JAVA_HOME_VAL+"/lib/xercesImpl.jar", - WSI_JAVA_HOME_VAL+"/lib/xmlParserAPIs.jar", - WSI_JAVA_HOME_VAL+"/lib/wsdl4j.jar", - WSI_JAVA_HOME_VAL+"/lib/uddi4j.jar", - WSI_JAVA_HOME_VAL+"/lib/axis.jar", - WSI_JAVA_HOME_VAL+"/lib/jaxrpc.jar", - WSI_JAVA_HOME_VAL+"/lib/saaj.jar", - WSI_JAVA_HOME_VAL+"/lib/commons-discovery.jar", - WSI_JAVA_HOME_VAL+"/lib/commons-logging.jar" -] -WSI_CLASSPATH_TAG = "WSI_CP" -WSI_CLASSPATH_VAL = ':'.join(WSIClasspath) - - -def configure_env(): - os.environ[WSI_HOME_TAG] = WSI_HOME_VAL - os.environ[WSI_JAVA_HOME_TAG] = WSI_JAVA_HOME_VAL - os.environ[WSI_JAVA_OPTS_TAG] = WSI_JAVA_OPTS_VAL - os.environ[WSI_CLASSPATH_TAG] = WSI_CLASSPATH_VAL - -def 
create_config(wsdl_uri, config_file): - print(("Creating config for wsdl at %s ...\n" %wsdl_uri)) - # extract target elements - service = 'ValidatingApplication' - port = 'ValidatingApplication' - # for wsdl service declarations: - # create config(service, port) - vars = {'REPORT_FILE':SPYNE_REPORT_FILE, - 'STYLESHEET_FILE':WSI_STYLESHEET_FILE, - 'ASSERTIONS_FILE':WSI_TEST_ASSERTIONS_FILE, - 'WSDL_NAMESPACE':SPYNE_TEST_NS, - 'PORT_NAME':SPYNE_TEST_PORT, - 'WSDL_URI':wsdl_uri} - config = WSI_ANALYZER_CONFIG_TEMPLATE.substitute(vars) - f = open(config_file, 'w') - f.write(config) - f.close() - -def analyze_wsdl(config_file): - # execute ws-i tests - # don't execute Analyzer.sh directly since it needs bash - os.system(WSI_EXECUTION_COMMAND + config_file) - - # parse result - e = etree.parse(SPYNE_REPORT_FILE).getroot() - summary = etree.ETXPath('{%s}summary' %e.nsmap['wsi-report'])(e) - if summary: - # retrieve overall result of the test - result = summary[0].get('result') - if result == 'failed': - outs = etree.ETXPath('{%s}artifact' %(e.nsmap['wsi-report'],))(e) - - # filter for the object describing the wsdl test - desc = [o for o in outs if o.get('type') == 'description'][0] - - # loop over every group test - for entry in desc.iterchildren(): - # loop over every single test - for test in entry.iterchildren(): - # simply print the error if there is one - # an html can be generated using files in wsi-test-tools/common/xsl - if test.get('result') == 'failed': - fail_msg = etree.ETXPath('{%s}failureMessage' %e.nsmap['wsi-report'])(test) - fail_det = etree.ETXPath('{%s}failureDetail' %e.nsmap['wsi-report'])(test) - if fail_msg: - print(('\nFAILURE in test %s\n' %test.get('id'))) - print((fail_msg[0].text)) - if fail_det: - print('\nFAILURE MSG\n') - print((fail_det[0].text)) - -from spyne.test.interop._test_soap_client_base import run_server - -if __name__ == '__main__': - run_server('http') - configure_env() - create_config('http://localhost:9754/?wsdl', CONFIG_FILE) 
- analyze_wsdl(CONFIG_FILE) diff --git a/libs_crutch/contrib/spyne/test/interop/test_zeep.py b/libs_crutch/contrib/spyne/test/interop/test_zeep.py deleted file mode 100644 index d2df5f7..0000000 --- a/libs_crutch/contrib/spyne/test/interop/test_zeep.py +++ /dev/null @@ -1,380 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# -import logging - - -zeep_logger = logging.getLogger('zeep') -zeep_logger.setLevel(logging.INFO) - -import unittest - -from datetime import datetime -from base64 import b64encode, b64decode - -from spyne.test.interop._test_soap_client_base import server_started -from spyne.util import six - -from zeep import Client -from zeep.transports import Transport -from zeep.exceptions import Error as ZeepError - - -class TestZeep(unittest.TestCase): - def setUp(self): - from spyne.test.interop._test_soap_client_base import run_server - run_server('http') - - port, = server_started.keys() - - transport = Transport(cache=False) - self.client = Client("http://localhost:%d/?wsdl" % port, - transport=transport) - self.ns = "spyne.test.interop.server" - - def get_inst(self, what): - return self.client.get_type(what)() - - def test_echo_datetime(self): - val = datetime.now() - ret = 
self.client.service.echo_datetime(val) - - assert val == ret - - def test_echo_datetime_with_invalid_format(self): - val = datetime.now() - ret = self.client.service.echo_datetime_with_invalid_format(val) - - assert val == ret - - def test_echo_date(self): - val = datetime.now().date() - ret = self.client.service.echo_date(val) - - assert val == ret - - def test_echo_date_with_invalid_format(self): - val = datetime.now().date() - ret = self.client.service.echo_date_with_invalid_format(val) - - assert val == ret - - def test_echo_time(self): - val = datetime.now().time() - ret = self.client.service.echo_time(val) - - assert val == ret - - def test_echo_time_with_invalid_format(self): - val = datetime.now().time() - ret = self.client.service.echo_time_with_invalid_format(val) - - assert val == ret - - def test_echo_simple_boolean_array(self): - val = [False, False, False, True] - ret = self.client.service.echo_simple_boolean_array(val) - - assert val == ret - - def test_echo_boolean(self): - val = True - ret = self.client.service.echo_boolean(val) - self.assertEqual(val, ret) - - val = False - ret = self.client.service.echo_boolean(val) - self.assertEqual(val, ret) - - def test_enum(self): - val = self.client.get_type("{%s}DaysOfWeekEnum" % self.ns)('Monday') - - ret = self.client.service.echo_enum(val) - - assert val == ret - - def test_bytearray(self): - val = b"\x00\x01\x02\x03\x04" - ret = self.client.service.echo_bytearray(val) - - assert val == ret - - def test_validation(self): - non_nillable_class = self.client.get_type("{hunk.sunk}NonNillableClass")() - non_nillable_class.i = 6 - non_nillable_class.s = None - - try: - self.client.service.non_nillable(non_nillable_class) - except ZeepError as e: - pass - else: - raise Exception("must fail") - - def test_echo_integer_array(self): - ia = self.client.get_type('{%s}integerArray' % self.ns)() - ia.integer.extend([1, 2, 3, 4, 5]) - self.client.service.echo_integer_array(ia) - - def test_echo_in_header(self): - 
in_header = self.client.get_type('{%s}InHeader' % self.ns)() - in_header.s = 'a' - in_header.i = 3 - - ret = self.client.service.echo_in_header(_soapheaders={ - 'InHeader': in_header, - }) - - print(ret) - - out_header = ret.body.echo_in_headerResult - self.assertEqual(in_header.s, out_header.s) - self.assertEqual(in_header.i, out_header.i) - - def test_echo_in_complex_header(self): - in_header = self.client.get_type('{%s}InHeader' % self.ns)() - in_header.s = 'a' - in_header.i = 3 - in_trace_header = self.client.get_type('{%s}InTraceHeader' % self.ns)() - in_trace_header.client = 'suds' - in_trace_header.callDate = datetime(year=2000, month=1, day=1, hour=0, - minute=0, second=0, microsecond=0) - - ret = self.client.service.echo_in_complex_header(_soapheaders={ - 'InHeader': in_header, - 'InTraceHeader': in_trace_header - }) - - print(ret) - - out_header = ret.body.echo_in_complex_headerResult0 - out_trace_header = ret.body.echo_in_complex_headerResult1 - - self.assertEqual(in_header.s, out_header.s) - self.assertEqual(in_header.i, out_header.i) - self.assertEqual(in_trace_header.client, out_trace_header.client) - self.assertEqual(in_trace_header.callDate, out_trace_header.callDate) - - def test_send_out_header(self): - out_header = self.client.get_type('{%s}OutHeader' % self.ns)() - out_header.dt = datetime(year=2000, month=1, day=1) - out_header.f = 3.141592653 - - ret = self.client.service.send_out_header() - - self.assertEqual(ret.header.OutHeader.dt, out_header.dt) - self.assertEqual(ret.header.OutHeader.f, out_header.f) - - def test_send_out_complex_header(self): - out_header = self.client.get_type('{%s}OutHeader' % self.ns)() - out_header.dt = datetime(year=2000, month=1, day=1) - out_header.f = 3.141592653 - out_trace_header = self.client.get_type('{%s}OutTraceHeader' % self.ns)() - out_trace_header.receiptDate = datetime(year=2000, month=1, day=1, - hour=1, minute=1, second=1, microsecond=1) - out_trace_header.returnDate = datetime(year=2000, month=1, 
day=1, - hour=1, minute=1, second=1, microsecond=100) - - ret = self.client.service.send_out_complex_header() - - self.assertEqual(ret.header.OutHeader.dt, out_header.dt) - self.assertEqual(ret.header.OutHeader.f, out_header.f) - self.assertEqual(ret.header.OutTraceHeader.receiptDate, out_trace_header.receiptDate) - self.assertEqual(ret.header.OutTraceHeader.returnDate, out_trace_header.returnDate) - - def test_echo_string(self): - test_string = "OK" - ret = self.client.service.echo_string(test_string) - - self.assertEqual(ret, test_string) - - def __get_xml_test_val(self): - return { - "test_sub": { - "test_subsub1": { - "test_subsubsub1": ["subsubsub1 value"] - }, - "test_subsub2": ["subsub2 value 1", "subsub2 value 2"], - "test_subsub3": [ - { - "test_subsub3sub1": ["subsub3sub1 value"] - }, - { - "test_subsub3sub2": ["subsub3sub2 value"] - }, - ], - "test_subsub4": [], - "test_subsub5": ["x"], - } - } - - - def test_echo_simple_class(self): - val = self.client.get_type("{%s}SimpleClass" % self.ns)() - - val.i = 45 - val.s = "asd" - - ret = self.client.service.echo_simple_class(val) - - assert ret.i == val.i - assert ret.s == val.s - - def test_echo_class_with_self_reference(self): - val = self.client.get_type("{%s}ClassWithSelfReference" % self.ns)() - - val.i = 45 - val.sr = self.client.get_type("{%s}ClassWithSelfReference" % self.ns)() - val.sr.i = 50 - val.sr.sr = None - - ret = self.client.service.echo_class_with_self_reference(val) - - assert ret.i == val.i - assert ret.sr.i == val.sr.i - - def test_echo_nested_class(self): - val = self.client.get_type("{punk.tunk}NestedClass")() - - val.i = 45 - val.s = "asd" - val.f = 12.34 - val.ai = self.client.get_type("{%s}integerArray" % self.ns)() - val.ai.integer.extend([1, 2, 3, 45, 5, 3, 2, 1, 4]) - - val.simple = self.client.get_type("{%s}SimpleClassArray" % self.ns)() - - val.simple.SimpleClass.append(self.client.get_type("{%s}SimpleClass" % self.ns)()) - 
val.simple.SimpleClass.append(self.client.get_type("{%s}SimpleClass" % self.ns)()) - - val.simple.SimpleClass[0].i = 45 - val.simple.SimpleClass[0].s = "asd" - val.simple.SimpleClass[1].i = 12 - val.simple.SimpleClass[1].s = "qwe" - - val.other = self.client.get_type("{%s}OtherClass" % self.ns)() - val.other.dt = datetime.now() - val.other.d = 123.456 - val.other.b = True - - ret = self.client.service.echo_nested_class(val) - - self.assertEqual(ret.i, val.i) - self.assertEqual(ret.ai.integer, val.ai.integer) - self.assertEqual(ret.ai.integer[0], val.ai.integer[0]) - self.assertEqual(ret.simple.SimpleClass[0].s, val.simple.SimpleClass[0].s) - self.assertEqual(ret.other.dt, val.other.dt) - - def test_huge_number(self): - self.assertEqual(self.client.service.huge_number(), 2 ** int(1e5)) - - def test_long_string(self): - self.assertEqual(self.client.service.long_string(), - ('0123456789abcdef' * 16384)) - - def test_empty(self): - self.client.service.test_empty() - - def test_echo_extension_class(self): - val = self.client.get_type("{bar}ExtensionClass")() - - val.i = 45 - val.s = "asd" - val.f = 12.34 - - val.simple = self.client.get_type("{%s}SimpleClassArray" % self.ns)() - - val.simple.SimpleClass.append(self.client.get_type("{%s}SimpleClass" % self.ns)()) - val.simple.SimpleClass.append(self.client.get_type("{%s}SimpleClass" % self.ns)()) - - val.simple.SimpleClass[0].i = 45 - val.simple.SimpleClass[0].s = "asd" - val.simple.SimpleClass[1].i = 12 - val.simple.SimpleClass[1].s = "qwe" - - val.other = self.client.get_type("{%s}OtherClass" % self.ns)() - val.other.dt = datetime.now() - val.other.d = 123.456 - val.other.b = True - - val.p = self.client.get_type("{hunk.sunk}NonNillableClass")() - val.p.dt = datetime(2010, 6, 2) - val.p.i = 123 - val.p.s = "punk" - - val.l = datetime(2010, 7, 2) - val.q = 5 - - ret = self.client.service.echo_extension_class(val) - print(ret) - - self.assertEqual(ret.i, val.i) - self.assertEqual(ret.s, val.s) - self.assertEqual(ret.f, 
val.f) - self.assertEqual(ret.simple.SimpleClass[0].i, val.simple.SimpleClass[0].i) - self.assertEqual(ret.other.dt, val.other.dt) - self.assertEqual(ret.p.s, val.p.s) - - - def test_python_exception(self): - try: - self.client.service.python_exception() - raise Exception("must fail") - except ZeepError as e: - pass - - def test_soap_exception(self): - try: - self.client.service.soap_exception() - raise Exception("must fail") - except ZeepError as e: - pass - - def test_complex_return(self): - ret = self.client.service.complex_return() - - self.assertEqual(ret.resultCode, 1) - self.assertEqual(ret.resultDescription, "Test") - self.assertEqual(ret.transactionId, 123) - self.assertEqual(ret.roles.RoleEnum[0], "MEMBER") - - def test_return_invalid_data(self): - try: - self.client.service.return_invalid_data() - raise Exception("must fail") - except: - pass - - def test_custom_messages(self): - ret = self.client.service.custom_messages("test") - - assert ret == 'test' - - def test_echo_simple_bare(self): - ret = self.client.service.echo_simple_bare("test") - - assert ret == 'test' - - def test_echo_complex_bare(self): - val = ['abc','def'] - ret = self.client.service.echo_complex_bare(val) - - assert ret == val - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/model/__init__.py b/libs_crutch/contrib/spyne/test/model/__init__.py deleted file mode 100644 index 7b899d5..0000000 --- a/libs_crutch/contrib/spyne/test/model/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# diff --git a/libs_crutch/contrib/spyne/test/model/test_binary.py b/libs_crutch/contrib/spyne/test/model/test_binary.py deleted file mode 100644 index 06a4841..0000000 --- a/libs_crutch/contrib/spyne/test/model/test_binary.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import unittest -from lxml import etree - -from spyne.protocol.soap import Soap11 -from spyne.model.binary import ByteArray -from spyne.model.binary import _bytes_join -import spyne.const.xml - -ns_xsd = spyne.const.xml.NS_XSD -ns_test = 'test_namespace' - - -class TestBinary(unittest.TestCase): - def setUp(self): - self.data = bytes(bytearray(range(0xff))) - - def test_data(self): - element = etree.Element('test') - Soap11().to_parent(None, ByteArray, [self.data], element, ns_test) - print(etree.tostring(element, pretty_print=True)) - element = element[0] - - a2 = Soap11().from_element(None, ByteArray, element) - self.assertEqual(self.data, _bytes_join(a2)) - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/model/test_complex.py b/libs_crutch/contrib/spyne/test/model/test_complex.py deleted file mode 100644 index 0bc56e0..0000000 --- a/libs_crutch/contrib/spyne/test/model/test_complex.py +++ /dev/null @@ -1,1146 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import pytz -import datetime -import unittest - -from pprint import pprint - -from lxml import etree - -from base64 import b64encode -from decimal import Decimal as D - -from spyne import Application, rpc, mrpc, Service, ByteArray, Array, \ - ComplexModel, SelfReference, XmlData, XmlAttribute, Unicode, DateTime, \ - Float, Integer, String -from spyne.const import xml -from spyne.error import ResourceNotFoundError -from spyne.interface import Interface -from spyne.interface.wsdl import Wsdl11 -from spyne.model.addtl import TimeSegment, DateSegment, DateTimeSegment -from spyne.protocol import ProtocolBase -from spyne.protocol.soap import Soap11 -from spyne.server.null import NullServer - -from spyne.protocol.dictdoc import SimpleDictDocument -from spyne.protocol.xml import XmlDocument - -from spyne.test import FakeApp - -ns_test = 'test_namespace' - - -class Address(ComplexModel): - street = String - city = String - zip = Integer - since = DateTime - lattitude = Float - longitude = Float - -Address.resolve_namespace(Address, __name__) - - -class Person(ComplexModel): - name = String - birthdate = DateTime - age = Integer - addresses = Array(Address) - titles = Array(String) - -Person.resolve_namespace(Person, __name__) - - -class Employee(Person): - employee_id = Integer - salary = Float - -Employee.resolve_namespace(Employee, __name__) - -class Level2(ComplexModel): - arg1 = String - arg2 = Float - -Level2.resolve_namespace(Level2, __name__) - - -class Level3(ComplexModel): - arg1 = Integer - -Level3.resolve_namespace(Level3, __name__) - - -class Level4(ComplexModel): - arg1 = String - -Level4.resolve_namespace(Level4, __name__) - - -class Level1(ComplexModel): - level2 = Level2 - level3 = Array(Level3) - 
level4 = Array(Level4) - -Level1.resolve_namespace(Level1, __name__) - - -class TestComplexModel(unittest.TestCase): - def test_validate_on_assignment_fail(self): - class C(ComplexModel): - i = Integer(voa=True) - - try: - C().i = 'a' - except ValueError: - pass - else: - raise Exception('must fail with ValueError') - - def test_validate_on_assignment_success(self): - class C(ComplexModel): - i = Integer(voa=True) - - c = C() - - c.i = None - assert c.i is None - - c.i = 5 - assert c.i == 5 - - def test_simple_class(self): - a = Address() - a.street = '123 happy way' - a.city = 'badtown' - a.zip = 32 - a.lattitude = 4.3 - a.longitude = 88.0 - - element = etree.Element('test') - XmlDocument().to_parent(None, Address, a, element, ns_test) - element = element[0] - self.assertEqual(5, len(element.getchildren())) - - a.since = datetime.datetime(year=2011, month=12, day=31, tzinfo=pytz.utc) - element = etree.Element('test') - XmlDocument().to_parent(None, Address, a, element, ns_test) - element = element[0] - self.assertEqual(6, len(element.getchildren())) - - r = XmlDocument().from_element(None, Address, element) - - self.assertEqual(a.street, r.street) - self.assertEqual(a.city, r.city) - self.assertEqual(a.zip, r.zip) - self.assertEqual(a.lattitude, r.lattitude) - self.assertEqual(a.longitude, r.longitude) - self.assertEqual(a.since, r.since) - - def test_nested_class(self): # FIXME: this test is incomplete - p = Person() - element = etree.Element('test') - XmlDocument().to_parent(None, Person, p, element, ns_test) - element = element[0] - - self.assertEqual(None, p.name) - self.assertEqual(None, p.birthdate) - self.assertEqual(None, p.age) - self.assertEqual(None, p.addresses) - - def test_class_array(self): - peeps = [] - names = ['bob', 'jim', 'peabody', 'mumblesleeves'] - dob = datetime.datetime(1979, 1, 1, tzinfo=pytz.utc) - for name in names: - a = Person() - a.name = name - a.birthdate = dob - a.age = 27 - peeps.append(a) - - type = Array(Person) - 
type.resolve_namespace(type, __name__) - - element = etree.Element('test') - - XmlDocument().to_parent(None, type, peeps, element, ns_test) - element = element[0] - - self.assertEqual(4, len(element.getchildren())) - - peeps2 = XmlDocument().from_element(None, type, element) - for i in range(0, 4): - self.assertEqual(peeps2[i].name, names[i]) - self.assertEqual(peeps2[i].birthdate, dob) - - def test_class_nested_array(self): - peeps = [] - names = ['bob', 'jim', 'peabody', 'mumblesleves'] - - for name in names: - a = Person() - a.name = name - a.birthdate = datetime.datetime(1979, 1, 1) - a.age = 27 - a.addresses = [] - - for i in range(0, 25): - addr = Address() - addr.street = '555 downtown' - addr.city = 'funkytown' - a.addresses.append(addr) - peeps.append(a) - - arr = Array(Person) - arr.resolve_namespace(arr, __name__) - element = etree.Element('test') - XmlDocument().to_parent(None, arr, peeps, element, ns_test) - element = element[0] - - self.assertEqual(4, len(element.getchildren())) - - peeps2 = XmlDocument().from_element(None, arr, element) - for peep in peeps2: - self.assertEqual(27, peep.age) - self.assertEqual(25, len(peep.addresses)) - self.assertEqual('funkytown', peep.addresses[18].city) - - def test_complex_class(self): - l = Level1() - l.level2 = Level2() - l.level2.arg1 = 'abcd' - l.level2.arg2 = 1.444 - l.level3 = [] - l.level4 = [] - - for i in range(0, 100): - a = Level3() - a.arg1 = i - l.level3.append(a) - - for i in range(0, 4): - a = Level4() - a.arg1 = str(i) - l.level4.append(a) - - element = etree.Element('test') - XmlDocument().to_parent(None, Level1, l, element, ns_test) - element = element[0] - l1 = XmlDocument().from_element(None, Level1, element) - - self.assertEqual(l1.level2.arg1, l.level2.arg1) - self.assertEqual(l1.level2.arg2, l.level2.arg2) - self.assertEqual(len(l1.level4), len(l.level4)) - self.assertEqual(100, len(l.level3)) - - -class X(ComplexModel): - __namespace__ = 'tns' - x = Integer(nillable=True, 
max_occurs='unbounded') - - -class Y(X): - __namespace__ = 'tns' - y = Integer - - -class TestIncompleteInput(unittest.TestCase): - def test_x(self): - x = X() - x.x = [1, 2] - element = etree.Element('test') - XmlDocument().to_parent(None, X, x, element, 'tns') - msg = element[0] - r = XmlDocument().from_element(None, X, msg) - self.assertEqual(r.x, [1, 2]) - - def test_y_fromxml(self): - x = X() - x.x = [1, 2] - element = etree.Element('test') - XmlDocument().to_parent(None, X, x, element, 'tns') - msg = element[0] - r = XmlDocument().from_element(None, Y, msg) - self.assertEqual(r.x, [1, 2]) - - def test_y_toxml(self): - y = Y() - y.x = [1, 2] - y.y = 38 - element = etree.Element('test') - XmlDocument().to_parent(None, Y, y, element, 'tns') - msg = element[0] - r = XmlDocument().from_element(None, Y, msg) - - def test_serialization_instance_on_subclass(self): - test_values = { - 'x': [1, 2], - 'y': 38 - } - instance = Y.get_serialization_instance(test_values) - - self.assertEqual(instance.x, [1, 2]) - self.assertEqual(instance.y, 38) - - -class SisMsg(ComplexModel): - data_source = String(nillable=False, min_occurs=1, max_occurs=1, max_len=50) - direction = String(nillable=False, min_occurs=1, max_occurs=1, max_len=50) - interface_name = String(nillable=False, min_occurs=1, max_occurs=1, max_len=50) - crt_dt = DateTime(nillable=False) - - -class EncExtractXs(ComplexModel): - __min_occurs__ = 1 - __max_occurs__ = 1 - mbr_idn = Integer(nillable=False, min_occurs=1, max_occurs=1, max_len=18) - enc_idn = Integer(nillable=False, min_occurs=1, max_occurs=1, max_len=18) - hist_idn = Integer(nillable=False, min_occurs=1, max_occurs=1, max_len=18) - -class TestComplex(unittest.TestCase): - def test_array_type_name(self): - assert Array(String, type_name='punk').__type_name__ == 'punk' - - def test_ctor_kwargs(self): - class Category(ComplexModel): - id = Integer(min_occurs=1, max_occurs=1, nillable=False) - children = Array(Unicode) - - v = Category(id=5, 
children=['a','b']) - - assert v.id == 5 - assert v.children == ['a', 'b'] - - def test_ctor_args(self): - class Category(ComplexModel): - id = XmlData(Integer(min_occurs=1, max_occurs=1, nillable=False)) - children = Array(Unicode) - - v = Category(id=5, children=['a','b']) - - assert v.id == 5 - assert v.children == ['a', 'b'] - - v = Category(5, children=['a','b']) - - assert v.id == 5 - assert v.children == ['a', 'b'] - - def test_ctor_args_2(self): - class Category(ComplexModel): - children = Array(Unicode) - - class BetterCategory(Category): - sub_category = Unicode - - v = BetterCategory(children=['a','b'], sub_category='aaa') - - assert v.children == ['a', 'b'] - assert v.sub_category == 'aaa' - - def test_flat_type_info(self): - class A(ComplexModel): - i = Integer - - class B(A): - s = String - - assert 's' in B.get_flat_type_info(B) - assert 'i' in B.get_flat_type_info(B) - - def test_flat_type_info_attr(self): - class A(ComplexModel): - i = Integer - ia = XmlAttribute(Integer) - - class B(A): - s = String - sa = XmlAttribute(String) - - assert 's' in B.get_flat_type_info(B) - assert 'i' in B.get_flat_type_info(B) - assert 'sa' in B.get_flat_type_info(B) - assert 'ia' in B.get_flat_type_info(B) - assert 'sa' in B.get_flat_type_info(B).attrs - assert 'ia' in B.get_flat_type_info(B).attrs - - -class TestXmlAttribute(unittest.TestCase): - def assertIsNotNone(self, obj, msg=None): - """Stolen from Python 2.7 stdlib.""" - - if obj is None: - standardMsg = 'unexpectedly None' - self.fail(self._formatMessage(msg, standardMsg)) - - def test_add_to_schema(self): - class CM(ComplexModel): - i = Integer - s = String - a = XmlAttribute(String) - - app = FakeApp() - app.tns = 'tns' - CM.resolve_namespace(CM, app.tns) - interface = Interface(app) - interface.add_class(CM) - - wsdl = Wsdl11(interface) - wsdl.build_interface_document('http://a-aaaa.com') - pref = CM.get_namespace_prefix(interface) - type_def = wsdl.get_schema_info(pref).types[CM.get_type_name()] - 
attribute_def = type_def.find(xml.XSD('attribute')) - print(etree.tostring(type_def, pretty_print=True)) - - self.assertIsNotNone(attribute_def) - self.assertEqual(attribute_def.get('name'), 'a') - self.assertEqual(attribute_def.get('type'), CM.a.type.get_type_name_ns(interface)) - - def test_b64_non_attribute(self): - class PacketNonAttribute(ComplexModel): - __namespace__ = 'myns' - Data = ByteArray - - test_string = b'yo test data' - b64string = b64encode(test_string) - - gg = PacketNonAttribute(Data=[test_string]) - - element = etree.Element('test') - Soap11().to_parent(None, PacketNonAttribute, gg, element, gg.get_namespace()) - - element = element[0] - #print etree.tostring(element, pretty_print=True) - data = element.find('{%s}Data' % gg.get_namespace()).text - self.assertEqual(data, b64string.decode('ascii')) - s1 = Soap11().from_element(None, PacketNonAttribute, element) - assert s1.Data[0] == test_string - - def test_b64_attribute(self): - class PacketAttribute(ComplexModel): - __namespace__ = 'myns' - Data = XmlAttribute(ByteArray, use='required') - - test_string = b'yo test data' - b64string = b64encode(test_string) - gg = PacketAttribute(Data=[test_string]) - - element = etree.Element('test') - Soap11().to_parent(None, PacketAttribute, gg, element, gg.get_namespace()) - - element = element[0] - print(etree.tostring(element, pretty_print=True)) - print(element.attrib) - self.assertEqual(element.attrib['Data'], b64string.decode('ascii')) - - s1 = Soap11().from_element(None, PacketAttribute, element) - assert s1.Data[0] == test_string - - def test_customized_type(self): - class SomeClass(ComplexModel): - a = XmlAttribute(Integer(ge=4)) - class SomeService(Service): - @rpc(SomeClass) - def some_call(ctx, some_class): - pass - app = Application([SomeService], 'some_tns') - - -class TestSimpleTypeRestrictions(unittest.TestCase): - def test_simple_type_info(self): - class CM(ComplexModel): - i = Integer - s = String - - class CCM(ComplexModel): - c = CM - i = 
Integer - s = String - - sti = CCM.get_simple_type_info(CCM) - - pprint(sti) - assert "i" in sti - assert sti["i"].path == ('i',) - assert sti["i"].type is Integer - assert sti["s"].parent is CCM - assert "s" in sti - assert sti["s"].path == ('s',) - assert sti["s"].type is String - assert sti["s"].parent is CCM - - assert "c.i" in sti - assert sti["c.i"].path == ('c','i') - assert sti["c.i"].type is Integer - assert sti["c.i"].parent is CM - assert "c.s" in sti - assert sti["c.s"].path == ('c','s') - assert sti["c.s"].type is String - assert sti["c.s"].parent is CM - - def test_simple_type_info_conflicts(self): - class CM(ComplexModel): - i = Integer - s = String - - class CCM(ComplexModel): - c = CM - c_i = Float - - try: - CCM.get_simple_type_info(CCM, hier_delim='_') - except ValueError: - pass - else: - raise Exception("must fail") - -class TestFlatDict(unittest.TestCase): - def test_basic(self): - class CM(ComplexModel): - i = Integer - s = String - - class CCM(ComplexModel): - c = CM - i = Integer - s = String - - val = CCM(i=5, s='a', c=CM(i=7, s='b')) - - d = SimpleDictDocument().object_to_simple_dict(CCM, val) - - assert d['i'] == 5 - assert d['s'] == 'a' - assert d['c.i'] == 7 - assert d['c.s'] == 'b' - - assert len(d) == 4 - - def test_sub_name_ser(self): - class CM(ComplexModel): - integer = Integer(sub_name='i') - string = String(sub_name='s') - - val = CM(integer=7, string='b') - - d = SimpleDictDocument().object_to_simple_dict(CM, val) - - pprint(d) - - assert d['i'] == 7 - assert d['s'] == 'b' - - assert len(d) == 2 - - def test_sub_name_deser(self): - class CM(ComplexModel): - integer = Integer(sub_name='i') - string = String(sub_name='s') - - d = {'i': [7], 's': ['b']} - - val = SimpleDictDocument().simple_dict_to_object(None, d, CM) - - pprint(d) - - assert val.integer == 7 - assert val.string == 'b' - - def test_array_not_none(self): - class CM(ComplexModel): - i = Integer - s = String - - class CCM(ComplexModel): - c = Array(CM) - - val = 
CCM(c=[CM(i=i, s='b'*(i+1)) for i in range(2)]) - - d = SimpleDictDocument().object_to_simple_dict(CCM, val) - print(d) - - assert d['c[0].i'] == 0 - assert d['c[0].s'] == 'b' - assert d['c[1].i'] == 1 - assert d['c[1].s'] == 'bb' - - assert len(d) == 4 - - def test_array_none(self): - class CM(ComplexModel): - i = Integer - s = String - - class CCM(ComplexModel): - c = Array(CM) - - val = CCM() - - d = SimpleDictDocument().object_to_simple_dict(CCM, val) - print(d) - - assert len(d) == 0 - - def test_array_nested(self): - class CM(ComplexModel): - i = Array(Integer) - - class CCM(ComplexModel): - c = Array(CM) - - val = CCM(c=[CM(i=range(i)) for i in range(2, 4)]) - - d = SimpleDictDocument().object_to_simple_dict(CCM, val) - pprint(d) - - assert d['c[0].i'] == [0, 1] - assert d['c[1].i'] == [0, 1, 2] - - assert len(d) == 2 - - def test_array_nonwrapped(self): - i = Array(Integer, wrapped=False) - - assert issubclass(i, Integer), i - assert i.Attributes.max_occurs == D('infinity') - - -class TestSelfRefence(unittest.TestCase): - def test_canonical_case(self): - class TestSelfReference(ComplexModel): - self_reference = SelfReference - - c = TestSelfReference._type_info['self_reference'] - c = c.__orig__ or c - - assert c is TestSelfReference - - class SoapService(Service): - @rpc(_returns=TestSelfReference) - def view_categories(ctx): - pass - - Application([SoapService], 'service.soap') - - def test_self_referential_array_workaround(self): - from spyne.util.dictdoc import get_object_as_dict - class Category(ComplexModel): - id = Integer(min_occurs=1, max_occurs=1, nillable=False) - - Category._type_info['children'] = Array(Category) - - parent = Category() - parent.children = [Category(id=0), Category(id=1)] - - d = get_object_as_dict(parent, Category) - pprint(d) - assert d['children'][0]['id'] == 0 - assert d['children'][1]['id'] == 1 - - class SoapService(Service): - @rpc(_returns=Category) - def view_categories(ctx): - pass - - Application([SoapService], 
'service.soap', - in_protocol=ProtocolBase(), - out_protocol=ProtocolBase()) - - def test_canonical_array(self): - class Category(ComplexModel): - id = Integer(min_occurs=1, max_occurs=1, nillable=False) - children = Array(SelfReference) - - parent = Category() - parent.children = [Category(id=1), Category(id=2)] - - sr, = Category._type_info['children']._type_info.values() - assert issubclass(sr, Category) - - -class TestMemberRpc(unittest.TestCase): - def test_simple(self): - class SomeComplexModel(ComplexModel): - @mrpc() - def put(self, ctx): - return "PUNK!!!" - - methods = SomeComplexModel.Attributes.methods - print(methods) - assert 'put' in methods - - def test_simple_customize(self): - class SomeComplexModel(ComplexModel): - @mrpc() - def put(self, ctx): - return "PUNK!!!" - - methods = SomeComplexModel.customize(zart='zurt').Attributes.methods - print(methods) - assert 'put' in methods - - def test_simple_with_fields(self): - class SomeComplexModel(ComplexModel): - a = Integer - @mrpc() - def put(self, ctx): - return "PUNK!!!" - - methods = SomeComplexModel.Attributes.methods - print(methods) - assert 'put' in methods - - def test_simple_with_explicit_fields(self): - class SomeComplexModel(ComplexModel): - _type_info = [('a', Integer)] - @mrpc() - def put(self, ctx): - return "PUNK!!!" 
- - methods = SomeComplexModel.Attributes.methods - print(methods) - assert 'put' in methods - - def test_native_call(self): - v = 'whatever' - - class SomeComplexModel(ComplexModel): - @mrpc() - def put(self, ctx): - return v - - assert SomeComplexModel().put(None) == v - - def test_interface(self): - class SomeComplexModel(ComplexModel): - @mrpc() - def member_method(self, ctx): - pass - - methods = SomeComplexModel.Attributes.methods - print(methods) - assert 'member_method' in methods - - class SomeService(Service): - @rpc(_returns=SomeComplexModel) - def service_method(ctx): - return SomeComplexModel() - - app = Application([SomeService], 'some_ns') - - mmm = __name__ + '.SomeComplexModel.member_method' - assert mmm in app.interface.method_id_map - - def test_interface_mult(self): - class SomeComplexModel(ComplexModel): - @mrpc() - def member_method(self, ctx): - pass - - methods = SomeComplexModel.Attributes.methods - print(methods) - assert 'member_method' in methods - - class SomeService(Service): - @rpc(_returns=SomeComplexModel) - def service_method(ctx): - return SomeComplexModel() - - @rpc(_returns=SomeComplexModel.customize(type_name='zon')) - def service_method_2(ctx): - return SomeComplexModel() - - app = Application([SomeService], 'some_ns') - - mmm = __name__ + '.SomeComplexModel.member_method' - assert mmm in app.interface.method_id_map - - def test_remote_call_error(self): - from spyne import mrpc - v = 'deger' - - class SomeComplexModel(ComplexModel): - @mrpc(_returns=SelfReference) - def put(self, ctx): - return v - - class SomeService(Service): - @rpc(_returns=SomeComplexModel) - def get(ctx): - return SomeComplexModel() - - null = NullServer(Application([SomeService], tns='some_tns')) - - try: - null.service.put() - except ResourceNotFoundError: - pass - else: - raise Exception("Must fail with: \"Requested resource " - "'{spyne.test.model.test_complex}SomeComplexModel' not found\"") - - def test_signature(self): - class 
SomeComplexModel(ComplexModel): - @mrpc() - def member_method(self, ctx): - pass - - methods = SomeComplexModel.Attributes.methods - - # we use __orig__ because implicit classes are .customize(validate_freq=False)'d - assert methods['member_method'].in_message._type_info[0].__orig__ is SomeComplexModel - - def test_self_reference(self): - from spyne import mrpc - - class SomeComplexModel(ComplexModel): - @mrpc(_returns=SelfReference) - def method(self, ctx): - pass - - methods = SomeComplexModel.Attributes.methods - assert methods['method'].out_message._type_info[0] is SomeComplexModel - - def test_remote_call_success(self): - from spyne import mrpc - - class SomeComplexModel(ComplexModel): - i = Integer - @mrpc(_returns=SelfReference) - def echo(self, ctx): - return self - - class SomeService(Service): - @rpc(_returns=SomeComplexModel) - def get(ctx): - return SomeComplexModel() - - null = NullServer(Application([SomeService], tns='some_tns')) - - v = SomeComplexModel(i=5) - assert null.service['SomeComplexModel.echo'](v) is v - - def test_order(self): - class CM(ComplexModel): - _type_info = [ - ('a', Integer), - ('c', Integer(order=0)) - ] - - assert CM._type_info.keys() == ['c', 'a'] - - -class TestDoc(unittest.TestCase): - def test_parent_doc(self): - class SomeComplexModel(ComplexModel): - """Some docstring""" - some_field = Unicode - class Annotations(ComplexModel.Annotations): - __use_parent_doc__ = True - assert "Some docstring" == SomeComplexModel.get_documentation() - - def test_annotation(self): - class SomeComplexModel(ComplexModel): - """Some docstring""" - class Annotations(ComplexModel.Annotations): - doc = "Some annotations" - - some_field = Unicode - assert "Some annotations" == SomeComplexModel.get_documentation() - - def test_no_parent_doc(self): - class SomeComplexModel(ComplexModel): - """Some docstring""" - class Annotations(ComplexModel.Annotations): - __use_parent_doc__ = False - - some_field = Unicode - assert "" == 
SomeComplexModel.get_documentation() - - def test_parent_doc_customize(self): - """Check that we keep the documentation when we use customize""" - class SomeComplexModel(ComplexModel): - """Some docstring""" - some_field = Unicode - class Annotations(ComplexModel.Annotations): - __use_parent_doc__ = True - assert "Some docstring" == SomeComplexModel.customize().get_documentation() - - -class TestCustomize(unittest.TestCase): - def test_base_class(self): - class A(ComplexModel): - s = Unicode - - assert A.customize().__extends__ is None - - class B(A): - i = Integer - - assert B.__orig__ is None - - B2 = B.customize() - - assert B2.__orig__ is B - assert B2.__extends__ is A - - B3 = B2.customize() - - assert B3.__orig__ is B - assert B3.__extends__ is A - - def test_noop(self): - class A(ComplexModel): - s = Unicode - - assert A.get_flat_type_info(A)['s'].Attributes.max_len == D('inf') - - def test_cust_simple(self): - # simple types are different from complex ones for __extends__ handling. - # simple types set __orig__ and __extends__ on customization. - # complex types set __orig__ but not extend. 
- # for complex types, __extend__ is set only on explicit inheritance - - t = Unicode(max_len=10) - - assert t.Attributes.max_len == 10 - assert t.__extends__ is Unicode - assert t.__orig__ is Unicode - - def test_cust_simple_again(self): - t = Unicode(max_len=10) - t2 = t(min_len=5) - - assert t2.Attributes.max_len == 10 - assert t2.Attributes.min_len == 5 - assert t2.__extends__ is t - assert t2.__orig__ is Unicode - - def test_cust_complex(self): - class A(ComplexModel): - s = Unicode - - A2 = A.customize( - child_attrs=dict( - s=dict( - max_len=10 - ) - ) - ) - - assert A2.get_flat_type_info(A2)['s'].Attributes.max_len == 10 - - def test_cust_base_class(self): - class A(ComplexModel): - s = Unicode - - class B(A): - i = Integer - - B2 = B.customize( - child_attrs=dict( - s=dict( - max_len=10, - ), - ), - ) - - assert B2.get_flat_type_info(B2)['s'].Attributes.max_len == 10 - - def test_cust_again_base_class(self): - class A(ComplexModel): - s = Unicode - - A2 = A.customize() - try: - class B(A2): - i = Integer - except AssertionError: - pass - else: - raise Exception("must fail") - - def test_cust_array(self): - A = Array(Unicode) - - assert A.__orig__ is Array - assert A.__extends__ is None - assert issubclass(A, Array) - - def test_cust_array_again(self): - A = Array(Unicode) - - A = A.customize(foo='bar') - - assert A.Attributes.foo == 'bar' - assert A.__orig__ is Array - assert A.__extends__ is None - assert issubclass(A, Array) - - def test_cust_array_serializer(self): - A = Array(Unicode) - - A = A.customize( - serializer_attrs=dict( - max_len=10, - ), - ) - - serializer, = A._type_info.values() - - assert serializer.Attributes.max_len == 10 - assert serializer.__orig__ is Unicode - assert issubclass(serializer, Unicode) - - def test_cust_sub_array(self): - """vanilla class is passed as base""" - class A(ComplexModel): - s = Array(Unicode) - - d = dict( - child_attrs=dict( - s=dict( - serializer_attrs=dict( - max_len=10, - ), - ), - ), - ) - - A2 = 
A.customize(**d) - - ser, = A2._type_info['s']._type_info.values() - assert ser.Attributes.max_len == 10 - - class B(A): - i = Integer - - B2 = B.customize(**d) - - b2_fti = B2.get_flat_type_info(B2) - ser, = b2_fti['s']._type_info.values() - - assert ser.Attributes.max_len == 10 - - def test_cust_side_effect(self): - class A(ComplexModel): - s = Unicode - i = Integer - - class B(A): - d = DateTime - - B2 = B.customize(child_attrs=dict(s=dict(max_len=10))) - assert B2.get_flat_type_info(B2)['s'].Attributes.max_len == 10 - - B3 = B2.customize(child_attrs=dict(d=dict(dt_format="%y"))) - assert B3.get_flat_type_info(B3)['s'].Attributes.max_len == 10 - - def test_cust_all(self): - class A(ComplexModel): - s = Unicode - i = Unicode - - class B(A): - d = DateTime - - B2 = B.customize(child_attrs_all=dict(max_len=10)) - assert B2.get_flat_type_info(B2)['s'].Attributes.max_len == 10 - assert B2.get_flat_type_info(B2)['i'].Attributes.max_len == 10 - - def test_cust_noexc(self): - class A(ComplexModel): - s = Unicode - i = Integer - - class B(A): - d = DateTime - - B2 = B.customize(child_attrs_noexc=dict(s=dict(max_len=10))) - assert B2.get_flat_type_info(B2)['s'].Attributes.max_len == 10 - assert B2.get_flat_type_info(B2)['s'].Attributes.exc == False - assert B2.get_flat_type_info(B2)['i'].Attributes.exc == True - - - def test_complex_type_name_clashes(self): - class TestComplexModel(ComplexModel): - attr1 = String - - TestComplexModel1 = TestComplexModel - - class TestComplexModel(ComplexModel): - attr2 = String - - TestComplexModel2 = TestComplexModel - - class TestService(Service): - @rpc(TestComplexModel1) - def test1(ctx, obj): - pass - - @rpc(TestComplexModel2) - def test2(ctx, obj): - pass - - try: - Application([TestService], 'tns') - except Exception as e: - print(e) - else: - raise Exception("must fail with: " - "ValueError: classes " - " " - "and " - " " - "have conflicting names.") - - -class TestAdditional(unittest.TestCase): - def test_time_segment(self): - 
data = TimeSegment.from_string("[11:12:13.123456,14:15:16.789012]") - - assert data.start_inclusive - assert data.start == datetime.time(11, 12, 13, 123456) - assert data.end == datetime.time(14, 15, 16, 789012) - assert data.end_inclusive - - def test_date_segment(self): - data = DateSegment.from_string("[2016-03-03,2016-05-07[") - - assert data.start_inclusive == True - assert data.start == datetime.date(2016, 3, 3) - assert data.end == datetime.date(2016, 5, 7) - assert data.end_inclusive == False - - def test_datetime_segment(self): - data = DateTimeSegment.from_string("]2016-03-03T10:20:30.405060," - "2016-05-07T00:01:02.030405]") - - assert data.start_inclusive == False - assert data.start == datetime.datetime(2016, 3, 3, 10, 20, 30, 405060) - assert data.end == datetime.datetime(2016, 5, 7, 0, 1, 2, 30405) - assert data.end_inclusive == True - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/model/test_enum.py b/libs_crutch/contrib/spyne/test/model/test_enum.py deleted file mode 100644 index 7d8fd4f..0000000 --- a/libs_crutch/contrib/spyne/test/model/test_enum.py +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import unittest - -from pprint import pprint - -from spyne.application import Application -from spyne.const.xml import XSD -from spyne.interface.wsdl.wsdl11 import Wsdl11 -from spyne.model.complex import Array -from spyne.model.complex import ComplexModel -from spyne.protocol.xml import XmlDocument -from spyne.protocol.soap.soap11 import Soap11 - -from spyne.server.wsgi import WsgiApplication -from spyne.service import Service -from spyne.decorator import rpc - -from spyne.model.enum import Enum - -from lxml import etree - -vals = [ - 'Monday', - 'Tuesday', - 'Wednesday', - 'Thursday', - 'Friday', - 'Saturday', - 'Sunday', -] - -DaysOfWeekEnum = Enum( - 'Monday', - 'Tuesday', - 'Wednesday', - 'Thursday', - 'Friday', - 'Saturday', - 'Sunday', - type_name = 'DaysOfWeekEnum', -) - -class SomeService(Service): - @rpc(DaysOfWeekEnum, _returns=DaysOfWeekEnum) - def get_the_day(self, day): - return DaysOfWeekEnum.Sunday - - -class SomeClass(ComplexModel): - days = DaysOfWeekEnum(max_occurs=7) - - -class TestEnum(unittest.TestCase): - def setUp(self): - self.app = Application([SomeService], 'tns', - in_protocol=Soap11(), out_protocol=Soap11()) - self.app.transport = 'test' - - self.server = WsgiApplication(self.app) - self.wsdl = Wsdl11(self.app.interface) - self.wsdl.build_interface_document('prot://url') - - def test_wsdl(self): - wsdl = self.wsdl.get_interface_document() - - elt = etree.fromstring(wsdl) - simple_type = elt.xpath('//xs:simpleType', namespaces=self.app.interface.nsmap)[0] - - print((etree.tostring(elt, pretty_print=True))) - print(simple_type) - - self.assertEqual(simple_type.attrib['name'], 'DaysOfWeekEnum') - self.assertEqual(simple_type[0].tag, XSD("restriction")) - self.assertEqual([e.attrib['value'] for e in simple_type[0]], vals) 
- - def test_serialize(self): - mo = DaysOfWeekEnum.Monday - print((repr(mo))) - - elt = etree.Element('test') - XmlDocument().to_parent(None, DaysOfWeekEnum, mo, elt, 'test_namespace') - elt = elt[0] - ret = XmlDocument().from_element(None, DaysOfWeekEnum, elt) - - self.assertEqual(mo, ret) - - def test_serialize_complex_array(self): - days = [ - DaysOfWeekEnum.Monday, - DaysOfWeekEnum.Tuesday, - DaysOfWeekEnum.Wednesday, - DaysOfWeekEnum.Thursday, - DaysOfWeekEnum.Friday, - DaysOfWeekEnum.Saturday, - DaysOfWeekEnum.Sunday, - ] - - days_xml = [ - ('{tns}DaysOfWeekEnum', 'Monday'), - ('{tns}DaysOfWeekEnum', 'Tuesday'), - ('{tns}DaysOfWeekEnum', 'Wednesday'), - ('{tns}DaysOfWeekEnum', 'Thursday'), - ('{tns}DaysOfWeekEnum', 'Friday'), - ('{tns}DaysOfWeekEnum', 'Saturday'), - ('{tns}DaysOfWeekEnum', 'Sunday'), - ] - - DaysOfWeekEnumArray = Array(DaysOfWeekEnum) - DaysOfWeekEnumArray.__namespace__ = 'tns' - - elt = etree.Element('test') - XmlDocument().to_parent(None, DaysOfWeekEnumArray, days, - elt, 'test_namespace') - - elt = elt[0] - ret = XmlDocument().from_element(None, Array(DaysOfWeekEnum), elt) - assert days == ret - - print((etree.tostring(elt, pretty_print=True))) - - pprint(self.app.interface.nsmap) - assert days_xml == [ (e.tag, e.text) for e in - elt.xpath('//tns:DaysOfWeekEnum', namespaces=self.app.interface.nsmap)] - - def test_serialize_simple_array(self): - t = SomeClass(days=[ - DaysOfWeekEnum.Monday, - DaysOfWeekEnum.Tuesday, - DaysOfWeekEnum.Wednesday, - DaysOfWeekEnum.Thursday, - DaysOfWeekEnum.Friday, - DaysOfWeekEnum.Saturday, - DaysOfWeekEnum.Sunday, - ]) - - SomeClass.resolve_namespace(SomeClass, 'tns') - - elt = etree.Element('test') - XmlDocument().to_parent(None, SomeClass, t, elt, 'test_namespace') - elt = elt[0] - - print((etree.tostring(elt, pretty_print=True))) - - ret = XmlDocument().from_element(None, SomeClass, elt) - self.assertEqual(t.days, ret.days) - -if __name__ == '__main__': - unittest.main() diff --git 
a/libs_crutch/contrib/spyne/test/model/test_exception.py b/libs_crutch/contrib/spyne/test/model/test_exception.py deleted file mode 100644 index 1b436a3..0000000 --- a/libs_crutch/contrib/spyne/test/model/test_exception.py +++ /dev/null @@ -1,200 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import unittest -from spyne.test import FakeApp -from spyne.interface import Interface -from spyne.interface.wsdl import Wsdl11 -from spyne.protocol.xml import XmlDocument -from spyne.model.fault import Fault - -class FaultTests(unittest.TestCase): - def test_ctor_defaults(self): - fault = Fault() - self.assertEqual(fault.faultcode, 'Server') - self.assertEqual(fault.faultstring, 'Fault') - self.assertEqual(fault.faultactor, '') - self.assertEqual(fault.detail, None) - self.assertEqual(repr(fault), "Fault(Server: 'Fault')") - - def test_ctor_faultcode_w_senv_prefix(self): - fault = Fault(faultcode='Other') - self.assertEqual(fault.faultcode, 'Other') - self.assertEqual(repr(fault), "Fault(Other: 'Fault')") - - def test_ctor_explicit_faultstring(self): - fault = Fault(faultstring='Testing') - self.assertEqual(fault.faultstring, 'Testing') - self.assertEqual(repr(fault), "Fault(Server: 'Testing')") - - def 
test_to_parent_wo_detail(self): - from lxml.etree import Element - import spyne.const.xml - ns_soap_env = spyne.const.xml.NS_SOAP11_ENV - soap_env = spyne.const.xml.PREFMAP[spyne.const.xml.NS_SOAP11_ENV] - - element = Element('testing') - fault = Fault() - cls = Fault - - XmlDocument().to_parent(None, cls, fault, element, 'urn:ignored') - - (child,) = element.getchildren() - self.assertEqual(child.tag, '{%s}Fault' % ns_soap_env) - self.assertEqual(child.find('faultcode').text, '%s:Server' % soap_env) - self.assertEqual(child.find('faultstring').text, 'Fault') - self.assertEqual(child.find('faultactor').text, '') - self.assertFalse(child.findall('detail')) - - def test_to_parent_w_detail(self): - from lxml.etree import Element - element = Element('testing') - detail = Element('something') - fault = Fault(detail=detail) - cls = Fault - - XmlDocument().to_parent(None, cls, fault, element, 'urn:ignored') - - (child,) = element.getchildren() - self.assertTrue(child.find('detail').find('something') is detail) - - def test_from_xml_wo_detail(self): - from lxml.etree import Element - from lxml.etree import SubElement - from spyne.const.xml import PREFMAP, SOAP11_ENV, NS_SOAP11_ENV - - soap_env = PREFMAP[NS_SOAP11_ENV] - element = Element(SOAP11_ENV('Fault')) - - fcode = SubElement(element, 'faultcode') - fcode.text = '%s:other' % soap_env - fstr = SubElement(element, 'faultstring') - fstr.text = 'Testing' - actor = SubElement(element, 'faultactor') - actor.text = 'phreddy' - - fault = XmlDocument().from_element(None, Fault, element) - - self.assertEqual(fault.faultcode, '%s:other' % soap_env) - self.assertEqual(fault.faultstring, 'Testing') - self.assertEqual(fault.faultactor, 'phreddy') - self.assertEqual(fault.detail, None) - - def test_from_xml_w_detail(self): - from lxml.etree import Element - from lxml.etree import SubElement - from spyne.const.xml import SOAP11_ENV - - element = Element(SOAP11_ENV('Fault')) - fcode = SubElement(element, 'faultcode') - fcode.text = 
'soap11env:other' - fstr = SubElement(element, 'faultstring') - fstr.text = 'Testing' - actor = SubElement(element, 'faultactor') - actor.text = 'phreddy' - detail = SubElement(element, 'detail') - - fault = XmlDocument().from_element(None, Fault, element) - - self.assertTrue(fault.detail is detail) - - def test_add_to_schema_no_extends(self): - from spyne.const.xml import XSD - - class cls(Fault): - __namespace__='ns' - @classmethod - def get_type_name_ns(self, app): - return 'testing:My' - - interface = Interface(FakeApp()) - interface.add_class(cls) - - pref = cls.get_namespace_prefix(interface) - wsdl = Wsdl11(interface) - wsdl.build_interface_document('prot://addr') - schema = wsdl.get_schema_info(pref) - - self.assertEqual(len(schema.types), 1) - c_cls = interface.classes['{ns}cls'] - c_elt = schema.types[0] - self.assertTrue(c_cls is cls) - self.assertEqual(c_elt.tag, XSD('complexType')) - self.assertEqual(c_elt.get('name'), 'cls') - - self.assertEqual(len(schema.elements), 1) - e_elt = schema.elements.values()[0] - self.assertEqual(e_elt.tag, XSD('element')) - self.assertEqual(e_elt.get('name'), 'cls') - self.assertEqual(e_elt.get('type'), 'testing:My') - self.assertEqual(len(e_elt), 0) - - def test_add_to_schema_w_extends(self): - from spyne.const.xml import XSD - - class base(Fault): - __namespace__ = 'ns' - - @classmethod - def get_type_name_ns(self, app): - return 'testing:Base' - - class cls(Fault): - __namespace__ = 'ns' - @classmethod - def get_type_name_ns(self, app): - return 'testing:My' - - interface = Interface(FakeApp()) - interface.add_class(cls) - - pref = cls.get_namespace_prefix(interface) - wsdl = Wsdl11(interface) - wsdl.build_interface_document('prot://addr') - schema = wsdl.get_schema_info(pref) - - self.assertEqual(len(schema.types), 1) - self.assertEqual(len(interface.classes), 1) - - c_cls = next(iter(interface.classes.values())) - c_elt = next(iter(schema.types.values())) - - self.assertTrue(c_cls is cls) - 
self.assertEqual(c_elt.tag, XSD('complexType')) - self.assertEqual(c_elt.get('name'), 'cls') - - from lxml import etree - print(etree.tostring(c_elt, pretty_print=True)) - self.assertEqual(len(c_elt), 0) - -class DummySchemaEntries: - def __init__(self, app): - self.app = app - self._complex_types = [] - self._elements = [] - - def add_complex_type(self, cls, ct): - self._complex_types.append((cls, ct)) - - def add_element(self, cls, elt): - self._elements.append((cls, elt)) - - -if __name__ == '__main__': #pragma NO COVERAGE - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/model/test_primitive.py b/libs_crutch/contrib/spyne/test/model/test_primitive.py deleted file mode 100644 index cc6c8c2..0000000 --- a/libs_crutch/contrib/spyne/test/model/test_primitive.py +++ /dev/null @@ -1,978 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import re -import uuid -import datetime -import unittest -import warnings - -import pytz -import spyne - -from datetime import timedelta - -from lxml import etree -from spyne.model.primitive._base import re_match_with_span as rmws - -from spyne.util import six -from spyne.const import xml as ns - -from spyne import Null, AnyDict, Uuid, Array, ComplexModel, Date, Time, \ - Boolean, DateTime, Duration, Float, Integer, NumberLimitsWarning, Unicode, \ - String, Decimal, Integer16, ModelBase, MimeType, MimeTypeStrict, MediaType - -from spyne.protocol import ProtocolBase -from spyne.protocol.xml import XmlDocument - -ns_test = 'test_namespace' - - -class TestCast(unittest.TestCase): - pass # TODO: test Unicode(cast=str) - - -class TestPrimitive(unittest.TestCase): - def test_mime_type_family(self): - mime_attr = MimeType.Attributes - mime_strict_attr = MimeTypeStrict.Attributes - assert rmws(mime_attr, u'application/foo') - assert not rmws(mime_attr, u'application/ foo') - assert not rmws(mime_attr, u'application/') - assert rmws(mime_attr, u'foo/bar') - assert not rmws(mime_attr, u'foo/bar ') - assert not rmws(mime_strict_attr, u'foo/bar') - - media_attr = MediaType.Attributes - media_strict_attr = MediaType.Attributes - - print(media_attr.pattern) - - assert rmws(media_attr, u'text/plain; charset="utf-8"') - assert rmws(media_attr, u'text/plain; charset=utf-8') - assert rmws(media_attr, u'text/plain; charset=utf-8 ') - assert rmws(media_attr, u'text/plain; charset=utf-8') - assert rmws(media_attr, u'text/plain; charset=utf-8;') - assert rmws(media_attr, u'text/plain; charset=utf-8; ') - assert not rmws(media_attr, u'text/plain; charset=utf-8; foo') - assert not rmws(media_attr, u'text/plain; charset=utf-8; 
foo=') - assert rmws(media_attr, u'text/plain; charset=utf-8; foo=""') - assert rmws(media_attr, u'text/plain; charset=utf-8; foo="";') - assert rmws(media_attr, u'text/plain; charset=utf-8; foo=""; ') - assert rmws(media_attr, u'text/plain; charset=utf-8; foo=""; ') - assert not rmws(media_attr, u'text/plain;; charset=utf-8; foo=""') - assert not rmws(media_attr, u'text/plain;;; charset=utf-8; foo=""') - assert not rmws(media_attr, u'text/plain; charset=utf-8;; foo=""') - assert not rmws(media_attr, u'text/plain; charset=utf-8;;; foo=""') - assert not rmws(media_attr, u'text/plain; charset=utf-8;;; foo="";') - assert not rmws(media_attr, u'text/plain; charset=utf-8;;; foo=""; ; ') - assert not rmws(media_strict_attr, u'foo/bar;') - assert not rmws(media_strict_attr, u' applicaton/json;') - assert not rmws(media_strict_attr, u'applicaton/json;') - - assert MediaType - - - def test_getitem_cust(self): - assert Unicode[dict(max_len=2)].Attributes.max_len - - def test_ancestors(self): - class A(ComplexModel): i = Integer - class B(A): i2 = Integer - class C(B): i3 = Integer - - assert C.ancestors() == [B, A] - assert B.ancestors() == [A] - assert A.ancestors() == [] - - def test_nillable_quirks(self): - assert ModelBase.Attributes.nillable == True - - class Attributes(ModelBase.Attributes): - nillable = False - nullable = False - - assert Attributes.nillable == False - assert Attributes.nullable == False - - class Attributes(ModelBase.Attributes): - nillable = True - - assert Attributes.nillable == True - assert Attributes.nullable == True - - class Attributes(ModelBase.Attributes): - nillable = False - - assert Attributes.nillable == False - assert Attributes.nullable == False - - class Attributes(ModelBase.Attributes): - nullable = True - - assert Attributes.nillable == True - assert Attributes.nullable == True - - class Attributes(ModelBase.Attributes): - nullable = False - - assert Attributes.nillable == False - assert Attributes.nullable == False - - class 
Attributes(ModelBase.Attributes): - nullable = False - - class Attributes(Attributes): - pass - - assert Attributes.nullable == False - - def test_nillable_inheritance_quirks(self): - class Attributes(ModelBase.Attributes): - nullable = False - - class AttrMixin: - pass - - class NewAttributes(Attributes, AttrMixin): - pass - - assert NewAttributes.nullable is False - - class AttrMixin: - pass - - class NewAttributes(AttrMixin, Attributes): - pass - - assert NewAttributes.nullable is False - - def test_decimal(self): - assert Decimal(10, 4).Attributes.total_digits == 10 - assert Decimal(10, 4).Attributes.fraction_digits == 4 - - def test_decimal_format(self): - f = 123456 - str_format = '${0}' - element = etree.Element('test') - XmlDocument().to_parent(None, Decimal(str_format=str_format), f, - element, ns_test) - element = element[0] - - self.assertEqual(element.text, '$123456') - - def test_string(self): - s = String() - element = etree.Element('test') - XmlDocument().to_parent(None, String, 'value', element, ns_test) - element = element[0] - - self.assertEqual(element.text, 'value') - value = XmlDocument().from_element(None, String, element) - self.assertEqual(value, 'value') - - def test_datetime(self): - n = datetime.datetime.now(pytz.utc) - - element = etree.Element('test') - XmlDocument().to_parent(None, DateTime, n, element, ns_test) - element = element[0] - - self.assertEqual(element.text, n.isoformat()) - dt = XmlDocument().from_element(None, DateTime, element) - self.assertEqual(n, dt) - - def test_datetime_format(self): - n = datetime.datetime.now().replace(microsecond=0) - format = "%Y %m %d %H %M %S" - - element = etree.Element('test') - XmlDocument().to_parent(None, DateTime(dt_format=format), n, element, - ns_test) - element = element[0] - - assert element.text == datetime.datetime.strftime(n, format) - dt = XmlDocument().from_element(None, DateTime(dt_format=format), - element) - assert n == dt - - def test_datetime_unicode_format(self): - n = 
datetime.datetime.now().replace(microsecond=0) - format = u"%Y %m %d\u00a0%H %M %S" - - element = etree.Element('test') - XmlDocument().to_parent(None, DateTime(dt_format=format), n, - element, ns_test) - element = element[0] - - if six.PY2: - assert element.text == n.strftime(format.encode('utf8')) \ - .decode('utf8') - else: - assert element.text == n.strftime(format) - - dt = XmlDocument().from_element(None, DateTime(dt_format=format), - element) - assert n == dt - - def test_date_format(self): - t = datetime.date.today() - format = "%Y-%m-%d" - - element = etree.Element('test') - XmlDocument().to_parent(None, - Date(date_format=format), t, element, ns_test) - assert element[0].text == datetime.date.strftime(t, format) - - dt = XmlDocument().from_element(None, - Date(date_format=format), element[0]) - assert t == dt - - def test_datetime_timezone(self): - import pytz - - n = datetime.datetime.now(pytz.timezone('EST')) - element = etree.Element('test') - cls = DateTime(as_timezone=pytz.utc, timezone=False) - XmlDocument().to_parent(None, cls, n, element, ns_test) - element = element[0] - - c = n.astimezone(pytz.utc).replace(tzinfo=None) - self.assertEqual(element.text, c.isoformat()) - dt = XmlDocument().from_element(None, cls, element) - assert dt.tzinfo is not None - dt = dt.replace(tzinfo=None) - self.assertEqual(c, dt) - - def test_date_timezone(self): - elt = etree.Element('wot') - elt.text = '2013-08-09+02:00' - dt = XmlDocument().from_element(None, Date, elt) - print("ok without validation.") - dt = XmlDocument(validator='soft').from_element(None, Date, elt) - print(dt) - - def test_time(self): - n = datetime.time(1, 2, 3, 4) - - ret = ProtocolBase().to_bytes(Time, n) - self.assertEqual(ret, n.isoformat()) - - dt = ProtocolBase().from_unicode(Time, ret) - self.assertEqual(n, dt) - - def test_time_usec(self): - # python's datetime and time only accept ints between [0, 1e6[ - # if the incoming data is 999999.8 microseconds, rounding it up means - # adding 1 
second to time. For many reasons, we want to avoid that. (see - # http://bugs.python.org/issue1487389) That's why 999999.8 usec is - # rounded to 999999. - - # rounding 0.1 µsec down - t = ProtocolBase().from_unicode(Time, "12:12:12.0000001") - self.assertEqual(datetime.time(12, 12, 12), t) - - # rounding 1.5 µsec up. 0.5 is rounded down by python 3 and up by - # python 2 so we test with 1.5 µsec instead. frikkin' nonsense. - t = ProtocolBase().from_unicode(Time, "12:12:12.0000015") - self.assertEqual(datetime.time(12, 12, 12, 2), t) - - # rounding 999998.8 µsec up - t = ProtocolBase().from_unicode(Time, "12:12:12.9999988") - self.assertEqual(datetime.time(12, 12, 12, 999999), t) - - # rounding 999999.1 µsec down - t = ProtocolBase().from_unicode(Time, "12:12:12.9999991") - self.assertEqual(datetime.time(12, 12, 12, 999999), t) - - # rounding 999999.8 µsec down, not up. - t = ProtocolBase().from_unicode(Time, "12:12:12.9999998") - self.assertEqual(datetime.time(12, 12, 12, 999999), t) - - def test_date(self): - n = datetime.date(2011, 12, 13) - - ret = ProtocolBase().to_unicode(Date, n) - self.assertEqual(ret, n.isoformat()) - - dt = ProtocolBase().from_unicode(Date, ret) - self.assertEqual(n, dt) - - def test_utcdatetime(self): - datestring = '2007-05-15T13:40:44Z' - e = etree.Element('test') - e.text = datestring - - dt = XmlDocument().from_element(None, DateTime, e) - - self.assertEqual(dt.year, 2007) - self.assertEqual(dt.month, 5) - self.assertEqual(dt.day, 15) - - datestring = '2007-05-15T13:40:44.003Z' - e = etree.Element('test') - e.text = datestring - - dt = XmlDocument().from_element(None, DateTime, e) - - self.assertEqual(dt.year, 2007) - self.assertEqual(dt.month, 5) - self.assertEqual(dt.day, 15) - - def test_date_exclusive_boundaries(self): - test_model = Date.customize(gt=datetime.date(2016, 1, 1), - lt=datetime.date(2016, 2, 1)) - self.assertFalse( - test_model.validate_native(test_model, datetime.date(2016, 1, 1))) - self.assertFalse( - 
test_model.validate_native(test_model, datetime.date(2016, 2, 1))) - - def test_date_inclusive_boundaries(self): - test_model = Date.customize(ge=datetime.date(2016, 1, 1), - le=datetime.date(2016, 2, 1)) - self.assertTrue( - test_model.validate_native(test_model, datetime.date(2016, 1, 1))) - self.assertTrue( - test_model.validate_native(test_model, datetime.date(2016, 2, 1))) - - def test_datetime_exclusive_boundaries(self): - test_model = DateTime.customize( - gt=datetime.datetime(2016, 1, 1, 12, 00) - .replace(tzinfo=spyne.LOCAL_TZ), - lt=datetime.datetime(2016, 2, 1, 12, 00) - .replace(tzinfo=spyne.LOCAL_TZ), - ) - self.assertFalse(test_model.validate_native(test_model, - datetime.datetime(2016, 1, 1, 12, 00))) - self.assertFalse(test_model.validate_native(test_model, - datetime.datetime(2016, 2, 1, 12, 00))) - - def test_datetime_inclusive_boundaries(self): - test_model = DateTime.customize( - ge=datetime.datetime(2016, 1, 1, 12, 00) - .replace(tzinfo=spyne.LOCAL_TZ), - le=datetime.datetime(2016, 2, 1, 12, 00) - .replace(tzinfo=spyne.LOCAL_TZ) - ) - - self.assertTrue(test_model.validate_native(test_model, - datetime.datetime(2016, 1, 1, 12, 00))) - self.assertTrue(test_model.validate_native(test_model, - datetime.datetime(2016, 2, 1, 12, 00))) - - def test_time_exclusive_boundaries(self): - test_model = Time.customize(gt=datetime.time(12, 00), - lt=datetime.time(13, 00)) - - self.assertFalse( - test_model.validate_native(test_model, datetime.time(12, 00))) - self.assertFalse( - test_model.validate_native(test_model, datetime.time(13, 00))) - - def test_time_inclusive_boundaries(self): - test_model = Time.customize(ge=datetime.time(12, 00), - le=datetime.time(13, 00)) - - self.assertTrue( - test_model.validate_native(test_model, datetime.time(12, 00))) - self.assertTrue( - test_model.validate_native(test_model, datetime.time(13, 00))) - - def test_datetime_extreme_boundary(self): - self.assertTrue( - DateTime.validate_native(DateTime, datetime.datetime.min)) - 
self.assertTrue( - DateTime.validate_native(DateTime, datetime.datetime.max)) - - def test_time_extreme_boundary(self): - self.assertTrue(Time.validate_native(Time, datetime.time(0, 0, 0, 0))) - self.assertTrue( - Time.validate_native(Time, datetime.time(23, 59, 59, 999999))) - - def test_date_extreme_boundary(self): - self.assertTrue(Date.validate_native(Date, datetime.date.min)) - self.assertTrue(Date.validate_native(Date, datetime.date.max)) - - def test_integer(self): - i = 12 - integer = Integer() - - element = etree.Element('test') - XmlDocument().to_parent(None, Integer, i, element, ns_test) - element = element[0] - - self.assertEqual(element.text, '12') - value = XmlDocument().from_element(None, integer, element) - self.assertEqual(value, i) - - def test_integer_limits(self): - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") - - integer = Integer16(ge=-32768) - - assert len(w) == 0 - - integer = Integer16(ge=-32769) - assert len(w) == 1 - assert issubclass(w[-1].category, NumberLimitsWarning) - assert "smaller than min_bound" in str(w[-1].message) - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") - - integer = Integer16(le=32767) - - assert len(w) == 0 - - integer = Integer16(le=32768) - assert len(w) == 1 - assert issubclass(w[-1].category, NumberLimitsWarning) - assert "greater than max_bound" in str(w[-1].message) - - try: - Integer16(ge=32768) - except ValueError: - pass - else: - raise Exception("must fail") - - try: - Integer16(lt=-32768) - except ValueError: - pass - else: - raise Exception("must fail") - - def test_large_integer(self): - i = 128375873458473 - integer = Integer() - - element = etree.Element('test') - XmlDocument().to_parent(None, Integer, i, element, ns_test) - element = element[0] - - self.assertEqual(element.text, '128375873458473') - value = XmlDocument().from_element(None, integer, element) - self.assertEqual(value, i) - - def test_float(self): - f = 1.22255645 
- - element = etree.Element('test') - XmlDocument().to_parent(None, Float, f, element, ns_test) - element = element[0] - - self.assertEqual(element.text, repr(f)) - - f2 = XmlDocument().from_element(None, Float, element) - self.assertEqual(f2, f) - - def test_array(self): - type = Array(String) - type.resolve_namespace(type, "zbank") - - values = ['a', 'b', 'c', 'd', 'e', 'f'] - - element = etree.Element('test') - XmlDocument().to_parent(None, type, values, element, ns_test) - element = element[0] - - self.assertEqual(len(values), len(element.getchildren())) - - values2 = XmlDocument().from_element(None, type, element) - self.assertEqual(values[3], values2[3]) - - def test_array_empty(self): - type = Array(String) - type.resolve_namespace(type, "zbank") - - values = [] - - element = etree.Element('test') - XmlDocument().to_parent(None, type, values, element, ns_test) - element = element[0] - - self.assertEqual(len(values), len(element.getchildren())) - - values2 = XmlDocument().from_element(None, type, element) - self.assertEqual(len(values2), 0) - - def test_unicode(self): - s = u'\x34\x55\x65\x34' - self.assertEqual(4, len(s)) - element = etree.Element('test') - XmlDocument().to_parent(None, String, s, element, 'test_ns') - element = element[0] - value = XmlDocument().from_element(None, String, element) - self.assertEqual(value, s) - - def test_unicode_pattern_mult_cust(self): - assert Unicode(pattern='a').Attributes.pattern == 'a' - assert Unicode(pattern='a')(5).Attributes.pattern == 'a' - - def test_unicode_upattern(self): - patt = r'[\w .-]+' - attr = Unicode(unicode_pattern=patt).Attributes - assert attr.pattern == patt - assert attr._pattern_re.flags & re.UNICODE - assert attr._pattern_re.match(u"Ğ Ğ ç .-") - assert attr._pattern_re.match(u"\t") is None - - def test_unicode_nullable_mult_cust_false(self): - assert Unicode(nullable=False).Attributes.nullable == False - assert Unicode(nullable=False)(5).Attributes.nullable == False - - def 
test_unicode_nullable_mult_cust_true(self): - assert Unicode(nullable=True).Attributes.nullable == True - assert Unicode(nullable=True)(5).Attributes.nullable == True - - def test_null(self): - element = etree.Element('test') - XmlDocument().to_parent(None, Null, None, element, ns_test) - print(etree.tostring(element)) - - element = element[0] - self.assertTrue(bool(element.attrib.get(ns.XSI('nil')))) - value = XmlDocument().from_element(None, Null, element) - self.assertEqual(None, value) - - def test_point(self): - from spyne.model.primitive.spatial import _get_point_pattern - - a = re.compile(_get_point_pattern(2)) - assert a.match('POINT (10 40)') is not None - assert a.match('POINT(10 40)') is not None - - assert a.match('POINT(10.0 40)') is not None - assert a.match('POINT(1.310e4 40)') is not None - - def test_multipoint(self): - from spyne.model.primitive.spatial import _get_multipoint_pattern - - a = re.compile(_get_multipoint_pattern(2)) - assert a.match('MULTIPOINT (10 40, 40 30, 20 20, 30 10)') is not None - # FIXME: - # assert a.match('MULTIPOINT ((10 40), (40 30), (20 20), (30 10))') is not None - - def test_linestring(self): - from spyne.model.primitive.spatial import _get_linestring_pattern - - a = re.compile(_get_linestring_pattern(2)) - assert a.match('LINESTRING (30 10, 10 30, 40 40)') is not None - - def test_multilinestring(self): - from spyne.model.primitive.spatial import _get_multilinestring_pattern - - a = re.compile(_get_multilinestring_pattern(2)) - assert a.match('''MULTILINESTRING ((10 10, 20 20, 10 40), - (40 40, 30 30, 40 20, 30 10))''') is not None - - def test_polygon(self): - from spyne.model.primitive.spatial import _get_polygon_pattern - - a = re.compile(_get_polygon_pattern(2)) - assert a.match( - 'POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))') is not None - - def test_multipolygon(self): - from spyne.model.primitive.spatial import _get_multipolygon_pattern - - a = re.compile(_get_multipolygon_pattern(2)) - assert 
a.match('''MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)), - ((15 5, 40 10, 10 20, 5 10, 15 5)))''') is not None - assert a.match('''MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)), - ((20 35, 45 20, 30 5, 10 10, 10 30, 20 35), - (30 20, 20 25, 20 15, 30 20)))''') is not None - - def test_boolean(self): - b = etree.Element('test') - XmlDocument().to_parent(None, Boolean, True, b, ns_test) - b = b[0] - self.assertEqual('true', b.text) - - b = etree.Element('test') - XmlDocument().to_parent(None, Boolean, 0, b, ns_test) - b = b[0] - self.assertEqual('false', b.text) - - b = etree.Element('test') - XmlDocument().to_parent(None, Boolean, 1, b, ns_test) - b = b[0] - self.assertEqual('true', b.text) - - b = XmlDocument().from_element(None, Boolean, b) - self.assertEqual(b, True) - - b = etree.Element('test') - XmlDocument().to_parent(None, Boolean, False, b, ns_test) - b = b[0] - self.assertEqual('false', b.text) - - b = XmlDocument().from_element(None, Boolean, b) - self.assertEqual(b, False) - - b = etree.Element('test') - XmlDocument().to_parent(None, Boolean, None, b, ns_test) - b = b[0] - self.assertEqual('true', b.get(ns.XSI('nil'))) - - b = XmlDocument().from_element(None, Boolean, b) - self.assertEqual(b, None) - - def test_new_type(self): - """Customized primitives go into namespace based on module name.""" - custom_type = Unicode(pattern='123') - self.assertEqual(custom_type.get_namespace(), custom_type.__module__) - - def test_default_nullable(self): - """Test if default nullable changes nullable attribute.""" - try: - self.assertTrue(Unicode.Attributes.nullable) - orig_default = Unicode.Attributes.NULLABLE_DEFAULT - Unicode.Attributes.NULLABLE_DEFAULT = False - self.assertFalse(Unicode.Attributes.nullable) - self.assertFalse(Unicode.Attributes.nillable) - finally: - Unicode.Attributes.NULLABLE_DEFAULT = orig_default - self.assertEqual(Unicode.Attributes.nullable, orig_default) - - def test_simple_type_explicit_customization(self): - assert 
Unicode(max_len=5).__extends__ is not None - assert Unicode.customize(max_len=5).__extends__ is not None - - def test_anydict_customization(self): - from spyne.model import json - assert isinstance( - AnyDict.customize(store_as='json').Attributes.store_as, json) - - def test_uuid_serialize(self): - value = uuid.UUID('12345678123456781234567812345678') - - assert ProtocolBase().to_unicode(Uuid, value) \ - == '12345678-1234-5678-1234-567812345678' - assert ProtocolBase().to_unicode(Uuid(serialize_as='hex'), value) \ - == '12345678123456781234567812345678' - assert ProtocolBase().to_unicode(Uuid(serialize_as='urn'), value) \ - == 'urn:uuid:12345678-1234-5678-1234-567812345678' - assert ProtocolBase().to_unicode(Uuid(serialize_as='bytes'), value) \ - == b'\x124Vx\x124Vx\x124Vx\x124Vx' - assert ProtocolBase().to_unicode(Uuid(serialize_as='bytes_le'), value) \ - == b'xV4\x124\x12xV\x124Vx\x124Vx' - assert ProtocolBase().to_unicode(Uuid(serialize_as='fields'), value) \ - == (305419896, 4660, 22136, 18, 52, 95073701484152) - assert ProtocolBase().to_unicode(Uuid(serialize_as='int'), value) \ - == 24197857161011715162171839636988778104 - - def test_uuid_deserialize(self): - value = uuid.UUID('12345678123456781234567812345678') - - assert ProtocolBase().from_unicode(Uuid, - '12345678-1234-5678-1234-567812345678') == value - assert ProtocolBase().from_unicode(Uuid(serialize_as='hex'), - '12345678123456781234567812345678') == value - assert ProtocolBase().from_unicode(Uuid(serialize_as='urn'), - 'urn:uuid:12345678-1234-5678-1234-567812345678') == value - assert ProtocolBase().from_bytes(Uuid(serialize_as='bytes'), - b'\x124Vx\x124Vx\x124Vx\x124Vx') == value - assert ProtocolBase().from_bytes(Uuid(serialize_as='bytes_le'), - b'xV4\x124\x12xV\x124Vx\x124Vx') == value - assert ProtocolBase().from_unicode(Uuid(serialize_as='fields'), - (305419896, 4660, 22136, 18, 52, 95073701484152)) == value - assert ProtocolBase().from_unicode(Uuid(serialize_as='int'), - 
24197857161011715162171839636988778104) == value - - def test_uuid_validate(self): - assert Uuid.validate_string(Uuid, - '12345678-1234-5678-1234-567812345678') - assert Uuid.validate_native(Uuid, - uuid.UUID('12345678-1234-5678-1234-567812345678')) - - def test_datetime_serialize_as(self): - i = 1234567890123456 - v = datetime.datetime.fromtimestamp(i / 1e6) - - assert ProtocolBase().to_unicode( - DateTime(serialize_as='sec'), v) == i//1e6 - assert ProtocolBase().to_unicode( - DateTime(serialize_as='sec_float'), v) == i/1e6 - assert ProtocolBase().to_unicode( - DateTime(serialize_as='msec'), v) == i//1e3 - assert ProtocolBase().to_unicode( - DateTime(serialize_as='msec_float'), v) == i/1e3 - assert ProtocolBase().to_unicode( - DateTime(serialize_as='usec'), v) == i - - def test_datetime_deserialize(self): - i = 1234567890123456 - v = datetime.datetime.fromtimestamp(i / 1e6) - - assert ProtocolBase().from_unicode( - DateTime(serialize_as='sec'), i//1e6) == \ - datetime.datetime.fromtimestamp(i//1e6) - assert ProtocolBase().from_unicode( - DateTime(serialize_as='sec_float'), i/1e6) == v - - assert ProtocolBase().from_unicode( - DateTime(serialize_as='msec'), i//1e3) == \ - datetime.datetime.fromtimestamp(i/1e3//1000) - assert ProtocolBase().from_unicode( - DateTime(serialize_as='msec_float'), i/1e3) == v - - assert ProtocolBase().from_unicode( - DateTime(serialize_as='usec'), i) == v - - def test_datetime_ancient(self): - t = DateTime(dt_format="%Y-%m-%d %H:%M:%S") # to trigger strftime - v = datetime.datetime(1881, 1, 1) - vs = '1881-01-01 00:00:00' - - dt = ProtocolBase().from_unicode(t, vs) - self.assertEqual(v, dt) - - dt = ProtocolBase().to_unicode(t, v) - self.assertEqual(vs, dt) - - def test_custom_strftime(self): - s = ProtocolBase.strftime(datetime.date(1800, 9, 23), - "%Y has the same days as 1980 and 2008") - if s != "1800 has the same days as 1980 and 2008": - raise AssertionError(s) - - print("Testing all day names from 0001/01/01 until 2000/08/01") - # 
Get the weekdays. Can't hard code them; they could be - # localized. - days = [] - for i in range(1, 10): - days.append(datetime.date(2000, 1, i).strftime("%A")) - nextday = {} - for i in range(8): - nextday[days[i]] = days[i + 1] - - startdate = datetime.date(1, 1, 1) - enddate = datetime.date(2000, 8, 1) - prevday = ProtocolBase.strftime(startdate, "%A") - one_day = datetime.timedelta(1) - - testdate = startdate + one_day - while testdate < enddate: - if (testdate.day == 1 and testdate.month == 1 and - (testdate.year % 100 == 0)): - print("Testing century", testdate.year) - day = ProtocolBase.strftime(testdate, "%A") - if nextday[prevday] != day: - raise AssertionError(str(testdate)) - prevday = day - testdate = testdate + one_day - - def test_datetime_usec(self): - # see the comments on time test for why the rounding here is weird - - # rounding 0.1 µsec down - dt = ProtocolBase().from_unicode(DateTime, - "2015-01-01 12:12:12.0000001") - self.assertEqual(datetime.datetime(2015, 1, 1, 12, 12, 12), dt) - - # rounding 1.5 µsec up. 0.5 is rounded down by python 3 and up by - # python 2 so we test with 1.5 µsec instead. frikkin' nonsense. - dt = ProtocolBase().from_unicode(DateTime, - "2015-01-01 12:12:12.0000015") - self.assertEqual(datetime.datetime(2015, 1, 1, 12, 12, 12, 2), dt) - - # rounding 999998.8 µsec up - dt = ProtocolBase().from_unicode(DateTime, - "2015-01-01 12:12:12.9999988") - self.assertEqual(datetime.datetime(2015, 1, 1, 12, 12, 12, 999999), dt) - - # rounding 999999.1 µsec down - dt = ProtocolBase().from_unicode(DateTime, - "2015-01-01 12:12:12.9999991") - self.assertEqual(datetime.datetime(2015, 1, 1, 12, 12, 12, 999999), dt) - - # rounding 999999.8 µsec down, not up. 
- dt = ProtocolBase().from_unicode(DateTime, - "2015-01-01 12:12:12.9999998") - self.assertEqual(datetime.datetime(2015, 1, 1, 12, 12, 12, 999999), dt) - - -### Duration Data Type -## http://www.w3schools.com/schema/schema_dtypes_date.asp -# Duration Data type -# The time interval is specified in the following form "PnYnMnDTnHnMnS" where: -# P indicates the period (required) -# nY indicates the number of years -# nM indicates the number of months -# nD indicates the number of days -# T indicates the start of a time section (*required* if you are going to -# specify hours, minutes, seconds or microseconds) -# nH indicates the number of hours -# nM indicates the number of minutes -# nS indicates the number of seconds - -class SomeBlob(ComplexModel): - __namespace__ = 'myns' - howlong = Duration() - - -class TestDurationPrimitive(unittest.TestCase): - def test_onehour_oneminute_onesecond(self): - answer = 'PT1H1M1S' - gg = SomeBlob() - gg.howlong = timedelta(hours=1, minutes=1, seconds=1) - - element = etree.Element('test') - XmlDocument().to_parent(None, SomeBlob, gg, element, gg.get_namespace()) - element = element[0] - - print(gg.howlong) - print(etree.tostring(element, pretty_print=True)) - assert element[0].text == answer - - data = element.find('{%s}howlong' % gg.get_namespace()).text - self.assertEqual(data, answer) - s1 = XmlDocument().from_element(None, SomeBlob, element) - assert s1.howlong.total_seconds() == gg.howlong.total_seconds() - - def test_4suite(self): - # borrowed from 4Suite - tests_seconds = [ - (0, u'PT0S'), - (1, u'PT1S'), - (59, u'PT59S'), - (60, u'PT1M'), - (3599, u'PT59M59S'), - (3600, u'PT1H'), - (86399, u'PT23H59M59S'), - (86400, u'P1D'), - (86400 * 60, u'P60D'), - (86400 * 400, u'P400D') - ] - - for secs, answer in tests_seconds: - gg = SomeBlob() - gg.howlong = timedelta(seconds=secs) - - element = etree.Element('test') - XmlDocument()\ - .to_parent(None, SomeBlob, gg, element, gg.get_namespace()) - element = element[0] - - 
print(gg.howlong) - print(etree.tostring(element, pretty_print=True)) - assert element[0].text == answer - - data = element.find('{%s}howlong' % gg.get_namespace()).text - self.assertEqual(data, answer) - s1 = XmlDocument().from_element(None, SomeBlob, element) - assert s1.howlong.total_seconds() == secs - - for secs, answer in tests_seconds: - if secs > 0: - secs *= -1 - answer = '-' + answer - gg = SomeBlob() - gg.howlong = timedelta(seconds=secs) - - element = etree.Element('test') - XmlDocument()\ - .to_parent(None, SomeBlob, gg, element, gg.get_namespace()) - element = element[0] - - print(gg.howlong) - print(etree.tostring(element, pretty_print=True)) - assert element[0].text == answer - - data = element.find('{%s}howlong' % gg.get_namespace()).text - self.assertEqual(data, answer) - s1 = XmlDocument().from_element(None, SomeBlob, element) - assert s1.howlong.total_seconds() == secs - - def test_duration_positive_seconds_only(self): - answer = 'PT35S' - gg = SomeBlob() - gg.howlong = timedelta(seconds=35) - - element = etree.Element('test') - XmlDocument().to_parent(None, SomeBlob, gg, element, gg.get_namespace()) - element = element[0] - - print(gg.howlong) - print(etree.tostring(element, pretty_print=True)) - assert element[0].text == answer - - data = element.find('{%s}howlong' % gg.get_namespace()).text - self.assertEqual(data, answer) - s1 = XmlDocument().from_element(None, SomeBlob, element) - assert s1.howlong.total_seconds() == gg.howlong.total_seconds() - - def test_duration_positive_minutes_and_seconds_only(self): - answer = 'PT5M35S' - gg = SomeBlob() - gg.howlong = timedelta(minutes=5, seconds=35) - - element = etree.Element('test') - XmlDocument().to_parent(None, SomeBlob, gg, element, gg.get_namespace()) - element = element[0] - - print(gg.howlong) - print(etree.tostring(element, pretty_print=True)) - assert element[0].text == answer - - data = element.find('{%s}howlong' % gg.get_namespace()).text - self.assertEqual(data, answer) - s1 = 
XmlDocument().from_element(None, SomeBlob, element) - assert s1.howlong.total_seconds() == gg.howlong.total_seconds() - - def test_duration_positive_milliseconds_only(self): - answer = 'PT0.666000S' - gg = SomeBlob() - gg.howlong = timedelta(milliseconds=666) - - element = etree.Element('test') - XmlDocument().to_parent(None, SomeBlob, gg, element, gg.get_namespace()) - element = element[0] - - print(gg.howlong) - print(etree.tostring(element, pretty_print=True)) - assert element[0].text == answer - - data = element.find('{%s}howlong' % gg.get_namespace()).text - self.assertEqual(data, answer) - s1 = XmlDocument().from_element(None, SomeBlob, element) - assert s1.howlong.total_seconds() == gg.howlong.total_seconds() - - def test_duration_xml_duration(self): - dur = datetime.timedelta(days=5 + 30 + 365, hours=1, minutes=1, - seconds=12, microseconds=8e5) - - str1 = 'P400DT3672.8S' - str2 = 'P1Y1M5DT1H1M12.8S' - - self.assertEqual(dur, ProtocolBase().from_unicode(Duration, str1)) - self.assertEqual(dur, ProtocolBase().from_unicode(Duration, str2)) - - self.assertEqual(dur, ProtocolBase().from_unicode(Duration, - ProtocolBase().to_unicode(Duration, dur))) - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/multipython/__init__.py b/libs_crutch/contrib/spyne/test/multipython/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/contrib/spyne/test/multipython/model/__init__.py b/libs_crutch/contrib/spyne/test/multipython/model/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/contrib/spyne/test/multipython/model/test_complex.py b/libs_crutch/contrib/spyne/test/multipython/model/test_complex.py deleted file mode 100644 index 9aa8d1c..0000000 --- a/libs_crutch/contrib/spyne/test/multipython/model/test_complex.py +++ /dev/null @@ -1,179 +0,0 @@ -# coding: utf-8 -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -"""Complex model tests runnable on different Python implementations.""" - -import unittest - -from spyne.model.complex import (ComplexModel, ComplexModelMeta, - ComplexModelBase, Array) -from spyne.model.primitive import Unicode, Integer, String -from spyne.util.six import add_metaclass - - -class DeclareOrder_declare(ComplexModel.customize(declare_order='declared')): - field3 = Integer - field1 = Integer - field2 = Integer - - -class MyComplexModelMeta(ComplexModelMeta): - """Custom complex model metaclass.""" - - def __new__(mcs, name, bases, attrs): - attrs['new_field'] = Unicode - attrs['field1'] = Unicode - new_cls = super(MyComplexModelMeta, mcs).__new__(mcs, name, bases, - attrs) - return new_cls - - -@add_metaclass(MyComplexModelMeta) -class MyComplexModel(ComplexModelBase): - """Custom complex model class.""" - class Attributes(ComplexModelBase.Attributes): - declare_order = 'declared' - - -class MyModelWithDeclaredOrder(MyComplexModel): - """Test model for complex model with custom metaclass.""" - class Attributes(MyComplexModel.Attributes): - declare_order = 'declared' - - field3 = Integer - field1 = Integer - field2 = Integer - - -class TestComplexModel(unittest.TestCase): - def test_add_field(self): - class 
C(ComplexModel): - u = Unicode - C.append_field('i', Integer) - assert C._type_info['i'] is Integer - - def test_insert_field(self): - class C(ComplexModel): - u = Unicode - C.insert_field(0, 'i', Integer) - assert C._type_info.keys() == ['i', 'u'] - - def test_variants(self): - class C(ComplexModel): - u = Unicode - CC = C.customize(child_attrs=dict(u=dict(min_len=5))) - print(dict(C.Attributes._variants.items())) - r, = C.Attributes._variants - assert r is CC - assert CC.Attributes.parent_variant is C - C.append_field('i', Integer) - assert C._type_info['i'] is Integer - assert CC._type_info['i'] is Integer - - def test_child_customization(self): - class C(ComplexModel): - u = Unicode - CC = C.customize(child_attrs=dict(u=dict(min_len=5))) - assert CC._type_info['u'].Attributes.min_len == 5 - assert C._type_info['u'].Attributes.min_len != 5 - - def test_array_customization(self): - CC = Array(Unicode).customize( - serializer_attrs=dict(min_len=5), punks='roll', - ) - assert CC.Attributes.punks == 'roll' - assert CC._type_info[0].Attributes.min_len == 5 - - def test_array_customization_complex(self): - class C(ComplexModel): - u = Unicode - - CC = Array(C).customize( - punks='roll', - serializer_attrs=dict(bidik=True) - ) - assert CC.Attributes.punks == 'roll' - assert CC._type_info[0].Attributes.bidik == True - - def test_delayed_child_customization_append(self): - class C(ComplexModel): - u = Unicode - CC = C.customize(child_attrs=dict(i=dict(ge=5))) - CC.append_field('i', Integer) - assert CC._type_info['i'].Attributes.ge == 5 - assert not 'i' in C._type_info - - def test_delayed_child_customization_insert(self): - class C(ComplexModel): - u = Unicode - CC = C.customize(child_attrs=dict(i=dict(ge=5))) - CC.insert_field(1, 'i', Integer) - assert CC._type_info['i'].Attributes.ge == 5 - assert not 'i' in C._type_info - - def test_array_member_name(self): - print(Array(String, member_name="punk")._type_info) - assert 'punk' in Array(String, 
member_name="punk")._type_info - - def test_customize(self): - class Base(ComplexModel): - class Attributes(ComplexModel.Attributes): - prop1 = 3 - prop2 = 6 - - Base2 = Base.customize(prop1=4) - - self.assertNotEquals(Base.Attributes.prop1, Base2.Attributes.prop1) - self.assertEqual(Base.Attributes.prop2, Base2.Attributes.prop2) - - class Derived(Base): - class Attributes(Base.Attributes): - prop3 = 9 - prop4 = 12 - - Derived2 = Derived.customize(prop1=5, prop3=12) - - self.assertEqual(Base.Attributes.prop1, 3) - self.assertEqual(Base2.Attributes.prop1, 4) - - self.assertEqual(Derived.Attributes.prop1, 3) - self.assertEqual(Derived2.Attributes.prop1, 5) - - self.assertNotEquals(Derived.Attributes.prop3, Derived2.Attributes.prop3) - self.assertEqual(Derived.Attributes.prop4, Derived2.Attributes.prop4) - - Derived3 = Derived.customize(prop3=12) - Base.prop1 = 4 - - # changes made to bases propagate, unless overridden - self.assertEqual(Derived.Attributes.prop1, Base.Attributes.prop1) - self.assertNotEquals(Derived2.Attributes.prop1, Base.Attributes.prop1) - self.assertEqual(Derived3.Attributes.prop1, Base.Attributes.prop1) - - def test_declare_order(self): - self.assertEqual(["field3", "field1", "field2"], - list(DeclareOrder_declare._type_info)) - self.assertEqual(["field3", "field1", "field2", "new_field"], - list(MyModelWithDeclaredOrder._type_info)) - - -if __name__ == '__main__': - import sys - sys.exit(unittest.main()) diff --git a/libs_crutch/contrib/spyne/test/protocol/__init__.py b/libs_crutch/contrib/spyne/test/protocol/__init__.py deleted file mode 100644 index 7250806..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""This is Spyne's test package. - -You are not supposed to import test package from production code because tests -fiddle with global state of Spyne classes. -""" diff --git a/libs_crutch/contrib/spyne/test/protocol/_test_dictdoc.py b/libs_crutch/contrib/spyne/test/protocol/_test_dictdoc.py deleted file mode 100644 index 9dd8352..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/_test_dictdoc.py +++ /dev/null @@ -1,1381 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import unicode_literals - -import logging -logger = logging.getLogger(__name__) - -import unittest - -import uuid -import pytz -import decimal -from spyne.util import six -from spyne.util.dictdoc import get_object_as_dict - -if not six.PY2: - long = int - -from datetime import datetime -from datetime import date -from datetime import time -from datetime import timedelta - -import lxml.etree -import lxml.html - -from lxml.builder import E - -from spyne import MethodContext -from spyne.service import Service -from spyne.server import ServerBase -from spyne.application import Application -from spyne.decorator import srpc, rpc -from spyne.error import ValidationError -from spyne.model.binary import binary_encoding_handlers, File -from spyne.model.complex import ComplexModel -from spyne.model.complex import Iterable -from spyne.model.fault import Fault -from spyne.protocol import ProtocolBase -from spyne.model.binary import ByteArray -from spyne.model.primitive import Decimal -from spyne.model.primitive import Integer -from spyne.model.primitive import String -from spyne.model.primitive import DateTime -from spyne.model.primitive import Mandatory -from spyne.model.primitive import AnyXml -from spyne.model.primitive import AnyHtml -from spyne.model.primitive import AnyDict -from spyne.model.primitive import Unicode -from spyne.model.primitive import AnyUri -from spyne.model.primitive import ImageUri -from spyne.model.primitive import Double -from spyne.model.primitive import Integer8 -from spyne.model.primitive import Time -from spyne.model.primitive import Date -from spyne.model.primitive import Duration -from spyne.model.primitive import Boolean -from spyne.model.primitive import Uuid -from spyne.model.primitive import Point -from 
spyne.model.primitive import Line -from spyne.model.primitive import Polygon -from spyne.model.primitive import MultiPoint -from spyne.model.primitive import MultiLine -from spyne.model.primitive import MultiPolygon - - -def _unbyte(d): - if d is None: - return - - for k, v in list(d.items()): - if isinstance(k, bytes): - del d[k] - d[k.decode('utf8')] = v - - if isinstance(v, dict): - _unbyte(v) - - for k, v in d.items(): - if isinstance(v, (list, tuple)): - l = [] - for sub in v: - if isinstance(sub, dict): - l.append(_unbyte(sub)) - - elif isinstance(sub, bytes): - l.append(sub.decode("utf8")) - - else: - l.append(sub) - - d[k] = tuple(l) - - elif isinstance(v, bytes): - try: - d[k] = v.decode('utf8') - except UnicodeDecodeError: - d[k] = v - - return d - - -def TDry(serializer, _DictDocumentChild, dumps_kwargs=None): - if not dumps_kwargs: - dumps_kwargs = {} - - def _dry_me(services, d, ignore_wrappers=False, complex_as=dict, - just_ctx=False, just_in_object=False, validator=None, - polymorphic=False): - - app = Application(services, 'tns', - in_protocol=_DictDocumentChild( - ignore_wrappers=ignore_wrappers, complex_as=complex_as, - polymorphic=polymorphic, validator=validator, - ), - out_protocol=_DictDocumentChild( - ignore_wrappers=ignore_wrappers, complex_as=complex_as, - polymorphic=polymorphic), - ) - - server = ServerBase(app) - initial_ctx = MethodContext(server, MethodContext.SERVER) - in_string = serializer.dumps(d, **dumps_kwargs) - if not isinstance(in_string, bytes): - in_string = in_string.encode('utf8') - initial_ctx.in_string = [in_string] - - ctx, = server.generate_contexts(initial_ctx, in_string_charset='utf8') - if not just_ctx: - server.get_in_object(ctx) - if not just_in_object: - server.get_out_object(ctx) - server.get_out_string(ctx) - - return ctx - return _dry_me - -def TDictDocumentTest(serializer, _DictDocumentChild, dumps_kwargs=None, - loads_kwargs=None, convert_dict=None): - if not dumps_kwargs: - dumps_kwargs = {} - if not 
loads_kwargs: - loads_kwargs = {} - _dry_me = TDry(serializer, _DictDocumentChild, dumps_kwargs) - - if convert_dict is None: - convert_dict = lambda v: v - - class Test(unittest.TestCase): - def dumps(self, o): - print("using", dumps_kwargs, "to dump", o) - return serializer.dumps(o, **dumps_kwargs) - - def loads(self, o): - return _unbyte(serializer.loads(o, **loads_kwargs)) - - def test_complex_with_only_primitive_fields(self): - class SomeComplexModel(ComplexModel): - i = Integer - s = Unicode - - class SomeService(Service): - @srpc(SomeComplexModel, _returns=SomeComplexModel) - def some_call(scm): - return SomeComplexModel(i=5, s='5x') - - ctx = _dry_me([SomeService], {"some_call":[]}) - - s = self.loads(b''.join(ctx.out_string)) - - s = s["some_callResponse"]["some_callResult"]["SomeComplexModel"] - assert s["i"] == 5 - assert s["s"] in ("5x", b"5x") - - def test_complex(self): - class CM(ComplexModel): - i = Integer - s = Unicode - - class CCM(ComplexModel): - c = CM - i = Integer - s = Unicode - - class SomeService(Service): - @srpc(CCM, _returns=CCM) - def some_call(ccm): - return CCM(c=ccm.c, i=ccm.i, s=ccm.s) - - ctx = _dry_me([SomeService], {"some_call": - {"ccm": {"CCM":{ - "c":{"CM":{"i":3, "s": "3x"}}, - "i":4, - "s": "4x", - }}} - }) - - ret = self.loads(b''.join(ctx.out_string)) - print(ret) - - d = ret['some_callResponse']['some_callResult']['CCM'] - assert d['i'] == 4 - assert d['s'] in ('4x', b'4x') - assert d['c']['CM']['i'] == 3 - assert d['c']['CM']['s'] in ('3x', b'3x') - - def test_multiple_list(self): - class SomeService(Service): - @srpc(Unicode(max_occurs=decimal.Decimal('inf')), - _returns=Unicode(max_occurs=decimal.Decimal('inf'))) - def some_call(s): - return s - - ctx = _dry_me([SomeService], {"some_call":[["a","b"]]}) - - data = b''.join(ctx.out_string) - print(data) - - assert self.loads(data) == \ - {"some_callResponse": {"some_callResult": ("a", "b")}} - - def test_multiple_dict(self): - class SomeService(Service): - 
@srpc(Unicode(max_occurs=decimal.Decimal('inf')), - _returns=Unicode(max_occurs=decimal.Decimal('inf'))) - def some_call(s): - return s - - ctx = _dry_me([SomeService], {"some_call":{"s":["a","b"]}}) - - assert self.loads(b''.join(ctx.out_string)) == \ - {"some_callResponse": {"some_callResult": ("a", "b")}} - - def test_multiple_dict_array(self): - class SomeService(Service): - @srpc(Iterable(Unicode), _returns=Iterable(Unicode)) - def some_call(s): - return s - - ctx = _dry_me([SomeService], {"some_call":{"s":["a","b"]}}) - - assert self.loads(b''.join(ctx.out_string)) == \ - {"some_callResponse": {"some_callResult": ("a", "b")}} - - def test_multiple_dict_complex_array(self): - class CM(ComplexModel): - i = Integer - s = Unicode - - class CCM(ComplexModel): - c = CM - i = Integer - s = Unicode - - class ECM(CCM): - d = DateTime - - class SomeService(Service): - @srpc(Iterable(ECM), _returns=Iterable(ECM)) - def some_call(ecm): - return ecm - - ctx = _dry_me([SomeService], { - "some_call": {"ecm": [{"ECM": { - "c": {"CM":{"i":3, "s": "3x"}}, - "i":4, - "s": "4x", - "d": "2011-12-13T14:15:16Z" - }}] - }}) - - print(ctx.in_object) - - ret = self.loads(b''.join(ctx.out_string)) - print(ret) - assert ret["some_callResponse"]['some_callResult'] - assert ret["some_callResponse"]['some_callResult'][0] - assert ret["some_callResponse"]['some_callResult'][0]["ECM"]["c"] - assert ret["some_callResponse"]['some_callResult'][0]["ECM"]["c"]["CM"]["i"] == 3 - assert ret["some_callResponse"]['some_callResult'][0]["ECM"]["c"]["CM"]["s"] in ("3x", b"3x") - assert ret["some_callResponse"]['some_callResult'][0]["ECM"]["i"] == 4 - assert ret["some_callResponse"]['some_callResult'][0]["ECM"]["s"] in ("4x", b"4x") - assert ret["some_callResponse"]['some_callResult'][0]["ECM"]["d"] == "2011-12-13T14:15:16+00:00" - - def test_invalid_request(self): - class SomeService(Service): - @srpc(Integer, String, DateTime) - def yay(i,s,d): - print(i,s,d) - - ctx = _dry_me([SomeService], 
{"some_call": {"yay": []}}, - just_in_object=True) - - print(ctx.in_error) - assert ctx.in_error.faultcode == 'Client.ResourceNotFound' - - def test_invalid_string(self): - class SomeService(Service): - @srpc(Integer, String, DateTime) - def yay(i,s,d): - print(i, s, d) - - ctx = _dry_me([SomeService], {"yay": {"s": 1}}, validator='soft', - just_in_object=True) - - assert ctx.in_error.faultcode == 'Client.ValidationError' - - def test_invalid_number(self): - class SomeService(Service): - @srpc(Integer, String, DateTime) - def yay(i,s,d): - print(i,s,d) - - ctx = _dry_me([SomeService], {"yay": ["s", "B"]}, validator='soft', - just_in_object=True) - - assert ctx.in_error.faultcode == 'Client.ValidationError' - - def test_missing_value(self): - class SomeService(Service): - @srpc(Integer, Unicode, Mandatory.DateTime) - def yay(i, s, d): - print(i, s, d) - - ctx = _dry_me([SomeService], {"yay": [1, "B"]}, validator='soft', - just_in_object=True) - - print(ctx.in_error.faultstring) - assert ctx.in_error.faultcode == 'Client.ValidationError' - assert ctx.in_error.faultstring.endswith("at least 1 times.") - - def test_invalid_datetime(self): - class SomeService(Service): - @srpc(Integer, String, Mandatory.DateTime) - def yay(i,s,d): - print(i,s,d) - - ctx = _dry_me([SomeService],{"yay": {"d":"a2011"}},validator='soft', - just_in_object=True) - - assert ctx.in_error.faultcode == 'Client.ValidationError' - - def test_fault_to_dict(self): - class SomeService(Service): - @srpc(_returns=String) - def some_call(): - raise Fault() - - _dry_me([SomeService], {"some_call":[]}) - - def test_prune_none_and_optional(self): - class SomeObject(ComplexModel): - i = Integer - s = String(min_occurs=1) - - class SomeService(Service): - @srpc(_returns=SomeObject) - def some_call(): - return SomeObject() - - ctx = _dry_me([SomeService], {"some_call":[]}) - - ret = self.loads(b''.join(ctx.out_string)) - - assert ret == {"some_callResponse": {'some_callResult': - {'SomeObject': {'s': None}}}} 
- - def test_any_xml(self): - d = lxml.etree.tostring(E('{ns1}x', E('{ns2}Y', "some data")), - encoding='unicode') - - class SomeService(Service): - @srpc(AnyXml, _returns=AnyXml) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == lxml.etree._Element - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_any_html(self): - d = lxml.html.tostring(E('div', E('span', "something")), - encoding='unicode') - - class SomeService(Service): - @srpc(AnyHtml, _returns=AnyHtml) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == lxml.html.HtmlElement - return p - - ctx = _dry_me([SomeService], {"some_call": [d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - - print(s) - print(d) - assert s == d - - def test_any_dict(self): - d = {'helo': 213, 'data': {'nested': [12, 0.3]}} - - class SomeService(Service): - @srpc(AnyDict, _returns=AnyDict) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == dict - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = b''.join(ctx.out_string) - d = self.dumps({"some_callResponse": {"some_callResult": d}}) - - print(s) - print(d) - assert self.loads(s) == self.loads(d) - - def test_unicode(self): - d = u'some string' - - class SomeService(Service): - @srpc(Unicode, _returns=Unicode) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == six.text_type - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_any_uri(self): - d = 'http://example.com/?asd=b12&df=aa#tag' - - class SomeService(Service): - @srpc(AnyUri, _returns=AnyUri) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, 
six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call": [d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_image_uri(self): - d = 'http://example.com/funny.gif' - - class SomeService(Service): - @srpc(ImageUri, _returns=ImageUri) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call": [d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_decimal(self): - d = decimal.Decimal('1e100') - if _DictDocumentChild._decimal_as_string: - d = str(d) - - class SomeService(Service): - @srpc(Decimal, _returns=Decimal) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == decimal.Decimal - return p - - ctx = _dry_me([SomeService], {"some_call": [d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_double(self): - d = 12.3467 - - class SomeService(Service): - @srpc(Double, _returns=Double) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == float - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_integer(self): - d = 5 - - class SomeService(Service): - @srpc(Integer, _returns=Integer) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == int - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_integer_way_small(self): - d = -1<<1000 - if _DictDocumentChild._huge_numbers_as_string: - d = str(d) - - class 
SomeService(Service): - @srpc(Integer, _returns=Integer) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == long - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - - print(s) - print(d) - assert s == d - - def test_integer_way_big(self): - d = 1<<1000 - if _DictDocumentChild._huge_numbers_as_string: - d = str(d) - - class SomeService(Service): - @srpc(Integer, _returns=Integer) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == long - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_time(self): - d = time(10, 20, 30).isoformat() - - class SomeService(Service): - @srpc(Time, _returns=Time) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == time - assert p.isoformat() == d - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_date(self): - vdt = datetime(2010, 9, 8) - d = vdt.date().isoformat() - - class SomeService(Service): - @srpc(Date, _returns=Date) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == date - assert p.isoformat() == d - return p - - @srpc(_returns=Date) - def some_call_dt(): - return vdt - - ctx = _dry_me([SomeService], {"some_call": [d]}) - s = self.loads(b''.join(ctx.out_string)) - rd = {"some_callResponse": {"some_callResult": d}} - print(s) - print(rd) - assert s == rd - - ctx = _dry_me([SomeService], {"some_call_dt": []}) - s = self.loads(b''.join(ctx.out_string)) - rd = {"some_call_dtResponse": {"some_call_dtResult": d}} - print(s) - print(rd) - assert s == rd - - def test_datetime(self): - d = datetime(2010, 9, 8, 7, 6, 5).isoformat() - - class 
SomeService(Service): - @srpc(DateTime, _returns=DateTime(timezone=False)) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == datetime - assert p.replace(tzinfo=None).isoformat() == d - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}, validator='soft') - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_datetime_tz(self): - d = datetime(2010, 9, 8, 7, 6, 5, tzinfo=pytz.utc).isoformat() - - class SomeService(Service): - @srpc(DateTime, _returns=DateTime(ge=datetime(2010,1,1,tzinfo=pytz.utc))) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == datetime - assert p.isoformat() == d - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}, validator='soft') - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_duration(self): - d = ProtocolBase().to_unicode(Duration, timedelta(0, 45)) - - class SomeService(Service): - @srpc(Duration, _returns=Duration) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == timedelta - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_boolean(self): - d = True - - class SomeService(Service): - @srpc(Boolean, _returns=Boolean) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == bool - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_uuid(self): - d = '7d2a6330-eb64-4900-8a10-38ebef415e9d' - - class SomeService(Service): - @srpc(Uuid, _returns=Uuid) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == uuid.UUID - return p - - ctx = 
_dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_point2d(self): - d = 'POINT(1 2)' - - class SomeService(Service): - @srpc(Point, _returns=Point) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_point3d(self): - d = 'POINT(1 2 3)' - - class SomeService(Service): - @srpc(Point, _returns=Point) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_line2d(self): - d = 'LINESTRING(1 2, 3 4)' - - class SomeService(Service): - @srpc(Line, _returns=Line) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_line3d(self): - d = 'LINESTRING(1 2 3, 4 5 6)' - - class SomeService(Service): - @srpc(Line, _returns=Line) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_polygon2d(self): - d = 'POLYGON((1 1, 1 2, 2 2, 2 1, 1 1))' - - class SomeService(Service): - @srpc(Polygon(2), _returns=Polygon(2)) - def some_call(p): - print(p) - print(type(p)) - assert 
isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_polygon3d(self): - d = 'POLYGON((1 1 0, 1 2 0, 2 2 0, 2 1 0, 1 1 0))' - - class SomeService(Service): - @srpc(Polygon(3), _returns=Polygon(3)) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_multipoint2d(self): - d = 'MULTIPOINT ((10 40), (40 30), (20 20), (30 10))' - - class SomeService(Service): - @srpc(MultiPoint(2), _returns=MultiPoint(2)) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_multipoint3d(self): - d = 'MULTIPOINT (10 40 30, 40 30 10,)' - - class SomeService(Service): - @srpc(MultiPoint(3), _returns=MultiPoint(3)) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_multiline2d(self): - d = 'MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20, 30 10))' - - class SomeService(Service): - @srpc(MultiLine(2), _returns=MultiLine(2)) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": 
d}} - print(s) - print(d) - assert s == d - - def test_multiline3d(self): - d = 'MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20, 30 10))' - - class SomeService(Service): - @srpc(MultiLine(3), _returns=MultiLine(3)) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_multipolygon2d(self): - d = 'MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))' - - class SomeService(Service): - @srpc(MultiPolygon(2), _returns=MultiPolygon(2)) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_multipolygon3d(self): - d = 'MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),' \ - '((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),' \ - '(30 20, 20 25, 20 15, 30 20)))' - - class SomeService(Service): - @srpc(MultiPolygon(3), _returns=MultiPolygon(3)) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, six.string_types) - return p - - ctx = _dry_me([SomeService], {"some_call":[d]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": d}} - print(s) - print(d) - assert s == d - - def test_generator(self): - class SomeService(Service): - @srpc(_returns=Iterable(Integer)) - def some_call(): - return iter(range(1000)) - - ctx = _dry_me([SomeService], {"some_call":[]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": tuple(range(1000))}} - print(s) - print(d) - assert s == d - - def test_bytearray(self): - dbe = _DictDocumentChild.default_binary_encoding - beh = 
binary_encoding_handlers[dbe] - - data = bytes(bytearray(range(0xff))) - encoded_data = beh([data]) - if _DictDocumentChild.text_based: - encoded_data = encoded_data.decode('latin1') - - class SomeService(Service): - @srpc(ByteArray, _returns=ByteArray) - def some_call(ba): - print(ba) - print(type(ba)) - assert isinstance(ba, tuple) - assert ba == (data,) - return ba - - ctx = _dry_me([SomeService], {"some_call": [encoded_data]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": encoded_data}} - - print(repr(s)) - print(repr(d)) - print(repr(encoded_data)) - assert s == d - - def test_file_data(self): - # the only difference with the bytearray test is/are the types - # inside @srpc - dbe = _DictDocumentChild.default_binary_encoding - beh = binary_encoding_handlers[dbe] - - data = bytes(bytearray(range(0xff))) - encoded_data = beh([data]) - if _DictDocumentChild.text_based: - encoded_data = encoded_data.decode('latin1') - - class SomeService(Service): - @srpc(File, _returns=File) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, File.Value) - assert p.data == (data,) - return p.data - - # we put the encoded data in the list of arguments. 
- ctx = _dry_me([SomeService], {"some_call": [encoded_data]}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": encoded_data}} - - print(s) - print(d) - print(repr(encoded_data)) - assert s == d - - def test_file_value(self): - dbe = _DictDocumentChild.default_binary_encoding - beh = binary_encoding_handlers[dbe] - - # Prepare data - v = File.Value( - name='some_file.bin', - type='application/octet-stream', - ) - file_data = bytes(bytearray(range(0xff))) - v.data = (file_data,) - beh([file_data]) - if _DictDocumentChild.text_based: - test_data = beh(v.data).decode('latin1') - else: - test_data = beh(v.data) - - print(repr(v.data)) - - class SomeService(Service): - @srpc(File, _returns=File) - def some_call(p): - print(p) - print(type(p)) - assert isinstance(p, File.Value) - assert p.data == (file_data,) - assert p.type == v.type - assert p.name == v.name - return p - - d = get_object_as_dict(v, File, protocol=_DictDocumentChild, - ignore_wrappers=False) - ctx = _dry_me([SomeService], {"some_call": {'p': d}}) - s = b''.join(ctx.out_string) - d = self.dumps({"some_callResponse": {"some_callResult": { - 'name': v.name, - 'type': v.type, - 'data': test_data, - }}}) - - print(self.loads(s)) - print(self.loads(d)) - print(v) - assert self.loads(s) == self.loads(d) - - def test_validation_frequency(self): - class SomeService(Service): - @srpc(ByteArray(min_occurs=1), _returns=ByteArray) - def some_call(p): - pass - - try: - _dry_me([SomeService], {"some_call": []}, validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - def test_validation_nullable(self): - class SomeService(Service): - @srpc(ByteArray(nullable=False), _returns=ByteArray) - def some_call(p): - pass - - try: - _dry_me([SomeService], {"some_call": [None]}, - validator='soft') - except ValidationError: - pass - - else: - raise Exception("must raise ValidationError") - - def test_validation_string_pattern(self): 
- class SomeService(Service): - @srpc(Uuid) - def some_call(p): - pass - - try: - _dry_me([SomeService], {"some_call": ["duduk"]}, - validator='soft') - except ValidationError as e: - print(e) - pass - - else: - raise Exception("must raise ValidationError") - - def test_validation_integer_range(self): - class SomeService(Service): - @srpc(Integer(ge=0, le=5)) - def some_call(p): - pass - - try: - _dry_me([SomeService], {"some_call": [10]}, - validator='soft') - except ValidationError: - pass - - else: - raise Exception("must raise ValidationError") - - def test_validation_integer_type(self): - class SomeService(Service): - @srpc(Integer8) - def some_call(p): - pass - - try: - _dry_me([SomeService], {"some_call": [-129]}, - validator='soft') - except ValidationError: - pass - - else: - raise Exception("must raise ValidationError") - - def test_validation_integer_type_2(self): - class SomeService(Service): - @srpc(Integer8) - def some_call(p): - pass - - try: - _dry_me([SomeService], {"some_call": [1.2]}, validator='soft') - - except ValidationError: - pass - - else: - raise Exception("must raise ValidationError") - - def test_not_wrapped(self): - class SomeInnerClass(ComplexModel): - d = date - dt = datetime - - class SomeClass(ComplexModel): - a = int - b = Unicode - c = SomeInnerClass.customize(not_wrapped=True) - - class SomeService(Service): - @srpc(SomeClass.customize(not_wrapped=True), - _returns=SomeClass.customize(not_wrapped=True)) - def some_call(p): - assert p.a == 1 - assert p.b == 's' - assert p.c.d == date(2018, 11, 22) - return p - - inner = {"a": 1, "b": "s", "c": {"d": '2018-11-22'}} - doc = {"some_call": [inner]} - ctx = _dry_me([SomeService], doc, validator='soft') - - print(ctx.out_document) - - d = convert_dict({"some_callResponse": {"some_callResult": inner}}) - self.assertEquals(ctx.out_document[0], d) - - def test_validation_freq_parent(self): - class C(ComplexModel): - i = Integer(min_occurs=1) - s = Unicode - - class SomeService(Service): - 
@srpc(C) - def some_call(p): - pass - - try: - # must raise validation error for missing i - _dry_me([SomeService], {"some_call": {'p': {'C': {'s': 'a'}}}}, - validator='soft') - except ValidationError as e: - logger.exception(e) - pass - except BaseException as e: - logger.exception(e) - pass - else: - raise Exception("must raise ValidationError") - - # must not raise anything for missing p because C has min_occurs=0 - _dry_me([SomeService], {"some_call": {}}, validator='soft') - - def test_inheritance(self): - class P(ComplexModel): - identifier = Uuid - signature = Unicode - - class C(P): - foo = Unicode - bar = Uuid - - class SomeService(Service): - @rpc(_returns=C) - def some_call(ctx): - result = C() - result.identifier = uuid.UUID(int=0) - result.signature = 'yyyyyyyyyyy' - result.foo = 'zzzzzz' - result.bar = uuid.UUID(int=1) - return result - - ctx = _dry_me([SomeService], {"some_call": []}) - - s = self.loads(b''.join(ctx.out_string)) - d = {"some_callResponse": {"some_callResult": {"C": { - 'identifier': '00000000-0000-0000-0000-000000000000', - 'bar': '00000000-0000-0000-0000-000000000001', - 'foo': 'zzzzzz', - 'signature': 'yyyyyyyyyyy' - }}}} - - assert s == d - - def test_exclude(self): - class C(ComplexModel): - s1 = Unicode(exc=True) - s2 = Unicode - - class SomeService(Service): - @srpc(C, _returns=C) - def some_call(sc): - assert sc.s1 is None, "sc={}".format(sc) - assert sc.s2 == "s2" - return C(s1="s1", s2="s2") - - doc = [{"C": {"s1": "s1","s2": "s2"}}] - ctx = _dry_me([SomeService], {"some_call": doc}) - - self.assertEquals(ctx.out_document[0], convert_dict( - {'some_callResponse': {'some_callResult': {'C': {'s2': 's2'}}}}) - ) - - def test_polymorphic_deserialization(self): - class P(ComplexModel): - sig = Unicode - - class C(P): - foo = Unicode - - class D(P): - bar = Integer - - class SomeService(Service): - @rpc(P, _returns=Unicode) - def typeof(ctx, p): - return type(p).__name__ - - ctx = _dry_me([SomeService], - {"typeof": 
[{'C':{'sig':'a', 'foo': 'f'}}]}, - polymorphic=True) - - s = self.loads(b''.join(ctx.out_string)) - d = {"typeofResponse": {"typeofResult": 'C'}} - - print(s) - print(d) - assert s == d - - ctx = _dry_me([SomeService], - {"typeof": [{'D':{'sig':'b', 'bar': 5}}]}, - polymorphic=True) - - s = self.loads(b''.join(ctx.out_string)) - d = {"typeofResponse": {"typeofResult": 'D'}} - - print(s) - print(d) - assert s == d - - def test_default(self): - class SomeComplexModel(ComplexModel): - _type_info = [ - ('a', Unicode), - ('b', Unicode(default='default')), - ] - - class SomeService(Service): - @srpc(SomeComplexModel) - def some_method(s): - pass - - ctx = _dry_me([SomeService], - {"some_method": [{"s": {"a": "x", "b": None}}]}, - polymorphic=True) - - assert ctx.in_object.s.b == None - assert ctx.in_error is None - - ctx = _dry_me([SomeService], {"some_method": {"s": {"a": "x"}}}, - polymorphic=True) - - assert ctx.in_object.s.b == 'default' - assert ctx.in_error is None - - def test_nillable_default(self): - class SomeComplexModel(ComplexModel): - _type_info = [ - ('a', Unicode), - ('b', Unicode(min_occurs=1, default='default', nillable=True)), - ] - - class SomeService(Service): - @srpc(SomeComplexModel) - def some_method(s): - pass - - ctx = _dry_me([SomeService], - {"some_method": [{"s": {"a": "x", "b": None}}]}, - polymorphic=True, validator='soft') - - assert ctx.in_object.s.b == None - assert ctx.in_error is None - - ctx = _dry_me([SomeService], {"some_method": {"s": {"a": "x"}}}, - polymorphic=True) - - assert ctx.in_object.s.b == 'default' - assert ctx.in_error is None - - return Test diff --git a/libs_crutch/contrib/spyne/test/protocol/test_cloth.py b/libs_crutch/contrib/spyne/test/protocol/test_cloth.py deleted file mode 100644 index 965d852..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/test_cloth.py +++ /dev/null @@ -1,570 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import logging -logger = logging.getLogger(__name__) - -import unittest - -from lxml import etree, html -from lxml.builder import E - -from spyne import ComplexModel, XmlAttribute, Unicode, Array, Integer, \ - SelfReference, XmlData -from spyne.protocol.cloth import XmlCloth -from spyne.test import FakeContext -from spyne.util.six import BytesIO - - -class TestModelCloth(unittest.TestCase): - def test_root_html(self): - class SomeObject(ComplexModel): - class Attributes(ComplexModel.Attributes): - html_cloth = html.fromstring("") - - assert SomeObject.Attributes._html_cloth is None - assert SomeObject.Attributes._html_root_cloth is not None - - def test_html(self): - class SomeObject(ComplexModel): - class Attributes(ComplexModel.Attributes): - html_cloth = html.fromstring('') - - assert SomeObject.Attributes._html_cloth is not None - assert SomeObject.Attributes._html_root_cloth is None - - def test_root_xml(self): - class SomeObject(ComplexModel): - class Attributes(ComplexModel.Attributes): - xml_cloth = etree.fromstring('') - - assert SomeObject.Attributes._xml_cloth is None - assert SomeObject.Attributes._xml_root_cloth is not None - - def test_xml(self): - class SomeObject(ComplexModel): - 
class Attributes(ComplexModel.Attributes): - xml_cloth = html.fromstring('') - - assert SomeObject.Attributes._xml_cloth is not None - assert SomeObject.Attributes._xml_root_cloth is None - - -class TestXmlClothToParent(unittest.TestCase): - def setUp(self): - self.ctx = FakeContext() - self.stream = BytesIO() - logging.basicConfig(level=logging.DEBUG) - - def _run(self, inst, cls=None): - if cls is None: - cls = inst.__class__ - - with etree.xmlfile(self.stream) as parent: - XmlCloth().subserialize(self.ctx, cls, inst, parent, - name=cls.__name__) - - elt = etree.fromstring(self.stream.getvalue()) - - print(etree.tostring(elt, pretty_print=True)) - return elt - - def test_simple(self): - v = 'punk.' - elt = self._run(v, Unicode) - - assert elt.text == v - - def test_complex_primitive(self): - class SomeObject(ComplexModel): - s = Unicode - - v = 'punk.' - elt = self._run(SomeObject(s=v)) - - assert elt[0].text == v - - def test_complex_inheritance(self): - class A(ComplexModel): - i = Integer - - class B(A): - s = Unicode - - i = 42 - s = 'punk.' - elt = self._run(B(i=i, s=s)) - - # order is important - assert len(elt) == 2 - assert elt[0].text == str(i) - assert elt[1].text == s - - -### !!! WARNING !!! ### !!! WARNING !!! ### !!! WARNING !!! ### !!! WARNING !!! -# -# This test uses spyne_id and spyne_tagbag instead of spyne-id and spyne-tagbag -# for ease of testing. The attributes used here are different from what you are -# going to see in the real-world uses of this functionality. -# You have been warned !!! -# -### !!! WARNING !!! ### !!! WARNING !!! ### !!! WARNING !!! ### !!! WARNING !!! 
-class TestXmlCloth(unittest.TestCase): - def setUp(self): - self.ctx = FakeContext() - self.stream = BytesIO() - logging.basicConfig(level=logging.DEBUG) - - def _run(self, inst, spid=None, cloth=None): - cls = inst.__class__ - if cloth is None: - assert spid is not None - cloth = etree.fromstring("""""" % spid) - else: - assert spid is None - - with etree.xmlfile(self.stream) as parent: - XmlCloth(cloth=cloth).set_identifier_prefix('spyne_') \ - .subserialize(self.ctx, cls, inst, parent) - - elt = etree.fromstring(self.stream.getvalue()) - - print(etree.tostring(elt, pretty_print=True)) - return elt - - def test_simple_value(self): - class SomeObject(ComplexModel): - s = Unicode - - v = 'punk.' - elt = self._run(SomeObject(s=v), spid='s') - - assert elt[0].text == v - - def test_simple_empty(self): - class SomeObject(ComplexModel): - s = Unicode - - elt = self._run(SomeObject(), spid='s') - - assert len(elt) == 0 - - # FIXME: just fix it - def _test_simple_empty_nonoptional(self): - class SomeObject(ComplexModel): - s = Unicode(min_occurs=1) - - elt = self._run(SomeObject(), spid='s') - - assert elt[0].text is None - - # FIXME: just fix it - def _test_simple_empty_nonoptional_clear(self): - class SomeObject(ComplexModel): - s = Unicode(min_occurs=1) - - cloth = etree.fromstring("""oi punk!""") - - elt = self._run(SomeObject(), cloth=cloth) - - assert elt[0].text is None - - def test_xml_data_tag(self): - class SomeObject(ComplexModel): - d = XmlData(Unicode) - - cloth = etree.fromstring('') - - elt = self._run(SomeObject(d='data'), cloth=cloth) - - assert elt.text == 'data' - - def test_xml_data_attr(self): - class SomeObject(ComplexModel): - d = XmlData(Unicode) - - cloth = etree.fromstring('') - - elt = self._run(SomeObject(d='data'), cloth=cloth) - - assert elt.text == 'data' - - def test_xml_data_attr_undesignated(self): - class SomeObject(ComplexModel): - d = Unicode - - cloth = etree.fromstring('') - - elt = self._run(SomeObject(d='data'), cloth=cloth) - - 
assert elt.text == 'data' - - def test_simple_value_xmlattribute(self): - v = 'punk.' - - class SomeObject(ComplexModel): - s = XmlAttribute(Unicode(min_occurs=1)) - - cloth = etree.fromstring("""""") - elt = self._run(SomeObject(s=v), cloth=cloth) - - assert elt.attrib['s'] == v - - def test_simple_value_xmlattribute_subname(self): - v = 'punk.' - - class SomeObject(ComplexModel): - s = XmlAttribute(Unicode(min_occurs=1, sub_name='foo')) - - cloth = etree.fromstring("""""") - elt = self._run(SomeObject(s=v), cloth=cloth) - - assert elt.attrib['foo'] == v - - def test_simple_value_xmlattribute_non_immediate(self): - v = 'punk.' - - class SomeObject(ComplexModel): - s = XmlAttribute(Unicode(min_occurs=1, sub_name='foo')) - - cloth = etree.fromstring("""""") - elt = self._run(SomeObject(s=v), cloth=cloth) - - assert elt.attrib['foo'] == v - assert elt[0].attrib['foo'] == v - - def test_simple_value_xmlattribute_non_immediate_non_designated(self): - v = 'punk.' - - class SomeObject(ComplexModel): - s = Unicode(min_occurs=1, sub_name='foo') - - cloth = etree.fromstring("""""") - elt = self._run(SomeObject(s=v), cloth=cloth) - - assert not 'foo' in elt.attrib - assert elt[0].attrib['foo'] == v - - def test_non_tagbag(self): - cloth = E.a( - E.b( - E.c( - E.d( - spyne_id="i", - ), - spyne_id="c", - ), - spyne_id="i", - ), - spyne_tagbag='', - ) - - class C2(ComplexModel): - i = Integer - - class C1(ComplexModel): - i = Integer - c = C2 - - elt = self._run(C1(i=1, c=C2(i=2)), cloth=cloth) - assert elt.xpath('//b/text()') == ['1'] - # no order guarantee is given - assert set(elt.xpath('//d/text()')) == set(['1', '2']) - - def test_array(self): - v = range(3) - - class SomeObject(ComplexModel): - s = Array(Integer) - - cloth = E.a( - E.b( - E.c(spyne_id="integer"), - spyne_id="s", - ) - ) - - elt = self._run(SomeObject(s=v), cloth=cloth) - - assert elt.xpath('//c/text()') == [str(i) for i in v] - - def test_array_empty(self): - class SomeObject(ComplexModel): - s = 
Array(Integer) - - elt_str = '' - cloth = etree.fromstring(elt_str) - - elt = self._run(SomeObject(), cloth=cloth) - - assert elt.xpath('//c') == [] - - # FIXME: just fix it - def _test_array_empty_nonoptional(self): - class SomeObject(ComplexModel): - s = Array(Integer(min_occurs=1)) - - elt_str = '' - cloth = etree.fromstring(elt_str) - - elt = self._run(SomeObject(), cloth=cloth) - - assert elt.xpath('//c') == [cloth[0][0]] - - def test_simple_two_tags(self): - class SomeObject(ComplexModel): - s = Unicode - i = Integer - - v = SomeObject(s='s', i=5) - - cloth = E.a( - E.b1(), - E.b2( - E.c1(spyne_id="s"), - E.c2(), - ), - E.e( - E.g1(), - E.g2(spyne_id="i"), - E.g3(), - ), - ) - - elt = self._run(v, cloth=cloth) - - print(etree.tostring(elt, pretty_print=True)) - assert elt[0].tag == 'b1' - assert elt[1].tag == 'b2' - assert elt[1][0].tag == 'c1' - assert elt[1][0].text == 's' - assert elt[1][1].tag == 'c2' - assert elt[2].tag == 'e' - assert elt[2][0].tag == 'g1' - assert elt[2][1].tag == 'g2' - assert elt[2][1].text == '5' - assert elt[2][2].tag == 'g3' - - def test_sibling_order(self): - class SomeObject(ComplexModel): - s = Unicode - - v = SomeObject(s='s') - - cloth = E.a( - E.b1(), - E.b2( - E.c0(), - E.c1(), - E.c2(spyne_id="s"), - E.c3(), - E.c4(), - ), - ) - - elt = self._run(v, cloth=cloth) - print(etree.tostring(elt, pretty_print=True)) - assert elt[0].tag == 'b1' - assert elt[1].tag == 'b2' - assert elt[1][0].tag == 'c0' - assert elt[1][1].tag == 'c1' - assert elt[1][2].tag == 'c2' - assert elt[1][2].text == 's' - assert elt[1][3].tag == 'c3' - assert elt[1][4].tag == 'c4' - - def test_parent_text(self): - class SomeObject(ComplexModel): - s = Unicode - - v = SomeObject(s='s') - - cloth = E.a( - "text 0", - E.b1(spyne_id="s"), - ) - - print(etree.tostring(cloth, pretty_print=True)) - elt = self._run(v, cloth=cloth) - print(etree.tostring(elt, pretty_print=True)) - - assert elt.tag == 'a' - assert elt.text == 'text 0' - - assert elt[0].tag == 'b1' - 
assert elt[0].text == 's' - - def test_anc_text(self): - class SomeObject(ComplexModel): - s = Unicode - - v = SomeObject(s='s') - - cloth = E.a( - E.b1( - "text 1", - E.c1(spyne_id="s"), - ) - ) - - print(etree.tostring(cloth, pretty_print=True)) - elt = self._run(v, cloth=cloth) - print(etree.tostring(elt, pretty_print=True)) - - assert elt[0].tag == 'b1' - assert elt[0].text == 'text 1' - assert elt[0][0].tag == 'c1' - assert elt[0][0].text == 's' - - def test_prevsibl_tail(self): - class SomeObject(ComplexModel): - s = Unicode - - v = SomeObject(s='s') - - cloth = E.a( - E.b1( - E.c1(), - "text 2", - E.c2(spyne_id="s"), - ) - ) - - print(etree.tostring(cloth, pretty_print=True)) - elt = self._run(v, cloth=cloth) - print(etree.tostring(elt, pretty_print=True)) - - assert elt[0].tag == 'b1' - assert elt[0][0].tag == 'c1' - assert elt[0][0].tail == 'text 2' - assert elt[0][1].text == 's' - - def test_sibling_tail_close(self): - class SomeObject(ComplexModel): - s = Unicode - - v = SomeObject(s='s') - - cloth = E.a( - E.b0(spyne_id="s"), - "text 3", - ) - - print(etree.tostring(cloth, pretty_print=True)) - elt = self._run(v, cloth=cloth) - print(etree.tostring(elt, pretty_print=True)) - - assert elt[0].tag == 'b0' - assert elt[0].text == 's' - assert elt[0].tail == 'text 3' - - def test_sibling_tail_close_sibling(self): - class SomeObject(ComplexModel): - s = Unicode - i = Integer - - v = SomeObject(s='s', i=5) - - cloth = E.a( - E.b0(spyne_id="s"), - "text 3", - E.b1(spyne_id="i"), - ) - - print(etree.tostring(cloth, pretty_print=True)) - elt = self._run(v, cloth=cloth) - print(etree.tostring(elt, pretty_print=True)) - - assert elt[0].tag == 'b0' - assert elt[0].text == 's' - assert elt[0].tail == 'text 3' - - def test_sibling_tail_close_anc(self): - class SomeObject(ComplexModel): - s = Unicode - i = Integer - - v = SomeObject(s='s', i=5) - - cloth = E.a( - E.b0(), - "text 0", - E.b1( - E.c0(spyne_id="s"), - "text 1", - E.c1(), - "text 2", - ), - "text 3", - 
E.b2( - E.c1(spyne_id="i"), - "text 4", - ) - ) - - print(etree.tostring(cloth, pretty_print=True)) - elt = self._run(v, cloth=cloth) - print(etree.tostring(elt, pretty_print=True)) - - assert elt.xpath('/a/b1/c0')[0].tail == 'text 1' - assert elt.xpath('/a/b1/c1')[0].tail == 'text 2' - assert elt.xpath('/a/b2/c1')[0].tail == 'text 4' - - def test_nested_conflicts(self): - class SomeObject(ComplexModel): - s = Unicode - i = Integer - c = SelfReference - - v = SomeObject(s='x', i=1, c=SomeObject(s='y', i=2)) - - cloth = E.a( - E.b0(), - "text 0", - E.b1( - E.c0(spyne_id="s"), - "text 1", - E.c1( - E.d0(spyne_id="s"), - E.d1(spyne_id="i"), - spyne_id="c", - ), - "text 2", - ), - "text 3", - E.b2( - E.c2(spyne_id="i"), - "text 4", - ) - ) - - print(etree.tostring(cloth, pretty_print=True)) - elt = self._run(v, cloth=cloth) - print(etree.tostring(elt, pretty_print=True)) - - assert elt.xpath('/a/b1/c0')[0].text == str(v.s) - assert elt.xpath('/a/b1/c1/d0')[0].text == str(v.c.s) - assert elt.xpath('/a/b1/c1/d1')[0].text == str(v.c.i) - assert elt.xpath('/a/b2/c2')[0].text == str(v.i) - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/protocol/test_html_microformat.py b/libs_crutch/contrib/spyne/test/protocol/test_html_microformat.py deleted file mode 100644 index 8a0df49..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/test_html_microformat.py +++ /dev/null @@ -1,300 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) - -import unittest - -from lxml import html - -from spyne.application import Application -from spyne.decorator import srpc -from spyne.model.primitive import Integer -from spyne.model.primitive import String -from spyne.model.complex import Array -from spyne.model.complex import ComplexModel -from spyne.protocol.http import HttpRpc -from spyne.protocol.html import HtmlMicroFormat -from spyne.service import Service -from spyne.server.wsgi import WsgiMethodContext -from spyne.server.wsgi import WsgiApplication -from spyne.util.test import show, call_wsgi_app_kwargs - - -class TestHtmlMicroFormat(unittest.TestCase): - def test_simple(self): - class SomeService(Service): - @srpc(String, _returns=String) - def some_call(s): - return s - - app = Application([SomeService], 'tns', - in_protocol=HttpRpc(hier_delim='_'), - out_protocol=HtmlMicroFormat(doctype=None)) - - server = WsgiApplication(app) - - initial_ctx = WsgiMethodContext(server, { - 'QUERY_STRING': 's=s', - 'PATH_INFO': '/some_call', - 'REQUEST_METHOD': 'GET', - 'SERVER_NAME': 'localhost', - }, 'some-content-type') - - ctx, = server.generate_contexts(initial_ctx) - assert ctx.in_error is None - - server.get_in_object(ctx) - server.get_out_object(ctx) - server.get_out_string(ctx) - - assert b''.join(ctx.out_string) == b'
' \ - b'
s
' - - def test_multiple_return(self): - class SomeService(Service): - @srpc(_returns=[Integer, String]) - def some_call(): - return 1, 's' - - app = Application([SomeService], 'tns', - in_protocol=HttpRpc(hier_delim='_'), - out_protocol=HtmlMicroFormat(doctype=None)) - server = WsgiApplication(app) - - initial_ctx = WsgiMethodContext(server, { - 'QUERY_STRING': '', - 'PATH_INFO': '/some_call', - 'REQUEST_METHOD': 'GET', - 'SERVER_NAME': 'localhost', - }, 'some-content-type') - - ctx, = server.generate_contexts(initial_ctx) - server.get_in_object(ctx) - server.get_out_object(ctx) - server.get_out_string(ctx) - - assert b''.join(ctx.out_string) == b'
' \ - b'
1
' \ - b'
s
' - - - def test_complex(self): - class CM(ComplexModel): - i = Integer - s = String - - class CCM(ComplexModel): - c = CM - i = Integer - s = String - - class SomeService(Service): - @srpc(CCM, _returns=CCM) - def some_call(ccm): - return CCM(c=ccm.c,i=ccm.i, s=ccm.s) - - app = Application([SomeService], 'tns', - in_protocol=HttpRpc(hier_delim='_'), - out_protocol=HtmlMicroFormat(doctype=None)) - server = WsgiApplication(app) - - initial_ctx = WsgiMethodContext(server, { - 'QUERY_STRING': 'ccm_c_s=abc&ccm_c_i=123&ccm_i=456&ccm_s=def', - 'PATH_INFO': '/some_call', - 'REQUEST_METHOD': 'GET', - 'SERVER_NAME': 'localhost', - }, 'some-content-type') - - ctx, = server.generate_contexts(initial_ctx) - server.get_in_object(ctx) - server.get_out_object(ctx) - server.get_out_string(ctx) - - # - # Here's what this is supposed to return: - # - #
- #
- #
456
- #
- #
123
- #
abc
- #
- #
def
- #
- #
- # - - elt = html.fromstring(b''.join(ctx.out_string)) - print(html.tostring(elt, pretty_print=True)) - - resp = elt.find_class('some_callResponse') - assert len(resp) == 1 - res = resp[0].find_class('some_callResult') - assert len(res) == 1 - - i = res[0].findall('div[@class="i"]') - assert len(i) == 1 - assert i[0].text == '456' - - c = res[0].findall('div[@class="c"]') - assert len(c) == 1 - - c_i = c[0].findall('div[@class="i"]') - assert len(c_i) == 1 - assert c_i[0].text == '123' - - c_s = c[0].findall('div[@class="s"]') - assert len(c_s) == 1 - assert c_s[0].text == 'abc' - - s = res[0].findall('div[@class="s"]') - assert len(s) == 1 - assert s[0].text == 'def' - - def test_multiple(self): - class SomeService(Service): - @srpc(String(max_occurs='unbounded'), _returns=String) - def some_call(s): - print(s) - return '\n'.join(s) - - app = Application([SomeService], 'tns', - in_protocol=HttpRpc(hier_delim='_'), - out_protocol=HtmlMicroFormat(doctype=None)) - server = WsgiApplication(app) - - initial_ctx = WsgiMethodContext(server, { - 'QUERY_STRING': 's=1&s=2', - 'PATH_INFO': '/some_call', - 'REQUEST_METHOD': 'GET', - 'SERVER_NAME': 'localhost', - }, 'some-content-type') - - ctx, = server.generate_contexts(initial_ctx) - server.get_in_object(ctx) - server.get_out_object(ctx) - server.get_out_string(ctx) - - assert b''.join(ctx.out_string) == (b'
' - b'
1\n2
') - - ctx, = server.generate_contexts(initial_ctx) - server.get_in_object(ctx) - server.get_out_object(ctx) - server.get_out_string(ctx) - - assert b''.join(ctx.out_string) == b'
' \ - b'
1\n2
' - - def test_before_first_root(self): - class CM(ComplexModel): - i = Integer - s = String - - class CCM(ComplexModel): - c = CM - i = Integer - s = String - - class SomeService(Service): - @srpc(CCM, _returns=Array(CCM)) - def some_call(ccm): - return [CCM(c=ccm.c,i=ccm.i, s=ccm.s)] * 2 - - cb_called = [False] - def _cb(ctx, cls, inst, parent, name, **kwargs): - assert not cb_called[0] - cb_called[0] = True - - app = Application([SomeService], 'tns', - in_protocol=HttpRpc(hier_delim='_'), - out_protocol=HtmlMicroFormat( - doctype=None, before_first_root=_cb)) - server = WsgiApplication(app) - - call_wsgi_app_kwargs(server, - ccm_c_s='abc', ccm_c_i=123, ccm_i=456, ccm_s='def') - - assert cb_called[0] - - def test_complex_array(self): - class CM(ComplexModel): - i = Integer - s = String - - class CCM(ComplexModel): - c = CM - i = Integer - s = String - - class SomeService(Service): - @srpc(CCM, _returns=Array(CCM)) - def some_call(ccm): - return [CCM(c=ccm.c,i=ccm.i, s=ccm.s)] * 2 - - app = Application([SomeService], 'tns', - in_protocol=HttpRpc(hier_delim='_'), - out_protocol=HtmlMicroFormat(doctype=None)) - server = WsgiApplication(app) - - out_string = call_wsgi_app_kwargs(server, - ccm_c_s='abc', ccm_c_i=123, ccm_i=456, ccm_s='def') - - # - # Here's what this is supposed to return: - # - #
- #
- #
456
- #
- #
123
- #
abc
- #
- #
def
- #
- #
- #
456
- #
- #
123
- #
abc
- #
- #
def
- #
- #
- # - - print(out_string) - elt = html.fromstring(out_string) - show(elt, "TestHtmlMicroFormat.test_complex_array") - - resp = elt.find_class('some_callResponse') - assert len(resp) == 1 - res = resp[0].find_class('some_callResult') - assert len(res) == 1 - - assert len(res[0].find_class("CCM")) == 2 - - # We don't need to test the rest as the test_complex test takes care of - # that - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/protocol/test_html_table.py b/libs_crutch/contrib/spyne/test/protocol/test_html_table.py deleted file mode 100644 index d979356..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/test_html_table.py +++ /dev/null @@ -1,508 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) - -import unittest - -from lxml import etree, html - -from spyne.application import Application -from spyne.decorator import srpc -from spyne.model.primitive import Integer, Unicode -from spyne.model.primitive import String -from spyne.model.primitive import AnyUri -from spyne.model.complex import Array -from spyne.model.complex import ComplexModel -from spyne.protocol.http import HttpRpc -from spyne.protocol.html.table import HtmlColumnTable, HtmlRowTable -from spyne.service import Service -from spyne.server.wsgi import WsgiApplication -from spyne.util.test import show, call_wsgi_app_kwargs, call_wsgi_app - - -class CM(ComplexModel): - _type_info = [ - ('i', Integer), - ('s', String), - ] - - -class CCM(ComplexModel): - _type_info = [ - ('c', CM), - ('i', Integer), - ('s', String), - ] - - -class TestHtmlColumnTable(unittest.TestCase): - def test_complex_array(self): - class SomeService(Service): - @srpc(CCM, _returns=Array(CCM)) - def some_call(ccm): - return [ccm] * 5 - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), - out_protocol=HtmlColumnTable(field_type_name_attr=None)) - server = WsgiApplication(app) - - out_string = call_wsgi_app_kwargs(server, - ccm_i='456', - ccm_s='def', - ccm_c_i='123', - ccm_c_s='abc', - ) - - elt = etree.fromstring(out_string) - show(elt, 'TestHtmlColumnTable.test_complex_array') - - elt = html.fromstring(out_string) - - row, = elt[0] # thead - cell = row.findall('th[@class="i"]') - assert len(cell) == 1 - assert cell[0].text == 'i' - - cell = row.findall('th[@class="s"]') - assert len(cell) == 1 - assert cell[0].text == 's' - - for row in elt[1]: # tbody - cell = row.xpath('td[@class="i"]') - assert len(cell) == 1 - assert cell[0].text == 
'456' - - cell = row.xpath('td[@class="c"]//td[@class="i"]') - assert len(cell) == 1 - assert cell[0].text == '123' - - cell = row.xpath('td[@class="c"]//td[@class="s"]') - assert len(cell) == 1 - assert cell[0].text == 'abc' - - cell = row.xpath('td[@class="s"]') - assert len(cell) == 1 - assert cell[0].text == 'def' - - def test_string_array(self): - class SomeService(Service): - @srpc(String(max_occurs='unbounded'), _returns=Array(String)) - def some_call(s): - return s - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), - out_protocol=HtmlColumnTable( - field_name_attr=None, field_type_name_attr=None)) - server = WsgiApplication(app) - - out_string = call_wsgi_app(server, body_pairs=(('s', '1'), ('s', '2'))) - elt = etree.fromstring(out_string) - show(elt, "TestHtmlColumnTable.test_string_array") - assert out_string.decode('utf8') == \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '
some_callResponse
1
2
' - - def test_anyuri_string(self): - _link = "http://arskom.com.tr/" - - class C(ComplexModel): - c = AnyUri - - class SomeService(Service): - @srpc(_returns=Array(C)) - def some_call(): - return [C(c=_link)] - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), - out_protocol=HtmlColumnTable(field_type_name_attr=None)) - server = WsgiApplication(app) - - out_string = call_wsgi_app_kwargs(server) - - elt = html.fromstring(out_string) - show(elt, "TestHtmlColumnTable.test_anyuri_string") - assert elt.xpath('//td[@class="c"]')[0][0].tag == 'a' - assert elt.xpath('//td[@class="c"]')[0][0].attrib['href'] == _link - - def test_anyuri_uri_value(self): - _link = "http://arskom.com.tr/" - _text = "Arskom" - - class C(ComplexModel): - c = AnyUri - - class SomeService(Service): - @srpc(_returns=Array(C)) - def some_call(): - return [C(c=AnyUri.Value(_link, text=_text))] - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), - out_protocol=HtmlColumnTable(field_type_name_attr=None)) - server = WsgiApplication(app) - - out_string = call_wsgi_app_kwargs(server) - - elt = html.fromstring(out_string) - print(html.tostring(elt, pretty_print=True)) - assert elt.xpath('//td[@class="c"]')[0][0].tag == 'a' - assert elt.xpath('//td[@class="c"]')[0][0].text == _text - assert elt.xpath('//td[@class="c"]')[0][0].attrib['href'] == _link - - def test_row_subprot(self): - from lxml.html.builder import E - from spyne.protocol.html import HtmlBase - from spyne.util.six.moves.urllib.parse import urlencode - from spyne.protocol.html import HtmlMicroFormat - - class SearchProtocol(HtmlBase): - def to_parent(self, ctx, cls, inst, parent, name, **kwargs): - s = self.to_unicode(cls._type_info['query'], inst.query) - q = urlencode({"q": s}) - - parent.write(E.a("Search %s" % inst.query, - href="{}?{}".format(inst.uri, q))) - - def column_table_gen_header(self, ctx, cls, parent, name): - parent.write(E.thead(E.th("Search", - **{'class': 'search-link'}))) - - def 
column_table_before_row(self, ctx, cls, inst, parent, name,**_): - ctxstack = getattr(ctx.protocol[self], - 'array_subprot_ctxstack', []) - - tr_ctx = parent.element('tr') - tr_ctx.__enter__() - ctxstack.append(tr_ctx) - - td_ctx = parent.element('td', **{'class': "search-link"}) - td_ctx.__enter__() - ctxstack.append(td_ctx) - - ctx.protocol[self].array_subprot_ctxstack = ctxstack - - def column_table_after_row(self, ctx, cls, inst, parent, name, - **kwargs): - ctxstack = ctx.protocol[self].array_subprot_ctxstack - - for elt_ctx in reversed(ctxstack): - elt_ctx.__exit__(None, None, None) - - del ctxstack[:] - - class Search(ComplexModel): - query = Unicode - uri = Unicode - - SearchTable = Array( - Search.customize(prot=SearchProtocol()), - prot=HtmlColumnTable(field_type_name_attr=None), - ) - - class SomeService(Service): - @srpc(_returns=SearchTable) - def some_call(): - return [ - Search(query='Arskom', uri='https://www.google.com/search'), - Search(query='Spyne', uri='https://www.bing.com/search'), - ] - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), - out_protocol=HtmlMicroFormat()) - server = WsgiApplication(app) - - out_string = call_wsgi_app_kwargs(server) - - elt = html.fromstring(out_string) - print(html.tostring(elt, pretty_print=True)) - - assert elt.xpath('//td[@class="search-link"]/a/text()') == \ - ['Search Arskom', 'Search Spyne'] - - assert elt.xpath('//td[@class="search-link"]/a/@href') == [ - 'https://www.google.com/search?q=Arskom', - 'https://www.bing.com/search?q=Spyne', - ] - - assert elt.xpath('//th[@class="search-link"]/text()') == ["Search"] - - -class TestHtmlRowTable(unittest.TestCase): - def test_anyuri_string(self): - _link = "http://arskom.com.tr/" - - class C(ComplexModel): - c = AnyUri - - class SomeService(Service): - @srpc(_returns=C) - def some_call(): - return C(c=_link) - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), - out_protocol=HtmlRowTable(field_type_name_attr=None)) - server = 
WsgiApplication(app) - - out_string = call_wsgi_app_kwargs(server) - - elt = html.fromstring(out_string) - print(html.tostring(elt, pretty_print=True)) - assert elt.xpath('//td[@class="c"]')[0][0].tag == 'a' - assert elt.xpath('//td[@class="c"]')[0][0].attrib['href'] == _link - - def test_anyuri_uri_value(self): - _link = "http://arskom.com.tr/" - _text = "Arskom" - - class C(ComplexModel): - c = AnyUri - - class SomeService(Service): - @srpc(_returns=C) - def some_call(): - return C(c=AnyUri.Value(_link, text=_text)) - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), - out_protocol=HtmlRowTable(field_type_name_attr=None)) - server = WsgiApplication(app) - - out_string = call_wsgi_app_kwargs(server) - - elt = html.fromstring(out_string) - print(html.tostring(elt, pretty_print=True)) - assert elt.xpath('//td[@class="c"]')[0][0].tag == 'a' - assert elt.xpath('//td[@class="c"]')[0][0].text == _text - assert elt.xpath('//td[@class="c"]')[0][0].attrib['href'] == _link - - def test_complex(self): - class SomeService(Service): - @srpc(CCM, _returns=CCM) - def some_call(ccm): - return ccm - - app = Application([SomeService], 'tns', - in_protocol=HttpRpc(hier_delim="_"), - out_protocol=HtmlRowTable(field_type_name_attr=None)) - - server = WsgiApplication(app) - - out_string = call_wsgi_app_kwargs(server, 'some_call', - ccm_c_s='abc', ccm_c_i='123', ccm_i='456', ccm_s='def') - - elt = html.fromstring(out_string) - show(elt, "TestHtmlRowTable.test_complex") - - # Here's what this is supposed to return - """ - - - - - - - - - - - - - - - -
i456
c - - - - - - - - - - - -
i123
sabc
-
sdef
- """ - - print(html.tostring(elt, pretty_print=True)) - resp = elt.find_class('CCM') - assert len(resp) == 1 - - assert elt.xpath('tbody/tr/th[@class="i"]/text()')[0] == 'i' - assert elt.xpath('tbody/tr/td[@class="i"]/text()')[0] == '456' - - assert elt.xpath('tbody/tr/td[@class="c"]//th[@class="i"]/text()')[0] == 'i' - assert elt.xpath('tbody/tr/td[@class="c"]//td[@class="i"]/text()')[0] == '123' - - assert elt.xpath('tbody/tr/td[@class="c"]//th[@class="s"]/text()')[0] == 's' - assert elt.xpath('tbody/tr/td[@class="c"]//td[@class="s"]/text()')[0] == 'abc' - - assert elt.xpath('tbody/tr/th[@class="s"]/text()')[0] == 's' - assert elt.xpath('tbody/tr/td[@class="s"]/text()')[0] == 'def' - - def test_string_array(self): - class SomeService(Service): - @srpc(String(max_occurs='unbounded'), _returns=Array(String)) - def some_call(s): - return s - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), - out_protocol=HtmlRowTable(field_name_attr=None, - field_type_name_attr=None)) - server = WsgiApplication(app) - - out_string = call_wsgi_app(server, body_pairs=(('s', '1'), ('s', '2')) ) - show(html.fromstring(out_string), 'TestHtmlRowTable.test_string_array') - assert out_string.decode('utf8') == \ - '
' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '
string' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '
1
2
' \ - '
' \ - '
' - - def test_string_array_no_header(self): - class SomeService(Service): - @srpc(String(max_occurs='unbounded'), _returns=Array(String)) - def some_call(s): - return s - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), - out_protocol=HtmlRowTable(header=False, - field_name_attr=None, field_type_name_attr=None)) - - server = WsgiApplication(app) - - out_string = call_wsgi_app(server, body_pairs=(('s', '1'), ('s', '2')) ) - #FIXME: Needs a proper test with xpaths and all. - show(html.fromstring(out_string), 'TestHtmlRowTable.test_string_array_no_header') - assert out_string.decode('utf8') == \ - '
' \ - '' \ - '' \ - '' \ - '' \ - '
' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '
1
2
' \ - '
' \ - '
' - - - def test_complex_array(self): - v = [ - CM(i=1, s='a'), - CM(i=2, s='b'), - CM(i=3, s='c'), - CM(i=4, s='d'), - ] - class SomeService(Service): - @srpc(_returns=Array(CM)) - def some_call(): - return v - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), - out_protocol=HtmlRowTable(field_type_name_attr=None)) - server = WsgiApplication(app) - - out_string = call_wsgi_app_kwargs(server) - show(html.fromstring(out_string), 'TestHtmlRowTable.test_complex_array') - #FIXME: Needs a proper test with xpaths and all. - assert out_string.decode('utf8') == \ - '
' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '
i1
sa
' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '
i2
sb
' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '
i3
sc
' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '' \ - '
i4
sd
' \ - '
' - - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/protocol/test_http.py b/libs_crutch/contrib/spyne/test/protocol/test_http.py deleted file mode 100644 index 3f34572..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/test_http.py +++ /dev/null @@ -1,867 +0,0 @@ -#!/usr/bin/env python -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -import logging -logging.basicConfig(level=logging.DEBUG) - -import unittest - -from spyne.util.six import StringIO -from spyne.util.six.moves.http_cookies import SimpleCookie - -from datetime import datetime -from wsgiref.validate import validator as wsgiref_validator - -from spyne.server.wsgi import _parse_qs -from spyne.application import Application -from spyne.error import ValidationError -from spyne.const.http import HTTP_200 -from spyne.decorator import rpc -from spyne.decorator import srpc -from spyne.model import ByteArray, DateTime, Uuid, String, Integer, Integer8, \ - ComplexModel, Array -from spyne.protocol.http import HttpRpc, HttpPattern, _parse_cookie -from spyne.service import Service -from spyne.server.wsgi import WsgiApplication, WsgiMethodContext -from spyne.server.http import HttpTransportContext -from spyne.util.test 
import call_wsgi_app_kwargs - - -class TestString(unittest.TestCase): - def setUp(self): - class SomeService(Service): - @srpc(String, _returns=String) - def echo_string(s): - return s - - app = Application([SomeService], 'tns', - in_protocol=HttpRpc(validator='soft'), - out_protocol=HttpRpc(), - ) - - self.app = WsgiApplication(app) - - def test_without_content_type(self): - headers = None - ret = call_wsgi_app_kwargs(self.app, 'echo_string', headers, s="string") - assert ret == b'string' - - def test_without_encoding(self): - headers = {'CONTENT_TYPE':'text/plain'} - ret = call_wsgi_app_kwargs(self.app, 'echo_string', headers, s="string") - assert ret == b'string' - - def test_with_encoding(self): - headers = {'CONTENT_TYPE':'text/plain; charset=utf8'} - ret = call_wsgi_app_kwargs(self.app, 'echo_string', headers, s="string") - assert ret == b'string' - - -class TestHttpTransportContext(unittest.TestCase): - def test_gen_header(self): - val = HttpTransportContext.gen_header("text/plain", charset="utf8") - assert val == 'text/plain; charset="utf8"' - - -class TestSimpleDictDocument(unittest.TestCase): - def test_own_parse_qs_01(self): - assert dict(_parse_qs('')) == {} - def test_own_parse_qs_02(self): - assert dict(_parse_qs('p')) == {'p': [None]} - def test_own_parse_qs_03(self): - assert dict(_parse_qs('p=')) == {'p': ['']} - def test_own_parse_qs_04(self): - assert dict(_parse_qs('p=1')) == {'p': ['1']} - def test_own_parse_qs_05(self): - assert dict(_parse_qs('p=1&')) == {'p': ['1']} - def test_own_parse_qs_06(self): - assert dict(_parse_qs('p=1&q')) == {'p': ['1'], 'q': [None]} - def test_own_parse_qs_07(self): - assert dict(_parse_qs('p=1&q=')) == {'p': ['1'], 'q': ['']} - def test_own_parse_qs_08(self): - assert dict(_parse_qs('p=1&q=2')) == {'p': ['1'], 'q': ['2']} - def test_own_parse_qs_09(self): - assert dict(_parse_qs('p=1&q=2&p')) == {'p': ['1', None], 'q': ['2']} - def test_own_parse_qs_10(self): - assert dict(_parse_qs('p=1&q=2&p=')) == {'p': ['1', 
''], 'q': ['2']} - def test_own_parse_qs_11(self): - assert dict(_parse_qs('p=1&q=2&p=3')) == {'p': ['1', '3'], 'q': ['2']} - -def _test(services, qs, validator='soft', strict_arrays=False): - app = Application(services, 'tns', - in_protocol=HttpRpc(validator=validator, strict_arrays=strict_arrays), - out_protocol=HttpRpc()) - server = WsgiApplication(app) - - initial_ctx = WsgiMethodContext(server, { - 'QUERY_STRING': qs, - 'PATH_INFO': '/some_call', - 'REQUEST_METHOD': 'GET', - 'SERVER_NAME': "localhost", - }, 'some-content-type') - - ctx, = server.generate_contexts(initial_ctx) - - server.get_in_object(ctx) - if ctx.in_error is not None: - raise ctx.in_error - - server.get_out_object(ctx) - if ctx.out_error is not None: - raise ctx.out_error - - server.get_out_string(ctx) - - return ctx - -class TestValidation(unittest.TestCase): - def test_validation_frequency(self): - class SomeService(Service): - @srpc(ByteArray(min_occurs=1), _returns=ByteArray) - def some_call(p): - pass - - try: - _test([SomeService], '', validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - def _test_validation_frequency_simple_bare(self): - class SomeService(Service): - @srpc(ByteArray(min_occurs=1), _body_style='bare', _returns=ByteArray) - def some_call(p): - pass - - try: - _test([SomeService], '', validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - def test_validation_frequency_complex_bare_parent(self): - class C(ComplexModel): - i=Integer(min_occurs=1) - s=String - - class SomeService(Service): - @srpc(C, _body_style='bare') - def some_call(p): - pass - - # must not complain about missing s - _test([SomeService], 'i=5', validator='soft') - - # must raise validation error for missing i - try: - _test([SomeService], 's=a', validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - # must raise validation error for 
missing i - try: - _test([SomeService], '', validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - def test_validation_frequency_parent(self): - class C(ComplexModel): - i=Integer(min_occurs=1) - s=String - - class SomeService(Service): - @srpc(C) - def some_call(p): - pass - - # must not complain about missing s - _test([SomeService], 'p.i=5', validator='soft') - try: - # must raise validation error for missing i - _test([SomeService], 'p.s=a', validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - # must not raise anything for missing p because C has min_occurs=0 - _test([SomeService], '', validator='soft') - - def test_validation_array(self): - class C(ComplexModel): - i=Integer(min_occurs=1) - s=String - - class SomeService(Service): - @srpc(Array(C)) - def some_call(p): - pass - - # must not complain about missing s - _test([SomeService], 'p[0].i=5', validator='soft') - try: - # must raise validation error for missing i - _test([SomeService], 'p[0].s=a', validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - # must not raise anything for missing p because C has min_occurs=0 - _test([SomeService], '', validator='soft') - - def test_validation_array_index_jump_error(self): - class C(ComplexModel): - i=Integer - - class SomeService(Service): - @srpc(Array(C), _returns=String) - def some_call(p): - return repr(p) - - try: - # must raise validation error for index jump from 0 to 2 even without - # any validation - _test([SomeService], 'p[0].i=42&p[2].i=42&', strict_arrays=True) - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - def test_validation_array_index_jump_tolerate(self): - class C(ComplexModel): - i=Integer - - class SomeService(Service): - @srpc(Array(C), _returns=String) - def some_call(p): - return repr(p) - - # must not raise validation error for 
index jump from 0 to 2 and ignore - # element with index 1 - ret = _test([SomeService], 'p[0].i=0&p[2].i=2&', strict_arrays=False) - assert ret.out_object[0] == '[C(i=0), C(i=2)]' - - # even if they arrive out-of-order. - ret = _test([SomeService], 'p[2].i=2&p[0].i=0&', strict_arrays=False) - assert ret.out_object[0] == '[C(i=0), C(i=2)]' - - def test_validation_nested_array(self): - class CC(ComplexModel): - d = DateTime - - class C(ComplexModel): - i = Integer(min_occurs=1) - cc = Array(CC) - - class SomeService(Service): - @srpc(Array(C)) - def some_call(p): - print(p) - - # must not complain about missing s - _test([SomeService], 'p[0].i=5', validator='soft') - try: - # must raise validation error for missing i - _test([SomeService], 'p[0].cc[0].d=2013-01-01', validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - # must not raise anything for missing p because C has min_occurs=0 - _test([SomeService], '', validator='soft') - - def test_validation_nullable(self): - class SomeService(Service): - @srpc(ByteArray(nullable=False), _returns=ByteArray) - def some_call(p): - pass - - try: - _test([SomeService], 'p', validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - def test_validation_string_pattern(self): - class SomeService(Service): - @srpc(Uuid) - def some_call(p): - pass - - try: - _test([SomeService], "p=duduk", validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - def test_validation_integer_range(self): - class SomeService(Service): - @srpc(Integer(ge=0, le=5)) - def some_call(p): - pass - - try: - _test([SomeService], 'p=10', validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - def test_validation_integer_type(self): - class SomeService(Service): - @srpc(Integer8) - def some_call(p): - pass - - try: - _test([SomeService], "p=-129", 
validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - def test_validation_integer_type_2(self): - class SomeService(Service): - @srpc(Integer8) - def some_call(p): - pass - - try: - _test([SomeService], "p=1.2", validator='soft') - except ValidationError: - pass - else: - raise Exception("must raise ValidationError") - - -class Test(unittest.TestCase): - def test_multiple_return(self): - class SomeService(Service): - @srpc(_returns=[Integer, String]) - def some_call(): - return 1, 's' - - try: - _test([SomeService], '') - except TypeError: - pass - else: - raise Exception("Must fail with: HttpRpc does not support complex " - "return types.") - - def test_primitive_only(self): - class SomeComplexModel(ComplexModel): - i = Integer - s = String - - class SomeService(Service): - @srpc(SomeComplexModel, _returns=SomeComplexModel) - def some_call(scm): - return SomeComplexModel(i=5, s='5x') - - try: - _test([SomeService], '') - except TypeError: - pass - else: - raise Exception("Must fail with: HttpRpc does not support complex " - "return types.") - - def test_complex(self): - class CM(ComplexModel): - _type_info = [ - ("i", Integer), - ("s", String), - ] - - class CCM(ComplexModel): - _type_info = [ - ("i", Integer), - ("c", CM), - ("s", String), - ] - - class SomeService(Service): - @srpc(CCM, _returns=String) - def some_call(ccm): - return repr(CCM(c=ccm.c, i=ccm.i, s=ccm.s)) - - ctx = _test([SomeService], '&ccm.i=1&ccm.s=s&ccm.c.i=3&ccm.c.s=cs') - - assert ctx.out_string[0] == b"CCM(i=1, c=CM(i=3, s='cs'), s='s')" - - def test_simple_array(self): - class SomeService(Service): - @srpc(String(max_occurs='unbounded'), _returns=String) - def some_call(s): - return '\n'.join(s) - - ctx = _test([SomeService], '&s=1&s=2') - assert b''.join(ctx.out_string) == b'1\n2' - - def test_complex_array(self): - class CM(ComplexModel): - _type_info = [ - ("i", Integer), - ("s", String), - ] - - class SomeService(Service): - 
@srpc(Array(CM), _returns=String) - def some_call(cs): - return '\n'.join([repr(c) for c in cs]) - - ctx = _test([SomeService], - 'cs[0].i=1&cs[0].s=x' - '&cs[1].i=2&cs[1].s=y' - '&cs[2].i=3&cs[2].s=z') - - assert b''.join(ctx.out_string) == \ - b"CM(i=1, s='x')\n" \ - b"CM(i=2, s='y')\n" \ - b"CM(i=3, s='z')" - - def test_complex_array_empty(self): - class CM(ComplexModel): - _type_info = [ - ("i", Integer), - ("s", String), - ] - - class SomeService(Service): - @srpc(Array(CM), _returns=String) - def some_call(cs): - return repr(cs) - - ctx = _test([SomeService], 'cs=empty') - - assert b''.join(ctx.out_string) == b'[]' - - def test_complex_object_empty(self): - class CM(ComplexModel): - _type_info = [ - ("i", Integer), - ("s", String), - ] - - class SomeService(Service): - @srpc(CM, _returns=String) - def some_call(c): - return repr(c) - - ctx = _test([SomeService], 'c=empty') - - assert b''.join(ctx.out_string) == b'CM()' - - def test_nested_flatten(self): - class CM(ComplexModel): - _type_info = [ - ("i", Integer), - ("s", String), - ] - - class CCM(ComplexModel): - _type_info = [ - ("i", Integer), - ("c", CM), - ("s", String), - ] - - class SomeService(Service): - @srpc(CCM, _returns=String) - def some_call(ccm): - return repr(ccm) - - ctx = _test([SomeService], '&ccm.i=1&ccm.s=s&ccm.c.i=3&ccm.c.s=cs') - - print(ctx.out_string) - assert b''.join(ctx.out_string) == b"CCM(i=1, c=CM(i=3, s='cs'), s='s')" - - def test_nested_flatten_with_multiple_values_1(self): - class CM(ComplexModel): - _type_info = [ - ("i", Integer), - ("s", String), - ] - - class CCM(ComplexModel): - _type_info = [ - ("i", Integer), - ("c", CM), - ("s", String), - ] - - class SomeService(Service): - @srpc(CCM.customize(max_occurs=2), _returns=String) - def some_call(ccm): - return repr(ccm) - - ctx = _test([SomeService], 'ccm[0].i=1&ccm[0].s=s' - '&ccm[0].c.i=1&ccm[0].c.s=a' - '&ccm[1].c.i=2&ccm[1].c.s=b') - - s = b''.join(ctx.out_string) - - assert s == b"[CCM(i=1, c=CM(i=1, s='a'), s='s'), 
CCM(c=CM(i=2, s='b'))]" - - def test_nested_flatten_with_multiple_values_2(self): - class CM(ComplexModel): - _type_info = [ - ("i", Integer), - ("s", String), - ] - - class CCM(ComplexModel): - _type_info = [ - ("i", Integer), - ("c", CM.customize(max_occurs=2)), - ("s", String), - ] - - class SomeService(Service): - @srpc(CCM, _returns=String) - def some_call(ccm): - return repr(ccm) - - ctx = _test([SomeService], 'ccm.i=1&ccm.s=s' - '&ccm.c[0].i=1&ccm.c[0].s=a' - '&ccm.c[1].i=2&ccm.c[1].s=b') - - s = b''.join(list(ctx.out_string)) - assert s == b"CCM(i=1, c=[CM(i=1, s='a'), CM(i=2, s='b')], s='s')" - - def test_nested_flatten_with_complex_array(self): - class CM(ComplexModel): - _type_info = [ - ("i", Integer), - ("s", String), - ] - - class CCM(ComplexModel): - _type_info = [ - ("i", Integer), - ("c", Array(CM)), - ("s", String), - ] - - class SomeService(Service): - @srpc(CCM, _returns=String) - def some_call(ccm): - return repr(ccm) - - ctx = _test([SomeService], 'ccm.i=1&ccm.s=s' - '&ccm.c[0].i=1&ccm.c[0].s=a' - '&ccm.c[1].i=2&ccm.c[1].s=b') - - s = b''.join(list(ctx.out_string)) - assert s == b"CCM(i=1, c=[CM(i=1, s='a'), CM(i=2, s='b')], s='s')" - - def test_nested_2_flatten_with_primitive_array(self): - class CCM(ComplexModel): - _type_info = [ - ("i", Integer), - ("c", Array(String)), - ("s", String), - ] - - class SomeService(Service): - @srpc(Array(CCM), _returns=String) - def some_call(ccm): - return repr(ccm) - - ctx = _test([SomeService], 'ccm[0].i=1&ccm[0].s=s' - '&ccm[0].c=a' - '&ccm[0].c=b') - s = b''.join(list(ctx.out_string)) - assert s == b"[CCM(i=1, c=['a', 'b'], s='s')]" - - def test_default(self): - class CM(ComplexModel): - _type_info = [ - ("i", Integer), - ("s", String(default='default')), - ] - - class SomeService(Service): - @srpc(CM, _returns=String) - def some_call(cm): - return repr(cm) - - # s is missing - ctx = _test([SomeService], 'cm.i=1') - s = b''.join(ctx.out_string) - assert s == b"CM(i=1, s='default')" - - # s is None - ctx 
= _test([SomeService], 'cm.i=1&cm.s') - s = b''.join(ctx.out_string) - assert s == b"CM(i=1)" - - # s is empty - ctx = _test([SomeService], 'cm.i=1&cm.s=') - s = b''.join(ctx.out_string) - assert s == b"CM(i=1, s='')" - - def test_nested_flatten_with_primitive_array(self): - class CCM(ComplexModel): - _type_info = [ - ("i", Integer), - ("c", Array(String)), - ("s", String), - ] - - class SomeService(Service): - @srpc(CCM, _returns=String) - def some_call(ccm): - return repr(ccm) - - ctx = _test([SomeService], 'ccm.i=1&ccm.s=s' - '&ccm.c=a' - '&ccm.c=b') - s = b''.join(list(ctx.out_string)) - assert s == b"CCM(i=1, c=['a', 'b'], s='s')" - - ctx = _test([SomeService], 'ccm.i=1' - '&ccm.s=s' - '&ccm.c[1]=b' - '&ccm.c[0]=a') - s = b''.join(list(ctx.out_string)) - assert s == b"CCM(i=1, c=['a', 'b'], s='s')" - - ctx = _test([SomeService], 'ccm.i=1' - '&ccm.s=s' - '&ccm.c[0]=a' - '&ccm.c[1]=b') - s = b''.join(list(ctx.out_string)) - assert s == b"CCM(i=1, c=['a', 'b'], s='s')" - - def test_http_headers(self): - d = datetime(year=2013, month=1, day=1) - string = ['hey', 'yo'] - - class ResponseHeader(ComplexModel): - _type_info = { - 'Set-Cookie': String(max_occurs='unbounded'), - 'Expires': DateTime - } - - class SomeService(Service): - __out_header__ = ResponseHeader - - @rpc(String) - def some_call(ctx, s): - assert s is not None - ctx.out_header = ResponseHeader(**{'Set-Cookie': string, - 'Expires': d}) - - def start_response(code, headers): - print(headers) - assert len([s for s in string if ('Set-Cookie', s) in headers]) == len(string) - assert dict(headers)['Expires'] == 'Tue, 01 Jan 2013 00:00:00 GMT' - - app = Application([SomeService], 'tns', - in_protocol=HttpRpc(), out_protocol=HttpRpc()) - wsgi_app = WsgiApplication(app) - - req_dict = { - 'SCRIPT_NAME': '', - 'QUERY_STRING': '&s=foo', - 'PATH_INFO': '/some_call', - 'REQUEST_METHOD': 'GET', - 'SERVER_NAME': 'localhost', - 'SERVER_PORT': "9999", - 'wsgi.url_scheme': 'http', - 'wsgi.version': (1,0), - 
'wsgi.input': StringIO(), - 'wsgi.errors': StringIO(), - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': True, - } - - ret = wsgi_app(req_dict, start_response) - print(list(ret)) - - wsgi_app = wsgiref_validator(wsgi_app) - - ret = wsgi_app(req_dict, start_response) - - assert list(ret) == [b''] - - -class TestHttpPatterns(unittest.TestCase): - def test_rules(self): - _int = 5 - _fragment = 'some_fragment' - - class SomeService(Service): - @srpc(Integer, _returns=Integer, _patterns=[ - HttpPattern('/%s/'% _fragment)]) - def some_call(some_int): - assert some_int == _int - - app = Application([SomeService], 'tns', in_protocol=HttpRpc(), out_protocol=HttpRpc()) - server = WsgiApplication(app) - - environ = { - 'QUERY_STRING': '', - 'PATH_INFO': '/%s/%d' % (_fragment, _int), - 'SERVER_PATH':"/", - 'SERVER_NAME': "localhost", - 'wsgi.url_scheme': 'http', - 'SERVER_PORT': '9000', - 'REQUEST_METHOD': 'GET', - } - - initial_ctx = WsgiMethodContext(server, environ, 'some-content-type') - - ctx, = server.generate_contexts(initial_ctx) - - foo = [] - for i in server._http_patterns: - foo.append(i) - - assert len(foo) == 1 - print(foo) - assert ctx.descriptor is not None - - server.get_in_object(ctx) - assert ctx.in_error is None - - server.get_out_object(ctx) - assert ctx.out_error is None - - -class ParseCookieTest(unittest.TestCase): - def test_cookie_parse(self): - string = 'some_string' - class RequestHeader(ComplexModel): - some_field = String - - class SomeService(Service): - __in_header__ = RequestHeader - - @rpc(String) - def some_call(ctx, s): - assert ctx.in_header.some_field == string - - def start_response(code, headers): - assert code == HTTP_200 - - c = 'some_field=%s'% (string,) - - app = Application([SomeService], 'tns', - in_protocol=HttpRpc(parse_cookie=True), out_protocol=HttpRpc()) - - wsgi_app = WsgiApplication(app) - - req_dict = { - 'SCRIPT_NAME': '', - 'QUERY_STRING': '', - 'PATH_INFO': '/some_call', - 'REQUEST_METHOD': 
'GET', - 'SERVER_NAME': 'localhost', - 'SERVER_PORT': "9999", - 'HTTP_COOKIE': c, - 'wsgi.url_scheme': 'http', - 'wsgi.version': (1,0), - 'wsgi.input': StringIO(), - 'wsgi.errors': StringIO(), - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': True, - } - - ret = wsgi_app(req_dict, start_response) - print(ret) - - wsgi_app = wsgiref_validator(wsgi_app) - - ret = wsgi_app(req_dict, start_response) - print(ret) - - # These tests copied from Django: - # https://github.com/django/django/pull/6277/commits/da810901ada1cae9fc1f018f879f11a7fb467b28 - def test_python_cookies(self): - """ - Test cases copied from Python's Lib/test/test_http_cookies.py - """ - self.assertEqual(_parse_cookie('chips=ahoy; vienna=finger'), {'chips': 'ahoy', 'vienna': 'finger'}) - # Here _parse_cookie() differs from Python's cookie parsing in that it - # treats all semicolons as delimiters, even within quotes. - self.assertEqual( - _parse_cookie('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'), - {'keebler': '"E=mc2', 'L': '\\"Loves\\"', 'fudge': '\\012', '': '"'} - ) - # Illegal cookies that have an '=' char in an unquoted value. - self.assertEqual(_parse_cookie('keebler=E=mc2'), {'keebler': 'E=mc2'}) - # Cookies with ':' character in their name. - self.assertEqual(_parse_cookie('key:term=value:term'), {'key:term': 'value:term'}) - # Cookies with '[' and ']'. - self.assertEqual(_parse_cookie('a=b; c=[; d=r; f=h'), {'a': 'b', 'c': '[', 'd': 'r', 'f': 'h'}) - - def test_cookie_edgecases(self): - # Cookies that RFC6265 allows. - self.assertEqual(_parse_cookie('a=b; Domain=example.com'), {'a': 'b', 'Domain': 'example.com'}) - # _parse_cookie() has historically kept only the last cookie with the - # same name. - self.assertEqual(_parse_cookie('a=b; h=i; a=c'), {'a': 'c', 'h': 'i'}) - - def test_invalid_cookies(self): - """ - Cookie strings that go against RFC6265 but browsers will send if set - via document.cookie. 
- """ - # Chunks without an equals sign appear as unnamed values per - # https://bugzilla.mozilla.org/show_bug.cgi?id=169091 - self.assertIn('django_language', - _parse_cookie('abc=def; unnamed; django_language=en').keys()) - # Even a double quote may be an unamed value. - self.assertEqual( - _parse_cookie('a=b; "; c=d'), {'a': 'b', '': '"', 'c': 'd'}) - # Spaces in names and values, and an equals sign in values. - self.assertEqual(_parse_cookie('a b c=d e = f; gh=i'), - {'a b c': 'd e = f', 'gh': 'i'}) - # More characters the spec forbids. - self.assertEqual(_parse_cookie('a b,c<>@:/[]?{}=d " =e,f g'), - {'a b,c<>@:/[]?{}': 'd " =e,f g'}) - # Unicode characters. The spec only allows ASCII. - self.assertEqual(_parse_cookie(u'saint=André Bessette'), - {u'saint': u'André Bessette'}) - # Browsers don't send extra whitespace or semicolons in Cookie headers, - # but _parse_cookie() should parse whitespace the same way - # document.cookie parses whitespace. - self.assertEqual(_parse_cookie(' = b ; ; = ; c = ; '), - {'': 'b', 'c': ''}) - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/protocol/test_json.py b/libs_crutch/contrib/spyne/test/protocol/test_json.py deleted file mode 100644 index ac162fb..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/test_json.py +++ /dev/null @@ -1,212 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import unittest -try: - import simplejson as json -except ImportError: - import json - - -from spyne import MethodContext -from spyne import Application -from spyne import rpc,srpc -from spyne import Service -from spyne.model import Integer, Unicode, ComplexModel -from spyne.protocol.json import JsonP -from spyne.protocol.json import JsonDocument -from spyne.protocol.json import JsonEncoder -from spyne.protocol.json import _SpyneJsonRpc1 -from spyne.server import ServerBase -from spyne.server.null import NullServer - -from spyne.test.protocol._test_dictdoc import TDictDocumentTest -from spyne.test.protocol._test_dictdoc import TDry - - -class TestDictDocument(TDictDocumentTest(json, JsonDocument, - dumps_kwargs=dict(cls=JsonEncoder))): - def dumps(self, o): - return super(TestDictDocument, self).dumps(o).encode('utf8') - - def loads(self, o): - return super(TestDictDocument, self).loads(o.decode('utf8')) - -_dry_sjrpc1 = TDry(json, _SpyneJsonRpc1) - -class TestSpyneJsonRpc1(unittest.TestCase): - def test_call(self): - class SomeService(Service): - @srpc(Integer, _returns=Integer) - def yay(i): - print(i) - return i - - ctx = _dry_sjrpc1([SomeService], - {"ver": 1, "body": {"yay": {"i":5}}}, True) - - print(ctx) - print(list(ctx.out_string)) - assert ctx.out_document == {"ver": 1, "body": 5} - - def test_call_with_header(self): - class SomeHeader(ComplexModel): - i = Integer - - class SomeService(Service): - __in_header__ = SomeHeader - @rpc(Integer, _returns=Integer) - def yay(ctx, i): - print(ctx.in_header) - return ctx.in_header.i - - ctx = _dry_sjrpc1([SomeService], - {"ver": 1, "body": {"yay": None}, "head": {"i":5}}, True) - - print(ctx) - print(list(ctx.out_string)) - assert ctx.out_document == {"ver": 1, "body": 5} - - def 
test_error(self): - class SomeHeader(ComplexModel): - i = Integer - - class SomeService(Service): - __in_header__ = SomeHeader - @rpc(Integer, Integer, _returns=Integer) - def div(ctx, dividend, divisor): - return dividend / divisor - - ctx = _dry_sjrpc1([SomeService], - {"ver": 1, "body": {"div": [4,0]}}, True) - - print(ctx) - print(list(ctx.out_string)) - assert ctx.out_document == {"ver": 1, "fault": { - 'faultcode': 'Server', 'faultstring': 'Internal Error'}} - - -class TestJsonDocument(unittest.TestCase): - def test_out_kwargs(self): - class SomeService(Service): - @srpc() - def yay(): - pass - - app = Application([SomeService], 'tns', - in_protocol=JsonDocument(), - out_protocol=JsonDocument()) - - assert 'cls' in app.out_protocol.kwargs - assert not ('cls' in app.in_protocol.kwargs) - - app = Application([SomeService], 'tns', - in_protocol=JsonDocument(), - out_protocol=JsonDocument(cls='hey')) - - assert app.out_protocol.kwargs['cls'] == 'hey' - assert not ('cls' in app.in_protocol.kwargs) - - def test_invalid_input(self): - class SomeService(Service): - pass - - app = Application([SomeService], 'tns', - in_protocol=JsonDocument(), - out_protocol=JsonDocument()) - - server = ServerBase(app) - - initial_ctx = MethodContext(server, MethodContext.SERVER) - initial_ctx.in_string = [b'{'] - ctx, = server.generate_contexts(initial_ctx, in_string_charset='utf8') - assert ctx.in_error.faultcode == 'Client.JsonDecodeError' - - -class TestJsonP(unittest.TestCase): - def test_callback_name(self): - callback_name = 'some_callback' - - class SomeComplexModel(ComplexModel): - i = Integer - s = Unicode - - v1 = 42 - v2 = SomeComplexModel(i=42, s='foo') - - class SomeService(Service): - @srpc(_returns=Integer) - def yay(): - return v1 - - @srpc(_returns=SomeComplexModel) - def complex(): - return v2 - - app = Application([SomeService], 'tns', - in_protocol=JsonDocument(), - out_protocol=JsonP(callback_name)) - - server = NullServer(app, ostr=True) - - ret = 
server.service.yay() - ret = list(ret) - print(b''.join(ret)) - assert b''.join(ret) == b''.join((callback_name.encode('utf8'), b'(', - str(v1).encode('utf8'), b');')) - - ret = server.service.complex() - ret = list(ret) - print(b''.join(ret)) - assert b''.join(ret) == b''.join((callback_name.encode('utf8'), b'(', - json.dumps({"i": 42, "s": "foo"}).encode('utf-8') , b');')) - - - def test_wrapped_array_in_wrapped_response(self): - from spyne.model.complex import ComplexModel, Array - from spyne.model.primitive import Unicode - - class Permission(ComplexModel): - _type_info = [ - ('application', Unicode), - ('feature', Unicode), - ] - - class SomeService(Service): - @srpc(_returns=Array(Permission)) - def yay(): - return [ - Permission(application='app', feature='f1'), - Permission(application='app', feature='f2') - ] - - app = Application([SomeService], 'tns', - in_protocol=JsonDocument(), - out_protocol=JsonDocument(ignore_wrappers=False)) - - server = NullServer(app, ostr=True) - retstr = b''.join(server.service.yay()).decode('utf-8') - print(retstr) - assert retstr == '{"yayResponse": {"yayResult": [' \ - '{"Permission": {"application": "app", "feature": "f1"}}, ' \ - '{"Permission": {"application": "app", "feature": "f2"}}]}}' - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/protocol/test_msgpack.py b/libs_crutch/contrib/spyne/test/protocol/test_msgpack.py deleted file mode 100644 index aa98892..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/test_msgpack.py +++ /dev/null @@ -1,146 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging - -from spyne.util import six - -logging.basicConfig(level=logging.DEBUG) - -import unittest - -import msgpack - -from spyne import MethodContext -from spyne.application import Application -from spyne.decorator import rpc -from spyne.decorator import srpc -from spyne.service import Service -from spyne.model.complex import Array -from spyne.model.primitive import String -from spyne.model.complex import ComplexModel -from spyne.model.primitive import Unicode -from spyne.protocol.msgpack import MessagePackDocument -from spyne.protocol.msgpack import MessagePackRpc -from spyne.util.six import BytesIO -from spyne.server import ServerBase -from spyne.server.wsgi import WsgiApplication -from spyne.test.protocol._test_dictdoc import TDictDocumentTest - -from spyne.test.test_service import start_response - - -def convert_dict(d): - if isinstance(d, six.text_type): - return d.encode('utf8') - - if not isinstance(d, dict): - return d - - r = {} - - for k, v in d.items(): - r[k.encode('utf8')] = convert_dict(v) - - return r - - -# apply spyne defaults to test unpacker -TestMessagePackDocument = TDictDocumentTest(msgpack, MessagePackDocument, - loads_kwargs=dict(use_list=False), convert_dict=convert_dict) - - -class TestMessagePackRpc(unittest.TestCase): - def test_invalid_input(self): - class SomeService(Service): - @srpc() - def yay(): - pass - - app = Application([SomeService], 'tns', - in_protocol=MessagePackDocument(), - out_protocol=MessagePackDocument()) - - server = ServerBase(app) - - 
initial_ctx = MethodContext(server, MethodContext.SERVER) - initial_ctx.in_string = [b'\xdf'] # Invalid input - ctx, = server.generate_contexts(initial_ctx) - assert ctx.in_error.faultcode == 'Client.MessagePackDecodeError' - - def test_rpc(self): - data = {"a":"b", "c": "d"} - - class KeyValuePair(ComplexModel): - key = Unicode - value = Unicode - - class SomeService(Service): - @rpc(String(max_occurs='unbounded'), - _returns=Array(KeyValuePair), - _in_variable_names={ - 'keys': 'key' - } - ) - def get_values(ctx, keys): - for k in keys: - yield KeyValuePair(key=k, value=data[k]) - - application = Application([SomeService], - in_protocol=MessagePackRpc(), - out_protocol=MessagePackRpc(ignore_wrappers=False), - name='Service', tns='tns') - server = WsgiApplication(application) - - input_string = msgpack.packb([0, 0, "get_values", [["a", "c"]]]) - input_stream = BytesIO(input_string) - - ret = server({ - 'CONTENT_LENGTH': str(len(input_string)), - 'CONTENT_TYPE': 'application/x-msgpack', - 'HTTP_CONNECTION': 'close', - 'HTTP_CONTENT_LENGTH': str(len(input_string)), - 'HTTP_CONTENT_TYPE': 'application/x-msgpack', - 'PATH_INFO': '/', - 'QUERY_STRING': '', - 'SERVER_NAME': 'localhost', - 'SERVER_PORT': '7000', - 'REQUEST_METHOD': 'POST', - 'wsgi.url_scheme': 'http', - 'wsgi.input': input_stream, - }, start_response) - - ret = b''.join(ret) - print(repr(ret)) - ret = msgpack.unpackb(ret) - print(repr(ret)) - - s = [1, 0, None, {b'get_valuesResponse': { - b'get_valuesResult': [ - {b"KeyValuePair": {b'key': b'a', b'value': b'b'}}, - {b"KeyValuePair": {b'key': b'c', b'value': b'd'}}, - ] - }} - ] - print(s) - assert ret == s - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/protocol/test_soap11.py b/libs_crutch/contrib/spyne/test/protocol/test_soap11.py deleted file mode 100644 index 01d8346..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/test_soap11.py +++ /dev/null @@ -1,500 +0,0 @@ -#!/usr/bin/env python -# -# spyne - 
Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -# -# Most of the service tests are performed through the interop tests. -# - -import datetime -import unittest - -from lxml import etree -import pytz - -from spyne import MethodContext -from spyne.application import Application -from spyne.decorator import rpc -from spyne.interface.wsdl import Wsdl11 -from spyne.model.complex import Array -from spyne.model.complex import ComplexModel -from spyne.model.primitive import Unicode -from spyne.model.primitive import DateTime, Date -from spyne.model.primitive import Float -from spyne.model.primitive import Integer -from spyne.model.primitive import String -from spyne.model.fault import Fault -from spyne.protocol.soap import Soap11 -from spyne.service import Service -from spyne.server import ServerBase - -from spyne.protocol.soap import _from_soap -from spyne.protocol.soap import _parse_xml_string - -Application.transport = 'test' - - -def start_response(code, headers): - print(code, headers) - - -class Address(ComplexModel): - __namespace__ = "TestService" - - street = String - city = String - zip = Integer - since = DateTime - laditude = Float - longitude = Float - -class Person(ComplexModel): - __namespace__ = "TestService" - - name = String - birthdate = 
DateTime - age = Integer - addresses = Array(Address) - titles = Array(String) - -class Request(ComplexModel): - __namespace__ = "TestService" - - param1 = String - param2 = Integer - -class Response(ComplexModel): - __namespace__ = "TestService" - - param1 = Float - -class TypeNS1(ComplexModel): - __namespace__ = "TestService.NS1" - - s = String - i = Integer - -class TypeNS2(ComplexModel): - __namespace__ = "TestService.NS2" - - d = DateTime - f = Float - -class MultipleNamespaceService(Service): - @rpc(TypeNS1, TypeNS2) - def a(ctx, t1, t2): - return "OK" - -class TestService(Service): - @rpc(String, _returns=String) - def aa(ctx, s): - return s - - @rpc(String, Integer, _returns=DateTime) - def a(ctx, s, i): - return datetime.datetime.now() - - @rpc(Person, String, Address, _returns=Address) - def b(ctx, p, s, a): - return Address() - - @rpc(Person) - def d(ctx, Person): - pass - - @rpc(Person) - def e(ctx, Person): - pass - - @rpc(String, String, String, _returns=String, - _in_variable_names={'_from': 'from', '_self': 'self', - '_import': 'import'}, - _out_variable_name="return") - def f(ctx, _from, _self, _import): - return '1234' - - -class MultipleReturnService(Service): - @rpc(String, _returns=(String, String, String)) - def multi(ctx, s): - return s, 'a', 'b' - - -class TestSingle(unittest.TestCase): - def setUp(self): - self.app = Application([TestService], 'tns', - in_protocol=Soap11(), out_protocol=Soap11()) - self.app.transport = 'null.spyne' - self.srv = TestService() - - wsdl = Wsdl11(self.app.interface) - wsdl.build_interface_document('URL') - self.wsdl_str = wsdl.get_interface_document() - self.wsdl_doc = etree.fromstring(self.wsdl_str) - - def test_portypes(self): - porttype = self.wsdl_doc.find('{http://schemas.xmlsoap.org/wsdl/}portType') - self.assertEqual( - len(self.srv.public_methods), len(porttype.getchildren())) - - def test_override_param_names(self): - for n in [b'self', b'import', b'return', b'from']: - assert n in self.wsdl_str, '"%s" 
not in self.wsdl_str' - -class TestMultiple(unittest.TestCase): - def setUp(self): - self.app = Application([MultipleReturnService], 'tns', in_protocol=Soap11(), out_protocol=Soap11()) - self.app.transport = 'none' - self.wsdl = Wsdl11(self.app.interface) - self.wsdl.build_interface_document('URL') - - def test_multiple_return(self): - message_class = list(MultipleReturnService.public_methods.values())[0].out_message - message = message_class() - - self.assertEqual(len(message._type_info), 3) - - sent_xml = etree.Element('test') - self.app.out_protocol.to_parent(None, message_class, ('a', 'b', 'c'), - sent_xml, self.app.tns) - sent_xml = sent_xml[0] - - print((etree.tostring(sent_xml, pretty_print=True))) - response_data = self.app.out_protocol.from_element(None, message_class, sent_xml) - - self.assertEqual(len(response_data), 3) - self.assertEqual(response_data[0], 'a') - self.assertEqual(response_data[1], 'b') - self.assertEqual(response_data[2], 'c') - - -class TestSoap11(unittest.TestCase): - def test_simple_message(self): - m = ComplexModel.produce( - namespace=None, - type_name='myMessage', - members={'s': String, 'i': Integer} - ) - m.resolve_namespace(m, 'test') - - m_inst = m(s="a", i=43) - - e = etree.Element('test') - Soap11().to_parent(None, m, m_inst, e, m.get_namespace()) - e=e[0] - - self.assertEqual(e.tag, '{%s}myMessage' % m.get_namespace()) - - self.assertEqual(e.find('{%s}s' % m.get_namespace()).text, 'a') - self.assertEqual(e.find('{%s}i' % m.get_namespace()).text, '43') - - values = Soap11().from_element(None, m, e) - - self.assertEqual('a', values.s) - self.assertEqual(43, values.i) - - def test_href(self): - # the template. Start at pos 0, some servers complain if - # xml tag is not in the first line. 
- envelope_string = [ -b''' - - - - - - - - - - somemachine - someuser - - - machine2 - user2 - - -'''] - - root, xmlids = _parse_xml_string(envelope_string, - etree.XMLParser(), 'utf8') - header, payload = _from_soap(root, xmlids) - - # quick and dirty test href reconstruction - self.assertEqual(len(payload[0]), 2) - - def test_namespaces(self): - m = ComplexModel.produce( - namespace="some_namespace", - type_name='myMessage', - members={'s': String, 'i': Integer}, - ) - - mi = m() - mi.s = 'a' - - e = etree.Element('test') - Soap11().to_parent(None, m, mi, e, m.get_namespace()) - e=e[0] - - self.assertEqual(e.tag, '{some_namespace}myMessage') - - def test_class_to_parent(self): - m = ComplexModel.produce( - namespace=None, - type_name='myMessage', - members={'p': Person} - ) - - m.resolve_namespace(m, "punk") - - m_inst = m() - m_inst.p = Person() - m_inst.p.name = 'steve-o' - m_inst.p.age = 2 - m_inst.p.addresses = [] - - element=etree.Element('test') - Soap11().to_parent(None, m, m_inst, element, m.get_namespace()) - element=element[0] - - self.assertEqual(element.tag, '{%s}myMessage' % m.get_namespace()) - self.assertEqual(element[0].find('{%s}name' % Person.get_namespace()).text, - 'steve-o') - self.assertEqual(element[0].find('{%s}age' % Person.get_namespace()).text, '2') - self.assertEqual( - len(element[0].find('{%s}addresses' % Person.get_namespace())), 0) - - p1 = Soap11().from_element(None, m, element)[0] - - self.assertEqual(p1.name, m_inst.p.name) - self.assertEqual(p1.age, m_inst.p.age) - self.assertEqual(p1.addresses, []) - - def test_datetime_fixed_format(self): - # Soap should ignore formats - n = datetime.datetime.now(pytz.utc).replace(microsecond=0) - format = "%Y %m %d %H %M %S" - - element = etree.Element('test') - Soap11().to_parent(None, DateTime(dt_format=format), n, - element, 'some_namespace') - assert element[0].text == n.isoformat() - - dt = Soap11().from_element(None, DateTime(dt_format=format), element[0]) - assert n == dt - - def 
test_date_with_tzoffset(self): - for iso_d in ('2013-04-05', '2013-04-05+02:00', '2013-04-05-02:00', '2013-04-05Z'): - d = Soap11().from_unicode(Date, iso_d) - assert isinstance(d, datetime.date) == True - assert d.year == 2013 - assert d.month == 4 - assert d.day == 5 - - def test_to_parent_nested(self): - m = ComplexModel.produce( - namespace=None, - type_name='myMessage', - members={'p':Person} - ) - - m.resolve_namespace(m, "m") - - p = Person() - p.name = 'steve-o' - p.age = 2 - p.addresses = [] - - for i in range(0, 100): - a = Address() - a.street = '123 happy way' - a.zip = i - a.laditude = '45.22' - a.longitude = '444.234' - p.addresses.append(a) - - m_inst = m(p=p) - - element=etree.Element('test') - Soap11().to_parent(None, m, m_inst, element, m.get_namespace()) - element=element[0] - - self.assertEqual('{%s}myMessage' % m.get_namespace(), element.tag) - - addresses = element[0].find('{%s}addresses' % Person.get_namespace()) - self.assertEqual(100, len(addresses)) - self.assertEqual('0', addresses[0].find('{%s}zip' % - Address.get_namespace()).text) - - def test_fault_deserialization_missing_fault_actor(self): - element = etree.fromstring(b""" - - - - soap:Client - Some String - - - Some_Policy - - - - - """) - - ret = Soap11().from_element(None, Fault, element[0][0]) - assert ret.faultcode == "soap:Client" - - -# TestSoapHeader supporting classes. -# SOAP Header Elements defined by WS-Addressing. 
- -NAMESPACE_ADDRESSING = 'http://www.w3.org/2005/08/addressing' - -class Action (Unicode): - __type_name__ = "Action" - __namespace__ = NAMESPACE_ADDRESSING - -class MessageID (Unicode): - __type_name__ = "MessageID" - __namespace__ = NAMESPACE_ADDRESSING - -class RelatesTo (Unicode): - __type_name__ = "RelatesTo" - __namespace__ = NAMESPACE_ADDRESSING - -class SOAPServiceWithHeader(Service): - @rpc(Unicode, - _in_header=(Action, - MessageID, - RelatesTo), - _out_variable_name= 'status', - _returns=Unicode - ) - def someRequest(ctx, response): - print (response) - return 'OK' - -class TestSoapHeader(unittest.TestCase): - - def setUp(self): - self.app = Application([SOAPServiceWithHeader], - 'tns', - in_protocol=Soap11(), - out_protocol=Soap11()) - - def test_soap_input_header(self): - server = ServerBase(self.app) - initial_ctx = MethodContext(server, MethodContext.SERVER) - initial_ctx.in_string = [ - b''' - - /SomeAction - SomeMessageID - SomeRelatesToID - - - - OK - - - ''' - ] - - ctx, = server.generate_contexts(initial_ctx, in_string_charset='utf8') - server.get_in_object(ctx) - - self.assertEqual(ctx.in_header[0], '/SomeAction') - self.assertEqual(ctx.in_header[1], 'SomeMessageID') - self.assertEqual(ctx.in_header[2], 'SomeRelatesToID') - - def test_soap_input_header_order(self): - """ - Tests supports for input headers whose elements are provided in - different order than that defined in rpc declaration _in_header parameter. 
- """ - server = ServerBase(self.app) - initial_ctx = MethodContext(server, MethodContext.SERVER) - initial_ctx.in_string = [ - b''' - - SomeMessageID - SomeRelatesToID - /SomeAction - - - - OK - - - ''' - ] - - ctx, = server.generate_contexts(initial_ctx, in_string_charset='utf8') - server.get_in_object(ctx) - - self.assertEqual(ctx.in_header[0], '/SomeAction') - self.assertEqual(ctx.in_header[1], 'SomeMessageID') - self.assertEqual(ctx.in_header[2], 'SomeRelatesToID') - - - def test_soap_input_header_order_and_missing(self): - """ - Test that header ordering logic also works when an input header - element is missing. Confirm that it returns None for the missing - parameter. - """ - server = ServerBase(self.app) - initial_ctx = MethodContext(server, MethodContext.SERVER) - initial_ctx.in_string = [ - b''' - - SomeMessageID - /SomeAction - - - - OK - - - ''' - ] - - ctx, = server.generate_contexts(initial_ctx, in_string_charset='utf8') - server.get_in_object(ctx) - - self.assertEqual(ctx.in_header[0], '/SomeAction') - self.assertEqual(ctx.in_header[1], 'SomeMessageID') - self.assertEqual(ctx.in_header[2], None) - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/protocol/test_soap12.py b/libs_crutch/contrib/spyne/test/protocol/test_soap12.py deleted file mode 100644 index 30e3b18..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/test_soap12.py +++ /dev/null @@ -1,297 +0,0 @@ -#!/usr/bin/env python - -from __future__ import unicode_literals - -import unittest - -from lxml import etree -from lxml.doctestcompare import LXMLOutputChecker, PARSE_XML - -from spyne import Fault, Unicode, ByteArray -from spyne.application import Application -from spyne.const import xml as ns -from spyne.const.xml import NS_SOAP11_ENV -from spyne.decorator import srpc, rpc -from spyne.interface import Wsdl11 -from spyne.model.complex import ComplexModel -from spyne.model.primitive import Integer, String -from spyne.protocol.soap.mime import 
_join_attachment -from spyne.protocol.soap.soap12 import Soap12 -from spyne.protocol.xml import XmlDocument -from spyne.server.wsgi import WsgiApplication -from spyne.service import Service -from spyne.test.protocol.test_soap11 import TestService, TestSingle, \ - TestMultiple, MultipleReturnService -from spyne.util.six import BytesIO - - -def start_response(code, headers): - print(code, headers) - - -MTOM_REQUEST = b""" ---uuid:2e53e161-b47f-444a-b594-eb6b72e76997 -Content-Type: application/xop+xml; charset=UTF-8; - type="application/soap+xml"; action="sendDocument"; -Content-Transfer-Encoding: binary -Content-ID: - - - - - EA055406-5881-4F02-A3DC-9A5A7510D018.dat - - - - 26981FCD51C95FA47780400B7A45132F - - - - ---uuid:2e53e161-b47f-444a-b594-eb6b72e76997 -Content-Type: application/octet-stream -Content-Transfer-Encoding: binary -Content-ID: <04dfbca1-54b8-4631-a556-4addea6716ed-223384@cxf.apache.org> - -sample data ---uuid:2e53e161-b47f-444a-b594-eb6b72e76997-- -""" - - -# Service Classes -class DownloadPartFileResult(ComplexModel): - ErrorCode = Integer - ErrorMessage = String - Data = String - - -class TestSingleSoap12(TestSingle): - def setUp(self): - self.app = Application([TestService], 'tns', in_protocol=Soap12(), out_protocol=Soap12()) - self.app.transport = 'null.spyne' - self.srv = TestService() - - wsdl = Wsdl11(self.app.interface) - wsdl.build_interface_document('URL') - self.wsdl_str = wsdl.get_interface_document() - self.wsdl_doc = etree.fromstring(self.wsdl_str) - - -class TestMultipleSoap12(TestMultiple): - def setUp(self): - self.app = Application([MultipleReturnService], 'tns', in_protocol=Soap12(), out_protocol=Soap12()) - self.app.transport = 'none' - self.wsdl = Wsdl11(self.app.interface) - self.wsdl.build_interface_document('URL') - - -class TestSoap12(unittest.TestCase): - - def test_soap12(self): - element = etree.fromstring(b""" - - - - - env:Sender - - st:SomeDomainProblem - - - - - Some_Policy - - - - - """) - - so = Soap12() - ret = 
so.from_element(None, Fault, element[0][0]) - assert ret.faultcode == "env:Sender.st:SomeDomainProblem" - - def test_fault_generation(self): - class SoapException(Service): - @srpc() - def soap_exception(): - raise Fault( - "Client.Plausible.issue", "A plausible fault", 'http://faultactor.example.com', - detail={'some':'extra info'}) - app = Application([SoapException], 'tns', in_protocol=Soap12(), out_protocol=Soap12()) - - req = b""" - - - - - - """ - - server = WsgiApplication(app) - response = etree.fromstring(b''.join(server({ - 'QUERY_STRING': '', - 'PATH_INFO': '/call', - 'REQUEST_METHOD': 'POST', - 'CONTENT_TYPE': 'text/xml; charset=utf8', - 'wsgi.input': BytesIO(req) - }, start_response, "http://null"))) - - response_str = etree.tostring(response, pretty_print=True) - print(response_str) - - expected = b""" - - - - - A plausible fault - - http://faultactor.example.com - - soap12env:Sender - - Plausible - - issue - - - - - extra info - - - - """ - if not LXMLOutputChecker().check_output(expected, response_str, PARSE_XML): - raise Exception("Got: %s but expected: %s" % (response_str, expected)) - - def test_gen_fault_codes(self): - fault_string = "Server.Plausible.error" - value, faultstrings = Soap12().gen_fault_codes(faultstring=fault_string) - self.assertEqual(value, "%s:Receiver" %(Soap12.soap_env)) - self.assertEqual(faultstrings[0], "Plausible") - self.assertEqual(faultstrings[1], "error") - - fault_string = "UnknownFaultCode.Plausible.error" - with self.assertRaises(TypeError): - value, faultstrings = Soap12().gen_fault_codes(faultstring=fault_string) - - def test_mtom(self): - FILE_NAME = 'EA055406-5881-4F02-A3DC-9A5A7510D018.dat' - TNS = 'http://gib.gov.tr/vedop3/eFatura' - class SomeService(Service): - @rpc(Unicode(sub_name="fileName"), ByteArray(sub_name='binaryData'), - ByteArray(sub_name="hash"), _returns=Unicode) - def documentRequest(ctx, file_name, file_data, data_hash): - assert file_name == FILE_NAME - assert file_data == (b'sample data',) 
- - return file_name - - app = Application([SomeService], tns=TNS, - in_protocol=Soap12(), out_protocol=Soap12()) - - server = WsgiApplication(app) - response = etree.fromstring(b''.join(server({ - 'QUERY_STRING': '', - 'PATH_INFO': '/call', - 'REQUEST_METHOD': 'POST', - 'CONTENT_TYPE': 'Content-Type: multipart/related; ' - 'type="application/xop+xml"; ' - 'boundary="uuid:2e53e161-b47f-444a-b594-eb6b72e76997"; ' - 'start=""; ' - 'start-info="application/soap+xml"; action="sendDocument"', - 'wsgi.input': BytesIO(MTOM_REQUEST.replace(b"\n", b"\r\n")) - }, start_response, "http://null"))) - - response_str = etree.tostring(response, pretty_print=True) - print(response_str) - - nsdict = dict(tns=TNS) - - assert etree.fromstring(response_str) \ - .xpath(".//tns:documentRequestResult/text()", namespaces=nsdict) \ - == [FILE_NAME] - - def test_mtom_join_envelope_chunks(self): - FILE_NAME = 'EA055406-5881-4F02-A3DC-9A5A7510D018.dat' - TNS = 'http://gib.gov.tr/vedop3/eFatura' - - # large enough payload to be chunked - PAYLOAD = b"sample data " * 1024 - class SomeService(Service): - @rpc(Unicode(sub_name="fileName"), ByteArray(sub_name='binaryData'), - ByteArray(sub_name="hash"), _returns=Unicode) - def documentRequest(ctx, file_name, file_data, data_hash): - assert file_name == FILE_NAME - assert file_data == (PAYLOAD,) - - return file_name - - app = Application([SomeService], tns=TNS, - in_protocol=Soap12(), out_protocol=Soap12()) - - server = WsgiApplication(app, block_length=1024) - response = etree.fromstring(b''.join(server({ - 'QUERY_STRING': '', - 'PATH_INFO': '/call', - 'REQUEST_METHOD': 'POST', - 'CONTENT_TYPE': 'Content-Type: multipart/related; ' - 'type="application/xop+xml"; ' - 'boundary="uuid:2e53e161-b47f-444a-b594-eb6b72e76997"; ' - 'start=""; ' - 'start-info="application/soap+xml"; action="sendDocument"', - 'wsgi.input': BytesIO(MTOM_REQUEST - .replace(b"\n", b"\r\n") - .replace(b"sample data", PAYLOAD)), - }, start_response, "http://null"))) - - 
response_str = etree.tostring(response, pretty_print=True) - print(response_str) - - nsdict = dict(tns=TNS) - - assert etree.fromstring(response_str) \ - .xpath(".//tns:documentRequestResult/text()", namespaces=nsdict) \ - == [FILE_NAME] - - def test_bytes_join_attachment(self): - href_id = "http://tempuri.org/1/634133419330914808" - payload = "ANJNSLJNDYBC SFDJNIREMX:CMKSAJN" - envelope = ''' - - - - - 0 - - - - - - - - - ''' % href_id - - (joinedmsg, numreplaces) = _join_attachment(NS_SOAP11_ENV, - href_id, envelope, payload) - - soaptree = etree.fromstring(joinedmsg) - - body = soaptree.find(ns.SOAP11_ENV("Body")) - response = body.getchildren()[0] - result = response.getchildren()[0] - r = XmlDocument().from_element(None, DownloadPartFileResult, result) - - self.assertEqual(payload, r.Data) - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/protocol/test_xml.py b/libs_crutch/contrib/spyne/test/protocol/test_xml.py deleted file mode 100644 index 2ae3218..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/test_xml.py +++ /dev/null @@ -1,628 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import print_function - -import logging -logging.basicConfig(level=logging.DEBUG) - -import sys -import unittest -import decimal -import datetime - -from pprint import pprint -from base64 import b64encode - -from lxml import etree -from lxml.builder import E - -from spyne import MethodContext, rpc, ByteArray, File, AnyXml -from spyne.context import FakeContext -from spyne.const import RESULT_SUFFIX -from spyne.service import Service -from spyne.server import ServerBase -from spyne.application import Application -from spyne.decorator import srpc -from spyne.util.six import BytesIO -from spyne.model import Fault, Integer, Decimal, Unicode, Date, DateTime, \ - XmlData, Array, ComplexModel, XmlAttribute, Mandatory as M -from spyne.protocol.xml import XmlDocument, SchemaValidationError - -from spyne.util import six -from spyne.util.xml import get_xml_as_object, get_object_as_xml, \ - get_object_as_xml_polymorphic, get_xml_as_object_polymorphic -from spyne.server.wsgi import WsgiApplication -from spyne.const.xml import NS_XSI - - -class TestXml(unittest.TestCase): - def test_empty_string(self): - class a(ComplexModel): - b = Unicode - - elt = etree.fromstring('') - o = get_xml_as_object(elt, a) - - assert o.b == '' - - def test_xml_data(self): - class C(ComplexModel): - a = XmlData(Unicode) - b = XmlAttribute(Unicode) - - class SomeService(Service): - @srpc(C, _returns=C) - def some_call(c): - assert c.a == 'a' - assert c.b == 'b' - return c - - app = Application([SomeService], "tns", name="test_xml_data", - in_protocol=XmlDocument(), out_protocol=XmlDocument()) - server = ServerBase(app) - initial_ctx = MethodContext(server, MethodContext.SERVER) - initial_ctx.in_string = [ - b'' - b'a' - b'' - ] - - ctx, = 
server.generate_contexts(initial_ctx) - server.get_in_object(ctx) - server.get_out_object(ctx) - server.get_out_string(ctx) - - print(ctx.out_string) - pprint(app.interface.nsmap) - - ret = etree.fromstring(b''.join(ctx.out_string)).xpath( - '//tns:some_call' + RESULT_SUFFIX, namespaces=app.interface.nsmap)[0] - - print(etree.tostring(ret, pretty_print=True)) - - assert ret.text == "a" - assert ret.attrib['b'] == "b" - - def test_wrapped_array(self): - parent = etree.Element('parent') - val = ['a', 'b'] - cls = Array(Unicode, namespace='tns') - XmlDocument().to_parent(None, cls, val, parent, 'tns') - print(etree.tostring(parent, pretty_print=True)) - xpath = parent.xpath('//x:stringArray/x:string/text()', - namespaces={'x': 'tns'}) - assert xpath == val - - def test_simple_array(self): - class cls(ComplexModel): - __namespace__ = 'tns' - s = Unicode(max_occurs='unbounded') - val = cls(s=['a', 'b']) - - parent = etree.Element('parent') - XmlDocument().to_parent(None, cls, val, parent, 'tns') - print(etree.tostring(parent, pretty_print=True)) - xpath = parent.xpath('//x:cls/x:s/text()', namespaces={'x': 'tns'}) - assert xpath == val.s - - def test_decimal(self): - d = decimal.Decimal('1e100') - - class SomeService(Service): - @srpc(Decimal(120,4), _returns=Decimal) - def some_call(p): - print(p) - print(type(p)) - assert type(p) == decimal.Decimal - assert d == p - return p - - app = Application([SomeService], "tns", in_protocol=XmlDocument(), - out_protocol=XmlDocument()) - server = ServerBase(app) - initial_ctx = MethodContext(server, MethodContext.SERVER) - initial_ctx.in_string = [ - b'

', - str(d).encode('ascii'), - b'

' - ] - - ctx, = server.generate_contexts(initial_ctx) - server.get_in_object(ctx) - server.get_out_object(ctx) - server.get_out_string(ctx) - - elt = etree.fromstring(b''.join(ctx.out_string)) - - print(etree.tostring(elt, pretty_print=True)) - target = elt.xpath('//tns:some_callResult/text()', - namespaces=app.interface.nsmap)[0] - assert target == str(d) - - def test_subs(self): - from lxml import etree - from spyne.util.xml import get_xml_as_object - from spyne.util.xml import get_object_as_xml - - m = { - "s0": "aa", - "s2": "cc", - "s3": "dd", - } - - class C(ComplexModel): - __namespace__ = "aa" - a = Integer - b = Integer(sub_name="bb") - c = Integer(sub_ns="cc") - d = Integer(sub_ns="dd", sub_name="dd") - - elt = get_object_as_xml(C(a=1, b=2, c=3, d=4), C) - print(etree.tostring(elt, pretty_print=True)) - - assert elt.xpath("s0:a/text()", namespaces=m) == ["1"] - assert elt.xpath("s0:bb/text()", namespaces=m) == ["2"] - assert elt.xpath("s2:c/text()", namespaces=m) == ["3"] - assert elt.xpath("s3:dd/text()", namespaces=m) == ["4"] - - c = get_xml_as_object(elt, C) - print(c) - assert c.a == 1 - assert c.b == 2 - assert c.c == 3 - assert c.d == 4 - - def test_sub_attributes(self): - from lxml import etree - from spyne.util.xml import get_xml_as_object - from spyne.util.xml import get_object_as_xml - - m = { - "s0": "aa", - "s2": "cc", - "s3": "dd", - } - - class C(ComplexModel): - __namespace__ = "aa" - a = XmlAttribute(Integer) - b = XmlAttribute(Integer(sub_name="bb")) - c = XmlAttribute(Integer(sub_ns="cc")) - d = XmlAttribute(Integer(sub_ns="dd", sub_name="dd")) - - elt = get_object_as_xml(C(a=1, b=2, c=3, d=4), C) - print(etree.tostring(elt, pretty_print=True)) - - assert elt.xpath("//*/@a") == ["1"] - assert elt.xpath("//*/@bb") == ["2"] - assert elt.xpath("//*/@s2:c", namespaces=m) == ["3"] - assert elt.xpath("//*/@s3:dd", namespaces=m) == ["4"] - - c = get_xml_as_object(elt, C) - print(c) - assert c.a == 1 - assert c.b == 2 - assert c.c == 3 - 
assert c.d == 4 - - def test_dates(self): - d = Date - xml_dates = [ - etree.fromstring(b'2013-04-05'), - etree.fromstring(b'2013-04-05+02:00'), - etree.fromstring(b'2013-04-05-02:00'), - etree.fromstring(b'2013-04-05Z'), - ] - - for xml_date in xml_dates: - c = get_xml_as_object(xml_date, d) - assert isinstance(c, datetime.date) == True - assert c.year == 2013 - assert c.month == 4 - assert c.day == 5 - - def test_datetime_usec(self): - fs = etree.fromstring - d = get_xml_as_object(fs('2013-04-05T06:07:08.123456'), DateTime) - assert d.microsecond == 123456 - - # rounds up - d = get_xml_as_object(fs('2013-04-05T06:07:08.1234567'), DateTime) - assert d.microsecond == 123457 - - # rounds down - d = get_xml_as_object(fs('2013-04-05T06:07:08.1234564'), DateTime) - assert d.microsecond == 123456 - - # rounds up as well - d = get_xml_as_object(fs('2013-04-05T06:07:08.1234565'), DateTime) - # FIXME: this is very interesting. why? - if not six.PY2: - assert d.microsecond == 123456 - else: - assert d.microsecond == 123457 - - def _get_ctx(self, server, in_string): - initial_ctx = MethodContext(server, MethodContext.SERVER) - initial_ctx.in_string = in_string - ctx, = server.generate_contexts(initial_ctx) - server.get_in_object(ctx) - return ctx - - def test_mandatory_elements(self): - class SomeService(Service): - @srpc(M(Unicode), _returns=Unicode) - def some_call(s): - assert s == 'hello' - return s - - app = Application([SomeService], "tns", name="test_mandatory_elements", - in_protocol=XmlDocument(validator='lxml'), - out_protocol=XmlDocument()) - server = ServerBase(app) - - # Valid call with all mandatory elements in - ctx = self._get_ctx(server, [ - b'' - b'hello' - b'' - ]) - server.get_out_object(ctx) - server.get_out_string(ctx) - ret = etree.fromstring(b''.join(ctx.out_string)).xpath( - '//tns:some_call%s/text()' % RESULT_SUFFIX, - namespaces=app.interface.nsmap)[0] - assert ret == 'hello' - - # Invalid call - ctx = self._get_ctx(server, [ - b'' - # no mandatory 
elements here... - b'' - ]) - self.assertRaises(SchemaValidationError, server.get_out_object, ctx) - - def test_unicode_chars_in_exception(self): - class SomeService(Service): - @srpc(Unicode(pattern=u'x'), _returns=Unicode) - def some_call(s): - test(should, never, reach, here) - - app = Application([SomeService], "tns", name="test_mandatory_elements", - in_protocol=XmlDocument(validator='lxml'), - out_protocol=XmlDocument()) - server = WsgiApplication(app) - - req = ( - u'' - u'Ğ' - u'' - ).encode('utf8') - - print("AAA") - resp = server({ - 'QUERY_STRING': '', - 'PATH_INFO': '/', - 'REQUEST_METHOD': 'POST', - 'SERVER_NAME': 'localhost', - 'SERVER_PORT': '80', - 'wsgi.input': BytesIO(req), - "wsgi.url_scheme": 'http', - }, lambda x, y: print(x,y)) - print("AAA") - - assert u'Ğ'.encode('utf8') in b''.join(resp) - - def test_mandatory_subelements(self): - class C(ComplexModel): - foo = M(Unicode) - - class SomeService(Service): - @srpc(C.customize(min_occurs=1), _returns=Unicode) - def some_call(c): - assert c is not None - assert c.foo == 'hello' - return c.foo - - app = Application( - [SomeService], "tns", name="test_mandatory_subelements", - in_protocol=XmlDocument(validator='lxml'), - out_protocol=XmlDocument()) - server = ServerBase(app) - - ctx = self._get_ctx(server, [ - b'' - # no mandatory elements at all... - b'' - ]) - self.assertRaises(SchemaValidationError, server.get_out_object, ctx) - - ctx = self._get_ctx(server, [ - b'' - b'' - # no mandatory elements here... 
- b'' - b'' - ]) - self.assertRaises(SchemaValidationError, server.get_out_object, ctx) - - def test_mandatory_element_attributes(self): - class C(ComplexModel): - bar = XmlAttribute(M(Unicode)) - - class SomeService(Service): - @srpc(C.customize(min_occurs=1), _returns=Unicode) - def some_call(c): - assert c is not None - assert hasattr(c, 'foo') - assert c.foo == 'hello' - return c.foo - - app = Application( - [SomeService], "tns", name="test_mandatory_element_attributes", - in_protocol=XmlDocument(validator='lxml'), - out_protocol=XmlDocument()) - server = ServerBase(app) - - ctx = self._get_ctx(server, [ - b'' - # no mandatory elements at all... - b'' - ]) - self.assertRaises(SchemaValidationError, server.get_out_object, ctx) - - ctx = self._get_ctx(server, [ - b'' - b'' - # no mandatory elements here... - b'' - b'' - ]) - self.assertRaises(SchemaValidationError, server.get_out_object, ctx) - - def test_bare_sub_name_ns(self): - class Action(ComplexModel): - class Attributes(ComplexModel.Attributes): - sub_ns = "SOME_NS" - sub_name = "Action" - data = XmlData(Unicode) - must_understand = XmlAttribute(Unicode) - - elt = get_object_as_xml(Action("x", must_understand="y"), Action) - eltstr = etree.tostring(elt) - print(eltstr) - assert eltstr == b'x' - - def test_null_mandatory_attribute(self): - class Action (ComplexModel): - data = XmlAttribute(M(Unicode)) - - elt = get_object_as_xml(Action(), Action) - eltstr = etree.tostring(elt) - print(eltstr) - assert eltstr == b'' - - def test_bytearray(self): - v = b'aaaa' - elt = get_object_as_xml([v], ByteArray, 'B') - eltstr = etree.tostring(elt) - print(eltstr) - assert elt.text == b64encode(v).decode('ascii') - - def test_any_xml_text(self): - v = u"" - elt = get_object_as_xml(v, AnyXml, 'B', no_namespace=True) - eltstr = etree.tostring(elt) - print(eltstr) - assert etree.tostring(elt[0], encoding="unicode") == v - - def test_any_xml_bytes(self): - v = b"" - - elt = get_object_as_xml(v, AnyXml, 'B', 
no_namespace=True) - eltstr = etree.tostring(elt) - print(eltstr) - assert etree.tostring(elt[0]) == v - - def test_any_xml_elt(self): - v = E.roots(E.bloody(E.roots())) - elt = get_object_as_xml(v, AnyXml, 'B') - eltstr = etree.tostring(elt) - print(eltstr) - assert etree.tostring(elt[0]) == etree.tostring(v) - - def test_file(self): - v = b'aaaa' - f = BytesIO(v) - elt = get_object_as_xml(File.Value(handle=f), File, 'B') - eltstr = etree.tostring(elt) - print(eltstr) - assert elt.text == b64encode(v).decode('ascii') - - def test_fault_detail_as_dict(self): - elt = get_object_as_xml(Fault(detail={"this": "that"}), Fault) - eltstr = etree.tostring(elt) - print(eltstr) - assert b'that' in eltstr - - def test_xml_encoding(self): - ctx = FakeContext(out_document=E.rain(u"yağmur")) - XmlDocument(encoding='iso-8859-9').create_out_string(ctx) - s = b''.join(ctx.out_string) - assert u"ğ".encode('iso-8859-9') in s - - def test_default(self): - class SomeComplexModel(ComplexModel): - _type_info = [ - ('a', Unicode), - ('b', Unicode(default='default')), - ] - - obj = XmlDocument().from_element( - None, SomeComplexModel, - etree.fromstring(""" - - string - - """) - ) - - # xml schema says it should be None - assert obj.b == 'default' - - obj = XmlDocument().from_element( - None, SomeComplexModel, - etree.fromstring(""" - - string - - - """ % NS_XSI) - ) - - # xml schema says it should be 'default' - assert obj.b == 'default' - - obj = XmlDocument(replace_null_with_default=False).from_element( - None, SomeComplexModel, - etree.fromstring(""" - - string - - - """ % NS_XSI) - ) - - # xml schema says it should be 'default' - assert obj.b is None - - def test_polymorphic_roundtrip(self): - - class B(ComplexModel): - __namespace__ = 'some_ns' - _type_info = { - '_b': Unicode, - } - - def __init__(self): - super(B, self).__init__() - self._b = "b" - - class C(B): - __namespace__ = 'some_ns' - _type_info = { - '_c': Unicode, - } - - def __init__(self): - super(C, self).__init__() - 
self._c = "c" - - class A(ComplexModel): - __namespace__ = 'some_ns' - _type_info = { - '_a': Unicode, - '_b': B, - } - - def __init__(self, b=None): - super(A, self).__init__() - self._a = 'a' - self._b = b - - a = A(b=C()) - elt = get_object_as_xml_polymorphic(a, A) - xml_string = etree.tostring(elt, pretty_print=True) - if six.PY2: - print(xml_string, end="") - else: - sys.stdout.buffer.write(xml_string) - - element_tree = etree.fromstring(xml_string) - new_a = get_xml_as_object_polymorphic(elt, A) - - assert new_a._a == a._a, (a._a, new_a._a) - assert new_a._b._b == a._b._b, (a._b._b, new_a._b._b) - assert new_a._b._c == a._b._c, (a._b._c, new_a._b._c) - - -class TestIncremental(unittest.TestCase): - def test_one(self): - class SomeComplexModel(ComplexModel): - s = Unicode - i = Integer - - v = SomeComplexModel(s='a', i=1), - - class SomeService(Service): - @rpc(_returns=SomeComplexModel) - def get(ctx): - return v - - desc = SomeService.public_methods['get'] - ctx = FakeContext(out_object=v, descriptor=desc) - ostr = ctx.out_stream = BytesIO() - XmlDocument(Application([SomeService], __name__)) \ - .serialize(ctx, XmlDocument.RESPONSE) - - elt = etree.fromstring(ostr.getvalue()) - print(etree.tostring(elt, pretty_print=True)) - - assert elt.xpath('x:getResult/x:i/text()', - namespaces={'x':__name__}) == ['1'] - assert elt.xpath('x:getResult/x:s/text()', - namespaces={'x':__name__}) == ['a'] - - def test_many(self): - class SomeComplexModel(ComplexModel): - s = Unicode - i = Integer - - v = [ - SomeComplexModel(s='a', i=1), - SomeComplexModel(s='b', i=2), - SomeComplexModel(s='c', i=3), - SomeComplexModel(s='d', i=4), - SomeComplexModel(s='e', i=5), - ] - - class SomeService(Service): - @rpc(_returns=Array(SomeComplexModel)) - def get(ctx): - return v - - desc = SomeService.public_methods['get'] - ctx = FakeContext(out_object=[v], descriptor=desc) - ostr = ctx.out_stream = BytesIO() - XmlDocument(Application([SomeService], __name__)) \ - .serialize(ctx, 
XmlDocument.RESPONSE) - - elt = etree.fromstring(ostr.getvalue()) - print(etree.tostring(elt, pretty_print=True)) - - assert elt.xpath('x:getResult/x:SomeComplexModel/x:i/text()', - namespaces={'x': __name__}) == ['1', '2', '3', '4', '5'] - assert elt.xpath('x:getResult/x:SomeComplexModel/x:s/text()', - namespaces={'x': __name__}) == ['a', 'b', 'c', 'd', 'e'] - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/protocol/test_yaml.py b/libs_crutch/contrib/spyne/test/protocol/test_yaml.py deleted file mode 100644 index 2cb224c..0000000 --- a/libs_crutch/contrib/spyne/test/protocol/test_yaml.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import unittest - -from spyne.test.protocol._test_dictdoc import TDictDocumentTest -from spyne.protocol.yaml import YamlDocument - -from spyne import MethodContext -from spyne.application import Application -from spyne.decorator import srpc -from spyne.service import Service -from spyne.server import ServerBase - -from spyne.protocol.yaml import yaml -yaml.dumps = yaml.dump -yaml.loads = yaml.load - -TestYamlDocument = TDictDocumentTest(yaml, YamlDocument, YamlDocument().out_kwargs) - - -class Test(unittest.TestCase): - def test_invalid_input(self): - class SomeService(Service): - @srpc() - def yay(): - pass - - app = Application([SomeService], 'tns', - in_protocol=YamlDocument(), - out_protocol=YamlDocument()) - - server = ServerBase(app) - - initial_ctx = MethodContext(server, MethodContext.SERVER) - initial_ctx.in_string = [b'{'] - ctx, = server.generate_contexts(initial_ctx) - assert ctx.in_error.faultcode == 'Client.YamlDecodeError' - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/regen_wsdl.py b/libs_crutch/contrib/spyne/test/regen_wsdl.py deleted file mode 100644 index 2ad5f75..0000000 --- a/libs_crutch/contrib/spyne/test/regen_wsdl.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -from lxml import etree -from spyne.test.sort_wsdl import sort_wsdl -from spyne.interface.wsdl import Wsdl11 - -from spyne.test.interop.server._service import services -from spyne.application import Application - -app = Application(services, 'spyne.test.interop.server') -app.transport = 'http://schemas.xmlsoap.org/soap/http' -wsdl = Wsdl11(app.interface) -wsdl.build_interface_document('http://localhost:9754/') -elt = etree.ElementTree(etree.fromstring(wsdl.get_interface_document())) -sort_wsdl(elt) -s = 
etree.tostring(elt) - -# minidom's serialization seems to put attributes in alphabetic order. -# this is exactly what we want here. -from xml.dom.minidom import parseString -doc = parseString(s) -s = doc.toprettyxml(indent=' ', newl='\n', encoding='utf8') -s = s.replace(" xmlns:","\n xmlns:") - -open('wsdl.xml', 'w').write(s) -print('wsdl.xml written') diff --git a/libs_crutch/contrib/spyne/test/sort_wsdl.py b/libs_crutch/contrib/spyne/test/sort_wsdl.py deleted file mode 100644 index 96a4581..0000000 --- a/libs_crutch/contrib/spyne/test/sort_wsdl.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""Quick hack to sort the wsdl. it's helpful when comparing the wsdl output -from two spyne versions. 
-""" - -ns_wsdl = "http://schemas.xmlsoap.org/wsdl/" -ns_schema = "http://www.w3.org/2001/XMLSchema" - -import sys - -from lxml import etree - - -def cache_order(l, ns): - return dict([ ("{%s}%s" % (ns, a), l.index(a)) for a in l]) - - -wsdl_order = ('types', 'message', 'service', 'portType', 'binding') -wsdl_order = cache_order(wsdl_order, ns_wsdl) - -schema_order = ('import', 'element', 'simpleType', 'complexType', 'attribute') -schema_order = cache_order(schema_order, ns_schema) - -parser = etree.XMLParser(remove_blank_text=True) - - -def main(): - tree = etree.parse(sys.stdin, parser=parser) - sort_wsdl(tree) - tree.write(sys.stdout, encoding="UTF-8", xml_declaration=True) - return 0 - - -def sort_wsdl(tree): - l0 = [] - type_node = None - - for e in tree.getroot(): - if e.tag == "{%s}types" % ns_wsdl: - assert type_node is None - type_node = e - - else: - l0.append(e) - e.getparent().remove(e) - - l0.sort(key=lambda e: (wsdl_order[e.tag], e.attrib['name'])) - for e in l0: - tree.getroot().append(e) - - for e in tree.getroot(): - if e.tag in ("{%s}portType" % ns_wsdl, "{%s}binding" % ns_wsdl, "{%s}operation" % ns_wsdl): - nodes = [] - for p in e.getchildren(): - nodes.append(p) - - nodes.sort(key=lambda e: e.attrib.get('name', '0')) - - for p in nodes: - e.append(p) - - schemas = [] - - for e in type_node: - schemas.append(e) - e.getparent().remove(e) - - schemas.sort(key=lambda e: e.attrib["targetNamespace"]) - - for s in schemas: - type_node.append(s) - - for s in schemas: - nodes = [] - for e in s: - nodes.append(e) - e.getparent().remove(e) - - nodes.sort(key=lambda e: (schema_order[e.tag], e.attrib.get('name', '\0'))) - - for e in nodes: - s.append(e) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/libs_crutch/contrib/spyne/test/test_null_server.py b/libs_crutch/contrib/spyne/test/test_null_server.py deleted file mode 100644 index 26307fb..0000000 --- a/libs_crutch/contrib/spyne/test/test_null_server.py +++ /dev/null @@ -1,134 +0,0 @@ 
-#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import gc -import unittest - -from lxml import etree - -from spyne import const -from spyne.interface.wsdl import Wsdl11 -from spyne.protocol.xml import XmlDocument - -from spyne.model.complex import Array -from spyne.model.primitive import Boolean -from spyne.model.primitive import String -from spyne.application import Application -from spyne.decorator import srpc -from spyne.service import Service -from spyne.server.null import NullServer - -class TestNullServer(unittest.TestCase): - def test_call_one_arg(self): - queue = set() - - class MessageService(Service): - @srpc(String) - def send_message(s): - queue.add(s) - - application = Application([MessageService], 'some_tns', - in_protocol=XmlDocument(), out_protocol=XmlDocument()) - - server = NullServer(application) - server.service.send_message("zabaaa") - - assert set(["zabaaa"]) == queue - - def test_call_two_args(self): - queue = set() - - class MessageService(Service): - @srpc(String, String) - def send_message(s, k): - queue.add((s,k)) - - application = Application([MessageService], 'some_tns', - in_protocol=XmlDocument(), out_protocol=XmlDocument()) - - server = NullServer(application) - - queue.clear() - 
server.service.send_message("zabaaa", k="hobaa") - assert set([("zabaaa","hobaa")]) == queue - - queue.clear() - server.service.send_message(k="hobaa") - assert set([(None,"hobaa")]) == queue - - queue.clear() - server.service.send_message("zobaaa", s="hobaa") - assert set([("hobaa", None)]) == queue - - def test_ostr(self): - queue = set() - - class MessageService(Service): - @srpc(String, String, _returns=Array(String)) - def send_message(s, k): - queue.add((s, k)) - return [s, k] - - application = Application([MessageService], 'some_tns', - in_protocol=XmlDocument(), out_protocol=XmlDocument()) - - ostr_server = NullServer(application, ostr=True) - - queue.clear() - ret = ostr_server.service.send_message("zabaaa", k="hobaa") - assert set([("zabaaa","hobaa")]) == queue - assert etree.fromstring(b''.join(ret)).xpath('//tns:string/text()', - namespaces=application.interface.nsmap) == ['zabaaa', 'hobaa'] - - queue.clear() - ostr_server.service.send_message(k="hobaa") - assert set([(None,"hobaa")]) == queue - - queue.clear() - ostr_server.service.send_message("zobaaa", s="hobaa") - assert set([("hobaa", None)]) == queue - - def test_no_gc_collect(self): - class PingService(Service): - @srpc(_returns=Boolean) - def ping(): - return True - - application = Application( - [PingService], 'some_tns', - in_protocol=XmlDocument(), out_protocol=XmlDocument()) - - server = NullServer(application) - origin_collect = gc.collect - origin_MIN_GC_INTERVAL = const.MIN_GC_INTERVAL - try: - gc.collect = lambda : 1/0 - with self.assertRaises(ZeroDivisionError): - const.MIN_GC_INTERVAL = 0 - server.service.ping() - # No raise - const.MIN_GC_INTERVAL = float('inf') - server.service.ping() - finally: - gc.collect = origin_collect - const.MIN_GC_INTERVAL = origin_MIN_GC_INTERVAL - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/test_service.py b/libs_crutch/contrib/spyne/test/test_service.py deleted file mode 100644 index b8d8acb..0000000 --- 
a/libs_crutch/contrib/spyne/test/test_service.py +++ /dev/null @@ -1,511 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -# -# Most of the service tests are performed through the interop tests. -# - -import logging -logging.basicConfig(level=logging.DEBUG) - -import unittest - -from spyne.util.six import BytesIO - -from lxml import etree - -from spyne import LogicError -from spyne.const import RESPONSE_SUFFIX -from spyne.model.primitive import NATIVE_MAP - -from spyne.service import Service -from spyne.decorator import rpc, srpc -from spyne.application import Application -from spyne.auxproc.sync import SyncAuxProc -from spyne.auxproc.thread import ThreadAuxProc -from spyne.protocol.http import HttpRpc -from spyne.protocol.soap import Soap11 -from spyne.server.null import NullServer -from spyne.server.wsgi import WsgiApplication -from spyne.model import Array, SelfReference, Iterable, ComplexModel, String, \ - Unicode - -Application.transport = 'test' - - -def start_response(code, headers): - print(code, headers) - - -class MultipleMethods1(Service): - @srpc(String) - def multi(s): - return "%r multi 1" % s - - -class MultipleMethods2(Service): - @srpc(String) - def multi(s): - return "%r multi 2" % s - - 
-class TestEvents(unittest.TestCase): - def test_method_exception(self): - from spyne.protocol.xml import XmlDocument - - h = [0] - - def on_method_exception_object(ctx): - assert ctx.out_error is not None - from spyne.protocol.xml import SchemaValidationError - assert isinstance(ctx.out_error, SchemaValidationError) - logging.error("method_exception_object: %r", repr(ctx.out_error)) - h[0] += 1 - - def on_method_exception_document(ctx): - assert ctx.out_error is not None - from spyne.protocol.xml import SchemaValidationError - assert isinstance(ctx.out_error, SchemaValidationError) - logging.error("method_exception_document: %r", - etree.tostring(ctx.out_document)) - h[0] += 1 - - class SomeService(Service): - @rpc(Unicode(5)) - def some_call(ctx, some_str): - print(some_str) - - app = Application([SomeService], "some_tns", - in_protocol=XmlDocument(validator='lxml'), out_protocol=Soap11()) - - app.event_manager.add_listener( - "method_exception_object", on_method_exception_object) - - app.event_manager.add_listener( - "method_exception_document", on_method_exception_document) - - # this shouldn't be called because: - # 1. document isn't validated - # 2. hence; document can't be parsed - # 3. hence; document can't be mapped to a function - # 4. hence; document can't be mapped to a service class - # 5. hence; no handlers from the service class is invoked. - # 6. 
hence; the h[0] == 2 check (instead of 3) - SomeService.event_manager.add_listener( - "method_exception_object", on_method_exception_object) - - wsgi_app = WsgiApplication(app) - - xml_request = b""" - - 123456 - - """ - - _ = b''.join(wsgi_app({ - 'PATH_INFO': '/', - 'SERVER_NAME': 'localhost', - 'SERVER_PORT': '7000', - 'REQUEST_METHOD': 'POST', - 'wsgi.url_scheme': 'http', - 'wsgi.input': BytesIO(xml_request), - }, start_response)) - - assert h[0] == 2 - - -class TestMultipleMethods(unittest.TestCase): - def test_single_method(self): - try: - Application([MultipleMethods1, MultipleMethods2], 'tns', - in_protocol=Soap11(), out_protocol=Soap11()) - - except ValueError: - pass - - else: - raise Exception('must fail.') - - def test_simple_aux_nullserver(self): - data = [] - - class SomeService(Service): - @srpc(String) - def call(s): - data.append(s) - - class AuxService(Service): - __aux__ = SyncAuxProc() - - @srpc(String) - def call(s): - data.append(s) - - app = Application([SomeService, AuxService], 'tns', 'name', Soap11(), - Soap11()) - server = NullServer(app) - server.service.call("hey") - - assert data == ['hey', 'hey'] - - def test_namespace_in_message_name(self): - class S(Service): - @srpc(String, _in_message_name='{tns}inMessageName') - def call(s): - pass - - app = Application([S], 'tns', 'name', Soap11(), Soap11()) - - def test_simple_aux_wsgi(self): - data = [] - - class SomeService(Service): - @srpc(String, _returns=String) - def call(s): - data.append(s) - - class AuxService(Service): - __aux__ = SyncAuxProc() - - @srpc(String, _returns=String) - def call(s): - data.append(s) - - app = Application([SomeService, AuxService], 'tns', - in_protocol=HttpRpc(), out_protocol=HttpRpc()) - - server = WsgiApplication(app) - server({ - 'QUERY_STRING': 's=hey', - 'PATH_INFO': '/call', - 'REQUEST_METHOD': 'POST', - 'CONTENT_TYPE': 'text/xml; charset=utf8', - 'SERVER_NAME': 'localhost', - 'wsgi.input': BytesIO(), - }, start_response, "http://null") - - assert 
data == ['hey', 'hey'] - - def test_thread_aux_wsgi(self): - import logging - logging.basicConfig(level=logging.DEBUG) - - data = set() - - class SomeService(Service): - @srpc(String, _returns=String) - def call(s): - data.add(s) - - class AuxService(Service): - __aux__ = ThreadAuxProc() - - @srpc(String, _returns=String) - def call(s): - data.add(s + "aux") - - app = Application([SomeService, AuxService], 'tns', - in_protocol=HttpRpc(), out_protocol=HttpRpc()) - server = WsgiApplication(app) - server({ - 'QUERY_STRING': 's=hey', - 'PATH_INFO': '/call', - 'REQUEST_METHOD': 'POST', - 'CONTENT_TYPE': 'text/xml; charset=utf8', - 'SERVER_NAME': 'localhost', - 'wsgi.input': BytesIO(), - }, start_response, "http://null") - - import time - time.sleep(1) - - assert data == set(['hey', 'heyaux']) - - def test_mixing_primary_and_aux_methods(self): - try: - class SomeService(Service): - @srpc(String, _returns=String, _aux=ThreadAuxProc()) - def call(s): - pass - - @srpc(String, _returns=String) - def mall(s): - pass - except Exception: - pass - else: - raise Exception("must fail with 'Exception: you can't mix aux and " - "non-aux methods in a single service definition.'") - - def __run_service(self, service): - app = Application([service], 'tns', in_protocol=HttpRpc(), - out_protocol=Soap11()) - server = WsgiApplication(app) - - return_string = b''.join(server({ - 'QUERY_STRING': '', - 'PATH_INFO': '/some_call', - 'REQUEST_METHOD': 'POST', - 'CONTENT_TYPE': 'text/xml; charset=utf8', - 'SERVER_NAME': 'localhost', - 'wsgi.input': BytesIO(b""), - }, start_response, "http://null")) - - elt = etree.fromstring(return_string) - print(etree.tostring(elt, pretty_print=True)) - - return elt, app.interface.nsmap - - def test_settings_headers_from_user_code(self): - class RespHeader(ComplexModel): - __namespace__ = 'tns' - Elem1 = String - - # test header in service definition - class SomeService(Service): - __out_header__ = RespHeader - - @rpc() - def some_call(ctx): - ctx.out_header = 
RespHeader() - ctx.out_header.Elem1 = 'Test1' - - elt, nsmap = self.__run_service(SomeService) - query = '/soap11env:Envelope/soap11env:Header/tns:RespHeader' \ - '/tns:Elem1/text()' - - assert elt.xpath(query, namespaces=nsmap)[0] == 'Test1' - - # test header in decorator - class SomeService(Service): - @rpc(_out_header=RespHeader) - def some_call(ctx): - ctx.out_header = RespHeader() - ctx.out_header.Elem1 = 'Test1' - - elt, nsmap = self.__run_service(SomeService) - query = '/soap11env:Envelope/soap11env:Header/tns:RespHeader/tns' \ - ':Elem1/text()' - assert elt.xpath(query, namespaces=nsmap)[0] == 'Test1' - - # test no header - class SomeService(Service): - @rpc() - def some_call(ctx): - ctx.out_header = RespHeader() - ctx.out_header.Elem1 = 'Test1' - - elt, nsmap = self.__run_service(SomeService) - query = '/soap11env:Envelope/soap11env:Header/tns:RespHeader' \ - '/tns:Elem1/text()' - assert len(elt.xpath(query, namespaces=nsmap)) == 0 - - -class TestNativeTypes(unittest.TestCase): - def test_native_types(self): - for t in NATIVE_MAP: - class SomeService(Service): - @rpc(t) - def some_call(ctx, arg): - pass - - nt, = SomeService.public_methods['some_call'].in_message \ - ._type_info.values() - - assert issubclass(nt, NATIVE_MAP[t]) - - def test_native_types_in_arrays(self): - for t in NATIVE_MAP: - class SomeService(Service): - @rpc(Array(t)) - def some_call(ctx, arg): - pass - - nt, = SomeService.public_methods['some_call'].in_message \ - ._type_info.values() - nt, = nt._type_info.values() - assert issubclass(nt, NATIVE_MAP[t]) - - -class TestBodyStyle(unittest.TestCase): - - def test_soap_bare_empty_output(self): - class SomeService(Service): - @rpc(String, _body_style='bare') - def some_call(ctx, s): - assert s == 'abc' - - app = Application([SomeService], 'tns', in_protocol=Soap11(), - out_protocol=Soap11(cleanup_namespaces=True)) - - req = b""" - - - abc - - - """ - - server = WsgiApplication(app) - resp = etree.fromstring(b''.join(server({ - 
'QUERY_STRING': '', - 'PATH_INFO': '/call', - 'REQUEST_METHOD': 'POST', - 'CONTENT_TYPE': 'text/xml; charset=utf8', - 'SERVER_NAME': 'localhost', - 'wsgi.input': BytesIO(req), - }, start_response, "http://null"))) - - print(etree.tostring(resp, pretty_print=True)) - - assert resp[0].tag == '{http://schemas.xmlsoap.org/soap/envelope/}Body' - assert len(resp[0]) == 1 - assert resp[0][0].tag == '{tns}some_call' + RESPONSE_SUFFIX - assert len(resp[0][0]) == 0 - - def test_soap_bare_empty_input(self): - class SomeService(Service): - - @rpc(_body_style='bare', _returns=String) - def some_call(ctx): - return 'abc' - - app = Application([SomeService], 'tns', in_protocol=Soap11(), - out_protocol=Soap11(cleanup_namespaces=True)) - - req = b""" - - - - - - """ - - server = WsgiApplication(app) - resp = etree.fromstring(b''.join(server({ - 'QUERY_STRING': '', - 'PATH_INFO': '/call', - 'REQUEST_METHOD': 'POST', - 'CONTENT_TYPE': 'text/xml; charset=utf8', - 'SERVER_NAME': 'localhost', - 'wsgi.input': BytesIO(req) - }, start_response, "http://null"))) - - print(etree.tostring(resp, pretty_print=True)) - - assert resp[0].tag == '{http://schemas.xmlsoap.org/soap/envelope/}Body' - assert resp[0][0].tag == '{tns}some_call' + RESPONSE_SUFFIX - assert resp[0][0].text == 'abc' - - def test_soap_bare_empty_model_input_method_name(self): - class EmptyRequest(ComplexModel): - pass - - try: - class SomeService(Service): - @rpc(EmptyRequest, _body_style='bare', _returns=String) - def some_call(ctx, request): - return 'abc' - except Exception: - pass - else: - raise Exception("Must fail with exception: body_style='bare' does " - "not allow empty model as param") - - def test_implicit_class_conflict(self): - class someCallResponse(ComplexModel): - __namespace__ = 'tns' - s = String - - class SomeService(Service): - @rpc(someCallResponse, _returns=String) - def someCall(ctx, x): - return ['abc', 'def'] - - try: - Application([SomeService], 'tns', in_protocol=Soap11(), - 
out_protocol=Soap11(cleanup_namespaces=True)) - except ValueError as e: - print(e) - else: - raise Exception("must fail.") - - def test_soap_bare_wrapped_array_output(self): - class SomeService(Service): - @rpc(_body_style='bare', _returns=Array(String)) - def some_call(ctx): - return ['abc', 'def'] - - app = Application([SomeService], 'tns', in_protocol=Soap11(), - out_protocol=Soap11(cleanup_namespaces=True)) - - req = b""" - - - - - - """ - - server = WsgiApplication(app) - resp = etree.fromstring(b''.join(server({ - 'QUERY_STRING': '', - 'PATH_INFO': '/call', - 'REQUEST_METHOD': 'POST', - 'CONTENT_TYPE': 'text/xml; charset=utf8', - 'wsgi.input': BytesIO(req) - }, start_response, "http://null"))) - - print(etree.tostring(resp, pretty_print=True)) - - assert resp[0].tag == '{http://schemas.xmlsoap.org/soap/envelope/}Body' - assert resp[0][0].tag == '{tns}some_call' + RESPONSE_SUFFIX - assert resp[0][0][0].text == 'abc' - assert resp[0][0][1].text == 'def' - - def test_array_iterable(self): - class SomeService(Service): - @rpc(Array(Unicode), Iterable(Unicode)) - def some_call(ctx, a, b): - pass - - app = Application([SomeService], 'tns', in_protocol=Soap11(), - out_protocol=Soap11(cleanup_namespaces=True)) - - server = WsgiApplication(app) - - def test_invalid_self_reference(self): - try: - class SomeService(Service): - @rpc(_returns=SelfReference) - def method(ctx): - pass - except LogicError: - pass - else: - raise Exception("Must fail with: " - "'SelfReference can't be used inside @rpc and its ilk'") - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/test_soft_validation.py b/libs_crutch/contrib/spyne/test/test_soft_validation.py deleted file mode 100644 index 2177bd7..0000000 --- a/libs_crutch/contrib/spyne/test/test_soft_validation.py +++ /dev/null @@ -1,179 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -# -# Most of the service tests are performed through the interop tests. -# - -import unittest - -from spyne.application import Application -from spyne.decorator import srpc -from spyne.error import ValidationError -from spyne.service import Service -from spyne.protocol.http import HttpRpc -from spyne.protocol.soap import Soap11 -from spyne.model.primitive import Integer -from spyne.model.primitive import String -from spyne.server import ServerBase -from spyne.server.wsgi import WsgiApplication - -from spyne import MethodContext -from spyne.server.wsgi import WsgiMethodContext - -Application.transport = 'test' - - -class TestValidationString(unittest.TestCase): - def test_min_len(self): - StrictType = String(min_len=3) - - self.assertEqual(StrictType.validate_string(StrictType, 'aaa'), True) - self.assertEqual(StrictType.validate_string(StrictType, 'a'), False) - - def test_max_len(self): - StrictType = String(max_len=3) - - self.assertEqual(StrictType.validate_string(StrictType, 'a'), True) - self.assertEqual(StrictType.validate_string(StrictType, 'aaa'), True) - self.assertEqual(StrictType.validate_string(StrictType, 'aaaa'), False) - - def test_pattern(self): - # Pattern match needs to be checked after the string is decoded, that's - 
# why we need to use validate_native here. - StrictType = String(pattern='[a-z]') - - self.assertEqual(StrictType.validate_native(StrictType, 'a'), True) - self.assertEqual(StrictType.validate_native(StrictType, 'a1'), False) - self.assertEqual(StrictType.validate_native(StrictType, '1'), False) - - -class TestValidationInteger(unittest.TestCase): - def test_lt(self): - StrictType = Integer(lt=3) - - self.assertEqual(StrictType.validate_native(StrictType, 2), True) - self.assertEqual(StrictType.validate_native(StrictType, 3), False) - - def test_le(self): - StrictType = Integer(le=3) - - self.assertEqual(StrictType.validate_native(StrictType, 2), True) - self.assertEqual(StrictType.validate_native(StrictType, 3), True) - self.assertEqual(StrictType.validate_native(StrictType, 4), False) - - def test_gt(self): - StrictType = Integer(gt=3) - - self.assertEqual(StrictType.validate_native(StrictType, 4), True) - self.assertEqual(StrictType.validate_native(StrictType, 3), False) - - def test_ge(self): - StrictType = Integer(ge=3) - - self.assertEqual(StrictType.validate_native(StrictType, 3), True) - self.assertEqual(StrictType.validate_native(StrictType, 2), False) - -class TestHttpRpcSoftValidation(unittest.TestCase): - def setUp(self): - class SomeService(Service): - @srpc(String(pattern='a')) - def some_method(s): - pass - @srpc(String(pattern='a', max_occurs=2)) - def some_other_method(s): - pass - - self.application = Application([SomeService], - in_protocol=HttpRpc(validator='soft'), - out_protocol=Soap11(), - name='Service', tns='tns', - ) - - - def __get_ctx(self, mn, qs): - server = WsgiApplication(self.application) - ctx = WsgiMethodContext(server, { - 'QUERY_STRING': qs, - 'PATH_INFO': '/%s' % mn, - 'REQUEST_METHOD': "GET", - 'SERVER_NAME': 'localhost', - }, 'some-content-type') - - ctx, = server.generate_contexts(ctx) - server.get_in_object(ctx) - - return ctx - - def test_http_rpc(self): - ctx = self.__get_ctx('some_method', 's=1') - 
self.assertEqual(ctx.in_error.faultcode, 'Client.ValidationError') - - ctx = self.__get_ctx('some_method', 's=a') - self.assertEqual(ctx.in_error, None) - - ctx = self.__get_ctx('some_other_method', 's=1') - self.assertEqual(ctx.in_error.faultcode, 'Client.ValidationError') - ctx = self.__get_ctx('some_other_method', 's=1&s=2') - self.assertEqual(ctx.in_error.faultcode, 'Client.ValidationError') - ctx = self.__get_ctx('some_other_method', 's=1&s=2&s=3') - self.assertEqual(ctx.in_error.faultcode, 'Client.ValidationError') - ctx = self.__get_ctx('some_other_method', 's=a&s=a&s=a') - self.assertEqual(ctx.in_error.faultcode, 'Client.ValidationError') - - ctx = self.__get_ctx('some_other_method', 's=a&s=a') - self.assertEqual(ctx.in_error, None) - ctx = self.__get_ctx('some_other_method', 's=a') - self.assertEqual(ctx.in_error, None) - ctx = self.__get_ctx('some_other_method', '') - self.assertEqual(ctx.in_error, None) - -class TestSoap11SoftValidation(unittest.TestCase): - def test_basic(self): - class SomeService(Service): - @srpc(String(pattern='a')) - def some_method(s): - pass - - application = Application([SomeService], - in_protocol=Soap11(validator='soft'), - out_protocol=Soap11(), - name='Service', tns='tns', - ) - server = ServerBase(application) - - ctx = MethodContext(server, MethodContext.SERVER) - ctx.in_string = [u""" - - - - OK - - - - """] - - ctx, = server.generate_contexts(ctx) - server.get_in_object(ctx) - - self.assertEqual(isinstance(ctx.in_error, ValidationError), True) - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/test_sqlalchemy.py b/libs_crutch/contrib/spyne/test/test_sqlalchemy.py deleted file mode 100644 index 9d7af0d..0000000 --- a/libs_crutch/contrib/spyne/test/test_sqlalchemy.py +++ /dev/null @@ -1,1302 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) - -import inspect -import unittest -import sqlalchemy - -from pprint import pprint - -from sqlalchemy import create_engine -from sqlalchemy import MetaData -from sqlalchemy import Column -from sqlalchemy import Table -from sqlalchemy.exc import IntegrityError - -from sqlalchemy.orm import mapper -from sqlalchemy.orm import sessionmaker - -from spyne import M, Any, Double - -from spyne.model import XmlAttribute, File, XmlData, ComplexModel, Array, \ - Integer32, Unicode, Integer, Enum, TTableModel, DateTime, Boolean - -from spyne.model.binary import HybridFileStore -from spyne.model.complex import xml -from spyne.model.complex import table - -from spyne.store.relational import get_pk_columns -from spyne.store.relational.document import PGJsonB, PGJson, PGFileJson, \ - PGObjectJson - -TableModel = TTableModel() - - -class TestSqlAlchemyTypeMappings(unittest.TestCase): - def test_init(self): - fn = inspect.stack()[0][3] - from sqlalchemy.inspection import inspect as sqla_inspect - - class SomeClass1(TableModel): - __tablename__ = "%s_%d" % (fn, 1) - i = Integer32(pk=True) - e = Unicode(32) - - from spyne.util.dictdoc import get_dict_as_object - inst = get_dict_as_object(dict(i=4), 
SomeClass1) - assert not sqla_inspect(inst).attrs.e.history.has_changes() - - def test_bool(self): - fn = inspect.stack()[0][3] - - class SomeClass1(TableModel): - __tablename__ = "%s_%d" % (fn, 1) - i = Integer32(pk=True) - b = Boolean - - assert isinstance(SomeClass1.Attributes.sqla_table.c.b.type, - sqlalchemy.Boolean) - - class SomeClass2(TableModel): - __tablename__ = "%s_%d" % (fn, 2) - i = Integer32(pk=True) - b = Boolean(store_as=int) - - assert isinstance(SomeClass2.Attributes.sqla_table.c.b.type, - sqlalchemy.SmallInteger) - - def test_jsonb(self): - fn = inspect.stack()[0][3] - - class SomeClass1(TableModel): - __tablename__ = "%s_%d" % (fn, 1) - i = Integer32(pk=True) - a = Any(store_as='json') - - assert isinstance(SomeClass1.Attributes.sqla_table.c.a.type, PGJson) - - class SomeClass2(TableModel): - __tablename__ = "%s_%d" % (fn, 2) - i = Integer32(pk=True) - a = Any(store_as='jsonb') - - assert isinstance(SomeClass2.Attributes.sqla_table.c.a.type, PGJsonB) - - class SomeClass3(TableModel): - __tablename__ = "%s_%d" % (fn, 3) - i = Integer32(pk=True) - a = File(store_as=HybridFileStore("path", db_format='jsonb')) - - assert isinstance(SomeClass3.Attributes.sqla_table.c.a.type, PGFileJson) - assert SomeClass3.Attributes.sqla_table.c.a.type.dbt == 'jsonb' - - def test_obj_json(self): - fn = inspect.stack()[0][3] - - class SomeClass(ComplexModel): - s = Unicode - d = Double - - class SomeClass1(TableModel): - __tablename__ = "%s_%d" % (fn, 1) - _type_info = [ - ('i', Integer32(pk=True)), - ('a', Array(SomeClass, store_as='json')), - ] - - assert isinstance(SomeClass1.Attributes.sqla_table.c.a.type, - PGObjectJson) - - class SomeClass2(TableModel): - __tablename__ = "%s_%d" % (fn, 2) - i = Integer32(pk=True) - a = SomeClass.customize(store_as='json') - - assert isinstance(SomeClass2.Attributes.sqla_table.c.a.type, - PGObjectJson) - - -class TestSqlAlchemySchema(unittest.TestCase): - def setUp(self): - 
logging.getLogger('sqlalchemy').setLevel(logging.DEBUG) - - self.engine = create_engine('sqlite:///:memory:') - self.session = sessionmaker(bind=self.engine)() - self.metadata = TableModel.Attributes.sqla_metadata = MetaData() - self.metadata.bind = self.engine - logging.info('Testing against sqlalchemy-%s', sqlalchemy.__version__) - - def test_obj_json_dirty(self): - fn = inspect.stack()[0][3] - - class SomeClass(ComplexModel): - s = Unicode - d = Double - - class SomeClass1(TableModel): - __tablename__ = "%s_%d" % (fn, 1) - _type_info = [ - ('i', Integer32(pk=True)), - ('a', SomeClass.store_as('jsonb')), - ] - - self.metadata.create_all() - - sc1 = SomeClass1(i=5, a=SomeClass(s="s", d=42.0)) - self.session.add(sc1) - self.session.commit() - - from sqlalchemy.orm.attributes import flag_modified - - # TODO: maybe do the flag_modified() on setitem? - sc1.a.s = "ss" - flag_modified(sc1, 'a') - - assert sc1 in self.session.dirty - - self.session.commit() - assert sc1.a.s == "ss" - - # not implemented - #sc1.a[0].s = "sss" - #flag_modified(sc1.a[0], 's') - #assert sc1.a[0] in self.session.dirty - - def test_schema(self): - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True, autoincrement=False) - s = Unicode(64, unique=True) - i = Integer32(64, index=True) - - t = SomeClass.__table__ - self.metadata.create_all() # not needed, just nice to see. 
- - assert t.c.id.primary_key == True - assert t.c.id.autoincrement == False - indexes = list(t.indexes) - indexes.sort(key=lambda idx: idx.name) - for idx in indexes: - assert 'i' in idx.columns or 's' in idx.columns - if 's' in idx.columns: - assert idx.unique - - def test_colname_simple(self): - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True, autoincrement=False) - s = Unicode(64, sqla_column_args=dict(name='ss')) - - t = SomeClass.__table__ - self.metadata.create_all() # not needed, just nice to see. - - assert 'ss' in t.c - - def test_colname_complex_table(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_other_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = ( - {"sqlite_autoincrement": True}, - ) - - id = Integer32(primary_key=True) - o = SomeOtherClass.customize(store_as='table', - sqla_column_args=dict(name='oo')) - - t = SomeClass.__table__ - self.metadata.create_all() # not needed, just nice to see. - - assert 'oo_id' in t.c - - def test_colname_complex_json(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_other_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = ( - {"sqlite_autoincrement": True}, - ) - - id = Integer32(primary_key=True) - o = SomeOtherClass.customize(store_as='json', - sqla_column_args=dict(name='oo')) - - t = SomeClass.__table__ - self.metadata.create_all() # not needed, just nice to see. 
- - assert 'oo' in t.c - - def test_nested_sql(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_other_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = ( - {"sqlite_autoincrement": True}, - ) - - id = Integer32(primary_key=True) - o = SomeOtherClass.customize(store_as='table') - - self.metadata.create_all() - - soc = SomeOtherClass(s='ehe') - sc = SomeClass(o=soc) - - self.session.add(sc) - self.session.commit() - self.session.close() - - sc_db = self.session.query(SomeClass).get(1) - print(sc_db) - assert sc_db.o.s == 'ehe' - assert sc_db.o_id == 1 - - sc_db.o = None - self.session.commit() - self.session.close() - - sc_db = self.session.query(SomeClass).get(1) - assert sc_db.o == None - assert sc_db.o_id == None - - def test_nested_sql_array_as_table(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_other_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - others = Array(SomeOtherClass, store_as='table') - - self.metadata.create_all() - - soc1 = SomeOtherClass(s='ehe1') - soc2 = SomeOtherClass(s='ehe2') - sc = SomeClass(others=[soc1, soc2]) - - self.session.add(sc) - self.session.commit() - self.session.close() - - sc_db = self.session.query(SomeClass).get(1) - - assert sc_db.others[0].s == 'ehe1' - assert sc_db.others[1].s == 'ehe2' - - self.session.close() - - def test_nested_sql_array_as_multi_table(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_other_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = 
{"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - others = Array(SomeOtherClass, store_as=table(multi=True)) - - self.metadata.create_all() - - soc1 = SomeOtherClass(s='ehe1') - soc2 = SomeOtherClass(s='ehe2') - sc = SomeClass(others=[soc1, soc2]) - - self.session.add(sc) - self.session.commit() - self.session.close() - - sc_db = self.session.query(SomeClass).get(1) - - assert sc_db.others[0].s == 'ehe1' - assert sc_db.others[1].s == 'ehe2' - - self.session.close() - - def test_nested_sql_array_as_multi_table_with_backref(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_other_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - others = Array(SomeOtherClass, - store_as=table(multi=True, backref='some_classes')) - - self.metadata.create_all() - - soc1 = SomeOtherClass(s='ehe1') - soc2 = SomeOtherClass(s='ehe2') - sc = SomeClass(others=[soc1, soc2]) - - self.session.add(sc) - self.session.commit() - self.session.close() - - soc_db = self.session.query(SomeOtherClass).all() - - assert soc_db[0].some_classes[0].id == 1 - assert soc_db[1].some_classes[0].id == 1 - - self.session.close() - - def test_nested_sql_array_as_xml(self): - class SomeOtherClass(ComplexModel): - id = Integer32 - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - others = Array(SomeOtherClass, store_as='xml') - - self.metadata.create_all() - - soc1 = SomeOtherClass(s='ehe1') - soc2 = SomeOtherClass(s='ehe2') - sc = SomeClass(others=[soc1, soc2]) - - self.session.add(sc) - self.session.commit() - self.session.close() - - sc_db = self.session.query(SomeClass).get(1) - - assert sc_db.others[0].s == 'ehe1' - assert sc_db.others[1].s == 
'ehe2' - - self.session.close() - - def test_nested_sql_array_as_xml_no_ns(self): - class SomeOtherClass(ComplexModel): - id = Integer32 - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - others = Array(SomeOtherClass, store_as=xml(no_ns=True)) - - self.metadata.create_all() - - soc1 = SomeOtherClass(s='ehe1') - soc2 = SomeOtherClass(s='ehe2') - sc = SomeClass(others=[soc1, soc2]) - - self.session.add(sc) - self.session.commit() - self.session.close() - - sc_xml = self.session.connection() \ - .execute("select others from some_class") .fetchall()[0][0] - - from lxml import etree - assert etree.fromstring(sc_xml).tag == 'SomeOtherClassArray' - - self.session.close() - - def test_inheritance(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_other_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - s = Unicode(64) - - class SomeClass(SomeOtherClass): - numbers = Array(Integer32).store_as(xml(no_ns=True, root_tag='a')) - - self.metadata.create_all() - - sc = SomeClass(id=5, s='s', numbers=[1, 2, 3, 4]) - - self.session.add(sc) - self.session.commit() - self.session.close() - - sc_db = self.session.query(SomeClass).get(5) - assert sc_db.numbers == [1, 2, 3, 4] - self.session.close() - - sc_db = self.session.query(SomeOtherClass).get(5) - assert sc_db.id == 5 - try: - sc_db.numbers - except AttributeError: - pass - else: - raise Exception("must fail") - - self.session.close() - - def test_inheritance_with_complex_fields(self): - class Foo(TableModel): - __tablename__ = 'foo' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - s = Unicode(64) - - class Bar(TableModel): - __tablename__ = 'bar' - __table_args__ = {"sqlite_autoincrement": True} - __mapper_args__ = { - 'polymorphic_on': 'type', - 'polymorphic_identity': 'bar', - 'with_polymorphic': '*', - } - - 
id = Integer32(primary_key=True) - s = Unicode(64) - type = Unicode(6) - foos = Array(Foo).store_as('table') - - class SubBar(Bar): - __mapper_args__ = { - 'polymorphic_identity': 'subbar', - } - i = Integer32 - - sqlalchemy.orm.configure_mappers() - - mapper_subbar = SubBar.Attributes.sqla_mapper - mapper_bar = Bar.Attributes.sqla_mapper - assert not mapper_subbar.concrete - - for inheriting in mapper_subbar.iterate_to_root(): - if inheriting is not mapper_subbar \ - and not (mapper_bar.relationships['foos'] is - mapper_subbar.relationships['foos']): - raise Exception("Thou shalt stop children relationships " - "from overriding the ones in parent") - - def test_mixins_with_complex_fields(self): - class Foo(TableModel): - __tablename__ = 'foo' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - s = Unicode(64) - - class Bar(TableModel): - __tablename__ = 'bar' - __table_args__ = {"sqlite_autoincrement": True} - __mixin__ = True - __mapper_args__ = { - 'polymorphic_on': 'type', - 'polymorphic_identity': 'bar', - 'with_polymorphic': '*', - } - - id = Integer32(primary_key=True) - s = Unicode(64) - type = Unicode(6) - foos = Array(Foo).store_as('table') - - class SubBar(Bar): - __mapper_args__ = { - 'polymorphic_identity': 'subbar', - } - i = Integer32 - - sqlalchemy.orm.configure_mappers() - - mapper_subbar = SubBar.Attributes.sqla_mapper - mapper_bar = Bar.Attributes.sqla_mapper - assert not mapper_subbar.concrete - - for inheriting in mapper_subbar.iterate_to_root(): - if inheriting is not mapper_subbar \ - and not (mapper_bar.relationships['foos'] is - mapper_subbar.relationships['foos']): - raise Exception("Thou shalt stop children relationships " - "from overriding the ones in parent") - - def test_sqlalchemy_inheritance(self): - # no spyne code is involved here. - # this is just to test test the sqlalchemy behavior that we rely on. 
- - class Employee(object): - def __init__(self, name): - self.name = name - - def __repr__(self): - return self.__class__.__name__ + " " + self.name - - class Manager(Employee): - def __init__(self, name, manager_data): - self.name = name - self.manager_data = manager_data - - def __repr__(self): - return ( - self.__class__.__name__ + " " + - self.name + " " + self.manager_data - ) - - class Engineer(Employee): - def __init__(self, name, engineer_info): - self.name = name - self.engineer_info = engineer_info - - def __repr__(self): - return ( - self.__class__.__name__ + " " + - self.name + " " + self.engineer_info - ) - - employees_table = Table('employees', self.metadata, - Column('employee_id', sqlalchemy.Integer, primary_key=True), - Column('name', sqlalchemy.String(50)), - Column('manager_data', sqlalchemy.String(50)), - Column('engineer_info', sqlalchemy.String(50)), - Column('type', sqlalchemy.String(20), nullable=False), - ) - - employee_mapper = mapper(Employee, employees_table, - polymorphic_on=employees_table.c.type, - polymorphic_identity='employee') - - manager_mapper = mapper(Manager, inherits=employee_mapper, - polymorphic_identity='manager') - - engineer_mapper = mapper(Engineer, inherits=employee_mapper, - polymorphic_identity='engineer') - - self.metadata.create_all() - - manager = Manager('name', 'data') - self.session.add(manager) - self.session.commit() - self.session.close() - - assert self.session.query(Employee).with_polymorphic('*') \ - .filter_by(employee_id=1) \ - .one().type == 'manager' - - def test_inheritance_polymorphic_with_non_nullables_in_subclasses(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_other_class' - __table_args__ = {"sqlite_autoincrement": True} - __mapper_args__ = {'polymorphic_on': 't', 'polymorphic_identity': 1} - - id = Integer32(primary_key=True) - t = Integer32(nillable=False) - s = Unicode(64, nillable=False) - - class SomeClass(SomeOtherClass): - __mapper_args__ = ( - (), - 
{'polymorphic_identity': 2}, - ) - - i = Integer(nillable=False) - - self.metadata.create_all() - - assert SomeOtherClass.__table__.c.s.nullable == False - - # this should be nullable to let other classes be added. - # spyne still checks this constraint when doing input validation. - # spyne should generate a constraint to check this at database level as - # well. - assert SomeOtherClass.__table__.c.i.nullable == True - - soc = SomeOtherClass(s='s') - self.session.add(soc) - self.session.commit() - soc_id = soc.id - - try: - sc = SomeClass(i=5) - self.session.add(sc) - self.session.commit() - except IntegrityError: - self.session.rollback() - else: - raise Exception("Must fail with IntegrityError.") - - sc2 = SomeClass(s='s') # this won't fail. should it? - self.session.add(sc2) - self.session.commit() - - self.session.expunge_all() - - assert self.session.query(SomeOtherClass).with_polymorphic('*') \ - .filter_by(id=soc_id).one().t == 1 - - self.session.close() - - def test_inheritance_polymorphic(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - __mapper_args__ = {'polymorphic_on': 't', 'polymorphic_identity': 1} - - id = Integer32(primary_key=True) - s = Unicode(64) - t = Integer32(nillable=False) - - class SomeClass(SomeOtherClass): - __mapper_args__ = {'polymorphic_identity': 2} - numbers = Array(Integer32).store_as(xml(no_ns=True, root_tag='a')) - - self.metadata.create_all() - - sc = SomeClass(id=5, s='s', numbers=[1, 2, 3, 4]) - - self.session.add(sc) - self.session.commit() - self.session.close() - - assert self.session.query(SomeOtherClass).with_polymorphic('*') \ - .filter_by(id=5).one().t == 2 - self.session.close() - - def test_nested_sql_array_as_json(self): - class SomeOtherClass(ComplexModel): - id = Integer32 - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = 
Integer32(primary_key=True) - others = Array(SomeOtherClass, store_as='json') - - self.metadata.create_all() - - soc1 = SomeOtherClass(s='ehe1') - soc2 = SomeOtherClass(s='ehe2') - sc = SomeClass(others=[soc1, soc2]) - - self.session.add(sc) - self.session.commit() - self.session.close() - - sc_db = self.session.query(SomeClass).get(1) - - assert sc_db.others[0].s == 'ehe1' - assert sc_db.others[1].s == 'ehe2' - - self.session.close() - - def test_modifiers(self): - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - i = XmlAttribute(Integer32(pk=True)) - s = XmlData(Unicode(64)) - - self.metadata.create_all() - self.session.add(SomeClass(s='s')) - self.session.commit() - self.session.expunge_all() - - ret = self.session.query(SomeClass).get(1) - assert ret.i == 1 # redundant - assert ret.s == 's' - - def test_default_ctor(self): - class SomeOtherClass(ComplexModel): - id = Integer32 - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - others = Array(SomeOtherClass, store_as='json') - f = Unicode(32, default='uuu') - - self.metadata.create_all() - self.session.add(SomeClass()) - self.session.commit() - self.session.expunge_all() - - assert self.session.query(SomeClass).get(1).f == 'uuu' - - def test_default_value(self): - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - f = Unicode(32, db_default=u'uuu') - - self.metadata.create_all() - val = SomeClass() - assert val.f is None - - self.session.add(val) - self.session.commit() - - self.session.expunge_all() - - assert self.session.query(SomeClass).get(1).f == u'uuu' - - def test_default_ctor_with_sql_relationship(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_other_class' - __table_args__ = {"sqlite_autoincrement": True} 
- - id = Integer32(primary_key=True) - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - o = SomeOtherClass.customize(store_as='table') - - self.metadata.create_all() - self.session.add(SomeClass()) - self.session.commit() - - def test_store_as_index(self): - class SomeOtherClass(TableModel): - __tablename__ = 'some_other_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - s = Unicode(64) - - class SomeClass(TableModel): - __tablename__ = 'some_class' - __table_args__ = {"sqlite_autoincrement": True} - - id = Integer32(primary_key=True) - o = SomeOtherClass.customize(store_as='table', index='btree') - - self.metadata.create_all() - idx, = SomeClass.__table__.indexes - assert 'o_id' in idx.columns - - def test_scalar_collection(self): - class SomeClass(TableModel): - __tablename__ = 'some_class' - - id = Integer32(primary_key=True) - values = Array(Unicode).store_as('table') - - self.metadata.create_all() - - self.session.add(SomeClass(id=1, values=['a', 'b', 'c'])) - self.session.commit() - sc = self.session.query(SomeClass).get(1) - assert sc.values == ['a', 'b', 'c'] - del sc - - sc = self.session.query(SomeClass).get(1) - sc.values.append('d') - self.session.commit() - del sc - sc = self.session.query(SomeClass).get(1) - assert sc.values == ['a', 'b', 'c', 'd'] - - sc = self.session.query(SomeClass).get(1) - sc.values = sc.values[1:] - self.session.commit() - del sc - sc = self.session.query(SomeClass).get(1) - assert sc.values == ['b', 'c', 'd'] - - def test_multiple_fk(self): - class SomeChildClass(TableModel): - __tablename__ = 'some_child_class' - - id = Integer32(primary_key=True) - s = Unicode(64) - i = Integer32 - - class SomeClass(TableModel): - __tablename__ = 'some_class' - - id = Integer32(primary_key=True) - children = Array(SomeChildClass).store_as('table') - mirror = 
SomeChildClass.store_as('table') - - self.metadata.create_all() - - children = [ - SomeChildClass(s='p', i=600), - SomeChildClass(s='|', i=10), - SomeChildClass(s='q', i=9), - ] - - sc = SomeClass(children=children) - self.session.add(sc) - self.session.flush() - sc.mirror = children[1] - self.session.commit() - del sc - - sc = self.session.query(SomeClass).get(1) - assert ''.join([scc.s for scc in sc.children]) == 'p|q' - assert sum([scc.i for scc in sc.children]) == 619 - - def test_simple_fk(self): - class SomeChildClass(TableModel): - __tablename__ = 'some_child_class' - - id = Integer32(primary_key=True) - s = Unicode(64) - i = Integer32 - - class SomeClass(TableModel): - __tablename__ = 'some_class' - - id = Integer32(primary_key=True) - child_id = Integer32(fk='some_child_class.id') - - foreign_keys = SomeClass.__table__.c['child_id'].foreign_keys - assert len(foreign_keys) == 1 - fk, = foreign_keys - assert fk._colspec == 'some_child_class.id' - - def test_multirel_single_table(self): - class SomeChildClass(TableModel): - __tablename__ = 'some_child_class' - - id = Integer32(primary_key=True) - s = Unicode(64) - - class SomeOtherChildClass(TableModel): - __tablename__ = 'some_other_child_class' - - id = Integer32(primary_key=True) - i = Integer32 - - class SomeClass(TableModel): - __tablename__ = 'some_class' - - id = Integer32(primary_key=True) - - children = Array(SomeChildClass, - store_as=table( - multi='children', lazy='joined', - left='parent_id', right='child_id', - fk_left_ondelete='cascade', - fk_right_ondelete='cascade', - ), - ) - - other_children = Array(SomeOtherChildClass, - store_as=table( - multi='children', lazy='joined', - left='parent_id', right='other_child_id', - fk_left_ondelete='cascade', - fk_right_ondelete='cascade', - ), - ) - - t = SomeClass.Attributes.sqla_metadata.tables['children'] - - fkp, = t.c.parent_id.foreign_keys - assert fkp._colspec == 'some_class.id' - - fkc, = t.c.child_id.foreign_keys - assert fkc._colspec == 
'some_child_class.id' - - fkoc, = t.c.other_child_id.foreign_keys - assert fkoc._colspec == 'some_other_child_class.id' - - def test_reflection(self): - class SomeClass(TableModel): - __tablename__ = 'some_class' - - id = Integer32(primary_key=True) - s = Unicode(32) - - TableModel.Attributes.sqla_metadata.create_all() - - # create a new table model with empty metadata - TM2 = TTableModel() - TM2.Attributes.sqla_metadata.bind = self.engine - - # fill it with information from the db - TM2.Attributes.sqla_metadata.reflect() - - # convert sqla info to spyne info - class Reflected(TM2): - __table__ = TM2.Attributes.sqla_metadata.tables['some_class'] - - pprint(dict(Reflected._type_info).items()) - assert issubclass(Reflected._type_info['id'], Integer) - - # this looks at spyne attrs - assert [k for k, v in get_pk_columns(Reflected)] == ['id'] - - # this looks at sqla attrs - assert [k for k, v in Reflected.get_primary_keys()] == ['id'] - - assert issubclass(Reflected._type_info['s'], Unicode) - assert Reflected._type_info['s'].Attributes.max_len == 32 - - def _test_sqlalchemy_remapping(self): - class SomeTable(TableModel): - __tablename__ = 'some_table' - id = Integer32(pk=True) - i = Integer32 - s = Unicode(32) - - class SomeTableSubset(TableModel): - __table__ = SomeTable.__table__ - - id = Integer32(pk=True) # sqla session doesn't work without pk - i = Integer32 - - class SomeTableOtherSubset(TableModel): - __table__ = SomeTable.__table__ - _type_info = [(k, v) for k, v in SomeTable._type_info.items() - if k in ('id', 's')] - - self.session.add(SomeTable(id=1, i=2, s='s')) - self.session.commit() - - st = self.session.query(SomeTable).get(1) - sts = self.session.query(SomeTableSubset).get(1) - stos = self.session.query(SomeTableOtherSubset).get(1) - - sts.i = 3 - sts.s = 'ss' # will not be flushed to db - self.session.commit() - - assert st.s == 's' - assert stos.i == 3 - - def test_file_storage(self): - class C(TableModel): - __tablename__ = "c" - - id = 
Integer32(pk=True) - f = File(store_as=HybridFileStore('test_file_storage', 'json')) - - self.metadata.create_all() - c = C(f=File.Value(name=u"name", type=u"type", data=[b"data"])) - self.session.add(c) - self.session.flush() - self.session.commit() - - c = self.session.query(C).get(1) - print(c) - assert c.f.name == "name" - assert c.f.type == "type" - assert c.f.data[0][:] == b"data" - - def test_append_field_complex_existing_column(self): - class C(TableModel): - __tablename__ = "c" - u = Unicode(pk=True) - - class D(TableModel): - __tablename__ = "d" - d = Integer32(pk=True) - c = C.store_as('table') - - C.append_field('d', D.store_as('table')) - assert C.Attributes.sqla_mapper.get_property('d').argument is D - - def test_append_field_complex_delayed(self): - class C(TableModel): - __tablename__ = "c" - u = Unicode(pk=True) - - class D(C): - i = Integer32 - - C.append_field('d', DateTime) - - assert D.Attributes.sqla_mapper.has_property('d') - - def _test_append_field_complex_explicit_existing_column(self): - # FIXME: Test something! - - class C(TableModel): - __tablename__ = "c" - id = Integer32(pk=True) - - # c already also produces c_id. this is undefined behaviour, one of them - # gets ignored, whichever comes first. 
- class D(TableModel): - __tablename__ = "d" - id = Integer32(pk=True) - c = C.store_as('table') - c_id = Integer32(15) - - def test_append_field_complex_circular_array(self): - class C(TableModel): - __tablename__ = "cc" - id = Integer32(pk=True) - - class D(TableModel): - __tablename__ = "dd" - id = Integer32(pk=True) - c = Array(C).customize(store_as=table(right='dd_id')) - - C.append_field('d', D.customize(store_as=table(left='dd_id'))) - self.metadata.create_all() - - c1, c2 = C(id=1), C(id=2) - d = D(id=1, c=[c1, c2]) - self.session.add(d) - self.session.commit() - assert c1.d.id == 1 - - def test_append_field_complex_new_column(self): - class C(TableModel): - __tablename__ = "c" - u = Unicode(pk=True) - - class D(TableModel): - __tablename__ = "d" - id = Integer32(pk=True) - - C.append_field('d', D.store_as('table')) - assert C.Attributes.sqla_mapper.get_property('d').argument is D - assert isinstance(C.Attributes.sqla_table.c['d_id'].type, - sqlalchemy.Integer) - - def test_append_field_array(self): - class C(TableModel): - __tablename__ = "c" - id = Integer32(pk=True) - - class D(TableModel): - __tablename__ = "d" - id = Integer32(pk=True) - - C.append_field('d', Array(D).store_as('table')) - assert C.Attributes.sqla_mapper.get_property('d').argument is D - print(repr(D.Attributes.sqla_table)) - assert isinstance(D.Attributes.sqla_table.c['c_id'].type, - sqlalchemy.Integer) - - def test_append_field_array_many(self): - class C(TableModel): - __tablename__ = "c" - id = Integer32(pk=True) - - class D(TableModel): - __tablename__ = "d" - id = Integer32(pk=True) - - C.append_field('d', Array(D).store_as(table(multi='c_d'))) - assert C.Attributes.sqla_mapper.get_property('d').argument is D - rel_table = C.Attributes.sqla_metadata.tables['c_d'] - assert 'c_id' in rel_table.c - assert 'd_id' in rel_table.c - - def test_append_field_complex_cust(self): - class C(TableModel): - __tablename__ = "c" - id = Integer32(pk=True) - - class D(TableModel): - __tablename__ = 
"d" - id = Integer32(pk=True) - c = Array(C).store_as('table') - - C.append_field('d', D.customize( - nullable=False, - store_as=table(left='d_id'), - )) - assert C.__table__.c['d_id'].nullable == False - - def _test_append_field_cust(self): - class C(TableModel): - __tablename__ = "c" - id = Integer32(pk=True) - - C2 = C.customize() - - C.append_field("s", Unicode) - - C() - - self.metadata.create_all() - - assert "s" in C2._type_info - assert "s" in C2.Attributes.sqla_mapper.columns - - self.session.add(C2(s='foo')) - self.session.commit() - assert self.session.query(C).first().s == 'foo' - - def test_polymorphic_cust(self): - class C(TableModel): - __tablename__ = "c" - __mapper_args__ = { - 'polymorphic_on': 't', - 'polymorphic_identity': 1, - } - - id = Integer32(pk=True) - t = M(Integer32) - - class D(C): - __mapper_args__ = { - 'polymorphic_identity': 2, - } - d = Unicode - - D2 = D.customize() - - assert C().t == 1 - assert D().t == 2 - - # That's the way SQLAlchemy works. Don't use customized classes in - # anywhere other than interface definitions - assert D2().t == None - - def test_base_append_simple(self): - class B(TableModel): - __tablename__ = 'b' - __mapper_args__ = { - 'polymorphic_on': 't', - 'polymorphic_identity': 1, - } - - id = Integer32(pk=True) - t = M(Integer32) - - class C(B): - __mapper_args__ = { - 'polymorphic_identity': 1, - } - s = Unicode - - B.append_field('i', Integer32) - - self.metadata.create_all() - - self.session.add(C(s="foo", i=42)) - self.session.commit() - - c = self.session.query(C).first() - - assert c.s == 'foo' - assert c.i == 42 - assert c.t == 1 - - def test_base_append_complex(self): - class B(TableModel): - __tablename__ = 'b' - __mapper_args__ = { - 'polymorphic_on': 't', - 'polymorphic_identity': 1, - } - - id = Integer32(pk=True) - t = M(Integer32) - - class C(B): - __mapper_args__ = { - 'polymorphic_identity': 1, - } - s = Unicode - - class D(TableModel): - __tablename__ = 'd' - id = Integer32(pk=True) - i = 
M(Integer32) - - B.append_field('d', D.store_as('table')) - - self.metadata.create_all() - - self.session.add(C(d=D(i=42))) - self.session.commit() - - c = self.session.query(C).first() - - assert c.d.i == 42 - - -class TestSqlAlchemySchemaWithPostgresql(unittest.TestCase): - def setUp(self): - self.metadata = TableModel.Attributes.sqla_metadata = MetaData() - - def test_enum(self): - table_name = "test_enum" - - enums = ('SUBSCRIBED', 'UNSUBSCRIBED', 'UNCONFIRMED') - - class SomeClass(TableModel): - __tablename__ = table_name - - id = Integer32(primary_key=True) - e = Enum(*enums, type_name='status_choices') - - t = self.metadata.tables[table_name] - assert 'e' in t.c - assert tuple(t.c.e.type.enums) == enums - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/test_sqlalchemy_deprecated.py b/libs_crutch/contrib/spyne/test/test_sqlalchemy_deprecated.py deleted file mode 100644 index 8607a06..0000000 --- a/libs_crutch/contrib/spyne/test/test_sqlalchemy_deprecated.py +++ /dev/null @@ -1,380 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logging.basicConfig(level=logging.DEBUG) - -import unittest -import sqlalchemy - -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm.mapper import Mapper - -from sqlalchemy import create_engine -from sqlalchemy import MetaData -from sqlalchemy import Column -from sqlalchemy import Table -from sqlalchemy import ForeignKey - -from sqlalchemy.orm import mapper -from sqlalchemy.orm import relationship -from sqlalchemy.orm import sessionmaker - -from sqlalchemy.schema import UniqueConstraint - -from spyne.application import Application -from spyne.decorator import rpc -from spyne.model import ComplexModel -from spyne.model import Array -from spyne.model import Unicode -from spyne.model import Integer -from spyne.model.table import TableModel -from spyne.protocol.http import HttpRpc -from spyne.protocol.soap import Soap11 -from spyne.server.wsgi import WsgiApplication -from spyne.server.wsgi import WsgiMethodContext - -# -# Deprecated Table Model Tests -# - -class TestSqlAlchemy(unittest.TestCase): - def setUp(self): - self.metadata = MetaData() - self.DeclarativeBase = declarative_base(metadata=self.metadata) - self.engine = create_engine('sqlite:///:memory:', echo=True) - self.Session = sessionmaker(bind=self.engine) - - def tearDown(self): - del self.metadata - del self.DeclarativeBase - del self.engine - del self.Session - - def test_declarative(self): - from sqlalchemy import Integer - from sqlalchemy import String - - class DbObject(TableModel, self.DeclarativeBase): - __tablename__ = 'db_object' - - id = Column(Integer, primary_key=True) - s = Column(String) - - self.metadata.create_all(self.engine) - - def test_mapper(self): - import sqlalchemy - - class Address(self.DeclarativeBase): - __tablename__ = 
'address' - - id = Column(sqlalchemy.Integer, primary_key=True) - email = Column(sqlalchemy.String(50)) - user_id = Column(sqlalchemy.Integer, ForeignKey('user.id')) - - class User(self.DeclarativeBase): - __tablename__ = 'user' - - id = Column(sqlalchemy.Integer, primary_key=True) - name = Column(sqlalchemy.String(50)) - addresses = relationship("Address", backref="user") - - self.metadata.create_all(self.engine) - - import spyne.model.primitive - - class AddressDetail(ComplexModel): - id = spyne.model.primitive.Integer - user_name = spyne.model.primitive.String - address = spyne.model.primitive.String - - @classmethod - def mapper(cls, meta): - user_t = meta.tables['user'] - address_t = meta.tables['address'] - - cls._main_t = user_t.join(address_t) - - cls._properties = { - 'id': address_t.c.id, - 'user_name': user_t.c.name, - 'address': address_t.c.email, - } - - cls._mapper = mapper(cls, cls._main_t, - include_properties=cls._properties.values(), - properties=cls._properties, - primary_key=[address_t.c.id] - ) - - AddressDetail.mapper(self.metadata) - - def test_custom_mapper(self): - class CustomMapper(Mapper): - def __init__(self, class_, local_table, *args, **kwargs): - super(CustomMapper, self).__init__(class_, local_table, *args, - **kwargs) - - # Do not configure primary keys to check that CustomerMapper is - # actually used - def _configure_pks(self): - pass - - def custom_mapper(class_, local_table=None, *args, **params): - return CustomMapper(class_, local_table, *args, **params) - - CustomDeclarativeBase = declarative_base(metadata=self.metadata, - mapper=custom_mapper) - - class User(CustomDeclarativeBase): - __tablename__ = 'user' - - # CustomMapper should not fail because of no primary key - name = Column(sqlalchemy.String(50)) - - self.metadata.create_all(self.engine) - - def test_rpc(self): - import sqlalchemy - from sqlalchemy import sql - - class KeyValuePair(TableModel, self.DeclarativeBase): - __tablename__ = 'key_value_store' - 
__namespace__ = 'punk' - - key = Column(sqlalchemy.String(100), nullable=False, primary_key=True) - value = Column(sqlalchemy.String, nullable=False) - - self.metadata.create_all(self.engine) - - import hashlib - - session = self.Session() - - for i in range(1, 10): - key = str(i).encode() - m = hashlib.md5() - m.update(key) - value = m.hexdigest() - - session.add(KeyValuePair(key=key, value=value)) - - session.commit() - - from spyne.service import Service - from spyne.model.complex import Array - from spyne.model.primitive import String - - class Service(Service): - @rpc(String(max_occurs='unbounded'), - _returns=Array(KeyValuePair), - _in_variable_names={ - 'keys': 'key' - } - ) - def get_values(ctx, keys): - session = self.Session() - - return session.query(KeyValuePair).filter(sql.and_( - KeyValuePair.key.in_(keys) - )).order_by(KeyValuePair.key) - - application = Application([Service], - in_protocol=HttpRpc(), - out_protocol=Soap11(), - name='Service', tns='tns' - ) - server = WsgiApplication(application) - - initial_ctx = WsgiMethodContext(server, { - 'REQUEST_METHOD': 'GET', - 'QUERY_STRING': 'key=1&key=2&key=3', - 'PATH_INFO': '/get_values', - 'SERVER_NAME': 'localhost', - }, 'some-content-type') - - ctx, = server.generate_contexts(initial_ctx) - server.get_in_object(ctx) - server.get_out_object(ctx) - server.get_out_string(ctx) - - i = 0 - for e in ctx.out_document[0][0][0]: - i+=1 - key = str(i) - m = hashlib.md5() - m.update(key) - value = m.hexdigest() - - _key = e.find('{%s}key' % KeyValuePair.get_namespace()) - _value = e.find('{%s}value' % KeyValuePair.get_namespace()) - - print((_key, _key.text)) - print((_value, _value.text)) - - self.assertEqual(_key.text, key) - self.assertEqual(_value.text, value) - - def test_late_mapping(self): - import sqlalchemy - - user_t = Table('user', self.metadata, - Column('id', sqlalchemy.Integer, primary_key=True), - Column('name', sqlalchemy.String), - ) - - class User(TableModel, self.DeclarativeBase): - __table__ 
= user_t - - self.assertEqual(User._type_info['id'].__type_name__, 'integer') - self.assertEqual(User._type_info['name'].__type_name__, 'string') - - - def test_default_ctor(self): - import sqlalchemy - - class User1Mixin(object): - id = Column(sqlalchemy.Integer, primary_key=True) - name = Column(sqlalchemy.String(256)) - - class User1(self.DeclarativeBase, TableModel, User1Mixin): - __tablename__ = 'spyne_user1' - - mail = Column(sqlalchemy.String(256)) - - u = User1(id=1, mail="a@b.com", name='dummy') - - assert u.id == 1 - assert u.mail == "a@b.com" - assert u.name == "dummy" - - class User2Mixin(object): - id = Column(sqlalchemy.Integer, primary_key=True) - name = Column(sqlalchemy.String(256)) - - class User2(TableModel, self.DeclarativeBase, User2Mixin): - __tablename__ = 'spyne_user2' - - mail = Column(sqlalchemy.String(256)) - - u = User2(id=1, mail="a@b.com", name='dummy') - - assert u.id == 1 - assert u.mail == "a@b.com" - assert u.name == "dummy" - - def test_mixin_inheritance(self): - import sqlalchemy - - class UserMixin(object): - id = Column(sqlalchemy.Integer, primary_key=True) - name = Column(sqlalchemy.String(256)) - - class User(self.DeclarativeBase, TableModel, UserMixin): - __tablename__ = 'spyne_user_mixin' - - mail = Column(sqlalchemy.String(256)) - - assert 'mail' in User._type_info - assert 'name' in User._type_info - assert 'id' in User._type_info - - def test_same_table_inheritance(self): - import sqlalchemy - - class User(self.DeclarativeBase, TableModel): - __tablename__ = 'spyne_user_sti' - - id = Column(sqlalchemy.Integer, primary_key=True) - name = Column(sqlalchemy.String(256)) - - class UserMail(User): - mail = Column(sqlalchemy.String(256)) - - assert 'mail' in UserMail._type_info - assert 'name' in UserMail._type_info - assert 'id' in UserMail._type_info - - def test_relationship_array(self): - import sqlalchemy - class Permission(TableModel, self.DeclarativeBase): - __tablename__ = 'spyne_user_permission' - - id = 
Column(sqlalchemy.Integer, primary_key=True) - user_id = Column(sqlalchemy.Integer, ForeignKey("spyne_user.id")) - - - class User(TableModel, self.DeclarativeBase): - __tablename__ = 'spyne_user' - - id = Column(sqlalchemy.Integer, primary_key=True) - permissions = relationship(Permission) - - class Address(self.DeclarativeBase, TableModel): - __tablename__ = 'spyne_address' - - id = Column(sqlalchemy.Integer, primary_key=True) - address = Column(sqlalchemy.String(256)) - user_id = Column(sqlalchemy.Integer, ForeignKey(User.id), nullable=False) - user = relationship(User) - - assert 'permissions' in User._type_info - assert issubclass(User._type_info['permissions'], Array) - assert issubclass(User._type_info['permissions']._type_info.values()[0], Permission) - - #Address().user = None - #User().permissions = None # This fails, and actually is supposed to fail. - - -class TestSpyne2Sqlalchemy(unittest.TestCase): - def test_table(self): - class SomeClass(ComplexModel): - __metadata__ = MetaData() - __tablename__ = 'some_class' - - i = Integer(primary_key=True) - - t = SomeClass.Attributes.sqla_table - assert t.c['i'].type.__class__ is sqlalchemy.DECIMAL - - def test_table_args(self): - class SomeClass(ComplexModel): - __metadata__ = MetaData() - __tablename__ = 'some_class' - __table_args__ = ( - UniqueConstraint('j'), - ) - - i = Integer(primary_key=True) - j = Unicode(64) - - t = SomeClass.Attributes.sqla_table - assert isinstance(t.c['j'].type, sqlalchemy.String) - - for c in t.constraints: - if isinstance(c, UniqueConstraint): - assert list(c.columns) == [t.c.j] - break - else: - raise Exception("UniqueConstraint is missing.") - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/test/transport/__init__.py b/libs_crutch/contrib/spyne/test/transport/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/contrib/spyne/test/transport/test_msgpack.py 
b/libs_crutch/contrib/spyne/test/transport/test_msgpack.py deleted file mode 100644 index c94174a..0000000 --- a/libs_crutch/contrib/spyne/test/transport/test_msgpack.py +++ /dev/null @@ -1,94 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import msgpack - -from spyne import Application, Service, rpc -from spyne.model import Unicode -from spyne.protocol.msgpack import MessagePackDocument - -from twisted.trial import unittest - - -class TestMessagePackServer(unittest.TestCase): - def gen_prot(self, app): - from spyne.server.twisted.msgpack import TwistedMessagePackProtocol - from twisted.test.proto_helpers import StringTransportWithDisconnection - from spyne.server.msgpack import MessagePackServerBase - - prot = TwistedMessagePackProtocol(MessagePackServerBase(app)) - transport = StringTransportWithDisconnection() - prot.makeConnection(transport) - transport.protocol = prot - - return prot - - def test_roundtrip(self): - v = "yaaay!" 
- class SomeService(Service): - @rpc(Unicode, _returns=Unicode) - def yay(ctx, u): - return u - - app = Application([SomeService], 'tns', - in_protocol=MessagePackDocument(), - out_protocol=MessagePackDocument()) - - prot = self.gen_prot(app) - request = msgpack.packb({'yay': [v]}) - prot.dataReceived(msgpack.packb([1, request])) - val = prot.transport.value() - print(repr(val)) - val = msgpack.unpackb(val) - print(repr(val)) - - self.assertEqual(val, [0, msgpack.packb(v)]) - - def test_roundtrip_deferred(self): - from twisted.internet import reactor - from twisted.internet.task import deferLater - - v = "yaaay!" - p_ctx = [] - class SomeService(Service): - @rpc(Unicode, _returns=Unicode) - def yay(ctx, u): - def _cb(): - return u - p_ctx.append(ctx) - return deferLater(reactor, 0.1, _cb) - - app = Application([SomeService], 'tns', - in_protocol=MessagePackDocument(), - out_protocol=MessagePackDocument()) - - prot = self.gen_prot(app) - request = msgpack.packb({'yay': [v]}) - def _ccb(_): - val = prot.transport.value() - print(repr(val)) - val = msgpack.unpackb(val) - print(repr(val)) - - self.assertEqual(val, [0, msgpack.packb(v)]) - - prot.dataReceived(msgpack.packb([1, request])) - - return p_ctx[0].out_object[0].addCallback(_ccb) - diff --git a/libs_crutch/contrib/spyne/test/util/__init__.py b/libs_crutch/contrib/spyne/test/util/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/contrib/spyne/test/util/test_address.py b/libs_crutch/contrib/spyne/test/util/test_address.py deleted file mode 100644 index 04f8918..0000000 --- a/libs_crutch/contrib/spyne/test/util/test_address.py +++ /dev/null @@ -1,639 +0,0 @@ -#!/usr/bin/env python -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -# The MIT License -# -# Copyright (c) Val Neekman @ Neekware Inc. http://neekware.com -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. 
-# - -from unittest import TestCase - - -from spyne.util.address import set_address_parser_settings - -set_address_parser_settings(trusted_proxies=['177.139.233.100']) - -from spyne.util.address import address_parser - - -class IPv4TestCase(TestCase): - """IP address Test""" - - def test_meta_none(self): - request = { - } - ip = address_parser.get_real_ip(request) - self.assertIsNone(ip) - - def test_http_x_forwarded_for_multiple(self): - request = { - 'HTTP_X_FORWARDED_FOR': '192.168.255.182, 10.0.0.0, 127.0.0.1, 198.84.193.157, 177.139.233.139', - 'HTTP_X_REAL_IP': '177.139.233.132', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "198.84.193.157") - - def test_http_x_forwarded_for_multiple_left_most_ip(self): - request = { - 'HTTP_X_FORWARDED_FOR': '192.168.255.182, 198.84.193.157, 10.0.0.0, 127.0.0.1, 177.139.233.139', - 'HTTP_X_REAL_IP': '177.139.233.132', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "198.84.193.157") - - def test_http_x_forwarded_for_multiple_right_most_ip(self): - request = { - 'HTTP_X_FORWARDED_FOR': '192.168.255.182, 198.84.193.157, 10.0.0.0, 127.0.0.1, 177.139.233.139', - 'HTTP_X_REAL_IP': '177.139.233.132', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request, right_most_proxy=True) - self.assertEqual(ip, "177.139.233.139") - - def test_http_x_forwarded_for_multiple_right_most_ip_private(self): - request = { - 'HTTP_X_FORWARDED_FOR': '192.168.255.182, 198.84.193.157, 10.0.0.0, 127.0.0.1, 177.139.233.139', - 'HTTP_X_REAL_IP': '177.139.233.132', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request, right_most_proxy=True) - self.assertEqual(ip, "177.139.233.139") - - def test_http_x_forwarded_for_multiple_bad_address(self): - request = { - 'HTTP_X_FORWARDED_FOR': 'unknown, 192.168.255.182, 10.0.0.0, 127.0.0.1, 198.84.193.157, 177.139.233.139', - 'HTTP_X_REAL_IP': 
'177.139.233.132', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "198.84.193.157") - - def test_http_x_forwarded_for_singleton(self): - request = { - 'HTTP_X_FORWARDED_FOR': '177.139.233.139', - 'HTTP_X_REAL_IP': '177.139.233.132', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.139") - - def test_http_x_forwarded_for_singleton_private_address(self): - request = { - 'HTTP_X_FORWARDED_FOR': '192.168.255.182', - 'HTTP_X_REAL_IP': '177.139.233.132', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.132") - - def test_bad_http_x_forwarded_for_fallback_on_x_real_ip(self): - request = { - 'HTTP_X_FORWARDED_FOR': 'unknown 177.139.233.139', - 'HTTP_X_REAL_IP': '177.139.233.132', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.132") - - def test_empty_http_x_forwarded_for_fallback_on_x_real_ip(self): - request = { - 'HTTP_X_FORWARDED_FOR': '', - 'HTTP_X_REAL_IP': '177.139.233.132', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.132") - - def test_empty_http_x_forwarded_for_empty_x_real_ip_fallback_on_remote_addr(self): - request = { - 'HTTP_X_FORWARDED_FOR': '', - 'HTTP_X_REAL_IP': '', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.133") - - def test_empty_http_x_forwarded_for_private_x_real_ip_fallback_on_remote_addr(self): - request = { - 'HTTP_X_FORWARDED_FOR': '', - 'HTTP_X_REAL_IP': '192.168.255.182', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.133") - - def test_private_http_x_forward_for_ip_addr(self): - request = { - 'HTTP_X_FORWARDED_FOR': '127.0.0.1', - 'HTTP_X_REAL_IP': '', - 
'REMOTE_ADDR': '', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, None) - - def test_private_remote_addr_for_ip_addr(self): - request = { - 'HTTP_X_FORWARDED_FOR': '', - 'REMOTE_ADDR': '127.0.0.1', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, None) - - def test_missing_x_forwarded(self): - request = { - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.133") - - def test_missing_x_forwarded_missing_real_ip(self): - request = { - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.133") - - def test_best_matched_real_ip(self): - request = { - 'HTTP_X_REAL_IP': '127.0.0.1', - 'REMOTE_ADDR': '172.31.233.133', - } - ip = address_parser.get_ip(request) - self.assertEqual(ip, "172.31.233.133") - - def test_best_matched_private_ip(self): - request = { - 'HTTP_X_REAL_IP': '127.0.0.1', - 'REMOTE_ADDR': '192.31.233.133', - } - ip = address_parser.get_ip(request) - self.assertEqual(ip, "192.31.233.133") - - def test_best_matched_private_ip_2(self): - request = { - 'HTTP_X_REAL_IP': '192.31.233.133', - 'REMOTE_ADDR': '127.0.0.1', - } - ip = address_parser.get_ip(request) - self.assertEqual(ip, "192.31.233.133") - - def test_x_forwarded_for_multiple(self): - request = { - 'X_FORWARDED_FOR': '192.168.255.182, 10.0.0.0, 127.0.0.1, 198.84.193.157, 177.139.233.139', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "198.84.193.157") - - def test_x_forwarded_for_multiple_left_most_ip(self): - request = { - 'X_FORWARDED_FOR': '192.168.255.182, 198.84.193.157, 10.0.0.0, 127.0.0.1, 177.139.233.139', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "198.84.193.157") - - def test_x_forwarded_for_multiple_right_most_ip(self): - request = { - 'X_FORWARDED_FOR': '192.168.255.182, 198.84.193.157, 
10.0.0.0, 127.0.0.1, 177.139.233.139', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request, right_most_proxy=True) - self.assertEqual(ip, "177.139.233.139") - - def test_x_forwarded_for_multiple_right_most_ip_private(self): - request = { - 'X_FORWARDED_FOR': '192.168.255.182, 198.84.193.157, 10.0.0.0, 127.0.0.1, 177.139.233.139', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request, right_most_proxy=True) - self.assertEqual(ip, "177.139.233.139") - - def test_x_forwarded_for_multiple_bad_address(self): - request = { - 'X_FORWARDED_FOR': 'unknown, 192.168.255.182, 10.0.0.0, 127.0.0.1, 198.84.193.157, 177.139.233.139', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "198.84.193.157") - - def test_x_forwarded_for_singleton(self): - request = { - 'X_FORWARDED_FOR': '177.139.233.139', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.139") - - def test_x_forwarded_for_singleton_private_address(self): - request = { - 'X_FORWARDED_FOR': '192.168.255.182', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.133") - - def test_bad_x_forwarded_for_fallback_on_x_real_ip(self): - request = { - 'X_FORWARDED_FOR': 'unknown 177.139.233.139', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.133") - - def test_empty_x_forwarded_for_fallback_on_x_real_ip(self): - request = { - 'X_FORWARDED_FOR': '', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.133") - - def test_empty_x_forwarded_for_empty_x_real_ip_fallback_on_remote_addr(self): - request = { - 'X_FORWARDED_FOR': '', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.133") - 
- def test_empty_x_forwarded_for_private_x_real_ip_fallback_on_remote_addr(self): - request = { - 'X_FORWARDED_FOR': '', - 'REMOTE_ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.133") - - def test_private_x_forward_for_ip_addr(self): - request = { - 'X_FORWARDED_FOR': '127.0.0.1', - 'REMOTE_ADDR': '', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, None) - - def test_x_forwarded_for_singleton_hyphen_as_delimiter(self): - request = { - 'X-FORWARDED-FOR': '177.139.233.139', - 'REMOTE-ADDR': '177.139.233.133', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "177.139.233.139") - - -class IPv4TrustedProxiesTestCase(TestCase): - """Trusted Proxies - IP address Test""" - - def test_meta_none(self): - request = { - } - ip = address_parser.get_trusted_ip(request) - self.assertIsNone(ip) - - def test_http_x_forwarded_for_conf_settings(self): - request = { - 'HTTP_X_FORWARDED_FOR': '198.84.193.157, 177.139.200.139, 177.139.233.100', - } - - ip = address_parser.get_trusted_ip(request) - self.assertEqual(ip, "198.84.193.157") - - def test_http_x_forwarded_for_no_proxy(self): - request = { - 'HTTP_X_FORWARDED_FOR': '198.84.193.157, 177.139.200.139, 177.139.233.139', - } - ip = address_parser.get_trusted_ip(request, trusted_proxies=[]) - self.assertIsNone(ip) - - def test_http_x_forwarded_for_single_proxy(self): - request = { - 'HTTP_X_FORWARDED_FOR': '198.84.193.157, 177.139.200.139, 177.139.233.139', - } - ip = address_parser.get_trusted_ip(request, trusted_proxies=['177.139.233.139']) - self.assertEqual(ip, "198.84.193.157") - - def test_http_x_forwarded_for_single_proxy_with_right_most(self): - request = { - 'HTTP_X_FORWARDED_FOR': '177.139.233.139, 177.139.200.139, 198.84.193.157', - } - ip = address_parser.get_trusted_ip(request, right_most_proxy=True, trusted_proxies=['177.139.233.139']) - self.assertEqual(ip, "198.84.193.157") - - def 
test_http_x_forwarded_for_multi_proxy(self): - request = { - 'HTTP_X_FORWARDED_FOR': '198.84.193.157, 177.139.200.139, 177.139.233.139', - } - ip = address_parser.get_trusted_ip(request, trusted_proxies=['177.139.233.138', '177.139.233.139']) - self.assertEqual(ip, "198.84.193.157") - - def test_http_x_forwarded_for_all_proxies_in_subnet(self): - request = { - 'HTTP_X_FORWARDED_FOR': '198.84.193.157, 177.139.200.139, 177.139.233.139', - } - ip = address_parser.get_trusted_ip(request, trusted_proxies=['177.139.233']) - self.assertEqual(ip, "198.84.193.157") - - def test_http_x_forwarded_for_all_proxies_in_subnet_2(self): - request = { - 'HTTP_X_FORWARDED_FOR': '198.84.193.157, 177.139.200.139, 177.139.233.139', - } - ip = address_parser.get_trusted_ip(request, trusted_proxies=['177.139']) - self.assertEqual(ip, "198.84.193.157") - - def test_x_forwarded_for_single_proxy(self): - request = { - 'X_FORWARDED_FOR': '198.84.193.157, 177.139.200.139, 177.139.233.139', - } - ip = address_parser.get_trusted_ip(request, trusted_proxies=['177.139.233.139']) - self.assertEqual(ip, "198.84.193.157") - - def test_x_forwarded_for_single_proxy_hyphens(self): - request = { - 'X-FORWARDED-FOR': '198.84.193.157, 177.139.200.139, 177.139.233.139', - } - ip = address_parser.get_trusted_ip(request, trusted_proxies=['177.139.233.139']) - self.assertEqual(ip, "198.84.193.157") - - def test_http_x_forwarded_for_and_x_forward_for_single_proxy(self): - request = { - 'HTTP_X_FORWARDED_FOR': '198.84.193.156, 177.139.200.139, 177.139.233.139', - 'X_FORWARDED_FOR': '198.84.193.157, 177.139.200.139, 177.139.233.139', - } - ip = address_parser.get_trusted_ip(request, trusted_proxies=['177.139.233.139']) - self.assertEqual(ip, "198.84.193.156") - - -class IPv6TestCase(TestCase): - """IP address Test""" - - def test_http_x_forwarded_for_multiple(self): - request = { - 'HTTP_X_FORWARDED_FOR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf, 74dc::02ba', - 'HTTP_X_REAL_IP': '74dc::02ba', - 'REMOTE_ADDR': 
'74dc::02ba', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "3ffe:1900:4545:3:200:f8ff:fe21:67cf") - - def test_http_x_forwarded_for_multiple_bad_address(self): - request = { - 'HTTP_X_FORWARDED_FOR': 'unknown, ::1/128, 74dc::02ba', - 'HTTP_X_REAL_IP': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - 'REMOTE_ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_http_x_forwarded_for_singleton(self): - request = { - 'HTTP_X_FORWARDED_FOR': '74dc::02ba', - 'HTTP_X_REAL_IP': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - 'REMOTE_ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_http_x_forwarded_for_singleton_private_address(self): - request = { - 'HTTP_X_FORWARDED_FOR': '::1/128', - 'HTTP_X_REAL_IP': '74dc::02ba', - 'REMOTE_ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_bad_http_x_forwarded_for_fallback_on_x_real_ip(self): - request = { - 'HTTP_X_FORWARDED_FOR': 'unknown ::1/128', - 'HTTP_X_REAL_IP': '74dc::02ba', - 'REMOTE_ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_empty_http_x_forwarded_for_fallback_on_x_real_ip(self): - request = { - 'HTTP_X_FORWARDED_FOR': '', - 'HTTP_X_REAL_IP': '74dc::02ba', - 'REMOTE_ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_empty_http_x_forwarded_for_empty_x_real_ip_fallback_on_remote_addr(self): - request = { - 'HTTP_X_FORWARDED_FOR': '', - 'HTTP_X_REAL_IP': '', - 'REMOTE_ADDR': '74dc::02ba', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_empty_http_x_forwarded_for_private_x_real_ip_fallback_on_remote_addr(self): - 
request = { - 'HTTP_X_FORWARDED_FOR': '', - 'HTTP_X_REAL_IP': '::1/128', - 'REMOTE_ADDR': '74dc::02ba', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_private_http_x_forward_for_ip_addr(self): - request = { - 'HTTP_X_FORWARDED_FOR': '::1/128', - 'HTTP_X_REAL_IP': '', - 'REMOTE_ADDR': '', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, None) - - def test_private_real_ip_for_ip_addr(self): - request = { - 'HTTP_X_FORWARDED_FOR': '', - 'HTTP_X_REAL_IP': '::1/128', - 'REMOTE_ADDR': '', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, None) - - def test_private_remote_addr_for_ip_addr(self): - request = { - 'HTTP_X_FORWARDED_FOR': '', - 'HTTP_X_REAL_IP': '', - 'REMOTE_ADDR': '::1/128', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, None) - - def test_missing_x_forwarded(self): - request = { - 'HTTP_X_REAL_IP': '74dc::02ba', - 'REMOTE_ADDR': '74dc::02ba', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_missing_x_forwarded_missing_real_ip(self): - request = { - 'REMOTE_ADDR': '74dc::02ba', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_missing_x_forwarded_missing_real_ip_mix_case(self): - request = { - 'REMOTE_ADDR': '74DC::02BA', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_private_remote_address(self): - request = { - 'REMOTE_ADDR': 'fe80::02ba', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, None) - - def test_best_matched_real_ip(self): - request = { - 'HTTP_X_REAL_IP': '::1', - 'REMOTE_ADDR': 'fe80::02ba', - } - ip = address_parser.get_ip(request) - self.assertEqual(ip, "fe80::02ba") - - def test_x_forwarded_for_multiple(self): - request = { - 'X_FORWARDED_FOR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf, 74dc::02ba', - 'REMOTE_ADDR': '74dc::02ba', - } - ip = 
address_parser.get_real_ip(request) - self.assertEqual(ip, "3ffe:1900:4545:3:200:f8ff:fe21:67cf") - - def test_x_forwarded_for_multiple_bad_address(self): - request = { - 'X_FORWARDED_FOR': 'unknown, ::1/128, 74dc::02ba', - 'REMOTE_ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_x_forwarded_for_singleton(self): - request = { - 'X_FORWARDED_FOR': '74dc::02ba', - 'REMOTE_ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_x_forwarded_for_singleton_private_address(self): - request = { - 'X_FORWARDED_FOR': '::1/128', - 'HTTP_X_REAL_IP': '74dc::02ba', - 'REMOTE_ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_bad_x_forwarded_for_fallback_on_x_real_ip(self): - request = { - 'X_FORWARDED_FOR': 'unknown ::1/128', - 'REMOTE_ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "3ffe:1900:4545:3:200:f8ff:fe21:67cf") - - def test_empty_x_forwarded_for_fallback_on_x_real_ip(self): - request = { - 'X_FORWARDED_FOR': '', - 'REMOTE_ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "3ffe:1900:4545:3:200:f8ff:fe21:67cf") - - def test_empty_x_forwarded_for_empty_x_real_ip_fallback_on_remote_addr(self): - request = { - 'X_FORWARDED_FOR': '', - 'REMOTE_ADDR': '74dc::02ba', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_empty_x_forwarded_for_private_x_real_ip_fallback_on_remote_addr(self): - request = { - 'X_FORWARDED_FOR': '', - 'REMOTE_ADDR': '74dc::02ba', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - def test_private_x_forward_for_ip_addr(self): - request = { - 'X_FORWARDED_FOR': '::1/128', - 
'REMOTE_ADDR': '', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, None) - - def test_x_forwarded_for_singleton_hyphen_as_delimiter(self): - request = { - 'X-FORWARDED-FOR': '74dc::02ba', - 'REMOTE-ADDR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf', - } - ip = address_parser.get_real_ip(request) - self.assertEqual(ip, "74dc::02ba") - - -class IPv6TrustedProxiesTestCase(TestCase): - """Trusted Proxies - IP address Test""" - - def test_http_x_forwarded_for_no_proxy(self): - request = { - 'HTTP_X_FORWARDED_FOR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf, 74dc::02ba', - } - ip = address_parser.get_trusted_ip(request, trusted_proxies=[]) - self.assertIsNone(ip) - - def test_http_x_forwarded_for_single_proxy(self): - request = { - 'HTTP_X_FORWARDED_FOR': '3ffe:1900:4545:3:200:f8ff:fe21:67cf, 74dc::02ba', - } - ip = address_parser.get_trusted_ip(request, trusted_proxies=['74dc::02ba']) - self.assertEqual(ip, "3ffe:1900:4545:3:200:f8ff:fe21:67cf") diff --git a/libs_crutch/contrib/spyne/test/util/test_util.py b/libs_crutch/contrib/spyne/test/util/test_util.py deleted file mode 100644 index 3b8b5e7..0000000 --- a/libs_crutch/contrib/spyne/test/util/test_util.py +++ /dev/null @@ -1,601 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function - -import json -import decimal -import unittest - -import pytz -import sqlalchemy - -from pprint import pprint -from decimal import Decimal as D -from datetime import datetime - -from lxml import etree - -from spyne.const import MAX_STRING_FIELD_LENGTH - -from spyne.decorator import srpc -from spyne.application import Application - -from spyne.model.complex import XmlAttribute, TypeInfo -from spyne.model.complex import ComplexModel -from spyne.model.complex import Iterable -from spyne.model.complex import Array -from spyne.model.primitive import Decimal -from spyne.model.primitive import DateTime -from spyne.model.primitive import Integer -from spyne.model.primitive import Unicode - -from spyne.service import 
Service - -from spyne.util import AttrDict, AttrDictColl, get_version -from spyne.util import memoize, memoize_ignore_none, memoize_ignore, memoize_id - -from spyne.util.protocol import deserialize_request_string - -from spyne.util.dictdoc import get_dict_as_object, get_object_as_yaml, \ - get_object_as_json -from spyne.util.dictdoc import get_object_as_dict -from spyne.util.tdict import tdict -from spyne.util.tlist import tlist - -from spyne.util.xml import get_object_as_xml -from spyne.util.xml import get_xml_as_object -from spyne.util.xml import get_schema_documents -from spyne.util.xml import get_validation_schema - - -class TestUtil(unittest.TestCase): - def test_version(self): - assert get_version('sqlalchemy') == get_version(sqlalchemy) - assert '.'.join([str(i) for i in get_version('sqlalchemy')]) == \ - sqlalchemy.__version__ - - -class TestTypeInfo(unittest.TestCase): - def test_insert(self): - d = TypeInfo() - - d['a'] = 1 - assert d[0] == d['a'] == 1 - - d.insert(0, ('b', 2)) - - assert d[1] == d['a'] == 1 - assert d[0] == d['b'] == 2 - - def test_insert_existing(self): - d = TypeInfo() - - d["a"] = 1 - d["b"] = 2 - assert d[1] == d['b'] == 2 - - d.insert(0, ('b', 3)) - assert d[1] == d['a'] == 1 - assert d[0] == d['b'] == 3 - - def test_update(self): - d = TypeInfo() - d["a"] = 1 - d.update([('b', 2)]) - assert d[0] == d['a'] == 1 - assert d[1] == d['b'] == 2 - - -class TestXml(unittest.TestCase): - def test_serialize(self): - - class C(ComplexModel): - __namespace__ = "tns" - i = Integer - s = Unicode - - c = C(i=5, s="x") - - ret = get_object_as_xml(c, C) - print(etree.tostring(ret)) - assert ret.tag == "{tns}C" - - ret = get_object_as_xml(c, C, "X") - print(etree.tostring(ret)) - assert ret.tag == "{tns}X" - - ret = get_object_as_xml(c, C, "X", no_namespace=True) - print(etree.tostring(ret)) - assert ret.tag == "X" - - ret = get_object_as_xml(c, C, no_namespace=True) - print(etree.tostring(ret)) - assert ret.tag == "C" - - def 
test_deserialize(self): - class Punk(ComplexModel): - __namespace__ = 'some_namespace' - - a = Unicode - b = Integer - c = Decimal - d = DateTime - - class Foo(ComplexModel): - __namespace__ = 'some_other_namespace' - - a = Unicode - b = Integer - c = Decimal - d = DateTime - e = XmlAttribute(Integer) - - def __eq__(self, other): - # remember that this is a test object - assert ( - self.a == other.a and - self.b == other.b and - self.c == other.c and - self.d == other.d and - self.e == other.e - ) - - return True - - docs = get_schema_documents([Punk, Foo]) - pprint(docs) - assert docs['s0'].tag == '{http://www.w3.org/2001/XMLSchema}schema' - assert docs['tns'].tag == '{http://www.w3.org/2001/XMLSchema}schema' - print() - - print("the other namespace %r:" % docs['tns'].attrib['targetNamespace']) - assert docs['tns'].attrib['targetNamespace'] == 'some_namespace' - print(etree.tostring(docs['tns'], pretty_print=True)) - print() - - print("the other namespace %r:" % docs['s0'].attrib['targetNamespace']) - assert docs['s0'].attrib['targetNamespace'] == 'some_other_namespace' - print(etree.tostring(docs['s0'], pretty_print=True)) - print() - - foo = Foo(a=u'a', b=1, c=decimal.Decimal('3.4'), - d=datetime(2011,2,20,tzinfo=pytz.utc), e=5) - doc = get_object_as_xml(foo, Foo) - print(etree.tostring(doc, pretty_print=True)) - foo_back = get_xml_as_object(doc, Foo) - - assert foo_back == foo - - # as long as it doesn't fail, it's ok. 
- get_validation_schema([Punk, Foo]) - - -class TestCDict(unittest.TestCase): - def test_cdict(self): - from spyne.util.cdict import cdict - - class A(object): - pass - - class B(A): - pass - - class E(B): - pass - - class F(E): - pass - - class C(object): - pass - - d = cdict({A: "fun", F: 'zan'}) - - assert d[A] == 'fun' - assert d[B] == 'fun' - assert d[F] == 'zan' - try: - d[C] - except KeyError: - pass - else: - raise Exception("Must fail.") - - -class TestTDict(unittest.TestCase): - def test_tdict_notype(self): - d = tdict() - d[0] = 1 - assert d[0] == 1 - - d = tdict() - d.update({0:1}) - assert d[0] == 1 - - d = tdict.fromkeys([0], 1) - assert d[0] == 1 - - def test_tdict_k(self): - d = tdict(str) - try: - d[0] = 1 - except TypeError: - pass - else: - raise Exception("must fail") - - d = tdict(str) - d['s'] = 1 - assert d['s'] == 1 - - def test_tdict_v(self): - d = tdict(vt=str) - try: - d[0] = 1 - except TypeError: - pass - else: - raise Exception("must fail") - - d = tdict(vt=str) - d[0] = 's' - assert d[0] == 's' - - -class TestLogRepr(unittest.TestCase): - def test_log_repr_simple(self): - from spyne.model.complex import ComplexModel - from spyne.model.primitive import String - from spyne.util.web import log_repr - - class Z(ComplexModel): - z=String - - l = MAX_STRING_FIELD_LENGTH + 100 - print(log_repr(Z(z="a" * l))) - print("Z(z='%s'(...))" % ('a' * MAX_STRING_FIELD_LENGTH)) - - assert log_repr(Z(z="a" * l)) == "Z(z='%s'(...))" % \ - ('a' * MAX_STRING_FIELD_LENGTH) - assert log_repr(['a','b','c'], Array(String)) == "['a', 'b', (...)]" - - def test_log_repr_complex(self): - from spyne.model import ByteArray - from spyne.model import File - from spyne.model.complex import ComplexModel - from spyne.model.primitive import String - from spyne.util.web import log_repr - - class Z(ComplexModel): - _type_info = [ - ('f', File(logged=False)), - ('t', ByteArray(logged=False)), - ('z', Array(String)), - ] - l = MAX_STRING_FIELD_LENGTH + 100 - val = Z(z=["abc"] 
* l, t=['t'], f=File.Value(name='aaa', data=['t'])) - print(repr(val)) - - assert log_repr(val) == "Z(z=['abc', 'abc', (...)])" - - def test_log_repr_dict_vanilla(self): - from spyne.model import AnyDict - from spyne.util.web import log_repr - - t = AnyDict - - assert log_repr({1: 1}, t) == "{1: 1}" - assert log_repr({1: 1, 2: 2}, t) == "{1: 1, 2: 2}" - assert log_repr({1: 1, 2: 2, 3: 3}, t) == "{1: 1, 2: 2, (...)}" - - assert log_repr([1], t) == "[1]" - assert log_repr([1, 2], t) == "[1, 2]" - assert log_repr([1, 2, 3], t) == "[1, 2, (...)]" - - def test_log_repr_dict_keys(self): - from spyne.model import AnyDict - from spyne.util.web import log_repr - - t = AnyDict(logged='keys') - - assert log_repr({1: 1}, t) == "{1: (...)}" - - assert log_repr([1], t) == "[1]" - - def test_log_repr_dict_values(self): - from spyne.model import AnyDict - from spyne.util.web import log_repr - - t = AnyDict(logged='values') - - assert log_repr({1: 1}, t) == "{(...): 1}" - - assert log_repr([1], t) == "[1]" - - def test_log_repr_dict_full(self): - from spyne.model import AnyDict - from spyne.util.web import log_repr - - t = AnyDict(logged='full') - - assert log_repr({1: 1, 2: 2, 3: 3}, t) == "{1: 1, 2: 2, 3: 3}" - assert log_repr([1, 2, 3], t) == "[1, 2, 3]" - - def test_log_repr_dict_keys_full(self): - from spyne.model import AnyDict - from spyne.util.web import log_repr - - t = AnyDict(logged='keys-full') - - assert log_repr({1: 1, 2: 2, 3: 3}, t) == "{1: (...), 2: (...), 3: (...)}" - assert log_repr([1, 2, 3], t) == "[1, 2, 3]" - - def test_log_repr_dict_values_full(self): - from spyne.model import AnyDict - from spyne.util.web import log_repr - - t = AnyDict(logged='values-full') - - assert log_repr({1: 1, 2: 2, 3: 3}, t) == "{(...): 1, (...): 2, (...): 3}" - assert log_repr([1, 2, 3], t) == "[1, 2, 3]" - - -class TestDeserialize(unittest.TestCase): - def test_deserialize(self): - from spyne.protocol.soap import Soap11 - - class SomeService(Service): - @srpc(Integer, 
_returns=Iterable(Integer)) - def some_call(yo): - return range(yo) - - app = Application([SomeService], 'tns', in_protocol=Soap11(), - out_protocol=Soap11()) - - meat = 30 - - string = """ - - - - %s - - - - """ % meat - - obj = deserialize_request_string(string, app) - - assert obj.yo == meat - - -class TestEtreeDict(unittest.TestCase): - - longMessage = True - - def test_simple(self): - from lxml.etree import tostring - from spyne.util.etreeconv import root_dict_to_etree - assert tostring(root_dict_to_etree({'a':{'b':'c'}})) == b'c' - - def test_not_sized(self): - from lxml.etree import tostring - from spyne.util.etreeconv import root_dict_to_etree - - complex_value = root_dict_to_etree({'a':{'b':1}}) - self.assertEqual(tostring(complex_value), b'1', - "The integer should be properly rendered in the etree") - - complex_none = root_dict_to_etree({'a':{'b':None}}) - self.assertEqual(tostring(complex_none), b'', - "None should not be rendered in the etree") - - simple_value = root_dict_to_etree({'a': 1}) - self.assertEqual(tostring(simple_value), b'1', - "The integer should be properly rendered in the etree") - - none_value = root_dict_to_etree({'a': None}) - self.assertEqual(tostring(none_value), b'', - "None should not be rendered in the etree") - - string_value = root_dict_to_etree({'a': 'lol'}) - self.assertEqual(tostring(string_value), b'lol', - "A string should be rendered as a string") - - complex_string_value = root_dict_to_etree({'a': {'b': 'lol'}}) - self.assertEqual(tostring(complex_string_value), b'lol', - "A string should be rendered as a string") - - -class TestDictDoc(unittest.TestCase): - def test_the(self): - class C(ComplexModel): - __namespace__ = "tns" - i = Integer - s = Unicode - a = Array(DateTime) - - def __eq__(self, other): - print("Yaaay!") - return self.i == other.i and \ - self.s == other.s and \ - self.a == other.a - - c = C(i=5, s="x", a=[datetime(2011,12,22, tzinfo=pytz.utc)]) - - for iw, ca in ((False,dict), (True,dict), 
(False,list), (True, list)): - print() - print('complex_as:', ca) - d = get_object_as_dict(c, C, complex_as=ca) - print(d) - o = get_dict_as_object(d, C, complex_as=ca) - print(o) - print(c) - assert o == c - - -class TestAttrDict(unittest.TestCase): - def test_attr_dict(self): - assert AttrDict(a=1)['a'] == 1 - - def test_attr_dict_coll(self): - assert AttrDictColl('SomeDict').SomeDict.NAME == 'SomeDict' - assert AttrDictColl('SomeDict').SomeDict(a=1)['a'] == 1 - assert AttrDictColl('SomeDict').SomeDict(a=1).NAME == 'SomeDict' - - -class TestYaml(unittest.TestCase): - def test_deser(self): - class C(ComplexModel): - a = Unicode - b = Decimal - - ret = get_object_as_yaml(C(a='burak', b=D(30)), C) - assert ret == b"""C: - a: burak - b: '30' -""" - - -class TestJson(unittest.TestCase): - def test_deser(self): - class C(ComplexModel): - _type_info = [ - ('a', Unicode), - ('b', Decimal), - ] - - ret = get_object_as_json(C(a='burak', b=D(30)), C) - assert ret == b'["burak", "30"]' - ret = get_object_as_json(C(a='burak', b=D(30)), C, complex_as=dict) - assert json.loads(ret.decode('utf8')) == \ - json.loads(u'{"a": "burak", "b": "30"}') - - -class TestFifo(unittest.TestCase): - def test_msgpack_fifo(self): - import msgpack - - v1 = [1, 2, 3, 4] - v2 = [5, 6, 7, 8] - v3 = {b"a": 9, b"b": 10, b"c": 11} - - s1 = msgpack.packb(v1) - s2 = msgpack.packb(v2) - s3 = msgpack.packb(v3) - - unpacker = msgpack.Unpacker() - unpacker.feed(s1) - unpacker.feed(s2) - unpacker.feed(s3[:4]) - - assert next(iter(unpacker)) == v1 - assert next(iter(unpacker)) == v2 - try: - next(iter(unpacker)) - except StopIteration: - pass - else: - raise Exception("must fail") - - unpacker.feed(s3[4:]) - assert next(iter(unpacker)) == v3 - - -class TestTlist(unittest.TestCase): - def test_tlist(self): - tlist([], int) - - a = tlist([1, 2], int) - a.append(3) - a += [4] - a = [5] + [a] - a = a + [6] - a[0] = 1 - a[5:] = [5] - - try: - tlist([1, 2, 'a'], int) - a.append('a') - a += ['a'] - _ = ['a'] + a - _ 
= a + ['a'] - a[0] = 'a' - a[0:] = 'a' - - except TypeError: - pass - else: - raise Exception("Must fail") - - -class TestMemoization(unittest.TestCase): - def test_memoize(self): - counter = [0] - @memoize - def f(arg): - counter[0] += 1 - print(arg, counter) - - f(1) - f(1) - assert counter[0] == 1 - - f(2) - assert counter[0] == 2 - - def test_memoize_ignore_none(self): - counter = [0] - @memoize_ignore_none - def f(arg): - counter[0] += 1 - print(arg, counter) - return arg - - f(None) - f(None) - assert counter[0] == 2 - - f(1) - assert counter[0] == 3 - f(1) - assert counter[0] == 3 - - def test_memoize_ignore_values(self): - counter = [0] - @memoize_ignore((1,)) - def f(arg): - counter[0] += 1 - print(arg, counter) - return arg - - f(1) - f(1) - assert counter[0] == 2 - - f(2) - assert counter[0] == 3 - f(2) - assert counter[0] == 3 - - def test_memoize_id(self): - counter = [0] - @memoize_id - def f(arg): - counter[0] += 1 - print(arg, counter) - return arg - - d = {} - f(d) - f(d) - assert counter[0] == 1 - - f({}) - assert counter[0] == 2 - f({}) - assert counter[0] == 3 - - -if __name__ == '__main__': - unittest.main() diff --git a/libs_crutch/contrib/spyne/util/__init__.py b/libs_crutch/contrib/spyne/util/__init__.py deleted file mode 100644 index c644bb3..0000000 --- a/libs_crutch/contrib/spyne/util/__init__.py +++ /dev/null @@ -1,112 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import logging -logger = logging.getLogger(__name__) - -from spyne.util import six - -from spyne.util.coopmt import keepfirst -from spyne.util.coopmt import coroutine -from spyne.util.coopmt import Break - -from spyne.util.memo import memoize -from spyne.util.memo import memoize_first -from spyne.util.memo import memoize_ignore -from spyne.util.memo import memoize_ignore_none -from spyne.util.memo import memoize_id - -from spyne.util.attrdict import AttrDict -from spyne.util.attrdict import AttrDictColl -from spyne.util.attrdict import DefaultAttrDict - -from spyne.util._base import utctime -from spyne.util._base import get_version - - -try: - import thread - - from urllib import splittype, splithost, quote, urlencode - from urllib2 import urlopen, Request, HTTPError - -except ImportError: # Python 3 - import _thread as thread - - from urllib.parse import splittype, splithost, quote, urlencode - from urllib.request import urlopen, Request - from urllib.error import HTTPError - - -def split_url(url): - """Splits a url into (uri_scheme, host[:port], path)""" - scheme, remainder = splittype(url) - host, path = splithost(remainder) - return scheme.lower(), host, path - - -def sanitize_args(a): - try: - args, kwargs = a - if isinstance(args, tuple) and isinstance(kwargs, dict): - return args, dict(kwargs) - - except (TypeError, ValueError): - args, kwargs = (), {} - - if a is not None: - if isinstance(a, dict): - args = tuple() - kwargs = a - - elif isinstance(a, tuple): - if isinstance(a[-1], dict): - args, kwargs = a[0:-1], a[-1] - else: - args = a - kwargs = {} - - return args, kwargs - - -if six.PY2: - def _bytes_join(val, joiner=''): - return joiner.join(val) -else: - def _bytes_join(val, joiner=b''): - return joiner.join(val) - - -def utf8(s): 
- if isinstance(s, bytes): - return s.decode('utf8') - - if isinstance(s, list): - return [utf8(ss) for ss in s] - - if isinstance(s, tuple): - return tuple([utf8(ss) for ss in s]) - - if isinstance(s, set): - return {utf8(ss) for ss in s} - - if isinstance(s, frozenset): - return frozenset([utf8(ss) for ss in s]) - - return s diff --git a/libs_crutch/contrib/spyne/util/_base.py b/libs_crutch/contrib/spyne/util/_base.py deleted file mode 100644 index d23dea5..0000000 --- a/libs_crutch/contrib/spyne/util/_base.py +++ /dev/null @@ -1,44 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from time import mktime -from datetime import datetime - -from spyne.util import memoize, six - - -def utctime(): - return mktime(datetime.utcnow().timetuple()) - - -@memoize -def get_version(package): - if isinstance(package, (six.text_type, six.binary_type)): - package = __import__(package) - - verstr = getattr(package, '__version__') - retval = [] - - for f in verstr.split("."): - try: - retval.append(int(f)) - except ValueError: - retval.append(f) - - return tuple(retval) diff --git a/libs_crutch/contrib/spyne/util/_twisted_ws.py b/libs_crutch/contrib/spyne/util/_twisted_ws.py deleted file mode 100644 index df7468c..0000000 --- a/libs_crutch/contrib/spyne/util/_twisted_ws.py +++ /dev/null @@ -1,610 +0,0 @@ -# -*- test-case-name: twisted.web.test.test_websockets -*- -# Copyright (c) Twisted Matrix Laboratories. -# 2011-2012 Oregon State University Open Source Lab -# 2011-2012 Corbin Simpson -# -# See LICENSE for details. - -""" -The WebSockets protocol (RFC 6455), provided as a resource which wraps a -factory. -""" - -__all__ = ["WebSocketsResource", "IWebSocketsProtocol", "IWebSocketsResource", - "WebSocketsProtocol", "WebSocketsProtocolWrapper"] - - -from hashlib import sha1 -from struct import pack, unpack - -from zope.interface import implementer, Interface, providedBy, directlyProvides - -from twisted.python import log -from twisted.python.constants import Flags, FlagConstant -from twisted.internet.protocol import Protocol -from twisted.internet.interfaces import IProtocol -from twisted.web.resource import IResource -from twisted.web.server import NOT_DONE_YET - - - -class _WSException(Exception): - """ - Internal exception for control flow inside the WebSockets frame parser. 
- """ - - - -class CONTROLS(Flags): - """ - Control frame specifiers. - """ - - CONTINUE = FlagConstant(0) - TEXT = FlagConstant(1) - BINARY = FlagConstant(2) - CLOSE = FlagConstant(8) - PING = FlagConstant(9) - PONG = FlagConstant(10) - - -# The GUID for WebSockets, from RFC 6455. -_WS_GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" - - - -def _makeAccept(key): - """ - Create an B{accept} response for a given key. - - @type key: C{str} - @param key: The key to respond to. - - @rtype: C{str} - @return: An encoded response. - """ - return sha1("%s%s" % (key, _WS_GUID)).digest().encode("base64").strip() - - - -def _mask(buf, key): - """ - Mask or unmask a buffer of bytes with a masking key. - - @type buf: C{str} - @param buf: A buffer of bytes. - - @type key: C{str} - @param key: The masking key. Must be exactly four bytes. - - @rtype: C{str} - @return: A masked buffer of bytes. - """ - key = [ord(i) for i in key] - buf = list(buf) - for i, char in enumerate(buf): - buf[i] = chr(ord(char) ^ key[i % 4]) - return "".join(buf) - - - -def _makeFrame(buf, opcode, fin, mask=None): - """ - Make a frame. - - This function always creates unmasked frames, and attempts to use the - smallest possible lengths. - - @type buf: C{str} - @param buf: A buffer of bytes. - - @type opcode: C{CONTROLS} - @param opcode: Which type of frame to create. - - @rtype: C{str} - @return: A packed frame. 
- """ - bufferLength = len(buf) - if mask is not None: - lengthMask = 0x80 - else: - lengthMask = 0 - - if bufferLength > 0xffff: - length = "%s%s" % (chr(lengthMask | 0x7f), pack(">Q", bufferLength)) - elif bufferLength > 0x7d: - length = "%s%s" % (chr(lengthMask | 0x7e), pack(">H", bufferLength)) - else: - length = chr(lengthMask | bufferLength) - - if fin: - header = 0x80 - else: - header = 0x01 - - header = chr(header | opcode.value) - if mask is not None: - buf = "%s%s" % (mask, _mask(buf, mask)) - frame = "%s%s%s" % (header, length, buf) - return frame - - - -def _parseFrames(frameBuffer, needMask=True): - """ - Parse frames in a highly compliant manner. - - @param frameBuffer: A buffer of bytes. - @type frameBuffer: C{list} - - @param needMask: If C{True}, refuse any frame which is not masked. - @type needMask: C{bool} - """ - start = 0 - payload = "".join(frameBuffer) - - while True: - # If there's not at least two bytes in the buffer, bail. - if len(payload) - start < 2: - break - - # Grab the header. This single byte holds some flags and an opcode - header = ord(payload[start]) - if header & 0x70: - # At least one of the reserved flags is set. Pork chop sandwiches! - raise _WSException("Reserved flag in frame (%d)" % header) - - fin = header & 0x80 - - # Get the opcode, and translate it to a local enum which we actually - # care about. - opcode = header & 0xf - try: - opcode = CONTROLS.lookupByValue(opcode) - except ValueError: - raise _WSException("Unknown opcode %d in frame" % opcode) - - # Get the payload length and determine whether we need to look for an - # extra length. - length = ord(payload[start + 1]) - masked = length & 0x80 - - if not masked and needMask: - # The client must mask the data sent - raise _WSException("Received data not masked") - - length &= 0x7f - - # The offset we'll be using to walk through the frame. We use this - # because the offset is variable depending on the length and mask. - offset = 2 - - # Extra length fields. 
- if length == 0x7e: - if len(payload) - start < 4: - break - - length = payload[start + 2:start + 4] - length = unpack(">H", length)[0] - offset += 2 - elif length == 0x7f: - if len(payload) - start < 10: - break - - # Protocol bug: The top bit of this long long *must* be cleared; - # that is, it is expected to be interpreted as signed. - length = payload[start + 2:start + 10] - length = unpack(">Q", length)[0] - offset += 8 - - if masked: - if len(payload) - (start + offset) < 4: - # This is not strictly necessary, but it's more explicit so - # that we don't create an invalid key. - break - - key = payload[start + offset:start + offset + 4] - offset += 4 - - if len(payload) - (start + offset) < length: - break - - data = payload[start + offset:start + offset + length] - - if masked: - data = _mask(data, key) - - if opcode == CONTROLS.CLOSE: - if len(data) >= 2: - # Gotta unpack the opcode and return usable data here. - data = unpack(">H", data[:2])[0], data[2:] - else: - # No reason given; use generic data. - data = 1000, "No reason given" - - yield opcode, data, bool(fin) - start += offset + length - - if len(payload) > start: - frameBuffer[:] = [payload[start:]] - else: - frameBuffer[:] = [] - - - - -class IWebSocketsProtocol(IProtocol): - """ - A protocol which understands the WebSockets interface. - - @since: 13.1 - """ - - def sendFrame(opcode, data, fin): - """ - Send a frame. - """ - - - def frameReceived(opcode, data, fin): - """ - Callback when a frame is received. - """ - - - def loseConnection(): - """ - Close the connection sending a close frame first. - """ - - - -@implementer(IWebSocketsProtocol) -class WebSocketsProtocol(Protocol): - """ - @since: 13.1 - """ - _disconnecting = False - _buffer = None - - - def connectionMade(self): - """ - Log the new connection and initialize the buffer list. 
- """ - log.msg("Opening connection with %s" % self.transport.getPeer()) - self._buffer = [] - - - def _parseFrames(self): - """ - Find frames in incoming data and pass them to the underlying protocol. - """ - for frame in _parseFrames(self._buffer): - opcode, data, fin = frame - if opcode in (CONTROLS.CONTINUE, CONTROLS.TEXT, CONTROLS.BINARY): - # Business as usual. Decode the frame, if we have a decoder. - # Pass the frame to the underlying protocol. - self.frameReceived(opcode, data, fin) - elif opcode == CONTROLS.CLOSE: - # The other side wants us to close. - reason, text = data - log.msg("Closing connection: %r (%d)" % (text, reason)) - - # Close the connection. - self.transport.loseConnection() - return - elif opcode == CONTROLS.PING: - # 5.5.2 PINGs must be responded to with PONGs. - # 5.5.3 PONGs must contain the data that was sent with the - # provoking PING. - self.transport.write(_makeFrame(data, CONTROLS.PONG, True)) - - - def frameReceived(self, opcode, data, fin): - """ - Callback to implement. - """ - raise NotImplementedError() - - - def sendFrame(self, opcode, data, fin): - """ - Build a frame packet and send it over the wire. - """ - packet = _makeFrame(data, opcode, fin) - self.transport.write(packet) - - - def dataReceived(self, data): - """ - Append the data to the buffer list and parse the whole. - """ - self._buffer.append(data) - try: - self._parseFrames() - except _WSException: - # Couldn't parse all the frames, something went wrong, let's bail. - log.err() - self.transport.loseConnection() - - - def loseConnection(self): - """ - Close the connection. - - This includes telling the other side we're closing the connection. - - If the other side didn't signal that the connection is being closed, - then we might not see their last message, but since their last message - should, according to the spec, be a simple acknowledgement, it - shouldn't be a problem. - """ - # Send a closing frame. It's only polite. 
(And might keep the browser - # from hanging.) - if not self._disconnecting: - frame = _makeFrame("", CONTROLS.CLOSE, True) - self.transport.write(frame) - self._disconnecting = True - self.transport.loseConnection() - - - -class WebSocketsProtocolWrapper(WebSocketsProtocol): - """ - A protocol wrapper which provides L{IWebSocketsProtocol} by making messages - as data frames. - - @since: 13.1 - """ - - def __init__(self, wrappedProtocol, defaultOpcode=CONTROLS.TEXT): - self.wrappedProtocol = wrappedProtocol - self.defaultOpcode = defaultOpcode - - - def makeConnection(self, transport): - """ - Upon connection, provides the transport interface, and forwards ourself - as the transport to C{self.wrappedProtocol}. - """ - directlyProvides(self, providedBy(transport)) - WebSocketsProtocol.makeConnection(self, transport) - self.wrappedProtocol.makeConnection(self) - - - def connectionMade(self): - """ - Initialize the list of messages. - """ - WebSocketsProtocol.connectionMade(self) - self._messages = [] - - - def write(self, data): - """ - Write to the websocket protocol, transforming C{data} in a frame. - """ - self.sendFrame(self.defaultOpcode, data, True) - - - def writeSequence(self, data): - """ - Send all chunks from C{data} using C{write}. - """ - for chunk in data: - self.write(chunk) - - - def __getattr__(self, name): - """ - Forward all non-local attributes and methods to C{self.transport}. - """ - return getattr(self.transport, name) - - - def frameReceived(self, opcode, data, fin): - """ - FOr each frame received, accumulate the data (ignoring the opcode), and - forwarding the messages if C{fin} is set. - """ - self._messages.append(data) - if fin: - content = "".join(self._messages) - self._messages[:] = [] - self.wrappedProtocol.dataReceived(content) - - - def connectionLost(self, reason): - """ - Forward C{connectionLost} to C{self.wrappedProtocol}. 
- """ - self.wrappedProtocol.connectionLost(reason) - - - -class IWebSocketsResource(Interface): - """ - A WebSockets resource. - - @since: 13.1 - """ - - def lookupProtocol(protocolNames, request): - """ - Build a protocol instance for the given protocol options and request. - The returned protocol is plugged to the HTTP transport, and the - returned protocol name, if specified, is used as - I{Sec-WebSocket-Protocol} value. If the protocol provides - L{IWebSocketsProtocol}, it will be connected directly, otherwise it - will be wrapped by L{WebSocketsProtocolWrapper}. - - @param protocolNames: The asked protocols from the client. - @type protocolNames: C{list} of C{str} - - @param request: The connecting client request. - @type request: L{IRequest} - - @return: A tuple of (protocol, matched protocol name or C{None}). - @rtype: C{tuple} - """ - - - -@implementer(IResource, IWebSocketsResource) -class WebSocketsResource(object): - """ - A resource for serving a protocol through WebSockets. - - This class wraps a factory and connects it to WebSockets clients. Each - connecting client will be connected to a new protocol of the factory. - - Due to unresolved questions of logistics, this resource cannot have - children. - - @param factory: The factory producing either L{IWebSocketsProtocol} or - L{IProtocol} providers, which will be used by the default - C{lookupProtocol} implementation. - @type factory: L{twisted.internet.protocol.Factory} - - @since: 13.1 - """ - isLeaf = True - - def __init__(self, factory): - self._factory = factory - - - def getChildWithDefault(self, name, request): - """ - Reject attempts to retrieve a child resource. All path segments beyond - the one which refers to this resource are handled by the WebSocket - connection. - """ - raise RuntimeError( - "Cannot get IResource children from WebSocketsResource") - - - def putChild(self, path, child): - """ - Reject attempts to add a child resource to this resource. 
The - WebSocket connection handles all path segments beneath this resource, - so L{IResource} children can never be found. - """ - raise RuntimeError( - "Cannot put IResource children under WebSocketsResource") - - - def lookupProtocol(self, protocolNames, request): - """ - Build a protocol instance for the given protocol names and request. - This default implementation ignores the protocol names and just return - a protocol instance built by C{self._factory}. - - @param protocolNames: The asked protocols from the client. - @type protocolNames: C{list} of C{str} - - @param request: The connecting client request. - @type request: L{Request} - - @return: A tuple of (protocol, C{None}). - @rtype: C{tuple} - """ - protocol = self._factory.buildProtocol(request.transport.getPeer()) - return protocol, None - - - def render(self, request): - """ - Render a request. - - We're not actually rendering a request. We are secretly going to handle - a WebSockets connection instead. - - @param request: The connecting client request. - @type request: L{Request} - - @return: a string if the request fails, otherwise C{NOT_DONE_YET}. - """ - request.defaultContentType = None - # If we fail at all, we'll fail with 400 and no response. - failed = False - - if request.method != "GET": - # 4.2.1.1 GET is required. - failed = True - print('request.method', request.method) - - upgrade = request.getHeader("Upgrade") - if upgrade is None or "websocket" not in upgrade.lower(): - # 4.2.1.3 Upgrade: WebSocket is required. - failed = True - print('request.getHeader("Upgrade")', request.getHeader("Upgrade")) - - connection = request.getHeader("Connection") - if connection is None or "upgrade" not in connection.lower(): - # 4.2.1.4 Connection: Upgrade is required. - failed = True - print('request.getHeader("Connection")', request.getHeader("Connection")) - - key = request.getHeader("Sec-WebSocket-Key") - if key is None: - # 4.2.1.5 The challenge key is required. 
- failed = True - print('request.getHeader("Sec-WebSocket-Key")', request.getHeader("Sec-WebSocket-Key")) - - version = request.getHeader("Sec-WebSocket-Version") - if version != "13": - # 4.2.1.6 Only version 13 works. - failed = True - # 4.4 Forward-compatible version checking. - request.setHeader("Sec-WebSocket-Version", "13") - print('request.getHeader("Sec-WebSocket-Version")', request.getHeader("Sec-WebSocket-Version")) - - if failed: - request.setResponseCode(400) - return "" - - askedProtocols = request.requestHeaders.getRawHeaders( - "Sec-WebSocket-Protocol") - protocol, protocolName = self.lookupProtocol(askedProtocols, request) - - # If a protocol is not created, we deliver an error status. - if not protocol: - request.setResponseCode(502) - return "" - - # We are going to finish this handshake. We will return a valid status - # code. - # 4.2.2.5.1 101 Switching Protocols - request.setResponseCode(101) - # 4.2.2.5.2 Upgrade: websocket - request.setHeader("Upgrade", "WebSocket") - # 4.2.2.5.3 Connection: Upgrade - request.setHeader("Connection", "Upgrade") - # 4.2.2.5.4 Response to the key challenge - request.setHeader("Sec-WebSocket-Accept", _makeAccept(key)) - # 4.2.2.5.5 Optional codec declaration - if protocolName: - request.setHeader("Sec-WebSocket-Protocol", protocolName) - - # Provoke request into flushing headers and finishing the handshake. - request.write("") - - # And now take matters into our own hands. We shall manage the - # transport's lifecycle. - transport, request.transport = request.transport, None - - if not IWebSocketsProtocol.providedBy(protocol): - protocol = WebSocketsProtocolWrapper(protocol) - - # Connect the transport to our factory, and make things go. We need to - # do some stupid stuff here; see #3204, which could fix it. - if request.isSecure(): - # Secure connections wrap in TLSMemoryBIOProtocol too. 
- transport.protocol.wrappedProtocol = protocol - else: - transport.protocol = protocol - protocol.makeConnection(transport) - - return NOT_DONE_YET diff --git a/libs_crutch/contrib/spyne/util/address.py b/libs_crutch/contrib/spyne/util/address.py deleted file mode 100644 index 55a0da0..0000000 --- a/libs_crutch/contrib/spyne/util/address.py +++ /dev/null @@ -1,276 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -# The MIT License -# -# Copyright (c) Val Neekman @ Neekware Inc. http://neekware.com -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -# - -from __future__ import print_function - -# Direct plagiarization of https://github.com/un33k/django-ipware/ -# at 57897c03026913892e61a164bc8b022778802ab9 - -import socket - -# List of known proxy server(s) -TRUSTED_PROXIES = [] - -# Search for the real IP address in the following order -# Configurable via settings.py -PRECEDENCE = ( - 'HTTP_X_FORWARDED_FOR', 'X_FORWARDED_FOR', - # (client, proxy1, proxy2) OR (proxy2, proxy1, client) - 'HTTP_CLIENT_IP', - 'HTTP_X_REAL_IP', - 'HTTP_X_FORWARDED', - 'HTTP_X_CLUSTER_CLIENT_IP', - 'HTTP_FORWARDED_FOR', - 'HTTP_FORWARDED', - 'HTTP_VIA', - 'REMOTE_ADDR', -) - -# Private IP addresses -# http://en.wikipedia.org/wiki/List_of_assigned_/8_IPv4_address_blocks -# http://www.ietf.org/rfc/rfc3330.txt (IPv4) -# http://www.ietf.org/rfc/rfc5156.txt (IPv6) -# Regex would be ideal here, but this is keeping it simple -# as fields are configurable via settings.py -PRIVATE_IP_PREFIXES = ( - '0.', # externally non-routable - '10.', # class A private block - '169.254.', # link-local block - '172.16.', '172.17.', '172.18.', '172.19.', - '172.20.', '172.21.', '172.22.', '172.23.', - '172.24.', '172.25.', '172.26.', '172.27.', - '172.28.', '172.29.', '172.30.', '172.31.', - # class B private blocks - '192.0.2.', - # reserved for documentation and example code - '192.168.', # class C private block - '255.255.255.', # IPv4 broadcast address -) + ( - '2001:db8:', - # reserved for documentation and example code - 'fc00:', # IPv6 private block - 'fe80:', # link-local 
unicast - 'ff00:', # IPv6 multicast -) - -LOOPBACK_PREFIX = ( - '127.', # IPv4 loopback device - '::1', # IPv6 loopback device -) - -NON_PUBLIC_IP_PREFIXES = PRIVATE_IP_PREFIXES + LOOPBACK_PREFIX - - -def set_address_parser_settings(trusted_proxies, field_precedence=PRECEDENCE, - private_ip_prefixes=NON_PUBLIC_IP_PREFIXES): - """Changes global parameters for Spyne's residend ip address parser. - - :param trusted_proxies: Tuple of reverse proxies that are under YOUR control. - :param field_precedence: A tuple of field names that may contain address - information, in decreasing level of preference. - :param private_ip_prefixes: You might want to add your list of - public-but-otherwise-private ip prefixes or addresses here. - """ - - global address_parser - - address_parser = AddressParser(trusted_proxies=trusted_proxies, - field_precedence=field_precedence, - private_ip_prefixes=private_ip_prefixes) - - -class AddressParser(object): - def __init__(self, private_ip_prefixes=None, trusted_proxies=(), - field_precedence=PRECEDENCE): - if private_ip_prefixes is not None: - self.private_ip_prefixes = private_ip_prefixes - else: - self.private_ip_prefixes = \ - tuple([ip.lower() for ip in NON_PUBLIC_IP_PREFIXES]) - - if len(trusted_proxies) > 0: - self.trusted_proxies = trusted_proxies - - else: - self.trusted_proxies = \ - tuple([ip.lower() for ip in TRUSTED_PROXIES]) - - self.field_precedence = field_precedence - - - def get_port(self, wsgi_env): - return wsgi_env.get("REMOTE_PORT", 0) - - def get_ip(self, wsgi_env, real_ip_only=False, right_most_proxy=False): - """ - Returns client's best-matched ip-address, or None - """ - best_matched_ip = None - - for key in self.field_precedence: - value = wsgi_env.get(key, None) - if value is None: - value = wsgi_env.get(key.replace('_', '-'), None) - - if value is None or value == '': - continue - - ips = [ip.strip().lower() for ip in value.split(',')] - - if right_most_proxy and len(ips) > 1: - ips = reversed(ips) - - for ip_str 
in ips: - if ip_str is None or ip_str == '' or not \ - AddressParser.is_valid_ip(ip_str): - continue - - if not ip_str.startswith(self.private_ip_prefixes): - return ip_str - - if not real_ip_only: - loopback = LOOPBACK_PREFIX - - if best_matched_ip is None: - best_matched_ip = ip_str - - elif best_matched_ip.startswith(loopback) \ - and not ip_str.startswith(loopback): - best_matched_ip = ip_str - - return best_matched_ip - - def get_real_ip(self, wsgi_env, right_most_proxy=False): - """ - Returns client's best-matched `real` `externally-routable` ip-address, - or None - """ - return self.get_ip(wsgi_env, real_ip_only=True, - right_most_proxy=right_most_proxy) - - def get_trusted_ip(self, wsgi_env, right_most_proxy=False, - trusted_proxies=None): - """ - Returns client's ip-address from `trusted` proxy server(s) or None - """ - - if trusted_proxies is None: - trusted_proxies = self.trusted_proxies - - if trusted_proxies is None or len(trusted_proxies) == 0: - trusted_proxies = TRUSTED_PROXIES - - if trusted_proxies is None or len(trusted_proxies) == 0: - return - - meta_keys = ['HTTP_X_FORWARDED_FOR', 'X_FORWARDED_FOR'] - - for key in meta_keys: - value = wsgi_env.get(key, None) - if value is None: - value = wsgi_env.get(key.replace('_', '-'), None) - - if value is None or value == '': - continue - - ips = [ip.strip().lower() for ip in value.split(',')] - - if len(ips) > 1: - if right_most_proxy: - ips.reverse() - - for proxy in trusted_proxies: - if proxy in ips[-1]: - return ips[0] - - @staticmethod - def is_valid_ipv4(ip_str): - """ - Check the validity of an IPv4 address - """ - - if ip_str is None: - return False - - try: - socket.inet_pton(socket.AF_INET, ip_str) - - except AttributeError: # pragma: no cover - try: # Fall-back on legacy API or False - socket.inet_aton(ip_str) - except (AttributeError, socket.error): - return False - return ip_str.count('.') == 3 - - except socket.error: - return False - - return True - - @staticmethod - def 
is_valid_ipv6(ip_str): - """ - Check the validity of an IPv6 address - """ - - if ip_str is None: - return False - - try: - socket.inet_pton(socket.AF_INET6, ip_str) - - except socket.error: - return False - - return True - - @staticmethod - def is_valid_ip(ip_str): - """ - Check the validity of an IP address - """ - - return AddressParser.is_valid_ipv4(ip_str) or \ - AddressParser.is_valid_ipv6(ip_str) - - -address_parser = AddressParser() diff --git a/libs_crutch/contrib/spyne/util/appreg.py b/libs_crutch/contrib/spyne/util/appreg.py deleted file mode 100644 index 31c8637..0000000 --- a/libs_crutch/contrib/spyne/util/appreg.py +++ /dev/null @@ -1,88 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -""" -Module that contains the Spyne Application Registry. 
-""" - -import logging -logger = logging.getLogger(__name__) - -applications = {} - -try: - from collections import namedtuple - - _ApplicationMetaData = namedtuple("_ApplicationMetaData", - ['app', 'inst_stack', 'null', 'ostr']) - -except ImportError: # python 2.5 - class _ApplicationMetaData: - def __init__(self, app, inst_stack, null, ostr): - self.app = app - self.inst_stack = inst_stack - self.null = null - self.ostr = ostr - - -def unregister_application(app): - key = (app.tns, app.name) - del applications[key] - - -def register_application(app): - key = (app.tns, app.name) - - from spyne.server.null import NullServer - - try: - import traceback - stack = traceback.format_stack() - except ImportError: - stack = None - - prev = applications.get(key, None) - - if prev is not None: - if hash(prev.app) == hash(app): - logger.debug("Application %r previously registered as %r is the same" - " as %r. Skipping." % (prev.app, key, app)) - prev.inst_stack.append(stack) - - else: - logger.warning("Overwriting application %r(%r)." % (key, app)) - - if prev.inst_stack is not None: - stack_traces = [] - for s in prev.inst_stack: - if s is not None: - stack_traces.append(''.join(s)) - logger.debug("Stack trace of the instantiation:\n%s" % - '====================\n'.join(stack_traces)) - - applications[key] = _ApplicationMetaData(app=app, inst_stack=[stack], - null=NullServer(app, appinit=False), - ostr=NullServer(app, appinit=False, ostr=True) - ) - - logger.debug("Registering %r as %r" % (app, key)) - - -def get_application(tns, name='Application'): - return applications.get((tns, name), None) diff --git a/libs_crutch/contrib/spyne/util/attrdict.py b/libs_crutch/contrib/spyne/util/attrdict.py deleted file mode 100644 index de996aa..0000000 --- a/libs_crutch/contrib/spyne/util/attrdict.py +++ /dev/null @@ -1,87 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -def TAttrDict(default=None): - class AttrDict(object): - def __init__(self, *args, **kwargs): - self.__data = dict(*args, **kwargs) - - def __call__(self, **kwargs): - retval = AttrDict(self.__data.items()) - for k,v in kwargs.items(): - setattr(retval, k, v) - return retval - - def __setattr__(self, key, value): - if key == "_AttrDict__data": - return object.__setattr__(self, key, value) - if key == 'items': - raise ValueError("'items' is part of dict interface") - self.__data[key] = value - - def __setitem__(self, key, value): - self.__data[key] = value - - def __iter__(self): - return iter(self.__data) - - def items(self): - return self.__data.items() - - def get(self, key, *args): - return self.__data.get(key, *args) - - def update(self, d): - return self.__data.update(d) - - def __repr__(self): - return "AttrDict(%s)" % ', '.join(['%s=%r' % (k, v) - for k,v in sorted(self.__data.items(), key=lambda x:x[0])]) - - if default is None: - def __getattr__(self, key): - return self.__data[key] - def __getitem__(self, key): - return self.__data[key] - else: - def __getitem__(self, key): - if key in self.__data: - return self.__data[key] - else: - return default() - def __getattr__(self, key): - if key in ("_AttrDict__data", 'items', 
'get', 'update'): - return object.__getattribute__(self, '__data') - if key in self.__data: - return self.__data[key] - else: - return default() - - return AttrDict - -AttrDict = TAttrDict() -DefaultAttrDict = TAttrDict(lambda: None) - - -class AttrDictColl(object): - AttrDictImpl = DefaultAttrDict - def __init__(self, *args): - for a in args: - setattr(self, a, AttrDictColl.AttrDictImpl(NAME=a)) diff --git a/libs_crutch/contrib/spyne/util/autorel.py b/libs_crutch/contrib/spyne/util/autorel.py deleted file mode 100644 index 2fc685a..0000000 --- a/libs_crutch/contrib/spyne/util/autorel.py +++ /dev/null @@ -1,258 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -# -# Copyright (c) 2004-2016, CherryPy Team (team@cherrypy.org) -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the CherryPy Team nor the names of its contributors -# may be used to endorse or promote products derived from this software -# without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# - -import logging -logger = logging.getLogger(__name__) - -import os, re, sys - -from spyne.util.color import YEL - -# _module__file__base is used by Autoreload to make -# absolute any filenames retrieved from sys.modules which are not -# already absolute paths. This is to work around Python's quirk -# of importing the startup script and using a relative filename -# for it in sys.modules. -# -# Autoreload examines sys.modules afresh every time it runs. If an application -# changes the current directory by executing os.chdir(), then the next time -# Autoreload runs, it will not be able to find any filenames which are -# not absolute paths, because the current directory is not the same as when the -# module was first imported. 
Autoreload will then wrongly conclude the file -# has "changed", and initiate the shutdown/re-exec sequence. -# See cherrypy ticket #917. -# For this workaround to have a decent probability of success, this module -# needs to be imported as early as possible, before the app has much chance -# to change the working directory. -_module__file__base = os.getcwd() - -try: - import fcntl -except ImportError: - MAX_FILES = 0 -else: - try: - MAX_FILES = os.sysconf('SC_OPEN_MAX') - except AttributeError: - MAX_FILES = 1024 - - - -class AutoReloader(object): - """Monitor which re-executes the process when files change. - - This :ref:`plugin` restarts the process (via :func:`os.execv`) - if any of the files it monitors change (or is deleted). By default, the - autoreloader monitors all imported modules; you can add to the - set by adding to ``autoreload.files``:: - - spyne.util.autorel.AutoReloader.FILES.add(myFile) - - - spyne.util.autorel.AutoReloader.match = r'^(?!cherrypy).+' - - The autoreload plugin takes a ``frequency`` argument. The default is - 1 second; that is, the autoreloader will examine files once each second. - """ - - FILES = set() - """The set of files to poll for modifications.""" - - def __init__(self, frequency=1, match='.*'): - self.max_cloexec_files = MAX_FILES - - self.mtimes = {} - self.files = set(AutoReloader.FILES) - - self.match = match - """A regular expression by which to match filenames. - - If there are imported files you do *not* wish to monitor, you can - adjust the ``match`` attribute, a regular expression. For example, - to stop monitoring cherrypy itself, try ``match=r'^(?!cherrypy).+'``\\. 
- """ - - self.frequency = frequency - """The interval in seconds at which to poll for modified files.""" - - def start(self): - from twisted.internet.task import LoopingCall - - retval = LoopingCall(self.run) - retval.start(self.frequency) - return retval # oh no - - def sysfiles(self): - """Return a Set of sys.modules filenames to monitor.""" - files = set() - for k, m in list(sys.modules.items()): - if re.match(self.match, k): - if ( - hasattr(m, '__loader__') and - hasattr(m.__loader__, 'archive') - ): - f = m.__loader__.archive - else: - try: - f = getattr(m, '__file__', None) - except ImportError: - f = None - - if f is not None and not os.path.isabs(f): - # ensure absolute paths so a os.chdir() in the app - # doesn't break me - f = os.path.normpath( - os.path.join(_module__file__base, f)) - files.add(f) - return files - - def run(self): - """Reload the process if registered files have been modified.""" - for filename in self.sysfiles() | self.files: - if filename: - if filename.endswith('.pyc'): - filename = filename[:-1] - - oldtime = self.mtimes.get(filename, 0) - if oldtime is None: - # Module with no .py file. Skip it. - continue - - try: - mtime = os.stat(filename).st_mtime - except OSError: - # Either a module with no .py file, or it's been deleted. - mtime = None - - if filename not in self.mtimes: - # If a module has no .py file, this will be None. - self.mtimes[filename] = mtime - else: - if mtime is None or mtime > oldtime: - # The file has been deleted or modified. - logger.info("Restarting because '%s' has changed." % - filename) - - from twisted.internet import reactor - reactor.stop() - self._do_execv() - return - - @staticmethod - def _extend_pythonpath(env): - """ - If sys.path[0] is an empty string, the interpreter was likely - invoked with -m and the effective path is about to change on - re-exec. Add the current directory to $PYTHONPATH to ensure - that the new process sees the same path. 
- - This issue cannot be addressed in the general case because - Python cannot reliably reconstruct the - original command line (http://bugs.python.org/issue14208). - - (This idea filched from tornado.autoreload) - """ - - path_prefix = '.' + os.pathsep - existing_path = env.get('PYTHONPATH', '') - needs_patch = ( - sys.path[0] == '' and - not existing_path.startswith(path_prefix) - ) - - if needs_patch: - env["PYTHONPATH"] = path_prefix + existing_path - - def _set_cloexec(self): - """Set the CLOEXEC flag on all open files (except stdin/out/err). - - If self.max_cloexec_files is an integer (the default), then on - platforms which support it, it represents the max open files setting - for the operating system. This function will be called just before - the process is restarted via os.execv() to prevent open files - from persisting into the new process. - - Set self.max_cloexec_files to 0 to disable this behavior. - """ - for fd in range(3, self.max_cloexec_files): # skip stdin/out/err - try: - flags = fcntl.fcntl(fd, fcntl.F_GETFD) - except IOError: - continue - fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) - - def _do_execv(self): - """Re-execute the current process. - - This must be called from the main thread, because certain platforms - (OS X) don't allow execv to be called in a child thread very well. - """ - args = sys.argv[:] - - self._extend_pythonpath(os.environ) - - logger.info('Re-spawning %s' % ' '.join(args)) - logger.info("") - logger.info("%s Bye! 
%s", YEL("-" * 35), YEL("-" * 35)) - logger.info("") - - if sys.platform[:4] == 'java': - from _systemrestart import SystemRestart - raise SystemRestart - - args.insert(0, sys.executable) - if sys.platform == 'win32': - args = ['"%s"' % arg for arg in args] - - os.chdir(_module__file__base) - logger.debug("Change working directory to: %s", _module__file__base) - - if self.max_cloexec_files: - self._set_cloexec() - - os.execv(sys.executable, args) diff --git a/libs_crutch/contrib/spyne/util/cdict.py b/libs_crutch/contrib/spyne/util/cdict.py deleted file mode 100644 index 8e4d170..0000000 --- a/libs_crutch/contrib/spyne/util/cdict.py +++ /dev/null @@ -1,85 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""cdict (ClassDict) is a funny kind of dict that tries to return the values for -the base classes of a key when the entry for the key is not found. It is not a -generalized dictionary that can handle any type of key -- it relies on -spyne.model api to look for classes. It also assumes cdict never changes after -the first lookup. - ->>> from spyne.util.cdict import cdict ->>> class A(object): -... pass -... ->>> class B(A): -... pass -... ->>> class C(object): -... pass -... ->>> class D: -... pass -... 
->>> d=cdict({A: "fun", object: "base"}) ->>> print d[A] -fun ->>> print d -{: 'fun', : 'base'} ->>> print d[B] -fun ->>> print d -{: 'fun', : 'fun', : 'base'} ->>> print d[C] -base ->>> print d -{: 'fun', : 'fun', : 'base', : 'base'} ->>> print d[D] -*** KeyError: ->>> -""" - -import logging -logger = logging.getLogger(__name__) - -class cdict(dict): - def __getitem__(self, cls): - try: - return dict.__getitem__(self, cls) - - except KeyError as e: - if not hasattr(cls, '__bases__'): - cls = cls.__class__ - - for b in reversed(cls.__bases__): - try: - retval = self[b] - # this is why a cdict instance must never be modified after - # the first lookup - self[cls] = retval - return retval - except KeyError: - pass - raise e - - def get(self, k, d=None): - try: - return self[k] - - except KeyError: - return d diff --git a/libs_crutch/contrib/spyne/util/cherry.py b/libs_crutch/contrib/spyne/util/cherry.py deleted file mode 100644 index b604fc7..0000000 --- a/libs_crutch/contrib/spyne/util/cherry.py +++ /dev/null @@ -1,41 +0,0 @@ -# Use Cherrypy as wsgi server. -# Source: https://www.digitalocean.com/community/tutorials/how-to-deploy-python-wsgi-applications-using-a-cherrypy-web-server-behind-nginx - -import logging -import calculate.contrib.cherrypy as cherrypy - - -def cherry_graft_and_start(wsgi_application, host="0.0.0.0", port=8000, - num_threads=30, ssl_module=None, cert=None, key=None, cacert=None): - - logging.basicConfig(level=logging.DEBUG) - logging.getLogger('spyne.protocol.xml').setLevel(logging.DEBUG) - - # Mount the application - cherrypy.tree.graft(wsgi_application, "/") - - # Unsubscribe the default server - cherrypy.server.unsubscribe() - - # Instantiate a new server object - server = cherrypy._cpserver.Server() - - # Configure the server object - server.socket_host = host - server.socket_port = port - server.thread_pool = num_threads - - # For SSL Support - if ssl_module is not None: - server.ssl_module = ssl_module # eg. 
'pyopenssl' - server.ssl_certificate = cert # eg. 'ssl/certificate.crt' - server.ssl_private_key = key # eg. 'ssl/private.key' - server.ssl_certificate_chain = cacert # eg. 'ssl/bundle.crt' - - # Subscribe this server - server.subscribe() - - # Start the server engine (Option 1 *and* 2) - cherrypy.engine.start() - - return cherrypy.engine.block() diff --git a/libs_crutch/contrib/spyne/util/color.py b/libs_crutch/contrib/spyne/util/color.py deleted file mode 100644 index f12490a..0000000 --- a/libs_crutch/contrib/spyne/util/color.py +++ /dev/null @@ -1,74 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import absolute_import - - -try: - import colorama - R = lambda s: ''.join((colorama.Fore.RED, colorama.Style.BRIGHT, s, - colorama.Style.RESET_ALL)) - G = lambda s: ''.join((colorama.Fore.GREEN, colorama.Style.BRIGHT, s, - colorama.Style.RESET_ALL)) - B = lambda s: ''.join((colorama.Fore.BLUE, colorama.Style.BRIGHT, s, - colorama.Style.RESET_ALL)) - - DARK_R = lambda s: ''.join((colorama.Fore.RED, s, colorama.Style.RESET_ALL)) - DARK_G = lambda s: ''.join((colorama.Fore.GREEN, s, colorama.Style.RESET_ALL)) - DARK_B = lambda s: ''.join((colorama.Fore.BLUE, s, colorama.Style.RESET_ALL)) - - YEL = lambda s: ''.join((colorama.Fore.YELLOW, colorama.Style.BRIGHT, s, - colorama.Style.RESET_ALL)) - MAG = lambda s: ''.join((colorama.Fore.MAGENTA, colorama.Style.BRIGHT, s, - colorama.Style.RESET_ALL)) - CYA = lambda s: ''.join((colorama.Fore.CYAN, colorama.Style.BRIGHT, s, - colorama.Style.RESET_ALL)) - - DARK_YEL = lambda s: ''.join((colorama.Fore.YELLOW, s, - colorama.Style.RESET_ALL)) - DARK_MAG = lambda s: ''.join((colorama.Fore.MAGENTA, s, - colorama.Style.RESET_ALL)) - DARK_CYA = lambda s: ''.join((colorama.Fore.CYAN, s, - colorama.Style.RESET_ALL)) - -except ImportError: - R = lambda s: s - G = lambda s: s - B = lambda s: s - DARK_R = lambda s: s - DARK_G = lambda s: s - DARK_B = lambda s: s - YEL = lambda s: s - MAG = lambda s: s - CYA = lambda s: s - DARK_YEL = lambda s: s - DARK_MAG = lambda s: s - DARK_CYA = lambda s: s - - -if __name__ == '__main__': - print(R("RED")) - print(G("GREEN")) - print(B("BLUE")) - print(DARK_R("DARK_RED")) - print(DARK_G("DARK_GREEN")) - print(DARK_B("DARK_BLUE")) - print(YEL("YELLOW")) - print(MAG("MAGENTA")) - print(CYA("CYAN")) diff --git a/libs_crutch/contrib/spyne/util/coopmt.py 
b/libs_crutch/contrib/spyne/util/coopmt.py deleted file mode 100644 index 62fb142..0000000 --- a/libs_crutch/contrib/spyne/util/coopmt.py +++ /dev/null @@ -1,102 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The cooperative multitasking module. It includes the coroutine stuff. - -This could have been named just coroutine.py if it wasn't for the coroutine -decorator. 
-""" - - -import logging -logger = logging.getLogger(__name__) - -from itertools import chain -from inspect import isgeneratorfunction - - -class Break(Exception): - """Raised for breaking out of infinite loops inside coroutines.""" - pass - - -def coroutine(func): - assert isgeneratorfunction(func) - - def start(*args, **kwargs): - try: - ret = func(*args, **kwargs) - except TypeError as e: - logger.error("Function %r at %s:%d got error %r", func.func_name, - func.__module__, func.__code__.co_firstlineno, e) - raise - - try: - next(ret) - - except StopIteration: - return None - - except Exception as e: - if not hasattr(e, 'logged'): - logger.error("Exception in coroutine") - logger.exception(e) - try: - e.logged = True - except: - pass - - raise - - return ret - - return start - - -def keepfirst(func): - assert isgeneratorfunction(func) - - def start(*args, **kwargs): - try: - ret = func(*args, **kwargs) - except TypeError as e: - logger.error("Function %r at %s:%d got error %r", func.func_name, - func.__module__, func.__code__.co_firstlineno, e) - raise - - try: - first = next(ret) - - except StopIteration: - return None - - except Exception as e: - if not hasattr(e, 'logged'): - logger.error("Exception in coroutine") - logger.exception(e) - try: - e.logged = True - except: - pass - - raise - - return chain((first,), ret) - - return start diff --git a/libs_crutch/contrib/spyne/util/dictdoc.py b/libs_crutch/contrib/spyne/util/dictdoc.py deleted file mode 100644 index 77571cd..0000000 --- a/libs_crutch/contrib/spyne/util/dictdoc.py +++ /dev/null @@ -1,214 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from spyne.context import FakeContext - -from spyne.protocol.dictdoc import HierDictDocument -from spyne.protocol.dictdoc import SimpleDictDocument - -try: - from spyne.protocol.json import JsonDocument -except ImportError as _import_error: - _local_import_error = _import_error - def JsonDocument(*args, **kwargs): - raise _local_import_error - - -try: - from spyne.protocol.yaml import YamlDocument -except ImportError as _import_error: - _local_import_error = _import_error - def YamlDocument(*args, **kwargs): - raise _local_import_error - - -try: - from spyne.protocol.msgpack import MessagePackDocument -except ImportError as _import_error: - _local_import_error = _import_error - def MessagePackDocument(*args, **kwargs): - raise _local_import_error - - -from spyne.model.primitive import Double -from spyne.model.primitive import Boolean -from spyne.model.primitive import Decimal -from spyne.model.primitive import Integer - - -class _UtilProtocol(HierDictDocument): - def __init__(self, app=None, validator=None, mime_type=None, - ignore_uncap=False, - # DictDocument specific - ignore_wrappers=True, - complex_as=dict, - ordered=False): - - super(_UtilProtocol, self).__init__(app, validator, mime_type, ignore_uncap, - ignore_wrappers, complex_as, ordered) - - self._from_unicode_handlers[Double] = lambda cls, val: val - self._from_unicode_handlers[Boolean] = lambda cls, val: val - self._from_unicode_handlers[Decimal] = lambda cls, val: val - self._from_unicode_handlers[Integer] = lambda cls, val: val - 
- self._to_unicode_handlers[Double] = lambda cls, val: val - self._to_unicode_handlers[Boolean] = lambda cls, val: val - self._to_unicode_handlers[Decimal] = lambda cls, val: val - self._to_unicode_handlers[Integer] = lambda cls, val: val - - -def get_doc_as_object(d, cls, ignore_wrappers=True, complex_as=list, - protocol=_UtilProtocol, protocol_inst=None): - if protocol_inst is None: - protocol_inst = protocol(ignore_wrappers=ignore_wrappers, - complex_as=complex_as) - - return protocol_inst._doc_to_object(None, cls, d) - - -get_dict_as_object = get_doc_as_object -"""DEPRECATED: Use ``get_doc_as_object`` instead""" - - -def get_object_as_doc(o, cls=None, ignore_wrappers=True, complex_as=dict, - protocol=_UtilProtocol, protocol_inst=None): - if cls is None: - cls = o.__class__ - - if protocol_inst is None: - protocol_inst = protocol(ignore_wrappers=ignore_wrappers, - complex_as=complex_as) - - retval = protocol_inst._object_to_doc(cls, o) - - if not ignore_wrappers: - return {cls.get_type_name(): retval} - - return retval - - -get_object_as_dict = get_object_as_doc -"""DEPRECATED: Use ``get_object_as_doc`` instead.""" - -def get_object_as_simple_dict(o, cls=None, hier_delim='.', prefix=None): - if cls is None: - cls = o.__class__ - - return SimpleDictDocument(hier_delim=hier_delim) \ - .object_to_simple_dict(cls, o, prefix=prefix) - - -def get_object_as_json(o, cls=None, ignore_wrappers=True, complex_as=list, - encoding='utf8', polymorphic=False, indent=None, **kwargs): - if cls is None: - cls = o.__class__ - - prot = JsonDocument(ignore_wrappers=ignore_wrappers, complex_as=complex_as, - polymorphic=polymorphic, indent=indent, **kwargs) - ctx = FakeContext(out_document=[prot._object_to_doc(cls, o)]) - prot.create_out_string(ctx, encoding) - return b''.join(ctx.out_string) - - -def get_object_as_json_doc(o, cls=None, ignore_wrappers=True, complex_as=list, - polymorphic=False, indent=None, **kwargs): - if cls is None: - cls = o.__class__ - - prot = 
JsonDocument(ignore_wrappers=ignore_wrappers, complex_as=complex_as, - polymorphic=polymorphic, indent=indent, **kwargs) - - return prot._object_to_doc(cls, o) - - -def get_object_as_yaml(o, cls=None, ignore_wrappers=False, complex_as=dict, - encoding='utf8', polymorphic=False): - if cls is None: - cls = o.__class__ - - prot = YamlDocument(ignore_wrappers=ignore_wrappers, complex_as=complex_as, - polymorphic=polymorphic) - ctx = FakeContext(out_document=[prot._object_to_doc(cls,o)]) - prot.create_out_string(ctx, encoding) - return b''.join(ctx.out_string) - - -def get_object_as_yaml_doc(o, cls=None, ignore_wrappers=False, complex_as=dict, - polymorphic=False): - if cls is None: - cls = o.__class__ - - prot = YamlDocument(ignore_wrappers=ignore_wrappers, complex_as=complex_as, - polymorphic=polymorphic) - return prot._object_to_doc(cls, o) - - -def get_object_as_msgpack(o, cls=None, ignore_wrappers=False, complex_as=dict, - polymorphic=False): - if cls is None: - cls = o.__class__ - - prot = MessagePackDocument(ignore_wrappers=ignore_wrappers, - complex_as=complex_as, polymorphic=polymorphic) - ctx = FakeContext(out_document=[prot._object_to_doc(cls,o)]) - prot.create_out_string(ctx) - return b''.join(ctx.out_string) - - -def get_object_as_msgpack_doc(o, cls=None, ignore_wrappers=False, - complex_as=dict, polymorphic=False): - if cls is None: - cls = o.__class__ - - prot = MessagePackDocument(ignore_wrappers=ignore_wrappers, - complex_as=complex_as, polymorphic=polymorphic) - - return prot._object_to_doc(cls, o) - - -def json_loads(s, cls, protocol=JsonDocument, **kwargs): - if s is None: - return None - if s == '': - return None - prot = protocol(**kwargs) - ctx = FakeContext(in_string=[s]) - prot.create_in_document(ctx) - return prot._doc_to_object(None, cls, ctx.in_document, - validator=prot.validator) - - -get_json_as_object = json_loads - - -def yaml_loads(s, cls, protocol=YamlDocument, ignore_wrappers=False, **kwargs): - if s is None: - return None - if s == 
'' or s == b'': - return None - prot = protocol(ignore_wrappers=ignore_wrappers, **kwargs) - ctx = FakeContext(in_string=[s]) - prot.create_in_document(ctx) - retval = prot._doc_to_object(None, cls, ctx.in_document, - validator=prot.validator) - return retval - - -get_yaml_as_object = yaml_loads diff --git a/libs_crutch/contrib/spyne/util/django.py b/libs_crutch/contrib/spyne/util/django.py deleted file mode 100644 index c51e622..0000000 --- a/libs_crutch/contrib/spyne/util/django.py +++ /dev/null @@ -1,538 +0,0 @@ -# encoding: utf-8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""Useful stuff to integrate Spyne with Django. 
- -* Django model <-> spyne type mapping -* Service for common exception handling - -""" - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -import re - -from itertools import chain - -from django.core.exceptions import (ImproperlyConfigured, ObjectDoesNotExist, - ValidationError as DjValidationError) -from django.core.validators import (slug_re, - MinLengthValidator, MaxLengthValidator) -try: - from django.core.validators import comma_separated_int_list_re -except ImportError: - comma_separated_int_list_re = re.compile(r'^[\d,]+$') - -from spyne.error import (ResourceNotFoundError, ValidationError as - BaseValidationError, Fault) -from spyne.model import primitive -from spyne.model.complex import ComplexModelMeta, ComplexModelBase -from spyne.service import Service -from spyne.util.cdict import cdict -from spyne.util.odict import odict -from spyne.util.six import add_metaclass - - -# regex is based on http://www.w3.org/TR/xforms20/#xforms:email -email_re = re.compile( - r"[A-Za-z0-9!#-'\*\+\-/=\?\^_`\{-~]+" - r"(\.[A-Za-z0-9!#-'\*\+\-/=\?\^_`\{-~]+)*@" - # domain part is either a single symbol - r"(([a-zA-Z0-9]|" - # or have at least two symbols - # hyphen can't be at the beginning or end of domain part - # domain should contain at least 2 parts, the last one is TLD - r"([a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])+)\.)+" - # TLD should contain only letters, at least 2 - r"[A-Za-z]{2,}", re.IGNORECASE) - - -def _handle_minlength(validator, params): - new_min = validator.limit_value - old_min = params.setdefault('min_len', new_min) - params['min_len'] = max(old_min, new_min) - - -def _handle_maxlength(validator, params): - new_max = validator.limit_value - old_max = params.setdefault('max_len', new_max) - params['max_len'] = min(old_max, new_max) - - -class BaseDjangoFieldMapper(object): - - """Abstrace base class for field mappers.""" - - _VALIDATOR_HANDLERS = cdict({ - MinLengthValidator: _handle_minlength, - 
MaxLengthValidator: _handle_maxlength, - }) - - @staticmethod - def is_field_nullable(field, **kwargs): - """Return True if django field is nullable.""" - return field.null - - @staticmethod - def is_field_blank(field, **kwargs): - """Return True if django field is blank.""" - return field.blank - - def map(self, field, **kwargs): - """Map field to spyne model. - - :param field: Django Field instance - :param kwargs: Extra params to configure spyne model - :returns: tuple (field attribute name, mapped spyne model) - - """ - params = kwargs.copy() - - self._process_validators(field.validators, params) - - nullable = self.is_field_nullable(field, **kwargs) - blank = self.is_field_blank(field, **kwargs) - required = not (field.has_default() or blank or field.primary_key) - - if field.has_default(): - params['default'] = field.get_default() - - spyne_model = self.get_spyne_model(field, **kwargs) - customized_model = spyne_model(nullable=nullable, - min_occurs=int(required), **params) - - return (field.attname, customized_model) - - def get_spyne_model(self, field, **kwargs): - """Return spyne model for given Django field.""" - raise NotImplementedError - - def _process_validators(self, validators, params): - for v in validators: - handler = self._VALIDATOR_HANDLERS.get(type(v)) - if handler: - handler(v, params) - - -class DjangoFieldMapper(BaseDjangoFieldMapper): - - """Basic mapper for django fields.""" - - def __init__(self, spyne_model): - """Django field mapper constructor.""" - self.spyne_model = spyne_model - - def get_spyne_model(self, field, **kwargs): - """Return configured spyne model.""" - return self.spyne_model - - -class DecimalMapper(DjangoFieldMapper): - - """Mapper for DecimalField.""" - - def map(self, field, **kwargs): - """Map DecimalField to spyne model. 
- - :returns: tuple (field attribute name, mapped spyne model) - - """ - params = kwargs.copy() - params.update({ - 'total_digits': field.max_digits, - 'fraction_digits': field.decimal_places, - }) - return super(DecimalMapper, self).map(field, **params) - - -class RelationMapper(BaseDjangoFieldMapper): - - """Mapper for relation fields (ForeignKey, OneToOneField).""" - - def __init__(self, django_model_mapper): - """Constructor for relation field mapper.""" - self.django_model_mapper = django_model_mapper - - @staticmethod - def is_field_blank(field, **kwargs): - """Return True if `optional_relations` is set. - - Otherwise use basic behaviour. - - """ - optional_relations = kwargs.get('optional_relations', False) - return (optional_relations or - BaseDjangoFieldMapper.is_field_blank(field, **kwargs)) - - def get_spyne_model(self, field, **kwargs): - """Return spyne model configured by related field.""" - related_field = field.rel.get_related_field() if hasattr(field, 'rel') else field.remote_field.get_related_field() - field_type = related_field.__class__.__name__ - field_mapper = self.django_model_mapper.get_field_mapper(field_type) - - _, related_spyne_model = field_mapper.map(related_field, **kwargs) - return related_spyne_model - - -class DjangoModelMapper(object): - - r"""Mapper from django models to spyne complex models. - - You can extend it registering new field types: :: - - class NullBooleanMapper(DjangoFieldMapper): - - def map(self, field, **kwargs): - params = kwargs.copy() - # your mapping logic goes here - return super(NullBooleanMapper, self).map(field, **params) - - default_model_mapper.register_field_mapper('NullBooleanField', \ - NullBooleanMapper(primitive.Boolean)) - - - You may subclass it if you want different mapping logic for different - Django models. 
- - """ - - field_mapper_class = DjangoFieldMapper - - class UnknownFieldMapperException(Exception): - - """Raises when there is no field mapper for given django_type.""" - - def __init__(self, django_spyne_models=()): - """Register field mappers in internal registry.""" - self._registry = {} - - for django_type, spyne_model in django_spyne_models: - self.register(django_type, spyne_model) - - def get_field_mapper(self, django_type): - """Get mapper registered for given django_type. - - :param django_type: Django internal field type - :returns: registered mapper - :raises: :exc:`UnknownFieldMapperException` - - """ - try: - return self._registry[django_type] - except KeyError: - raise self.UnknownFieldMapperException( - 'No mapper for field type {0}'.format(django_type)) - - def register(self, django_type, spyne_model): - """Register default field mapper for django_type and spyne_model. - - :param django_type: Django internal field type - :param spyne_model: Spyne model, usually primitive - - """ - field_mapper = self.field_mapper_class(spyne_model) - self.register_field_mapper(django_type, field_mapper) - - def register_field_mapper(self, django_type, field_mapper): - """Register field mapper for django_type. - - :param django_type: Django internal field type - :param field_mapper: :class:`DjangoFieldMapper` instance - - """ - self._registry[django_type] = field_mapper - - @staticmethod - def get_all_field_names(meta): - if hasattr(meta, 'get_all_field_names'): - return meta.get_all_field_names() - - return list(set(chain.from_iterable( - (field.name, field.attname) if hasattr(field, 'attname') else ( - field.name,) - for field in meta.get_fields() - # For complete backwards compatibility, you may want to exclude - # GenericForeignKey from the results. 
- if not (field.many_to_one and field.related_model is None) - ))) - - @staticmethod - def _get_fields(django_model, exclude=None): - field_names = set(exclude) if exclude is not None else set() - meta = django_model._meta # pylint: disable=W0212 - unknown_fields_names = \ - field_names.difference(DjangoModelMapper.get_all_field_names(meta)) - - if unknown_fields_names: - raise ImproperlyConfigured( - 'Unknown field names: {0}' - .format(', '.join(unknown_fields_names))) - - return [field for field in meta.fields if field.name not in - field_names] - - def map(self, django_model, exclude=None, **kwargs): - """Prepare dict of model fields mapped to spyne models. - - :param django_model: Django model class. - :param exclude: list of fields excluded from mapping. - :param kwargs: extra kwargs are passed to all field mappers - - :returns: dict mapping attribute names to spyne models - :raises: :exc:`UnknownFieldMapperException` - - """ - field_map = odict() - - for field in self._get_fields(django_model, exclude): - field_type = field.__class__.__name__ - - try: - field_mapper = self._registry[field_type] - except KeyError: - # mapper for this field is not registered - if not (field.has_default() or field.null): - # field is required - raise self.UnknownFieldMapperException( - 'No mapper for field type {0}'.format(field_type)) - else: - # skip this field - logger.info('Field {0} is skipped from mapping.') - continue - - attr_name, spyne_model = field_mapper.map(field, **kwargs) - field_map[attr_name] = spyne_model - - return field_map - - -def strip_regex_metachars(pattern): - """Strip ^ and $ from pattern begining and end. - - According to http://www.w3.org/TR/xmlschema-0/#regexAppendix XMLSchema - expression language does not contain the metacharacters ^ and $. 
- - :returns: stripped pattern string - - """ - start = 0 - till = len(pattern) - - if pattern.startswith('^'): - start = 1 - - if pattern.endswith('$'): - till -= 1 - - return pattern[start:till] - - -# django's own slug_re.pattern is invalid according to xml schema -- it doesn't -# like the location of the dash character. using the equivalent pattern accepted -# by xml schema here. -SLUG_RE_PATTERN = '[a-zA-Z0-9_-]+' - - -DEFAULT_FIELD_MAP = ( - ('AutoField', primitive.Integer32), - ('CharField', primitive.NormalizedString), - ('SlugField', primitive.Unicode( - type_name='Slug', pattern=strip_regex_metachars(SLUG_RE_PATTERN))), - ('TextField', primitive.Unicode), - ('EmailField', primitive.Unicode( - type_name='Email', pattern=strip_regex_metachars(email_re.pattern))), - ('CommaSeparatedIntegerField', primitive.Unicode( - type_name='CommaSeparatedField', - pattern=strip_regex_metachars(comma_separated_int_list_re.pattern))), - ('URLField', primitive.AnyUri), - ('FilePathField', primitive.Unicode), - - ('BooleanField', primitive.Boolean), - ('NullBooleanField', primitive.Boolean), - ('IntegerField', primitive.Integer), - ('BigIntegerField', primitive.Integer64), - ('PositiveIntegerField', primitive.UnsignedInteger32), - ('SmallIntegerField', primitive.Integer16), - ('PositiveSmallIntegerField', primitive.UnsignedInteger16), - ('FloatField', primitive.Double), - - ('TimeField', primitive.Time), - ('DateField', primitive.Date), - ('DateTimeField', primitive.DateTime), - - # simple fixed defaults for relation fields - ('ForeignKey', primitive.Integer32), - ('OneToOneField', primitive.Integer32), -) - - -def model_mapper_factory(mapper_class, field_map): - """Factory for model mappers. - - The factory is useful to create custom field mappers based on default one. 
- - """ - model_mapper = mapper_class(field_map) - - # register relation field mappers that are aware of related field type - model_mapper.register_field_mapper( - 'ForeignKey', RelationMapper(model_mapper)) - - model_mapper.register_field_mapper( - 'OneToOneField', RelationMapper(model_mapper)) - - model_mapper.register_field_mapper('DecimalField', - DecimalMapper(primitive.Decimal)) - return model_mapper - - -default_model_mapper = model_mapper_factory(DjangoModelMapper, - DEFAULT_FIELD_MAP) - - -class DjangoComplexModelMeta(ComplexModelMeta): - - """Meta class for complex spyne models representing Django models.""" - - def __new__(mcs, name, bases, attrs): # pylint: disable=C0202 - """Populate new complex type from configured Django model.""" - super_new = super(DjangoComplexModelMeta, mcs).__new__ - - abstract = bool(attrs.get('__abstract__', False)) - - if abstract: - # skip processing of abstract models - return super_new(mcs, name, bases, attrs) - - attributes = attrs.get('Attributes') - - if attributes is None: - raise ImproperlyConfigured('You have to define Attributes and ' - 'specify Attributes.django_model') - - if getattr(attributes, 'django_model', None) is None: - raise ImproperlyConfigured('You have to define django_model ' - 'attribute in Attributes') - - mapper = getattr(attributes, 'django_mapper', default_model_mapper) - attributes.django_mapper = mapper - exclude = getattr(attributes, 'django_exclude', None) - optional_relations = getattr(attributes, 'django_optional_relations', - False) - spyne_attrs = mapper.map(attributes.django_model, exclude=exclude, - optional_relations=optional_relations) - spyne_attrs.update(attrs) - return super_new(mcs, name, bases, spyne_attrs) - - -@add_metaclass(DjangoComplexModelMeta) -class DjangoComplexModel(ComplexModelBase): - - """Base class with Django model mapping support. 
- - Sample usage: :: - - class PersonType(DjangoComplexModel): - class Attributes(DjangoComplexModel.Attributes): - django_model = Person - - - Attribute :attr:`django_model` is required for Django model mapping - machinery. You can customize your types defining custom type fields: :: - - class PersonType(DjangoComplexModel): - gender = primitive.Unicode(pattern='^[FM]$') - - class Attributes(DjangoComplexModel.Attributes): - django_model = Person - - - There is an option to specify custom mapper: :: - - class PersonType(DjangoComplexModel): - class Attributes(DjangoComplexModel.Attributes): - django_model = Person - django_mapper = my_custom_mapper - - You can also exclude some fields from mapping: :: - - class PersonType(DjangoComplexModel): - class Attributes(DjangoComplexModel.Attributes): - django_model = Person - django_exclude = ['phone'] - - You may set `django_optional_relations`` attribute flag to indicate - that relation fields (ForeignKey, OneToOneField) of your model are - optional. This is useful when you want to create base and related - instances in remote procedure. In this case primary key of base model is - not yet available. 
- - """ - - __abstract__ = True - - -class ObjectNotFoundError(ResourceNotFoundError): - - """Fault constructed from `model.DoesNotExist` exception.""" - - def __init__(self, does_not_exist_exc): - """Construct fault with code Client.NotFound.""" - message = str(does_not_exist_exc) - object_name = message.split()[0] - # we do not want to reuse initialization of ResourceNotFoundError - Fault.__init__( - self, faultcode='Client.{0}NotFound'.format(object_name), - faultstring=message) - - -class ValidationError(BaseValidationError): - - """Fault constructed from `ValidationError` exception.""" - - def __init__(self, validation_error_exc): - """Construct fault with code Client..""" - message = str(validation_error_exc) - # we do not want to reuse initialization of BaseValidationError - Fault.__init__( - self, faultcode='Client.{0}'.format( - type(validation_error_exc).__name__), faultstring=message) - - -class DjangoService(Service): - - """Service with common Django exception handling.""" - - @classmethod - def call_wrapper(cls, ctx): - """Handle common Django exceptions.""" - try: - out_object = super(DjangoService, cls).call_wrapper(ctx) - except ObjectDoesNotExist as e: - raise ObjectNotFoundError(e) - except DjValidationError as e: - raise ValidationError(e) - return out_object - - -# FIXME: To be removed in Spyne 3 -DjangoServiceBase = DjangoService diff --git a/libs_crutch/contrib/spyne/util/dyninit.py b/libs_crutch/contrib/spyne/util/dyninit.py deleted file mode 100644 index 21eb775..0000000 --- a/libs_crutch/contrib/spyne/util/dyninit.py +++ /dev/null @@ -1,147 +0,0 @@ -# encoding: utf-8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from datetime import date, datetime - -from spyne import D, Integer, ModelBase, Date, DateTime, IpAddress, Decimal, \ - Boolean -from spyne.protocol import ProtocolBase -from spyne.util import six -from spyne.util.cdict import cdict - - -BOOL_VALUES_BYTES_TRUE = (b't', b'1', b'on', b'yes', b'true') -BOOL_VALUES_STR_TRUE = (u't', u'1', u'on', u'yes', u'true') - -BOOL_VALUES_BYTES_FALSE = (b'f', b'0', b'off', b'no', b'false') -BOOL_VALUES_STR_FALSE = (u'f', u'0', u'off', u'no', u'false') - -BOOL_VALUES_NONE = (None, '') - - -if six.PY2: - bytes = str -else: - unicode = str - - -_prot = ProtocolBase() - -def _bool_from_int(i): - if i in (0, 1): - return i == 1 - raise ValueError(i) - - -def _bool_from_bytes(s): - if s in BOOL_VALUES_NONE: - return None - - s = s.strip() - if s in BOOL_VALUES_NONE: - return None - s = s.lower() - if s in BOOL_VALUES_BYTES_TRUE: - return True - if s in BOOL_VALUES_BYTES_FALSE: - return False - raise ValueError(s) - - -def _bool_from_str(s): - if s in BOOL_VALUES_NONE: - return None - - s = s.strip() - if s in BOOL_VALUES_NONE: - return None - if s in BOOL_VALUES_STR_TRUE: - return True - if s in BOOL_VALUES_STR_FALSE: - return False - raise ValueError(s) - - -MAP = cdict({ - ModelBase: cdict({ - object: lambda _: _, - bytes: lambda _: _.strip(), - unicode: lambda _: _.strip(), - }), - - Decimal: cdict({ - D: lambda d: d, - int: lambda i: D(i), - bytes: lambda s: None if s.strip() == '' else D(s.strip()), - unicode: lambda s: None if s.strip() == u'' else 
D(s.strip()), - }), - - Boolean: cdict({ - D: lambda d: _bool_from_int(int(d)), - int: _bool_from_int, - bytes: _bool_from_bytes, - unicode: _bool_from_str, - }), - - Integer: cdict({ - D: lambda _: _, - int: lambda _: _, - bytes: lambda s: None if s.strip() == '' else int(s.strip()), - unicode: lambda s: None if s.strip() == u'' else int(s.strip()), - }), - - Date: cdict({ - date: lambda _: _, - datetime: lambda _: _.date(), - object: lambda _:_, - bytes: lambda s: None if s.strip() in ('', '0000-00-00') - else _prot.date_from_unicode(Date, s.strip()), - unicode: lambda s: None if s.strip() in (u'', u'0000-00-00') - else _prot.date_from_unicode(Date, s.strip()), - }), - - DateTime: cdict({ - date: lambda _: datetime(date.year, date.month, date.day), - datetime: lambda _: _, - object: lambda _:_, - bytes: lambda s: None if s.strip() in ('', '0000-00-00 00:00:00') - else _prot.datetime_from_unicode(DateTime, s.strip()), - unicode: lambda s: None if s.strip() in (u'', u'0000-00-00 00:00:00') - else _prot.datetime_from_unicode(DateTime, s.strip()), - }), - - IpAddress: cdict({ - object: lambda _: _, - bytes: lambda s: None if s.strip() == '' else s.strip(), - unicode: lambda s: None if s.strip() == u'' else s.strip(), - }) -}) - - -def dynamic_init(cls, **kwargs): - fti = cls.get_flat_type_info(cls) - retval = cls() - - for k, v in fti.items(): - if k in kwargs: - subval = kwargs[k] - t = MAP[v] - setattr(retval, k, t[type(subval)](subval)) - - return retval diff --git a/libs_crutch/contrib/spyne/util/email.py b/libs_crutch/contrib/spyne/util/email.py deleted file mode 100644 index 456f7f5..0000000 --- a/libs_crutch/contrib/spyne/util/email.py +++ /dev/null @@ -1,129 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -import getpass -import inspect -import traceback -import smtplib - -from socket import gethostname -from subprocess import Popen, PIPE - -from email.utils import COMMASPACE, formatdate -from email.mime.text import MIMEText -from email.mime.multipart import MIMEMultipart -from email.mime.application import MIMEApplication - -from spyne.util import six - - -def email_exception(exception_address, message="", bcc=None): - # http://stackoverflow.com/questions/1095601/find-module-name-of-the-originating-exception-in-python - frm = inspect.trace()[-1] - mod = inspect.getmodule(frm[0]) - module_name = mod.__name__ if mod else frm[1] - - sender = 'robot@spyne.io' - recipients = [exception_address] - if bcc is not None: - recipients.extend(bcc) - - error_str = ("%s\n\n%s" % (message, traceback.format_exc())) - msg = MIMEText(error_str.encode('utf8'), 'plain', 'utf8') - msg['To'] = exception_address - msg['From'] = 'Spyne ' - msg['Date'] = formatdate() - msg['Subject'] = "(%s@%s) %s" % (getpass.getuser(), gethostname(), module_name) - - try: - smtp_object = smtplib.SMTP('localhost') - smtp_object.sendmail(sender, recipients, msg.as_string()) - 
logger.error("Error email sent") - - except Exception as e: - logger.error("Error: unable to send email") - logger.exception(e) - - -def email_text_smtp(addresses, sender=None, subject='', message="", - host='localhost', port=25): - if sender is None: - sender = 'Spyne ' - - exc = traceback.format_exc() - if exc is not None: - message = (u"%s\n\n%s" % (message, exc)) - msg = MIMEText(message.encode('utf8'), 'plain', 'utf8') - msg['To'] = COMMASPACE.join(addresses) - msg['From'] = sender - msg['Date'] = formatdate() - msg['Subject'] = subject - - smtp_object = smtplib.SMTP(host, port) - if six.PY2: - smtp_object.sendmail(sender, addresses, msg.as_string()) - else: - smtp_object.sendmail(sender, addresses, msg.as_bytes()) - logger.info("Text email sent to: %r.", addresses) - - -def email_text(addresses, sender=None, subject='', message="", bcc=None, - att=None): - if att is None: - att = {} - - if sender is None: - sender = 'Spyne ' - - exc = traceback.format_exc() - if exc is not None and exc != 'None\n': - message = (u"%s\n\n%s" % (message, exc)) - msg = MIMEText(message.encode('utf8'), 'plain', 'utf8') - if len(att) > 0: - newmsg = MIMEMultipart() - newmsg.attach(msg) - for k, v in att.items(): - part = MIMEApplication(v) - part.add_header('Content-Disposition', 'attachment', filename=k) - newmsg.attach(part) - - msg = newmsg - - msg['To'] = COMMASPACE.join(addresses) - msg['From'] = sender - msg['Date'] = formatdate() - msg['Subject'] = subject - - cmd = ["/usr/sbin/sendmail", "-oi", '--'] - cmd.extend(addresses) - if bcc is not None: - cmd.extend(bcc) - - p = Popen(cmd, stdin=PIPE) - if six.PY2: - p.communicate(msg.as_string()) - else: - p.communicate(msg.as_bytes()) - - logger.info("Text email sent to: %r.", addresses) diff --git a/libs_crutch/contrib/spyne/util/etreeconv.py b/libs_crutch/contrib/spyne/util/etreeconv.py deleted file mode 100644 index cbd440c..0000000 --- a/libs_crutch/contrib/spyne/util/etreeconv.py +++ /dev/null @@ -1,131 +0,0 @@ - -# -# spyne 
- Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""This module contains the utility methods that convert an ElementTree -hierarchy to python dicts and vice versa. -""" - -import collections - -from spyne.util import six - -from lxml import etree - -from spyne.util.odict import odict - - -def root_dict_to_etree(d): - """Converts a dictionary to an xml hiearchy. Just like a valid xml document, - the dictionary must have a single element. The format of the child - dictionaries is the same as :func:`dict_to_etree`. - """ - - assert len(d) == 1, "Incoming dict len must be exactly 1. Data: %r" % d - - key, = d.keys() - retval = etree.Element(key) - for val in d.values(): - break - - if val is None: - return retval - - if isinstance(val, dict) or isinstance(val, odict): - dict_to_etree(val, retval) - elif not isinstance(val, collections.Sized) or isinstance(val, six.string_types): - retval.text=str(val) - else: - for a in val: - dict_to_etree(a, retval) - - return retval - - -def dict_to_etree(d, parent): - """Takes a the dict whose value is either None or an instance of dict, odict - or an iterable. The iterables can contain either other dicts/odicts or - str/unicode instances. 
- """ - - for k, v in d.items(): - if v is None: - etree.SubElement(parent, k) - - elif isinstance(v, six.string_types): - etree.SubElement(parent, k).text = v - - elif isinstance(v, dict) or isinstance(v, odict): - child = etree.SubElement(parent, k) - dict_to_etree(v, child) - - elif not isinstance(v, collections.Sized): - etree.SubElement(parent, k).text = str(v) - - elif len(v) == 0: - etree.SubElement(parent, k) - - else: - for e in v: - child=etree.SubElement(parent, k) - if isinstance(e, dict) or isinstance(e, odict): - dict_to_etree(e, child) - else: - child.text=str(e) - - -def root_etree_to_dict(element, iterable=(list, list.append)): - """Takes an xml root element and returns the corresponding dict. The second - argument is a pair of iterable type and the function used to add elements to - the iterable. The xml attributes are ignored. - """ - - return {element.tag: iterable[0]([etree_to_dict(element, iterable)])} - - -def etree_to_dict(element, iterable=(list, list.append)): - """Takes an xml root element and returns the corresponding dict. The second - argument is a pair of iterable type and the function used to add elements to - the iterable. The xml attributes are ignored. 
- """ - - if (element.text is None) or element.text.isspace(): - retval = odict() - for elt in element: - if not (elt.tag in retval): - retval[elt.tag] = iterable[0]() - iterable[1](retval[elt.tag], etree_to_dict(elt, iterable)) - - else: - retval = element.text - - return retval - - -def etree_strip_namespaces(element): - """Removes any namespace information form the given element recursively.""" - - retval = etree.Element(element.tag.rpartition('}')[-1]) - retval.text = element.text - for a in element.attrib: - retval.attrib[a.rpartition('}')[-1]] = element.attrib[a] - - for e in element: - retval.append(etree_strip_namespaces(e)) - - return retval diff --git a/libs_crutch/contrib/spyne/util/fileproxy.py b/libs_crutch/contrib/spyne/util/fileproxy.py deleted file mode 100644 index 878c3a2..0000000 --- a/libs_crutch/contrib/spyne/util/fileproxy.py +++ /dev/null @@ -1,190 +0,0 @@ - -# -# Copyright (C) 2013-2014 by Hong Minhee -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -# - -import os - -from spyne.util.six.moves.collections_abc import Iterator - -__all__ = 'FileProxy', 'ReusableFileProxy', 'SeekableFileProxy' - - -class FileProxy(Iterator): - """The complete proxy for ``wrapped`` file-like object. - - :param wrapped: the file object to wrap - :type wrapped: :class:`file`, file-like object - """ - - def __init__(self, wrapped): - self.wrapped = wrapped - self.mmap = None - - def __iter__(self): - f = self.wrapped - it = getattr(f, '__iter__', None) - if callable(it): - return it() - return self - - def __next__(self): - """Implementation of :class:`collections.Iterator` protocol.""" - line = self.readline() - if not line: - raise StopIteration('hit eof') - return line - - next = __next__ - - def read(self, size=-1): - """Reads at the most ``size`` bytes from the file. - It maybe less if the read hits EOF before obtaining ``size`` bytes. - - :param size: bytes to read. if it is negative or omitted, - read all data until EOF is reached. default is -1 - :returns: read bytes. an empty string when EOF is encountered - immediately - :rtype: :class:`str` - """ - return self.wrapped.read(size) - - def readline(self, size=None): - r"""Reads an entire line from the file. A trailing newline - character is kept in the string (but maybe absent when a file - ends with an incomplete line). - - :param size: if it's present and non-negative, it is maximum - byte count (including trailing newline) and - an incomplete line maybe returned - :type size: :class:`numbers.Integral` - :returns: read bytes - :rtype: :class:`str` - - .. note:: - - Unlike ``stdio``'s :c:func:`fgets()`, the returned string - contains null characters (``'\0'``) if they occurred in - the input. 
- - """ - return self.wrapped.readline(size) - - def readlines(self, sizehint=None): - """Reads until EOF using :meth:`readline()`. - - :param sizehint: if it's present, instead of reading up to EOF, - whole lines totalling approximately ``sizehint`` - bytes (or more to accommodate a final whole line) - :type sizehint: :class:`numbers.Integral` - :returns: a list containing the lines read - :rtype: :class:`list` - """ - wrapped = self.wrapped - try: - readlines = wrapped.readlines - except AttributeError: - lines = [] - while 1: - line = wrapped.readline() - if line: - lines.append(line) - else: - break - return lines - return readlines() if sizehint is None else readlines(sizehint) - - def xreadlines(self): - """The same to ``iter(file)``. Use that. - - .. deprecated:: long time ago - - Use :func:`iter()` instead. - """ - return iter(self) - - def close(self): - """Closes the file. It's a context manager as well, - so prefer :keyword:`with` statement than direct call of - this:: - - with FileProxy(file_) as f: - print f.read() - """ - try: - close = self.wrapped.close - except AttributeError: - pass - else: - close() - - def __enter__(self): - return self.wrapped - - def __exit__(self, exc_type, value, traceback): - self.close() - - def __del__(self): - if self.mmap is not None: - self.mmap.close() - self.wrapped.close() - - def fileno(self): - return self.wrapped.fileno() - - -class SeekableFileProxy(FileProxy): - """The almost same to :class:`FileProxy` except it has - :meth:`seek()` and :meth:`tell()` methods in addition. - """ - - def seek(self, offset, whence=os.SEEK_SET): - """Sets the file's current position. - - :param offset: the offset to set - :type offset: :class:`numbers.Integral` - :param whence: see the docs of :meth:`file.seek()`. - default is :const:`os.SEEK_SET` - """ - self.wrapped.seek(offset, whence) - - def tell(self): - """Gets the file's current position. 
- - :returns: the file's current position - :rtype: :class:`numbers.Integral` - """ - return self.wrapped.tell() - - -class ReusableFileProxy(SeekableFileProxy): - """It memorizes the current position (:meth:`tell()`) when the context - enters and then rewinds (:meth:`seek()`) back to the memorized - :attr:`initial_offset` when the context exits. - """ - - def __enter__(self): - self.initial_offset = self.tell() - self.seek(0) - return super(ReusableFileProxy, self).__enter__() - - def __exit__(self, exc_type, value, traceback): - self.seek(self.initial_offset) diff --git a/libs_crutch/contrib/spyne/util/gencpp.py b/libs_crutch/contrib/spyne/util/gencpp.py deleted file mode 100644 index 01ad206..0000000 --- a/libs_crutch/contrib/spyne/util/gencpp.py +++ /dev/null @@ -1,254 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""A PoC that implements like 2% of the job of converting Spyne objects to -standard C++ classes.""" - -import sys - -INDENT = ' ' - -class Object(object): - def __init__(self): - self.parent = None - self.comment_before = None - self.comment_after = None - - def _comment_before_to_stream(self, ostr, indent): - if self.comment_before is None: - return - - ostr.write("\n") - ostr.write(INDENT * indent) - ostr.write("/**\n") - ostr.write(INDENT * indent) - ostr.write(" *") - for line in self.comment_before.split('\n'): - ostr.write(" ") - ostr.write(line) - ostr.write('\n') - ostr.write(INDENT * indent) - ostr.write(" */") - ostr.write("\n") - - def _comment_after_to_stream(self, ostr, indent): - if self.comment_after is None: - return - - lines = self.comment_after.split('\n') - - if len(lines) < 2: - ostr.write(" // ") - ostr.write(self.comment_after) - - else: - ostr.write(INDENT * indent) - ostr.write("/**\n") - ostr.write(INDENT * indent) - ostr.write(" *") - for line in lines: - ostr.write(" ") - ostr.write(line) - ostr.write('\n') - ostr.write(INDENT * indent) - ostr.write(" */") - ostr.write("\n") - - -class Entry(Object): - def __init__(self, modifier=None): - super(Entry, self).__init__() - self.modifier = modifier - - def to_decl_stream(self, ostr, indent): - raise NotImplemented() - - def to_defn_stream(self, ostr, indent): - raise NotImplemented() - - -class Literal(Object): - def __init__(self, value): - super(Literal, self).__init__() - self.value = value - - -class StringLiteral(Literal): - def to_stream(self, ostr, indent): - self._comment_before_to_stream(ostr, indent) - - ostr.write('"') - ostr.write(self.value) # TODO: escaping - ostr.write('"') - - self._comment_after_to_stream(ostr, indent) - - -class DataMember(Entry): - def 
__init__(self, modifier, type, name, initializer=None): - super(DataMember, self).__init__(modifier) - self.type = type - self.name = name - self.initializer = initializer - - def to_decl_stream(self, ostr, indent): - ostr.write(INDENT * indent) - if self.modifier is not None: - ostr.write(self.modifier) - ostr.write(" ") - ostr.write(self.type) - ostr.write(" ") - ostr.write(self.name) - - if self.modifier != 'static' and self.initializer is not None: - ostr.write(" = ") - self.initializer.to_stream(ostr, indent) - - ostr.write(";") - ostr.write("\n") - - def to_defn_stream(self, ostr, indent): - if self.modifier != 'static': - return - - self._comment_before_to_stream(ostr, indent) - - ostr.write(INDENT * indent) - - ostr.write(self.type) - ostr.write(" ") - - parents = [] - parent = self.parent - while parent is not None: - parents.insert(0, parent) - parent = parent.parent - - for parent in parents: - ostr.write(parent.name) - ostr.write("::") - - ostr.write(self.name) - - if self.initializer is not None: - ostr.write(" = ") - self.initializer.to_stream(ostr, indent) - - ostr.write(";") - ostr.write("\n") - - self._comment_after_to_stream(ostr, indent) - - -class Class(Entry): - def __init__(self): - super(Class, self).__init__() - - self.name = None - self.namespace = None - self.type = 'class' - self.public_entries = [] - self.protected_entries = [] - self.private_entries = [] - - def to_decl_stream(self, ostr, indent=0): - if self.namespace is not None: - ostr.write("namespace ") - ostr.write(self.namespace) - ostr.write(" {\n") - - ostr.write(INDENT * indent) - ostr.write("%s %s {\n" % (self.type, self.name,)) - - if len(self.public_entries) > 0: - ostr.write(INDENT * indent) - ostr.write("public:\n") - for e in self.public_entries: - e.to_decl_stream(ostr, indent + 1) - ostr.write("\n") - - if len(self.protected_entries) > 0: - ostr.write(INDENT * indent) - ostr.write("protected:\n") - for e in self.protected_entries: - e.to_decl_stream(ostr, indent + 1) - 
ostr.write("\n") - - if len(self.private_entries) > 0: - ostr.write(INDENT * indent) - ostr.write("private:\n") - for e in self.private_entries: - e.to_decl_stream(ostr, indent + 1) - ostr.write("\n") - - ostr.write(INDENT * indent) - ostr.write("};\n") - - if self.namespace is not None: - ostr.write("}\n") - - def to_defn_stream(self, ostr, indent=0): - if self.namespace is not None: - ostr.write("namespace ") - ostr.write(self.namespace) - ostr.write(" {\n") - - if len(self.public_entries) > 0: - for e in self.public_entries: - e.to_defn_stream(ostr, indent) - - if len(self.protected_entries) > 0: - for e in self.protected_entries: - e.to_defn_stream(ostr, indent) - - if len(self.private_entries) > 0: - for e in self.private_entries: - e.to_defn_stream(ostr, indent) - - if self.namespace is not None: - ostr.write("}\n") - -def gen_cpp_class(cls, namespace=None, type_map=None): - if type_map is None: - type_map = dict() - - ocls = Class() - ocls.name = cls.get_type_name() - ocls.namespace = namespace - - keys = Class() - keys.name = "Key" - keys.parent = ocls - keys.type = "struct" - ocls.public_entries.append(keys) - - for k, v in cls.get_flat_type_info(cls).items(): - member = DataMember( - "static", "const std::string", - k, StringLiteral(v.Attributes.sub_name or k) - ) - - member.comment_before = v.Annotations.doc - member.parent = keys - - keys.public_entries.append(member) - - ocls.to_decl_stream(sys.stdout) - sys.stdout.write("\n\n\n\n") - ocls.to_defn_stream(sys.stdout) diff --git a/libs_crutch/contrib/spyne/util/http.py b/libs_crutch/contrib/spyne/util/http.py deleted file mode 100644 index 3e21a8a..0000000 --- a/libs_crutch/contrib/spyne/util/http.py +++ /dev/null @@ -1,66 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. - -import sys -import time - -from time import strftime -from time import gmtime -from collections import deque - -# This is a modified version of twisted's addCookie - - -def generate_cookie(k, v, max_age=None, domain=None, path=None, - comment=None, secure=False): - """Generate a HTTP response cookie. No sanity check whatsoever is done, - don't send anything other than ASCII. - - :param k: Cookie key. - :param v: Cookie value. - :param max_age: Seconds. - :param domain: Domain. - :param path: Path. - :param comment: Whatever. - :param secure: If true, appends 'Secure' to the cookie string. 
- """ - - retval = deque(['%s=%s' % (k, v)]) - - if max_age is not None: - retval.append("Max-Age=%d" % max_age) - assert time.time() < sys.maxint - - expires = time.time() + max_age - expires = min(2<<30, expires) - 1 # FIXME - retval.append("Expires=%s" % strftime("%a, %d %b %Y %H:%M:%S GMT", - gmtime(expires))) - if domain is not None: - retval.append("Domain=%s" % domain) - if path is not None: - retval.append("Path=%s" % path) - if comment is not None: - retval.append("Comment=%s" % comment) - if secure: - retval.append("Secure") - - return '; '.join(retval) diff --git a/libs_crutch/contrib/spyne/util/invregexp.py b/libs_crutch/contrib/spyne/util/invregexp.py deleted file mode 100644 index 1e7f1e5..0000000 --- a/libs_crutch/contrib/spyne/util/invregexp.py +++ /dev/null @@ -1,322 +0,0 @@ - -# -# invRegex.py -# -# Copyright 2008, Paul McGuire -# -# The pyparsing license follows: -# -######### -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-# -######### -# -# pyparsing script to expand a regular expression into all possible matching -# strings -# -# Supports: -# - {n} and {m,n} repetition, but not unbounded + or * repetition -# - ? optional elements -# - [] character ranges -# - () grouping -# - | alternation -# - -__all__ = ["count", "invregexp"] - -from pyparsing import Combine -from pyparsing import Literal -from pyparsing import ParseFatalException -from pyparsing import ParseResults -from pyparsing import ParserElement -from pyparsing import SkipTo -from pyparsing import Suppress -from pyparsing import Word -from pyparsing import nums -from pyparsing import oneOf -from pyparsing import opAssoc -from pyparsing import operatorPrecedence -from pyparsing import printables -from pyparsing import srange - - -class CharacterRangeEmitter(object): - def __init__(self, chars): - # remove duplicate chars in character range, but preserve original order - seen = set() - self.charset = "".join(seen.add(c) or c for c in chars if c not in seen) - - def __str__(self): - return '[' + self.charset + ']' - - def __repr__(self): - return '[' + self.charset + ']' - - def make_generator(self): - def gen_chars(): - for s in self.charset: - yield s - return gen_chars - - -class OptionalEmitter(object): - def __init__(self, expr): - self.expr = expr - - def make_generator(self): - def optional_gen(): - yield "" - for s in self.expr.make_generator()(): - yield s - return optional_gen - - -class DotEmitter(object): - def make_generator(self): - def dot_gen(): - for c in printables: - yield c - return dot_gen - - -class GroupEmitter(object): - def __init__(self, exprs): - self.exprs = ParseResults(exprs) - - def make_generator(self): - def group_gen(): - def recurse_list(elist): - if len(elist) == 1: - for s in elist[0].make_generator()(): - yield s - else: - for s in elist[0].make_generator()(): - for s2 in recurse_list(elist[1:]): - yield s + s2 - if self.exprs: - for s in recurse_list(self.exprs): - yield s - - return 
group_gen - - -class AlternativeEmitter(object): - def __init__(self, exprs): - self.exprs = exprs - - def make_generator(self): - def alt_gen(): - for e in self.exprs: - for s in e.make_generator()(): - yield s - - return alt_gen - - -class LiteralEmitter(object): - def __init__(self, lit): - self.lit = lit - - def __str__(self): - return "Lit:" + self.lit - - def __repr__(self): - return "Lit:" + self.lit - - def make_generator(self): - def lit_gen(): - yield self.lit - - return lit_gen - - -def handle_range(toks): - return CharacterRangeEmitter(srange(toks[0])) - - -def handle_repetition(toks): - toks = toks[0] - if toks[1] in "*+": - raise ParseFatalException("", 0, "unbounded repetition operators not supported") - if toks[1] == "?": - return OptionalEmitter(toks[0]) - if "count" in toks: - return GroupEmitter([toks[0]] * int(toks.count)) - if "minCount" in toks: - mincount = int(toks.minCount) - maxcount = int(toks.maxCount) - optcount = maxcount - mincount - if optcount: - opt = OptionalEmitter(toks[0]) - for i in range(1, optcount): - opt = OptionalEmitter(GroupEmitter([toks[0], opt])) - return GroupEmitter([toks[0]] * mincount + [opt]) - else: - return [toks[0]] * mincount - - -def handle_literal(toks): - lit = "" - for t in toks: - if t[0] == "\\": - if t[1] == "t": - lit += '\t' - else: - lit += t[1] - else: - lit += t - return LiteralEmitter(lit) - - -def handle_macro(toks): - macroChar = toks[0][1] - if macroChar == "d": - return CharacterRangeEmitter("0123456789") - elif macroChar == "w": - return CharacterRangeEmitter(srange("[A-Za-z0-9_]")) - elif macroChar == "s": - return LiteralEmitter(" ") - else: - raise ParseFatalException("", 0, "unsupported macro character (" + macroChar + ")") - - -def handle_sequence(toks): - return GroupEmitter(toks[0]) - - -def handle_dot(): - return CharacterRangeEmitter(printables) - - -def handle_alternative(toks): - return AlternativeEmitter(toks[0]) - - -_parser = None -def parser(): - global _parser - if _parser is 
None: - ParserElement.setDefaultWhitespaceChars("") - lbrack, rbrack, lbrace, rbrace, lparen, rparen = map(Literal, "[]{}()") - - reMacro = Combine("\\" + oneOf(list("dws"))) - escapedChar = ~ reMacro + Combine("\\" + oneOf(list(printables))) - reLiteralChar = "".join(c for c in printables if c not in r"\[]{}().*?+|") + " \t" - - reRange = Combine(lbrack + SkipTo(rbrack, ignore=escapedChar) + rbrack) - reLiteral = (escapedChar | oneOf(list(reLiteralChar))) - reDot = Literal(".") - repetition = ( - (lbrace + Word(nums).setResultsName("count") + rbrace) | - (lbrace + Word(nums).setResultsName("minCount") + "," + Word(nums).setResultsName("maxCount") + rbrace) | - oneOf(list("*+?")) - ) - - reRange.setParseAction(handle_range) - reLiteral.setParseAction(handle_literal) - reMacro.setParseAction(handle_macro) - reDot.setParseAction(handle_dot) - - reTerm = (reLiteral | reRange | reMacro | reDot) - reExpr = operatorPrecedence(reTerm, [ - (repetition, 1, opAssoc.LEFT, handle_repetition), - (None, 2, opAssoc.LEFT, handle_sequence), - (Suppress('|'), 2, opAssoc.LEFT, handle_alternative), - ]) - - _parser = reExpr - - return _parser - - -def count(gen): - """Simple function to count the number of elements returned by a generator.""" - i = 0 - for s in gen: - i += 1 - return i - - -def invregexp(regex): - """Call this routine as a generator to return all the strings that - match the input regular expression. - for s in invregexp("[A-Z]{3}\d{3}"): - print s - """ - invReGenerator = GroupEmitter(parser().parseString(regex)).make_generator() - return invReGenerator() - - -def main(): - tests = r""" - [A-EA] - [A-D]* - [A-D]{3} - X[A-C]{3}Y - X[A-C]{3}\( - X\d - foobar\d\d - foobar{2} - foobar{2,9} - fooba[rz]{2} - (foobar){2} - ([01]\d)|(2[0-5]) - ([01]\d\d)|(2[0-4]\d)|(25[0-5]) - [A-C]{1,2} - [A-C]{0,3} - [A-C]\s[A-C]\s[A-C] - [A-C]\s?[A-C][A-C] - [A-C]\s([A-C][A-C]) - [A-C]\s([A-C][A-C])? - [A-C]{2}\d{2} - @|TH[12] - @(@|TH[12])? 
- @(@|TH[12]|AL[12]|SP[123]|TB(1[0-9]?|20?|[3-9]))? - @(@|TH[12]|AL[12]|SP[123]|TB(1[0-9]?|20?|[3-9])|OH(1[0-9]?|2[0-9]?|30?|[4-9]))? - (([ECMP]|HA|AK)[SD]|HS)T - [A-CV]{2} - A[cglmrstu]|B[aehikr]?|C[adeflmorsu]?|D[bsy]|E[rsu]|F[emr]?|G[ade]|H[efgos]?|I[nr]?|Kr?|L[airu]|M[dgnot]|N[abdeiop]?|Os?|P[abdmortu]?|R[abefghnu]|S[bcegimnr]?|T[abcehilm]|Uu[bhopqst]|U|V|W|Xe|Yb?|Z[nr] - (a|b)|(x|y) - (a|b) (x|y) - """.split('\n') - - for t in tests: - t = t.strip() - if not t: - continue - - print('-' * 50) - print(t) - try: - print(count(invregexp(t))) - for s in invregexp(t): - print(s) - - except ParseFatalException as pfe: - print(pfe.msg) - print() - continue - - print() - - -if __name__ == "__main__": - main() diff --git a/libs_crutch/contrib/spyne/util/memo.py b/libs_crutch/contrib/spyne/util/memo.py deleted file mode 100644 index d64efb5..0000000 --- a/libs_crutch/contrib/spyne/util/memo.py +++ /dev/null @@ -1,180 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The module for memoization stuff. - -When you have memory leaks in your daemon, the reason could very well be -reckless usage of the tools here. - -These are NOT thread-safe. 
If you are relying on exactly-one-execution-per-key -behavior in a multithreaded environment, roll your own stuff. -""" - - -import logging -logger = logging.getLogger(__name__) - -import threading - - -MEMOIZATION_STATS_LOG_INTERVAL = 60.0 - - -def _log_all(): - logger.info("%d memoizers", len(memoize.registry)) - for memo in memoize.registry: - logger.info("%r: %d entries.", memo.func, len(memo.memo)) - - -def _log_func(func): - for memo in memoize.registry: - if memo.func is func.func.im_self.func: - break - else: - logger.error("%r not found in memoization regisry", func) - return - - logger.info("%r: %d entries.", memo.func, len(memo.memo)) - for k, v in memo.memo.items(): - logger.info("\t%r: %r", k, v) - - -def start_memoization_stats_logger(func=None): - logger.info("Enabling @memoize statistics every %d second(s).", - MEMOIZATION_STATS_LOG_INTERVAL) - - if func is None: - _log_all() - else: - _log_func(func) - - t = threading.Timer(MEMOIZATION_STATS_LOG_INTERVAL, - start_memoization_stats_logger, (func,)) - - t.daemon = True - t.start() - - -class memoize(object): - """A memoization decorator that keeps caching until reset.""" - - registry = [] - - def __init__(self, func): - self.func = func - self.memo = {} - self.lock = threading.RLock() - memoize.registry.append(self) - - def __call__(self, *args, **kwargs): - key = self.get_key(args, kwargs) - # we hope that gil makes this comparison is race-free - if not key in self.memo: - with self.lock: - # make sure the situation hasn't changed after lock acq - if not key in self.memo: - value = self.func(*args, **kwargs) - self.memo[key] = value - return value - return self.memo.get(key) - - def get_key(self, args, kwargs): - return tuple(args), tuple(kwargs.items()) - - def reset(self): - self.memo = {} - - -class memoize_first(object): - """A memoization decorator that keeps the first call without condition, aka - a singleton accessor.""" - - registry = [] - - def __init__(self, func): - self.func = func - 
self.lock = threading.RLock() - memoize.registry.append(self) - - def __call__(self, *args, **kwargs): - if not hasattr(self, 'memo'): - value = self.func(*args, **kwargs) - self.memo = value - return value - return self.memo - - def reset(self): - del self.memo - - -def memoize_ignore(values): - """A memoization decorator that does memoization unless the returned - value is in the 'values' iterable. eg let `values = (2,)` and - `add = lambda x, y: x + y`, the result of `add(1, 1)` (=2) is not memoized - but the result of `add(5, 5)` (=10) is. - """ - - assert iter(values), \ - "memoize_ignore requires an iterable of values to ignore" - - class _memoize_ignored(memoize): - def __call__(self, *args, **kwargs): - key = self.get_key(args, kwargs) - # we hope that gil makes this comparison is race-free - if not key in self.memo: - with self.lock: - # make sure the situation hasn't changed after lock acq - if not key in self.memo: - value = self.func(*args, **kwargs) - if not value in values: - self.memo[key] = value - - return value - return self.memo.get(key) - - return _memoize_ignored - - -class memoize_ignore_none(memoize): - """A memoization decorator that ignores `None` values. ie when the decorated - function returns `None`, the value is returned but not memoized. - """ - - def __call__(self, *args, **kwargs): - key = self.get_key(args, kwargs) - # we hope that gil makes this comparison is race-free - if not key in self.memo: - with self.lock: - # make sure the situation hasn't changed after lock acq - if not key in self.memo: - value = self.func(*args, **kwargs) - if not (value is None): - self.memo[key] = value - - return value - return self.memo.get(key) - - -class memoize_id(memoize): - """A memoization decorator that keeps caching until reset for unhashable - types. 
It works on id()'s of objects instead.""" - - def get_key(self, args, kwargs): - return tuple([id(a) for a in args]), \ - tuple([(k, id(v)) for k, v in kwargs.items()]) diff --git a/libs_crutch/contrib/spyne/util/meta.py b/libs_crutch/contrib/spyne/util/meta.py deleted file mode 100644 index 81f8da0..0000000 --- a/libs_crutch/contrib/spyne/util/meta.py +++ /dev/null @@ -1,139 +0,0 @@ -# encoding: utf-8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""Metaclass utilities for -:attr:`spyne.model.complex.ComplexModelBase.Attributes.declare_order` -""" - -import sys -import inspect - -from functools import wraps -from itertools import chain -from warnings import warn - -from spyne.util.odict import odict - - -class ClassNotFoundException(Exception): - """Raise when class declaration is not found in frame stack.""" - - -class AttributeNotFoundException(Exception): - """Raise when attribute is not found in class declaration.""" - - -class Prepareable(type): - """Implement __prepare__ for Python 2. - - This class is used in Python 2 and Python 3 to support `six.add_metaclass` - decorator that populates attributes of resulting class from plain unordered - attributes dict of decorated class. 
- - Based on https://gist.github.com/DasIch/5562625 - """ - - def __new__(cls, name, bases, attributes): - try: - constructor = attributes["__new__"] - except KeyError: - return type.__new__(cls, name, bases, attributes) - - def preparing_constructor(cls, name, bases, attributes): - # Don't bother with this shit unless the user *explicitly* asked for - # it - for c in chain(bases, [cls]): - if hasattr(c,'Attributes') and not \ - (c.Attributes.declare_order in (None, 'random')): - break - else: - return constructor(cls, name, bases, attributes) - - try: - cls.__prepare__ - except AttributeError: - return constructor(cls, name, bases, attributes) - - if isinstance(attributes, odict): - # we create class dynamically with passed odict - return constructor(cls, name, bases, attributes) - - current_frame = sys._getframe() - class_declaration = None - - while class_declaration is None: - literals = list(reversed(current_frame.f_code.co_consts)) - - for literal in literals: - if inspect.iscode(literal) and literal.co_name == name: - class_declaration = literal - break - - else: - if current_frame.f_back: - current_frame = current_frame.f_back - else: - raise ClassNotFoundException( - "Can't find class declaration in any frame") - - def get_index(attribute_name, - _names=class_declaration.co_names): - try: - return _names.index(attribute_name) - except ValueError: - if attribute_name.startswith('_'): - # we don't care about the order of magic and non - # public attributes - return 0 - else: - msg = ("Can't find {0} in {1} class declaration. 
" - .format(attribute_name, - class_declaration.co_name)) - msg += ("HINT: use spyne.util.odict.odict for " - "class attributes if you populate them" - " dynamically.") - raise AttributeNotFoundException(msg) - - by_appearance = sorted( - attributes.items(), key=lambda item: get_index(item[0]) - ) - - namespace = cls.__prepare__(name, bases) - for key, value in by_appearance: - namespace[key] = value - - new_cls = constructor(cls, name, bases, namespace) - - found_module = inspect.getmodule(class_declaration) - assert found_module is not None, ( - 'Module is not found for class_declaration {0}, name {1}' - .format(class_declaration, name)) - assert found_module.__name__ == new_cls.__module__, ( - 'Found wrong class declaration of {0}: {1} != {2}.' - .format(name, found_module.__name__, new_cls.__module__)) - - return new_cls - - try: - attributes["__new__"] = wraps(constructor)(preparing_constructor) - except: - warn("Wrapping class initializer failed. This is normal " - "when running under Nuitka") - - return type.__new__(cls, name, bases, attributes) diff --git a/libs_crutch/contrib/spyne/util/odict.py b/libs_crutch/contrib/spyne/util/odict.py deleted file mode 100644 index d8b996d..0000000 --- a/libs_crutch/contrib/spyne/util/odict.py +++ /dev/null @@ -1,134 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -""" -This module contains an ordered dictionary implementation. - -We need this in Python 2.7 because collections.OrderedDict does not support -reordering by assignment to keys(). - -We need this in Python 3.x because keys() returns KeyView which which doesn't -support `__getitem__` -- i.e. getting nth variable from the ordered dict. -""" - - -class odict(dict): - """Sort of an ordered dictionary implementation.""" - - def __init__(self, data=()): - if isinstance(data, self.__class__): - self.__list = list(data.__list) - super(odict, self).__init__(data) - - else: - self.__list = [] - super(odict, self).__init__() - self.update(data) - - def __getitem__(self, key): - if isinstance(key, int): - return super(odict, self).__getitem__(self.__list[key]) - else: - return super(odict, self).__getitem__(key) - - def __setitem__(self, key, val): - if isinstance(key, int): - super(odict, self).__setitem__(self.__list[key], val) - - else: - if not (key in self): - self.__list.append(key) - super(odict, self).__setitem__(key, val) - - assert len(self.__list) == super(odict, self).__len__(), ( - repr(self.__list), super(odict, self).__repr__()) - - def __repr__(self): - return "{%s}" % ','.join(["%r: %r" % (k, v) for k, v in self.items()]) - - def __str__(self): - return repr(self) - - def __len__(self): - assert len(self.__list) == super(odict, self).__len__() - return len(self.__list) - - def __iter__(self): - return iter(self.__list) - - def __delitem__(self, key): - if not isinstance(key, int): - super(odict, self).__delitem__(key) - key = self.__list.index(key) # ouch. 
- else: - super(odict, self).__delitem__(self.__list[key]) - del self.__list[key] - - def __add__(self, other): - self.update(other) - return self - - def items(self): - retval = [] - for k in self.__list: - retval.append( (k, super(odict, self).__getitem__(k)) ) - return retval - - def iteritems(self): - for k in self.__list: - yield k, super(odict, self).__getitem__(k) - - def keys(self): - return self.__list - - def update(self, data, **kwargs): - if isinstance(data, (dict, odict)): - data = data.items() - - for k, v in data: - self[k] = v - - for k, v in kwargs.items(): - self[k] = v - - def values(self): - retval = [] - for l in self.__list: - retval.append(super(odict, self).__getitem__(l)) - return retval - - def itervalues(self): - for l in self.__list: - yield self[l] - - def get(self, key, default=None): - if key in self: - return self[key] - return default - - def append(self, t): - k, v = t - self[k] = v - - def insert(self, index, item): - k, v = item - if k in self: - del self.__list[self.__list.index(k)] - self.__list.insert(index, k) - super(odict, self).__setitem__(k, v) diff --git a/libs_crutch/contrib/spyne/util/oset.py b/libs_crutch/contrib/spyne/util/oset.py deleted file mode 100644 index 03e7c59..0000000 --- a/libs_crutch/contrib/spyne/util/oset.py +++ /dev/null @@ -1,96 +0,0 @@ -# http://code.activestate.com/recipes/576694/ - -from spyne.util.six.moves.collections_abc import MutableSet - -KEY, PREV, NEXT = list(range(3)) - -"""This module contains an ordered set implementation from -http://code.activestate.com/recipes/576694/ """ - -class oset(MutableSet): - """An ordered set implementation.""" - - def __init__(self, iterable=None): - self.end = end = [] - end += [None, end, end] # sentinel node for doubly linked list - self.map = {} # key --> [key, prev, next] - if iterable is not None: - self |= iterable - - def __len__(self): - return len(self.map) - - def __contains__(self, key): - return key in self.map - - def add(self, key): - if key 
not in self.map: - end = self.end - curr = end[PREV] - curr[NEXT] = end[PREV] = self.map[key] = [key, curr, end] - - def extend(self, keys): - for key in keys: - if key not in self.map: - end = self.end - curr = end[PREV] - curr[NEXT] = end[PREV] = self.map[key] = [key, curr, end] - - def discard(self, key): - if key in self.map: - key, prev, next = self.map.pop(key) - prev[NEXT] = next - next[PREV] = prev - - def __iter__(self): - end = self.end - curr = end[NEXT] - while curr is not end: - yield curr[KEY] - curr = curr[NEXT] - - def __reversed__(self): - end = self.end - curr = end[PREV] - while curr is not end: - yield curr[KEY] - curr = curr[PREV] - - def pop(self, last=True): - if not self: - raise KeyError('set is empty') - key = next(reversed(self)) if last else next(iter(self)) - self.discard(key) - return key - - def __repr__(self): - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, list(self)) - - def __eq__(self, other): - if isinstance(other, oset): - return len(self) == len(other) and list(self) == list(other) - return set(self) == set(other) - - @property - def back(self): - return self.end[1][0] - -if __name__ == '__main__': - print((oset('abracadabra'))) - stuff = oset() - stuff.add(1) - print(stuff) - stuff.add(1) - print(stuff) - print((oset('simsalabim'))) - o = oset('abcde') - print(o) - print(o.end) - - o = oset() - print(o.back) - - o = oset([3]) - print(o.back) diff --git a/libs_crutch/contrib/spyne/util/protocol.py b/libs_crutch/contrib/spyne/util/protocol.py deleted file mode 100644 index 4ae305c..0000000 --- a/libs_crutch/contrib/spyne/util/protocol.py +++ /dev/null @@ -1,38 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""Helpers for protocol boilerplate.""" - -from spyne import MethodContext -from spyne.server import ServerBase - - -def deserialize_request_string(string, app): - """Deserialize request string using in_protocol in application definition. - Returns the corresponding native python object. - """ - - server = ServerBase(app) - initial_ctx = MethodContext(server, MethodContext.SERVER) - initial_ctx.in_string = [string] - - ctx = server.generate_contexts(initial_ctx)[0] - server.get_in_object(ctx) - - return ctx.in_object diff --git a/libs_crutch/contrib/spyne/util/resource.py b/libs_crutch/contrib/spyne/util/resource.py deleted file mode 100644 index bde03e8..0000000 --- a/libs_crutch/contrib/spyne/util/resource.py +++ /dev/null @@ -1,72 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -import os.path - -import spyne.util.autorel - - -def get_resource_path(ns, fn): - try: - from spyne._deploymentinfo import resource_filename - except ImportError: - from pkg_resources import resource_filename - - resfn = resource_filename(ns, fn) - spyne.util.autorel.AutoReloader.FILES.add(resfn) - path = os.path.abspath(resfn) - return path - - -def get_resource_file(ns, fn): - return open(get_resource_path(ns, fn), 'rb') - - -def get_resource_file_contents(ns, fn, enc=None): - resfn = get_resource_path(ns, fn) - - if enc is None: - return open(resfn, 'rb').read() - else: - return open(resfn, 'rb').read().decode(enc) - - -def parse_xml_resource(ns, fn): - from lxml import etree - - retval = etree.parse(get_resource_file(ns, fn)) - - return retval.getroot() - - -def parse_html_resource(ns, fn): - from lxml import html - - retval = html.parse(get_resource_file(ns, fn)) - - return retval.getroot() - - -def parse_cloth_resource(ns, fn): - from lxml import html - - retval = html.fragment_fromstring(get_resource_file_contents(ns, fn), - create_parent='spyne-root') - retval.attrib['spyne-tagbag'] = '' - return retval diff --git a/libs_crutch/contrib/spyne/util/simple.py b/libs_crutch/contrib/spyne/util/simple.py deleted file mode 100644 index 164be68..0000000 --- a/libs_crutch/contrib/spyne/util/simple.py +++ /dev/null @@ -1,57 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. 
-# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""Contains functions that implement the most common protocol and transport -combinations""" - - -from spyne.application import Application - - -def wsgi_soap11_application(services, tns='spyne.simple.soap', validator=None, - name=None): - """Wraps `services` argument inside a WsgiApplication that uses Soap 1.1 for - both input and output protocols. - """ - - from spyne.protocol.soap import Soap11 - from spyne.server.wsgi import WsgiApplication - - application = Application(services, tns, name=name, - in_protocol=Soap11(validator=validator), out_protocol=Soap11()) - - return WsgiApplication(application) - -wsgi_soap_application = wsgi_soap11_application -"""DEPRECATED! Use :func:`wsgi_soap11_application` instead.""" - - -def pyramid_soap11_application(services, tns='spyne.simple.soap', - validator=None, name=None): - """Wraps `services` argument inside a PyramidApplication that uses Soap 1.1 - for both input and output protocols. 
- """ - - from spyne.protocol.soap import Soap11 - from spyne.server.pyramid import PyramidApplication - - application = Application(services, tns, name=name, - in_protocol=Soap11(validator=validator), out_protocol=Soap11()) - - return PyramidApplication(application) diff --git a/libs_crutch/contrib/spyne/util/six.py b/libs_crutch/contrib/spyne/util/six.py deleted file mode 100644 index d13ddbe..0000000 --- a/libs_crutch/contrib/spyne/util/six.py +++ /dev/null @@ -1,985 +0,0 @@ -# Copyright (c) 2010-2020 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Utilities for writing code that runs on Python 2 and 3""" - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.14.0" - - -# Useful for very coarse version differentiation. 
-PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. 
- delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." 
+ fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. - - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("getoutput", "commands", "subprocess"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - 
MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - 
MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("splitvalue", "urllib", "urllib.parse"), - MovedAttribute("splittype", "urllib", "urllib.parse"), - MovedAttribute("splithost", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - 
setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - 
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), - MovedAttribute("parse_http_list", "urllib2", "urllib.request"), - MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - 
MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def 
remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" - _func_name = "__name__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - _func_name = "func_name" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) -get_function_name = operator.attrgetter(_func_name) - - -if PY3: - def iterkeys(d, **kw): - 
return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - del io - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" - _assertNotRegex = "assertNotRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = 
functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -def assertNotRegex(self, *args, **kwargs): - return getattr(self, _assertNotRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - try: - raise tp, value, tb - finally: - tb = None -""") - - -if sys.version_info[:2] > (3,): - exec_("""def raise_from(value, from_value): - try: - raise value from from_value - finally: - value = None -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode 
with it. - if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - # This does exactly the same what the :func:`py3:functools.update_wrapper` - # function does on Python versions after 3.2. It sets the ``__wrapped__`` - # attribute on ``wrapper`` object and it doesn't raise an error if any of - # the attributes mentioned in ``assigned`` and ``updated`` are missing on - # ``wrapped`` object. 
- def _update_wrapper(wrapper, wrapped, - assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - for attr in assigned: - try: - value = getattr(wrapped, attr) - except AttributeError: - continue - else: - setattr(wrapper, attr, value) - for attr in updated: - getattr(wrapper, attr).update(getattr(wrapped, attr, {})) - wrapper.__wrapped__ = wrapped - return wrapper - _update_wrapper.__doc__ = functools.update_wrapper.__doc__ - - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - return functools.partial(_update_wrapper, wrapped=wrapped, - assigned=assigned, updated=updated) - wraps.__doc__ = functools.wraps.__doc__ - -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(type): - - def __new__(cls, name, this_bases, d): - if sys.version_info[:2] >= (3, 7): - # This version introduced PEP 560 that requires a bit - # of extra care (we mimic what is done by __build_class__). 
- resolved_bases = types.resolve_bases(bases) - if resolved_bases is not bases: - d['__orig_bases__'] = bases - else: - resolved_bases = bases - return meta(name, resolved_bases, d) - - @classmethod - def __prepare__(cls, name, this_bases): - return meta.__prepare__(name, bases) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - if hasattr(cls, '__qualname__'): - orig_vars['__qualname__'] = cls.__qualname__ - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def ensure_binary(s, encoding='utf-8', errors='strict'): - """Coerce **s** to six.binary_type. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> encoded to `bytes` - - `bytes` -> `bytes` - """ - if isinstance(s, text_type): - return s.encode(encoding, errors) - elif isinstance(s, binary_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def ensure_str(s, encoding='utf-8', errors='strict'): - """Coerce *s* to `str`. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) - if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) - elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) - return s - - -def ensure_text(s, encoding='utf-8', errors='strict'): - """Coerce *s* to six.text_type. 
- - For Python 2: - - `unicode` -> `unicode` - - `str` -> `unicode` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if isinstance(s, binary_type): - return s.decode(encoding, errors) - elif isinstance(s, text_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def python_2_unicode_compatible(klass): - """ - A class decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. 
- if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) \ No newline at end of file diff --git a/libs_crutch/contrib/spyne/util/tdict.py b/libs_crutch/contrib/spyne/util/tdict.py deleted file mode 100644 index 82875e2..0000000 --- a/libs_crutch/contrib/spyne/util/tdict.py +++ /dev/null @@ -1,101 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""The typed dict module""" - - -from itertools import chain - - -class tdict(dict): - def __init__(self, kt=None, vt=None, data=None): - """This is a typed dict implementation that optionally enforces given - types on contained values on assignment.""" - - self._kt = kt - self._vt = vt - - if kt is None and vt is None: - self.check = self._check_noop - elif kt is None: - self.check = self._check_v - elif vt is None: - self.check = self._check_k - else: - self.check = self._check_kv - - if data is not None: - self.update(data) - - def _check_noop(self, *_): - pass - - def _check_k(self, key, _): - if not isinstance(key, self._kt): - raise TypeError(repr(key)) - - def _check_v(self, _, value): - if not isinstance(value, self._vt): - raise TypeError(repr(value)) - - def _check_kv(self, key, value): - if not isinstance(key, self._kt): - raise TypeError(repr(key)) - if not isinstance(value, self._vt): - raise TypeError(repr(value)) - - def __setitem__(self, key, value): - self.check(key, value) - super(tdict, self).__setitem__(key, value) - - def update(self, E=None, **F): - try: - it = chain(E.items(), F.items()) - except AttributeError: - it = chain(E, F) - - for k, v in it: - self[k] = v - - def setdefault(self, k, d=None): - self._check_k(k, d) if self._kt is None else None - self._check_v(k, d) if self._vt is None else None - - super(tdict, self).setdefault(k, d) - - @classmethod - def fromkeys(cls, S, v=None): - kt = vt = None - - if len(S) > 0: - kt, = set((type(s) for s in S)) - - if v is not None: - vt = type(v) - - retval = tdict(kt, vt) - - for s in S: - retval[s] = v - - return retval - - def repr(self): - return "tdict(kt=%s, vt=%s, data=%s)" % \ - (self._kt, self._vt, super(tdict, self).__repr__()) diff --git 
a/libs_crutch/contrib/spyne/util/test.py b/libs_crutch/contrib/spyne/util/test.py deleted file mode 100644 index 850f20d..0000000 --- a/libs_crutch/contrib/spyne/util/test.py +++ /dev/null @@ -1,97 +0,0 @@ -# encoding: utf8 -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -from pprint import pformat - -from spyne.util import urlencode - - -def _start_response(code, headers): - print(code, pformat(headers)) - -def call_wsgi_app_kwargs(app, _mn='some_call', _headers=None, **kwargs): - return call_wsgi_app(app, _mn, _headers, kwargs.items()) - -def call_wsgi_app(app, mn='some_call', headers=None, body_pairs=None): - if headers is None: - headers = {} - if body_pairs is None: - body_pairs = [] - - body_pairs = [(k,str(v)) for k,v in body_pairs] - - request = { - u'QUERY_STRING': urlencode(body_pairs), - u'PATH_INFO': '/%s' % mn, - u'REQUEST_METHOD': u'GET', - u'SERVER_NAME': u'spyne.test', - u'SERVER_PORT': u'0', - u'wsgi.url_scheme': u'http', - } - - print(headers) - request.update(headers) - - out_string = [] - t = None - for s in app(request, _start_response): - t = type(s) - out_string.append(s) - - if t == bytes: - out_string = b''.join(out_string) - else: - out_string = ''.join(out_string) - - return out_string - -from os import mkdir, 
getcwd -from os.path import join, basename - - -def show(elt, tn=None, stdout=True): - if tn is None: - import inspect - - for frame in inspect.stack(): - if frame[3].startswith("test_"): - cn = frame[0].f_locals['self'].__class__.__name__ - tn = "%s.%s" % (cn, frame[3]) - break - - else: - raise Exception("don't be lazy and pass test name.") - - from lxml import html, etree - out_string = etree.tostring(elt, pretty_print=True) - if stdout: - print(out_string) - - fn = '%s.html' % tn - if basename(getcwd()) != 'test_html': - try: - mkdir('test_html') - except OSError: - pass - - f = open(join("test_html", fn), 'wb') - else: - f = open(fn, 'wb') - - f.write(html.tostring(elt, pretty_print=True, doctype="")) diff --git a/libs_crutch/contrib/spyne/util/tlist.py b/libs_crutch/contrib/spyne/util/tlist.py deleted file mode 100644 index ee83666..0000000 --- a/libs_crutch/contrib/spyne/util/tlist.py +++ /dev/null @@ -1,103 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -# adapted from: https://gist.github.com/KrzysztofCiba/4579691 - -## -# @file TypedList.py -# @author Krzysztof.Ciba@NOSPAMgmail.com -# @date 2012/07/19 08:21:22 -# @brief Definition of TypedList class. - - -class tlist(list): - """ - .. 
class:: tlist - - A list-like class holding only objects of specified type(s). - """ - - def __init__(self, iterable=None, types=None): - iterable = list() if not iterable else iterable - - # make sure it is iterable - iter(iterable) - - types = types if isinstance(types, tuple) else (types,) - for item in types: - if not isinstance(item, type): - raise TypeError("%s is not a type" % repr(item)) - - self._types = types - for i in iterable: - self._type_check(i) - list.__init__(self, iterable) - - def types(self): - return self._types - - def _type_check(self, val): - if not self._types: - return - - if not isinstance(val, self._types): - raise TypeError( - "Wrong type %s, this list can hold only instances of %s" - % (type(val), str(self._types))) - - def __iadd__(self, other): - map(self._type_check, other) - list.__iadd__(self, other) - return self - - def __add__(self, other): - iterable = [item for item in self] + [item for item in other] - return tlist(iterable, self._types) - - def __radd__(self, other): - iterable = [item for item in other] + [item for item in self] - if isinstance(other, tlist): - return self.__class__(iterable, other.types()) - return tlist(iterable, self._types) - - def __setitem__(self, key, value): - itervalue = (value,) - if isinstance(key, slice): - iter(value) - itervalue = value - map(self._type_check, itervalue) - list.__setitem__(self, key, value) - - def __setslice__(self, i, j, iterable): - iter(iterable) - map(self._type_check, iterable) - list.__setslice__(self, i, j, iterable) - - def append(self, val): - self._type_check(val) - list.append(self, val) - - def extend(self, iterable): - iter(iterable) - map(self._type_check, iterable) - list.extend(self, iterable) - - def insert(self, i, val): - self._type_check(val) - list.insert(self, i, val) diff --git a/libs_crutch/contrib/spyne/util/toposort.py b/libs_crutch/contrib/spyne/util/toposort.py deleted file mode 100644 index 6fba0ce..0000000 --- 
a/libs_crutch/contrib/spyne/util/toposort.py +++ /dev/null @@ -1,54 +0,0 @@ -# http://code.activestate.com/recipes/577413-topological-sort/ -# -# The MIT License (MIT) -# -# Copyright (c) ActiveState.com -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. 
- - -from pprint import pformat - -try: - # Python 3 - from functools import reduce -except: - pass - - -def toposort2(data): - if len(data) == 0: - return - - for k, v in data.items(): - v.discard(k) # Ignore self dependencies - - # add items that are listed as dependencies but not as dependents to data - extra_items_in_deps = reduce(set.union, data.values()) - set(data.keys()) - data.update(dict([(item,set()) for item in extra_items_in_deps])) - - while True: - ordered = set(item for item,dep in data.items() if len(dep) == 0) - if len(ordered) == 0: - break - yield sorted(ordered, key=lambda x:repr(x)) - data = dict([(item, (dep - ordered)) for item,dep in data.items() - if item not in ordered]) - - assert not data, "A cyclic dependency exists amongst\n%s" % pformat(data) diff --git a/libs_crutch/contrib/spyne/util/web.py b/libs_crutch/contrib/spyne/util/web.py deleted file mode 100644 index 1dd3847..0000000 --- a/libs_crutch/contrib/spyne/util/web.py +++ /dev/null @@ -1,354 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -""" -Some code dump from some time ago. - -If you're using this for anything serious, you're insane. 
-""" - -from __future__ import absolute_import - -import logging -logger = logging.getLogger(__name__) - -from inspect import isclass - -from spyne import rpc, Any, AnyDict, NATIVE_MAP, M, Array, ComplexModelBase, \ - UnsignedInteger32, PushBase, Iterable, ModelBase, File, Service, \ - ResourceNotFoundError, Unicode - -from spyne.const import MAX_ARRAY_ELEMENT_NUM, MAX_DICT_ELEMENT_NUM, \ - MAX_STRING_FIELD_LENGTH, MAX_FIELD_NUM - -try: - from spyne.store.relational.document import FileData - from sqlalchemy.orm.exc import DetachedInstanceError -except ImportError: - # these are used just for isinstance checks. so we just set it to an - # anonymous value - FileData = type('__hidden', (object, ), {}) - DetachedInstanceError = type('__hidden', (Exception, ), {}) - -from spyne.util import memoize, six - -EXCEPTION_ADDRESS = None - - -try: - from colorama.ansi import Fore - from colorama.ansi import Style - RED = Fore.RED + Style.BRIGHT - GREEN = Fore.GREEN + Style.BRIGHT - RESET = Style.RESET_ALL - -except ImportError: - RED = "" - GREEN = "" - RESET = "" - - -class ReaderService(Service): - pass - - -class WriterService(Service): - pass - - -def log_repr(obj, cls=None, given_len=None, parent=None, from_array=False, - tags=None, prot=None): - """Use this function if you want to serialize a ComplexModelBase instance to - logs. It will: - - * Limit size of the String types - * Limit size of Array types - * Not try to iterate on iterators, push data, etc. 
- """ - - if tags is None: - tags = set() - - if obj is None: - return 'None' - - objcls = None - if hasattr(obj, '__class__'): - objcls = obj.__class__ - - if objcls in (list, tuple): - objcls = Array(Any) - - elif objcls is dict: - objcls = AnyDict - - elif objcls in NATIVE_MAP: - objcls = NATIVE_MAP[objcls] - - if objcls is not None and (cls is None or issubclass(objcls, cls)): - cls = objcls - - cls_attrs = None - logged = None - - if hasattr(cls, 'Attributes'): - if prot is None: - cls_attrs = cls.Attributes - else: - cls_attrs = prot.get_cls_attrs(cls) - - logged = cls_attrs.logged - if not logged: - return "%s(...)" % cls.get_type_name() - - if logged == '...': - return "(...)" - - if issubclass(cls, File) and isinstance(obj, File.Value): - cls = obj.__class__ - - if cls_attrs.logged == 'len': - l = '?' - try: - if isinstance(obj, (list, tuple)): - l = str(sum([len(o) for o in obj])) - - else: - l = str(len(obj)) - except (TypeError, ValueError): - if given_len is not None: - l = str(given_len) - - return "" % l - - if callable(cls_attrs.logged): - try: - return cls_attrs.logged(obj) - except Exception as e: - logger.error("Exception %r in log_repr transformer ignored", e) - logger.exception(e) - pass - - if issubclass(cls, AnyDict): - retval = [] - - if isinstance(obj, dict): - if logged == 'full': - for i, (k, v) in enumerate(obj.items()): - retval.append('%r: %r' % (k, v)) - - elif logged == 'keys': - for i, k in enumerate(obj.keys()): - if i >= MAX_DICT_ELEMENT_NUM: - retval.append("(...)") - break - - retval.append('%r: (...)' % (k,)) - - elif logged == 'values': - for i, v in enumerate(obj.values()): - if i >= MAX_DICT_ELEMENT_NUM: - retval.append("(...)") - break - - retval.append('(...): %s' % (log_repr(v, tags=tags),)) - - elif logged == 'keys-full': - for k in obj.keys(): - retval.append('%r: (...)' % (k,)) - - elif logged == 'values-full': - for v in obj.values(): - retval.append('(...): %r' % (v,)) - - elif logged is True: # default behaviour - 
for i, (k, v) in enumerate(obj.items()): - if i >= MAX_DICT_ELEMENT_NUM: - retval.append("(...)") - break - - retval.append('%r: %s' % (k, - log_repr(v, parent=k, tags=tags))) - else: - raise ValueError("Invalid value logged=%r", logged) - - return "{%s}" % ', '.join(retval) - - else: - if logged in ('full', 'keys-full', 'values-full'): - retval = [repr(s) for s in obj] - - else: - for i, v in enumerate(obj): - if i >= MAX_DICT_ELEMENT_NUM: - retval.append("(...)") - break - - retval.append(log_repr(v, tags=tags)) - - return "[%s]" % ', '.join(retval) - - if (issubclass(cls, Array) or (cls_attrs.max_occurs > 1)) and not from_array: - if id(obj) in tags: - return "%s(...)" % obj.__class__.__name__ - - tags.add(id(obj)) - - retval = [] - - subcls = cls - if issubclass(cls, Array): - subcls, = cls._type_info.values() - - if isinstance(obj, PushBase): - return '[]' - - if logged is None: - logged = cls_attrs.logged - - for i, o in enumerate(obj): - if logged != 'full' and i >= MAX_ARRAY_ELEMENT_NUM: - retval.append("(...)") - break - - retval.append(log_repr(o, subcls, from_array=True, tags=tags)) - - return "[%s]" % (', '.join(retval)) - - if issubclass(cls, ComplexModelBase): - if id(obj) in tags: - return "%s(...)" % obj.__class__.__name__ - - tags.add(id(obj)) - - retval = [] - i = 0 - - for k, t in cls.get_flat_type_info(cls).items(): - if i >= MAX_FIELD_NUM: - retval.append("(...)") - break - - if not t.Attributes.logged: - continue - - if logged == '...': - retval.append("%s=(...)" % k) - continue - - try: - v = getattr(obj, k, None) - except (AttributeError, KeyError, DetachedInstanceError): - v = None - - # HACK!: sometimes non-db attributes restored from database don't - # get properly reinitialized. 
- if isclass(v) and issubclass(v, ModelBase): - continue - - polymap = t.Attributes.polymap - if polymap is not None: - t = polymap.get(v.__class__, t) - - if v is not None: - retval.append("%s=%s" % (k, log_repr(v, t, parent=k, tags=tags))) - i += 1 - - return "%s(%s)" % (cls.get_type_name(), ', '.join(retval)) - - if issubclass(cls, Unicode) and isinstance(obj, six.string_types): - if len(obj) > MAX_STRING_FIELD_LENGTH: - return '%r(...)' % obj[:MAX_STRING_FIELD_LENGTH] - - return repr(obj) - - if issubclass(cls, File) and isinstance(obj, FileData): - return log_repr(obj, FileData, tags=tags) - - retval = repr(obj) - - if len(retval) > MAX_STRING_FIELD_LENGTH: - retval = retval[:MAX_STRING_FIELD_LENGTH] + "(...)" - - return retval - - -def TReaderService(T, T_name): - class ReaderService(ReaderService): - @rpc(M(UnsignedInteger32), _returns=T, - _in_message_name='get_%s' % T_name, - _in_variable_names={'obj_id': "%s_id" % T_name}) - def get(ctx, obj_id): - return ctx.udc.session.query(T).filter_by(id=obj_id).one() - - @rpc(_returns=Iterable(T), - _in_message_name='get_all_%s' % T_name) - def get_all(ctx): - return ctx.udc.session.query(T).order_by(T.id) - - return ReaderService - - -def TWriterService(T, T_name, put_not_found='raise'): - assert put_not_found in ('raise', 'fix') - - if put_not_found == 'raise': - def put_not_found(obj): - raise ResourceNotFoundError('%s.id=%d' % (T_name, obj.id)) - - elif put_not_found == 'fix': - def put_not_found(obj): - obj.id = None - - class WriterService(WriterService): - @rpc(M(T), _returns=UnsignedInteger32, - _in_message_name='put_%s' % T_name, - _in_variable_names={'obj': T_name}) - def put(ctx, obj): - if obj.id is None: - ctx.udc.session.add(obj) - ctx.udc.session.flush() # so that we get the obj.id value - - else: - if ctx.udc.session.query(T).get(obj.id) is None: - # this is to prevent the client from setting the primary key - # of a new object instead of the database's own primary-key - # generator. 
- # Instead of raising an exception, you can also choose to - # ignore the primary key set by the client by silently doing - # obj.id = None in order to have the database assign the - # primary key the traditional way. - put_not_found(obj.id) - - else: - ctx.udc.session.merge(obj) - - return obj.id - - @rpc(M(UnsignedInteger32), - _in_message_name='del_%s' % T_name, - _in_variable_names={'obj_id': '%s_id' % T_name}) - def del_(ctx, obj_id): - count = ctx.udc.session.query(T).filter_by(id=obj_id).count() - if count == 0: - raise ResourceNotFoundError(obj_id) - - ctx.udc.session.query(T).filter_by(id=obj_id).delete() - - return WriterService diff --git a/libs_crutch/contrib/spyne/util/wsgi_wrapper.py b/libs_crutch/contrib/spyne/util/wsgi_wrapper.py deleted file mode 100644 index b150849..0000000 --- a/libs_crutch/contrib/spyne/util/wsgi_wrapper.py +++ /dev/null @@ -1,126 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - -"""A Convenience module for wsgi wrapper routines.""" - -import logging -logger = logging.getLogger(__name__) - -import os - -from spyne import Application -from spyne.server.wsgi import WsgiApplication - - -class WsgiMounter(object): - """Simple mounter object for wsgi callables. 
Takes a dict where the keys are - uri fragments and values are :class:`spyne.application.Application` - instances. - - :param mounts: dict of :class:`spyne.application.Application` instances - whose keys are url fragments. Use ``''`` or ``'/'`` as key to set the - default handler. When a default handler is not set, an empty 404 page is - returned. - """ - - @staticmethod - def default(e, s): - s("404 Not found", []) - return [] - - def __init__(self, mounts=None): - self.mounts = {} - - for k, v in (mounts or {}).items(): - if isinstance(v, Application): - app = WsgiApplication(v) - else: - assert callable(v), "%r is not a valid wsgi app." % v - app = v - - if k in ('', '/'): - self.default = app - else: - self.mounts[k] = app - - def __call__(self, environ, start_response): - path_info = environ.get('PATH_INFO', '') - fragments = [a for a in path_info.split('/') if len(a) > 0] - - script = '' - if len(fragments) > 0: - script = fragments[0] - - app = self.mounts.get(script, self.default) - if app is self.default: - return app(environ, start_response) - - original_script_name = environ.get('SCRIPT_NAME', '') - - if len(script) > 0: - script = "/" + script - environ['SCRIPT_NAME'] = ''.join(('/', original_script_name, script)) - pi = ''.join(('/', '/'.join(fragments[1:]))) - - if pi == '/': - environ['PATH_INFO'] = '' - else: - environ['PATH_INFO'] = pi - - return app(environ, start_response) - - -def run_twisted(apps, port, static_dir='.', interface='0.0.0.0'): - """Twisted wrapper for the spyne.server.wsgi.WsgiApplication. Twisted can - use one thread per request to run services, so code wrapped this way does - not necessarily have to respect twisted way of doing things. - - :param apps: List of tuples containing (application, url) pairs - :param port: Port to listen to. - :param static_dir: The directory that contains static files. Pass `None` if - you don't want to server static content. Url fragments in the `apps` - argument take precedence. 
- :param interface: The network interface to which the server binds, if not - specified, it will accept connections on any interface by default. - """ - - import twisted.web.server - import twisted.web.static - - from twisted.web.resource import Resource - from twisted.web.wsgi import WSGIResource - from twisted.internet import reactor - - if static_dir != None: - static_dir = os.path.abspath(static_dir) - logging.info("registering static folder %r on /" % static_dir) - root = twisted.web.static.File(static_dir) - else: - root = Resource() - - for app, url in apps: - resource = WSGIResource(reactor, reactor, app) - logging.info("registering %r on /%s" % (app, url)) - root.putChild(url, resource) - - site = twisted.web.server.Site(root) - reactor.listenTCP(port, site, interface=interface) - logging.info("listening on: %s:%d" % (interface, port)) - - return reactor.run() diff --git a/libs_crutch/contrib/spyne/util/xml.py b/libs_crutch/contrib/spyne/util/xml.py deleted file mode 100644 index d52ada5..0000000 --- a/libs_crutch/contrib/spyne/util/xml.py +++ /dev/null @@ -1,309 +0,0 @@ - -# -# spyne - Copyright (C) Spyne contributors. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 -# - - -"""The `spyne.util.xml` module contains various Xml and Xml Schema related -utility functions. 
-""" -from inspect import isgenerator - -from lxml import etree - -from os.path import dirname -from os.path import abspath - -from spyne import ServiceBase, Application, srpc -from spyne.context import FakeContext -from spyne.interface import Interface -from spyne.interface.xml_schema import XmlSchema -from spyne.interface.xml_schema.parser import XmlSchemaParser, Thier_repr, PARSER -from spyne.protocol import ProtocolMixin -from spyne.protocol.cloth import XmlCloth - -from spyne.protocol.xml import XmlDocument -from spyne.util.appreg import unregister_application -from spyne.util.six import BytesIO -from spyne.util.tlist import tlist - - -class FakeApplication(object): - def __init__(self, default_namespace): - self.tns = default_namespace - self.services = () - self.classes = () - - -def get_schema_documents(models, default_namespace=None): - """Returns the schema documents in a dict whose keys are namespace prefixes - and values are Element objects. - - :param models: A list of spyne.model classes that will be represented in - the schema. - """ - - if default_namespace is None: - default_namespace = models[0].get_namespace() - - fake_app = FakeApplication(default_namespace) - - interface = Interface(fake_app) - for m in models: - m.resolve_namespace(m, default_namespace) - interface.add_class(m) - interface.populate_interface(fake_app) - - document = XmlSchema(interface) - document.build_interface_document() - - return document.get_interface_document() - - -def get_validation_schema(models, default_namespace=None): - """Returns the validation schema object for the given models. - - :param models: A list of spyne.model classes that will be represented in - the schema. 
- """ - - if default_namespace is None: - default_namespace = models[0].get_namespace() - - fake_app = FakeApplication(default_namespace) - - interface = Interface(fake_app) - for m in models: - m.resolve_namespace(m, default_namespace) - interface.add_class(m) - - schema = XmlSchema(interface) - schema.build_validation_schema() - - return schema.validation_schema - - -def _dig(par): - for elt in par: - elt.tag = elt.tag.split('}')[-1] - _dig(elt) - - -_xml_object = XmlDocument() - - -def get_object_as_xml(inst, cls=None, root_tag_name=None, no_namespace=False): - """Returns an ElementTree representation of a - :class:`spyne.model.complex.ComplexModel` subclass. - - :param inst: The instance of the class to be serialized. - :param cls: The class to be serialized. Optional. - :param root_tag_name: The root tag string to use. Defaults to the output of - ``value.__class__.get_type_name_ns()``. - :param no_namespace: When true, namespace information is discarded. - """ - - if cls is None: - cls = inst.__class__ - - parent = etree.Element("parent") - _xml_object.to_parent(None, cls, inst, parent, cls.get_namespace(), - root_tag_name) - if no_namespace: - _dig(parent) - etree.cleanup_namespaces(parent) - - return parent[0] - - -def get_object_as_xml_polymorphic(inst, cls=None, root_tag_name=None, - no_namespace=False): - """Returns an ElementTree representation of a - :class:`spyne.model.complex.ComplexModel` subclass. - - :param inst: The instance of the class to be serialized. - :param cls: The class to be serialized. Optional. - :param root_tag_name: The root tag string to use. Defaults to the output of - ``value.__class__.get_type_name_ns()``. - :param no_namespace: When true, namespace information is discarded. 
- """ - - if cls is None: - cls = inst.__class__ - - if no_namespace: - app = Application([ServiceBase], tns="", - out_protocol=XmlDocument(polymorphic=True)) - else: - tns = cls.get_namespace() - if tns is None: - raise ValueError( - "Either set a namespace for %r or pass no_namespace=True" - % (cls, )) - - class _DummyService(ServiceBase): - @srpc(cls) - def f(_): - pass - - app = Application([_DummyService], tns=tns, - out_protocol=XmlDocument(polymorphic=True)) - - unregister_application(app) - - parent = etree.Element("parent", nsmap=app.interface.nsmap) - - app.out_protocol.to_parent(None, cls, inst, parent, cls.get_namespace(), - root_tag_name) - - if no_namespace: - _dig(parent) - - etree.cleanup_namespaces(parent) - - return parent[0] - - -def get_xml_as_object_polymorphic(elt, cls): - """Returns a native :class:`spyne.model.complex.ComplexModel` child from an - ElementTree representation of the same class. - - :param elt: The xml document to be deserialized. - :param cls: The class the xml document represents. - """ - - tns = cls.get_namespace() - if tns is None: - raise ValueError("Please set a namespace for %r" % (cls, )) - - class _DummyService(ServiceBase): - @srpc(cls) - def f(_): - pass - - app = Application([_DummyService], tns=tns, - in_protocol=XmlDocument(polymorphic=True)) - - unregister_application(app) - - return app.in_protocol.from_element(FakeContext(app=app), cls, elt) - - -def get_object_as_xml_cloth(inst, cls=None, no_namespace=False, encoding='utf8'): - """Returns an ElementTree representation of a - :class:`spyne.model.complex.ComplexModel` subclass. - - :param inst: The instance of the class to be serialized. - :param cls: The class to be serialized. Optional. - :param root_tag_name: The root tag string to use. Defaults to the output of - ``value.__class__.get_type_name_ns()``. - :param no_namespace: When true, namespace information is discarded. 
- """ - - if cls is None: - cls = inst.__class__ - - if cls.get_namespace() is None and no_namespace is None: - no_namespace = True - - if no_namespace is None: - no_namespace = False - - ostr = BytesIO() - xml_cloth = XmlCloth(use_ns=(not no_namespace)) - ctx = FakeContext() - with etree.xmlfile(ostr, encoding=encoding) as xf: - ctx.outprot_ctx.doctype_written = False - ctx.protocol.prot_stack = tlist([], ProtocolMixin) - tn = cls.get_type_name() - ret = xml_cloth.subserialize(ctx, cls, inst, xf, tn) - - assert not isgenerator(ret) - - return ostr.getvalue() - - -def get_xml_as_object(elt, cls): - """Returns a native :class:`spyne.model.complex.ComplexModel` child from an - ElementTree representation of the same class. - - :param elt: The xml document to be deserialized. - :param cls: The class the xml document represents. - """ - return _xml_object.from_element(None, cls, elt) - - -def parse_schema_string(s, files={}, repr_=Thier_repr(with_ns=False), - skip_errors=False): - """Parses a schema string and returns a _Schema object. - - :param s: The string or bytes object that contains the schema document. - :param files: A dict that maps namespaces to path to schema files that - contain the schema document for those namespaces. - :param repr_: A callable that functions as `repr`. - :param skip_errors: Skip parsing errors and return a partial schema. - See debug log for details. - - :return: :class:`spyne.interface.xml_schema.parser._Schema` instance. - """ - - elt = etree.fromstring(s, parser=PARSER) - return XmlSchemaParser(files, repr_=repr_, - skip_errors=skip_errors).parse_schema(elt) - - -def parse_schema_element(elt, files={}, repr_=Thier_repr(with_ns=False), - skip_errors=False): - """Parses a `` element and returns a _Schema object. - - :param elt: The `` element, an lxml.etree._Element instance. - :param files: A dict that maps namespaces to path to schema files that - contain the schema document for those namespaces. 
- :param repr_: A callable that functions as `repr`. - :param skip_errors: Skip parsing errors and return a partial schema. - See debug log for details. - - :return: :class:`spyne.interface.xml_schema.parser._Schema` instance. - """ - - return XmlSchemaParser(files, repr_=repr_, - skip_errors=skip_errors).parse_schema(elt) - - -def parse_schema_file(file_name, files=None, repr_=Thier_repr(with_ns=False), - skip_errors=False): - """Parses a schema file and returns a _Schema object. Schema files typically - have the `*.xsd` extension. - - :param file_name: The path to the file that contains the schema document - to be parsed. - :param files: A dict that maps namespaces to path to schema files that - contain the schema document for those namespaces. - :param repr_: A callable that functions as `repr`. - :param skip_errors: Skip parsing errors and return a partial schema. - See debug log for details. - - :return: :class:`spyne.interface.xml_schema.parser._Schema` instance. - """ - - if files is None: - files = dict() - - elt = etree.fromstring(open(file_name, 'rb').read(), parser=PARSER) - wd = abspath(dirname(file_name)) - return XmlSchemaParser(files, wd, repr_=repr_, - skip_errors=skip_errors).parse_schema(elt) diff --git a/libs_crutch/contrib/suds/__init__.py b/libs_crutch/contrib/suds/__init__.py deleted file mode 100755 index 3c1ef0d..0000000 --- a/libs_crutch/contrib/suds/__init__.py +++ /dev/null @@ -1,164 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Lightweight SOAP Python client providing a Web Service proxy. - -""" - -import sys - - -# -# Project properties -# - -from .version import __build__, __version__ - - -# -# Exceptions -# - -class MethodNotFound(Exception): - def __init__(self, name): - Exception.__init__(self, "Method not found: '%s'" % (name,)) - -class PortNotFound(Exception): - def __init__(self, name): - Exception.__init__(self, "Port not found: '%s'" % (name,)) - -class ServiceNotFound(Exception): - def __init__(self, name): - Exception.__init__(self, "Service not found: '%s'" % (name,)) - -class TypeNotFound(Exception): - def __init__(self, name): - Exception.__init__(self, "Type not found: '%s'" % (tostr(name),)) - -class BuildError(Exception): - def __init__(self, name, exception): - Exception.__init__(self, "An error occurred while building an " - "instance of (%s). As a result the object you requested could not " - "be constructed. It is recommended that you construct the type " - "manually using a Suds object. Please open a ticket with a " - "description of this error. 
Reason: %s" % (name, exception)) - -class WebFault(Exception): - def __init__(self, fault, document): - if hasattr(fault, "faultstring"): - Exception.__init__(self, "Server raised fault: '%s'" % - (fault.faultstring,)) - self.fault = fault - self.document = document - - -# -# Logging -# - -class Repr: - def __init__(self, x): - self.x = x - def __str__(self): - return repr(self.x) - - -# -# Utility -# - -class null: - """I{null} object used to pass NULL for optional XML nodes.""" - pass - -def objid(obj): - return obj.__class__.__name__ + ":" + hex(id(obj)) - -def tostr(object, encoding=None): - """Get a unicode safe string representation of an object.""" - if isinstance(object, str): - if encoding is None: - return object - return object.encode(encoding) - if isinstance(object, tuple): - s = ["("] - for item in object: - s.append(tostr(item)) - s.append(", ") - s.append(")") - return "".join(s) - if isinstance(object, list): - s = ["["] - for item in object: - s.append(tostr(item)) - s.append(", ") - s.append("]") - return "".join(s) - if isinstance(object, dict): - s = ["{"] - for item in list(object.items()): - s.append(tostr(item[0])) - s.append(" = ") - s.append(tostr(item[1])) - s.append(", ") - s.append("}") - return "".join(s) - try: - return str(object) - except Exception: - return str(object) - - -# -# Python 3 compatibility -# - -if sys.version_info < (3, 0): - from io import StringIO as BytesIO -else: - from io import BytesIO - -# Idea from 'http://lucumr.pocoo.org/2011/1/22/forwards-compatible-python'. -class UnicodeMixin(object): - if sys.version_info >= (3, 0): - # For Python 3, __str__() and __unicode__() should be identical. - __str__ = lambda x: x.__unicode__() - else: - __str__ = lambda x: str(x).encode("utf-8") - -# Used instead of byte literals as they are not supported on Python versions -# prior to 2.6. 
-def byte_str(s="", encoding="utf-8", input_encoding="utf-8", errors="strict"): - """ - Returns a byte string version of 's', encoded as specified in 'encoding'. - - Accepts str & unicode objects, interpreting non-unicode strings as byte - strings encoded using the given input encoding. - - """ - assert isinstance(s, str) - if isinstance(s, str): - return s.encode(encoding, errors) - if s and encoding != input_encoding: - return s.decode(input_encoding, errors).encode(encoding, errors) - return s - -# Class used to represent a byte string. Useful for asserting that correct -# string types are being passed around where needed. -if sys.version_info >= (3, 0): - byte_str_class = bytes -else: - byte_str_class = str diff --git a/libs_crutch/contrib/suds/argparser.py b/libs_crutch/contrib/suds/argparser.py deleted file mode 100755 index 2ca9dd1..0000000 --- a/libs_crutch/contrib/suds/argparser.py +++ /dev/null @@ -1,419 +0,0 @@ -# -*- coding: utf-8 -*- - -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jurko Gospodnetić ( jurko.gospodnetic@pke.hr ) - -""" -Suds web service operation invocation function argument parser. - -See the parse_args() function description for more detailed information. 
- -""" - -__all__ = ["parse_args"] - - -def parse_args(method_name, param_defs, args, kwargs, external_param_processor, - extra_parameter_errors): - """ - Parse arguments for suds web service operation invocation functions. - - Suds prepares Python function objects for invoking web service operations. - This function implements generic binding agnostic part of processing the - arguments passed when calling those function objects. - - Argument parsing rules: - * Each input parameter element should be represented by single regular - Python function argument. - * At most one input parameter belonging to a single choice parameter - structure may have its value specified as something other than None. - * Positional arguments are mapped to choice group input parameters the - same as is done for a simple all/sequence group - each in turn. - - Expects to be passed the web service operation's parameter definitions - (parameter name, type & optional ancestry information) in order and, based - on that, extracts the values for those parameter from the arguments - provided in the web service operation invocation call. - - Ancestry information describes parameters constructed based on suds - library's automatic input parameter structure unwrapping. It is expected to - include the parameter's XSD schema 'ancestry' context, i.e. a list of all - the parent XSD schema tags containing the parameter's tag. Such - ancestry context provides detailed information about how the parameter's - value is expected to be used, especially in relation to other input - parameters, e.g. at most one parameter value may be specified for - parameters directly belonging to the same choice input group. - - Rules on acceptable ancestry items: - * Ancestry item's choice() method must return whether the item - represents a XSD schema tag. 
- * Passed ancestry items are used 'by address' internally and the same XSD - schema tag is expected to be identified by the exact same ancestry item - object during the whole argument processing. - - During processing, each parameter's definition and value, together with any - additional pertinent information collected from the encountered parameter - definition structure, is passed on to the provided external parameter - processor function. There that information is expected to be used to - construct the actual binding specific web service operation invocation - request. - - Raises a TypeError exception in case any argument related errors are - detected. The exceptions raised have been constructed to make them as - similar as possible to their respective exceptions raised during regular - Python function argument checking. - - Does not support multiple same-named input parameters. - - """ - arg_parser = _ArgParser(method_name, param_defs, external_param_processor) - return arg_parser(args, kwargs, extra_parameter_errors) - - -class _ArgParser: - """Internal argument parser implementation function object.""" - - def __init__(self, method_name, param_defs, external_param_processor): - self.__method_name = method_name - self.__param_defs = param_defs - self.__external_param_processor = external_param_processor - self.__stack = [] - - def __call__(self, args, kwargs, extra_parameter_errors): - """ - Runs the main argument parsing operation. - - Passed args & kwargs objects are not modified during parsing. - - Returns an informative 2-tuple containing the number of required & - allowed arguments. - - """ - assert not self.active(), "recursive argument parsing not allowed" - self.__init_run(args, kwargs, extra_parameter_errors) - try: - self.__process_parameters() - return self.__all_parameters_processed() - finally: - self.__cleanup_run() - assert not self.active() - - def active(self): - """ - Return whether this object is currently running argument processing. 
- - Used to avoid recursively entering argument processing from within an - external parameter processor. - - """ - return bool(self.__stack) - - def __all_parameters_processed(self): - """ - Finish the argument processing. - - Should be called after all the web service operation's parameters have - been successfully processed and, afterwards, no further parameter - processing is allowed. - - Returns a 2-tuple containing the number of required & allowed - arguments. - - See the _ArgParser class description for more detailed information. - - """ - assert self.active() - sentinel_frame = self.__stack[0] - self.__pop_frames_above(sentinel_frame) - assert len(self.__stack) == 1 - self.__pop_top_frame() - assert not self.active() - args_required = sentinel_frame.args_required() - args_allowed = sentinel_frame.args_allowed() - self.__check_for_extra_arguments(args_required, args_allowed) - return args_required, args_allowed - - def __check_for_extra_arguments(self, args_required, args_allowed): - """ - Report an error in case any extra arguments are detected. - - Does nothing if reporting extra arguments as exceptions has not been - enabled. - - May only be called after the argument processing has been completed. 
- - """ - assert not self.active() - if not self.__extra_parameter_errors: - return - - if self.__kwargs: - param_name = list(self.__kwargs.keys())[0] - if param_name in self.__params_with_arguments: - msg = "got multiple values for parameter '%s'" - else: - msg = "got an unexpected keyword argument '%s'" - self.__error(msg % (param_name,)) - - if self.__args: - def plural_suffix(count): - if count == 1: - return "" - return "s" - def plural_was_were(count): - if count == 1: - return "was" - return "were" - expected = args_required - if args_required != args_allowed: - expected = "%d to %d" % (args_required, args_allowed) - given = self.__args_count - msg_parts = ["takes %s positional argument" % (expected,), - plural_suffix(expected), " but %d " % (given,), - plural_was_were(given), " given"] - self.__error("".join(msg_parts)) - - def __cleanup_run(self): - """Cleans up after a completed argument parsing run.""" - self.__stack = [] - assert not self.active() - - def __error(self, message): - """Report an argument processing error.""" - raise TypeError("%s() %s" % (self.__method_name, message)) - - def __frame_factory(self, ancestry_item): - """Construct a new frame representing the given ancestry item.""" - frame_class = Frame - if ancestry_item is not None and ancestry_item.choice(): - frame_class = ChoiceFrame - return frame_class(ancestry_item, self.__error, - self.__extra_parameter_errors) - - def __get_param_value(self, name): - """ - Extract a parameter value from the remaining given arguments. - - Returns a 2-tuple consisting of the following: - * Boolean indicating whether an argument has been specified for the - requested input parameter. - * Parameter value. - - """ - if self.__args: - return True, self.__args.pop(0) - try: - value = self.__kwargs.pop(name) - except KeyError: - return False, None - return True, value - - def __in_choice_context(self): - """ - Whether we are currently processing a choice parameter group. 
- - This includes processing a parameter defined directly or indirectly - within such a group. - - May only be called during parameter processing or the result will be - calculated based on the context left behind by the previous parameter - processing if any. - - """ - for x in self.__stack: - if x.__class__ is ChoiceFrame: - return True - return False - - def __init_run(self, args, kwargs, extra_parameter_errors): - """Initializes data for a new argument parsing run.""" - assert not self.active() - self.__args = list(args) - self.__kwargs = dict(kwargs) - self.__extra_parameter_errors = extra_parameter_errors - self.__args_count = len(args) + len(kwargs) - self.__params_with_arguments = set() - self.__stack = [] - self.__push_frame(None) - - def __match_ancestry(self, ancestry): - """ - Find frames matching the given ancestry. - - Returns a tuple containing the following: - * Topmost frame matching the given ancestry or the bottom-most sentry - frame if no frame matches. - * Unmatched ancestry part. 
- - """ - stack = self.__stack - if len(stack) == 1: - return stack[0], ancestry - previous = stack[0] - for frame, n in zip(stack[1:], range(len(ancestry))): - if frame.id() is not ancestry[n]: - return previous, ancestry[n:] - previous = frame - return frame, ancestry[n + 1:] - - def __pop_frames_above(self, frame): - """Pops all the frames above, but not including the given frame.""" - while self.__stack[-1] is not frame: - self.__pop_top_frame() - assert self.__stack - - def __pop_top_frame(self): - """Pops the top frame off the frame stack.""" - popped = self.__stack.pop() - if self.__stack: - self.__stack[-1].process_subframe(popped) - - def __process_parameter(self, param_name, param_type, ancestry=None): - """Collect values for a given web service operation input parameter.""" - assert self.active() - param_optional = param_type.optional() - has_argument, value = self.__get_param_value(param_name) - if has_argument: - self.__params_with_arguments.add(param_name) - self.__update_context(ancestry) - self.__stack[-1].process_parameter(param_optional, value is not None) - self.__external_param_processor(param_name, param_type, - self.__in_choice_context(), value) - - def __process_parameters(self): - """Collect values for given web service operation input parameters.""" - for pdef in self.__param_defs: - self.__process_parameter(*pdef) - - def __push_frame(self, ancestry_item): - """Push a new frame on top of the frame stack.""" - frame = self.__frame_factory(ancestry_item) - self.__stack.append(frame) - - def __push_frames(self, ancestry): - """ - Push new frames representing given ancestry items. - - May only be given ancestry items other than None. Ancestry item None - represents the internal sentinel item and should never appear in a - given parameter's ancestry information. 
- - """ - for x in ancestry: - assert x is not None - self.__push_frame(x) - - def __update_context(self, ancestry): - if not ancestry: - return - match_result = self.__match_ancestry(ancestry) - last_matching_frame, unmatched_ancestry = match_result - self.__pop_frames_above(last_matching_frame) - self.__push_frames(unmatched_ancestry) - - -class Frame: - """ - Base _ArgParser context frame. - - When used directly, as opposed to using a derived class, may represent any - input parameter context/ancestry item except a choice order indicator. - - """ - - def __init__(self, id, error, extra_parameter_errors): - """ - Construct a new Frame instance. - - Passed error function is used to report any argument checking errors. - - """ - assert self.__class__ != Frame or not id or not id.choice() - self.__id = id - self._error = error - self._extra_parameter_errors = extra_parameter_errors - self._args_allowed = 0 - self._args_required = 0 - self._has_value = False - - def args_allowed(self): - return self._args_allowed - - def args_required(self): - return self._args_required - - def has_value(self): - return self._has_value - - def id(self): - return self.__id - - def process_parameter(self, optional, has_value): - args_required = 1 - if optional: - args_required = 0 - self._process_item(has_value, 1, args_required) - - def process_subframe(self, subframe): - self._process_item( - subframe.has_value(), - subframe.args_allowed(), - subframe.args_required()) - - def _process_item(self, has_value, args_allowed, args_required): - self._args_allowed += args_allowed - self._args_required += args_required - if has_value: - self._has_value = True - - -class ChoiceFrame(Frame): - """ - _ArgParser context frame representing a choice order indicator. - - A choice requires as many input arguments as are needed to satisfy the - least requiring of its items. 
For example, if we use I(n) to identify an - item requiring n parameter, then a choice containing I(2), I(3) & I(7) - requires 2 arguments while a choice containing I(5) & I(4) requires 4. - - Accepts an argument for each of its contained elements but allows at most - one of its directly contained items to have a defined value. - - """ - - def __init__(self, id, error, extra_parameter_errors): - assert id.choice() - Frame.__init__(self, id, error, extra_parameter_errors) - self.__has_item = False - - def _process_item(self, has_value, args_allowed, args_required): - self._args_allowed += args_allowed - self.__update_args_required_for_item(args_required) - self.__update_has_value_for_item(has_value) - - def __update_args_required_for_item(self, item_args_required): - if not self.__has_item: - self.__has_item = True - self._args_required = item_args_required - return - self._args_required = min(self.args_required(), item_args_required) - - def __update_has_value_for_item(self, item_has_value): - if item_has_value: - if self.has_value() and self._extra_parameter_errors: - self._error("got multiple values for a single choice " - "parameter") - self._has_value = True diff --git a/libs_crutch/contrib/suds/bindings/__init__.py b/libs_crutch/contrib/suds/bindings/__init__.py deleted file mode 100755 index 1704fa9..0000000 --- a/libs_crutch/contrib/suds/bindings/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides modules containing classes to support Web Services (SOAP) bindings. -""" diff --git a/libs_crutch/contrib/suds/bindings/binding.py b/libs_crutch/contrib/suds/bindings/binding.py deleted file mode 100755 index 4af184c..0000000 --- a/libs_crutch/contrib/suds/bindings/binding.py +++ /dev/null @@ -1,510 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -(WS) SOAP binding classes. 
- -""" - -from suds import * -from suds.sax import Namespace -from suds.sax.document import Document -from suds.sax.element import Element -from suds.sudsobject import Factory -from suds.mx import Content -from suds.mx.literal import Literal as MxLiteral -from suds.umx.typed import Typed as UmxTyped -from suds.bindings.multiref import MultiRef -from suds.xsd.query import TypeQuery, ElementQuery -from suds.xsd.sxbasic import Element as SchemaElement -from suds.options import Options -from suds.plugin import PluginContainer - -from copy import deepcopy - - -envns = ("SOAP-ENV", "http://schemas.xmlsoap.org/soap/envelope/") - - -class Binding(object): - """ - The SOAP binding class used to process outgoing and incoming SOAP messages - per the WSDL port binding. - - @ivar wsdl: The WSDL. - @type wsdl: L{suds.wsdl.Definitions} - @ivar schema: The collective schema contained within the WSDL. - @type schema: L{xsd.schema.Schema} - @ivar options: A dictionary options. - @type options: L{Options} - - """ - - def __init__(self, wsdl): - """ - @param wsdl: A WSDL. - @type wsdl: L{wsdl.Definitions} - - """ - self.wsdl = wsdl - self.multiref = MultiRef() - - def schema(self): - return self.wsdl.schema - - def options(self): - return self.wsdl.options - - def unmarshaller(self): - """ - Get the appropriate schema based XML decoder. - - @return: Typed unmarshaller. - @rtype: L{UmxTyped} - - """ - return UmxTyped(self.schema()) - - def marshaller(self): - """ - Get the appropriate XML encoder. - - @return: An L{MxLiteral} marshaller. - @rtype: L{MxLiteral} - - """ - return MxLiteral(self.schema(), self.options().xstq) - - def param_defs(self, method): - """ - Get parameter definitions. - - Each I{pdef} is a (I{name}, L{xsd.sxbase.SchemaObject}) tuple. - - @param method: A service method. - @type method: I{service.Method} - @return: A collection of parameter definitions - @rtype: [I{pdef},...] 
- - """ - raise Exception("not implemented") - - def get_message(self, method, args, kwargs): - """ - Get a SOAP message for the specified method, args and SOAP headers. - - This is the entry point for creating an outbound SOAP message. - - @param method: The method being invoked. - @type method: I{service.Method} - @param args: A list of args for the method invoked. - @type args: list - @param kwargs: Named (keyword) args for the method invoked. - @type kwargs: dict - @return: The SOAP envelope. - @rtype: L{Document} - - """ - content = self.headercontent(method) - header = self.header(content) - content = self.bodycontent(method, args, kwargs) - body = self.body(content) - env = self.envelope(header, body) - if self.options().prefixes: - body.normalizePrefixes() - env.promotePrefixes() - else: - env.refitPrefixes() - return Document(env) - - def get_reply(self, method, replyroot): - """ - Process the I{reply} for the specified I{method} by unmarshalling it - into into Python object(s). - - @param method: The name of the invoked method. - @type method: str - @param replyroot: The reply XML root node received after invoking the - specified method. - @type replyroot: L{Element} - @return: The unmarshalled reply. The returned value is an L{Object} or - a I{list} depending on whether the service returns a single object - or a collection. 
- @rtype: L{Object} or I{list} - - """ - soapenv = replyroot.getChild("Envelope", envns) - soapenv.promotePrefixes() - soapbody = soapenv.getChild("Body", envns) - soapbody = self.multiref.process(soapbody) - nodes = self.replycontent(method, soapbody) - rtypes = self.returned_types(method) - if len(rtypes) > 1: - return self.replycomposite(rtypes, nodes) - if len(rtypes) == 0: - return - if rtypes[0].multi_occurrence(): - return self.replylist(rtypes[0], nodes) - if len(nodes): - resolved = rtypes[0].resolve(nobuiltin=True) - return self.unmarshaller().process(nodes[0], resolved) - - def replylist(self, rt, nodes): - """ - Construct a I{list} reply. - - Called for replies with possible multiple occurrences. - - @param rt: The return I{type}. - @type rt: L{suds.xsd.sxbase.SchemaObject} - @param nodes: A collection of XML nodes. - @type nodes: [L{Element},...] - @return: A list of I{unmarshalled} objects. - @rtype: [L{Object},...] - - """ - resolved = rt.resolve(nobuiltin=True) - unmarshaller = self.unmarshaller() - return [unmarshaller.process(node, resolved) for node in nodes] - - def replycomposite(self, rtypes, nodes): - """ - Construct a I{composite} reply. - - Called for replies with multiple output nodes. - - @param rtypes: A list of known return I{types}. - @type rtypes: [L{suds.xsd.sxbase.SchemaObject},...] - @param nodes: A collection of XML nodes. - @type nodes: [L{Element},...] - @return: The I{unmarshalled} composite object. - @rtype: L{Object},... 
- - """ - dictionary = {} - for rt in rtypes: - dictionary[rt.name] = rt - unmarshaller = self.unmarshaller() - composite = Factory.object("reply") - for node in nodes: - tag = node.name - rt = dictionary.get(tag) - if rt is None: - if node.get("id") is None and not self.options().allowUnknownMessageParts: - message = "<%s/> not mapped to message part" % (tag,) - raise Exception(message) - continue - resolved = rt.resolve(nobuiltin=True) - sobject = unmarshaller.process(node, resolved) - value = getattr(composite, tag, None) - if value is None: - if rt.multi_occurrence(): - value = [] - setattr(composite, tag, value) - value.append(sobject) - else: - setattr(composite, tag, sobject) - else: - if not isinstance(value, list): - value = [value,] - setattr(composite, tag, value) - value.append(sobject) - return composite - - def mkparam(self, method, pdef, object): - """ - Builds a parameter for the specified I{method} using the parameter - definition (pdef) and the specified value (object). - - @param method: A method name. - @type method: str - @param pdef: A parameter definition. - @type pdef: tuple: (I{name}, L{xsd.sxbase.SchemaObject}) - @param object: The parameter value. - @type object: any - @return: The parameter fragment. - @rtype: L{Element} - - """ - marshaller = self.marshaller() - content = Content(tag=pdef[0], value=object, type=pdef[1], - real=pdef[1].resolve()) - return marshaller.process(content) - - def mkheader(self, method, hdef, object): - """ - Builds a soapheader for the specified I{method} using the header - definition (hdef) and the specified value (object). - - @param method: A method name. - @type method: str - @param hdef: A header definition. - @type hdef: tuple: (I{name}, L{xsd.sxbase.SchemaObject}) - @param object: The header value. - @type object: any - @return: The parameter fragment. 
- @rtype: L{Element} - - """ - marshaller = self.marshaller() - if isinstance(object, (list, tuple)): - return [self.mkheader(method, hdef, item) for item in object] - content = Content(tag=hdef[0], value=object, type=hdef[1]) - return marshaller.process(content) - - def envelope(self, header, body): - """ - Build the B{} for a SOAP outbound message. - - @param header: The SOAP message B{header}. - @type header: L{Element} - @param body: The SOAP message B{body}. - @type body: L{Element} - @return: The SOAP envelope containing the body and header. - @rtype: L{Element} - - """ - env = Element("Envelope", ns=envns) - env.addPrefix(Namespace.xsins[0], Namespace.xsins[1]) - env.append(header) - env.append(body) - return env - - def header(self, content): - """ - Build the B{} for a SOAP outbound message. - - @param content: The header content. - @type content: L{Element} - @return: The SOAP body fragment. - @rtype: L{Element} - - """ - header = Element("Header", ns=envns) - header.append(content) - return header - - def bodycontent(self, method, args, kwargs): - """ - Get the content for the SOAP I{body} node. - - @param method: A service method. - @type method: I{service.Method} - @param args: method parameter values. - @type args: list - @param kwargs: Named (keyword) args for the method invoked. - @type kwargs: dict - @return: The XML content for the . - @rtype: [L{Element},...] - - """ - raise Exception("not implemented") - - def headercontent(self, method): - """ - Get the content for the SOAP I{Header} node. - - @param method: A service method. - @type method: I{service.Method} - @return: The XML content for the . - @rtype: [L{Element},...] 
- - """ - content = [] - wsse = self.options().wsse - if wsse is not None: - content.append(wsse.xml()) - headers = self.options().soapheaders - if not isinstance(headers, (tuple, list, dict)): - headers = (headers,) - elif not headers: - return content - pts = self.headpart_types(method) - if isinstance(headers, (tuple, list)): - n = 0 - for header in headers: - if isinstance(header, Element): - content.append(deepcopy(header)) - continue - if len(pts) == n: - break - h = self.mkheader(method, pts[n], header) - ns = pts[n][1].namespace("ns0") - h.setPrefix(ns[0], ns[1]) - content.append(h) - n += 1 - else: - for pt in pts: - header = headers.get(pt[0]) - if header is None: - continue - h = self.mkheader(method, pt, header) - ns = pt[1].namespace("ns0") - h.setPrefix(ns[0], ns[1]) - content.append(h) - return content - - def replycontent(self, method, body): - """ - Get the reply body content. - - @param method: A service method. - @type method: I{service.Method} - @param body: The SOAP body. - @type body: L{Element} - @return: The body content. - @rtype: [L{Element},...] - - """ - raise Exception("not implemented") - - def body(self, content): - """ - Build the B{} for a SOAP outbound message. - - @param content: The body content. - @type content: L{Element} - @return: The SOAP body fragment. - @rtype: L{Element} - - """ - body = Element("Body", ns=envns) - body.append(content) - return body - - def bodypart_types(self, method, input=True): - """ - Get a list of I{parameter definitions} (pdefs) defined for the - specified method. - - An input I{pdef} is a (I{name}, L{xsd.sxbase.SchemaObject}) tuple, - while an output I{pdef} is a L{xsd.sxbase.SchemaObject}. - - @param method: A service method. - @type method: I{service.Method} - @param input: Defines input/output message. - @type input: boolean - @return: A list of parameter definitions - @rtype: [I{pdef},...] 
- - """ - if input: - parts = method.soap.input.body.parts - else: - parts = method.soap.output.body.parts - return [self.__part_type(p, input) for p in parts] - - def headpart_types(self, method, input=True): - """ - Get a list of header I{parameter definitions} (pdefs) defined for the - specified method. - - An input I{pdef} is a (I{name}, L{xsd.sxbase.SchemaObject}) tuple, - while an output I{pdef} is a L{xsd.sxbase.SchemaObject}. - - @param method: A service method. - @type method: I{service.Method} - @param input: Defines input/output message. - @type input: boolean - @return: A list of parameter definitions - @rtype: [I{pdef},...] - - """ - if input: - headers = method.soap.input.headers - else: - headers = method.soap.output.headers - return [self.__part_type(h.part, input) for h in headers] - - def returned_types(self, method): - """ - Get the I{method} return value type(s). - - @param method: A service method. - @type method: I{service.Method} - @return: Method return value type. - @rtype: [L{xsd.sxbase.SchemaObject},...] - - """ - return self.bodypart_types(method, input=False) - - def __part_type(self, part, input): - """ - Get a I{parameter definition} (pdef) defined for a given body or header - message part. - - An input I{pdef} is a (I{name}, L{xsd.sxbase.SchemaObject}) tuple, - while an output I{pdef} is a L{xsd.sxbase.SchemaObject}. - - @param part: A service method input or output part. - @type part: I{suds.wsdl.Part} - @param input: Defines input/output message. - @type input: boolean - @return: A list of parameter definitions - @rtype: [I{pdef},...] 
- - """ - if part.element is None: - query = TypeQuery(part.type) - else: - query = ElementQuery(part.element) - part_type = query.execute(self.schema()) - if part_type is None: - raise TypeNotFound(query.ref) - if part.type is not None: - part_type = PartElement(part.name, part_type) - if not input: - return part_type - if part_type.name is None: - return part.name, part_type - return part_type.name, part_type - - -class PartElement(SchemaElement): - """ - Message part referencing an XSD type and thus acting like an XSD element. - - @ivar resolved: The part type. - @type resolved: L{suds.xsd.sxbase.SchemaObject} - - """ - - def __init__(self, name, resolved): - """ - @param name: The part name. - @type name: str - @param resolved: The part type. - @type resolved: L{suds.xsd.sxbase.SchemaObject} - - """ - root = Element("element", ns=Namespace.xsdns) - SchemaElement.__init__(self, resolved.schema, root) - self.__resolved = resolved - self.name = name - self.form_qualified = False - - def implany(self): - pass - - def optional(self): - return True - - def namespace(self, prefix=None): - return Namespace.default - - def resolve(self, nobuiltin=False): - if nobuiltin and self.__resolved.builtin(): - return self - return self.__resolved diff --git a/libs_crutch/contrib/suds/bindings/document.py b/libs_crutch/contrib/suds/bindings/document.py deleted file mode 100755 index 825a677..0000000 --- a/libs_crutch/contrib/suds/bindings/document.py +++ /dev/null @@ -1,143 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Classes for the (WS) SOAP I{document/literal} binding. - -""" - -from suds import * -from suds.argparser import parse_args -from suds.bindings.binding import Binding -from suds.sax.element import Element - - -class Document(Binding): - """ - The document/literal style. Literal is the only (@use) supported since - document/encoded is pretty much dead. - - Although the SOAP specification supports multiple documents within the SOAP - , it is very uncommon. As such, suds library supports presenting an - I{RPC} view of service methods defined with only a single document - parameter. To support the complete specification, service methods defined - with multiple documents (multiple message parts), are still presented using - a full I{document} view. - - More detailed description: - - An interface is considered I{wrapped} if: - - There is exactly one message part in that interface. - - The message part resolves to an element of a non-builtin type. - Otherwise it is considered I{bare}. - - I{Bare} interface is interpreted directly as specified in the WSDL schema, - with each message part represented by a single parameter in the suds - library web service operation proxy interface (input or output). - - I{Wrapped} interface is interpreted without the external wrapping document - structure, with each of its contained elements passed through suds - library's web service operation proxy interface (input or output) - individually instead of as a single I{document} object. 
- - """ - def bodycontent(self, method, args, kwargs): - wrapped = method.soap.input.body.wrapped - if wrapped: - pts = self.bodypart_types(method) - root = self.document(pts[0]) - else: - root = [] - - def add_param(param_name, param_type, in_choice_context, value): - """ - Construct request data for the given input parameter. - - Called by our argument parser for every input parameter, in order. - - A parameter's type is identified by its corresponding XSD schema - element. - - """ - # Do not construct request data for undefined input parameters - # defined inside a choice order indicator. An empty choice - # parameter can still be included in the constructed request by - # explicitly providing an empty string value for it. - #TODO: This functionality might be better placed inside the - # mkparam() function but to do that we would first need to better - # understand how different Binding subclasses in suds work and how - # they would be affected by this change. - if in_choice_context and value is None: - return - - # Construct request data for the current input parameter. - pdef = (param_name, param_type) - p = self.mkparam(method, pdef, value) - if p is None: - return - if not wrapped: - ns = param_type.namespace("ns0") - p.setPrefix(ns[0], ns[1]) - root.append(p) - - parse_args(method.name, self.param_defs(method), args, kwargs, - add_param, self.options().extraArgumentErrors) - - return root - - def replycontent(self, method, body): - if method.soap.output.body.wrapped: - return body[0].children - return body.children - - def document(self, wrapper): - """ - Get the document root. For I{document/literal}, this is the name of the - wrapper element qualified by the schema's target namespace. - - @param wrapper: The method name. - @type wrapper: L{xsd.sxbase.SchemaObject} - @return: A root element. 
- @rtype: L{Element} - - """ - tag = wrapper[1].name - ns = wrapper[1].namespace("ns0") - return Element(tag, ns=ns) - - def mkparam(self, method, pdef, object): - """ - Expand list parameters into individual parameters each with the type - information. This is because in document arrays are simply - multi-occurrence elements. - - """ - if isinstance(object, (list, tuple)): - return [self.mkparam(method, pdef, item) for item in object] - return super(Document, self).mkparam(method, pdef, object) - - def param_defs(self, method): - """Get parameter definitions for document literal.""" - pts = self.bodypart_types(method) - if not method.soap.input.body.wrapped: - return pts - pt = pts[0][1].resolve() - return [(c.name, c, a) for c, a in pt if not c.isattr()] - - def returned_types(self, method): - rts = super(Document, self).returned_types(method) - if not method.soap.output.body.wrapped: - return rts - return [child for child, ancestry in rts[0].resolve(nobuiltin=True)] diff --git a/libs_crutch/contrib/suds/bindings/multiref.py b/libs_crutch/contrib/suds/bindings/multiref.py deleted file mode 100755 index 52fa47a..0000000 --- a/libs_crutch/contrib/suds/bindings/multiref.py +++ /dev/null @@ -1,124 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides classes for handling soap multirefs. -""" - -from suds import * -from suds.sax.element import Element - - -soapenc = (None, 'http://schemas.xmlsoap.org/soap/encoding/') - -class MultiRef: - """ - Resolves and replaces multirefs. - @ivar nodes: A list of non-multiref nodes. - @type nodes: list - @ivar catalog: A dictionary of multiref nodes by id. - @type catalog: dict - """ - - def __init__(self): - self.nodes = [] - self.catalog = {} - - def process(self, body): - """ - Process the specified soap envelope body and replace I{multiref} node - references with the contents of the referenced node. - @param body: A soap envelope body node. - @type body: L{Element} - @return: The processed I{body} - @rtype: L{Element} - """ - self.nodes = [] - self.catalog = {} - self.build_catalog(body) - self.update(body) - body.children = self.nodes - return body - - def update(self, node): - """ - Update the specified I{node} by replacing the I{multiref} references with - the contents of the referenced nodes and remove the I{href} attribute. - @param node: A node to update. - @type node: L{Element} - @return: The updated node - @rtype: L{Element} - """ - self.replace_references(node) - for c in node.children: - self.update(c) - return node - - def replace_references(self, node): - """ - Replacing the I{multiref} references with the contents of the - referenced nodes and remove the I{href} attribute. Warning: since - the I{ref} is not cloned, - @param node: A node to update. 
- @type node: L{Element} - """ - href = node.getAttribute('href') - if href is None: - return - id = href.getValue() - ref = self.catalog.get(id) - if ref is None: - import logging - log = logging.getLogger(__name__) - log.error('soap multiref: %s, not-resolved', id) - return - node.append(ref.children) - node.setText(ref.getText()) - for a in ref.attributes: - if a.name != 'id': - node.append(a) - node.remove(href) - - def build_catalog(self, body): - """ - Create the I{catalog} of multiref nodes by id and the list of - non-multiref nodes. - @param body: A soap envelope body node. - @type body: L{Element} - """ - for child in body.children: - if self.soaproot(child): - self.nodes.append(child) - id = child.get('id') - if id is None: continue - key = '#%s' % id - self.catalog[key] = child - - def soaproot(self, node): - """ - Get whether the specified I{node} is a soap encoded root. - This is determined by examining @soapenc:root='1'. - The node is considered to be a root when the attribute - is not specified. - @param node: A node to evaluate. - @type node: L{Element} - @return: True if a soap encoded root. - @rtype: bool - """ - root = node.getAttribute('root', ns=soapenc) - if root is None: - return True - else: - return ( root.value == '1' ) diff --git a/libs_crutch/contrib/suds/bindings/rpc.py b/libs_crutch/contrib/suds/bindings/rpc.py deleted file mode 100755 index f0d5bd9..0000000 --- a/libs_crutch/contrib/suds/bindings/rpc.py +++ /dev/null @@ -1,91 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Classes for the (WS) SOAP I{rpc/literal} and I{rpc/encoded} bindings. - -""" - -from suds import * -from suds.mx.encoded import Encoded as MxEncoded -from suds.umx.encoded import Encoded as UmxEncoded -from suds.bindings.binding import Binding, envns -from suds.sax.element import Element - - -encns = ("SOAP-ENC", "http://schemas.xmlsoap.org/soap/encoding/") - - -class RPC(Binding): - """RPC/Literal binding style.""" - - def param_defs(self, method): - return self.bodypart_types(method) - - def envelope(self, header, body): - env = super(RPC, self).envelope(header, body) - env.addPrefix(encns[0], encns[1]) - env.set("%s:encodingStyle" % (envns[0],), encns[1]) - return env - - def bodycontent(self, method, args, kwargs): - n = 0 - root = self.method(method) - for pd in self.param_defs(method): - if n < len(args): - value = args[n] - else: - value = kwargs.get(pd[0]) - p = self.mkparam(method, pd, value) - if p is not None: - root.append(p) - n += 1 - return root - - def replycontent(self, method, body): - return body[0].children - - def method(self, method): - """ - Get the document root. For I{rpc/(literal|encoded)}, this is the name - of the method qualified by the schema tns. - - @param method: A service method. - @type method: I{service.Method} - @return: A root element. 
- @rtype: L{Element} - - """ - ns = method.soap.input.body.namespace - if ns[0] is None: - ns = ('ns0', ns[1]) - return Element(method.name, ns=ns) - - -class Encoded(RPC): - """RPC/Encoded (section 5) binding style.""" - - def marshaller(self): - return MxEncoded(self.schema()) - - def unmarshaller(self): - """ - Get the appropriate schema based XML decoder. - - @return: Typed unmarshaller. - @rtype: L{UmxTyped} - - """ - return UmxEncoded(self.schema()) diff --git a/libs_crutch/contrib/suds/builder.py b/libs_crutch/contrib/suds/builder.py deleted file mode 100755 index a3b0ef3..0000000 --- a/libs_crutch/contrib/suds/builder.py +++ /dev/null @@ -1,122 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The I{builder} module provides an wsdl/xsd defined types factory -""" - -from suds import * -from suds.sudsobject import Factory - - -class Builder: - """ Builder used to construct an object for types defined in the schema """ - - def __init__(self, resolver): - """ - @param resolver: A schema object name resolver. 
- @type resolver: L{resolver.Resolver} - """ - self.resolver = resolver - - def build(self, name): - """ build a an object for the specified typename as defined in the schema """ - if isinstance(name, str): - type = self.resolver.find(name) - if type is None: - raise TypeNotFound(name) - else: - type = name - cls = type.name - if type.mixed(): - data = Factory.property(cls) - else: - data = Factory.object(cls) - resolved = type.resolve() - md = data.__metadata__ - md.sxtype = resolved - md.ordering = self.ordering(resolved) - history = [] - self.add_attributes(data, resolved) - for child, ancestry in type.children(): - if self.skip_child(child, ancestry): - continue - - self.process(data, child, history[:]) - return data - - def process(self, data, type, history): - """ process the specified type then process its children """ - if type in history: - return - if type.enum(): - return - history.append(type) - resolved = type.resolve() - value = None - - - - if type.multi_occurrence(): - value = [] - else: - if len(resolved) > 0: - if resolved.mixed(): - value = Factory.property(resolved.name) - md = value.__metadata__ - md.sxtype = resolved - else: - value = Factory.object(resolved.name) - md = value.__metadata__ - md.sxtype = resolved - md.ordering = self.ordering(resolved) - - setattr(data, type.name, value if not type.optional() or type.multi_occurrence() else None) - if value is not None: - data = value - if not isinstance(data, list): - self.add_attributes(data, resolved) - for child, ancestry in resolved.children(): - if self.skip_child(child, ancestry): - continue - self.process(data, child, history[:]) - - def add_attributes(self, data, type): - """ add required attributes """ - for attr, ancestry in type.attributes(): - name = '_%s' % attr.name - value = attr.get_default() - setattr(data, name, value) - - def skip_child(self, child, ancestry): - """ get whether or not to skip the specified child """ - if child.any(): return True - for x in ancestry: - if 
x.choice(): - return True - return False - - def ordering(self, type): - """ get the ordering """ - result = [] - for child, ancestry in type.resolve(): - name = child.name - if child.name is None: - continue - if child.isattr(): - name = '_%s' % child.name - result.append(name) - return result diff --git a/libs_crutch/contrib/suds/cache.py b/libs_crutch/contrib/suds/cache.py deleted file mode 100755 index 8c9c8e5..0000000 --- a/libs_crutch/contrib/suds/cache.py +++ /dev/null @@ -1,334 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Basic caching classes. - -""" - -import suds -import suds.sax.element -import suds.sax.parser - -import datetime -import os -try: - import pickle as pickle -except Exception: - import pickle -import shutil -import tempfile - -from logging import getLogger -log = getLogger(__name__) - - -class Cache(object): - """An object cache.""" - - def get(self, id): - """ - Get an object from the cache by id. - - @param id: The object id. - @type id: str - @return: The object, else None. - @rtype: any - - """ - raise Exception("not-implemented") - - def put(self, id, object): - """ - Put an object into the cache. - - @param id: The object id. 
- @type id: str - @param object: The object to add. - @type object: any - - """ - raise Exception("not-implemented") - - def purge(self, id): - """ - Purge an object from the cache by id. - - @param id: A object id. - @type id: str - - """ - raise Exception("not-implemented") - - def clear(self): - """Clear all objects from the cache.""" - raise Exception("not-implemented") - - -class NoCache(Cache): - """The pass-through object cache.""" - - def get(self, id): - return - - def put(self, id, object): - pass - - -class FileCache(Cache): - """ - A file-based URL cache. - - @cvar fnprefix: The file name prefix. - @type fnprefix: str - @cvar remove_default_location_on_exit: Whether to remove the default cache - location on process exit (default=True). - @type remove_default_location_on_exit: bool - @ivar duration: The duration after which cached entries expire (0=never). - @type duration: datetime.timedelta - @ivar location: The cached file folder. - @type location: str - - """ - fnprefix = "suds" - __default_location = None - remove_default_location_on_exit = True - - def __init__(self, location=None, **duration): - """ - Initialized a new FileCache instance. - - If no cache location is specified, a temporary default location will be - used. Such default cache location will be shared by all FileCache - instances with no explicitly specified location within the same - process. The default cache location will be removed automatically on - process exit unless user sets the remove_default_location_on_exit - FileCache class attribute to False. - - @param location: The cached file folder. - @type location: str - @param duration: The duration after which cached entries expire - (default: 0=never). 
- @type duration: keyword arguments for datetime.timedelta constructor - - """ - if location is None: - location = self.__get_default_location() - self.location = location - self.duration = datetime.timedelta(**duration) - self.__check_version() - - def clear(self): - for filename in os.listdir(self.location): - path = os.path.join(self.location, filename) - if os.path.isdir(path): - continue - if filename.startswith(self.fnprefix): - os.remove(path) - log.debug("deleted: %s", path) - - def fnsuffix(self): - """ - Get the file name suffix. - - @return: The suffix. - @rtype: str - - """ - return "gcf" - - def get(self, id): - try: - f = self._getf(id) - try: - return f.read() - finally: - f.close() - except Exception: - pass - - def purge(self, id): - filename = self.__filename(id) - try: - os.remove(filename) - except Exception: - pass - - def put(self, id, data): - try: - filename = self.__filename(id) - f = self.__open(filename, "wb") - try: - f.write(data) - finally: - f.close() - return data - except Exception: - log.debug(id, exc_info=1) - return data - - def _getf(self, id): - """Open a cached file with the given id for reading.""" - try: - filename = self.__filename(id) - self.__remove_if_expired(filename) - return self.__open(filename, "rb") - except Exception: - pass - - def __check_version(self): - path = os.path.join(self.location, "version") - try: - f = self.__open(path) - try: - version = f.read() - finally: - f.close() - if version != suds.__version__: - raise Exception() - except Exception: - self.clear() - f = self.__open(path, "w") - try: - f.write(suds.__version__) - finally: - f.close() - - def __filename(self, id): - """Return the cache file name for an entry with a given id.""" - suffix = self.fnsuffix() - filename = "%s-%s.%s" % (self.fnprefix, id, suffix) - return os.path.join(self.location, filename) - - @staticmethod - def __get_default_location(): - """ - Returns the current process's default cache location folder. 
- - The folder is determined lazily on first call. - - """ - if not FileCache.__default_location: - tmp = tempfile.mkdtemp("suds-default-cache") - FileCache.__default_location = tmp - import atexit - atexit.register(FileCache.__remove_default_location) - return FileCache.__default_location - - def __mktmp(self): - """Create the I{location} folder if it does not already exist.""" - try: - if not os.path.isdir(self.location): - os.makedirs(self.location) - except Exception: - log.debug(self.location, exc_info=1) - return self - - def __open(self, filename, *args): - """Open cache file making sure the I{location} folder is created.""" - self.__mktmp() - return open(filename, *args) - - @staticmethod - def __remove_default_location(): - """ - Removes the default cache location folder. - - This removal may be disabled by setting the - remove_default_location_on_exit FileCache class attribute to False. - - """ - if FileCache.remove_default_location_on_exit: - # We must not load shutil here on-demand as under some - # circumstances this may cause the shutil.rmtree() operation to - # fail due to not having some internal module loaded. E.g. this - # happens if you run the project's test suite using the setup.py - # test command on Python 2.4.x. - shutil.rmtree(FileCache.__default_location, ignore_errors=True) - - def __remove_if_expired(self, filename): - """ - Remove a cached file entry if it expired. - - @param filename: The file name. 
- @type filename: str - - """ - if not self.duration: - return - created = datetime.datetime.fromtimestamp(os.path.getctime(filename)) - expired = created + self.duration - if expired < datetime.datetime.now(): - os.remove(filename) - log.debug("%s expired, deleted", filename) - - -class DocumentCache(FileCache): - """XML document file cache.""" - - def fnsuffix(self): - return "xml" - - def get(self, id): - fp = None - try: - fp = self._getf(id) - if fp is not None: - p = suds.sax.parser.Parser() - cached = p.parse(fp) - fp.close() - return cached - except Exception: - if fp is not None: - fp.close() - self.purge(id) - - def put(self, id, object): - if isinstance(object, - (suds.sax.document.Document, suds.sax.element.Element)): - super(DocumentCache, self).put(id, suds.byte_str(str(object))) - return object - - -class ObjectCache(FileCache): - """ - Pickled object file cache. - - @cvar protocol: The pickling protocol. - @type protocol: int - - """ - protocol = 2 - - def fnsuffix(self): - return "px" - - def get(self, id): - fp = None - try: - fp = self._getf(id) - if fp is not None: - cached = pickle.load(fp) - fp.close() - return cached - except Exception: - if fp is not None: - fp.close() - self.purge(id) - - def put(self, id, object): - data = pickle.dumps(object, self.protocol) - super(ObjectCache, self).put(id, data) - return object diff --git a/libs_crutch/contrib/suds/client.py b/libs_crutch/contrib/suds/client.py deleted file mode 100755 index 300355f..0000000 --- a/libs_crutch/contrib/suds/client.py +++ /dev/null @@ -1,950 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. 
-# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Service proxy implementation providing access to web services. - -""" - -import suds -from suds import * -import suds.bindings.binding -from suds.builder import Builder -import suds.cache -import suds.metrics as metrics -from suds.options import Options -from suds.plugin import PluginContainer -from suds.properties import Unskin -from suds.reader import DefinitionsReader -from suds.resolver import PathResolver -from suds.sax.document import Document -import suds.sax.parser -from suds.servicedefinition import ServiceDefinition -import suds.transport -import suds.transport.https -from suds.umx.basic import Basic as UmxBasic -from suds.wsdl import Definitions -from . import sudsobject - -from http.cookiejar import CookieJar -from copy import deepcopy -import http.client - -from logging import getLogger -log = getLogger(__name__) - - -class Client(UnicodeMixin): - """ - A lightweight web service client. - - @ivar wsdl: The WSDL object. - @type wsdl:L{Definitions} - @ivar service: The service proxy used to invoke operations. - @type service: L{Service} - @ivar factory: The factory used to create objects. - @type factory: L{Factory} - @ivar sd: The service definition - @type sd: L{ServiceDefinition} - - """ - - @classmethod - def items(cls, sobject): - """ - Extract I{items} from a suds object. - - Much like the items() method works on I{dict}. 
- - @param sobject: A suds object - @type sobject: L{Object} - @return: A list of items contained in I{sobject}. - @rtype: [(key, value),...] - - """ - return sudsobject.items(sobject) - - @classmethod - def dict(cls, sobject): - """ - Convert a sudsobject into a dictionary. - - @param sobject: A suds object - @type sobject: L{Object} - @return: A dictionary of items contained in I{sobject}. - @rtype: dict - - """ - return sudsobject.asdict(sobject) - - @classmethod - def metadata(cls, sobject): - """ - Extract the metadata from a suds object. - - @param sobject: A suds object - @type sobject: L{Object} - @return: The object's metadata - @rtype: L{sudsobject.Metadata} - - """ - return sobject.__metadata__ - - def __init__(self, url, **kwargs): - """ - @param url: The URL for the WSDL. - @type url: str - @param kwargs: keyword arguments. - @see: L{Options} - - """ - options = Options() - options.transport = suds.transport.https.HttpAuthenticated() - self.options = options - if "cache" not in kwargs: - kwargs["cache"] = suds.cache.ObjectCache(days=1) - self.set_options(**kwargs) - reader = DefinitionsReader(options, Definitions) - self.wsdl = reader.open(url) - plugins = PluginContainer(options.plugins) - plugins.init.initialized(wsdl=self.wsdl) - self.factory = Factory(self.wsdl) - self.service = ServiceSelector(self, self.wsdl.services) - self.sd = [] - for s in self.wsdl.services: - sd = ServiceDefinition(self.wsdl, s) - self.sd.append(sd) - - def set_options(self, **kwargs): - """ - Set options. - - @param kwargs: keyword arguments. - @see: L{Options} - - """ - p = Unskin(self.options) - p.update(kwargs) - - def add_prefix(self, prefix, uri): - """ - Add I{static} mapping of an XML namespace prefix to a namespace. - - Useful for cases when a WSDL and referenced XSD schemas make heavy use - of namespaces and those namespaces are subject to change. - - @param prefix: An XML namespace prefix. - @type prefix: str - @param uri: An XML namespace URI. 
- @type uri: str - @raise Exception: prefix already mapped. - - """ - root = self.wsdl.root - mapped = root.resolvePrefix(prefix, None) - if mapped is None: - root.addPrefix(prefix, uri) - return - if mapped[1] != uri: - raise Exception('"%s" already mapped as "%s"' % (prefix, mapped)) - - def clone(self): - """ - Get a shallow clone of this object. - - The clone only shares the WSDL. All other attributes are unique to the - cloned object including options. - - @return: A shallow clone. - @rtype: L{Client} - - """ - class Uninitialized(Client): - def __init__(self): - pass - clone = Uninitialized() - clone.options = Options() - cp = Unskin(clone.options) - mp = Unskin(self.options) - cp.update(deepcopy(mp)) - clone.wsdl = self.wsdl - clone.factory = self.factory - clone.service = ServiceSelector(clone, self.wsdl.services) - clone.sd = self.sd - return clone - - def __unicode__(self): - s = ["\n"] - s.append("Suds ( https://fedorahosted.org/suds/ )") - s.append(" version: %s" % (suds.__version__,)) - if suds.__build__: - s.append(" build: %s" % (suds.__build__,)) - for sd in self.sd: - s.append("\n\n%s" % (str(sd),)) - return "".join(s) - - -class Factory: - """ - A factory for instantiating types defined in the WSDL. - - @ivar resolver: A schema type resolver. - @type resolver: L{PathResolver} - @ivar builder: A schema object builder. - @type builder: L{Builder} - - """ - - def __init__(self, wsdl): - """ - @param wsdl: A schema object. - @type wsdl: L{wsdl.Definitions} - - """ - self.wsdl = wsdl - self.resolver = PathResolver(wsdl) - self.builder = Builder(self.resolver) - - def create(self, name): - """ - Create a WSDL type by name. - - @param name: The name of a type defined in the WSDL. - @type name: str - @return: The requested object. 
- @rtype: L{Object} - - """ - timer = metrics.Timer() - timer.start() - type = self.resolver.find(name) - if type is None: - raise TypeNotFound(name) - if type.enum(): - result = sudsobject.Factory.object(name) - for e, a in type.children(): - setattr(result, e.name, e.name) - else: - try: - result = self.builder.build(type) - except Exception as e: - log.error("create '%s' failed", name, exc_info=True) - raise BuildError(name, e) - timer.stop() - metrics.log.debug("%s created: %s", name, timer) - return result - - def separator(self, ps): - """ - Set the path separator. - - @param ps: The new path separator. - @type ps: char - - """ - self.resolver = PathResolver(self.wsdl, ps) - - -class ServiceSelector: - """ - The B{service} selector is used to select a web service. - - Most WSDLs only define a single service in which case access by subscript - is passed through to a L{PortSelector}. This is also the behavior when a - I{default} service has been specified. In cases where multiple services - have been defined and no default has been specified, the service is found - by name (or index) and a L{PortSelector} for the service is returned. In - all cases, attribute access is forwarded to the L{PortSelector} for either - the I{first} service or the I{default} service (when specified). - - @ivar __client: A suds client. - @type __client: L{Client} - @ivar __services: A list of I{WSDL} services. - @type __services: list - - """ - def __init__(self, client, services): - """ - @param client: A suds client. - @type client: L{Client} - @param services: A list of I{WSDL} services. - @type services: list - - """ - self.__client = client - self.__services = services - - def __getattr__(self, name): - """ - Attribute access is forwarded to the L{PortSelector}. - - Uses the I{default} service if specified or the I{first} service - otherwise. - - @param name: Method name. - @type name: str - @return: A L{PortSelector}. - @rtype: L{PortSelector}. 
- - """ - default = self.__ds() - if default is None: - port = self.__find(0) - else: - port = default - return getattr(port, name) - - def __getitem__(self, name): - """ - Provides I{service} selection by name (string) or index (integer). - - In cases where only a single service is defined or a I{default} has - been specified, the request is forwarded to the L{PortSelector}. - - @param name: The name (or index) of a service. - @type name: int|str - @return: A L{PortSelector} for the specified service. - @rtype: L{PortSelector}. - - """ - if len(self.__services) == 1: - port = self.__find(0) - return port[name] - default = self.__ds() - if default is not None: - port = default - return port[name] - return self.__find(name) - - def __find(self, name): - """ - Find a I{service} by name (string) or index (integer). - - @param name: The name (or index) of a service. - @type name: int|str - @return: A L{PortSelector} for the found service. - @rtype: L{PortSelector}. - - """ - service = None - if not self.__services: - raise Exception("No services defined") - if isinstance(name, int): - try: - service = self.__services[name] - name = service.name - except IndexError: - raise ServiceNotFound("at [%d]" % (name,)) - else: - for s in self.__services: - if name == s.name: - service = s - break - if service is None: - raise ServiceNotFound(name) - return PortSelector(self.__client, service.ports, name) - - def __ds(self): - """ - Get the I{default} service if defined in the I{options}. - - @return: A L{PortSelector} for the I{default} service. - @rtype: L{PortSelector}. - - """ - ds = self.__client.options.service - if ds is not None: - return self.__find(ds) - - -class PortSelector: - """ - The B{port} selector is used to select a I{web service} B{port}. - - In cases where multiple ports have been defined and no default has been - specified, the port is found by name (or index) and a L{MethodSelector} for - the port is returned. 
In all cases, attribute access is forwarded to the - L{MethodSelector} for either the I{first} port or the I{default} port (when - specified). - - @ivar __client: A suds client. - @type __client: L{Client} - @ivar __ports: A list of I{service} ports. - @type __ports: list - @ivar __qn: The I{qualified} name of the port (used for logging). - @type __qn: str - - """ - def __init__(self, client, ports, qn): - """ - @param client: A suds client. - @type client: L{Client} - @param ports: A list of I{service} ports. - @type ports: list - @param qn: The name of the service. - @type qn: str - - """ - self.__client = client - self.__ports = ports - self.__qn = qn - - def __getattr__(self, name): - """ - Attribute access is forwarded to the L{MethodSelector}. - - Uses the I{default} port when specified or the I{first} port otherwise. - - @param name: The name of a method. - @type name: str - @return: A L{MethodSelector}. - @rtype: L{MethodSelector}. - - """ - default = self.__dp() - if default is None: - m = self.__find(0) - else: - m = default - return getattr(m, name) - - def __getitem__(self, name): - """ - Provides I{port} selection by name (string) or index (integer). - - In cases where only a single port is defined or a I{default} has been - specified, the request is forwarded to the L{MethodSelector}. - - @param name: The name (or index) of a port. - @type name: int|str - @return: A L{MethodSelector} for the specified port. - @rtype: L{MethodSelector}. - - """ - default = self.__dp() - if default is None: - return self.__find(name) - return default - - def __find(self, name): - """ - Find a I{port} by name (string) or index (integer). - - @param name: The name (or index) of a port. - @type name: int|str - @return: A L{MethodSelector} for the found port. - @rtype: L{MethodSelector}. 
- - """ - port = None - if not self.__ports: - raise Exception("No ports defined: %s" % (self.__qn,)) - if isinstance(name, int): - qn = "%s[%d]" % (self.__qn, name) - try: - port = self.__ports[name] - except IndexError: - raise PortNotFound(qn) - else: - qn = ".".join((self.__qn, name)) - for p in self.__ports: - if name == p.name: - port = p - break - if port is None: - raise PortNotFound(qn) - qn = ".".join((self.__qn, port.name)) - return MethodSelector(self.__client, port.methods, qn) - - def __dp(self): - """ - Get the I{default} port if defined in the I{options}. - - @return: A L{MethodSelector} for the I{default} port. - @rtype: L{MethodSelector}. - - """ - dp = self.__client.options.port - if dp is not None: - return self.__find(dp) - - -class MethodSelector: - """ - The B{method} selector is used to select a B{method} by name. - - @ivar __client: A suds client. - @type __client: L{Client} - @ivar __methods: A dictionary of methods. - @type __methods: dict - @ivar __qn: The I{qualified} name of the method (used for logging). - @type __qn: str - - """ - def __init__(self, client, methods, qn): - """ - @param client: A suds client. - @type client: L{Client} - @param methods: A dictionary of methods. - @type methods: dict - @param qn: The I{qualified} name of the port. - @type qn: str - - """ - self.__client = client - self.__methods = methods - self.__qn = qn - - def __getattr__(self, name): - """ - Get a method by name and return it in an I{execution wrapper}. - - @param name: The name of a method. - @type name: str - @return: An I{execution wrapper} for the specified method name. - @rtype: L{Method} - - """ - return self[name] - - def __getitem__(self, name): - """ - Get a method by name and return it in an I{execution wrapper}. - - @param name: The name of a method. - @type name: str - @return: An I{execution wrapper} for the specified method name. 
- @rtype: L{Method} - - """ - m = self.__methods.get(name) - if m is None: - qn = ".".join((self.__qn, name)) - raise MethodNotFound(qn) - return Method(self.__client, m) - - -class Method: - """ - The I{method} (namespace) object. - - @ivar client: A client object. - @type client: L{Client} - @ivar method: A I{WSDL} method. - @type I{raw} Method. - - """ - - def __init__(self, client, method): - """ - @param client: A client object. - @type client: L{Client} - @param method: A I{raw} method. - @type I{raw} Method. - - """ - self.client = client - self.method = method - - def __call__(self, *args, **kwargs): - """Invoke the method.""" - clientclass = self.clientclass(kwargs) - client = clientclass(self.client, self.method) - try: - return client.invoke(args, kwargs) - except WebFault as e: - if self.faults(): - raise - return http.client.INTERNAL_SERVER_ERROR, e - - def faults(self): - """Get faults option.""" - return self.client.options.faults - - def clientclass(self, kwargs): - """Get SOAP client class.""" - if _SimClient.simulation(kwargs): - return _SimClient - return _SoapClient - - -class RequestContext: - """ - A request context. - - Returned by a suds Client when invoking a web service operation with the - ``nosend`` enabled. Allows the caller to take care of sending the request - himself and return back the reply data for further processing. - - @ivar envelope: The SOAP request envelope. - @type envelope: I{bytes} - - """ - - def __init__(self, process_reply, envelope): - """ - @param process_reply: A callback for processing a user defined reply. - @type process_reply: I{callable} - @param envelope: The SOAP request envelope. - @type envelope: I{bytes} - - """ - self.__process_reply = process_reply - self.envelope = envelope - - def process_reply(self, reply, status=None, description=None): - """ - Re-entry for processing a successful reply. 
- - Depending on how the ``retxml`` option is set, may return the SOAP - reply XML or process it and return the Python object representing the - returned value. - - @param reply: The SOAP reply envelope. - @type reply: I{bytes} - @param status: The HTTP status code. - @type status: int - @param description: Additional status description. - @type description: I{bytes} - @return: The invoked web service operation return value. - @rtype: I{builtin}|I{subclass of} L{Object}|I{bytes}|I{None} - - """ - return self.__process_reply(reply, status, description) - - -class _SoapClient: - """ - An internal lightweight SOAP based web service operation client. - - Each instance is constructed for specific web service operation and knows - how to: - - Construct a SOAP request for it. - - Transport a SOAP request for it using a configured transport. - - Receive a SOAP reply using a configured transport. - - Process the received SOAP reply. - - Depending on the given suds options, may do all the tasks listed above or - may stop the process at an earlier point and return some intermediate - result, e.g. the constructed SOAP request or the raw received SOAP reply. - See the invoke() method for more detailed information. - - @ivar service: The target method. - @type service: L{Service} - @ivar method: A target method. - @type method: L{Method} - @ivar options: A dictonary of options. - @type options: dict - @ivar cookiejar: A cookie jar. - @type cookiejar: libcookie.CookieJar - - """ - - TIMEOUT_ARGUMENT = "__timeout" - - def __init__(self, client, method): - """ - @param client: A suds client. - @type client: L{Client} - @param method: A target method. - @type method: L{Method} - - """ - self.client = client - self.method = method - self.options = client.options - self.cookiejar = CookieJar() - - def invoke(self, args, kwargs): - """ - Invoke a specified web service method. 
- - Depending on how the ``nosend`` & ``retxml`` options are set, may do - one of the following: - * Return a constructed web service operation SOAP request without - sending it to the web service. - * Invoke the web service operation and return its SOAP reply XML. - * Invoke the web service operation, process its results and return - the Python object representing the returned value. - - When returning a SOAP request, the request is wrapped inside a - RequestContext object allowing the user to acquire a corresponding SOAP - reply himself and then pass it back to suds for further processing. - - Constructed request data is automatically processed using registered - plugins and serialized into a byte-string. Exact request XML formatting - may be affected by the ``prettyxml`` suds option. - - @param args: A list of args for the method invoked. - @type args: list|tuple - @param kwargs: Named (keyword) args for the method invoked. - @type kwargs: dict - @return: SOAP request, SOAP reply or a web service return value. - @rtype: L{RequestContext}|I{builtin}|I{subclass of} L{Object}|I{bytes}| - I{None} - - """ - timer = metrics.Timer() - timer.start() - binding = self.method.binding.input - timeout = kwargs.pop(_SoapClient.TIMEOUT_ARGUMENT, None) - soapenv = binding.get_message(self.method, args, kwargs) - timer.stop() - method_name = self.method.name - metrics.log.debug("message for '%s' created: %s", method_name, timer) - timer.start() - result = self.send(soapenv, timeout=timeout) - timer.stop() - metrics.log.debug("method '%s' invoked: %s", method_name, timer) - return result - - def send(self, soapenv, timeout=None): - """ - Send SOAP message. - - Depending on how the ``nosend`` & ``retxml`` options are set, may do - one of the following: - * Return a constructed web service operation request without sending - it to the web service. - * Invoke the web service operation and return its SOAP reply XML. 
- * Invoke the web service operation, process its results and return - the Python object representing the returned value. - - @param soapenv: A SOAP envelope to send. - @type soapenv: L{Document} - @return: SOAP request, SOAP reply or a web service return value. - @rtype: L{RequestContext}|I{builtin}|I{subclass of} L{Object}|I{bytes}| - I{None} - - """ - location = self.__location() - log.debug("sending to (%s)\nmessage:\n%s", location, soapenv) - plugins = PluginContainer(self.options.plugins) - plugins.message.marshalled(envelope=soapenv.root()) - if self.options.prettyxml: - soapenv = soapenv.str() - else: - soapenv = soapenv.plain() - soapenv = soapenv.encode("utf-8") - ctx = plugins.message.sending(envelope=soapenv) - soapenv = ctx.envelope - if self.options.nosend: - return RequestContext(self.process_reply, soapenv) - request = suds.transport.Request(location, soapenv, timeout) - request.headers = self.__headers() - try: - timer = metrics.Timer() - timer.start() - reply = self.options.transport.send(request) - timer.stop() - metrics.log.debug("waited %s on server reply", timer) - except suds.transport.TransportError as e: - content = e.fp and e.fp.read() or "" - return self.process_reply(content, e.httpcode, tostr(e)) - return self.process_reply(reply.message, None, None) - - def process_reply(self, reply, status, description): - """ - Process a web service operation SOAP reply. - - Depending on how the ``retxml`` option is set, may return the SOAP - reply XML or process it and return the Python object representing the - returned value. - - @param reply: The SOAP reply envelope. - @type reply: I{bytes} - @param status: The HTTP status code (None indicates httplib.OK). - @type status: int|I{None} - @param description: Additional status description. - @type description: str - @return: The invoked web service operation return value. 
- @rtype: I{builtin}|I{subclass of} L{Object}|I{bytes}|I{None} - - """ - if status is None: - status = http.client.OK - debug_message = "Reply HTTP status - %d" % (status,) - if status in (http.client.ACCEPTED, http.client.NO_CONTENT): - log.debug(debug_message) - return - #TODO: Consider whether and how to allow plugins to handle error, - # httplib.ACCEPTED & httplib.NO_CONTENT replies as well as successful - # ones. - if status == http.client.OK: - log.debug("%s\n%s", debug_message, reply) - else: - log.debug("%s - %s\n%s", debug_message, description, reply) - - plugins = PluginContainer(self.options.plugins) - ctx = plugins.message.received(reply=reply) - reply = ctx.reply - - # SOAP standard states that SOAP errors must be accompanied by HTTP - # status code 500 - internal server error: - # - # From SOAP 1.1 specification: - # In case of a SOAP error while processing the request, the SOAP HTTP - # server MUST issue an HTTP 500 "Internal Server Error" response and - # include a SOAP message in the response containing a SOAP Fault - # element (see section 4.4) indicating the SOAP processing error. - # - # From WS-I Basic profile: - # An INSTANCE MUST use a "500 Internal Server Error" HTTP status code - # if the response message is a SOAP Fault. - replyroot = None - if status in (http.client.OK, http.client.INTERNAL_SERVER_ERROR): - replyroot = _parse(reply) - plugins.message.parsed(reply=replyroot) - fault = self.__get_fault(replyroot) - if fault: - if status != http.client.INTERNAL_SERVER_ERROR: - log.warning("Web service reported a SOAP processing fault " - "using an unexpected HTTP status code %d. Reporting " - "as an internal server error.", status) - if self.options.faults: - raise WebFault(fault, replyroot) - return http.client.INTERNAL_SERVER_ERROR, fault - if status != http.client.OK: - if self.options.faults: - #TODO: Use a more specific exception class here. 
- raise Exception((status, description)) - return status, description - - if self.options.retxml: - return reply - - result = replyroot and self.method.binding.output.get_reply( - self.method, replyroot) - ctx = plugins.message.unmarshalled(reply=result) - result = ctx.reply - if self.options.faults: - return result - return http.client.OK, result - - def __get_fault(self, replyroot): - """ - Extract fault information from a SOAP reply. - - Returns an I{unmarshalled} fault L{Object} or None in case the given - XML document does not contain a SOAP element. - - @param replyroot: A SOAP reply message root XML element or None. - @type replyroot: L{Element}|I{None} - @return: A fault object. - @rtype: L{Object} - - """ - envns = suds.bindings.binding.envns - soapenv = replyroot and replyroot.getChild("Envelope", envns) - soapbody = soapenv and soapenv.getChild("Body", envns) - fault = soapbody and soapbody.getChild("Fault", envns) - return fault is not None and UmxBasic().process(fault) - - def __headers(self): - """ - Get HTTP headers for a HTTP/HTTPS SOAP request. - - @return: A dictionary of header/values. - @rtype: dict - - """ - action = self.method.soap.action - if isinstance(action, str): - action = action.encode("utf-8") - result = { - "Content-Type": "text/xml; charset=utf-8", - "SOAPAction": action} - result.update(**self.options.headers) - log.debug("headers = %s", result) - return result - - def __location(self): - """Returns the SOAP request's target location URL.""" - return Unskin(self.options).get("location", self.method.location) - - -class _SimClient(_SoapClient): - """ - Loopback _SoapClient used for SOAP request/reply simulation. - - Used when a web service operation is invoked with injected SOAP request or - reply data. 
- - """ - - __injkey = "__inject" - - @classmethod - def simulation(cls, kwargs): - """Get whether injected data has been specified in I{kwargs}.""" - return _SimClient.__injkey in kwargs - - def invoke(self, args, kwargs): - """ - Invoke a specified web service method. - - Uses an injected SOAP request/response instead of a regularly - constructed/received one. - - Depending on how the ``nosend`` & ``retxml`` options are set, may do - one of the following: - * Return a constructed web service operation request without sending - it to the web service. - * Invoke the web service operation and return its SOAP reply XML. - * Invoke the web service operation, process its results and return - the Python object representing the returned value. - - @param args: Positional arguments for the method invoked. - @type args: list|tuple - @param kwargs: Keyword arguments for the method invoked. - @type kwargs: dict - @return: SOAP request, SOAP reply or a web service return value. - @rtype: L{RequestContext}|I{builtin}|I{subclass of} L{Object}|I{bytes}| - I{None} - - """ - simulation = kwargs.pop(self.__injkey) - msg = simulation.get("msg") - if msg is not None: - assert msg.__class__ is suds.byte_str_class - return self.send(_parse(msg)) - msg = self.method.binding.input.get_message(self.method, args, kwargs) - log.debug("inject (simulated) send message:\n%s", msg) - reply = simulation.get("reply") - if reply is not None: - assert reply.__class__ is suds.byte_str_class - status = simulation.get("status") - description = simulation.get("description") - if description is None: - description = "injected reply" - return self.process_reply(reply, status, description) - raise Exception("reply or msg injection parameter expected") - - -def _parse(string): - """ - Parses given XML document content. - - Returns the resulting root XML element node or None if the given XML - content is empty. - - @param string: XML document content to parse. 
- @type string: I{bytes} - @return: Resulting root XML element node or None. - @rtype: L{Element}|I{None} - - """ - if string: - return suds.sax.parser.Parser().parse(string=string) diff --git a/libs_crutch/contrib/suds/metrics.py b/libs_crutch/contrib/suds/metrics.py deleted file mode 100755 index 9b15f18..0000000 --- a/libs_crutch/contrib/suds/metrics.py +++ /dev/null @@ -1,63 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The I{metrics} module defines classes and other resources -designed for collecting and reporting performance metrics. 
-""" - -import time -from suds import * -from math import modf - -from logging import getLogger -log = getLogger(__name__) - - -class Timer: - - def __init__(self): - self.started = 0 - self.stopped = 0 - - def start(self): - self.started = time.time() - self.stopped = 0 - return self - - def stop(self): - if self.started > 0: - self.stopped = time.time() - return self - - def duration(self): - return ( self.stopped - self.started ) - - def __str__(self): - if self.started == 0: - return 'not-running' - if self.started > 0 and self.stopped == 0: - return 'started: %d (running)' % self.started - duration = self.duration() - jmod = ( lambda m : (m[1], m[0]*1000) ) - if duration < 1: - ms = (duration*1000) - return '%d (ms)' % ms - if duration < 60: - m = modf(duration) - return '%d.%.3d (seconds)' % jmod(m) - m = modf(duration/60) - return '%d.%.3d (minutes)' % jmod(m) diff --git a/libs_crutch/contrib/suds/mx/__init__.py b/libs_crutch/contrib/suds/mx/__init__.py deleted file mode 100755 index 562c526..0000000 --- a/libs_crutch/contrib/suds/mx/__init__.py +++ /dev/null @@ -1,60 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides modules containing classes to support marshalling to XML. - -""" - -from suds.sudsobject import Object - - -class Content(Object): - """ - Marshaller content. - - @ivar tag: The content tag. - @type tag: str - @ivar value: The content's value. - @type value: I{any} - - """ - - extensions = [] - - def __init__(self, tag=None, value=None, **kwargs): - """ - @param tag: The content tag. - @type tag: str - @param value: The content's value. - @type value: I{any} - - """ - Object.__init__(self) - self.tag = tag - self.value = value - for k, v in kwargs.items(): - setattr(self, k, v) - - def __getattr__(self, name): - try: - return self.__dict__[name] - except KeyError: - pass - if name in self.extensions: - value = None - setattr(self, name, value) - return value - raise AttributeError("Content has no attribute %s" % (name,)) diff --git a/libs_crutch/contrib/suds/mx/appender.py b/libs_crutch/contrib/suds/mx/appender.py deleted file mode 100755 index 6190153..0000000 --- a/libs_crutch/contrib/suds/mx/appender.py +++ /dev/null @@ -1,282 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides appender classes for I{marshalling}. -""" - -from suds import * -from suds.mx import * -from suds.sudsobject import Object, Property -from suds.sax.element import Element -from suds.sax.text import Text - - -class Matcher: - """ - Appender matcher. - @ivar cls: A class object. - @type cls: I{classobj} - """ - - def __init__(self, cls): - """ - @param cls: A class object. - @type cls: I{classobj} - """ - self.cls = cls - - def __eq__(self, x): - if self.cls is None: - return x is None - return isinstance(x, self.cls) - - -class ContentAppender: - """ - Appender used to add content to marshalled objects. - @ivar default: The default appender. - @type default: L{Appender} - @ivar appenders: A I{table} of appenders mapped by class. - @type appenders: I{table} - """ - - def __init__(self, marshaller): - """ - @param marshaller: A marshaller. - @type marshaller: L{suds.mx.core.Core} - """ - self.default = PrimitiveAppender(marshaller) - self.appenders = ( - (Matcher(None), NoneAppender(marshaller)), - (Matcher(null), NoneAppender(marshaller)), - (Matcher(Property), PropertyAppender(marshaller)), - (Matcher(Object), ObjectAppender(marshaller)), - (Matcher(Element), ElementAppender(marshaller)), - (Matcher(Text), TextAppender(marshaller)), - (Matcher(list), ListAppender(marshaller)), - (Matcher(tuple), ListAppender(marshaller))) - - def append(self, parent, content): - """ - Select an appender and append the content to parent. - @param parent: A parent node. - @type parent: L{Element} - @param content: The content to append. - @type content: L{Content} - """ - appender = self.default - for matcher, candidate_appender in self.appenders: - if matcher == content.value: - appender = candidate_appender - break - appender.append(parent, content) - - -class Appender: - """ - An appender used by the marshaller to append content. - @ivar marshaller: A marshaller. 
- @type marshaller: L{suds.mx.core.Core} - """ - - def __init__(self, marshaller): - """ - @param marshaller: A marshaller. - @type marshaller: L{suds.mx.core.Core} - """ - self.marshaller = marshaller - - def node(self, content): - """ - Create and return an XML node that is qualified - using the I{type}. Also, make sure all referenced namespace - prefixes are declared. - @param content: The content for which processing has ended. - @type content: L{Object} - @return: A new node. - @rtype: L{Element} - """ - return self.marshaller.node(content) - - def setnil(self, node, content): - """ - Set the value of the I{node} to nill. - @param node: A I{nil} node. - @type node: L{Element} - @param content: The content for which processing has ended. - @type content: L{Object} - """ - self.marshaller.setnil(node, content) - - def setdefault(self, node, content): - """ - Set the value of the I{node} to a default value. - @param node: A I{nil} node. - @type node: L{Element} - @param content: The content for which processing has ended. - @type content: L{Object} - @return: The default. - """ - return self.marshaller.setdefault(node, content) - - def optional(self, content): - """ - Get whether the specified content is optional. - @param content: The content which to check. - @type content: L{Content} - """ - return self.marshaller.optional(content) - - def suspend(self, content): - """ - Notify I{marshaller} that appending this content has suspended. - @param content: The content for which processing has been suspended. - @type content: L{Object} - """ - self.marshaller.suspend(content) - - def resume(self, content): - """ - Notify I{marshaller} that appending this content has resumed. - @param content: The content for which processing has been resumed. - @type content: L{Object} - """ - self.marshaller.resume(content) - - def append(self, parent, content): - """ - Append the specified L{content} to the I{parent}. - @param content: The content to append. 
- @type content: L{Object} - """ - self.marshaller.append(parent, content) - - -class PrimitiveAppender(Appender): - """ - An appender for python I{primitive} types. - """ - - def append(self, parent, content): - if content.tag.startswith('_'): - attr = content.tag[1:] - value = tostr(content.value) - if value: - parent.set(attr, value) - else: - child = self.node(content) - child.setText(tostr(content.value)) - parent.append(child) - - -class NoneAppender(Appender): - """ - An appender for I{None} values. - """ - - def append(self, parent, content): - child = self.node(content) - default = self.setdefault(child, content) - if default is None: - self.setnil(child, content) - parent.append(child) - - -class PropertyAppender(Appender): - """ - A L{Property} appender. - """ - - def append(self, parent, content): - p = content.value - child = self.node(content) - child.setText(p.get()) - parent.append(child) - for item in list(p.items()): - cont = Content(tag=item[0], value=item[1]) - Appender.append(self, child, cont) - - -class ObjectAppender(Appender): - """ - An L{Object} appender. - """ - - def append(self, parent, content): - object = content.value - child = self.node(content) - parent.append(child) - for item in object: - cont = Content(tag=item[0], value=item[1]) - Appender.append(self, child, cont) - - -class ElementWrapper(Element): - """ - Element wrapper. - """ - - def __init__(self, content): - Element.__init__(self, content.name, content.parent) - self.__content = content - - def str(self, indent=0): - return self.__content.str(indent) - - -class ElementAppender(Appender): - """ - An appender for I{Element} types. - """ - - def append(self, parent, content): - if content.tag.startswith('_'): - raise Exception('raw XML not valid as attribute value') - child = ElementWrapper(content.value) - parent.append(child) - - -class ListAppender(Appender): - """ - A list/tuple appender. 
- """ - - def append(self, parent, content): - collection = content.value - if len(collection): - self.suspend(content) - for item in collection: - cont = Content(tag=content.tag, value=item) - Appender.append(self, parent, cont) - self.resume(content) - - -class TextAppender(Appender): - """ - An appender for I{Text} values. - """ - - def append(self, parent, content): - if content.tag.startswith('_'): - attr = content.tag[1:] - value = tostr(content.value) - if value: - parent.set(attr, value) - else: - child = self.node(content) - child.setText(content.value) - parent.append(child) diff --git a/libs_crutch/contrib/suds/mx/basic.py b/libs_crutch/contrib/suds/mx/basic.py deleted file mode 100755 index b2de161..0000000 --- a/libs_crutch/contrib/suds/mx/basic.py +++ /dev/null @@ -1,45 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides basic I{marshaller} classes. -""" - -from suds import * -from suds.mx import * -from suds.mx.core import Core - - -class Basic(Core): - """ - A I{basic} (untyped) marshaller. - """ - - def process(self, value, tag=None): - """ - Process (marshal) the tag with the specified value using the - optional type information. 
- @param value: The value (content) of the XML node. - @type value: (L{Object}|any) - @param tag: The (optional) tag name for the value. The default is - value.__class__.__name__ - @type tag: str - @return: An xml node. - @rtype: L{Element} - """ - content = Content(tag=tag, value=value) - result = Core.process(self, content) - return result diff --git a/libs_crutch/contrib/suds/mx/core.py b/libs_crutch/contrib/suds/mx/core.py deleted file mode 100755 index 5bcd73a..0000000 --- a/libs_crutch/contrib/suds/mx/core.py +++ /dev/null @@ -1,150 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides I{marshaller} core classes. -""" - -from suds import * -from suds.mx import * -from suds.mx.appender import ContentAppender -from suds.sax.document import Document - -from logging import getLogger -log = getLogger(__name__) - - -class Core: - """ - An I{abstract} marshaller. This class implement the core - functionality of the marshaller. - @ivar appender: A content appender. 
- @type appender: L{ContentAppender} - """ - - def __init__(self): - """ - """ - self.appender = ContentAppender(self) - - def process(self, content): - """ - Process (marshal) the tag with the specified value using the - optional type information. - @param content: The content to process. - @type content: L{Object} - """ - log.debug('processing:\n%s', content) - self.reset() - if content.tag is None: - content.tag = content.value.__class__.__name__ - document = Document() - self.append(document, content) - return document.root() - - def append(self, parent, content): - """ - Append the specified L{content} to the I{parent}. - @param parent: The parent node to append to. - @type parent: L{Element} - @param content: The content to append. - @type content: L{Object} - """ - log.debug('appending parent:\n%s\ncontent:\n%s', parent, content) - if self.start(content): - self.appender.append(parent, content) - self.end(parent, content) - - def reset(self): - """ - Reset the marshaller. - """ - pass - - def node(self, content): - """ - Create and return an XML node. - @param content: Content information for the new node. - @type content: L{Content} - @return: An element. - @rtype: L{Element} - """ - raise NotImplementedError - - def start(self, content): - """ - Appending this content has started. - @param content: The content for which processing has started. - @type content: L{Content} - @return: True to continue appending - @rtype: boolean - """ - return True - - def suspend(self, content): - """ - Appending this content has suspended. - @param content: The content for which processing has been suspended. - @type content: L{Content} - """ - pass - - def resume(self, content): - """ - Appending this content has resumed. - @param content: The content for which processing has been resumed. - @type content: L{Content} - """ - pass - - def end(self, parent, content): - """ - Appending this content has ended. - @param parent: The parent node ending. 
- @type parent: L{Element} - @param content: The content for which processing has ended. - @type content: L{Content} - """ - pass - - def setnil(self, node, content): - """ - Set the value of the I{node} to nill. - @param node: A I{nil} node. - @type node: L{Element} - @param content: The content to set nil. - @type content: L{Content} - """ - pass - - def setdefault(self, node, content): - """ - Set the value of the I{node} to a default value. - @param node: A I{nil} node. - @type node: L{Element} - @param content: The content to set the default value. - @type content: L{Content} - @return: The default. - """ - pass - - def optional(self, content): - """ - Get whether the specified content is optional. - @param content: The content which to check. - @type content: L{Content} - """ - return False diff --git a/libs_crutch/contrib/suds/mx/encoded.py b/libs_crutch/contrib/suds/mx/encoded.py deleted file mode 100755 index ec09536..0000000 --- a/libs_crutch/contrib/suds/mx/encoded.py +++ /dev/null @@ -1,131 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides encoded I{marshaller} classes. 
-""" - -from suds import * -from suds.mx import * -from suds.mx.literal import Literal -from suds.mx.typer import Typer -from suds.sudsobject import Factory, Object -from suds.xsd.query import TypeQuery - - -# -# Add encoded extensions -# aty = The soap (section 5) encoded array type. -# -Content.extensions.append('aty') - - -class Encoded(Literal): - """ - A SOAP section (5) encoding marshaller. - This marshaller supports rpc/encoded soap styles. - """ - - def start(self, content): - # - # For soap encoded arrays, the 'aty' (array type) information - # is extracted and added to the 'content'. Then, the content.value - # is replaced with an object containing an 'item=[]' attribute - # containing values that are 'typed' suds objects. - # - start = Literal.start(self, content) - if start and isinstance(content.value, (list,tuple)): - resolved = content.type.resolve() - for c in resolved: - if hasattr(c[0], 'aty'): - content.aty = (content.tag, c[0].aty) - self.cast(content) - break - return start - - def end(self, parent, content): - # - # For soap encoded arrays, the soapenc:arrayType attribute is - # added with proper type and size information. 
- # Eg: soapenc:arrayType="xs:int[3]" - # - Literal.end(self, parent, content) - if content.aty is None: - return - tag, aty = content.aty - ns0 = ('at0', aty[1]) - ns1 = ('at1', 'http://schemas.xmlsoap.org/soap/encoding/') - array = content.value.item - child = parent.getChild(tag) - child.addPrefix(ns0[0], ns0[1]) - child.addPrefix(ns1[0], ns1[1]) - name = '%s:arrayType' % ns1[0] - value = '%s:%s[%d]' % (ns0[0], aty[0], len(array)) - child.set(name, value) - - def encode(self, node, content): - if content.type.any(): - Typer.auto(node, content.value) - return - if content.real.any(): - Typer.auto(node, content.value) - return - ns = None - name = content.real.name - if self.xstq: - ns = content.real.namespace() - Typer.manual(node, name, ns) - - def cast(self, content): - """ - Cast the I{untyped} list items found in content I{value}. - Each items contained in the list is checked for XSD type information. - Items (values) that are I{untyped}, are replaced with suds objects and - type I{metadata} is added. - @param content: The content holding the collection. 
- @type content: L{Content} - @return: self - @rtype: L{Encoded} - """ - aty = content.aty[1] - resolved = content.type.resolve() - array = Factory.object(resolved.name) - array.item = [] - query = TypeQuery(aty) - ref = query.execute(self.schema) - if ref is None: - raise TypeNotFound(qref) - for x in content.value: - if isinstance(x, (list, tuple)): - array.item.append(x) - continue - if isinstance(x, Object): - md = x.__metadata__ - md.sxtype = ref - array.item.append(x) - continue - if isinstance(x, dict): - x = Factory.object(ref.name, x) - md = x.__metadata__ - md.sxtype = ref - array.item.append(x) - continue - x = Factory.property(ref.name, x) - md = x.__metadata__ - md.sxtype = ref - array.item.append(x) - content.value = array - return self diff --git a/libs_crutch/contrib/suds/mx/literal.py b/libs_crutch/contrib/suds/mx/literal.py deleted file mode 100755 index a7e5e38..0000000 --- a/libs_crutch/contrib/suds/mx/literal.py +++ /dev/null @@ -1,311 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides literal I{marshaller} classes. 
- -""" - -from suds import * -from suds.mx import * -from suds.mx.core import Core -from suds.mx.typer import Typer -from suds.resolver import Frame, GraphResolver -from suds.sax.element import Element -from suds.sudsobject import Factory - -from logging import getLogger -log = getLogger(__name__) - - -# add typed extensions -Content.extensions.append("type") # The expected xsd type -Content.extensions.append("real") # The 'true' XSD type -Content.extensions.append("ancestry") # The 'type' ancestry - - -class Typed(Core): - """ - A I{typed} marshaller. - - This marshaller is semi-typed as needed to support both I{document/literal} - and I{rpc/literal} SOAP message styles. - - @ivar schema: An XSD schema. - @type schema: L{xsd.schema.Schema} - @ivar resolver: A schema type resolver. - @type resolver: L{GraphResolver} - - """ - - def __init__(self, schema, xstq=True): - """ - @param schema: A schema object - @type schema: L{xsd.schema.Schema} - @param xstq: The B{x}ml B{s}chema B{t}ype B{q}ualified flag indicates - that the I{xsi:type} attribute values should be qualified by - namespace. - @type xstq: bool - - """ - Core.__init__(self) - self.schema = schema - self.xstq = xstq - self.resolver = GraphResolver(self.schema) - - def reset(self): - self.resolver.reset() - - def start(self, content): - """ - Start marshalling the 'content' by ensuring that both the 'content' - _and_ the resolver are primed with the XSD type information. The - 'content' value is both translated and sorted based on the XSD type. - Only values that are objects have their attributes sorted. 
- - """ - log.debug("starting content:\n%s", content) - if content.type is None: - name = content.tag - if name.startswith("_"): - name = "@" + name[1:] - content.type = self.resolver.find(name, content.value) - if content.type is None: - raise TypeNotFound(content.tag) - else: - known = None - if isinstance(content.value, Object): - known = self.resolver.known(content.value) - if known is None: - log.debug("object %s has no type information", - content.value) - known = content.type - frame = Frame(content.type, resolved=known) - self.resolver.push(frame) - frame = self.resolver.top() - content.real = frame.resolved - content.ancestry = frame.ancestry - self.translate(content) - self.sort(content) - if self.skip(content): - log.debug("skipping (optional) content:\n%s", content) - self.resolver.pop() - return False - return True - - def suspend(self, content): - """ - Suspend to process list content. - - Primarily, this involves popping the 'list' content off the resolver's - stack so its list items can be marshalled. - - """ - self.resolver.pop() - - def resume(self, content): - """ - Resume processing list content. - - To do this, we really need to simply push the 'list' content back onto - the resolver stack. - - """ - self.resolver.push(Frame(content.type)) - - def end(self, parent, content): - """ - End processing the content. - - Make sure the content ending matches the top of the resolver stack - since for list processing we play games with the resolver stack. - - """ - log.debug("ending content:\n%s", content) - current = self.resolver.top().type - if current != content.type: - raise Exception("content (end) mismatch: top=(%s) cont=(%s)" % ( - current, content)) - self.resolver.pop() - - def node(self, content): - """ - Create an XML node. - - The XML node is namespace qualified as defined by the corresponding - schema element. 
- - """ - ns = content.type.namespace() - if content.type.form_qualified: - node = Element(content.tag, ns=ns) - if ns[0]: - node.addPrefix(ns[0], ns[1]) - else: - node = Element(content.tag) - self.encode(node, content) - log.debug("created - node:\n%s", node) - return node - - def setnil(self, node, content): - """ - Set the 'node' nil only if the XSD type specifies that it is permitted. - - """ - if content.type.nillable: - node.setnil() - - def setdefault(self, node, content): - """Set the node to the default value specified by the XSD type.""" - default = content.type.default - if default is not None: - node.setText(default) - return default - - def optional(self, content): - if content.type.optional(): - return True - for a in content.ancestry: - if a.optional(): - return True - return False - - def encode(self, node, content): - """ - Add (SOAP) encoding information if needed. - - The encoding information is added only if the resolved type is derived - by extension. Furthermore, the xsi:type value is qualified by namespace - only if the content (tag) and referenced type are in different - namespaces. - - """ - if content.type.any(): - return - if not content.real.extension(): - return - if content.type.resolve() == content.real: - return - ns = None - name = content.real.name - if self.xstq: - ns = content.real.namespace("ns1") - Typer.manual(node, name, ns) - - def skip(self, content): - """ - Get whether to skip this I{content}. - - Should be skipped when the content is optional and value is either None - or an empty list. - - @param content: Content to skip. - @type content: L{Object} - @return: True if content is to be skipped. 
- @rtype: bool - - """ - if self.optional(content): - v = content.value - if v is None: - return True - if isinstance(v, (list, tuple)) and not v: - return True - return False - - def optional(self, content): - if content.type.optional(): - return True - for a in content.ancestry: - if a.optional(): - return True - return False - - def translate(self, content): - """ - Translate using the XSD type information. - - Python I{dict} is translated to a suds object. Most importantly, - primitive values are translated from python to XML types using the XSD - type. - - @param content: Content to translate. - @type content: L{Object} - @return: self - @rtype: L{Typed} - - """ - v = content.value - if v is None: - return - if isinstance(v, dict): - cls = content.real.name - content.value = Factory.object(cls, v) - md = content.value.__metadata__ - md.sxtype = content.type - return - v = content.real.translate(v, False) - content.value = v - return self - - def sort(self, content): - """ - Sort suds object attributes. - - The attributes are sorted based on the ordering defined in the XSD type - information. - - @param content: Content to sort. - @type content: L{Object} - @return: self - @rtype: L{Typed} - - """ - v = content.value - if isinstance(v, Object): - md = v.__metadata__ - md.ordering = self.ordering(content.real) - return self - - def ordering(self, type): - """ - Attribute ordering defined in the specified XSD type information. - - @param type: XSD type object. - @type type: L{SchemaObject} - @return: An ordered list of attribute names. - @rtype: list - - """ - result = [] - for child, ancestry in type.resolve(): - name = child.name - if child.name is None: - continue - if child.isattr(): - name = "_%s" % (child.name,) - result.append(name) - return result - - -class Literal(Typed): - """ - A I{literal} marshaller. - - This marshaller is semi-typed as needed to support both I{document/literal} - and I{rpc/literal} soap message styles. 
- - """ - pass diff --git a/libs_crutch/contrib/suds/mx/typer.py b/libs_crutch/contrib/suds/mx/typer.py deleted file mode 100755 index 49dc707..0000000 --- a/libs_crutch/contrib/suds/mx/typer.py +++ /dev/null @@ -1,126 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides XSD typing classes. - -""" - -from suds.sax import Namespace -from suds.sax.text import Text -from suds.sudsobject import Object - - -class Typer: - """ - Provides XML node typing as either automatic or manual. - - @cvar types: Class to XSD type mapping. - @type types: dict - - """ - - types = { - bool: ("boolean", Namespace.xsdns), - float: ("float", Namespace.xsdns), - int: ("int", Namespace.xsdns), - int: ("long", Namespace.xsdns), - str: ("string", Namespace.xsdns), - Text: ("string", Namespace.xsdns), - str: ("string", Namespace.xsdns)} - - @classmethod - def auto(cls, node, value=None): - """ - Automatically set the node's xsi:type attribute based on either - I{value}'s or the node text's class. When I{value} is an unmapped - class, the default type (xs:any) is set. - - @param node: XML node. - @type node: L{sax.element.Element} - @param value: Object that is or would be the node's text. 
- @type value: I{any} - @return: Specified node. - @rtype: L{sax.element.Element} - - """ - if value is None: - value = node.getText() - if isinstance(value, Object): - known = cls.known(value) - if known.name is None: - return node - tm = known.name, known.namespace() - else: - tm = cls.types.get(value.__class__, cls.types.get(str)) - cls.manual(node, *tm) - return node - - @classmethod - def manual(cls, node, tval, ns=None): - """ - Set the node's xsi:type attribute based on either I{value}'s or the - node text's class. Then adds the referenced prefix(s) to the node's - prefix mapping. - - @param node: XML node. - @type node: L{sax.element.Element} - @param tval: XSD schema type name. - @type tval: str - @param ns: I{tval} XML namespace. - @type ns: (prefix, URI) - @return: Specified node. - @rtype: L{sax.element.Element} - - """ - xta = ":".join((Namespace.xsins[0], "type")) - node.addPrefix(Namespace.xsins[0], Namespace.xsins[1]) - if ns is None: - node.set(xta, tval) - else: - ns = cls.genprefix(node, ns) - qname = ":".join((ns[0], tval)) - node.set(xta, qname) - node.addPrefix(ns[0], ns[1]) - return node - - @classmethod - def genprefix(cls, node, ns): - """ - Generate a prefix. - - @param node: XML node on which the prefix will be used. - @type node: L{sax.element.Element} - @param ns: Namespace needing a unique prefix. - @type ns: (prefix, URI) - @return: I{ns} with a new prefix. 
- @rtype: (prefix, URI) - - """ - for i in range(1, 1024): - prefix = "ns%d" % (i,) - uri = node.resolvePrefix(prefix, default=None) - if uri in (None, ns[1]): - return prefix, ns[1] - raise Exception("auto prefix, exhausted") - - @classmethod - def known(cls, object): - try: - md = object.__metadata__ - known = md.sxtype - return known - except Exception: - pass diff --git a/libs_crutch/contrib/suds/options.py b/libs_crutch/contrib/suds/options.py deleted file mode 100755 index 998a8d9..0000000 --- a/libs_crutch/contrib/suds/options.py +++ /dev/null @@ -1,162 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Suds basic options classes. -""" - -from suds.cache import Cache, NoCache -from suds.properties import * -from suds.store import DocumentStore, defaultDocumentStore -from suds.transport import Transport -from suds.wsse import Security -from suds.xsd.doctor import Doctor - - -class TpLinker(AutoLinker): - """ - Transport (auto) linker used to manage linkage between - transport objects Properties and those Properties that contain them. 
- """ - - def updated(self, properties, prev, next): - if isinstance(prev, Transport): - tp = Unskin(prev.options) - properties.unlink(tp) - if isinstance(next, Transport): - tp = Unskin(next.options) - properties.link(tp) - - -class Options(Skin): - """ - Options: - - B{cache} - The XML document cache. May be set to None for no caching. - - type: L{Cache} - - default: L{NoCache()} - - B{documentStore} - The XML document store used to access locally - stored documents without having to download them from an external - location. May be set to None for no internal suds library document - store. - - type: L{DocumentStore} - - default: L{defaultDocumentStore} - - B{extraArgumentErrors} - Raise exceptions when unknown message parts - are detected when receiving a web service reply, compared to the - operation's WSDL schema definition. - - type: I{bool} - - default: True - - B{allowUnknownMessageParts} - Raise exceptions when extra arguments are - detected when invoking a web service operation, compared to the - operation's WSDL schema definition. - - type: I{bool} - - default: False - - B{faults} - Raise faults raised by server, else return tuple from - service method invocation as (httpcode, object). - - type: I{bool} - - default: True - - B{service} - The default service name. - - type: I{str} - - default: None - - B{port} - The default service port name, not tcp port. - - type: I{str} - - default: None - - B{location} - This overrides the service port address I{URL} defined - in the WSDL. - - type: I{str} - - default: None - - B{transport} - The message transport. - - type: L{Transport} - - default: None - - B{soapheaders} - The soap headers to be included in the soap message. - - type: I{any} - - default: None - - B{wsse} - The web services I{security} provider object. - - type: L{Security} - - default: None - - B{doctor} - A schema I{doctor} object. 
- - type: L{Doctor} - - default: None - - B{xstq} - The B{x}ml B{s}chema B{t}ype B{q}ualified flag indicates - that the I{xsi:type} attribute values should be qualified by - namespace. - - type: I{bool} - - default: True - - B{prefixes} - Elements of the soap message should be qualified (when - needed) using XML prefixes as opposed to xmlns="" syntax. - - type: I{bool} - - default: True - - B{retxml} - Flag that causes the I{raw} soap envelope to be returned - instead of the python object graph. - - type: I{bool} - - default: False - - B{prettyxml} - Flag that causes I{pretty} xml to be rendered when - generating the outbound soap envelope. - - type: I{bool} - - default: False - - B{autoblend} - Flag that ensures that the schema(s) defined within - the WSDL import each other. - - type: I{bool} - - default: False - - B{cachingpolicy} - The caching policy. - - type: I{int} - - 0 = Cache XML documents. - - 1 = Cache WSDL (pickled) object. - - default: 0 - - B{plugins} - A plugin container. - - type: I{list} - - default: I{list()} - - B{nosend} - Create the soap envelope but do not send. - When specified, method invocation returns a I{RequestContext} - instead of sending it. - - type: I{bool} - - default: False - - B{unwrap} - Enable automatic parameter unwrapping when possible. - Enabled by default. If disabled, no input or output parameters are - ever automatically unwrapped. - - type: I{bool} - - default: True - - B{sortNamespaces} - Namespaces are sorted alphabetically. If disabled, - namespaces are left in the order they are received from the source. - Enabled by default for historical purposes. 
- - type: I{bool} - - default: True - """ - def __init__(self, **kwargs): - domain = __name__ - definitions = [ - Definition('cache', Cache, NoCache()), - Definition('documentStore', DocumentStore, defaultDocumentStore), - Definition('extraArgumentErrors', bool, True), - Definition('allowUnknownMessageParts', bool, False), - Definition('faults', bool, True), - Definition('transport', Transport, None, TpLinker()), - Definition('service', (int, str), None), - Definition('port', (int, str), None), - Definition('location', str, None), - Definition('soapheaders', (), ()), - Definition('wsse', Security, None), - Definition('doctor', Doctor, None), - Definition('xstq', bool, True), - Definition('prefixes', bool, True), - Definition('retxml', bool, False), - Definition('prettyxml', bool, False), - Definition('autoblend', bool, False), - Definition('cachingpolicy', int, 0), - Definition('plugins', (list, tuple), []), - Definition('nosend', bool, False), - Definition('unwrap', bool, True), - Definition('sortNamespaces', bool, True)] - Skin.__init__(self, domain, definitions, kwargs) diff --git a/libs_crutch/contrib/suds/plugin.py b/libs_crutch/contrib/suds/plugin.py deleted file mode 100755 index c078ddd..0000000 --- a/libs_crutch/contrib/suds/plugin.py +++ /dev/null @@ -1,276 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The plugin module provides suds plugin implementation classes. - -""" - -from suds import * - -from logging import getLogger -log = getLogger(__name__) - - -class Context(object): - """Plugin context.""" - pass - - -class InitContext(Context): - """ - Init Context. - - @ivar wsdl: The WSDL. - @type wsdl: L{wsdl.Definitions} - - """ - pass - - -class DocumentContext(Context): - """ - The XML document load context. - - @ivar url: The URL. - @type url: str - @ivar document: Either the XML text or the B{parsed} document root. - @type document: (str|L{sax.element.Element}) - - """ - pass - - -class MessageContext(Context): - """ - The context for sending the SOAP envelope. - - @ivar envelope: The SOAP envelope to be sent. - @type envelope: (str|L{sax.element.Element}) - @ivar reply: The reply. - @type reply: (str|L{sax.element.Element}|object) - - """ - pass - - -class Plugin: - """Plugin base.""" - pass - - -class InitPlugin(Plugin): - """Base class for all suds I{init} plugins.""" - - def initialized(self, context): - """ - Suds client initialization. - - Called after WSDL the has been loaded. Provides the plugin with the - opportunity to inspect/modify the WSDL. - - @param context: The init context. - @type context: L{InitContext} - - """ - pass - - -class DocumentPlugin(Plugin): - """Base class for suds I{document} plugins.""" - - def loaded(self, context): - """ - Suds has loaded a WSDL/XSD document. - - Provides the plugin with an opportunity to inspect/modify the unparsed - document. Called after each WSDL/XSD document is loaded. - - @param context: The document context. 
- @type context: L{DocumentContext} - - """ - pass - - def parsed(self, context): - """ - Suds has parsed a WSDL/XSD document. - - Provides the plugin with an opportunity to inspect/modify the parsed - document. Called after each WSDL/XSD document is parsed. - - @param context: The document context. - @type context: L{DocumentContext} - - """ - pass - - -class MessagePlugin(Plugin): - """Base class for suds I{SOAP message} plugins.""" - - def marshalled(self, context): - """ - Suds is about to send the specified SOAP envelope. - - Provides the plugin with the opportunity to inspect/modify the envelope - Document before it is sent. - - @param context: The send context. - The I{envelope} is the envelope document. - @type context: L{MessageContext} - - """ - pass - - def sending(self, context): - """ - Suds is about to send the specified SOAP envelope. - - Provides the plugin with the opportunity to inspect/modify the message - text before it is sent. - - @param context: The send context. - The I{envelope} is the envelope text. - @type context: L{MessageContext} - - """ - pass - - def received(self, context): - """ - Suds has received the specified reply. - - Provides the plugin with the opportunity to inspect/modify the received - XML text before it is SAX parsed. - - @param context: The reply context. - The I{reply} is the raw text. - @type context: L{MessageContext} - - """ - pass - - def parsed(self, context): - """ - Suds has SAX parsed the received reply. - - Provides the plugin with the opportunity to inspect/modify the SAX - parsed DOM tree for the reply before it is unmarshalled. - - @param context: The reply context. - The I{reply} is DOM tree. - @type context: L{MessageContext} - - """ - pass - - def unmarshalled(self, context): - """ - Suds has unmarshalled the received reply. - - Provides the plugin with the opportunity to inspect/modify the - unmarshalled reply object before it is returned. - - @param context: The reply context. 
- The I{reply} is unmarshalled suds object. - @type context: L{MessageContext} - - """ - pass - - -class PluginContainer: - """ - Plugin container provides easy method invocation. - - @ivar plugins: A list of plugin objects. - @type plugins: [L{Plugin},] - @cvar ctxclass: A dict of plugin method / context classes. - @type ctxclass: dict - - """ - - domains = { - 'init': (InitContext, InitPlugin), - 'document': (DocumentContext, DocumentPlugin), - 'message': (MessageContext, MessagePlugin)} - - def __init__(self, plugins): - """ - @param plugins: A list of plugin objects. - @type plugins: [L{Plugin},] - - """ - self.plugins = plugins - - def __getattr__(self, name): - domain = self.domains.get(name) - if not domain: - raise Exception('plugin domain (%s), invalid' % (name,)) - ctx, pclass = domain - plugins = [p for p in self.plugins if isinstance(p, pclass)] - return PluginDomain(ctx, plugins) - - -class PluginDomain: - """ - The plugin domain. - - @ivar ctx: A context. - @type ctx: L{Context} - @ivar plugins: A list of plugins (targets). - @type plugins: list - - """ - - def __init__(self, ctx, plugins): - self.ctx = ctx - self.plugins = plugins - - def __getattr__(self, name): - return Method(name, self) - - -class Method: - """ - Plugin method. - - @ivar name: The method name. - @type name: str - @ivar domain: The plugin domain. - @type domain: L{PluginDomain} - - """ - - def __init__(self, name, domain): - """ - @param name: The method name. - @type name: str - @param domain: A plugin domain. 
- @type domain: L{PluginDomain} - - """ - self.name = name - self.domain = domain - - def __call__(self, **kwargs): - ctx = self.domain.ctx() - ctx.__dict__.update(kwargs) - for plugin in self.domain.plugins: - method = getattr(plugin, self.name, None) - if method and callable(method): - method(ctx) - return ctx diff --git a/libs_crutch/contrib/suds/properties.py b/libs_crutch/contrib/suds/properties.py deleted file mode 100755 index f56f77b..0000000 --- a/libs_crutch/contrib/suds/properties.py +++ /dev/null @@ -1,539 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Properties classes. -""" - - -class AutoLinker(object): - """ - Base class, provides interface for I{automatic} link - management between a L{Properties} object and the L{Properties} - contained within I{values}. - """ - def updated(self, properties, prev, next): - """ - Notification that a values was updated and the linkage - between the I{properties} contained with I{prev} need to - be relinked to the L{Properties} contained within the - I{next} value. - """ - pass - - -class Link(object): - """ - Property link object. - @ivar endpoints: A tuple of the (2) endpoints of the link. 
- @type endpoints: tuple(2) - """ - def __init__(self, a, b): - """ - @param a: Property (A) to link. - @type a: L{Property} - @param b: Property (B) to link. - @type b: L{Property} - """ - pA = Endpoint(self, a) - pB = Endpoint(self, b) - self.endpoints = (pA, pB) - self.validate(a, b) - a.links.append(pB) - b.links.append(pA) - - def validate(self, pA, pB): - """ - Validate that the two properties may be linked. - @param pA: Endpoint (A) to link. - @type pA: L{Endpoint} - @param pB: Endpoint (B) to link. - @type pB: L{Endpoint} - @return: self - @rtype: L{Link} - """ - if pA in pB.links or \ - pB in pA.links: - raise Exception('Already linked') - dA = pA.domains() - dB = pB.domains() - for d in dA: - if d in dB: - raise Exception('Duplicate domain "%s" found' % d) - for d in dB: - if d in dA: - raise Exception('Duplicate domain "%s" found' % d) - kA = list(pA.keys()) - kB = list(pB.keys()) - for k in kA: - if k in kB: - raise Exception('Duplicate key %s found' % k) - for k in kB: - if k in kA: - raise Exception('Duplicate key %s found' % k) - return self - - def teardown(self): - """ - Teardown the link. - Removes endpoints from properties I{links} collection. - @return: self - @rtype: L{Link} - """ - pA, pB = self.endpoints - if pA in pB.links: - pB.links.remove(pA) - if pB in pA.links: - pA.links.remove(pB) - return self - - -class Endpoint(object): - """ - Link endpoint (wrapper). - @ivar link: The associated link. - @type link: L{Link} - @ivar target: The properties object. - @type target: L{Property} - """ - def __init__(self, link, target): - self.link = link - self.target = target - - def teardown(self): - return self.link.teardown() - - def __eq__(self, rhs): - return ( self.target == rhs ) - - def __hash__(self): - return hash(self.target) - - def __getattr__(self, name): - return getattr(self.target, name) - - -class Definition: - """ - Property definition. - @ivar name: The property name. 
- @type name: str - @ivar classes: The (class) list of permitted values - @type classes: tuple - @ivar default: The default value. - @ivar type: any - """ - def __init__(self, name, classes, default, linker=AutoLinker()): - """ - @param name: The property name. - @type name: str - @param classes: The (class) list of permitted values - @type classes: tuple - @param default: The default value. - @type default: any - """ - if not isinstance(classes, (list, tuple)): - classes = (classes,) - self.name = name - self.classes = classes - self.default = default - self.linker = linker - - def nvl(self, value=None): - """ - Convert the I{value} into the default when I{None}. - @param value: The proposed value. - @type value: any - @return: The I{default} when I{value} is I{None}, else I{value}. - @rtype: any - """ - if value is None: - return self.default - else: - return value - - def validate(self, value): - """ - Validate the I{value} is of the correct class. - @param value: The value to validate. - @type value: any - @raise AttributeError: When I{value} is invalid. - """ - if value is None: - return - if len(self.classes) and \ - not isinstance(value, self.classes): - msg = '"%s" must be: %s' % (self.name, self.classes) - raise AttributeError(msg) - - - def __repr__(self): - return '%s: %s' % (self.name, str(self)) - - def __str__(self): - s = [] - if len(self.classes): - s.append('classes=%s' % str(self.classes)) - else: - s.append('classes=*') - s.append("default=%s" % str(self.default)) - return ', '.join(s) - - -class Properties: - """ - Represents basic application properties. - Provides basic type validation, default values and - link/synchronization behavior. - @ivar domain: The domain name. - @type domain: str - @ivar definitions: A table of property definitions. - @type definitions: {name: L{Definition}} - @ivar links: A list of linked property objects used to create - a network of properties. - @type links: [L{Property},..] 
- @ivar defined: A dict of property values. - @type defined: dict - """ - def __init__(self, domain, definitions, kwargs): - """ - @param domain: The property domain name. - @type domain: str - @param definitions: A table of property definitions. - @type definitions: {name: L{Definition}} - @param kwargs: A list of property name/values to set. - @type kwargs: dict - """ - self.definitions = {} - for d in definitions: - self.definitions[d.name] = d - self.domain = domain - self.links = [] - self.defined = {} - self.modified = set() - self.prime() - self.update(kwargs) - - def definition(self, name): - """ - Get the definition for the property I{name}. - @param name: The property I{name} to find the definition for. - @type name: str - @return: The property definition - @rtype: L{Definition} - @raise AttributeError: On not found. - """ - d = self.definitions.get(name) - if d is None: - raise AttributeError(name) - return d - - def update(self, other): - """ - Update the property values as specified by keyword/value. - @param other: An object to update from. - @type other: (dict|L{Properties}) - @return: self - @rtype: L{Properties} - """ - if isinstance(other, Properties): - other = other.defined - for n,v in list(other.items()): - self.set(n, v) - return self - - def notset(self, name): - """ - Get whether a property has never been set by I{name}. - @param name: A property name. - @type name: str - @return: True if never been set. - @rtype: bool - """ - self.provider(name).__notset(name) - - def set(self, name, value): - """ - Set the I{value} of a property by I{name}. - The value is validated against the definition and set - to the default when I{value} is None. - @param name: The property name. - @type name: str - @param value: The new property value. - @type value: any - @return: self - @rtype: L{Properties} - """ - self.provider(name).__set(name, value) - return self - - def unset(self, name): - """ - Unset a property by I{name}. - @param name: A property name. 
- @type name: str - @return: self - @rtype: L{Properties} - """ - self.provider(name).__set(name, None) - return self - - def get(self, name, *df): - """ - Get the value of a property by I{name}. - @param name: The property name. - @type name: str - @param df: An optional value to be returned when the value - is not set - @type df: [1]. - @return: The stored value, or I{df[0]} if not set. - @rtype: any - """ - return self.provider(name).__get(name, *df) - - def link(self, other): - """ - Link (associate) this object with anI{other} properties object - to create a network of properties. Links are bidirectional. - @param other: The object to link. - @type other: L{Properties} - @return: self - @rtype: L{Properties} - """ - Link(self, other) - return self - - def unlink(self, *others): - """ - Unlink (disassociate) the specified properties object. - @param others: The list object to unlink. Unspecified means unlink all. - @type others: [L{Properties},..] - @return: self - @rtype: L{Properties} - """ - if not len(others): - others = self.links[:] - for p in self.links[:]: - if p in others: - p.teardown() - return self - - def provider(self, name, history=None): - """ - Find the provider of the property by I{name}. - @param name: The property name. - @type name: str - @param history: A history of nodes checked to prevent - circular hunting. - @type history: [L{Properties},..] - @return: The provider when found. Otherwise, None (when nested) - and I{self} when not nested. - @rtype: L{Properties} - """ - if history is None: - history = [] - history.append(self) - if name in self.definitions: - return self - for x in self.links: - if x in history: - continue - provider = x.provider(name, history) - if provider is not None: - return provider - history.remove(self) - if len(history): - return None - return self - - def keys(self, history=None): - """ - Get the set of I{all} property names. - @param history: A history of nodes checked to prevent - circular hunting. 
- @type history: [L{Properties},..] - @return: A set of property names. - @rtype: list - """ - if history is None: - history = [] - history.append(self) - keys = set() - keys.update(list(self.definitions.keys())) - for x in self.links: - if x in history: - continue - keys.update(x.keys(history)) - history.remove(self) - return keys - - def domains(self, history=None): - """ - Get the set of I{all} domain names. - @param history: A history of nodes checked to prevent - circular hunting. - @type history: [L{Properties},..] - @return: A set of domain names. - @rtype: list - """ - if history is None: - history = [] - history.append(self) - domains = set() - domains.add(self.domain) - for x in self.links: - if x in history: - continue - domains.update(x.domains(history)) - history.remove(self) - return domains - - def prime(self): - """ - Prime the stored values based on default values - found in property definitions. - @return: self - @rtype: L{Properties} - """ - for d in list(self.definitions.values()): - self.defined[d.name] = d.default - return self - - def __notset(self, name): - return not (name in self.modified) - - def __set(self, name, value): - d = self.definition(name) - d.validate(value) - value = d.nvl(value) - prev = self.defined[name] - self.defined[name] = value - self.modified.add(name) - d.linker.updated(self, prev, value) - - def __get(self, name, *df): - d = self.definition(name) - value = self.defined.get(name) - if value == d.default and len(df): - value = df[0] - return value - - def str(self, history): - s = [] - s.append('Definitions:') - for d in list(self.definitions.values()): - s.append('\t%s' % repr(d)) - s.append('Content:') - for d in list(self.defined.items()): - s.append('\t%s' % str(d)) - if self not in history: - history.append(self) - s.append('Linked:') - for x in self.links: - s.append(x.str(history)) - history.remove(self) - return '\n'.join(s) - - def __repr__(self): - return str(self) - - def __str__(self): - return 
self.str([]) - - -class Skin(object): - """ - The meta-programming I{skin} around the L{Properties} object. - @ivar __pts__: The wrapped object. - @type __pts__: L{Properties}. - """ - def __init__(self, domain, definitions, kwargs): - self.__pts__ = Properties(domain, definitions, kwargs) - - def __setattr__(self, name, value): - builtin = name.startswith('__') and name.endswith('__') - if builtin: - self.__dict__[name] = value - return - self.__pts__.set(name, value) - - def __getattr__(self, name): - return self.__pts__.get(name) - - def __repr__(self): - return str(self) - - def __str__(self): - return str(self.__pts__) - - -class Unskin(object): - def __new__(self, *args, **kwargs): - return args[0].__pts__ - - -class Inspector: - """ - Wrapper inspector. - """ - def __init__(self, options): - self.properties = options.__pts__ - - def get(self, name, *df): - """ - Get the value of a property by I{name}. - @param name: The property name. - @type name: str - @param df: An optional value to be returned when the value - is not set - @type df: [1]. - @return: The stored value, or I{df[0]} if not set. - @rtype: any - """ - return self.properties.get(name, *df) - - def update(self, **kwargs): - """ - Update the property values as specified by keyword/value. - @param kwargs: A list of property name/values to set. - @type kwargs: dict - @return: self - @rtype: L{Properties} - """ - return self.properties.update(**kwargs) - - def link(self, other): - """ - Link (associate) this object with anI{other} properties object - to create a network of properties. Links are bidirectional. - @param other: The object to link. - @type other: L{Properties} - @return: self - @rtype: L{Properties} - """ - p = other.__pts__ - return self.properties.link(p) - - def unlink(self, other): - """ - Unlink (disassociate) the specified properties object. - @param other: The object to unlink. 
- @type other: L{Properties} - @return: self - @rtype: L{Properties} - """ - p = other.__pts__ - return self.properties.unlink(p) diff --git a/libs_crutch/contrib/suds/reader.py b/libs_crutch/contrib/suds/reader.py deleted file mode 100755 index 31c5ee7..0000000 --- a/libs_crutch/contrib/suds/reader.py +++ /dev/null @@ -1,197 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -XML document reader classes providing integration with the suds library's -caching system. - -""" - -import suds.cache -import suds.plugin -import suds.sax.parser -import suds.transport - -try: - from hashlib import md5 -except ImportError: - # 'hashlib' package added in Python 2.5 so use the now deprecated/removed - # 'md5' package in older Python versions. - from md5 import md5 - - -class Reader(object): - """ - Provides integration with the cache. - - @ivar options: An options object. - @type options: I{Options} - - """ - - def __init__(self, options): - """ - @param options: An options object. 
- @type options: I{Options} - - """ - self.options = options - self.plugins = suds.plugin.PluginContainer(options.plugins) - - def mangle(self, name, x): - """ - Mangle the name by hashing the I{name} and appending I{x}. - - @return: The mangled name. - @rtype: str - - """ - h = md5(name.encode()).hexdigest() - return '%s-%s' % (h, x) - - -class DefinitionsReader(Reader): - """ - Integrates between the WSDL Definitions object and the object cache. - - @ivar fn: A factory function used to create objects not found in the cache. - @type fn: I{Constructor} - - """ - - def __init__(self, options, fn): - """ - @param options: An options object. - @type options: I{Options} - @param fn: A factory function used to create objects not found in the - cache. - @type fn: I{Constructor} - - """ - super(DefinitionsReader, self).__init__(options) - self.fn = fn - - def open(self, url): - """ - Open a WSDL schema at the specified I{URL}. - - First, the WSDL schema is looked up in the I{object cache}. If not - found, a new one constructed using the I{fn} factory function and the - result is cached for the next open(). - - @param url: A WSDL URL. - @type url: str. - @return: The WSDL object. - @rtype: I{Definitions} - - """ - cache = self.__cache() - id = self.mangle(url, "wsdl") - wsdl = cache.get(id) - if wsdl is None: - wsdl = self.fn(url, self.options) - cache.put(id, wsdl) - else: - # Cached WSDL Definitions objects may have been created with - # different options so we update them here with our current ones. - wsdl.options = self.options - for imp in wsdl.imports: - imp.imported.options = self.options - return wsdl - - def __cache(self): - """ - Get the I{object cache}. - - @return: The I{cache} when I{cachingpolicy} = B{1}. 
- @rtype: L{Cache} - - """ - if self.options.cachingpolicy == 1: - return self.options.cache - return suds.cache.NoCache() - - -class DocumentReader(Reader): - """Integrates between the SAX L{Parser} and the document cache.""" - - def open(self, url): - """ - Open an XML document at the specified I{URL}. - - First, a preparsed document is looked up in the I{object cache}. If not - found, its content is fetched from an external source and parsed using - the SAX parser. The result is cached for the next open(). - - @param url: A document URL. - @type url: str. - @return: The specified XML document. - @rtype: I{Document} - - """ - cache = self.__cache() - id = self.mangle(url, "document") - xml = cache.get(id) - if xml is None: - xml = self.__fetch(url) - cache.put(id, xml) - self.plugins.document.parsed(url=url, document=xml.root()) - return xml - - def __cache(self): - """ - Get the I{object cache}. - - @return: The I{cache} when I{cachingpolicy} = B{0}. - @rtype: L{Cache} - - """ - if self.options.cachingpolicy == 0: - return self.options.cache - return suds.cache.NoCache() - - def __fetch(self, url): - """ - Fetch document content from an external source. - - The document content will first be looked up in the registered document - store, and if not found there, downloaded using the registered - transport system. - - Before being returned, the fetched document content first gets - processed by all the registered 'loaded' plugins. - - @param url: A document URL. - @type url: str. - @return: A file pointer to the fetched document content. 
- @rtype: file-like - - """ - content = None - store = self.options.documentStore - if store is not None: - content = store.open(url) - if content is None: - request = suds.transport.Request(url) - request.headers = self.options.headers - fp = self.options.transport.open(request) - try: - content = fp.read() - finally: - fp.close() - ctx = self.plugins.document.loaded(url=url, document=content) - content = ctx.document - sax = suds.sax.parser.Parser() - return sax.parse(string=content) diff --git a/libs_crutch/contrib/suds/resolver.py b/libs_crutch/contrib/suds/resolver.py deleted file mode 100755 index 82014f6..0000000 --- a/libs_crutch/contrib/suds/resolver.py +++ /dev/null @@ -1,493 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The I{resolver} module provides a collection of classes that -provide wsdl/xsd named type resolution. -""" - -from suds import * -from suds.sax import splitPrefix, Namespace -from suds.sudsobject import Object -from suds.xsd.query import BlindQuery, TypeQuery, qualify - -import re - -from logging import getLogger -log = getLogger(__name__) - - -class Resolver: - """ - An I{abstract} schema-type resolver. - @ivar schema: A schema object. 
- @type schema: L{xsd.schema.Schema} - """ - - def __init__(self, schema): - """ - @param schema: A schema object. - @type schema: L{xsd.schema.Schema} - """ - self.schema = schema - - def find(self, name, resolved=True): - """ - Get the definition object for the schema object by name. - @param name: The name of a schema object. - @type name: basestring - @param resolved: A flag indicating that the fully resolved type - should be returned. - @type resolved: boolean - @return: The found schema I{type} - @rtype: L{xsd.sxbase.SchemaObject} - """ - log.debug('searching schema for (%s)', name) - qref = qualify(name, self.schema.root, self.schema.tns) - query = BlindQuery(qref) - result = query.execute(self.schema) - if result is None: - log.error('(%s) not-found', name) - return None - log.debug('found (%s) as (%s)', name, Repr(result)) - if resolved: - result = result.resolve() - return result - - -class PathResolver(Resolver): - """ - Resolves the definition object for the schema type located at a given path. - The path may contain (.) dot notation to specify nested types. - @ivar wsdl: A wsdl object. - @type wsdl: L{wsdl.Definitions} - """ - - def __init__(self, wsdl, ps='.'): - """ - @param wsdl: A schema object. - @type wsdl: L{wsdl.Definitions} - @param ps: The path separator character - @type ps: char - """ - Resolver.__init__(self, wsdl.schema) - self.wsdl = wsdl - self.altp = re.compile('({)(.+)(})(.+)') - self.splitp = re.compile('({.+})*[^\\%s]+' % ps[0]) - - def find(self, path, resolved=True): - """ - Get the definition object for the schema type located at the specified path. - The path may contain (.) dot notation to specify nested types. - Actually, the path separator is usually a (.) but can be redefined - during contruction. - @param path: A (.) separated path to a schema type. - @type path: basestring - @param resolved: A flag indicating that the fully resolved type - should be returned. 
- @type resolved: boolean - @return: The found schema I{type} - @rtype: L{xsd.sxbase.SchemaObject} - """ - result = None - parts = self.split(path) - try: - result = self.root(parts) - if len(parts) > 1: - result = result.resolve(nobuiltin=True) - result = self.branch(result, parts) - result = self.leaf(result, parts) - if resolved: - result = result.resolve(nobuiltin=True) - except PathResolver.BadPath: - log.error('path: "%s", not-found' % path) - return result - - def root(self, parts): - """ - Find the path root. - @param parts: A list of path parts. - @type parts: [str,..] - @return: The root. - @rtype: L{xsd.sxbase.SchemaObject} - """ - result = None - name = parts[0] - log.debug('searching schema for (%s)', name) - qref = self.qualify(parts[0]) - query = BlindQuery(qref) - result = query.execute(self.schema) - if result is None: - log.error('(%s) not-found', name) - raise PathResolver.BadPath(name) - log.debug('found (%s) as (%s)', name, Repr(result)) - return result - - def branch(self, root, parts): - """ - Traverse the path until a leaf is reached. - @param parts: A list of path parts. - @type parts: [str,..] - @param root: The root. - @type root: L{xsd.sxbase.SchemaObject} - @return: The end of the branch. - @rtype: L{xsd.sxbase.SchemaObject} - """ - result = root - for part in parts[1:-1]: - name = splitPrefix(part)[1] - log.debug('searching parent (%s) for (%s)', Repr(result), name) - result, ancestry = result.get_child(name) - if result is None: - log.error('(%s) not-found', name) - raise PathResolver.BadPath(name) - result = result.resolve(nobuiltin=True) - log.debug('found (%s) as (%s)', name, Repr(result)) - return result - - def leaf(self, parent, parts): - """ - Find the leaf. - @param parts: A list of path parts. - @type parts: [str,..] - @param parent: The leaf's parent. - @type parent: L{xsd.sxbase.SchemaObject} - @return: The leaf. 
- @rtype: L{xsd.sxbase.SchemaObject} - """ - name = splitPrefix(parts[-1])[1] - if name.startswith('@'): - result, path = parent.get_attribute(name[1:]) - else: - result, ancestry = parent.get_child(name) - if result is None: - raise PathResolver.BadPath(name) - return result - - def qualify(self, name): - """ - Qualify the name as either: - - plain name - - ns prefixed name (eg: ns0:Person) - - fully ns qualified name (eg: {http://myns-uri}Person) - @param name: The name of an object in the schema. - @type name: str - @return: A qualified name. - @rtype: qname - """ - m = self.altp.match(name) - if m is None: - return qualify(name, self.wsdl.root, self.wsdl.tns) - else: - return (m.group(4), m.group(2)) - - def split(self, s): - """ - Split the string on (.) while preserving any (.) inside the - '{}' alternalte syntax for full ns qualification. - @param s: A plain or qualified name. - @type s: str - @return: A list of the name's parts. - @rtype: [str,..] - """ - parts = [] - b = 0 - while 1: - m = self.splitp.match(s, b) - if m is None: - break - b,e = m.span() - parts.append(s[b:e]) - b = e+1 - return parts - - class BadPath(Exception): pass - - -class TreeResolver(Resolver): - """ - The tree resolver is a I{stateful} tree resolver - used to resolve each node in a tree. As such, it mirrors - the tree structure to ensure that nodes are resolved in - context. - @ivar stack: The context stack. - @type stack: list - """ - - def __init__(self, schema): - """ - @param schema: A schema object. - @type schema: L{xsd.schema.Schema} - """ - Resolver.__init__(self, schema) - self.stack = Stack() - - def reset(self): - """ - Reset the resolver's state. - """ - self.stack = Stack() - - def push(self, x): - """ - Push an I{object} onto the stack. - @param x: An object to push. - @type x: L{Frame} - @return: The pushed frame. 
- @rtype: L{Frame} - """ - if isinstance(x, Frame): - frame = x - else: - frame = Frame(x) - self.stack.append(frame) - log.debug('push: (%s)\n%s', Repr(frame), Repr(self.stack)) - return frame - - def top(self): - """ - Get the I{frame} at the top of the stack. - @return: The top I{frame}, else None. - @rtype: L{Frame} - """ - if len(self.stack): - return self.stack[-1] - else: - return Frame.Empty() - - def pop(self): - """ - Pop the frame at the top of the stack. - @return: The popped frame, else None. - @rtype: L{Frame} - """ - if len(self.stack): - popped = self.stack.pop() - log.debug('pop: (%s)\n%s', Repr(popped), Repr(self.stack)) - return popped - log.debug('stack empty, not-popped') - return None - - def depth(self): - """ - Get the current stack depth. - @return: The current stack depth. - @rtype: int - """ - return len(self.stack) - - def getchild(self, name, parent): - """Get a child by name.""" - log.debug('searching parent (%s) for (%s)', Repr(parent), name) - if name.startswith('@'): - return parent.get_attribute(name[1:]) - return parent.get_child(name) - - -class NodeResolver(TreeResolver): - """ - The node resolver is a I{stateful} XML document resolver - used to resolve each node in a tree. As such, it mirrors - the tree structure to ensure that nodes are resolved in - context. - """ - - def __init__(self, schema): - """ - @param schema: A schema object. - @type schema: L{xsd.schema.Schema} - """ - TreeResolver.__init__(self, schema) - - def find(self, node, resolved=False, push=True): - """ - @param node: An xml node to be resolved. - @type node: L{sax.element.Element} - @param resolved: A flag indicating that the fully resolved type should be - returned. - @type resolved: boolean - @param push: Indicates that the resolved type should be - pushed onto the stack. 
- @type push: boolean - @return: The found schema I{type} - @rtype: L{xsd.sxbase.SchemaObject} - """ - name = node.name - parent = self.top().resolved - if parent is None: - result, ancestry = self.query(name, node) - else: - result, ancestry = self.getchild(name, parent) - known = self.known(node) - if result is None: - return result - if push: - frame = Frame(result, resolved=known, ancestry=ancestry) - pushed = self.push(frame) - if resolved: - result = result.resolve() - return result - - def findattr(self, name, resolved=True): - """ - Find an attribute type definition. - @param name: An attribute name. - @type name: basestring - @param resolved: A flag indicating that the fully resolved type should be - returned. - @type resolved: boolean - @return: The found schema I{type} - @rtype: L{xsd.sxbase.SchemaObject} - """ - name = '@%s'%name - parent = self.top().resolved - if parent is None: - result, ancestry = self.query(name, node) - else: - result, ancestry = self.getchild(name, parent) - if result is None: - return result - if resolved: - result = result.resolve() - return result - - def query(self, name, node): - """Blindly query the schema by name.""" - log.debug('searching schema for (%s)', name) - qref = qualify(name, node, node.namespace()) - query = BlindQuery(qref) - result = query.execute(self.schema) - return (result, []) - - def known(self, node): - """Resolve type referenced by @xsi:type.""" - ref = node.get('type', Namespace.xsins) - if ref is None: - return None - qref = qualify(ref, node, node.namespace()) - query = BlindQuery(qref) - return query.execute(self.schema) - - -class GraphResolver(TreeResolver): - """ - The graph resolver is a I{stateful} L{Object} graph resolver - used to resolve each node in a tree. As such, it mirrors - the tree structure to ensure that nodes are resolved in - context. - """ - - def __init__(self, schema): - """ - @param schema: A schema object. 
- @type schema: L{xsd.schema.Schema} - """ - TreeResolver.__init__(self, schema) - - def find(self, name, object, resolved=False, push=True): - """ - @param name: The name of the object to be resolved. - @type name: basestring - @param object: The name's value. - @type object: (any|L{Object}) - @param resolved: A flag indicating that the fully resolved type - should be returned. - @type resolved: boolean - @param push: Indicates that the resolved type should be - pushed onto the stack. - @type push: boolean - @return: The found schema I{type} - @rtype: L{xsd.sxbase.SchemaObject} - """ - known = None - parent = self.top().resolved - if parent is None: - result, ancestry = self.query(name) - else: - result, ancestry = self.getchild(name, parent) - if result is None: - return None - if isinstance(object, Object): - known = self.known(object) - if push: - frame = Frame(result, resolved=known, ancestry=ancestry) - pushed = self.push(frame) - if resolved: - if known is None: - result = result.resolve() - else: - result = known - return result - - def query(self, name): - """Blindly query the schema by name.""" - log.debug('searching schema for (%s)', name) - schema = self.schema - wsdl = self.wsdl() - if wsdl is None: - qref = qualify(name, schema.root, schema.tns) - else: - qref = qualify(name, wsdl.root, wsdl.tns) - query = BlindQuery(qref) - result = query.execute(schema) - return (result, []) - - def wsdl(self): - """Get the wsdl.""" - container = self.schema.container - if container is None: - return None - else: - return container.wsdl - - def known(self, object): - """Get the type specified in the object's metadata.""" - try: - md = object.__metadata__ - known = md.sxtype - return known - except Exception: - pass - - -class Frame: - def __init__(self, type, resolved=None, ancestry=()): - self.type = type - if resolved is None: - resolved = type.resolve() - self.resolved = resolved.resolve() - self.ancestry = ancestry - - def __str__(self): - return '%s\n%s\n%s' % 
\ - (Repr(self.type), - Repr(self.resolved), - [Repr(t) for t in self.ancestry]) - - class Empty: - def __getattr__(self, name): - if name == 'ancestry': - return () - else: - return None - - -class Stack(list): - def __repr__(self): - result = [] - for item in self: - result.append(repr(item)) - return '\n'.join(result) diff --git a/libs_crutch/contrib/suds/sax/__init__.py b/libs_crutch/contrib/suds/sax/__init__.py deleted file mode 100755 index fbaca21..0000000 --- a/libs_crutch/contrib/suds/sax/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The sax module contains a collection of classes that provide a (D)ocument -(O)bject (M)odel representation of an XML document. The goal is to provide an -easy, intuitive interface for managing XML documents. Although the term DOM is -used here, this model is B{far} better. - -XML namespaces in suds are represented using a (2) element tuple containing the -prefix and the URI, e.g. I{('tns', 'http://myns')} - -@var encoder: A I{pluggable} XML special character processor used to encode/ - decode strings. 
-@type encoder: L{Encoder} - -""" - -from suds.sax.enc import Encoder - -# pluggable XML special character encoder. -encoder = Encoder() - - -def splitPrefix(name): - """ - Split the name into a tuple (I{prefix}, I{name}). The first element in the - tuple is I{None} when the name does not have a prefix. - - @param name: A node name containing an optional prefix. - @type name: basestring - @return: A tuple containing the (2) parts of I{name}. - @rtype: (I{prefix}, I{name}) - - """ - if isinstance(name, str) and ":" in name: - return tuple(name.split(":", 1)) - return None, name - - -class Namespace: - """XML namespace.""" - - default = (None, None) - xmlns = ("xml", "http://www.w3.org/XML/1998/namespace") - xsdns = ("xs", "http://www.w3.org/2001/XMLSchema") - xsins = ("xsi", "http://www.w3.org/2001/XMLSchema-instance") - all = (xsdns, xsins) - - @classmethod - def create(cls, p=None, u=None): - return p, u - - @classmethod - def none(cls, ns): - return ns == cls.default - - @classmethod - def xsd(cls, ns): - try: - return cls.w3(ns) and ns[1].endswith("XMLSchema") - except Exception: - pass - return False - - @classmethod - def xsi(cls, ns): - try: - return cls.w3(ns) and ns[1].endswith("XMLSchema-instance") - except Exception: - pass - return False - - @classmethod - def xs(cls, ns): - return cls.xsd(ns) or cls.xsi(ns) - - @classmethod - def w3(cls, ns): - try: - return ns[1].startswith("http://www.w3.org") - except Exception: - pass - return False - - @classmethod - def isns(cls, ns): - try: - return isinstance(ns, tuple) and len(ns) == len(cls.default) - except Exception: - pass - return False diff --git a/libs_crutch/contrib/suds/sax/attribute.py b/libs_crutch/contrib/suds/sax/attribute.py deleted file mode 100755 index da7c3e2..0000000 --- a/libs_crutch/contrib/suds/sax/attribute.py +++ /dev/null @@ -1,173 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as 
published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides XML I{attribute} classes. - -""" - -from suds import UnicodeMixin -from suds.sax import splitPrefix, Namespace -from suds.sax.text import Text - - -class Attribute(UnicodeMixin): - """ - An XML attribute object. - - @ivar parent: The node containing this attribute. - @type parent: L{element.Element} - @ivar prefix: The I{optional} namespace prefix. - @type prefix: basestring - @ivar name: The I{unqualified} attribute name. - @type name: basestring - @ivar value: The attribute's value. - @type value: basestring - - """ - - def __init__(self, name, value=None): - """ - @param name: The attribute's name with I{optional} namespace prefix. - @type name: basestring - @param value: The attribute's value. - @type value: basestring - - """ - self.parent = None - self.prefix, self.name = splitPrefix(name) - self.setValue(value) - - def clone(self, parent=None): - """ - Clone this object. - - @param parent: The parent for the clone. - @type parent: L{element.Element} - @return: A copy of this object assigned to the new parent. - @rtype: L{Attribute} - - """ - a = Attribute(self.qname(), self.value) - a.parent = parent - return a - - def qname(self): - """ - Get this attribute's B{fully} qualified name. - - @return: The fully qualified name. 
- @rtype: basestring - - """ - if self.prefix is None: - return self.name - return ":".join((self.prefix, self.name)) - - def setValue(self, value): - """ - Set the attribute's value. - - @param value: The new value (may be None). - @type value: basestring - @return: self - @rtype: L{Attribute} - - """ - if isinstance(value, Text): - self.value = value - else: - self.value = Text(value) - return self - - def getValue(self, default=Text("")): - """ - Get the attributes value with optional default. - - @param default: An optional value to return when the attribute's value - has not been set. - @type default: basestring - @return: The attribute's value, or I{default}. - @rtype: L{Text} - - """ - return self.value or default - - def hasText(self): - """ - Get whether the attribute has a non-empty I{text} string value. - - @return: True when has I{text}. - @rtype: boolean - - """ - return bool(self.value) - - def namespace(self): - """ - Get the attribute's namespace. This may either be the namespace defined - by an optional prefix, or the default namespace. - - @return: The attribute's namespace. - @rtype: (I{prefix}, I{name}) - - """ - if self.prefix is None: - return Namespace.default - return self.resolvePrefix(self.prefix) - - def resolvePrefix(self, prefix): - """ - Resolve the specified prefix to a known namespace. - - @param prefix: A declared prefix. - @type prefix: basestring - @return: The namespace mapped to I{prefix}. - @rtype: (I{prefix}, I{name}) - - """ - if self.parent is None: - return Namespace.default - return self.parent.resolvePrefix(prefix) - - def match(self, name=None, ns=None): - """ - Match by (optional) name and/or (optional) namespace. - - @param name: The optional attribute tag name. - @type name: str - @param ns: An optional namespace. - @type ns: (I{prefix}, I{name}) - @return: True if matched. 
- @rtype: boolean - - """ - byname = name is None or (self.name == name) - byns = ns is None or (self.namespace()[1] == ns[1]) - return byname and byns - - def __eq__(self, rhs): - """Equals operator.""" - return (isinstance(rhs, Attribute) and self.prefix == rhs.name and - self.name == rhs.name) - - def __repr__(self): - """Programmer friendly string representation.""" - return "attr (prefix=%s, name=%s, value=(%s))" % (self.prefix, - self.name, self.value) - - def __unicode__(self): - """XML string representation.""" - return '%s="%s"' % (self.qname(), self.value and self.value.escape()) diff --git a/libs_crutch/contrib/suds/sax/date.py b/libs_crutch/contrib/suds/sax/date.py deleted file mode 100755 index be7a439..0000000 --- a/libs_crutch/contrib/suds/sax/date.py +++ /dev/null @@ -1,460 +0,0 @@ -# -*- coding: utf-8 -*- - -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# written by: Jurko Gospodnetić ( jurko.gospodnetic@pke.hr ) -# based on code by: Glen Walker -# based on code by: Nathan Van Gheem ( vangheem@gmail.com ) - -"""Classes for conversion between XML dates and Python objects.""" - -from suds import UnicodeMixin - -import datetime -import re -import time - - -_SNIPPET_DATE = \ - r"(?P\d{1,})-(?P\d{1,2})-(?P\d{1,2})" -_SNIPPET_TIME = \ - r"(?P\d{1,2}):(?P[0-5]?[0-9]):(?P[0-5]?[0-9])" \ - r"(?:\.(?P\d+))?" -_SNIPPET_ZONE = \ - r"(?:(?P[-+])(?P\d{1,2})" \ - r"(?::(?P[0-5]?[0-9]))?)" \ - r"|(?P[Zz])" - -_PATTERN_DATE = r"^%s(?:%s)?$" % (_SNIPPET_DATE, _SNIPPET_ZONE) -_PATTERN_TIME = r"^%s(?:%s)?$" % (_SNIPPET_TIME, _SNIPPET_ZONE) -_PATTERN_DATETIME = r"^%s[T ]%s(?:%s)?$" % (_SNIPPET_DATE, _SNIPPET_TIME, - _SNIPPET_ZONE) - -_RE_DATE = re.compile(_PATTERN_DATE) -_RE_TIME = re.compile(_PATTERN_TIME) -_RE_DATETIME = re.compile(_PATTERN_DATETIME) - - -class Date(UnicodeMixin): - """ - An XML date object supporting the xsd:date datatype. - - @ivar value: The object value. - @type value: B{datetime}.I{date} - - """ - - def __init__(self, value): - """ - @param value: The date value of the object. - @type value: (datetime.date|str) - @raise ValueError: When I{value} is invalid. - - """ - if isinstance(value, datetime.datetime): - self.value = value.date() - elif isinstance(value, datetime.date): - self.value = value - elif isinstance(value, str): - self.value = self.__parse(value) - else: - raise ValueError("invalid type for Date(): %s" % type(value)) - - @staticmethod - def __parse(value): - """ - Parse the string date. - - Supports the subset of ISO8601 used by xsd:date, but is lenient with - what is accepted, handling most reasonable syntax. - - Any timezone is parsed but ignored because a) it is meaningless without - a time and b) B{datetime}.I{date} does not support timezone - information. - - @param value: A date string. - @type value: str - @return: A date object. 
- @rtype: B{datetime}.I{date} - - """ - match_result = _RE_DATE.match(value) - if match_result is None: - raise ValueError("date data has invalid format '%s'" % (value,)) - return _date_from_match(match_result) - - def __unicode__(self): - return self.value.isoformat() - - -class DateTime(UnicodeMixin): - """ - An XML datetime object supporting the xsd:dateTime datatype. - - @ivar value: The object value. - @type value: B{datetime}.I{datetime} - - """ - - def __init__(self, value): - """ - @param value: The datetime value of the object. - @type value: (datetime.datetime|str) - @raise ValueError: When I{value} is invalid. - - """ - if isinstance(value, datetime.datetime): - self.value = value - elif isinstance(value, str): - self.value = self.__parse(value) - else: - raise ValueError("invalid type for DateTime(): %s" % type(value)) - - @staticmethod - def __parse(value): - """ - Parse the string datetime. - - Supports the subset of ISO8601 used by xsd:dateTime, but is lenient - with what is accepted, handling most reasonable syntax. - - Subsecond information is rounded to microseconds due to a restriction - in the python datetime.datetime/time implementation. - - @param value: A datetime string. - @type value: str - @return: A datetime object. - @rtype: B{datetime}.I{datetime} - - """ - match_result = _RE_DATETIME.match(value) - if match_result is None: - raise ValueError("date data has invalid format '%s'" % (value,)) - - date = _date_from_match(match_result) - time, round_up = _time_from_match(match_result) - tzinfo = _tzinfo_from_match(match_result) - - value = datetime.datetime.combine(date, time) - value = value.replace(tzinfo=tzinfo) - if round_up: - value += datetime.timedelta(microseconds=1) - return value - - def __unicode__(self): - return self.value.isoformat() - - -class Time(UnicodeMixin): - """ - An XML time object supporting the xsd:time datatype. - - @ivar value: The object value. 
- @type value: B{datetime}.I{time} - - """ - - def __init__(self, value): - """ - @param value: The time value of the object. - @type value: (datetime.time|str) - @raise ValueError: When I{value} is invalid. - - """ - if isinstance(value, datetime.time): - self.value = value - elif isinstance(value, str): - self.value = self.__parse(value) - else: - raise ValueError("invalid type for Time(): %s" % type(value)) - - @staticmethod - def __parse(value): - """ - Parse the string date. - - Supports the subset of ISO8601 used by xsd:time, but is lenient with - what is accepted, handling most reasonable syntax. - - Subsecond information is rounded to microseconds due to a restriction - in the python datetime.time implementation. - - @param value: A time string. - @type value: str - @return: A time object. - @rtype: B{datetime}.I{time} - - """ - match_result = _RE_TIME.match(value) - if match_result is None: - raise ValueError("date data has invalid format '%s'" % (value,)) - - time, round_up = _time_from_match(match_result) - tzinfo = _tzinfo_from_match(match_result) - if round_up: - time = _bump_up_time_by_microsecond(time) - return time.replace(tzinfo=tzinfo) - - def __unicode__(self): - return self.value.isoformat() - - -class FixedOffsetTimezone(datetime.tzinfo, UnicodeMixin): - """ - A timezone with a fixed offset and no daylight savings adjustment. - - http://docs.python.org/library/datetime.html#datetime.tzinfo - - """ - - def __init__(self, offset): - """ - @param offset: The fixed offset of the timezone. 
- @type offset: I{int} or B{datetime}.I{timedelta} - - """ - if type(offset) == int: - offset = datetime.timedelta(hours=offset) - elif type(offset) != datetime.timedelta: - raise TypeError("timezone offset must be an int or " - "datetime.timedelta") - if offset.microseconds or (offset.seconds % 60 != 0): - raise ValueError("timezone offset must have minute precision") - self.__offset = offset - - def dst(self, dt): - """ - http://docs.python.org/library/datetime.html#datetime.tzinfo.dst - - """ - return datetime.timedelta(0) - - def utcoffset(self, dt): - """ - http://docs.python.org/library/datetime.html#datetime.tzinfo.utcoffset - - """ - return self.__offset - - def tzname(self, dt): - """ - http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname - - """ - # total_seconds was introduced in Python 2.7 - if hasattr(self.__offset, "total_seconds"): - total_seconds = self.__offset.total_seconds() - else: - total_seconds = (self.__offset.days * 24 * 60 * 60) + \ - (self.__offset.seconds) - - hours = total_seconds // (60 * 60) - total_seconds -= hours * 60 * 60 - - minutes = total_seconds // 60 - total_seconds -= minutes * 60 - - seconds = total_seconds // 1 - total_seconds -= seconds - - if seconds: - return "%+03d:%02d:%02d" % (hours, minutes, seconds) - return "%+03d:%02d" % (hours, minutes) - - def __unicode__(self): - return "FixedOffsetTimezone %s" % (self.tzname(None),) - - -class UtcTimezone(FixedOffsetTimezone): - """ - The UTC timezone. - - http://docs.python.org/library/datetime.html#datetime.tzinfo - - """ - - def __init__(self): - FixedOffsetTimezone.__init__(self, datetime.timedelta(0)) - - def tzname(self, dt): - """ - http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname - - """ - return "UTC" - - def __unicode__(self): - return "UtcTimezone" - - -class LocalTimezone(datetime.tzinfo): - """ - The local timezone of the operating system. 
- - http://docs.python.org/library/datetime.html#datetime.tzinfo - - """ - - def __init__(self): - self.__offset = datetime.timedelta(seconds=-time.timezone) - self.__dst_offset = None - if time.daylight: - self.__dst_offset = datetime.timedelta(seconds=-time.altzone) - - def dst(self, dt): - """ - http://docs.python.org/library/datetime.html#datetime.tzinfo.dst - - """ - if self.__is_daylight_time(dt): - return self.__dst_offset - self.__offset - return datetime.timedelta(0) - - def tzname(self, dt): - """ - http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname - - """ - if self.__is_daylight_time(dt): - return time.tzname[1] - return time.tzname[0] - - def utcoffset(self, dt): - """ - http://docs.python.org/library/datetime.html#datetime.tzinfo.utcoffset - - """ - if self.__is_daylight_time(dt): - return self.__dst_offset - return self.__offset - - def __is_daylight_time(self, dt): - if not time.daylight: - return False - time_tuple = dt.replace(tzinfo=None).timetuple() - time_tuple = time.localtime(time.mktime(time_tuple)) - return time_tuple.tm_isdst > 0 - - def __unicode__(self): - dt = datetime.datetime.now() - return "LocalTimezone %s offset: %s dst: %s" % (self.tzname(dt), - self.utcoffset(dt), self.dst(dt)) - - -def _bump_up_time_by_microsecond(time): - """ - Helper function bumping up the given datetime.time by a microsecond, - cycling around silently to 00:00:00.0 in case of an overflow. - - @param time: Time object. - @type time: B{datetime}.I{time} - @return: Time object. - @rtype: B{datetime}.I{time} - - """ - dt = datetime.datetime(2000, 1, 1, time.hour, time.minute, - time.second, time.microsecond) - dt += datetime.timedelta(microseconds=1) - return dt.time() - - -def _date_from_match(match_object): - """ - Create a date object from a regular expression match. - - The regular expression match is expected to be from _RE_DATE or - _RE_DATETIME. - - @param match_object: The regular expression match. 
- @type match_object: B{re}.I{MatchObject} - @return: A date object. - @rtype: B{datetime}.I{date} - - """ - year = int(match_object.group("year")) - month = int(match_object.group("month")) - day = int(match_object.group("day")) - return datetime.date(year, month, day) - - -def _time_from_match(match_object): - """ - Create a time object from a regular expression match. - - Returns the time object and information whether the resulting time should - be bumped up by one microsecond due to microsecond rounding. - - Subsecond information is rounded to microseconds due to a restriction in - the python datetime.datetime/time implementation. - - The regular expression match is expected to be from _RE_DATETIME or - _RE_TIME. - - @param match_object: The regular expression match. - @type match_object: B{re}.I{MatchObject} - @return: Time object + rounding flag. - @rtype: tuple of B{datetime}.I{time} and bool - - """ - hour = int(match_object.group('hour')) - minute = int(match_object.group('minute')) - second = int(match_object.group('second')) - subsecond = match_object.group('subsecond') - - round_up = False - microsecond = 0 - if subsecond: - round_up = len(subsecond) > 6 and int(subsecond[6]) >= 5 - subsecond = subsecond[:6] - microsecond = int(subsecond + "0" * (6 - len(subsecond))) - return datetime.time(hour, minute, second, microsecond), round_up - - -def _tzinfo_from_match(match_object): - """ - Create a timezone information object from a regular expression match. - - The regular expression match is expected to be from _RE_DATE, _RE_DATETIME - or _RE_TIME. - - @param match_object: The regular expression match. - @type match_object: B{re}.I{MatchObject} - @return: A timezone information object. 
- @rtype: B{datetime}.I{tzinfo} - - """ - tz_utc = match_object.group("tz_utc") - if tz_utc: - return UtcTimezone() - - tz_sign = match_object.group("tz_sign") - if not tz_sign: - return - - h = int(match_object.group("tz_hour") or 0) - m = int(match_object.group("tz_minute") or 0) - if h == 0 and m == 0: - return UtcTimezone() - - # Python limitation - timezone offsets larger than one day (in absolute) - # will cause operations depending on tzinfo.utcoffset() to fail, e.g. - # comparing two timezone aware datetime.datetime/time objects. - if h >= 24: - raise ValueError("timezone indicator too large") - - tz_delta = datetime.timedelta(hours=h, minutes=m) - if tz_sign == "-": - tz_delta *= -1 - return FixedOffsetTimezone(tz_delta) diff --git a/libs_crutch/contrib/suds/sax/document.py b/libs_crutch/contrib/suds/sax/document.py deleted file mode 100755 index e814bd9..0000000 --- a/libs_crutch/contrib/suds/sax/document.py +++ /dev/null @@ -1,176 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides XML I{document} classes. 
-""" - -from suds import * -from suds.sax import * -from suds.sax.element import Element - - -class Document(UnicodeMixin): - """ An XML Document """ - - DECL = '' - - def __init__(self, root=None): - """ - @param root: A root L{Element} or name used to build - the document root element. - @type root: (L{Element}|str|None) - """ - self.__root = None - self.append(root) - - def root(self): - """ - Get the document root element (can be None) - @return: The document root. - @rtype: L{Element} - """ - return self.__root - - def append(self, node): - """ - Append (set) the document root. - @param node: A root L{Element} or name used to build - the document root element. - @type node: (L{Element}|str|None) - """ - if isinstance(node, str): - self.__root = Element(node) - return - if isinstance(node, Element): - self.__root = node - return - - def getChild(self, name, ns=None, default=None): - """ - Get a child by (optional) name and/or (optional) namespace. - @param name: The name of a child element (may contain prefix). - @type name: basestring - @param ns: An optional namespace used to match the child. - @type ns: (I{prefix}, I{name}) - @param default: Returned when child not-found. - @type default: L{Element} - @return: The requested child, or I{default} when not-found. - @rtype: L{Element} - """ - if self.__root is None: - return default - if ns is None: - prefix, name = splitPrefix(name) - if prefix is None: - ns = None - else: - ns = self.__root.resolvePrefix(prefix) - if self.__root.match(name, ns): - return self.__root - else: - return default - - def childAtPath(self, path): - """ - Get a child at I{path} where I{path} is a (/) separated - list of element names that are expected to be children. - @param path: A (/) separated list of element names. 
- @type path: basestring - @return: The leaf node at the end of I{path} - @rtype: L{Element} - """ - if self.__root is None: - return None - if path[0] == '/': - path = path[1:] - path = path.split('/',1) - if self.getChild(path[0]) is None: - return None - if len(path) > 1: - return self.__root.childAtPath(path[1]) - else: - return self.__root - - def childrenAtPath(self, path): - """ - Get a list of children at I{path} where I{path} is a (/) separated - list of element names that are expected to be children. - @param path: A (/) separated list of element names. - @type path: basestring - @return: The collection leaf nodes at the end of I{path} - @rtype: [L{Element},...] - """ - if self.__root is None: - return [] - if path[0] == '/': - path = path[1:] - path = path.split('/',1) - if self.getChild(path[0]) is None: - return [] - if len(path) > 1: - return self.__root.childrenAtPath(path[1]) - else: - return [self.__root,] - - def getChildren(self, name=None, ns=None): - """ - Get a list of children by (optional) name and/or (optional) namespace. - @param name: The name of a child element (may contain prefix). - @type name: basestring - @param ns: An optional namespace used to match the child. - @type ns: (I{prefix}, I{name}) - @return: The list of matching children. - @rtype: [L{Element},...] - """ - if name is None: - matched = self.__root - else: - matched = self.getChild(name, ns) - if matched is None: - return [] - else: - return [matched,] - - def str(self): - """ - Get a string representation of this XML document. - @return: A I{pretty} string. - @rtype: basestring - """ - s = [] - s.append(self.DECL) - root = self.root() - if root is not None: - s.append('\n') - s.append(root.str()) - return ''.join(s) - - def plain(self): - """ - Get a string representation of this XML document. - @return: A I{plain} string. 
- @rtype: basestring - """ - s = [] - s.append(self.DECL) - root = self.root() - if root is not None: - s.append(root.plain()) - return ''.join(s) - - def __unicode__(self): - return self.str() diff --git a/libs_crutch/contrib/suds/sax/element.py b/libs_crutch/contrib/suds/sax/element.py deleted file mode 100755 index 7d9e203..0000000 --- a/libs_crutch/contrib/suds/sax/element.py +++ /dev/null @@ -1,1205 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -XML I{element} classes. - -""" - -from suds import * -from suds.sax import * -from suds.sax.text import Text -from suds.sax.attribute import Attribute - - -class Element(UnicodeMixin): - """ - An XML element object. - - @ivar parent: The node containing this attribute. - @type parent: L{Element} - @ivar prefix: The I{optional} namespace prefix. - @type prefix: basestring - @ivar name: The I{unqualified} name of the attribute. - @type name: basestring - @ivar expns: An explicit namespace (xmlns="..."). - @type expns: (I{prefix}, I{name}) - @ivar nsprefixes: A mapping of prefixes to namespaces. - @type nsprefixes: dict - @ivar attributes: A list of XML attributes. - @type attributes: [I{Attribute},...] 
- @ivar text: The element's I{text} content. - @type text: basestring - @ivar children: A list of child elements. - @type children: [I{Element},...] - @cvar matcher: A collection of I{lambda} for string matching. - @cvar specialprefixes: A dictionary of builtin-special prefixes. - - """ - - matcher = { - "eq": lambda a, b: a == b, - "startswith": lambda a, b: a.startswith(b), - "endswith": lambda a, b: a.endswith(b), - "contains": lambda a, b: b in a} - - specialprefixes = {Namespace.xmlns[0]: Namespace.xmlns[1]} - - @classmethod - def buildPath(self, parent, path): - """ - Build the specified path as a/b/c. - - Any missing intermediate nodes are built automatically. - - @param parent: A parent element on which the path is built. - @type parent: I{Element} - @param path: A simple path separated by (/). - @type path: basestring - @return: The leaf node of I{path}. - @rtype: L{Element} - - """ - for tag in path.split("/"): - child = parent.getChild(tag) - if child is None: - child = Element(tag, parent) - parent = child - return child - - def __init__(self, name, parent=None, ns=None): - """ - @param name: The element's (tag) name. May contain a prefix. - @type name: basestring - @param parent: An optional parent element. - @type parent: I{Element} - @param ns: An optional namespace. - @type ns: (I{prefix}, I{name}) - - """ - self.rename(name) - self.expns = None - self.nsprefixes = {} - self.attributes = [] - self.text = None - if parent is not None and not isinstance(parent, Element): - raise Exception("parent (%s) not-valid" % - (parent.__class__.__name__,)) - self.parent = parent - self.children = [] - self.applyns(ns) - - def rename(self, name): - """ - Rename the element. - - @param name: A new name for the element. - @type name: basestring - - """ - if name is None: - raise Exception("name (%s) not-valid" % (name,)) - self.prefix, self.name = splitPrefix(name) - - def setPrefix(self, p, u=None): - """ - Set the element namespace prefix. 
- - @param p: A new prefix for the element. - @type p: basestring - @param u: A namespace URI to be mapped to the prefix. - @type u: basestring - @return: self - @rtype: L{Element} - - """ - self.prefix = p - if p is not None and u is not None: - self.expns = None - self.addPrefix(p, u) - return self - - def qname(self): - """ - Get this element's B{fully} qualified name. - - @return: The fully qualified name. - @rtype: basestring - - """ - if self.prefix is None: - return self.name - return "%s:%s" % (self.prefix, self.name) - - def getRoot(self): - """ - Get the root (top) node of the tree. - - @return: The I{top} node of this tree. - @rtype: I{Element} - - """ - if self.parent is None: - return self - return self.parent.getRoot() - - def clone(self, parent=None): - """ - Deep clone of this element and children. - - @param parent: An optional parent for the copied fragment. - @type parent: I{Element} - @return: A deep copy parented by I{parent} - @rtype: I{Element} - - """ - root = Element(self.qname(), parent, self.namespace()) - for a in self.attributes: - root.append(a.clone(self)) - for c in self.children: - root.append(c.clone(self)) - for ns in list(self.nsprefixes.items()): - root.addPrefix(ns[0], ns[1]) - return root - - def detach(self): - """ - Detach from parent. - - @return: This element removed from its parent's child list and - I{parent}=I{None}. - @rtype: L{Element} - - """ - if self.parent is not None: - if self in self.parent.children: - self.parent.children.remove(self) - self.parent = None - return self - - def set(self, name, value): - """ - Set an attribute's value. - - @param name: The name of the attribute. - @type name: basestring - @param value: The attribute value. - @type value: basestring - @see: __setitem__() - - """ - attr = self.getAttribute(name) - if attr is None: - attr = Attribute(name, value) - self.append(attr) - else: - attr.setValue(value) - - def unset(self, name): - """ - Unset (remove) an attribute. 
- - @param name: The attribute name. - @type name: str - @return: self - @rtype: L{Element} - - """ - try: - attr = self.getAttribute(name) - self.attributes.remove(attr) - except Exception: - pass - return self - - def get(self, name, ns=None, default=None): - """ - Get the value of an attribute by name. - - @param name: The name of the attribute. - @type name: basestring - @param ns: The optional attribute's namespace. - @type ns: (I{prefix}, I{name}) - @param default: An optional value to be returned when either the - attribute does not exist or has no value. - @type default: basestring - @return: The attribute's value or I{default}. - @rtype: basestring - @see: __getitem__() - - """ - attr = self.getAttribute(name, ns) - if attr is None or attr.value is None: - return default - return attr.getValue() - - def setText(self, value): - """ - Set the element's L{Text} content. - - @param value: The element's text value. - @type value: basestring - @return: self - @rtype: I{Element} - - """ - if not isinstance(value, Text): - value = Text(value) - self.text = value - return self - - def getText(self, default=None): - """ - Get the element's L{Text} content with optional default. - - @param default: A value to be returned when no text content exists. - @type default: basestring - @return: The text content, or I{default}. - @rtype: L{Text} - - """ - if self.hasText(): - return self.text - return default - - def trim(self): - """ - Trim leading and trailing whitespace. - - @return: self - @rtype: L{Element} - - """ - if self.hasText(): - self.text = self.text.trim() - return self - - def hasText(self): - """ - Get whether the element has non-empty I{text} string. - - @return: True when has I{text}. - @rtype: boolean - - """ - return bool(self.text) - - def namespace(self): - """ - Get the element's namespace. - - @return: The element's namespace by resolving the prefix, the explicit - namespace or the inherited namespace. 
- @rtype: (I{prefix}, I{name}) - - """ - if self.prefix is None: - return self.defaultNamespace() - return self.resolvePrefix(self.prefix) - - def defaultNamespace(self): - """ - Get the default (unqualified namespace). - - This is the expns of the first node (looking up the tree) that has it - set. - - @return: The namespace of a node when not qualified. - @rtype: (I{prefix}, I{name}) - - """ - p = self - while p is not None: - if p.expns is not None: - return None, p.expns - p = p.parent - return Namespace.default - - def append(self, objects): - """ - Append the specified child based on whether it is an element or an - attribute. - - @param objects: A (single|collection) of attribute(s) or element(s) to - be added as children. - @type objects: (L{Element}|L{Attribute}) - @return: self - @rtype: L{Element} - - """ - if not isinstance(objects, (list, tuple)): - objects = (objects,) - for child in objects: - if isinstance(child, Element): - self.children.append(child) - child.parent = self - continue - if isinstance(child, Attribute): - self.attributes.append(child) - child.parent = self - continue - raise Exception("append %s not-valid" % - (child.__class__.__name__,)) - return self - - def insert(self, objects, index=0): - """ - Insert an L{Element} content at the specified index. - - @param objects: A (single|collection) of attribute(s) or element(s) to - be added as children. - @type objects: (L{Element}|L{Attribute}) - @param index: The position in the list of children to insert. - @type index: int - @return: self - @rtype: L{Element} - - """ - objects = (objects,) - for child in objects: - if not isinstance(child, Element): - raise Exception("append %s not-valid" % - (child.__class__.__name__,)) - self.children.insert(index, child) - child.parent = self - return self - - def remove(self, child): - """ - Remove the specified child element or attribute. - - @param child: A child to remove. 
- @type child: L{Element}|L{Attribute} - @return: The detached I{child} when I{child} is an element, else None. - @rtype: L{Element}|None - - """ - if isinstance(child, Element): - return child.detach() - if isinstance(child, Attribute): - self.attributes.remove(child) - - def replaceChild(self, child, content): - """ - Replace I{child} with the specified I{content}. - - @param child: A child element. - @type child: L{Element} - @param content: An element or collection of elements. - @type content: L{Element} or [L{Element},...] - - """ - if child not in self.children: - raise Exception("child not-found") - index = self.children.index(child) - self.remove(child) - if not isinstance(content, (list, tuple)): - content = (content,) - for node in content: - self.children.insert(index, node.detach()) - node.parent = self - index += 1 - - def getAttribute(self, name, ns=None, default=None): - """ - Get an attribute by name and (optional) namespace. - - @param name: The name of a contained attribute (may contain prefix). - @type name: basestring - @param ns: An optional namespace - @type ns: (I{prefix}, I{name}) - @param default: Returned when attribute not-found. - @type default: L{Attribute} - @return: The requested attribute object. - @rtype: L{Attribute} - - """ - if ns is None: - prefix, name = splitPrefix(name) - if prefix is not None: - ns = self.resolvePrefix(prefix) - for a in self.attributes: - if a.match(name, ns): - return a - return default - - def getChild(self, name, ns=None, default=None): - """ - Get a child by (optional) name and/or (optional) namespace. - - @param name: The name of a child element (may contain prefix). - @type name: basestring - @param ns: An optional namespace used to match the child. - @type ns: (I{prefix}, I{name}) - @param default: Returned when child not-found. - @type default: L{Element} - @return: The requested child, or I{default} when not-found. 
- @rtype: L{Element} - - """ - if ns is None: - prefix, name = splitPrefix(name) - if prefix is not None: - ns = self.resolvePrefix(prefix) - for c in self.children: - if c.match(name, ns): - return c - return default - - def childAtPath(self, path): - """ - Get a child at I{path} where I{path} is a (/) separated list of element - names that are expected to be children. - - @param path: A (/) separated list of element names. - @type path: basestring - @return: The leaf node at the end of I{path}. - @rtype: L{Element} - - """ - result = None - node = self - for name in path.split("/"): - if not name: - continue - ns = None - prefix, name = splitPrefix(name) - if prefix is not None: - ns = node.resolvePrefix(prefix) - result = node.getChild(name, ns) - if result is None: - return - node = result - return result - - def childrenAtPath(self, path): - """ - Get a list of children at I{path} where I{path} is a (/) separated list - of element names expected to be children. - - @param path: A (/) separated list of element names. - @type path: basestring - @return: The collection leaf nodes at the end of I{path}. - @rtype: [L{Element},...] - - """ - parts = [p for p in path.split("/") if p] - if len(parts) == 1: - return self.getChildren(path) - return self.__childrenAtPath(parts) - - def getChildren(self, name=None, ns=None): - """ - Get a list of children by (optional) name and/or (optional) namespace. - - @param name: The name of a child element (may contain a prefix). - @type name: basestring - @param ns: An optional namespace used to match the child. - @type ns: (I{prefix}, I{name}) - @return: The list of matching children. - @rtype: [L{Element},...] - - """ - if ns is None: - if name is None: - return self.children - prefix, name = splitPrefix(name) - if prefix is not None: - ns = self.resolvePrefix(prefix) - return [c for c in self.children if c.match(name, ns)] - - def detachChildren(self): - """ - Detach and return this element's children. 
- - @return: The element's children (detached). - @rtype: [L{Element},...] - - """ - detached = self.children - self.children = [] - for child in detached: - child.parent = None - return detached - - def resolvePrefix(self, prefix, default=Namespace.default): - """ - Resolve the specified prefix to a namespace. The I{nsprefixes} is - searched. If not found, walk up the tree until either resolved or the - top of the tree is reached. Searching up the tree provides for - inherited mappings. - - @param prefix: A namespace prefix to resolve. - @type prefix: basestring - @param default: An optional value to be returned when the prefix cannot - be resolved. - @type default: (I{prefix}, I{URI}) - @return: The namespace that is mapped to I{prefix} in this context. - @rtype: (I{prefix}, I{URI}) - - """ - n = self - while n is not None: - if prefix in n.nsprefixes: - return prefix, n.nsprefixes[prefix] - if prefix in self.specialprefixes: - return prefix, self.specialprefixes[prefix] - n = n.parent - return default - - def addPrefix(self, p, u): - """ - Add or update a prefix mapping. - - @param p: A prefix. - @type p: basestring - @param u: A namespace URI. - @type u: basestring - @return: self - @rtype: L{Element} - """ - self.nsprefixes[p] = u - return self - - def updatePrefix(self, p, u): - """ - Update (redefine) a prefix mapping for the branch. - - @param p: A prefix. - @type p: basestring - @param u: A namespace URI. - @type u: basestring - @return: self - @rtype: L{Element} - @note: This method traverses down the entire branch! - - """ - if p in self.nsprefixes: - self.nsprefixes[p] = u - for c in self.children: - c.updatePrefix(p, u) - return self - - def clearPrefix(self, prefix): - """ - Clear the specified prefix from the prefix mappings. - - @param prefix: A prefix to clear. 
- @type prefix: basestring - @return: self - @rtype: L{Element} - - """ - if prefix in self.nsprefixes: - del self.nsprefixes[prefix] - return self - - def findPrefix(self, uri, default=None): - """ - Find the first prefix that has been mapped to a namespace URI. - - The local mapping is searched, then walks up the tree until it reaches - the top or finds a match. - - @param uri: A namespace URI. - @type uri: basestring - @param default: A default prefix when not found. - @type default: basestring - @return: A mapped prefix. - @rtype: basestring - - """ - for item in list(self.nsprefixes.items()): - if item[1] == uri: - return item[0] - for item in list(self.specialprefixes.items()): - if item[1] == uri: - return item[0] - if self.parent is not None: - return self.parent.findPrefix(uri, default) - return default - - def findPrefixes(self, uri, match="eq"): - """ - Find all prefixes that have been mapped to a namespace URI. - - The local mapping is searched, then walks up the tree until it reaches - the top, collecting all matches. - - @param uri: A namespace URI. - @type uri: basestring - @param match: A matching function L{Element.matcher}. - @type match: basestring - @return: A list of mapped prefixes. - @rtype: [basestring,...] - - """ - result = [] - for item in list(self.nsprefixes.items()): - if self.matcher[match](item[1], uri): - prefix = item[0] - result.append(prefix) - for item in list(self.specialprefixes.items()): - if self.matcher[match](item[1], uri): - prefix = item[0] - result.append(prefix) - if self.parent is not None: - result += self.parent.findPrefixes(uri, match) - return result - - def promotePrefixes(self): - """ - Push prefix declarations up the tree as far as possible. - - Prefix mapping are pushed to its parent unless the parent has the - prefix mapped to another URI or the parent has the prefix. This is - propagated up the tree until the top is reached. 
- - @return: self - @rtype: L{Element} - - """ - for c in self.children: - c.promotePrefixes() - if self.parent is None: - return - for p, u in list(self.nsprefixes.items()): - if p in self.parent.nsprefixes: - pu = self.parent.nsprefixes[p] - if pu == u: - del self.nsprefixes[p] - continue - if p != self.parent.prefix: - self.parent.nsprefixes[p] = u - del self.nsprefixes[p] - return self - - def refitPrefixes(self): - """ - Refit namespace qualification by replacing prefixes with explicit - namespaces. Also purges prefix mapping table. - - @return: self - @rtype: L{Element} - - """ - for c in self.children: - c.refitPrefixes() - if self.prefix is not None: - ns = self.resolvePrefix(self.prefix) - if ns[1] is not None: - self.expns = ns[1] - self.prefix = None - self.nsprefixes = {} - return self - - def normalizePrefixes(self): - """ - Normalize the namespace prefixes. - - This generates unique prefixes for all namespaces. Then retrofits all - prefixes and prefix mappings. Further, it will retrofix attribute - values that have values containing (:). - - @return: self - @rtype: L{Element} - - """ - PrefixNormalizer.apply(self) - return self - - def isempty(self, content=True): - """ - Get whether the element has no children. - - @param content: Test content (children & text) only. - @type content: boolean - @return: True when element has not children. - @rtype: boolean - - """ - nochildren = not self.children - notext = self.text is None - nocontent = nochildren and notext - if content: - return nocontent - noattrs = not len(self.attributes) - return nocontent and noattrs - - def isnil(self): - """ - Get whether the element is I{nil} as defined by having an - I{xsi:nil="true"} attribute. 
- - @return: True if I{nil}, else False - @rtype: boolean - - """ - nilattr = self.getAttribute("nil", ns=Namespace.xsins) - return nilattr is not None and (nilattr.getValue().lower() == "true") - - def setnil(self, flag=True): - """ - Set this node to I{nil} as defined by having an I{xsi:nil}=I{flag} - attribute. - - @param flag: A flag indicating how I{xsi:nil} will be set. - @type flag: boolean - @return: self - @rtype: L{Element} - - """ - p, u = Namespace.xsins - name = ":".join((p, "nil")) - self.set(name, str(flag).lower()) - self.addPrefix(p, u) - if flag: - self.text = None - return self - - def applyns(self, ns): - """ - Apply the namespace to this node. - - If the prefix is I{None} then this element's explicit namespace - I{expns} is set to the URI defined by I{ns}. Otherwise, the I{ns} is - simply mapped. - - @param ns: A namespace. - @type ns: (I{prefix}, I{URI}) - - """ - if ns is None: - return - if not isinstance(ns, (list, tuple)): - raise Exception("namespace must be a list or a tuple") - if ns[0] is None: - self.expns = ns[1] - else: - self.prefix = ns[0] - self.nsprefixes[ns[0]] = ns[1] - - def str(self, indent=0): - """ - Get a string representation of this XML fragment. - - @param indent: The indent to be used in formatting the output. - @type indent: int - @return: A I{pretty} string. - @rtype: basestring - - """ - tab = "%*s" % (indent * 3, "") - result = [] - result.append("%s<%s" % (tab, self.qname())) - result.append(self.nsdeclarations()) - for a in self.attributes: - result.append(" %s" % (str(a),)) - if self.isempty(): - result.append("/>") - return "".join(result) - result.append(">") - if self.hasText(): - result.append(self.text.escape()) - for c in self.children: - result.append("\n") - result.append(c.str(indent + 1)) - if len(self.children): - result.append("\n%s" % (tab,)) - result.append("" % (self.qname(),)) - return "".join(result) - - def plain(self): - """ - Get a string representation of this XML fragment. 
- - @return: A I{plain} string. - @rtype: basestring - - """ - result = ["<%s" % (self.qname(),), self.nsdeclarations()] - for a in self.attributes: - result.append(" %s" % (str(a),)) - if self.isempty(): - result.append("/>") - return "".join(result) - result.append(">") - if self.hasText(): - result.append(self.text.escape()) - for c in self.children: - result.append(c.plain()) - result.append("" % (self.qname(),)) - return "".join(result) - - def nsdeclarations(self): - """ - Get a string representation for all namespace declarations as xmlns="" - and xmlns:p="". - - @return: A separated list of declarations. - @rtype: basestring - - """ - s = [] - myns = None, self.expns - if self.parent is None: - pns = Namespace.default - else: - pns = None, self.parent.expns - if myns[1] != pns[1]: - if self.expns is not None: - s.append(' xmlns="%s"' % (self.expns,)) - for item in list(self.nsprefixes.items()): - p, u = item - if self.parent is not None: - ns = self.parent.resolvePrefix(p) - if ns[1] == u: - continue - s.append(' xmlns:%s="%s"' % (p, u)) - return "".join(s) - - def match(self, name=None, ns=None): - """ - Match by (optional) name and/or (optional) namespace. - - @param name: The optional element tag name. - @type name: str - @param ns: An optional namespace. - @type ns: (I{prefix}, I{name}) - @return: True if matched. - @rtype: boolean - - """ - byname = name is None or (self.name == name) - byns = ns is None or (self.namespace()[1] == ns[1]) - return byname and byns - - def branch(self): - """ - Get a flattened representation of the branch. - - @return: A flat list of nodes. - @rtype: [L{Element},...] - - """ - branch = [self] - for c in self.children: - branch += c.branch() - return branch - - def ancestors(self): - """ - Get a list of ancestors. - - @return: A list of ancestors. - @rtype: [L{Element},...] 
- - """ - ancestors = [] - p = self.parent - while p is not None: - ancestors.append(p) - p = p.parent - return ancestors - - def walk(self, visitor): - """ - Walk the branch and call the visitor function on each node. - - @param visitor: A function. - @type visitor: single argument function - @return: self - @rtype: L{Element} - - """ - visitor(self) - for c in self.children: - c.walk(visitor) - return self - - def prune(self): - """Prune the branch of empty nodes.""" - pruned = [] - for c in self.children: - c.prune() - if c.isempty(False): - pruned.append(c) - for p in pruned: - self.children.remove(p) - - def __childrenAtPath(self, parts): - result = [] - node = self - ancestors = parts[:-1] - leaf = parts[-1] - for name in ancestors: - ns = None - prefix, name = splitPrefix(name) - if prefix is not None: - ns = node.resolvePrefix(prefix) - child = node.getChild(name, ns) - if child is None: - break - node = child - if child is not None: - ns = None - prefix, leaf = splitPrefix(leaf) - if prefix is not None: - ns = node.resolvePrefix(prefix) - result = child.getChildren(leaf) - return result - - def __len__(self): - return len(self.children) - - def __getitem__(self, index): - if isinstance(index, str): - return self.get(index) - if index < len(self.children): - return self.children[index] - - def __setitem__(self, index, value): - if isinstance(index, str): - self.set(index, value) - else: - if index < len(self.children) and isinstance(value, Element): - self.children.insert(index, value) - - def __eq__(self, rhs): - return (isinstance(rhs, Element) and - self.match(rhs.name, rhs.namespace())) - - def __repr__(self): - return "Element (prefix=%s, name=%s)" % (self.prefix, self.name) - - def __unicode__(self): - return self.str() - - def __iter__(self): - return NodeIterator(self) - - -class NodeIterator: - """ - The L{Element} child node iterator. - - @ivar pos: The current position - @type pos: int - @ivar children: A list of a child nodes. 
- @type children: [L{Element},...] - - """ - - def __init__(self, parent): - """ - @param parent: An element to iterate. - @type parent: L{Element} - - """ - self.pos = 0 - self.children = parent.children - - def __next__(self): - """ - Get the next child. - - @return: The next child. - @rtype: L{Element} - @raise StopIterator: At the end. - - """ - try: - child = self.children[self.pos] - self.pos += 1 - return child - except Exception: - raise StopIteration() - - -class PrefixNormalizer: - """ - The prefix normalizer provides namespace prefix normalization. - - @ivar node: A node to normalize. - @type node: L{Element} - @ivar branch: The nodes flattened branch. - @type branch: [L{Element},...] - @ivar namespaces: A unique list of namespaces (URI). - @type namespaces: [str,...] - @ivar prefixes: A reverse dict of prefixes. - @type prefixes: {u: p} - - """ - - @classmethod - def apply(cls, node): - """ - Normalize the specified node. - - @param node: A node to normalize. - @type node: L{Element} - @return: The normalized node. - @rtype: L{Element} - - """ - return PrefixNormalizer(node).refit() - - def __init__(self, node): - """ - @param node: A node to normalize. - @type node: L{Element} - - """ - self.node = node - self.branch = node.branch() - self.namespaces = self.getNamespaces() - self.prefixes = self.genPrefixes() - - def getNamespaces(self): - """ - Get the I{unique} set of namespaces referenced in the branch. - - @return: A set of namespaces. - @rtype: set - - """ - s = set() - for n in self.branch + self.node.ancestors(): - if self.permit(n.expns): - s.add(n.expns) - s = s.union(self.pset(n)) - return s - - def pset(self, n): - """ - Convert the nodes nsprefixes into a set. - - @param n: A node. - @type n: L{Element} - @return: A set of namespaces. 
- @rtype: set - - """ - s = set() - for ns in list(n.nsprefixes.items()): - if self.permit(ns): - s.add(ns[1]) - return s - - def genPrefixes(self): - """ - Generate a I{reverse} mapping of unique prefixes for all namespaces. - - @return: A reverse dict of prefixes. - @rtype: {u: p} - - """ - prefixes = {} - n = 0 - for u in self.namespaces: - prefixes[u] = "ns%d" % (n,) - n += 1 - return prefixes - - def refit(self): - """Refit (normalize) the prefixes in the node.""" - self.refitNodes() - self.refitMappings() - - def refitNodes(self): - """Refit (normalize) all of the nodes in the branch.""" - for n in self.branch: - if n.prefix is not None: - ns = n.namespace() - if self.permit(ns): - n.prefix = self.prefixes[ns[1]] - self.refitAttrs(n) - - def refitAttrs(self, n): - """ - Refit (normalize) all of the attributes in the node. - - @param n: A node. - @type n: L{Element} - - """ - for a in n.attributes: - self.refitAddr(a) - - def refitAddr(self, a): - """ - Refit (normalize) the attribute. - - @param a: An attribute. - @type a: L{Attribute} - - """ - if a.prefix is not None: - ns = a.namespace() - if self.permit(ns): - a.prefix = self.prefixes[ns[1]] - self.refitValue(a) - - def refitValue(self, a): - """ - Refit (normalize) the attribute's value. - - @param a: An attribute. - @type a: L{Attribute} - - """ - p, name = splitPrefix(a.getValue()) - if p is None: - return - ns = a.resolvePrefix(p) - if self.permit(ns): - p = self.prefixes[ns[1]] - a.setValue(":".join((p, name))) - - def refitMappings(self): - """Refit (normalize) all of the nsprefix mappings.""" - for n in self.branch: - n.nsprefixes = {} - n = self.node - for u, p in list(self.prefixes.items()): - n.addPrefix(p, u) - - def permit(self, ns): - """ - Get whether the I{ns} is to be normalized. - - @param ns: A namespace. - @type ns: (p, u) - @return: True if to be included. 
- @rtype: boolean - - """ - return not self.skip(ns) - - def skip(self, ns): - """ - Get whether the I{ns} is to B{not} be normalized. - - @param ns: A namespace. - @type ns: (p, u) - @return: True if to be skipped. - @rtype: boolean - - """ - return ns is None or ns in ( - Namespace.default, - Namespace.xsdns, - Namespace.xsins, - Namespace.xmlns) diff --git a/libs_crutch/contrib/suds/sax/enc.py b/libs_crutch/contrib/suds/sax/enc.py deleted file mode 100755 index 4d571c6..0000000 --- a/libs_crutch/contrib/suds/sax/enc.py +++ /dev/null @@ -1,94 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides XML I{special character} encoder classes. - -""" - -import re - - -class Encoder: - """ - An XML special character encoder/decoder. - - @cvar encodings: A mapping of special characters encoding. - @type encodings: [(str, str),...] - @cvar decodings: A mapping of special characters decoding. - @type decodings: [(str, str),...] - @cvar special: A list of special characters. - @type special: [char,...] 
- - """ - - encodings = ( - ("&(?!(amp|lt|gt|quot|apos);)", "&"), - ("<", "<"), - (">", ">"), - ('"', """), - ("'", "'")) - decodings = ( - ("<", "<"), - (">", ">"), - (""", '"'), - ("'", "'"), - ("&", "&")) - special = ("&", "<", ">", '"', "'") - - def encode(self, s): - """ - Encode special characters found in string I{s}. - - @param s: A string to encode. - @type s: str - @return: The encoded string. - @rtype: str - - """ - if isinstance(s, str) and self.__needs_encoding(s): - for x in self.encodings: - s = re.sub(x[0], x[1], s) - return s - - def decode(self, s): - """ - Decode special characters encodings found in string I{s}. - - @param s: A string to decode. - @type s: str - @return: The decoded string. - @rtype: str - - """ - if isinstance(s, str) and "&" in s: - for x in self.decodings: - s = s.replace(x[0], x[1]) - return s - - def __needs_encoding(self, s): - """ - Get whether string I{s} contains special characters. - - @param s: A string to check. - @type s: str - @return: True if needs encoding. - @rtype: boolean - - """ - if isinstance(s, str): - for c in self.special: - if c in s: - return True diff --git a/libs_crutch/contrib/suds/sax/parser.py b/libs_crutch/contrib/suds/sax/parser.py deleted file mode 100755 index b1fc0bf..0000000 --- a/libs_crutch/contrib/suds/sax/parser.py +++ /dev/null @@ -1,137 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Classes providing a (D)ocument (O)bject (M)odel representation of an XML -document. - -The goal is to provide an easy, intuitive interface for managing XML documents. -Although the term DOM is used above, this model is B{far} better. - -XML namespaces in suds are represented using a (2) element tuple containing the -prefix and the URI, e.g. I{('tns', 'http://myns')}. - -""" - -import suds -from suds import * -from suds.sax import * -from suds.sax.attribute import Attribute -from suds.sax.document import Document -from suds.sax.element import Element -from suds.sax.text import Text - -import sys -from xml.sax import make_parser, InputSource, ContentHandler -from xml.sax.handler import feature_external_ges - - -class Handler(ContentHandler): - """SAX handler.""" - - def __init__(self): - self.nodes = [Document()] - - def startElement(self, name, attrs): - top = self.top() - node = Element(str(name)) - for a in attrs.getNames(): - n = str(a) - v = str(attrs.getValue(a)) - attribute = Attribute(n, v) - if self.mapPrefix(node, attribute): - continue - node.append(attribute) - node.charbuffer = [] - top.append(node) - self.push(node) - - def mapPrefix(self, node, attribute): - if attribute.name == "xmlns": - if len(attribute.value): - node.expns = str(attribute.value) - return True - if attribute.prefix == "xmlns": - prefix = attribute.name - node.nsprefixes[prefix] = str(attribute.value) - return True - return False - - def endElement(self, name): - name = str(name) - current = self.pop() - if name != current.qname(): - raise Exception("malformed document") - if current.charbuffer: - current.text = Text("".join(current.charbuffer)) - del current.charbuffer - if current: - current.trim() - - def 
characters(self, content): - text = str(content) - node = self.top() - node.charbuffer.append(text) - - def push(self, node): - self.nodes.append(node) - return node - - def pop(self): - return self.nodes.pop() - - def top(self): - return self.nodes[-1] - - -class Parser: - """SAX parser.""" - - @classmethod - def saxparser(cls): - p = make_parser() - p.setFeature(feature_external_ges, 0) - h = Handler() - p.setContentHandler(h) - return p, h - - def parse(self, file=None, string=None): - """ - SAX parse XML text. - - @param file: Parse a python I{file-like} object. - @type file: I{file-like} object - @param string: Parse string XML. - @type string: str - @return: Parsed XML document. - @rtype: L{Document} - - """ - if file is None and string is None: - return - timer = suds.metrics.Timer() - timer.start() - source = file - if file is None: - source = InputSource(None) - source.setByteStream(suds.BytesIO(string)) - sax, handler = self.saxparser() - sax.parse(source) - timer.stop() - if file is None: - suds.metrics.log.debug("%s\nsax duration: %s", string, timer) - else: - suds.metrics.log.debug("sax (%s) duration: %s", file, timer) - return handler.nodes[0] diff --git a/libs_crutch/contrib/suds/sax/text.py b/libs_crutch/contrib/suds/sax/text.py deleted file mode 100755 index a5b8c5a..0000000 --- a/libs_crutch/contrib/suds/sax/text.py +++ /dev/null @@ -1,116 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Contains XML text classes. -""" - -from suds import * -from suds.sax import * - - -class Text(str): - """ - An XML text object used to represent text content. - @ivar lang: The (optional) language flag. - @type lang: bool - @ivar escaped: The (optional) XML special character escaped flag. - @type escaped: bool - """ - __slots__ = ('lang', 'escaped') - - @classmethod - def __valid(cls, *args): - return len(args) and args[0] is not None - - def __new__(cls, *args, **kwargs): - if cls.__valid(*args): - lang = kwargs.pop('lang', None) - escaped = kwargs.pop('escaped', False) - result = super(Text, cls).__new__(cls, *args, **kwargs) - result.lang = lang - result.escaped = escaped - else: - result = None - return result - - def escape(self): - """ - Encode (escape) special XML characters. - @return: The text with XML special characters escaped. - @rtype: L{Text} - """ - if not self.escaped: - post = sax.encoder.encode(self) - escaped = ( post != self ) - return Text(post, lang=self.lang, escaped=escaped) - return self - - def unescape(self): - """ - Decode (unescape) special XML characters. - @return: The text with escaped XML special characters decoded. 
- @rtype: L{Text} - """ - if self.escaped: - post = sax.encoder.decode(self) - return Text(post, lang=self.lang) - return self - - def trim(self): - post = self.strip() - return Text(post, lang=self.lang, escaped=self.escaped) - - def __add__(self, other): - joined = ''.join((self, other)) - result = Text(joined, lang=self.lang, escaped=self.escaped) - if isinstance(other, Text): - result.escaped = self.escaped or other.escaped - return result - - def __repr__(self): - s = [self] - if self.lang is not None: - s.append(' [%s]' % self.lang) - if self.escaped: - s.append(' ') - return ''.join(s) - - def __getstate__(self): - state = {} - for k in self.__slots__: - state[k] = getattr(self, k) - return state - - def __setstate__(self, state): - for k in self.__slots__: - setattr(self, k, state[k]) - - -class Raw(Text): - """ - Raw text which is not XML escaped. - This may include I{string} XML. - """ - def escape(self): - return self - - def unescape(self): - return self - - def __add__(self, other): - joined = ''.join((self, other)) - return Raw(joined, lang=self.lang) diff --git a/libs_crutch/contrib/suds/servicedefinition.py b/libs_crutch/contrib/suds/servicedefinition.py deleted file mode 100755 index 0318cc1..0000000 --- a/libs_crutch/contrib/suds/servicedefinition.py +++ /dev/null @@ -1,246 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The I{service definition} provides a textual representation of a service. -""" - -from suds import * -import suds.metrics as metrics -from suds.sax import Namespace - -from logging import getLogger -log = getLogger(__name__) - - -class ServiceDefinition(UnicodeMixin): - """ - A service definition provides an object used to generate a textual description - of a service. - @ivar wsdl: A wsdl. - @type wsdl: L{wsdl.Definitions} - @ivar sort_namespaces: Whether to sort namespaces on storing them. - @ivar service: The service object. - @type service: L{suds.wsdl.Service} - @ivar ports: A list of port-tuple: (port, [(method-name, pdef)]) - @type ports: [port-tuple,..] - @ivar prefixes: A list of remapped prefixes. - @type prefixes: [(prefix,uri),..] - @ivar types: A list of type definitions - @type types: [I{Type},..] - """ - - def __init__(self, wsdl, service): - """ - @param wsdl: A WSDL object - @type wsdl: L{Definitions} - @param service: A service B{name}. - @type service: str - @param service: A service B{name}. - @param sort_namespaces: Whether to sort namespaces on storing them. - """ - self.wsdl = wsdl - self.service = service - self.ports = [] - self.params = [] - self.types = [] - self.prefixes = [] - self.addports() - self.paramtypes() - self.publictypes() - self.getprefixes() - self.pushprefixes() - - def pushprefixes(self): - """ - Add our prefixes to the WSDL so that when users invoke methods - and reference the prefixes, they will resolve properly. 
- """ - for ns in self.prefixes: - self.wsdl.root.addPrefix(ns[0], ns[1]) - - def addports(self): - """ - Look through the list of service ports and construct a list of tuples - where each tuple is used to describe a port and its list of methods as: - (port, [method]). Each method is a tuple: (name, [pdef,..]) where each - pdef is a tuple: (param-name, type). - """ - timer = metrics.Timer() - timer.start() - for port in self.service.ports: - p = self.findport(port) - for op in list(port.binding.operations.values()): - m = p[0].method(op.name) - binding = m.binding.input - method = (m.name, binding.param_defs(m)) - p[1].append(method) - metrics.log.debug("method '%s' created: %s", m.name, timer) - p[1].sort() - timer.stop() - - def findport(self, port): - """ - Find and return a port tuple for the specified port. - Created and added when not found. - @param port: A port. - @type port: I{service.Port} - @return: A port tuple. - @rtype: (port, [method]) - """ - for p in self.ports: - if p[0] == p: return p - p = (port, []) - self.ports.append(p) - return p - - def getprefixes(self): - """Add prefixes for each namespace referenced by parameter types.""" - namespaces = [] - for l in (self.params, self.types): - for t,r in l: - ns = r.namespace() - if ns[1] is None: continue - if ns[1] in namespaces: continue - if Namespace.xs(ns) or Namespace.xsd(ns): - continue - namespaces.append(ns[1]) - if t == r: continue - ns = t.namespace() - if ns[1] is None: continue - if ns[1] in namespaces: continue - namespaces.append(ns[1]) - i = 0 - - if self.wsdl.options.sortNamespaces: - namespaces.sort() - - for u in namespaces: - p = self.nextprefix() - ns = (p, u) - self.prefixes.append(ns) - - def paramtypes(self): - """Get all parameter types.""" - for m in [p[1] for p in self.ports]: - for p in [p[1] for p in m]: - for pd in p: - if pd[1] in self.params: continue - item = (pd[1], pd[1].resolve()) - self.params.append(item) - - def publictypes(self): - """Get all public types.""" - 
for t in list(self.wsdl.schema.types.values()): - if t in self.params: continue - if t in self.types: continue - item = (t, t) - self.types.append(item) - self.types.sort(key=lambda x: x[0].name) - - def nextprefix(self): - """ - Get the next available prefix. This means a prefix starting with 'ns' with - a number appended as (ns0, ns1, ..) that is not already defined in the - WSDL document. - """ - used = [ns[0] for ns in self.prefixes] - used += [ns[0] for ns in list(self.wsdl.root.nsprefixes.items())] - for n in range(0,1024): - p = 'ns%d'%n - if p not in used: - return p - raise Exception('prefixes exhausted') - - def getprefix(self, u): - """ - Get the prefix for the specified namespace (URI) - @param u: A namespace URI. - @type u: str - @return: The namspace. - @rtype: (prefix, uri). - """ - for ns in Namespace.all: - if u == ns[1]: return ns[0] - for ns in self.prefixes: - if u == ns[1]: return ns[0] - raise Exception('ns (%s) not mapped' % u) - - def xlate(self, type): - """ - Get a (namespace) translated I{qualified} name for specified type. - @param type: A schema type. - @type type: I{suds.xsd.sxbasic.SchemaObject} - @return: A translated I{qualified} name. - @rtype: str - """ - resolved = type.resolve() - name = resolved.name - if type.multi_occurrence(): - name += '[]' - ns = resolved.namespace() - if ns[1] == self.wsdl.tns[1]: - return name - prefix = self.getprefix(ns[1]) - return ':'.join((prefix, name)) - - def description(self): - """ - Get a textual description of the service for which this object represents. - @return: A textual description. 
- @rtype: str - """ - s = [] - indent = (lambda n : '\n%*s'%(n*3,' ')) - s.append('Service ( %s ) tns="%s"' % (self.service.name, self.wsdl.tns[1])) - s.append(indent(1)) - s.append('Prefixes (%d)' % len(self.prefixes)) - for p in self.prefixes: - s.append(indent(2)) - s.append('%s = "%s"' % p) - s.append(indent(1)) - s.append('Ports (%d):' % len(self.ports)) - for p in self.ports: - s.append(indent(2)) - s.append('(%s)' % p[0].name) - s.append(indent(3)) - s.append('Methods (%d):' % len(p[1])) - for m in p[1]: - sig = [] - s.append(indent(4)) - sig.append(m[0]) - sig.append('(') - sig.append(', '.join("%s %s" % (self.xlate(p[1]), p[0]) for p - in m[1])) - sig.append(')') - try: - s.append(''.join(sig)) - except Exception: - pass - s.append(indent(3)) - s.append('Types (%d):' % len(self.types)) - for t in self.types: - s.append(indent(4)) - s.append(self.xlate(t[0])) - s.append('\n\n') - return ''.join(s) - - def __unicode__(self): - try: - return self.description() - except Exception as e: - log.exception(e) - return tostr(e) diff --git a/libs_crutch/contrib/suds/serviceproxy.py b/libs_crutch/contrib/suds/serviceproxy.py deleted file mode 100755 index 05708aa..0000000 --- a/libs_crutch/contrib/suds/serviceproxy.py +++ /dev/null @@ -1,80 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The service proxy provides access to web services. - -Replaced by: L{client.Client} -""" - -from suds import * -from suds.client import Client - - -class ServiceProxy(UnicodeMixin): - - """ - A lightweight soap based web service proxy. - @ivar __client__: A client. - Everything is delegated to the 2nd generation API. - @type __client__: L{Client} - @note: Deprecated, replaced by L{Client}. - """ - - def __init__(self, url, **kwargs): - """ - @param url: The URL for the WSDL. - @type url: str - @param kwargs: keyword arguments. - @keyword faults: Raise faults raised by server (default:True), - else return tuple from service method invocation as (http code, object). - @type faults: boolean - @keyword proxy: An http proxy to be specified on requests (default:{}). - The proxy is defined as {protocol:proxy,} - @type proxy: dict - """ - client = Client(url, **kwargs) - self.__client__ = client - - def get_instance(self, name): - """ - Get an instance of a WSDL type by name - @param name: The name of a type defined in the WSDL. - @type name: str - @return: An instance on success, else None - @rtype: L{sudsobject.Object} - """ - return self.__client__.factory.create(name) - - def get_enum(self, name): - """ - Get an instance of an enumeration defined in the WSDL by name. - @param name: The name of a enumeration defined in the WSDL. 
- @type name: str - @return: An instance on success, else None - @rtype: L{sudsobject.Object} - """ - return self.__client__.factory.create(name) - - def __unicode__(self): - return str(self.__client__) - - def __getattr__(self, name): - builtin = name.startswith('__') and name.endswith('__') - if builtin: - return self.__dict__[name] - else: - return getattr(self.__client__.service, name) diff --git a/libs_crutch/contrib/suds/soaparray.py b/libs_crutch/contrib/suds/soaparray.py deleted file mode 100755 index ea04fa7..0000000 --- a/libs_crutch/contrib/suds/soaparray.py +++ /dev/null @@ -1,71 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The I{soaparray} module provides XSD extensions for handling -soap (section 5) encoded arrays. -""" - -from suds import * -from logging import getLogger -from suds.xsd.sxbasic import Factory as SXFactory -from suds.xsd.sxbasic import Attribute as SXAttribute - - -class Attribute(SXAttribute): - """ - Represents an XSD that handles special - attributes that are extensions for WSDLs. - @ivar aty: Array type information. - @type aty: The value of wsdl:arrayType. 
- """ - - def __init__(self, schema, root, aty): - """ - @param aty: Array type information. - @type aty: The value of wsdl:arrayType. - """ - SXAttribute.__init__(self, schema, root) - if aty.endswith('[]'): - self.aty = aty[:-2] - else: - self.aty = aty - - def autoqualified(self): - aqs = SXAttribute.autoqualified(self) - aqs.append('aty') - return aqs - - def description(self): - d = SXAttribute.description(self) - d = d+('aty',) - return d - -# -# Builder function, only builds Attribute when arrayType -# attribute is defined on root. -# -def __fn(x, y): - ns = (None, "http://schemas.xmlsoap.org/wsdl/") - aty = y.get('arrayType', ns=ns) - if aty is None: - return SXAttribute(x, y) - return Attribute(x, y, aty) - -# -# Remap tags to __fn() builder. -# -SXFactory.maptag('attribute', __fn) diff --git a/libs_crutch/contrib/suds/store.py b/libs_crutch/contrib/suds/store.py deleted file mode 100755 index 799c74b..0000000 --- a/libs_crutch/contrib/suds/store.py +++ /dev/null @@ -1,600 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Support for holding XML document content that may then be accessed internally -by suds without having to download them from an external source. 
Also contains -XML document content to be distributed alongside the suds library. - -""" - -import suds - - -soap5_encoding_schema = suds.byte_str("""\ - - - - - - - 'root' can be used to distinguish serialization roots from other - elements that are present in a serialization but are not roots of - a serialized value graph - - - - - - - - - - - - - Attributes common to all elements that function as accessors or - represent independent (multi-ref) values. The href attribute is - intended to be used in a manner like CONREF. That is, the element - content should be empty iff the href attribute appears - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 'Array' is a complex type for accessors identified by position - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -""") - - -class DocumentStore(object): - """ - Local XML document content repository. - - Each XML document is identified by its location, i.e. URL without any - protocol identifier. Contained XML documents can be looked up using any URL - referencing that same location. 
- - """ - - def __init__(self, *args, **kwargs): - self.__store = { - 'schemas.xmlsoap.org/soap/encoding/': soap5_encoding_schema} - self.update = self.__store.update - self.update(*args, **kwargs) - - def __len__(self): - return len(self.__store) - - def open(self, url): - """ - Open a document at the specified URL. - - The document URL's needs not contain a protocol identifier, and if it - does, that protocol identifier is ignored when looking up the store - content. - - Missing documents referenced using the internal 'suds' protocol are - reported by raising an exception. For other protocols, None is returned - instead. - - @param url: A document URL. - @type url: str - @return: Document content or None if not found. - @rtype: bytes - - """ - protocol, location = self.__split(url) - content = self.__find(location) - if protocol == 'suds' and content is None: - raise Exception('location "%s" not in document store' % location) - return content - - def __find(self, location): - """ - Find the specified location in the store. - - @param location: The I{location} part of a URL. - @type location: str - @return: Document content or None if not found. - @rtype: bytes - - """ - return self.__store.get(location) - - def __split(self, url): - """ - Split the given URL into its I{protocol} & I{location} components. - - @param url: A URL. 
- @param url: str - @return: (I{protocol}, I{location}) - @rtype: (str, str) - - """ - parts = url.split('://', 1) - if len(parts) == 2: - return parts - return None, url - - -defaultDocumentStore = DocumentStore() diff --git a/libs_crutch/contrib/suds/sudsobject.py b/libs_crutch/contrib/suds/sudsobject.py deleted file mode 100755 index 6493842..0000000 --- a/libs_crutch/contrib/suds/sudsobject.py +++ /dev/null @@ -1,391 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides a collection of suds objects primarily used for highly dynamic -interactions with WSDL/XSD defined types. - -""" - -from suds import * - -from logging import getLogger -log = getLogger(__name__) - - -def items(sobject): - """ - Extract the I{items} from a suds object. - - Much like the items() method works on I{dict}. - - @param sobject: A suds object - @type sobject: L{Object} - @return: A list of items contained in I{sobject}. - @rtype: [(key, value),...] - - """ - for item in sobject: - yield item - - -def asdict(sobject): - """ - Convert a sudsobject into a dictionary. 
- - @param sobject: A suds object - @type sobject: L{Object} - @return: A python dictionary containing the items contained in I{sobject}. - @rtype: dict - - """ - return dict(items(sobject)) - -def merge(a, b): - """ - Merge all attributes and metadata from I{a} to I{b}. - - @param a: A I{source} object - @type a: L{Object} - @param b: A I{destination} object - @type b: L{Object} - - """ - for item in a: - setattr(b, item[0], item[1]) - b.__metadata__ = b.__metadata__ - return b - -def footprint(sobject): - """ - Get the I{virtual footprint} of the object. - - This is really a count of all the significant value attributes in the - branch. - - @param sobject: A suds object. - @type sobject: L{Object} - @return: The branch footprint. - @rtype: int - - """ - n = 0 - for a in sobject.__keylist__: - v = getattr(sobject, a) - if v is None: - continue - if isinstance(v, Object): - n += footprint(v) - continue - if hasattr(v, "__len__"): - if len(v): - n += 1 - continue - n += 1 - return n - - -class Factory: - - cache = {} - - @classmethod - def subclass(cls, name, bases, dict={}): - if not isinstance(bases, tuple): - bases = (bases,) - # name is of type unicode in python 2 -> not accepted by type() - name = str(name) - key = ".".join((name, str(bases))) - subclass = cls.cache.get(key) - if subclass is None: - subclass = type(name, bases, dict) - cls.cache[key] = subclass - return subclass - - @classmethod - def object(cls, classname=None, dict={}): - if classname is not None: - subclass = cls.subclass(classname, Object) - inst = subclass() - else: - inst = Object() - for a in list(dict.items()): - setattr(inst, a[0], a[1]) - return inst - - @classmethod - def metadata(cls): - return Metadata() - - @classmethod - def property(cls, name, value=None): - subclass = cls.subclass(name, Property) - return subclass(value) - - -class Object(UnicodeMixin): - - def __init__(self): - self.__keylist__ = [] - self.__printer__ = Printer() - self.__metadata__ = Metadata() - - def 
__setattr__(self, name, value): - builtin = name.startswith("__") and name.endswith("__") - if not builtin and name not in self.__keylist__: - self.__keylist__.append(name) - self.__dict__[name] = value - - def __delattr__(self, name): - try: - del self.__dict__[name] - builtin = name.startswith("__") and name.endswith("__") - if not builtin: - self.__keylist__.remove(name) - except Exception: - cls = self.__class__.__name__ - raise AttributeError("%s has no attribute '%s'" % (cls, name)) - - def __getitem__(self, name): - if isinstance(name, int): - name = self.__keylist__[int(name)] - return getattr(self, name) - - def __setitem__(self, name, value): - setattr(self, name, value) - - def __iter__(self): - return Iter(self) - - def __len__(self): - return len(self.__keylist__) - - def __contains__(self, name): - return name in self.__keylist__ - - def __repr__(self): - return str(self) - - def __unicode__(self): - return self.__printer__.tostr(self) - - -class Iter: - - def __init__(self, sobject): - self.sobject = sobject - self.keylist = self.__keylist(sobject) - self.index = 0 - - def __next__(self): - keylist = self.keylist - nkeys = len(self.keylist) - while self.index < nkeys: - k = keylist[self.index] - self.index += 1 - if hasattr(self.sobject, k): - v = getattr(self.sobject, k) - return (k, v) - raise StopIteration() - - def __keylist(self, sobject): - keylist = sobject.__keylist__ - try: - keyset = set(keylist) - ordering = sobject.__metadata__.ordering - ordered = set(ordering) - if not ordered.issuperset(keyset): - log.debug("%s must be superset of %s, ordering ignored", - keylist, ordering) - raise KeyError() - return ordering - except Exception: - return keylist - - def __iter__(self): - return self - - -class Metadata(Object): - def __init__(self): - self.__keylist__ = [] - self.__printer__ = Printer() - - -class Facade(Object): - def __init__(self, name): - Object.__init__(self) - md = self.__metadata__ - md.facade = name - - -class 
Property(Object): - - def __init__(self, value): - Object.__init__(self) - self.value = value - - def items(self): - for item in self: - if item[0] != "value": - yield item - - def get(self): - return self.value - - def set(self, value): - self.value = value - return self - - -class Printer: - """Pretty printing of a Object object.""" - - @classmethod - def indent(cls, n): - return "%*s" % (n * 3, " ") - - def tostr(self, object, indent=-2): - """Get s string representation of object.""" - history = [] - return self.process(object, history, indent) - - def process(self, object, h, n=0, nl=False): - """Print object using the specified indent (n) and newline (nl).""" - if object is None: - return "None" - if isinstance(object, Object): - if len(object) == 0: - return "" - return self.print_object(object, h, n + 2, nl) - if isinstance(object, dict): - if len(object) == 0: - return "" - return self.print_dictionary(object, h, n + 2, nl) - if isinstance(object, (list, tuple)): - if len(object) == 0: - return "" - return self.print_collection(object, h, n + 2) - if isinstance(object, str): - return '"%s"' % (tostr(object),) - return "%s" % (tostr(object),) - - def print_object(self, d, h, n, nl=False): - """Print complex using the specified indent (n) and newline (nl).""" - s = [] - cls = d.__class__ - if d in h: - s.append("(") - s.append(cls.__name__) - s.append(")") - s.append("...") - return "".join(s) - h.append(d) - if nl: - s.append("\n") - s.append(self.indent(n)) - if cls != Object: - s.append("(") - if isinstance(d, Facade): - s.append(d.__metadata__.facade) - else: - s.append(cls.__name__) - s.append(")") - s.append("{") - for item in d: - if self.exclude(d, item): - continue - item = self.unwrap(d, item) - s.append("\n") - s.append(self.indent(n+1)) - if isinstance(item[1], (list,tuple)): - s.append(item[0]) - s.append("[]") - else: - s.append(item[0]) - s.append(" = ") - s.append(self.process(item[1], h, n, True)) - s.append("\n") - s.append(self.indent(n)) 
- s.append("}") - h.pop() - return "".join(s) - - def print_dictionary(self, d, h, n, nl=False): - """Print complex using the specified indent (n) and newline (nl).""" - if d in h: - return "{}..." - h.append(d) - s = [] - if nl: - s.append("\n") - s.append(self.indent(n)) - s.append("{") - for item in list(d.items()): - s.append("\n") - s.append(self.indent(n+1)) - if isinstance(item[1], (list,tuple)): - s.append(tostr(item[0])) - s.append("[]") - else: - s.append(tostr(item[0])) - s.append(" = ") - s.append(self.process(item[1], h, n, True)) - s.append("\n") - s.append(self.indent(n)) - s.append("}") - h.pop() - return "".join(s) - - def print_collection(self, c, h, n): - """Print collection using the specified indent (n) and newline (nl).""" - if c in h: - return "[]..." - h.append(c) - s = [] - for item in c: - s.append("\n") - s.append(self.indent(n)) - s.append(self.process(item, h, n - 2)) - s.append(",") - h.pop() - return "".join(s) - - def unwrap(self, d, item): - """Translate (unwrap) using an optional wrapper function.""" - try: - md = d.__metadata__ - pmd = getattr(md, "__print__", None) - if pmd is None: - return item - wrappers = getattr(pmd, "wrappers", {}) - fn = wrappers.get(item[0], lambda x: x) - return (item[0], fn(item[1])) - except Exception: - pass - return item - - def exclude(self, d, item): - """Check metadata for excluded items.""" - try: - md = d.__metadata__ - pmd = getattr(md, "__print__", None) - if pmd is None: - return False - excludes = getattr(pmd, "excludes", []) - return item[0] in excludes - except Exception: - pass - return False diff --git a/libs_crutch/contrib/suds/transport/__init__.py b/libs_crutch/contrib/suds/transport/__init__.py deleted file mode 100755 index d9b438e..0000000 --- a/libs_crutch/contrib/suds/transport/__init__.py +++ /dev/null @@ -1,166 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the 
-# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Contains transport interface (classes). - -""" - -from suds import UnicodeMixin - -import sys - - -class TransportError(Exception): - def __init__(self, reason, httpcode, fp=None): - Exception.__init__(self, reason) - self.httpcode = httpcode - self.fp = fp - - -class Request(UnicodeMixin): - """ - A transport request. - - Request URL input data may be given as either a byte or a unicode string, - but it may not under any circumstances contain non-ASCII characters. The - URL value is stored as a str value internally. With Python versions prior - to 3.0, str is the byte string type, while with later Python versions it is - the unicode string type. - - @ivar url: The URL for the request. - @type url: str - @ivar message: The optional message to be sent in the request body. - @type message: bytes|None - @type timeout: int|None - @ivar headers: The HTTP headers to be used for the request. - @type headers: dict - - """ - - def __init__(self, url, message=None, timeout=None): - """ - Raised exception in case of detected non-ASCII URL characters may be - either UnicodeEncodeError or UnicodeDecodeError, depending on the used - Python version's str type and the exact value passed as URL input data. - - @param url: The URL for the request. 
- @type url: bytes|str|unicode - @param message: The optional message to be sent in the request body. - @type message: bytes|None - - """ - self.__set_URL(url) - self.headers = {} - self.message = message - self.timeout = timeout - - def __unicode__(self): - result = ["URL: %s\nHEADERS: %s" % (self.url, self.headers)] - if self.message is not None: - result.append("MESSAGE:") - result.append(self.message.decode("raw_unicode_escape")) - return "\n".join(result) - - def __set_URL(self, url): - """ - URL is stored as a str internally and must not contain ASCII chars. - - Raised exception in case of detected non-ASCII URL characters may be - either UnicodeEncodeError or UnicodeDecodeError, depending on the used - Python version's str type and the exact value passed as URL input data. - - """ - if isinstance(url, str): - url.encode("ascii") # Check for non-ASCII characters. - self.url = url - elif sys.version_info < (3, 0): - self.url = url.encode("ascii") - else: - self.url = url.decode("ascii") - - -class Reply(UnicodeMixin): - """ - A transport reply. - - @ivar code: The HTTP code returned. - @type code: int - @ivar headers: The HTTP headers included in the received reply. - @type headers: dict - @ivar message: The message received as a reply. - @type message: bytes - - """ - - def __init__(self, code, headers, message): - """ - @param code: The HTTP code returned. - @type code: int - @param headers: The HTTP headers included in the received reply. - @type headers: dict - @param message: The (optional) message received as a reply. 
- @type message: bytes - - """ - self.code = code - self.headers = headers - self.message = message - - def __unicode__(self): - return """\ -CODE: %s -HEADERS: %s -MESSAGE: -%s""" % (self.code, self.headers, self.message.decode("raw_unicode_escape")) - - -class Transport(object): - """The transport I{interface}.""" - - def __init__(self): - from suds.transport.options import Options - self.options = Options() - - def open(self, request): - """ - Open the URL in the specified request. - - @param request: A transport request. - @type request: L{Request} - @return: An input stream. - @rtype: stream - @raise TransportError: On all transport errors. - - """ - raise Exception('not-implemented') - - def send(self, request): - """ - Send SOAP message. Implementations are expected to handle: - - proxies - - I{HTTP} headers - - cookies - - sending message - - brokering exceptions into L{TransportError} - - @param request: A transport request. - @type request: L{Request} - @return: The reply - @rtype: L{Reply} - @raise TransportError: On all transport errors. - - """ - raise Exception('not-implemented') diff --git a/libs_crutch/contrib/suds/transport/http.py b/libs_crutch/contrib/suds/transport/http.py deleted file mode 100755 index 7122938..0000000 --- a/libs_crutch/contrib/suds/transport/http.py +++ /dev/null @@ -1,249 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Basic HTTP transport implementation classes. - -""" - -from suds.properties import Unskin -from suds.transport import * - -import base64 -from http.cookiejar import CookieJar -import http.client -import socket -import sys -import urllib.request, urllib.error, urllib.parse -import gzip -import zlib - -from logging import getLogger -log = getLogger(__name__) - - -class HttpTransport(Transport): - """ - Basic HTTP transport implemented using using urllib2, that provides for - cookies & proxies but no authentication. - - """ - - def __init__(self, **kwargs): - """ - @param kwargs: Keyword arguments. - - B{proxy} - An HTTP proxy to be specified on requests. - The proxy is defined as {protocol:proxy,} - - type: I{dict} - - default: {} - - B{timeout} - Set the URL open timeout (seconds). 
- - type: I{float} - - default: 90 - - """ - Transport.__init__(self) - Unskin(self.options).update(kwargs) - self.cookiejar = CookieJar() - self.proxy = {} - self.urlopener = None - - def open(self, request): - try: - url = self.__get_request_url_for_urllib(request) - headers = request.headers - log.debug('opening (%s)', url) - u2request = urllib.request.Request(url, headers=headers) - self.proxy = self.options.proxy - return self.u2open(u2request) - except urllib.error.HTTPError as e: - raise TransportError(str(e), e.code, e.fp) - - def send(self, request): - url = self.__get_request_url_for_urllib(request) - msg = request.message - headers = request.headers - if 'Content-Encoding' in headers: - encoding = headers['Content-Encoding'] - if encoding == 'gzip': - msg = gzip.compress(msg) - elif encoding == 'deflate': - msg = zlib.compress(msg) - try: - u2request = urllib.request.Request(url, msg, headers) - self.addcookies(u2request) - self.proxy = self.options.proxy - request.headers.update(u2request.headers) - log.debug('sending:\n%s', request) - fp = self.u2open(u2request, timeout=request.timeout) - self.getcookies(fp, u2request) - headers = fp.headers - if sys.version_info < (3, 0): - headers = headers.dict - message = fp.read() - if 'Content-Encoding' in headers: - encoding = headers['Content-Encoding'] - if encoding == 'gzip': - message = gzip.decompress(message) - elif encoding == 'deflate': - message = zlib.decompress(message) - reply = Reply(http.client.OK, headers, message) - log.debug('received:\n%s', reply) - return reply - except urllib.error.HTTPError as e: - if e.code not in (http.client.ACCEPTED, http.client.NO_CONTENT): - raise TransportError(e.msg, e.code, e.fp) - - def addcookies(self, u2request): - """ - Add cookies in the cookiejar to the request. - - @param u2request: A urllib2 request. - @rtype: u2request: urllib2.Request. 
- - """ - self.cookiejar.add_cookie_header(u2request) - - def getcookies(self, fp, u2request): - """ - Add cookies in the request to the cookiejar. - - @param u2request: A urllib2 request. - @rtype: u2request: urllib2.Request. - - """ - self.cookiejar.extract_cookies(fp, u2request) - - def u2open(self, u2request, timeout=None): - """ - Open a connection. - - @param u2request: A urllib2 request. - @type u2request: urllib2.Request. - @return: The opened file-like urllib2 object. - @rtype: fp - - """ - tm = timeout or self.options.timeout - url = self.u2opener() - if (sys.version_info < (3, 0)) and (self.u2ver() < 2.6): - socket.setdefaulttimeout(tm) - return url.open(u2request) - return url.open(u2request, timeout=tm) - - def u2opener(self): - """ - Create a urllib opener. - - @return: An opener. - @rtype: I{OpenerDirector} - - """ - if self.urlopener is None: - return urllib.request.build_opener(*self.u2handlers()) - return self.urlopener - - def u2handlers(self): - """ - Get a collection of urllib handlers. - - @return: A list of handlers to be installed in the opener. - @rtype: [Handler,...] - - """ - return [urllib.request.ProxyHandler(self.proxy)] - - def u2ver(self): - """ - Get the major/minor version of the urllib2 lib. - - @return: The urllib2 version. - @rtype: float - - """ - try: - part = urllib.request.__version__.split('.', 1) - return float('.'.join(part)) - except Exception as e: - log.exception(e) - return 0 - - def __deepcopy__(self, memo={}): - clone = self.__class__() - p = Unskin(self.options) - cp = Unskin(clone.options) - cp.update(p) - return clone - - @staticmethod - def __get_request_url_for_urllib(request): - """ - Returns the given request's URL, properly encoded for use with urllib. - - We expect that the given request object already verified that the URL - contains ASCII characters only and stored it as a native str value. 
- - urllib accepts URL information as a native str value and may break - unexpectedly if given URL information in another format. - - Python 3.x httplib.client implementation must be given a unicode string - and not a bytes object and the given string is internally converted to - a bytes object using an explicitly specified ASCII encoding. - - Python 2.7 httplib implementation expects the URL passed to it to not - be a unicode string. If it is, then passing it to the underlying - httplib Request object will cause that object to forcefully convert all - of its data to unicode, assuming that data contains ASCII data only and - raising a UnicodeDecodeError exception if it does not (caused by simple - unicode + string concatenation). - - Python 2.4 httplib implementation does not really care about this as it - does not use the internal optimization present in the Python 2.7 - implementation causing all the requested data to be converted to - unicode. - - """ - assert isinstance(request.url, str) - return request.url - - -class HttpAuthenticated(HttpTransport): - """ - Provides basic HTTP authentication for servers that do not follow the - specified challenge/response model. Appends the I{Authorization} HTTP - header with base64 encoded credentials on every HTTP request. 
- - """ - - def open(self, request): - self.addcredentials(request) - return HttpTransport.open(self, request) - - def send(self, request): - self.addcredentials(request) - return HttpTransport.send(self, request) - - def addcredentials(self, request): - credentials = self.credentials() - if None not in credentials: - credentials = ':'.join(credentials) - if sys.version_info < (3, 0): - encodedString = base64.b64encode(credentials) - else: - encodedBytes = base64.urlsafe_b64encode(credentials.encode()) - encodedString = encodedBytes.decode() - request.headers['Authorization'] = 'Basic %s' % encodedString - - def credentials(self): - return self.options.username, self.options.password diff --git a/libs_crutch/contrib/suds/transport/https.py b/libs_crutch/contrib/suds/transport/https.py deleted file mode 100755 index f1f9051..0000000 --- a/libs_crutch/contrib/suds/transport/https.py +++ /dev/null @@ -1,99 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Contains classes for authenticated HTTP transport implementations. 
- -""" - -from suds.transport import * -from suds.transport.http import HttpTransport - -import urllib.request, urllib.error, urllib.parse - - -class HttpAuthenticated(HttpTransport): - """ - Provides basic HTTP authentication that follows the RFC-2617 specification. - - As defined by specifications, credentials are provided to the server upon - request (HTTP/1.0 401 Authorization Required) by the server only. - - @ivar pm: The password manager. - @ivar handler: The authentication handler. - - """ - - def __init__(self, **kwargs): - """ - @param kwargs: Keyword arguments. - - B{proxy} - An HTTP proxy to be specified on requests. - The proxy is defined as {protocol:proxy,} - - type: I{dict} - - default: {} - - B{timeout} - Set the URL open timeout (seconds). - - type: I{float} - - default: 90 - - B{username} - The username used for HTTP authentication. - - type: I{str} - - default: None - - B{password} - The password used for HTTP authentication. - - type: I{str} - - default: None - - """ - HttpTransport.__init__(self, **kwargs) - self.pm = urllib.request.HTTPPasswordMgrWithDefaultRealm() - - def open(self, request): - self.addcredentials(request) - return HttpTransport.open(self, request) - - def send(self, request): - self.addcredentials(request) - return HttpTransport.send(self, request) - - def addcredentials(self, request): - credentials = self.credentials() - if None not in credentials: - u = credentials[0] - p = credentials[1] - self.pm.add_password(None, request.url, u, p) - - def credentials(self): - return self.options.username, self.options.password - - def u2handlers(self): - handlers = HttpTransport.u2handlers(self) - handlers.append(urllib.request.HTTPBasicAuthHandler(self.pm)) - return handlers - - -class WindowsHttpAuthenticated(HttpAuthenticated): - """ - Provides Windows (NTLM) based HTTP authentication. 
- - @author: Christopher Bess - - """ - - def u2handlers(self): - try: - from ntlm import HTTPNtlmAuthHandler - except ImportError: - raise Exception("Cannot import python-ntlm module") - handlers = HttpTransport.u2handlers(self) - handlers.append(HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(self.pm)) - return handlers diff --git a/libs_crutch/contrib/suds/transport/options.py b/libs_crutch/contrib/suds/transport/options.py deleted file mode 100755 index ccaee82..0000000 --- a/libs_crutch/contrib/suds/transport/options.py +++ /dev/null @@ -1,58 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Classes modeling transport options. - -""" - - -from suds.transport import * -from suds.properties import * - - -class Options(Skin): - """ - Options: - - B{proxy} - An HTTP proxy to be specified on requests, defined as - {protocol:proxy, ...}. - - type: I{dict} - - default: {} - - B{timeout} - Set the URL open timeout (seconds). - - type: I{float} - - default: 90 - - B{headers} - Extra HTTP headers. - - type: I{dict} - - I{str} B{http} - The I{HTTP} protocol proxy URL. - - I{str} B{https} - The I{HTTPS} protocol proxy URL. 
- - default: {} - - B{username} - The username used for HTTP authentication. - - type: I{str} - - default: None - - B{password} - The password used for HTTP authentication. - - type: I{str} - - default: None - - """ - - def __init__(self, **kwargs): - domain = __name__ - definitions = [ - Definition('proxy', dict, {}), - Definition('timeout', (int,float), 90), - Definition('headers', dict, {}), - Definition('username', str, None), - Definition('password', str, None)] - Skin.__init__(self, domain, definitions, kwargs) diff --git a/libs_crutch/contrib/suds/umx/__init__.py b/libs_crutch/contrib/suds/umx/__init__.py deleted file mode 100755 index e7d74c3..0000000 --- a/libs_crutch/contrib/suds/umx/__init__.py +++ /dev/null @@ -1,55 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides modules containing classes to support -unmarshalling (XML). -""" - -from suds.sudsobject import Object - - - -class Content(Object): - """ - @ivar node: The content source node. - @type node: L{sax.element.Element} - @ivar data: The (optional) content data. - @type data: L{Object} - @ivar text: The (optional) content (xml) text. 
- @type text: basestring - """ - - extensions = [] - - def __init__(self, node, **kwargs): - Object.__init__(self) - self.node = node - self.data = None - self.text = None - for k,v in list(kwargs.items()): - setattr(self, k, v) - - def __getattr__(self, name): - if name not in self.__dict__: - if name in self.extensions: - v = None - setattr(self, name, v) - else: - raise AttributeError('Content has no attribute %s' % name) - else: - v = self.__dict__[name] - return v diff --git a/libs_crutch/contrib/suds/umx/attrlist.py b/libs_crutch/contrib/suds/umx/attrlist.py deleted file mode 100755 index df8da0b..0000000 --- a/libs_crutch/contrib/suds/umx/attrlist.py +++ /dev/null @@ -1,88 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides filtered attribute list classes. -""" - -from suds import * -from suds.umx import * -from suds.sax import Namespace - - -class AttrList: - """ - A filtered attribute list. - Items are included during iteration if they are in either the (xs) or - (xml) namespaces. - @ivar raw: The I{raw} attribute list. 
- @type raw: list - """ - def __init__(self, attributes): - """ - @param attributes: A list of attributes - @type attributes: list - """ - self.raw = attributes - - def real(self): - """ - Get list of I{real} attributes which exclude xs and xml attributes. - @return: A list of I{real} attributes. - @rtype: I{generator} - """ - for a in self.raw: - if self.skip(a): continue - yield a - - def rlen(self): - """ - Get the number of I{real} attributes which exclude xs and xml attributes. - @return: A count of I{real} attributes. - @rtype: L{int} - """ - n = 0 - for a in self.real(): - n += 1 - return n - - def lang(self): - """ - Get list of I{filtered} attributes which exclude xs. - @return: A list of I{filtered} attributes. - @rtype: I{generator} - """ - for a in self.raw: - if a.qname() == 'xml:lang': - return a.value - return None - - def skip(self, attr): - """ - Get whether to skip (filter-out) the specified attribute. - @param attr: An attribute. - @type attr: I{Attribute} - @return: True if should be skipped. - @rtype: bool - """ - ns = attr.namespace() - skip = ( - Namespace.xmlns[1], - 'http://schemas.xmlsoap.org/soap/encoding/', - 'http://schemas.xmlsoap.org/soap/envelope/', - 'http://www.w3.org/2003/05/soap-envelope', - ) - return ( Namespace.xs(ns) or ns[1] in skip ) diff --git a/libs_crutch/contrib/suds/umx/basic.py b/libs_crutch/contrib/suds/umx/basic.py deleted file mode 100755 index 888a212..0000000 --- a/libs_crutch/contrib/suds/umx/basic.py +++ /dev/null @@ -1,41 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides basic unmarshaller classes. -""" - -from logging import getLogger -from suds import * -from suds.umx import * -from suds.umx.core import Core - - -class Basic(Core): - """ - A object builder (unmarshaller). - """ - - def process(self, node): - """ - Process an object graph representation of the xml I{node}. - @param node: An XML tree. - @type node: L{sax.element.Element} - @return: A suds object. - @rtype: L{Object} - """ - content = Content(node) - return Core.process(self, content) diff --git a/libs_crutch/contrib/suds/umx/core.py b/libs_crutch/contrib/suds/umx/core.py deleted file mode 100755 index a819245..0000000 --- a/libs_crutch/contrib/suds/umx/core.py +++ /dev/null @@ -1,214 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides base classes for XML->object I{unmarshalling}. -""" - -from suds import * -from suds.umx import * -from suds.umx.attrlist import AttrList -from suds.sax.text import Text -from suds.sudsobject import Factory, merge - - -reserved = {'class':'cls', 'def':'dfn'} - - -class Core: - """ - The abstract XML I{node} unmarshaller. This class provides the - I{core} unmarshalling functionality. - """ - - def process(self, content): - """ - Process an object graph representation of the xml I{node}. - @param content: The current content being unmarshalled. - @type content: L{Content} - @return: A suds object. - @rtype: L{Object} - """ - self.reset() - return self.append(content) - - def append(self, content): - """ - Process the specified node and convert the XML document into - a I{suds} L{object}. - @param content: The current content being unmarshalled. - @type content: L{Content} - @return: A I{append-result} tuple as: (L{Object}, I{value}) - @rtype: I{append-result} - @note: This is not the proper entry point. - @see: L{process()} - """ - self.start(content) - self.append_attributes(content) - self.append_children(content) - self.append_text(content) - self.end(content) - return self.postprocess(content) - - def postprocess(self, content): - """ - Perform final processing of the resulting data structure as follows: - - Mixed values (children and text) will have a result of the I{content.node}. - - Simi-simple values (attributes, no-children and text) will have a result of a - property object. - - Simple values (no-attributes, no-children with text nodes) will have a string - result equal to the value of the content.node.getText(). - @param content: The current content being unmarshalled. - @type content: L{Content} - @return: The post-processed result. 
- @rtype: I{any} - """ - node = content.node - if len(node.children) and node.hasText(): - return node - attributes = AttrList(node.attributes) - if attributes.rlen() and \ - not len(node.children) and \ - node.hasText(): - p = Factory.property(node.name, node.getText()) - return merge(content.data, p) - if len(content.data): - return content.data - lang = attributes.lang() - if content.node.isnil(): - return None - if not len(node.children) and content.text is None: - if self.nillable(content): - return None - else: - return Text('', lang=lang) - if isinstance(content.text, str): - return Text(content.text, lang=lang) - else: - return content.text - - def append_attributes(self, content): - """ - Append attribute nodes into L{Content.data}. - Attributes in the I{schema} or I{xml} namespaces are skipped. - @param content: The current content being unmarshalled. - @type content: L{Content} - """ - attributes = AttrList(content.node.attributes) - for attr in attributes.real(): - name = attr.name - value = attr.value - self.append_attribute(name, value, content) - - def append_attribute(self, name, value, content): - """ - Append an attribute name/value into L{Content.data}. - @param name: The attribute name - @type name: basestring - @param value: The attribute's value - @type value: basestring - @param content: The current content being unmarshalled. - @type content: L{Content} - """ - key = name - key = '_%s' % reserved.get(key, key) - setattr(content.data, key, value) - - def append_children(self, content): - """ - Append child nodes into L{Content.data} - @param content: The current content being unmarshalled. 
- @type content: L{Content} - """ - for child in content.node: - cont = Content(child) - cval = self.append(cont) - key = reserved.get(child.name, child.name) - if key in content.data: - v = getattr(content.data, key) - if isinstance(v, list): - v.append(cval) - else: - setattr(content.data, key, [v, cval]) - continue - if self.multi_occurrence(cont): - if cval is None: - setattr(content.data, key, []) - else: - setattr(content.data, key, [cval,]) - else: - setattr(content.data, key, cval) - - def append_text(self, content): - """ - Append text nodes into L{Content.data} - @param content: The current content being unmarshalled. - @type content: L{Content} - """ - if content.node.hasText(): - content.text = content.node.getText() - - def reset(self): - pass - - def start(self, content): - """ - Processing on I{node} has started. Build and return - the proper object. - @param content: The current content being unmarshalled. - @type content: L{Content} - @return: A subclass of Object. - @rtype: L{Object} - """ - content.data = Factory.object(content.node.name) - - def end(self, content): - """ - Processing on I{node} has ended. - @param content: The current content being unmarshalled. - @type content: L{Content} - """ - pass - - def single_occurrence(self, content): - """ - Get whether the content has at most a single occurrence (not a list). - @param content: The current content being unmarshalled. - @type content: L{Content} - @return: True if content has at most a single occurrence, else False. - @rtype: boolean - '""" - return not self.multi_occurrence(content) - - def multi_occurrence(self, content): - """ - Get whether the content has more than one occurrence (a list). - @param content: The current content being unmarshalled. - @type content: L{Content} - @return: True if content has more than one occurrence, else False. - @rtype: boolean - '""" - return False - - def nillable(self, content): - """ - Get whether the object is nillable. 
- @param content: The current content being unmarshalled. - @type content: L{Content} - @return: True if nillable, else False - @rtype: boolean - '""" - return False diff --git a/libs_crutch/contrib/suds/umx/encoded.py b/libs_crutch/contrib/suds/umx/encoded.py deleted file mode 100755 index bb454e1..0000000 --- a/libs_crutch/contrib/suds/umx/encoded.py +++ /dev/null @@ -1,126 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides soap encoded unmarshaller classes. -""" - -from suds import * -from suds.umx import * -from suds.umx.typed import Typed -from suds.sax import Namespace - - -# -# Add encoded extensions -# aty = The soap (section 5) encoded array type. -# -Content.extensions.append('aty') - - -class Encoded(Typed): - """ - A SOAP section (5) encoding unmarshaller. - This marshaller supports rpc/encoded soap styles. - """ - - def start(self, content): - # - # Grab the array type and continue - # - self.setaty(content) - Typed.start(self, content) - - def end(self, content): - # - # Squash soap encoded arrays into python lists. This is - # also where we insure that empty arrays are represented - # as empty python lists. 
- # - aty = content.aty - if aty is not None: - self.promote(content) - return Typed.end(self, content) - - def postprocess(self, content): - # - # Ensure proper rendering of empty arrays. - # - if content.aty is None: - return Typed.postprocess(self, content) - else: - return content.data - - def setaty(self, content): - """ - Grab the (aty) soap-enc:arrayType and attach it to the - content for proper array processing later in end(). - @param content: The current content being unmarshalled. - @type content: L{Content} - @return: self - @rtype: L{Encoded} - """ - name = 'arrayType' - ns = (None, 'http://schemas.xmlsoap.org/soap/encoding/') - aty = content.node.get(name, ns) - if aty is not None: - content.aty = aty - parts = aty.split('[') - ref = parts[0] - if len(parts) == 2: - self.applyaty(content, ref) - else: - pass # (2) dimensional array - return self - - def applyaty(self, content, xty): - """ - Apply the type referenced in the I{arrayType} to the content - (child nodes) of the array. Each element (node) in the array - that does not have an explicit xsi:type attribute is given one - based on the I{arrayType}. - @param content: An array content. - @type content: L{Content} - @param xty: The XSI type reference. - @type xty: str - @return: self - @rtype: L{Encoded} - """ - name = 'type' - ns = Namespace.xsins - parent = content.node - for child in parent.getChildren(): - ref = child.get(name, ns) - if ref is None: - parent.addPrefix(ns[0], ns[1]) - attr = ':'.join((ns[0], name)) - child.set(attr, xty) - return self - - def promote(self, content): - """ - Promote (replace) the content.data with the first attribute - of the current content.data that is a I{list}. Note: the - content.data may be empty or contain only _x attributes. - In either case, the content.data is assigned an empty list. - @param content: An array content. 
- @type content: L{Content} - """ - for n,v in content.data: - if isinstance(v, list): - content.data = v - return - content.data = [] diff --git a/libs_crutch/contrib/suds/umx/typed.py b/libs_crutch/contrib/suds/umx/typed.py deleted file mode 100755 index f28cb92..0000000 --- a/libs_crutch/contrib/suds/umx/typed.py +++ /dev/null @@ -1,140 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Provides typed unmarshaller classes. -""" - -from suds import * -from suds.umx import * -from suds.umx.core import Core -from suds.resolver import NodeResolver, Frame -from suds.sudsobject import Factory - -from logging import getLogger -log = getLogger(__name__) - - -# -# Add typed extensions -# type = The expected xsd type -# real = The 'true' XSD type -# -Content.extensions.append('type') -Content.extensions.append('real') - - -class Typed(Core): - """ - A I{typed} XML unmarshaller - @ivar resolver: A schema type resolver. - @type resolver: L{NodeResolver} - """ - - def __init__(self, schema): - """ - @param schema: A schema object. 
- @type schema: L{xsd.schema.Schema} - """ - self.resolver = NodeResolver(schema) - - def process(self, node, type): - """ - Process an object graph representation of the xml L{node}. - @param node: An XML tree. - @type node: L{sax.element.Element} - @param type: The I{optional} schema type. - @type type: L{xsd.sxbase.SchemaObject} - @return: A suds object. - @rtype: L{Object} - """ - content = Content(node) - content.type = type - return Core.process(self, content) - - def reset(self): - log.debug('reset') - self.resolver.reset() - - def start(self, content): - # - # Resolve to the schema type; build an object and setup metadata. - # - if content.type is None: - found = self.resolver.find(content.node) - if found is None: - log.error(self.resolver.schema) - raise TypeNotFound(content.node.qname()) - content.type = found - else: - known = self.resolver.known(content.node) - frame = Frame(content.type, resolved=known) - self.resolver.push(frame) - real = self.resolver.top().resolved - content.real = real - cls_name = real.name - if cls_name is None: - cls_name = content.node.name - content.data = Factory.object(cls_name) - md = content.data.__metadata__ - md.sxtype = real - - def end(self, content): - self.resolver.pop() - - def multi_occurrence(self, content): - return content.type.multi_occurrence() - - def nillable(self, content): - resolved = content.type.resolve() - return ( content.type.nillable or \ - (resolved.builtin() and resolved.nillable ) ) - - def append_attribute(self, name, value, content): - """ - Append an attribute name/value into L{Content.data}. - @param name: The attribute name - @type name: basestring - @param value: The attribute's value - @type value: basestring - @param content: The current content being unmarshalled. 
- @type content: L{Content} - """ - type = self.resolver.findattr(name) - if type is None: - log.warning('attribute (%s) type, not-found', name) - else: - value = self.translated(value, type) - Core.append_attribute(self, name, value, content) - - def append_text(self, content): - """ - Append text nodes into L{Content.data} - Here is where the I{true} type is used to translate the value - into the proper python type. - @param content: The current content being unmarshalled. - @type content: L{Content} - """ - Core.append_text(self, content) - known = self.resolver.top().resolved - content.text = self.translated(content.text, known) - - def translated(self, value, type): - """ translate using the schema type """ - if value is not None: - resolved = type.resolve() - return resolved.translate(value) - return value diff --git a/libs_crutch/contrib/suds/version.py b/libs_crutch/contrib/suds/version.py deleted file mode 100755 index 9fdfb67..0000000 --- a/libs_crutch/contrib/suds/version.py +++ /dev/null @@ -1,26 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. - -""" -Module containing the library's version information. 
- - This version information has been extracted into a separate file so it can be -read from the setup.py script without having to import the suds package itself. -See the setup.py script for more detailed information. - -""" - -__version__ = "0.8.5" -__build__ = "" diff --git a/libs_crutch/contrib/suds/wsdl.py b/libs_crutch/contrib/suds/wsdl.py deleted file mode 100755 index 09f65ca..0000000 --- a/libs_crutch/contrib/suds/wsdl.py +++ /dev/null @@ -1,1005 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The I{wsdl} module provides an objectification of the WSDL. - -The primary class is I{Definitions}, representing the root element found in a -WSDL schema document. - -""" - -from suds import * -from suds.bindings.document import Document -from suds.bindings.rpc import RPC, Encoded -from suds.reader import DocumentReader -from suds.sax.element import Element -from suds.sudsobject import Object, Facade, Metadata -from suds.xsd import qualify, Namespace -from suds.xsd.query import ElementQuery -from suds.xsd.schema import Schema, SchemaCollection - -import re -from . 
import soaparray -from urllib.parse import urljoin - -from logging import getLogger -log = getLogger(__name__) - - -wsdlns = (None, "http://schemas.xmlsoap.org/wsdl/") -soapns = (None, "http://schemas.xmlsoap.org/wsdl/soap/") -soap12ns = (None, "http://schemas.xmlsoap.org/wsdl/soap12/") - - -class WObject(Object): - """ - Base object for WSDL types. - - @ivar root: The XML I{root} element. - @type root: L{Element} - - """ - - def __init__(self, root): - """ - @param root: An XML root element. - @type root: L{Element} - - """ - Object.__init__(self) - self.root = root - pmd = Metadata() - pmd.excludes = ["root"] - pmd.wrappers = dict(qname=repr) - self.__metadata__.__print__ = pmd - self.__resolved = False - - def resolve(self, definitions): - """ - Resolve named references to other WSDL objects. - - Can be safely called multiple times. - - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - if not self.__resolved: - self.do_resolve(definitions) - self.__resolved = True - - def do_resolve(self, definitions): - """ - Internal worker resolving named references to other WSDL objects. - - May only be called once per instance. - - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - pass - - -class NamedObject(WObject): - """ - A B{named} WSDL object. - - @ivar name: The name of the object. - @type name: str - @ivar qname: The I{qualified} name of the object. - @type qname: (name, I{namespace-uri}). - - """ - - def __init__(self, root, definitions): - """ - @param root: An XML root element. - @type root: L{Element} - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - WObject.__init__(self, root) - self.name = root.get("name") - self.qname = (self.name, definitions.tns[1]) - pmd = self.__metadata__.__print__ - pmd.wrappers["qname"] = repr - - -class Definitions(WObject): - """ - I{Root} container for all the WSDL objects defined by . - - @ivar id: The object id. 
- @type id: str - @ivar options: An options dictionary. - @type options: L{options.Options} - @ivar url: The URL used to load the object. - @type url: str - @ivar tns: The target namespace for the WSDL. - @type tns: str - @ivar schema: The collective WSDL schema object. - @type schema: L{SchemaCollection} - @ivar children: The raw list of child objects. - @type children: [L{WObject},...] - @ivar imports: The list of L{Import} children. - @type imports: [L{Import},...] - @ivar messages: The dictionary of L{Message} children keyed by I{qname}. - @type messages: [L{Message},...] - @ivar port_types: The dictionary of L{PortType} children keyed by I{qname}. - @type port_types: [L{PortType},...] - @ivar bindings: The dictionary of L{Binding} children keyed by I{qname}. - @type bindings: [L{Binding},...] - @ivar service: The service object. - @type service: L{Service} - - """ - - Tag = "definitions" - - def __init__(self, url, options, imported_definitions=None): - """ - @param url: A URL to the WSDL. - @type url: str - @param options: An options dictionary. 
- @type options: L{options.Options} - - """ - log.debug("reading WSDL at: %s ...", url) - reader = DocumentReader(options) - d = reader.open(url) - root = d.root() - WObject.__init__(self, root) - self.id = objid(self) - self.options = options - self.url = url - self.tns = self.mktns(root) - self.types = [] - self.schema = None - self.children = [] - self.imports = [] - self.messages = {} - self.port_types = {} - self.bindings = {} - self.services = [] - self.add_children(self.root) - self.children.sort() - pmd = self.__metadata__.__print__ - pmd.excludes.append("children") - pmd.excludes.append("wsdl") - pmd.wrappers["schema"] = repr - if imported_definitions is None: - imported_definitions = {} - imported_definitions[url] = self - self.open_imports(imported_definitions) - self.resolve() - self.build_schema() - self.set_wrapped() - for s in self.services: - self.add_methods(s) - log.debug("WSDL at '%s' loaded:\n%s", url, self) - - def mktns(self, root): - """Get/create the target namespace.""" - tns = root.get("targetNamespace") - prefix = root.findPrefix(tns) - if prefix is None: - log.debug("warning: tns (%s), not mapped to prefix", tns) - prefix = "tns" - return (prefix, tns) - - def add_children(self, root): - """Add child objects using the factory.""" - for c in root.getChildren(ns=wsdlns): - child = Factory.create(c, self) - if child is None: continue - self.children.append(child) - if isinstance(child, Import): - self.imports.append(child) - continue - if isinstance(child, Types): - self.types.append(child) - continue - if isinstance(child, Message): - self.messages[child.qname] = child - continue - if isinstance(child, PortType): - self.port_types[child.qname] = child - continue - if isinstance(child, Binding): - self.bindings[child.qname] = child - continue - if isinstance(child, Service): - self.services.append(child) - continue - - def open_imports(self, imported_definitions): - """Import the I{imported} WSDLs.""" - for imp in self.imports: - 
imp.load(self, imported_definitions) - - def resolve(self): - """Tell all children to resolve themselves.""" - for c in self.children: - c.resolve(self) - - def build_schema(self): - """Process L{Types} objects and create the schema collection.""" - loaded_schemata = {} - container = SchemaCollection(self) - for t in (t for t in self.types if t.local()): - for root in t.contents(): - schema = Schema(root, self.url, self.options, loaded_schemata, container) - container.add(schema) - if not container: - root = Element.buildPath(self.root, "types/schema") - schema = Schema(root, self.url, self.options, loaded_schemata, container) - container.add(schema) - self.schema = container.load(self.options, loaded_schemata) - #TODO: Recheck this XSD schema merging. XSD schema imports are not - # supposed to be transitive. They only allow the importing schema to - # reference entities from the imported schema, but do not include them - # as their own content. - for s in (t.schema() for t in self.types if t.imported()): - self.schema.merge(s) - return self.schema - - def add_methods(self, service): - """Build method view for service.""" - bindings = { - "document/literal": Document(self), - "rpc/literal": RPC(self), - "rpc/encoded": Encoded(self)} - for p in service.ports: - binding = p.binding - ptype = p.binding.type - operations = list(p.binding.type.operations.values()) - for name in (op.name for op in operations): - m = Facade("Method") - m.name = name - m.location = p.location - m.binding = Facade("binding") - op = binding.operation(name) - m.soap = op.soap - key = "/".join((op.soap.style, op.soap.input.body.use)) - m.binding.input = bindings.get(key) - key = "/".join((op.soap.style, op.soap.output.body.use)) - m.binding.output = bindings.get(key) - p.methods[name] = m - - def set_wrapped(self): - """Set (wrapped|bare) flag on messages.""" - for b in list(self.bindings.values()): - for op in list(b.operations.values()): - for body in (op.soap.input.body, 
op.soap.output.body): - body.wrapped = False - if not self.options.unwrap: - continue - if len(body.parts) != 1: - continue - for p in body.parts: - if p.element is None: - continue - query = ElementQuery(p.element) - pt = query.execute(self.schema) - if pt is None: - raise TypeNotFound(query.ref) - resolved = pt.resolve() - if resolved.builtin(): - continue - body.wrapped = True - - def __getstate__(self): - nopickle = ("options",) - state = self.__dict__.copy() - for k in nopickle: - if k in state: - del state[k] - return state - - def __repr__(self): - return "Definitions (id=%s)" % (self.id,) - - -class Import(WObject): - """ - Represents the . - - @ivar location: The value of the I{location} attribute. - @type location: str - @ivar ns: The value of the I{namespace} attribute. - @type ns: str - @ivar imported: The imported object. - @type imported: L{Definitions} - - """ - - def __init__(self, root, definitions): - """ - @param root: An XML root element. - @type root: L{Element} - @param definitions: A definitions object. 
- @type definitions: L{Definitions} - - """ - WObject.__init__(self, root) - self.location = root.get("location") - self.ns = root.get("namespace") - self.imported = None - pmd = self.__metadata__.__print__ - pmd.wrappers["imported"] = repr - - def load(self, definitions, imported_definitions): - """Load the object by opening the URL.""" - url = self.location - log.debug("importing (%s)", url) - if "://" not in url: - url = urljoin(definitions.url, url) - d = imported_definitions.get(url) - if not d: - d = Definitions(url, definitions.options, imported_definitions) - if d.root.match(Definitions.Tag, wsdlns): - self.import_definitions(definitions, d) - return - if d.root.match(Schema.Tag, Namespace.xsdns): - self.import_schema(definitions, d) - return - raise Exception("document at '%s' is unknown" % url) - - def import_definitions(self, definitions, d): - """Import/merge WSDL definitions.""" - definitions.types += d.types - definitions.messages.update(d.messages) - definitions.port_types.update(d.port_types) - definitions.bindings.update(d.bindings) - self.imported = d - log.debug("imported (WSDL):\n%s", d) - - def import_schema(self, definitions, d): - """Import schema as content.""" - if not definitions.types: - root = Element("types", ns=wsdlns) - definitions.root.insert(root) - types = Types(root, definitions) - definitions.types.append(types) - else: - types = definitions.types[-1] - types.root.append(d.root) - log.debug("imported (XSD):\n%s", d.root) - - def __gt__(self, other): - return False - - -class Types(WObject): - """Represents .""" - - def __init__(self, root, definitions): - """ - @param root: An XML root element. - @type root: L{Element} - @param definitions: A definitions object. 
- @type definitions: L{Definitions} - - """ - WObject.__init__(self, root) - self.definitions = definitions - - def contents(self): - return self.root.getChildren("schema", Namespace.xsdns) - - def schema(self): - return self.definitions.schema - - def local(self): - return self.definitions.schema is None - - def imported(self): - return not self.local() - - def __gt__(self, other): - return isinstance(other, Import) - - -class Part(NamedObject): - """ - Represents . - - @ivar element: The value of the {element} attribute. Stored as a I{qref} as - converted by L{suds.xsd.qualify}. - @type element: str - @ivar type: The value of the {type} attribute. Stored as a I{qref} as - converted by L{suds.xsd.qualify}. - @type type: str - - """ - - def __init__(self, root, definitions): - """ - @param root: An XML root element. - @type root: L{Element} - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - NamedObject.__init__(self, root, definitions) - pmd = Metadata() - pmd.wrappers = dict(element=repr, type=repr) - self.__metadata__.__print__ = pmd - tns = definitions.tns - self.element = self.__getref("element", tns) - self.type = self.__getref("type", tns) - - def __getref(self, a, tns): - """Get the qualified value of attribute named 'a'.""" - s = self.root.get(a) - if s is not None: - return qualify(s, self.root, tns) - - -class Message(NamedObject): - """ - Represents . - - @ivar parts: A list of message parts. - @type parts: [I{Part},...] - - """ - - def __init__(self, root, definitions): - """ - @param root: An XML root element. - @type root: L{Element} - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - NamedObject.__init__(self, root, definitions) - self.parts = [] - for p in root.getChildren("part"): - part = Part(p, definitions) - self.parts.append(part) - - def __gt__(self, other): - return isinstance(other, (Import, Types)) - - -class PortType(NamedObject): - """ - Represents . 
- - @ivar operations: A list of contained operations. - @type operations: list - - """ - - def __init__(self, root, definitions): - """ - @param root: An XML root element. - @type root: L{Element} - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - NamedObject.__init__(self, root, definitions) - self.operations = {} - for c in root.getChildren("operation"): - op = Facade("Operation") - op.name = c.get("name") - op.tns = definitions.tns - input = c.getChild("input") - if input is None: - op.input = None - else: - op.input = input.get("message") - output = c.getChild("output") - if output is None: - op.output = None - else: - op.output = output.get("message") - faults = [] - for fault in c.getChildren("fault"): - f = Facade("Fault") - f.name = fault.get("name") - f.message = fault.get("message") - faults.append(f) - op.faults = faults - self.operations[op.name] = op - - def do_resolve(self, definitions): - """ - Resolve named references to other WSDL objects. - - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - for op in list(self.operations.values()): - if op.input is None: - op.input = Message(Element("no-input"), definitions) - else: - qref = qualify(op.input, self.root, definitions.tns) - msg = definitions.messages.get(qref) - if msg is None: - raise Exception("msg '%s', not-found" % (op.input,)) - op.input = msg - if op.output is None: - op.output = Message(Element("no-output"), definitions) - else: - qref = qualify(op.output, self.root, definitions.tns) - msg = definitions.messages.get(qref) - if msg is None: - raise Exception("msg '%s', not-found" % (op.output,)) - op.output = msg - for f in op.faults: - qref = qualify(f.message, self.root, definitions.tns) - msg = definitions.messages.get(qref) - if msg is None: - raise Exception("msg '%s', not-found" % (f.message,)) - f.message = msg - - def operation(self, name): - """ - Shortcut used to get a contained operation by name. 
- - @param name: An operation name. - @type name: str - @return: The named operation. - @rtype: Operation - @raise L{MethodNotFound}: When not found. - - """ - try: - return self.operations[name] - except Exception as e: - raise MethodNotFound(name) - - def __gt__(self, other): - return isinstance(other, (Import, Types, Message)) - - -class Binding(NamedObject): - """ - Represents . - - @ivar operations: A list of contained operations. - @type operations: list - - """ - - def __init__(self, root, definitions): - """ - @param root: An XML root element. - @type root: L{Element} - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - NamedObject.__init__(self, root, definitions) - self.operations = {} - self.type = root.get("type") - sr = self.soaproot() - if sr is None: - self.soap = None - log.debug("binding: '%s' not a SOAP binding", self.name) - return - soap = Facade("soap") - self.soap = soap - self.soap.style = sr.get("style", default="document") - self.add_operations(self.root, definitions) - - def soaproot(self): - """Get the soap:binding.""" - for ns in (soapns, soap12ns): - sr = self.root.getChild("binding", ns=ns) - if sr is not None: - return sr - - def add_operations(self, root, definitions): - """Add children.""" - dsop = Element("operation", ns=soapns) - for c in root.getChildren("operation"): - op = Facade("Operation") - op.name = c.get("name") - sop = c.getChild("operation", default=dsop) - soap = Facade("soap") - soap.action = '"%s"' % (sop.get("soapAction", default=""),) - soap.style = sop.get("style", default=self.soap.style) - soap.input = Facade("Input") - soap.input.body = Facade("Body") - soap.input.headers = [] - soap.output = Facade("Output") - soap.output.body = Facade("Body") - soap.output.headers = [] - op.soap = soap - input = c.getChild("input") - if input is None: - input = Element("input", ns=wsdlns) - body = input.getChild("body") - self.body(definitions, soap.input.body, body) - for header in 
input.getChildren("header"): - self.header(definitions, soap.input, header) - output = c.getChild("output") - if output is None: - output = Element("output", ns=wsdlns) - body = output.getChild("body") - self.body(definitions, soap.output.body, body) - for header in output.getChildren("header"): - self.header(definitions, soap.output, header) - faults = [] - for fault in c.getChildren("fault"): - sf = fault.getChild("fault") - if sf is None: - continue - fn = fault.get("name") - f = Facade("Fault") - f.name = sf.get("name", default=fn) - f.use = sf.get("use", default="literal") - faults.append(f) - soap.faults = faults - self.operations[op.name] = op - - def body(self, definitions, body, root): - """Add the input/output body properties.""" - if root is None: - body.use = "literal" - body.namespace = definitions.tns - body.parts = () - return - parts = root.get("parts") - if parts is None: - body.parts = () - else: - body.parts = re.split("[\\s,]", parts) - body.use = root.get("use", default="literal") - ns = root.get("namespace") - if ns is None: - body.namespace = definitions.tns - else: - prefix = root.findPrefix(ns, "b0") - body.namespace = (prefix, ns) - - def header(self, definitions, parent, root): - """Add the input/output header properties.""" - if root is None: - return - header = Facade("Header") - parent.headers.append(header) - header.use = root.get("use", default="literal") - ns = root.get("namespace") - if ns is None: - header.namespace = definitions.tns - else: - prefix = root.findPrefix(ns, "h0") - header.namespace = (prefix, ns) - msg = root.get("message") - if msg is not None: - header.message = msg - part = root.get("part") - if part is not None: - header.part = part - - def do_resolve(self, definitions): - """ - Resolve named references to other WSDL objects. This includes - cross-linking information (from) the portType (to) the I{SOAP} protocol - information on the binding for each operation. - - @param definitions: A definitions object. 
- @type definitions: L{Definitions} - - """ - self.__resolveport(definitions) - for op in list(self.operations.values()): - self.__resolvesoapbody(definitions, op) - self.__resolveheaders(definitions, op) - self.__resolvefaults(definitions, op) - - def __resolveport(self, definitions): - """ - Resolve port_type reference. - - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - ref = qualify(self.type, self.root, definitions.tns) - port_type = definitions.port_types.get(ref) - if port_type is None: - raise Exception("portType '%s', not-found" % (self.type,)) - # Later on we will require access to the message data referenced by - # this port_type instance, and in order for those data references to be - # available, port_type first needs to dereference its message - # identification string. The only scenario where the port_type might - # possibly not have already resolved its references, and where this - # explicit resolve() call is required, is if we are dealing with a - # recursive WSDL import chain. - port_type.resolve(definitions) - self.type = port_type - - def __resolvesoapbody(self, definitions, op): - """ - Resolve SOAP body I{message} parts by cross-referencing with operation - defined in port type. - - @param definitions: A definitions object. - @type definitions: L{Definitions} - @param op: An I{operation} object. 
- @type op: I{operation} - - """ - ptop = self.type.operation(op.name) - if ptop is None: - raise Exception("operation '%s' not defined in portType" % ( - op.name,)) - soap = op.soap - parts = soap.input.body.parts - if parts: - pts = [] - for p in ptop.input.parts: - if p.name in parts: - pts.append(p) - soap.input.body.parts = pts - else: - soap.input.body.parts = ptop.input.parts - parts = soap.output.body.parts - if parts: - pts = [] - for p in ptop.output.parts: - if p.name in parts: - pts.append(p) - soap.output.body.parts = pts - else: - soap.output.body.parts = ptop.output.parts - - def __resolveheaders(self, definitions, op): - """ - Resolve SOAP header I{message} references. - - @param definitions: A definitions object. - @type definitions: L{Definitions} - @param op: An I{operation} object. - @type op: I{operation} - - """ - soap = op.soap - headers = soap.input.headers + soap.output.headers - for header in headers: - mn = header.message - ref = qualify(mn, self.root, definitions.tns) - message = definitions.messages.get(ref) - if message is None: - raise Exception("message '%s', not-found" % (mn,)) - pn = header.part - for p in message.parts: - if p.name == pn: - header.part = p - break - if pn == header.part: - raise Exception("message '%s' has not part named '%s'" % ( - ref, pn)) - - def __resolvefaults(self, definitions, op): - """ - Resolve SOAP fault I{message} references by cross-referencing with - operations defined in the port type. - - @param definitions: A definitions object. - @type definitions: L{Definitions} - @param op: An I{operation} object. 
- @type op: I{operation} - - """ - ptop = self.type.operation(op.name) - if ptop is None: - raise Exception("operation '%s' not defined in portType" % ( - op.name,)) - soap = op.soap - for fault in soap.faults: - for f in ptop.faults: - if f.name == fault.name: - fault.parts = f.message.parts - continue - if hasattr(fault, "parts"): - continue - raise Exception("fault '%s' not defined in portType '%s'" % ( - fault.name, self.type.name)) - - def operation(self, name): - """ - Shortcut used to get a contained operation by name. - - @param name: An operation name. - @type name: str - @return: The named operation. - @rtype: Operation - @raise L{MethodNotFound}: When not found. - - """ - try: - return self.operations[name] - except Exception: - raise MethodNotFound(name) - - def __gt__(self, other): - return not isinstance(other, Service) - - -class Port(NamedObject): - """ - Represents a service port. - - @ivar service: A service. - @type service: L{Service} - @ivar binding: A binding name. - @type binding: str - @ivar location: The service location (URL). - @type location: str - - """ - - def __init__(self, root, definitions, service): - """ - @param root: An XML root element. - @type root: L{Element} - @param definitions: A definitions object. - @type definitions: L{Definitions} - @param service: A service object. - @type service: L{Service} - - """ - NamedObject.__init__(self, root, definitions) - self.__service = service - self.binding = root.get("binding") - address = root.getChild("address") - self.location = address is not None and address.get("location") - self.methods = {} - - def method(self, name): - """ - Get a method defined in this portType by name. - - @param name: A method name. - @type name: str - @return: The requested method object. - @rtype: I{Method} - - """ - return self.methods.get(name) - - -class Service(NamedObject): - """ - Represents . - - @ivar port: The contained ports. - @type port: [Port,..] 
- @ivar methods: The contained methods for all ports. - @type methods: [Method,..] - - """ - - def __init__(self, root, definitions): - """ - @param root: An XML root element. - @type root: L{Element} - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - NamedObject.__init__(self, root, definitions) - self.ports = [] - for p in root.getChildren("port"): - port = Port(p, definitions, self) - self.ports.append(port) - - def port(self, name): - """ - Locate a port by name. - - @param name: A port name. - @type name: str - @return: The port object. - @rtype: L{Port} - - """ - for p in self.ports: - if p.name == name: - return p - - def setlocation(self, url, names=None): - """ - Override the invocation location (URL) for service method. - - @param url: A URL location. - @type url: A URL. - @param names: A list of method names. None=ALL - @type names: [str,..] - - """ - for p in self.ports: - for m in list(p.methods.values()): - if names is None or m.name in names: - m.location = url - - def do_resolve(self, definitions): - """ - Resolve named references to other WSDL objects. Ports without SOAP - bindings are discarded. - - @param definitions: A definitions object. - @type definitions: L{Definitions} - - """ - filtered = [] - for p in self.ports: - ref = qualify(p.binding, self.root, definitions.tns) - binding = definitions.bindings.get(ref) - if binding is None: - raise Exception("binding '%s', not-found" % (p.binding,)) - if binding.soap is None: - log.debug("binding '%s' - not a SOAP binding, discarded", - binding.name) - continue - # After we have been resolved, our caller will expect that the - # binding we are referencing has been fully constructed, i.e. - # resolved, as well. The only scenario where the operations binding - # might possibly not have already resolved its references, and - # where this explicit resolve() call is required, is if we are - # dealing with a recursive WSDL import chain. 
- binding.resolve(definitions) - p.binding = binding - filtered.append(p) - self.ports = filtered - - def __gt__(self, other): - return True - - -class Factory: - """ - Simple WSDL object factory. - - @cvar tags: Dictionary of tag-->constructor mappings. - @type tags: dict - - """ - - tags = { - "import": Import, - "types": Types, - "message": Message, - "portType": PortType, - "binding": Binding, - "service": Service} - - @classmethod - def create(cls, root, definitions): - """ - Create an object based on the root tag name. - - @param root: An XML root element. - @type root: L{Element} - @param definitions: A definitions object. - @type definitions: L{Definitions} - @return: The created object. - @rtype: L{WObject} - - """ - fn = cls.tags.get(root.name) - if fn is not None: - return fn(root, definitions) diff --git a/libs_crutch/contrib/suds/wsse.py b/libs_crutch/contrib/suds/wsse.py deleted file mode 100755 index 96d9eb6..0000000 --- a/libs_crutch/contrib/suds/wsse.py +++ /dev/null @@ -1,236 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The I{wsse} module provides WS-Security. 
-""" - -from logging import getLogger -from suds import * -from suds.sudsobject import Object -from suds.sax.element import Element -from suds.sax.date import DateTime, UtcTimezone -from datetime import datetime, timedelta - -try: - from hashlib import md5 -except ImportError: - # Python 2.4 compatibility - from md5 import md5 - - -dsns = \ - ('ds', - 'http://www.w3.org/2000/09/xmldsig#') -wssens = \ - ('wsse', - 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd') -wsuns = \ - ('wsu', - 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd') -wsencns = \ - ('wsenc', - 'http://www.w3.org/2001/04/xmlenc#') - -nonce_encoding_type = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary" -username_token_profile = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0" -wsdigest = "%s#PasswordDigest" % username_token_profile -wstext = "%s#PasswordText" % username_token_profile - - -class Security(Object): - """ - WS-Security object. - @ivar tokens: A list of security tokens - @type tokens: [L{Token},...] - @ivar signatures: A list of signatures. - @type signatures: TBD - @ivar references: A list of references. - @type references: TBD - @ivar keys: A list of encryption keys. - @type keys: TBD - """ - - def __init__(self): - """ """ - Object.__init__(self) - self.mustUnderstand = True - self.tokens = [] - self.signatures = [] - self.references = [] - self.keys = [] - - def xml(self): - """ - Get xml representation of the object. - @return: The root node. - @rtype: L{Element} - """ - root = Element('Security', ns=wssens) - root.set('mustUnderstand', str(self.mustUnderstand).lower()) - for t in self.tokens: - root.append(t.xml()) - return root - - -class Token(Object): - """ I{Abstract} security token. 
""" - - @classmethod - def now(cls): - return datetime.now() - - @classmethod - def utc(cls): - return datetime.utcnow().replace(tzinfo=UtcTimezone()) - - @classmethod - def sysdate(cls): - utc = DateTime(cls.utc()) - return str(utc) - - def __init__(self): - Object.__init__(self) - - -class UsernameToken(Token): - """ - Represents a basic I{UsernameToken} WS-Secuirty token. - @ivar username: A username. - @type username: str - @ivar password: A password. - @type password: str - @type password_digest: A password digest - @ivar nonce: A set of bytes to prevent replay attacks. - @type nonce: str - @ivar created: The token created. - @type created: L{datetime} - """ - - def __init__(self, username=None, password=None): - """ - @param username: A username. - @type username: str - @param password: A password. - @type password: str - """ - Token.__init__(self) - self.username = username - self.password = password - self.nonce = None - self.created = None - self.password_digest = None - self.nonce_has_encoding = False - - def setnonceencoding(self, value=False): - self.nonce_has_encoding = value - - def setpassworddigest(self, passwd_digest): - """ - Set password digest which is a text returned by - auth WS. - """ - self.password_digest = passwd_digest - - def setnonce(self, text=None): - """ - Set I{nonce} which is an arbitrary set of bytes to prevent replay - attacks. - @param text: The nonce text value. - Generated when I{None}. - @type text: str - """ - if text is None: - s = [] - s.append(self.username) - s.append(self.password) - s.append(Token.sysdate()) - m = md5() - m.update(':'.join(s).encode('utf-8')) - self.nonce = m.hexdigest() - else: - self.nonce = text - - def setcreated(self, dt=None): - """ - Set I{created}. - @param dt: The created date & time. - Set as datetime.utc() when I{None}. - @type dt: L{datetime} - """ - if dt is None: - self.created = Token.utc() - else: - self.created = dt - - def xml(self): - """ - Get xml representation of the object. 
- @return: The root node. - @rtype: L{Element} - """ - root = Element('UsernameToken', ns=wssens) - u = Element('Username', ns=wssens) - u.setText(self.username) - root.append(u) - p = Element('Password', ns=wssens) - p.setText(self.password) - if self.password_digest: - p.set("Type", wsdigest) - p.setText(self.password_digest) - else: - p.set("Type", wstext) - root.append(p) - if self.nonce is not None: - n = Element('Nonce', ns=wssens) - if self.nonce_has_encoding: - n.set("EncodingType", nonce_encoding_type) - n.setText(self.nonce) - root.append(n) - if self.created is not None: - n = Element('Created', ns=wsuns) - n.setText(str(DateTime(self.created))) - root.append(n) - return root - - -class Timestamp(Token): - """ - Represents the I{Timestamp} WS-Secuirty token. - @ivar created: The token created. - @type created: L{datetime} - @ivar expires: The token expires. - @type expires: L{datetime} - """ - - def __init__(self, validity=90): - """ - @param validity: The time in seconds. - @type validity: int - """ - Token.__init__(self) - self.created = Token.utc() - self.expires = self.created + timedelta(seconds=validity) - - def xml(self): - root = Element("Timestamp", ns=wsuns) - created = Element('Created', ns=wsuns) - created.setText(str(DateTime(self.created))) - expires = Element('Expires', ns=wsuns) - expires.setText(str(DateTime(self.expires))) - root.append(created) - root.append(expires) - return root diff --git a/libs_crutch/contrib/suds/xsd/__init__.py b/libs_crutch/contrib/suds/xsd/__init__.py deleted file mode 100755 index 39c2b1d..0000000 --- a/libs_crutch/contrib/suds/xsd/__init__.py +++ /dev/null @@ -1,75 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - - -from suds import * -from suds.sax import Namespace, splitPrefix - - -def qualify(ref, resolvers, defns=Namespace.default): - """ - Get a reference that is I{qualified} by namespace. - @param ref: A referenced schema type name. - @type ref: str - @param resolvers: A list of objects to be used to resolve types. - @type resolvers: [L{sax.element.Element},] - @param defns: An optional target namespace used to qualify references - when no prefix is specified. - @type defns: A default namespace I{tuple: (prefix,uri)} used when ref not prefixed. - @return: A qualified reference. - @rtype: (name, namespace-uri) - """ - ns = None - p, n = splitPrefix(ref) - if p is not None: - if not isinstance(resolvers, (list, tuple)): - resolvers = (resolvers,) - for r in resolvers: - resolved = r.resolvePrefix(p) - if resolved[1] is not None: - ns = resolved - break - if ns is None: - raise Exception('prefix (%s) not resolved' % p) - else: - ns = defns - return (n, ns[1]) - -def isqref(object): - """ - Get whether the object is a I{qualified reference}. - @param object: An object to be tested. 
- @type object: I{any} - @rtype: boolean - @see: L{qualify} - """ - return (\ - isinstance(object, tuple) and \ - len(object) == 2 and \ - isinstance(object[0], str) and \ - isinstance(object[1], str)) - - -class Filter: - def __init__(self, inclusive=False, *items): - self.inclusive = inclusive - self.items = items - def __contains__(self, x): - if self.inclusive: - result = ( x in self.items ) - else: - result = ( x not in self.items ) - return result diff --git a/libs_crutch/contrib/suds/xsd/depsort.py b/libs_crutch/contrib/suds/xsd/depsort.py deleted file mode 100755 index 61847db..0000000 --- a/libs_crutch/contrib/suds/xsd/depsort.py +++ /dev/null @@ -1,71 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -Dependency/topological sort implementation. - -""" - -from suds import * - -from logging import getLogger -log = getLogger(__name__) - - -def dependency_sort(dependency_tree): - """ - Sorts items 'dependencies first' in a given dependency tree. - - A dependency tree is a dictionary mapping an object to a collection its - dependency objects. 
- - Result is a properly sorted list of items, where each item is a 2-tuple - containing an object and its dependency list, as given in the input - dependency tree. - - If B is directly or indirectly dependent on A and they are not both a part - of the same dependency cycle (i.e. then A is neither directly nor - indirectly dependent on B) then A needs to come before B. - - If A and B are a part of the same dependency cycle, i.e. if they are both - directly or indirectly dependent on each other, then it does not matter - which comes first. - - Any entries found listed as dependencies, but that do not have their own - dependencies listed as well, are logged & ignored. - - @return: The sorted items. - @rtype: list - - """ - sorted = [] - processed = set() - for key, deps in dependency_tree.items(): - _sort_r(sorted, processed, key, deps, dependency_tree) - return sorted - - -def _sort_r(sorted, processed, key, deps, dependency_tree): - """Recursive topological sort implementation.""" - if key in processed: - return - processed.add(key) - for dep_key in deps: - dep_deps = dependency_tree.get(dep_key) - if dep_deps is None: - log.debug('"%s" not found, skipped', Repr(dep_key)) - continue - _sort_r(sorted, processed, dep_key, dep_deps, dependency_tree) - sorted.append((key, deps)) diff --git a/libs_crutch/contrib/suds/xsd/doctor.py b/libs_crutch/contrib/suds/xsd/doctor.py deleted file mode 100755 index 5a52e76..0000000 --- a/libs_crutch/contrib/suds/xsd/doctor.py +++ /dev/null @@ -1,223 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The I{doctor} module provides classes for fixing broken (sick) -schema(s). -""" - -from suds.sax import Namespace -from suds.sax.element import Element -from suds.plugin import DocumentPlugin, DocumentContext - -from logging import getLogger -log = getLogger(__name__) - - -class Doctor: - """ - Schema Doctor. - """ - def examine(self, root): - """ - Examine and repair the schema (if necessary). - @param root: A schema root element. - @type root: L{Element} - """ - pass - - -class Practice(Doctor): - """ - A collection of doctors. - @ivar doctors: A list of doctors. - @type doctors: list - """ - - def __init__(self): - self.doctors = [] - - def add(self, doctor): - """ - Add a doctor to the practice - @param doctor: A doctor to add. - @type doctor: L{Doctor} - """ - self.doctors.append(doctor) - - def examine(self, root): - for d in self.doctors: - d.examine(root) - return root - - -class TnsFilter: - """ - Target Namespace filter. - @ivar tns: A list of target namespaces. - @type tns: [str,...] - """ - - def __init__(self, *tns): - """ - @param tns: A list of target namespaces. - @type tns: [str,...] - """ - self.tns = [] - self.add(*tns) - - def add(self, *tns): - """ - Add I{targetNamespaces} to be added. - @param tns: A list of target namespaces. - @type tns: [str,...] - """ - self.tns += tns - - def match(self, root, ns): - """ - Match by I{targetNamespace} excluding those that - are equal to the specified namespace to prevent - adding an import to itself. - @param root: A schema root. 
- @type root: L{Element} - """ - tns = root.get('targetNamespace') - if len(self.tns): - matched = ( tns in self.tns ) - else: - matched = 1 - itself = ( ns == tns ) - return ( matched and not itself ) - - -class Import: - """ - An to be applied. - @cvar xsdns: The XSD namespace. - @type xsdns: (p,u) - @ivar ns: An import namespace. - @type ns: str - @ivar location: An optional I{schemaLocation}. - @type location: str - @ivar filter: A filter used to restrict application to - a particular schema. - @type filter: L{TnsFilter} - """ - - xsdns = Namespace.xsdns - - def __init__(self, ns, location=None): - """ - @param ns: An import namespace. - @type ns: str - @param location: An optional I{schemaLocation}. - @type location: str - """ - self.ns = ns - self.location = location - self.filter = TnsFilter() - - def setfilter(self, filter): - """ - Set the filter. - @param filter: A filter to set. - @type filter: L{TnsFilter} - """ - self.filter = filter - - def apply(self, root): - """ - Apply the import (rule) to the specified schema. - If the schema does not already contain an import for the - I{namespace} specified here, it is added. - @param root: A schema root. - @type root: L{Element} - """ - if not self.filter.match(root, self.ns): - return - if self.exists(root): - return - node = Element('import', ns=self.xsdns) - node.set('namespace', self.ns) - if self.location is not None: - node.set('schemaLocation', self.location) - log.debug('inserting: %s', node) - root.insert(node) - - def add(self, root): - """ - Add an to the specified schema root. - @param root: A schema root. - @type root: L{Element} - """ - node = Element('import', ns=self.xsdns) - node.set('namespace', self.ns) - if self.location is not None: - node.set('schemaLocation', self.location) - log.debug('%s inserted', node) - root.insert(node) - - def exists(self, root): - """ - Check to see if the already exists - in the specified schema root by matching I{namespace}. - @param root: A schema root. 
- @type root: L{Element} - """ - for node in root.children: - if node.name != 'import': - continue - ns = node.get('namespace') - if self.ns == ns: - return 1 - return 0 - - -class ImportDoctor(Doctor, DocumentPlugin): - """ - Doctor used to fix missing imports. - @ivar imports: A list of imports to apply. - @type imports: [L{Import},...] - """ - - def __init__(self, *imports): - self.imports = [] - self.add(*imports) - - def add(self, *imports): - """ - Add a namespace to be checked. - @param imports: A list of L{Import} objects. - @type imports: [L{Import},..] - """ - self.imports += imports - - def examine(self, node): - for imp in self.imports: - imp.apply(node) - - def parsed(self, context): - node = context.document - # xsd root - if node.name == 'schema' and Namespace.xsd(node.namespace()): - self.examine(node) - return - # look deeper - context = DocumentContext() - for child in node: - context.document = child - self.parsed(context) diff --git a/libs_crutch/contrib/suds/xsd/query.py b/libs_crutch/contrib/suds/xsd/query.py deleted file mode 100755 index 4092325..0000000 --- a/libs_crutch/contrib/suds/xsd/query.py +++ /dev/null @@ -1,208 +0,0 @@ -# This program is free software; you can redistribute it and/or modify -# it under the terms of the (LGPL) GNU Lesser General Public License as -# published by the Free Software Foundation; either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Library Lesser General Public License for more details at -# ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The I{query} module defines a class for performing schema queries. -""" - -from suds import * -from suds.sudsobject import * -from suds.xsd import qualify, isqref -from suds.xsd.sxbuiltin import Factory - -from logging import getLogger -log = getLogger(__name__) - - -class Query(Object): - """ - Schema query base class. - - """ - def __init__(self, ref=None): - """ - @param ref: The schema reference being queried. - @type ref: qref - """ - Object.__init__(self) - self.id = objid(self) - self.ref = ref - self.history = [] - self.resolved = False - if not isqref(self.ref): - raise Exception('%s, must be qref' % tostr(self.ref)) - - def execute(self, schema): - """ - Execute this query using the specified schema. - @param schema: The schema associated with the query. The schema is used - by the query to search for items. - @type schema: L{schema.Schema} - @return: The item matching the search criteria. - @rtype: L{sxbase.SchemaObject} - """ - raise Exception('not-implemented by subclass') - - def filter(self, result): - """ - Filter the specified result based on query criteria. - @param result: A potential result. - @type result: L{sxbase.SchemaObject} - @return: True if result should be excluded. - @rtype: boolean - """ - if result is None: - return True - reject = ( result in self.history ) - if reject: - log.debug('result %s, rejected by\n%s', Repr(result), self) - return reject - - def result(self, result): - """ - Query result post processing. - @param result: A query result. - @type result: L{sxbase.SchemaObject} - """ - if result is None: - log.debug('%s, not-found', self.ref) - return - if self.resolved: - result = result.resolve() - log.debug('%s, found as: %s', self.ref, Repr(result)) - self.history.append(result) - return result - - -class BlindQuery(Query): - """ - Schema query class that I{blindly} searches for a reference in the - specified schema. 
It may be used to find Elements and Types but will match - on an Element first. This query will also find builtins. - - """ - def execute(self, schema): - if schema.builtin(self.ref): - name = self.ref[0] - b = Factory.create(schema, name) - log.debug('%s, found builtin (%s)', self.id, name) - return b - result = None - for d in (schema.elements, schema.types): - result = d.get(self.ref) - if self.filter(result): - result = None - else: - break - if result is None: - eq = ElementQuery(self.ref) - eq.history = self.history - result = eq.execute(schema) - return self.result(result) - - -class TypeQuery(Query): - """ - Schema query class that searches for Type references in the specified - schema. Matches on root types only. - - """ - def execute(self, schema): - if schema.builtin(self.ref): - name = self.ref[0] - b = Factory.create(schema, name) - log.debug('%s, found builtin (%s)', self.id, name) - return b - result = schema.types.get(self.ref) - if self.filter(result): - result = None - return self.result(result) - - -class GroupQuery(Query): - """ - Schema query class that searches for Group references in the specified - schema. - - """ - def execute(self, schema): - result = schema.groups.get(self.ref) - if self.filter(result): - result = None - return self.result(result) - - -class AttrQuery(Query): - """ - Schema query class that searches for Attribute references in the specified - schema. Matches on root Attribute by qname first, then searches deeper into - the document. 
- - """ - def execute(self, schema): - result = schema.attributes.get(self.ref) - if self.filter(result): - result = self.__deepsearch(schema) - return self.result(result) - - def __deepsearch(self, schema): - from suds.xsd.sxbasic import Attribute - result = None - for e in schema.all: - result = e.find(self.ref, (Attribute,)) - if self.filter(result): - result = None - else: - break - return result - - -class AttrGroupQuery(Query): - """ - Schema query class that searches for attributeGroup references in the - specified schema. - - """ - def execute(self, schema): - result = schema.agrps.get(self.ref) - if self.filter(result): - result = None - return self.result(result) - - -class ElementQuery(Query): - """ - Schema query class that searches for Element references in the specified - schema. Matches on root Elements by qname first, then searches deeper into - the document. - - """ - def execute(self, schema): - result = schema.elements.get(self.ref) - if self.filter(result): - result = self.__deepsearch(schema) - return self.result(result) - - def __deepsearch(self, schema): - from suds.xsd.sxbasic import Element - result = None - for e in schema.all: - result = e.find(self.ref, (Element,)) - if self.filter(result): - result = None - else: - break - return result diff --git a/libs_crutch/contrib/suds/xsd/schema.py b/libs_crutch/contrib/suds/xsd/schema.py deleted file mode 100755 index 8bcd08c..0000000 --- a/libs_crutch/contrib/suds/xsd/schema.py +++ /dev/null @@ -1,464 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -""" -The I{schema} module provides an intelligent representation of an XSD schema. -The I{raw} model is the XML tree and the I{model} is a denormalized, -objectified and intelligent view of the schema. Most of the I{value-add} -provided by the model is centered around transparent referenced type resolution -and targeted denormalization. - -""" - -from suds import * -from suds.xsd import * -from suds.xsd.depsort import dependency_sort -from suds.xsd.sxbuiltin import * -from suds.xsd.sxbase import SchemaObject -from suds.xsd.sxbasic import Factory as BasicFactory -from suds.xsd.sxbuiltin import Factory as BuiltinFactory -from suds.sax import splitPrefix, Namespace -from suds.sax.element import Element - -from logging import getLogger -log = getLogger(__name__) - - -class SchemaCollection(UnicodeMixin): - """ - A collection of schema objects. - - This class is needed because a WSDL may contain more then one - node. - - @ivar wsdl: A WSDL object. - @type wsdl: L{suds.wsdl.Definitions} - @ivar children: A list contained schemas. - @type children: [L{Schema},...] - @ivar namespaces: A dictionary of contained schemas by namespace. - @type namespaces: {str: L{Schema}} - - """ - - def __init__(self, wsdl): - """ - @param wsdl: A WSDL object. - @type wsdl: L{suds.wsdl.Definitions} - - """ - self.wsdl = wsdl - self.children = [] - self.namespaces = {} - - def add(self, schema): - """ - Add a schema node to the collection. Schema(s) within the same target - namespace are consolidated. - - @param schema: A schema object. 
- @type schema: (L{Schema}) - - """ - key = schema.tns[1] - existing = self.namespaces.get(key) - if existing is None: - self.children.append(schema) - self.namespaces[key] = schema - else: - existing.root.children += schema.root.children - existing.root.nsprefixes.update(schema.root.nsprefixes) - - def load(self, options, loaded_schemata): - """ - Load schema objects for the root nodes. - - de-reference schemas - - merge schemas - - @param options: An options dictionary. - @type options: L{options.Options} - @param loaded_schemata: Already loaded schemata cache (URL --> Schema). - @type loaded_schemata: dict - @return: The merged schema. - @rtype: L{Schema} - - """ - if options.autoblend: - self.autoblend() - for child in self.children: - child.build() - for child in self.children: - child.open_imports(options, loaded_schemata) - for child in self.children: - child.dereference() - log.debug("loaded:\n%s", self) - merged = self.merge() - log.debug("MERGED:\n%s", merged) - return merged - - def autoblend(self): - """ - Ensure that all schemas within the collection import each other which - has a blending effect. - - @return: self - @rtype: L{SchemaCollection} - - """ - namespaces = list(self.namespaces.keys()) - for s in self.children: - for ns in namespaces: - tns = s.root.get("targetNamespace") - if tns == ns: - continue - for imp in s.root.getChildren("import"): - if imp.get("namespace") == ns: - continue - imp = Element("import", ns=Namespace.xsdns) - imp.set("namespace", ns) - s.root.append(imp) - return self - - def locate(self, ns): - """ - Find a schema by namespace. Only the URI portion of the namespace is - compared to each schema's I{targetNamespace}. - - @param ns: A namespace. - @type ns: (prefix, URI) - @return: The schema matching the namespace, else None. - @rtype: L{Schema} - - """ - return self.namespaces.get(ns[1]) - - def merge(self): - """ - Merge contained schemas into one. - - @return: The merged schema. 
- @rtype: L{Schema} - - """ - if self.children: - schema = self.children[0] - for s in self.children[1:]: - schema.merge(s) - return schema - - def __len__(self): - return len(self.children) - - def __unicode__(self): - result = ["\nschema collection"] - for s in self.children: - result.append(s.str(1)) - return "\n".join(result) - - -class Schema(UnicodeMixin): - """ - The schema is an objectification of a (XSD) definition. It - provides inspection, lookup and type resolution. - - @ivar root: The root node. - @type root: L{sax.element.Element} - @ivar baseurl: The I{base} URL for this schema. - @type baseurl: str - @ivar container: A schema collection containing this schema. - @type container: L{SchemaCollection} - @ivar children: A list of direct top level children. - @type children: [L{SchemaObject},...] - @ivar all: A list of all (includes imported) top level children. - @type all: [L{SchemaObject},...] - @ivar types: A schema types cache. - @type types: {name:L{SchemaObject}} - @ivar imports: A list of import objects. - @type imports: [L{SchemaObject},...] - @ivar elements: A list of objects. - @type elements: [L{SchemaObject},...] - @ivar attributes: A list of objects. - @type attributes: [L{SchemaObject},...] - @ivar groups: A list of group objects. - @type groups: [L{SchemaObject},...] - @ivar agrps: A list of attribute group objects. - @type agrps: [L{SchemaObject},...] - @ivar form_qualified: The flag indicating: (@elementFormDefault). - @type form_qualified: bool - - """ - - Tag = "schema" - - def __init__(self, root, baseurl, options, loaded_schemata=None, - container=None): - """ - @param root: The XML root. - @type root: L{sax.element.Element} - @param baseurl: The base URL used for importing. - @type baseurl: basestring - @param options: An options dictionary. - @type options: L{options.Options} - @param loaded_schemata: An optional already loaded schemata cache (URL - --> Schema). 
- @type loaded_schemata: dict - @param container: An optional container. - @type container: L{SchemaCollection} - - """ - self.root = root - self.id = objid(self) - self.tns = self.mktns() - self.baseurl = baseurl - self.container = container - self.children = [] - self.all = [] - self.types = {} - self.imports = [] - self.elements = {} - self.attributes = {} - self.groups = {} - self.agrps = {} - if options.doctor is not None: - options.doctor.examine(root) - form = self.root.get("elementFormDefault") - self.form_qualified = form == "qualified" - - # If we have a container, that container is going to take care of - # finishing our build for us in parallel with building all the other - # schemata in that container. That allows the different schema within - # the same container to freely reference each other. - #TODO: check whether this container content build parallelization is - # really necessary or if we can simply build our top-level WSDL - # contained schemata one by one as they are loaded - if container is None: - if loaded_schemata is None: - loaded_schemata = {} - loaded_schemata[baseurl] = self - #TODO: It seems like this build() step can be done for each schema - # on its own instead of letting the container do it. Building our - # XSD schema objects should not require any external schema - # information and even references between XSD schema objects within - # the same schema can not be established until all the XSD schema - # objects have been built. The only reason I can see right now why - # this step has been placed under container control is so our - # container (a SchemaCollection instance) can add some additional - # XML elements to our schema before our XSD schema object entities - # get built, but there is bound to be a cleaner way to do this, - # similar to how we support such XML modifications in suds plugins. 
- self.build() - self.open_imports(options, loaded_schemata) - log.debug("built:\n%s", self) - self.dereference() - log.debug("dereferenced:\n%s", self) - - def mktns(self): - """ - Make the schema's target namespace. - - @return: namespace representation of the schema's targetNamespace - value. - @rtype: (prefix, URI) - - """ - tns = self.root.get("targetNamespace") - tns_prefix = None - if tns is not None: - tns_prefix = self.root.findPrefix(tns) - return tns_prefix, tns - - def build(self): - """ - Build the schema (object graph) using the root node using the factory. - - Build the graph. - - Collate the children. - - """ - self.children = BasicFactory.build(self.root, self) - collated = BasicFactory.collate(self.children) - self.children = collated[0] - self.attributes = collated[2] - self.imports = collated[1] - self.elements = collated[3] - self.types = collated[4] - self.groups = collated[5] - self.agrps = collated[6] - - def merge(self, schema): - """ - Merge the schema contents. - - Only objects not already contained in this schema's collections are - merged. This provides support for bidirectional imports producing - cyclic includes. 
- - @returns: self - @rtype: L{Schema} - - """ - for item in list(schema.attributes.items()): - if item[0] in self.attributes: - continue - self.all.append(item[1]) - self.attributes[item[0]] = item[1] - for item in list(schema.elements.items()): - if item[0] in self.elements: - continue - self.all.append(item[1]) - self.elements[item[0]] = item[1] - for item in list(schema.types.items()): - if item[0] in self.types: - continue - self.all.append(item[1]) - self.types[item[0]] = item[1] - for item in list(schema.groups.items()): - if item[0] in self.groups: - continue - self.all.append(item[1]) - self.groups[item[0]] = item[1] - for item in list(schema.agrps.items()): - if item[0] in self.agrps: - continue - self.all.append(item[1]) - self.agrps[item[0]] = item[1] - schema.merged = True - return self - - def open_imports(self, options, loaded_schemata): - """ - Instruct all contained L{sxbasic.Import} children to import all of - their referenced schemas. The imported schema contents are I{merged} - in. - - @param options: An options dictionary. - @type options: L{options.Options} - @param loaded_schemata: Already loaded schemata cache (URL --> Schema). - @type loaded_schemata: dict - - """ - for imp in self.imports: - imported = imp.open(options, loaded_schemata) - if imported is None: - continue - imported.open_imports(options, loaded_schemata) - log.debug("imported:\n%s", imported) - self.merge(imported) - - def dereference(self): - """Instruct all children to perform dereferencing.""" - all = [] - indexes = {} - for child in self.children: - child.content(all) - dependencies = {} - for x in all: - x.qualify() - midx, deps = x.dependencies() - dependencies[x] = deps - indexes[x] = midx - for x, deps in dependency_sort(dependencies): - midx = indexes.get(x) - if midx is None: - continue - d = deps[midx] - log.debug("(%s) merging %s <== %s", self.tns[1], Repr(x), Repr(d)) - x.merge(d) - - def locate(self, ns): - """ - Find a schema by namespace. 
Only the URI portion of the namespace is - compared to each schema's I{targetNamespace}. The request is passed on - to the container. - - @param ns: A namespace. - @type ns: (prefix, URI) - @return: The schema matching the namespace, else None. - @rtype: L{Schema} - - """ - if self.container is not None: - return self.container.locate(ns) - - def custom(self, ref, context=None): - """ - Get whether the specified reference is B{not} an (xs) builtin. - - @param ref: A str or qref. - @type ref: (str|qref) - @return: True if B{not} a builtin, else False. - @rtype: bool - - """ - return ref is None or not self.builtin(ref, context) - - def builtin(self, ref, context=None): - """ - Get whether the specified reference is an (xs) builtin. - - @param ref: A str or qref. - @type ref: (str|qref) - @return: True if builtin, else False. - @rtype: bool - - """ - w3 = "http://www.w3.org" - try: - if isqref(ref): - ns = ref[1] - return ref[0] in Factory.tags and ns.startswith(w3) - if context is None: - context = self.root - prefix = splitPrefix(ref)[0] - prefixes = context.findPrefixes(w3, "startswith") - return prefix in prefixes and ref[0] in Factory.tags - except Exception: - return False - - def instance(self, root, baseurl, loaded_schemata, options): - """ - Create and return an new schema object using the specified I{root} and - I{URL}. - - @param root: A schema root node. - @type root: L{sax.element.Element} - @param baseurl: A base URL. - @type baseurl: str - @param loaded_schemata: Already loaded schemata cache (URL --> Schema). - @type loaded_schemata: dict - @param options: An options dictionary. - @type options: L{options.Options} - @return: The newly created schema object. - @rtype: L{Schema} - @note: This is only used by Import children. 
- - """ - return Schema(root, baseurl, options, loaded_schemata) - - def str(self, indent=0): - tab = "%*s" % (indent * 3, "") - result = [] - result.append("%s%s" % (tab, self.id)) - result.append("%s(raw)" % (tab,)) - result.append(self.root.str(indent + 1)) - result.append("%s(model)" % (tab,)) - for c in self.children: - result.append(c.str(indent + 1)) - result.append("") - return "\n".join(result) - - def __repr__(self): - return '<%s tns="%s"/>' % (self.id, self.tns[1]) - - def __unicode__(self): - return self.str() diff --git a/libs_crutch/contrib/suds/xsd/sxbase.py b/libs_crutch/contrib/suds/xsd/sxbase.py deleted file mode 100755 index 92eb11a..0000000 --- a/libs_crutch/contrib/suds/xsd/sxbase.py +++ /dev/null @@ -1,748 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -"""I{Base} classes representing XSD schema objects.""" - -from suds import * -from suds.xsd import * -from suds.sax.element import Element -from suds.sax import Namespace - -from logging import getLogger -log = getLogger(__name__) - - -class SchemaObject(UnicodeMixin): - """ - A schema object is an extension to object with schema awareness. - - @ivar root: The XML root element. 
- @type root: L{Element} - @ivar schema: The schema containing this object. - @type schema: L{schema.Schema} - @ivar form_qualified: A flag indicating that @elementFormDefault has a - value of I{qualified}. - @type form_qualified: boolean - @ivar nillable: A flag indicating that @nillable has a value of I{true}. - @type nillable: boolean - @ivar default: The default value. - @type default: object - @ivar rawchildren: A list raw of all children. - @type rawchildren: [L{SchemaObject},...] - - """ - - @classmethod - def prepend(cls, d, s, filter=Filter()): - """ - Prepend B{s}ource XSD schema objects to the B{d}estination list. - - B{filter} is used to decide which objects to prepend and which to skip. - - @param d: The destination list. - @type d: list - @param s: The source list. - @type s: list - @param filter: A filter allowing items to be prepended. - @type filter: L{Filter} - - """ - i = 0 - for x in s: - if x in filter: - d.insert(i, x) - i += 1 - - @classmethod - def append(cls, d, s, filter=Filter()): - """ - Append B{s}ource XSD schema objects to the B{d}estination list. - - B{filter} is used to decide which objects to append and which to skip. - - @param d: The destination list. - @type d: list - @param s: The source list. - @type s: list - @param filter: A filter that allows items to be appended. - @type filter: L{Filter} - - """ - for item in s: - if item in filter: - d.append(item) - - def __init__(self, schema, root): - """ - @param schema: The containing schema. - @type schema: L{schema.Schema} - @param root: The XML root node. 
- @type root: L{Element} - - """ - self.schema = schema - self.root = root - self.id = objid(self) - self.name = root.get("name") - self.qname = (self.name, schema.tns[1]) - self.min = root.get("minOccurs") - self.max = root.get("maxOccurs") - self.type = root.get("type") - self.ref = root.get("ref") - self.form_qualified = schema.form_qualified - self.nillable = False - self.default = root.get("default") - self.rawchildren = [] - - def attributes(self, filter=Filter()): - """ - Get only the attribute content. - - @param filter: A filter to constrain the result. - @type filter: L{Filter} - @return: A list of (attr, ancestry) tuples. - @rtype: [(L{SchemaObject}, [L{SchemaObject},..]),..] - - """ - result = [] - for child, ancestry in self: - if child.isattr() and child in filter: - result.append((child, ancestry)) - return result - - def children(self, filter=Filter()): - """ - Get only the I{direct} or non-attribute content. - - @param filter: A filter to constrain the result. - @type filter: L{Filter} - @return: A list tuples: (child, ancestry) - @rtype: [(L{SchemaObject}, [L{SchemaObject},..]),..] - - """ - result = [] - for child, ancestry in self: - if not child.isattr() and child in filter: - result.append((child, ancestry)) - return result - - def get_attribute(self, name): - """ - Get (find) an attribute by name. - - @param name: A attribute name. - @type name: str - @return: A tuple: the requested (attribute, ancestry). - @rtype: (L{SchemaObject}, [L{SchemaObject},..]) - - """ - for child, ancestry in self.attributes(): - if child.name == name: - return child, ancestry - return None, [] - - def get_child(self, name): - """ - Get (find) a I{non-attribute} child by name. - - @param name: A child name. - @type name: str - @return: A tuple: the requested (child, ancestry). 
- @rtype: (L{SchemaObject}, [L{SchemaObject},..]) - - """ - for child, ancestry in self.children(): - if child.any() or child.name == name: - return child, ancestry - return None, [] - - def namespace(self, prefix=None): - """ - Get this property's namespace. - - @param prefix: The default prefix. - @type prefix: str - @return: The schema's target namespace. - @rtype: (I{prefix}, I{URI}) - - """ - ns = self.schema.tns - if ns[0] is None: - ns = (prefix, ns[1]) - return ns - - def default_namespace(self): - return self.root.defaultNamespace() - - def multi_occurrence(self): - """ - Get whether the node has multiple occurrences, i.e. is a I{collection}. - - @return: True if it has, False if it has at most 1 occurrence. - @rtype: boolean - - """ - max = self.max - if max is None: - return False - if max.isdigit(): - return int(max) > 1 - return max == "unbounded" - - def optional(self): - """ - Get whether this type is optional. - - @return: True if optional, else False. - @rtype: boolean - - """ - return self.min == "0" - - def required(self): - """ - Get whether this type is required. - - @return: True if required, else False. - @rtype: boolean - - """ - return not self.optional() - - def resolve(self, nobuiltin=False): - """ - Resolve the node's type reference and return the referenced type node. - - Only XSD schema objects that actually support 'having a type' custom - implement this interface while others simply resolve as themselves. - - @param nobuiltin: Flag indicating whether resolving to an external XSD - built-in type should not be allowed. - @return: The resolved (true) type. - @rtype: L{SchemaObject} - """ - return self - - def sequence(self): - """ - Get whether this is an . - - @return: True if , else False. - @rtype: boolean - - """ - return False - - def xslist(self): - """ - Get whether this is an . - - @return: True if , else False. - @rtype: boolean - - """ - return False - - def all(self): - """ - Get whether this is an . 
- - @return: True if , else False. - @rtype: boolean - - """ - return False - - def choice(self): - """ - Get whether this is an . - - @return: True if , else False. - @rtype: boolean - - """ - return False - - def any(self): - """ - Get whether this is an . - - @return: True if , else False. - @rtype: boolean - - """ - return False - - def builtin(self): - """ - Get whether this is a built-in schema-instance XSD type. - - @return: True if a built-in type, else False. - @rtype: boolean - - """ - return False - - def enum(self): - """ - Get whether this is a simple-type containing an enumeration. - - @return: True if enumeration, else False. - @rtype: boolean - - """ - return False - - def isattr(self): - """ - Get whether the object is a schema I{attribute} definition. - - @return: True if an attribute, else False. - @rtype: boolean - - """ - return False - - def extension(self): - """ - Get whether the object is an extension of another type. - - @return: True if an extension, else False. - @rtype: boolean - - """ - return False - - def restriction(self): - """ - Get whether the object is an restriction of another type. - - @return: True if a restriction, else False. - @rtype: boolean - - """ - return False - - def mixed(self): - """Get whether the object has I{mixed} content.""" - return False - - def find(self, qref, classes=[], ignore=None): - """ - Find a referenced type in self or children. Return None if not found. - - Qualified references for all schema objects checked in this search will - be added to the set of ignored qualified references to avoid the find - operation going into an infinite loop in case of recursively defined - structures. - - @param qref: A qualified reference. - @type qref: qref - @param classes: A collection of classes used to qualify the match. - @type classes: Collection(I{class},...), e.g. [I(class),...] - @param ignore: A set of qualified references to ignore in this search. 
- @type ignore: {qref,...} - @return: The referenced type. - @rtype: L{SchemaObject} - @see: L{qualify()} - - """ - if not len(classes): - classes = (self.__class__,) - if ignore is None: - ignore = set() - if self.qname in ignore: - return - ignore.add(self.qname) - if self.qname == qref and self.__class__ in classes: - return self - for c in self.rawchildren: - p = c.find(qref, classes, ignore=ignore) - if p is not None: - return p - - def translate(self, value, topython=True): - """ - Translate between an XSD type values and Python objects. - - When converting a Python object to an XSD type value the operation may - return any Python object whose string representation matches the - desired XSD type value. - - @param value: A value to translate. - @type value: str if topython is True; any Python object otherwise - @param topython: Flag indicating the translation direction. - @type topython: bool - @return: The converted I{language} type. - - """ - return value - - def childtags(self): - """ - Get a list of valid child tag names. - - @return: A list of child tag names. - @rtype: [str,...] - - """ - return () - - def dependencies(self): - """ - Get a list of dependencies for dereferencing. - - @return: A merge dependency index and a list of dependencies. - @rtype: (int, [L{SchemaObject},...]) - - """ - return None, [] - - def autoqualified(self): - """ - The list of I{auto} qualified attribute values. - - Qualification means to convert values into I{qref}. - - @return: A list of attribute names. - @rtype: list - - """ - return ["type", "ref"] - - def qualify(self): - """ - Convert reference attribute values into a I{qref}. - - Constructed I{qref} uses the default document namespace. Since many - WSDL schemas are written improperly: when the document does not define - its default namespace, the schema target namespace is used to qualify - references. 
- - """ - defns = self.root.defaultNamespace() - if Namespace.none(defns): - defns = self.schema.tns - for a in self.autoqualified(): - ref = getattr(self, a) - if ref is None: - continue - if isqref(ref): - continue - qref = qualify(ref, self.root, defns) - log.debug("%s, convert %s='%s' to %s", self.id, a, ref, qref) - setattr(self, a, qref) - - def merge(self, other): - """Merge another object as needed.""" - other.qualify() - for n in ("default", "max", "min", "name", "nillable", "qname", - "type"): - if getattr(self, n) is not None: - continue - v = getattr(other, n) - if v is None: - continue - setattr(self, n, v) - - def content(self, collection=None, filter=Filter(), history=None): - """ - Get a I{flattened} list of this node's contents. - - @param collection: A list to fill. - @type collection: list - @param filter: A filter used to constrain the result. - @type filter: L{Filter} - @param history: The history list used to prevent cyclic dependency. - @type history: list - @return: The filled list. - @rtype: list - - """ - if collection is None: - collection = [] - if history is None: - history = [] - if self in history: - return collection - history.append(self) - if self in filter: - collection.append(self) - for c in self.rawchildren: - c.content(collection, filter, history) - history.pop() - return collection - - def str(self, indent=0, history=None): - """ - Get a string representation of this object. - - @param indent: The indent. - @type indent: int - @return: A string. - @rtype: str - - """ - if history is None: - history = [] - if self in history: - return "%s ..." 
% Repr(self) - history.append(self) - tab = "%*s" % (indent * 3, "") - result = ["%s<%s" % (tab, self.id)] - for n in self.description(): - if not hasattr(self, n): - continue - v = getattr(self, n) - if v is None: - continue - result.append(' %s="%s"' % (n, v)) - if len(self): - result.append(">") - for c in self.rawchildren: - result.append("\n") - result.append(c.str(indent+1, history[:])) - if c.isattr(): - result.append("@") - result.append("\n%s" % (tab,)) - result.append("" % (self.__class__.__name__,)) - else: - result.append(" />") - return "".join(result) - - def description(self): - """ - Get the names used for repr() and str() description. - - @return: A dictionary of relevant attributes. - @rtype: [str,...] - - """ - return () - - def __unicode__(self): - return str(self.str()) - - def __repr__(self): - s = [] - s.append("<%s" % (self.id,)) - for n in self.description(): - if not hasattr(self, n): - continue - v = getattr(self, n) - if v is None: - continue - s.append(' %s="%s"' % (n, v)) - s.append(" />") - return "".join(s) - - def __len__(self): - n = 0 - for x in self: - n += 1 - return n - - def __iter__(self): - return Iter(self) - - def __getitem__(self, index): - """ - Returns a contained schema object referenced by its 0-based index. - - Returns None if such an object does not exist. - - """ - i = 0 - for c in self: - if i == index: - return c - i += 1 - - -class Iter: - """ - The content iterator - used to iterate the L{Content} children. - - The iterator provides a I{view} of the children that is free of container - elements such as , or . - - @ivar stack: A stack used to control nesting. - @type stack: list - - """ - - class Frame: - """A content iterator frame.""" - - def __init__(self, sx): - """ - @param sx: A schema object. - @type sx: L{SchemaObject} - - """ - self.sx = sx - self.items = sx.rawchildren - self.index = 0 - - def __next__(self): - """ - Get the I{next} item in the frame's collection. 
- - @return: The next item or None - @rtype: L{SchemaObject} - - """ - if self.index < len(self.items): - result = self.items[self.index] - self.index += 1 - return result - - def __init__(self, sx): - """ - @param sx: A schema object. - @type sx: L{SchemaObject} - - """ - self.stack = [] - self.push(sx) - - def push(self, sx): - """ - Create a frame and push the specified object. - - @param sx: A schema object to push. - @type sx: L{SchemaObject} - - """ - self.stack.append(Iter.Frame(sx)) - - def pop(self): - """ - Pop the I{top} frame. - - @return: The popped frame. - @rtype: L{Frame} - @raise StopIteration: when stack is empty. - - """ - if self.stack: - return self.stack.pop() - raise StopIteration() - - def top(self): - """ - Get the I{top} frame. - - @return: The top frame. - @rtype: L{Frame} - @raise StopIteration: when stack is empty. - - """ - if self.stack: - return self.stack[-1] - raise StopIteration() - - def __next__(self): - """ - Get the next item. - - @return: A tuple: the next (child, ancestry). - @rtype: (L{SchemaObject}, [L{SchemaObject},..]) - @raise StopIteration: A the end. - - """ - frame = self.top() - while True: - result = next(frame) - if result is None: - self.pop() - return next(self) - if isinstance(result, Content): - ancestry = [f.sx for f in self.stack] - return result, ancestry - self.push(result) - return next(self) - - def __iter__(self): - return self - - -class XBuiltin(SchemaObject): - """Represents a built-in XSD schema node.""" - - def __init__(self, schema, name): - """ - @param schema: The containing schema. 
- @type schema: L{schema.Schema} - - """ - root = Element(name) - SchemaObject.__init__(self, schema, root) - self.name = name - self.nillable = True - - def namespace(self, prefix=None): - return Namespace.xsdns - - def builtin(self): - return True - - -class Content(SchemaObject): - """XSD schema objects representing real XML document content.""" - pass - - -class NodeFinder: - """ - Find nodes based on flexable criteria. - - I{matcher} may be any object implementing a match(n) method. - - @ivar matcher: An object used as criteria for match. - @type matcher: I{any}.match(n) - @ivar limit: Limit the number of matches. 0=unlimited. - @type limit: int - - """ - def __init__(self, matcher, limit=0): - """ - @param matcher: An object used as criteria for match. - @type matcher: I{any}.match(n) - @param limit: Limit the number of matches. 0=unlimited. - @type limit: int - - """ - self.matcher = matcher - self.limit = limit - - def find(self, node, list): - """ - Traverse the tree looking for matches. - - @param node: A node to match on. - @type node: L{SchemaObject} - @param list: A list to fill. - @type list: list - - """ - if self.matcher.match(node): - list.append(node) - self.limit -= 1 - if self.limit == 0: - return - for c in node.rawchildren: - self.find(c, list) - return self diff --git a/libs_crutch/contrib/suds/xsd/sxbasic.py b/libs_crutch/contrib/suds/xsd/sxbasic.py deleted file mode 100755 index 38f7785..0000000 --- a/libs_crutch/contrib/suds/xsd/sxbasic.py +++ /dev/null @@ -1,862 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# written by: Jeff Ortel ( jortel@redhat.com ) - -"""Classes representing I{basic} XSD schema objects.""" - -from suds import * -from suds.reader import DocumentReader -from suds.sax import Namespace -from suds.transport import TransportError -from suds.xsd import * -from suds.xsd.query import * -from suds.xsd.sxbase import * - -from urllib.parse import urljoin - -from logging import getLogger -log = getLogger(__name__) - - -class RestrictionMatcher: - """For use with L{NodeFinder} to match restriction.""" - def match(self, n): - return isinstance(n, Restriction) - - -class TypedContent(Content): - """Represents any I{typed} content.""" - - def __init__(self, *args, **kwargs): - Content.__init__(self, *args, **kwargs) - self.resolved_cache = {} - - def resolve(self, nobuiltin=False): - """ - Resolve the node's type reference and return the referenced type node. - - Returns self if the type is defined locally, e.g. as a - subnode. Otherwise returns the referenced external node. - - @param nobuiltin: Flag indicating whether resolving to XSD built-in - types should not be allowed. - @return: The resolved (true) type. - @rtype: L{SchemaObject} - - """ - cached = self.resolved_cache.get(nobuiltin) - if cached is not None: - return cached - resolved = self.__resolve_type(nobuiltin) - self.resolved_cache[nobuiltin] = resolved - return resolved - - def __resolve_type(self, nobuiltin=False): - """ - Private resolve() worker without any result caching. - - @param nobuiltin: Flag indicating whether resolving to XSD built-in - types should not be allowed. - @return: The resolved (true) type. 
- @rtype: L{SchemaObject} - - """ - # There is no need for a recursive implementation here since a node can - # reference an external type node but XSD specification explicitly - # states that that external node must not be a reference to yet another - # node. - qref = self.qref() - if qref is None: - return self - query = TypeQuery(qref) - query.history = [self] - log.debug("%s, resolving: %s\n using:%s", self.id, qref, query) - resolved = query.execute(self.schema) - if resolved is None: - log.debug(self.schema) - raise TypeNotFound(qref) - if resolved.builtin() and nobuiltin: - return self - return resolved - - def qref(self): - """ - Get the I{type} qualified reference to the referenced XSD type. - - This method takes into account simple types defined through restriction - which are detected by determining that self is simple (len == 0) and by - finding a restriction child. - - @return: The I{type} qualified reference. - @rtype: qref - - """ - qref = self.type - if qref is None and len(self) == 0: - ls = [] - m = RestrictionMatcher() - finder = NodeFinder(m, 1) - finder.find(self, ls) - if ls: - return ls[0].ref - return qref - - -class Complex(SchemaObject): - """ - Represents an XSD schema node. - - @cvar childtags: A list of valid child node names. - @type childtags: (I{str},...) - - """ - - def childtags(self): - return ("all", "any", "attribute", "attributeGroup", "choice", - "complexContent", "group", "sequence", "simpleContent") - - def description(self): - return ("name",) - - def extension(self): - for c in self.rawchildren: - if c.extension(): - return True - return False - - def mixed(self): - for c in self.rawchildren: - if isinstance(c, SimpleContent) and c.mixed(): - return True - return False - - -class Group(SchemaObject): - """ - Represents an XSD schema node. - - @cvar childtags: A list of valid child node names. - @type childtags: (I{str},...) 
- - """ - - def childtags(self): - return "all", "choice", "sequence" - - def dependencies(self): - deps = [] - midx = None - if self.ref is not None: - query = GroupQuery(self.ref) - g = query.execute(self.schema) - if g is None: - log.debug(self.schema) - raise TypeNotFound(self.ref) - deps.append(g) - midx = 0 - return midx, deps - - def merge(self, other): - SchemaObject.merge(self, other) - self.rawchildren = other.rawchildren - - def description(self): - return "name", "ref" - - -class AttributeGroup(SchemaObject): - """ - Represents an XSD schema node. - - @cvar childtags: A list of valid child node names. - @type childtags: (I{str},...) - - """ - - def childtags(self): - return "attribute", "attributeGroup" - - def dependencies(self): - deps = [] - midx = None - if self.ref is not None: - query = AttrGroupQuery(self.ref) - ag = query.execute(self.schema) - if ag is None: - log.debug(self.schema) - raise TypeNotFound(self.ref) - deps.append(ag) - midx = 0 - return midx, deps - - def merge(self, other): - SchemaObject.merge(self, other) - self.rawchildren = other.rawchildren - - def description(self): - return "name", "ref" - - -class Simple(SchemaObject): - """Represents an XSD schema node.""" - - def childtags(self): - return "any", "list", "restriction" - - def enum(self): - for child, ancestry in self.children(): - if isinstance(child, Enumeration): - return True - return False - - def mixed(self): - return len(self) - - def description(self): - return ("name",) - - def extension(self): - for c in self.rawchildren: - if c.extension(): - return True - return False - - def restriction(self): - for c in self.rawchildren: - if c.restriction(): - return True - return False - - -class List(SchemaObject): - """Represents an XSD schema node.""" - - def childtags(self): - return () - - def description(self): - return ("name",) - - def xslist(self): - return True - - -class Restriction(SchemaObject): - """Represents an XSD schema node.""" - - def __init__(self, 
schema, root): - SchemaObject.__init__(self, schema, root) - self.ref = root.get("base") - - def childtags(self): - return "attribute", "attributeGroup", "enumeration" - - def dependencies(self): - deps = [] - midx = None - if self.ref is not None: - query = TypeQuery(self.ref) - super = query.execute(self.schema) - if super is None: - log.debug(self.schema) - raise TypeNotFound(self.ref) - if not super.builtin(): - deps.append(super) - midx = 0 - return midx, deps - - def restriction(self): - return True - - def merge(self, other): - SchemaObject.merge(self, other) - filter = Filter(False, self.rawchildren) - self.prepend(self.rawchildren, other.rawchildren, filter) - - def description(self): - return ("ref",) - - -class Collection(SchemaObject): - """Represents an XSD schema collection (a.k.a. order indicator) node.""" - - def childtags(self): - return "all", "any", "choice", "element", "group", "sequence" - - -class All(Collection): - """Represents an XSD schema node.""" - def all(self): - return True - - -class Choice(Collection): - """Represents an XSD schema node.""" - def choice(self): - return True - - -class Sequence(Collection): - """Represents an XSD schema node.""" - def sequence(self): - return True - - -class ComplexContent(SchemaObject): - """Represents an XSD schema node.""" - - def childtags(self): - return "attribute", "attributeGroup", "extension", "restriction" - - def extension(self): - for c in self.rawchildren: - if c.extension(): - return True - return False - - def restriction(self): - for c in self.rawchildren: - if c.restriction(): - return True - return False - - -class SimpleContent(SchemaObject): - """Represents an XSD schema node.""" - - def childtags(self): - return "extension", "restriction" - - def extension(self): - for c in self.rawchildren: - if c.extension(): - return True - return False - - def restriction(self): - for c in self.rawchildren: - if c.restriction(): - return True - return False - - def mixed(self): - return 
len(self) - - -class Enumeration(Content): - """Represents an XSD schema node.""" - - def __init__(self, schema, root): - Content.__init__(self, schema, root) - self.name = root.get("value") - - def description(self): - return ("name",) - - def enum(self): - return True - - -class Element(TypedContent): - """Represents an XSD schema node.""" - - def __init__(self, schema, root): - TypedContent.__init__(self, schema, root) - is_reference = self.ref is not None - is_top_level = root.parent is schema.root - if is_reference or is_top_level: - self.form_qualified = True - else: - form = root.get("form") - if form is not None: - self.form_qualified = (form == "qualified") - nillable = self.root.get("nillable") - if nillable is not None: - self.nillable = (nillable in ("1", "true")) - self.implany() - - def implany(self): - """ - Set the type to when implicit. - - An element has an implicit type when it has no body and no - explicitly defined type. - - @return: self - @rtype: L{Element} - - """ - if self.type is None and self.ref is None and self.root.isempty(): - self.type = self.anytype() - - def childtags(self): - return "any", "attribute", "complexType", "simpleType" - - def extension(self): - for c in self.rawchildren: - if c.extension(): - return True - return False - - def restriction(self): - for c in self.rawchildren: - if c.restriction(): - return True - return False - - def dependencies(self): - deps = [] - midx = None - e = self.__deref() - if e is not None: - deps.append(e) - midx = 0 - return midx, deps - - def merge(self, other): - SchemaObject.merge(self, other) - self.rawchildren = other.rawchildren - - def description(self): - return "name", "ref", "type" - - def anytype(self): - """Create an xsd:anyType reference.""" - p, u = Namespace.xsdns - mp = self.root.findPrefix(u) - if mp is None: - mp = p - self.root.addPrefix(p, u) - return ":".join((mp, "anyType")) - - def namespace(self, prefix=None): - """ - Get this schema element's target namespace. 
- - In case of reference elements, the target namespace is defined by the - referenced and not the referencing element node. - - @param prefix: The default prefix. - @type prefix: str - @return: The schema element's target namespace - @rtype: (I{prefix},I{URI}) - - """ - e = self.__deref() - if e is not None: - return e.namespace(prefix) - return super(Element, self).namespace() - - def __deref(self): - if self.ref is None: - return - query = ElementQuery(self.ref) - e = query.execute(self.schema) - if e is None: - log.debug(self.schema) - raise TypeNotFound(self.ref) - return e - - -class Extension(SchemaObject): - """Represents an XSD schema node.""" - - def __init__(self, schema, root): - SchemaObject.__init__(self, schema, root) - self.ref = root.get("base") - - def childtags(self): - return ("all", "attribute", "attributeGroup", "choice", "group", - "sequence") - - def dependencies(self): - deps = [] - midx = None - if self.ref is not None: - query = TypeQuery(self.ref) - super = query.execute(self.schema) - if super is None: - log.debug(self.schema) - raise TypeNotFound(self.ref) - if not super.builtin(): - deps.append(super) - midx = 0 - return midx, deps - - def merge(self, other): - SchemaObject.merge(self, other) - filter = Filter(False, self.rawchildren) - self.prepend(self.rawchildren, other.rawchildren, filter) - - def extension(self): - return self.ref is not None - - def description(self): - return ("ref",) - - -class Import(SchemaObject): - """ - Represents an XSD schema node. - - @cvar locations: A dictionary of namespace locations. - @type locations: dict - @ivar ns: The imported namespace. - @type ns: str - @ivar location: The (optional) location. - @type location: namespace-uri - @ivar opened: Opened and I{imported} flag. - @type opened: boolean - - """ - - locations = {} - - @classmethod - def bind(cls, ns, location=None): - """ - Bind a namespace to a schema location (URI). - - This is used for imports that do not specify a schemaLocation. 
- - @param ns: A namespace-uri. - @type ns: str - @param location: The (optional) schema location for the namespace. - (default=ns) - @type location: str - - """ - if location is None: - location = ns - cls.locations[ns] = location - - def __init__(self, schema, root): - SchemaObject.__init__(self, schema, root) - self.ns = (None, root.get("namespace")) - self.location = root.get("schemaLocation") - if self.location is None: - self.location = self.locations.get(self.ns[1]) - self.opened = False - - def open(self, options, loaded_schemata): - """ - Open and import the referenced schema. - - @param options: An options dictionary. - @type options: L{options.Options} - @param loaded_schemata: Already loaded schemata cache (URL --> Schema). - @type loaded_schemata: dict - @return: The referenced schema. - @rtype: L{Schema} - - """ - if self.opened: - return - self.opened = True - log.debug("%s, importing ns='%s', location='%s'", self.id, self.ns[1], - self.location) - result = self.__locate() - if result is None: - if self.location is None: - log.debug("imported schema (%s) not-found", self.ns[1]) - else: - url = self.location - if "://" not in url: - url = urljoin(self.schema.baseurl, url) - result = (loaded_schemata.get(url) or - self.__download(url, loaded_schemata, options)) - log.debug("imported:\n%s", result) - return result - - def __locate(self): - """Find the schema locally.""" - if self.ns[1] != self.schema.tns[1]: - return self.schema.locate(self.ns) - - def __download(self, url, loaded_schemata, options): - """Download the schema.""" - try: - reader = DocumentReader(options) - d = reader.open(url) - root = d.root() - root.set("url", url) - return self.schema.instance(root, url, loaded_schemata, options) - except TransportError: - msg = "import schema (%s) at (%s), failed" % (self.ns[1], url) - log.error("%s, %s", self.id, msg, exc_info=True) - raise Exception(msg) - - def description(self): - return "ns", "location" - - -class Include(SchemaObject): - """ - 
Represents an XSD schema node. - - @ivar location: The (optional) location. - @type location: namespace-uri - @ivar opened: Opened and I{imported} flag. - @type opened: boolean - - """ - - locations = {} - - def __init__(self, schema, root): - SchemaObject.__init__(self, schema, root) - self.location = root.get("schemaLocation") - if self.location is None: - self.location = self.locations.get(self.ns[1]) - self.opened = False - - def open(self, options, loaded_schemata): - """ - Open and include the referenced schema. - - @param options: An options dictionary. - @type options: L{options.Options} - @param loaded_schemata: Already loaded schemata cache (URL --> Schema). - @type loaded_schemata: dict - @return: The referenced schema. - @rtype: L{Schema} - - """ - if self.opened: - return - self.opened = True - log.debug("%s, including location='%s'", self.id, self.location) - url = self.location - if "://" not in url: - url = urljoin(self.schema.baseurl, url) - result = (loaded_schemata.get(url) or - self.__download(url, loaded_schemata, options)) - log.debug("included:\n%s", result) - return result - - def __download(self, url, loaded_schemata, options): - """Download the schema.""" - try: - reader = DocumentReader(options) - d = reader.open(url) - root = d.root() - root.set("url", url) - self.__applytns(root) - return self.schema.instance(root, url, loaded_schemata, options) - except TransportError: - msg = "include schema at (%s), failed" % url - log.error("%s, %s", self.id, msg, exc_info=True) - raise Exception(msg) - - def __applytns(self, root): - """Make sure included schema has the same target namespace.""" - TNS = "targetNamespace" - tns = root.get(TNS) - if tns is None: - tns = self.schema.tns[1] - root.set(TNS, tns) - else: - if self.schema.tns[1] != tns: - raise Exception("%s mismatch" % TNS) - - def description(self): - return "location" - - -class Attribute(TypedContent): - """Represents an XSD schema node.""" - - def __init__(self, schema, root): - 
TypedContent.__init__(self, schema, root) - self.use = root.get("use", default="") - - def childtags(self): - return ("restriction",) - - def isattr(self): - return True - - def get_default(self): - """ - Gets the attribute value. - - @return: The default value for the attribute - @rtype: str - - """ - return self.root.get("default", default="") - - def optional(self): - return self.use != "required" - - def dependencies(self): - deps = [] - midx = None - if self.ref is not None: - query = AttrQuery(self.ref) - a = query.execute(self.schema) - if a is None: - log.debug(self.schema) - raise TypeNotFound(self.ref) - deps.append(a) - midx = 0 - return midx, deps - - def description(self): - return "name", "ref", "type" - - -class Any(Content): - """Represents an XSD schema node.""" - - def get_child(self, name): - root = self.root.clone() - root.set("note", "synthesized (any) child") - child = Any(self.schema, root) - return child, [] - - def get_attribute(self, name): - root = self.root.clone() - root.set("note", "synthesized (any) attribute") - attribute = Any(self.schema, root) - return attribute, [] - - def any(self): - return True - - -class Factory: - """ - @cvar tags: A factory to create object objects based on tag. - @type tags: {tag:fn,} - - """ - - tags = { - "all": All, - "any": Any, - "attribute": Attribute, - "attributeGroup": AttributeGroup, - "choice": Choice, - "complexContent": ComplexContent, - "complexType": Complex, - "element": Element, - "enumeration": Enumeration, - "extension": Extension, - "group": Group, - "import": Import, - "include": Include, - "list": List, - "restriction": Restriction, - "simpleContent": SimpleContent, - "simpleType": Simple, - "sequence": Sequence, - } - - @classmethod - def maptag(cls, tag, fn): - """ - Map (override) tag => I{class} mapping. - - @param tag: An XSD tag name. - @type tag: str - @param fn: A function or class. - @type fn: fn|class. 
- - """ - cls.tags[tag] = fn - - @classmethod - def create(cls, root, schema): - """ - Create an object based on the root tag name. - - @param root: An XML root element. - @type root: L{Element} - @param schema: A schema object. - @type schema: L{schema.Schema} - @return: The created object. - @rtype: L{SchemaObject} - - """ - fn = cls.tags.get(root.name) - if fn is not None: - return fn(schema, root) - - @classmethod - def build(cls, root, schema, filter=("*",)): - """ - Build an xsobject representation. - - @param root: An schema XML root. - @type root: L{sax.element.Element} - @param filter: A tag filter. - @type filter: [str,...] - @return: A schema object graph. - @rtype: L{sxbase.SchemaObject} - - """ - children = [] - for node in root.getChildren(ns=Namespace.xsdns): - if "*" in filter or node.name in filter: - child = cls.create(node, schema) - if child is None: - continue - children.append(child) - c = cls.build(node, schema, child.childtags()) - child.rawchildren = c - return children - - @classmethod - def collate(cls, children): - imports = [] - elements = {} - attributes = {} - types = {} - groups = {} - agrps = {} - for c in children: - if isinstance(c, (Import, Include)): - imports.append(c) - continue - if isinstance(c, Attribute): - attributes[c.qname] = c - continue - if isinstance(c, Element): - elements[c.qname] = c - continue - if isinstance(c, Group): - groups[c.qname] = c - continue - if isinstance(c, AttributeGroup): - agrps[c.qname] = c - continue - types[c.qname] = c - for i in imports: - children.remove(i) - return children, imports, attributes, elements, types, groups, agrps - - -####################################################### -# Static Import Bindings :-( -####################################################### -Import.bind( - "http://schemas.xmlsoap.org/soap/encoding/", - "suds://schemas.xmlsoap.org/soap/encoding/") -Import.bind( - "http://www.w3.org/XML/1998/namespace", - "http://www.w3.org/2001/xml.xsd") -Import.bind( - 
"http://www.w3.org/2001/XMLSchema", - "http://www.w3.org/2001/XMLSchema.xsd") diff --git a/libs_crutch/contrib/suds/xsd/sxbuiltin.py b/libs_crutch/contrib/suds/xsd/sxbuiltin.py deleted file mode 100755 index 9d0c125..0000000 --- a/libs_crutch/contrib/suds/xsd/sxbuiltin.py +++ /dev/null @@ -1,347 +0,0 @@ -# -*- coding: utf-8 -*- - -# This program is free software; you can redistribute it and/or modify it under -# the terms of the (LGPL) GNU Lesser General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License -# for more details at ( http://www.gnu.org/licenses/lgpl.html ). -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-# written by: Jeff Ortel ( jortel@redhat.com ) - -"""Classes representing I{built-in} XSD schema objects.""" - -from suds import * -from suds.xsd import * -from suds.sax.date import * -from suds.xsd.sxbase import XBuiltin - -import datetime -import decimal -import sys - - -class XAny(XBuiltin): - """Represents an XSD node.""" - - def __init__(self, schema, name): - XBuiltin.__init__(self, schema, name) - self.nillable = False - - def get_child(self, name): - child = XAny(self.schema, name) - return child, [] - - def any(self): - return True - - -class XBoolean(XBuiltin): - """Represents an XSD boolean built-in type.""" - - _xml_to_python = {"1": True, "true": True, "0": False, "false": False} - _python_to_xml = {True: "true", 1: "true", False: "false", 0: "false"} - - @staticmethod - def translate(value, topython=True): - if topython: - if isinstance(value, str): - return XBoolean._xml_to_python.get(value) - else: - if isinstance(value, (bool, int)): - return XBoolean._python_to_xml.get(value) - return value - - -class XDate(XBuiltin): - """Represents an XSD built-in type.""" - - @staticmethod - def translate(value, topython=True): - if topython: - if isinstance(value, str) and value: - return Date(value).value - else: - if isinstance(value, datetime.date): - return Date(value) - return value - - -class XDateTime(XBuiltin): - """Represents an XSD built-in type.""" - - @staticmethod - def translate(value, topython=True): - if topython: - if isinstance(value, str) and value: - return DateTime(value).value - else: - if isinstance(value, datetime.datetime): - return DateTime(value) - return value - - -class XDecimal(XBuiltin): - """ - Represents an XSD built-in type. - - Excerpt from the XSD datatype specification - (http://www.w3.org/TR/2004/REC-xmlschema-2-20041028): - - > 3.2.3 decimal - > - > [Definition:] decimal represents a subset of the real numbers, which can - > be represented by decimal numerals. 
The ·value space· of decimal is the - > set of numbers that can be obtained by multiplying an integer by a - > non-positive power of ten, i.e., expressible as i × 10^-n where i and n - > are integers and n >= 0. Precision is not reflected in this value space; - > the number 2.0 is not distinct from the number 2.00. The ·order-relation· - > on decimal is the order relation on real numbers, restricted to this - > subset. - > - > 3.2.3.1 Lexical representation - > - > decimal has a lexical representation consisting of a finite-length - > sequence of decimal digits (#x30-#x39) separated by a period as a decimal - > indicator. An optional leading sign is allowed. If the sign is omitted, - > "+" is assumed. Leading and trailing zeroes are optional. If the - > fractional part is zero, the period and following zero(es) can be - > omitted. For example: -1.23, 12678967.543233, +100000.00, 210. - - """ - - # Python versions before 2.7 do not support the decimal.Decimal.canonical() - # method but they maintain their decimal.Decimal encoded in canonical - # format internally so we can easily emulate that function by simply - # returning the same decimal instance. - if sys.version_info < (2, 7): - _decimal_canonical = staticmethod(lambda decimal: decimal) - else: - _decimal_canonical = decimal.Decimal.canonical - - @staticmethod - def _decimal_to_xsd_format(value): - """ - Converts a decimal.Decimal value to its XSD decimal type value. - - Result is a string containing the XSD decimal type's lexical value - representation. The conversion is done without any precision loss. - - Note that Python's native decimal.Decimal string representation will - not do here as the lexical representation desired here does not allow - representing decimal values using float-like `E' - format, e.g. 12E+30 or 0.10006E-12. 
- - """ - value = XDecimal._decimal_canonical(value) - negative, digits, exponent = value.as_tuple() - - # The following implementation assumes the following tuple decimal - # encoding (part of the canonical decimal value encoding): - # - digits must contain at least one element - # - no leading integral 0 digits except a single one in 0 (if a non-0 - # decimal value has leading integral 0 digits they must be encoded - # in its 'exponent' value and not included explicitly in its - # 'digits' tuple) - assert digits - assert digits[0] != 0 or len(digits) == 1 - - result = [] - if negative: - result.append("-") - - # No fractional digits. - if exponent >= 0: - result.extend(str(x) for x in digits) - result.extend("0" * exponent) - return "".join(result) - - digit_count = len(digits) - - # Decimal point offset from the given digit start. - point_offset = digit_count + exponent - - # Trim trailing fractional 0 digits. - fractional_digit_count = min(digit_count, -exponent) - while fractional_digit_count and digits[digit_count - 1] == 0: - digit_count -= 1 - fractional_digit_count -= 1 - - # No trailing fractional 0 digits and a decimal point coming not after - # the given digits, meaning there is no need to add additional trailing - # integral 0 digits. - if point_offset <= 0: - # No integral digits. - result.append("0") - if digit_count > 0: - result.append(".") - result.append("0" * -point_offset) - result.extend(str(x) for x in digits[:digit_count]) - else: - # Have integral and possibly some fractional digits. 
- result.extend(str(x) for x in digits[:point_offset]) - if point_offset < digit_count: - result.append(".") - result.extend(str(x) for x in digits[point_offset:digit_count]) - return "".join(result) - - @classmethod - def translate(cls, value, topython=True): - if topython: - if isinstance(value, str) and value: - return decimal.Decimal(value) - else: - if isinstance(value, decimal.Decimal): - return cls._decimal_to_xsd_format(value) - return value - - -class XFloat(XBuiltin): - """Represents an XSD built-in type.""" - - @staticmethod - def translate(value, topython=True): - if topython: - if isinstance(value, str) and value: - return float(value) - else: - return value - - -class XInteger(XBuiltin): - """Represents an XSD built-in type.""" - - @staticmethod - def translate(value, topython=True): - if topython: - if isinstance(value, str) and value: - return int(value) - else: - return value - - -class XLong(XBuiltin): - """Represents an XSD built-in type.""" - - @staticmethod - def translate(value, topython=True): - if topython: - if isinstance(value, str) and value: - return int(value) - else: - return value - - -class XString(XBuiltin): - """Represents an XSD node.""" - pass - - -class XTime(XBuiltin): - """Represents an XSD built-in type.""" - - @staticmethod - def translate(value, topython=True): - if topython: - if isinstance(value, str) and value: - return Time(value).value - else: - if isinstance(value, datetime.time): - return Time(value) - return value - - -class Factory: - - tags = { - # any - "anyType": XAny, - # strings - "string": XString, - "normalizedString": XString, - "ID": XString, - "Name": XString, - "QName": XString, - "NCName": XString, - "anySimpleType": XString, - "anyURI": XString, - "NOTATION": XString, - "token": XString, - "language": XString, - "IDREFS": XString, - "ENTITIES": XString, - "IDREF": XString, - "ENTITY": XString, - "NMTOKEN": XString, - "NMTOKENS": XString, - # binary - "hexBinary": XString, - "base64Binary": XString, - # 
integers - "int": XInteger, - "integer": XInteger, - "unsignedInt": XInteger, - "positiveInteger": XInteger, - "negativeInteger": XInteger, - "nonPositiveInteger": XInteger, - "nonNegativeInteger": XInteger, - # longs - "long": XLong, - "unsignedLong": XLong, - # shorts - "short": XInteger, - "unsignedShort": XInteger, - "byte": XInteger, - "unsignedByte": XInteger, - # floats - "float": XFloat, - "double": XFloat, - "decimal": XDecimal, - # dates & times - "date": XDate, - "time": XTime, - "dateTime": XDateTime, - "duration": XString, - "gYearMonth": XString, - "gYear": XString, - "gMonthDay": XString, - "gDay": XString, - "gMonth": XString, - # boolean - "boolean": XBoolean, - } - - @classmethod - def maptag(cls, tag, fn): - """ - Map (override) tag => I{class} mapping. - - @param tag: An XSD tag name. - @type tag: str - @param fn: A function or class. - @type fn: fn|class. - - """ - cls.tags[tag] = fn - - @classmethod - def create(cls, schema, name): - """ - Create an object based on the root tag name. - - @param schema: A schema object. - @type schema: L{schema.Schema} - @param name: The name. - @type name: str - @return: The created object. - @rtype: L{XBuiltin} - - """ - fn = cls.tags.get(name, XBuiltin) - return fn(schema, name) diff --git a/libs_crutch/core/__init__.py b/libs_crutch/core/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/core/backup.py b/libs_crutch/core/backup.py deleted file mode 100644 index 554f063..0000000 --- a/libs_crutch/core/backup.py +++ /dev/null @@ -1,724 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys -import os -import stat -import re -from os import path -from calculate.core.server.core_interfaces import MethodsInterface -from calculate.lib.utils.files import (makeDirectory, removeDir, tar_directory, - FilePermission, find, listDirectory, - FilesError, process, readFileEx, - readFile, pathJoin, FindFileType) -from calculate.lib.utils.content import FileOwnersRestricted, ContentsStorage -from calculate.lib.utils.portage import getInstalledAtom, makeCfgName -from calculate.lib.configparser import ConfigParserCaseSens -from calculate.lib.cl_template import Template -from calculate.lib.utils.accounts import Passwd, Group, Shadow - -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from .variables.action import Actions -import tarfile -import shutil -import glob -from itertools import chain - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - -__ = getLazyLocalTranslate(_) - - -class BackupError(Exception): - """ - Исключение вызванное во время резервного копирования настроек - """ - - -class Backup(MethodsInterface): - """ - Выполнение резервного копирования настроек - """ - - def init(self): - self.apply_files = set() - self.unlink_autorun = set() - self.uid_map = {} - self.gid_map = {} - - def prepare_backup(self, dn, rootname): - makeDirectory(path.join(dn, rootname)) - return True - - def remove_directory(self, dn): - removeDir(dn) - return True - - def backup_user_changed(self, owner, dn_root): - """ - Сохранить конфигурационные файлы изменённые 
пользователем - :param owner: - :param dn_root: - :return: - """ - for fn in owner.get_md5_failed(lambda x: x.startswith('/etc')): - self.backup_file(fn, dn_root) - return True - - def prepare_contents(self, dn, contents_file, rootname): - dn_root = path.join(dn, rootname) - fo = FileOwnersRestricted( - "/", ["/%s" % x for x in find(path.join(dn, rootname), - fullpath=False)] + ["/etc"]) - self.backup_user_changed(fo, dn_root) - cs = ContentsStorage(contents_file, fo) - cs.keep(dn_root, dn_root) - return True - - def create_archive(self, dn, archfile): - arch_dn = path.dirname(archfile) - if not path.exists(arch_dn): - makeDirectory(arch_dn) - os.chmod(arch_dn, FilePermission.UserAll) - tar_directory(dn, archfile) - return True - - def open_archive(self, dn, archfile): - makeDirectory(dn) - with tarfile.open(archfile, 'r:bz2') as f: - f.extractall(dn) - return True - - def restore_configs(self, archfile, dn, contents_name, root_name): - """ - Восстановить все файлы настроек - :param archfile: - :param dn: - :return: - """ - dirs_data = {} - used_dirs = set() - _gid = lambda x: self.gid_map.get(x, x) - _uid = lambda x: self.uid_map.get(x, x) - with tarfile.open(archfile, 'r:bz2') as f: - try: - # исключить из переноса файлы, которые принадлежат пакетам, - # которые не установлены в системе - contents = f.extractfile(f.getmember(contents_name)) - pkg_file = [x.split()[:3:2] for x in contents] - not_installed_files = [ - x for x in pkg_file - if not any(getInstalledAtom(x[0].partition(":")[0]))] - skip_packages = sorted(list( - set([x[0] for x in not_installed_files]))) - if skip_packages: - self.printWARNING( - _("Settings ignored for following packages: %s") % - ", ".join(x.partition(":")[0] for x in skip_packages)) - not_installed_files = [x[1] for x in not_installed_files] - except KeyError: - raise BackupError(_("CONTENTS file not found")) - for ti in (x for x in f if x.name.startswith("%s/" % root_name)): - if ti.name[4:] in not_installed_files: - continue - if 
ti.issym() and not path.exists(ti.linkpath): - continue - if ti.name[5:]: - fn_system = path.join(dn, ti.path[5:]) - if ti.isdir(): - dirs_data[fn_system] = (ti.mode, _uid(ti.uid), - _gid(ti.gid)) - continue - dirs_list = fn_system.split('/') - for i in range(2, len(dirs_list)): - used_dirs.add("/".join(dirs_list[:i])) - if path.lexists(fn_system): - stat_system = os.lstat(fn_system) - if ti.issym(): - if stat.S_ISLNK(stat_system.st_mode): - system_link = os.readlink(fn_system) - if system_link == ti.linkpath: - continue - else: - if (stat.S_IMODE(stat_system.st_mode) == ti.mode and - stat_system.st_uid == _uid(ti.uid) and - stat_system.st_gid == _gid(ti.gid)): - data_system = readFile(fn_system) - extr_file = f.extractfile(ti) - if extr_file: - data_ti = extr_file.read() - if self.is_equal_files(data_system, - data_ti): - continue - ti.name = ti.name[5:] - f.extract(ti, dn) - os.chown(fn_system, _uid(ti.uid), _gid(ti.gid)) - if ti.isfile() or ti.issym(): - # если симлинк в списке предварительного удаления - # то исключаем его из списка изменённых файлов - if fn_system in self.unlink_autorun: - self.unlink_autorun.remove(fn_system) - else: - self.apply_files.add(fn_system) - # восстановление прав у каталогов, конфиги в которых должны были - # восстанавливаться - for dn_name in sorted(used_dirs): - if dn_name in dirs_data: - dn_mode, dn_uid, dn_gid = dirs_data[dn_name] - if path.lexists(dn_name): - stat_system = os.lstat(dn_name) - if (stat.S_IMODE(stat_system.st_mode) != dn_mode or - stat_system.st_uid != dn_uid or - stat_system.st_gid != dn_gid): - os.chmod(dn_name, dn_mode) - os.chown(dn_name, dn_uid, dn_gid) - - return True - - def sava_ini(self, section, key, val): - ini = ConfigParserCaseSens(strict=False) - ini.read(self.clVars.Get('cl_backup_ini_env'), encoding="utf-8") - if not ini.has_section(section): - ini.add_section(section) - ini[section][key] = str(val) - - ini["backup"]["version"] = self.clVars.Get('cl_ver') - with 
open(self.clVars.Get('cl_backup_ini_env'), 'w') as f: - ini.write(f) - return True - - def load_ini(self, section, key): - ini = ConfigParserCaseSens(strict=False) - ini.read(self.clVars.Get('cl_backup_ini_env'), encoding="utf-8") - return ini.get(section, key, fallback="") - - def save_initd(self, dn, root_name): - """ - Сохранить список init.d - :param dn: - :param root_name: - :return: - """ - self.sava_ini("backup", "init", - ','.join(listDirectory('/etc/init.d', fullPath=False))) - dn_root = path.join(dn, root_name) - for dn in ('/etc/runlevels/sysinit', - '/etc/runlevels/default', - '/etc/runlevels/boot'): - try: - dn_backup = pathJoin(dn_root, dn) - if not path.exists(dn_backup): - makeDirectory(dn_backup) - for fn in listDirectory(dn, fullPath=True): - if path.islink(fn): - link = os.readlink(fn) - symname = pathJoin(dn_root, fn) - if not path.lexists(symname): - os.symlink(link, symname) - except (OSError, IOError) as e: - raise BackupError(_("Failed to enable service at startup") + - (_(": %s") % (str(e)))) - return True - - def make_directory_sync(self, base_dn, dn, prefix="/"): - """ - Создать директорию и сохранить права из prefix - :param dn: - :param prefix: - :return: - """ - if not path.exists(dn): - self.make_directory_sync(base_dn, path.dirname(dn), prefix) - rel_dn = path.relpath(dn, base_dn) - system_dn = path.join(prefix, rel_dn) - system_dn_stat = os.lstat(system_dn) - if not makeDirectory(dn): - raise BackupError(_("Failed to create directory %s") % dn) - os.chown(dn, system_dn_stat.st_uid, system_dn_stat.st_gid) - os.chmod(dn, stat.S_IMODE(system_dn_stat.st_mode)) - - def backup_file(self, source_fn, target_dn, prefix="/"): - """ - Сделать резервную копию указанного файла - :param source_fn: - :param target_dn: - :return: - """ - target_fn = path.join(target_dn, path.relpath(source_fn, prefix)) - source_stat = os.lstat(source_fn) - target_base_dn = path.dirname(target_fn) - self.make_directory_sync(target_dn, target_base_dn, prefix=prefix) 
- if stat.S_ISLNK(source_stat.st_mode): - source_link = os.readlink(source_fn) - os.symlink(source_link, target_fn) - elif stat.S_ISREG(source_stat.st_mode): - shutil.copy2(source_fn, target_fn) - os.chown(target_fn, source_stat.st_uid, source_stat.st_gid) - return True - - def backup_marked(self, source_dn, target_dn, subdn, root_name): - """ - Сохранить файлы из указанного каталога, отмеченного комментариями - выполнения шаблонов - :return: - """ - source_etc_dn = path.join(source_dn, subdn) - root_dn = path.join(target_dn, root_name) - reCfg = re.compile('._cfg\d{4}_') - try: - for fn in find(source_etc_dn, filetype=FindFileType.RegularFile): - if (not reCfg.search(fn) and - " Modified Calculate" in readFileEx(fn, - headbyte=300)): - self.backup_file(fn, root_dn, prefix=source_dn) - except (OSError, IOError) as e: - raise BackupError(_("Failed to backup configuration files that " - "were modified by templates") + - (_(": %s") % (str(e)))) - return True - - def clear_autorun(self): - """ - Удалить все файлы из автозапуска, которые ссылаются на файлы из списка - init.d - :return: - """ - files = ["/etc/init.d/%s" % x.strip() - for x in self.load_ini("backup", "init").split(',')] - for dn in ('/etc/runlevels/sysinit', - '/etc/runlevels/default', - '/etc/runlevels/boot'): - for fn in listDirectory(dn, fullPath=True): - if path.islink(fn) and os.readlink(fn) in files: - os.unlink(fn) - self.unlink_autorun.add(fn) - return True - - def restore_contents(self, contentsfile, dn): - cs = ContentsStorage(contentsfile) - cs.restore(dn, files=self.apply_files) - return True - - def set_service_action(self): - self.clVars.Set('core.cl_backup_action', Actions.Service, force=True) - return True - - nm_name = "NetworkManager" - nm_config = "/etc/NetworkManager" - nm_connections = path.join(nm_config, "system-connections") - - def _do_service(self, service, action): - """ - Выполнить действие с сервисом (restart, start, stop, zap) - :param service: - :param action: - :return: - 
""" - actions = { - 'restart': _("Failed to restart {name} service"), - 'start': _("Failed to start {name} service"), - 'stop': _("Failed to stop {name} service"), - 'zap': _("Failed to zap {name} service"), - 'status': _("Failed to get status of {name} service") - } - try: - p = process(service, action) - if p.failed(): - data = p.readerr().strip() - if ("has started, but is inactive" not in data and - "will start when" not in data): - for line in data.split('\n'): - self.printERROR(line) - raise BackupError(actions.get(action, action).format( - name=path.basename(service))) - except FilesError: - self.printERROR(actions.get(action, action).format( - name=path.basename(service))) - raise - return True - - def stop_net_services(self): - """ - Остановить все сетевые службы (и NM и openrc) - :return: - """ - self._do_service("/etc/init.d/netmount", "zap") - for fn in chain(["/etc/init.d/%s" % self.nm_name], - glob.glob('/etc/init.d/net.*')): - if fn.endswith('.lo') or not path.exists(fn): - continue - self._do_service(fn, "stop") - return True - - def unlink_openrc_net_services(self, files): - """ - Удалить сетевые сервисы openrc сервисы openrc - :return: - """ - for fn in glob.glob('/etc/init.d/net.*'): - if fn.endswith('.lo') or not path.exists(fn) or fn in files: - continue - try: - os.unlink(fn) - self.apply_files.add(fn) - except OSError as e: - self.printERROR(_("Failed to remove %s service"), - path.basename(fn)) - self.printERROR(str(e)) - return True - - def is_networkmanager_backup(self, backup_path): - """ - Проверить сетевой менеджер в резервной копии - :param backup_path: - :return: - """ - return path.lexists(path.join( - backup_path, "root/etc/runlevels/default/%s" % self.nm_name)) - - def is_networkmanager_system(self): - """ - Проверить сетевой менеджер в текущей системе - :return: - """ - return path.lexists("/etc/runlevels/default/%s" % self.nm_name) - - def restore_openrc_net_initd(self, files): - """ - Восстановить сервисы net.* и запустить их - 
:return: - """ - for fn in files: - if not path.exists(fn): - os.symlink("/etc/init.d/net.lo", fn) - self.apply_files.add(fn) - self._do_service(fn, "start") - return True - - def restore_files(self, backup_path, files, notapply=False): - """ - Восстановить указанные файлы из backup/root - :param backup_path: - :param files: список файлов (поддерживаются глобальные символы) - :return: - """ - len_source_prefix = len(path.join(backup_path, "root")) - _gid = lambda x: self.gid_map.get(x, x) - _uid = lambda x: self.uid_map.get(x, x) - for source in chain(*[glob.glob(pathJoin(backup_path, "root", x)) - for x in files]): - dest = source[len_source_prefix:] - if path.lexists(source): - dn = path.dirname(dest) - if not path.exists(dn): - makeDirectory(dn) - if path.lexists(dest): - if self.is_equal_system_backup(dest, source): - continue - - shutil.copy2(source, dest) - fn_stat = os.lstat(source) - os.chown(dest, _uid(fn_stat.st_uid), _gid(fn_stat.st_gid)) - if not notapply: - self.apply_files.add(dest) - return True - - def clear_nm_connections(self, backup_path): - """ - Удалить доступные соединения для NetworkManager - :return: - """ - base_dir = pathJoin(backup_path, "root") - for fn in listDirectory(self.nm_connections, fullPath=True): - try: - if not path.exists(pathJoin(base_dir, fn)): - os.unlink(fn) - self.apply_files.add(fn) - except OSError as e: - raise BackupError(str(e)) - - def is_equal_files(self, text1, text2): - """ - Сравнить два файла отбросив комментарии и пробельные символы в начале и - в конце - :param text1: - :param text2: - :return: - """ - text1 = Template.removeComment(text1).strip() - text2 = Template.removeComment(text2).strip() - return text1 == text2 - - def is_equal_system_backup(self, system_fn, backup_fn): - """ - Проверить одинаковый ли файл в резервной копии и системе - :param system_fn: - :param backup_fn: - :return: - """ - _gid = lambda x: self.gid_map.get(x, x) - _uid = lambda x: self.uid_map.get(x, x) - if path.islink(system_fn) 
!= path.islink(backup_fn): - return False - if path.islink(system_fn): - return os.readlink(system_fn) == os.readlink(backup_fn) - if path.isfile(system_fn) != path.isfile(backup_fn): - return False - data_system = readFile(system_fn) - data_backup = readFile(backup_fn) - if not self.is_equal_files(data_system, data_backup): - return False - stat_system = os.lstat(system_fn) - stat_backup = os.lstat(backup_fn) - if (stat.S_IMODE(stat_system.st_mode) != - stat.S_IMODE(stat_backup.st_mode) or - stat_system.st_uid != _uid(stat_backup.st_uid) or - stat_system.st_gid != _gid(stat_backup.st_gid)): - return False - return True - - def check_backup_for_network(self, backup_path, files): - """ - Проверить конфигурационные файлы настройки сети на соответствие текущим - :param backup_path: - :return: - """ - backup_nm = self.is_networkmanager_backup(backup_path) - system_nm = self.is_networkmanager_system() - # проверить совпадает ли сетевой менеджер - if backup_nm != system_nm: - if backup_nm and not any( - getInstalledAtom("net-misc/networkmanager")): - return True - return False - # если nm проверить совпадение system-connections - if backup_nm: - connection_files = set(path.basename(x) for x in chain( - glob.glob("%s/*" % self.nm_connections), - glob.glob("%s/*" % (pathJoin(backup_path, "root", - self.nm_connections))) - )) - for fn in connection_files: - system_fn = pathJoin(self.nm_connections, fn) - backup_fn = pathJoin(backup_path, "root", - self.nm_connections, fn) - if not self.is_equal_system_backup(system_fn, backup_fn): - return False - # если openrc проверить conf.d/net и соответствие net.* - else: - system_fn = "/etc/conf.d/net" - backup_fn = pathJoin(backup_path, "root", system_fn) - if not self.is_equal_system_backup(system_fn, backup_fn): - return False - system_net_set = (set(path.basename(x) - for x in glob.glob('/etc/init.d/net.*')) - - {"net.lo"}) - backup_net_set = set(path.basename(x) for x in files) - if system_net_set != backup_net_set: - return 
False - return True - - def restore_network(self, backup_path): - """ - Восстановить сеть из backup - :param backup_path: - :return: - """ - files = ["/etc/init.d/%s" % x.strip() - for x in self.load_ini("backup", "init").split(',') - if x.startswith("net.") and x != "net.lo"] - if self.check_backup_for_network(backup_path, files): - self.endTask("skip") - return True - self.stop_net_services() - self.unlink_openrc_net_services(files) - self.clear_nm_connections(backup_path) - self.restore_files(backup_path, ["/etc/conf.d/hostname", - "/etc/resolv.conf", - "/etc/hosts"]) - self._do_service("/etc/init.d/hostname", "restart") - if self.is_networkmanager_backup(backup_path): - self.unlink_openrc_net_services([]) - self.restore_files(backup_path, [ - "/etc/NetworkManager/system-connections/*", - "/etc/NetworkManager/dispatcher.d/*", - ]) - self._do_service("/etc/init.d/NetworkManager", "start") - else: - self.restore_files(backup_path, ["/etc/conf.d/net"]) - self.restore_openrc_net_initd(files) - self._do_service("/etc/init.d/netmount", "start") - return True - - def special_backup(self, backup_path): - """ - Выполнить специализирование резервное копирование модулей - :param backup_path: - :return: - """ - for backup_obj in self.iterate_modules(): - backup_obj.backup(backup_path) - return True - - def special_restore(self, backup_path): - """ - Выполнить специализирование восстановление из резервной копии - :param backup_path: - :return: - """ - for backup_obj in self.iterate_modules(): - backup_obj.restore(backup_path) - return True - - def iterate_modules(self): - """ - Перебрать все модули backup - :return: - """ - site_packages = [path.join(x, "calculate") - for x in sys.path - if (x.endswith('site-packages') and - x.startswith('/usr/lib'))] - ret_list = [] - for module, modDir in chain( - *map(lambda x: map(lambda y: (path.basename(y), y), - listDirectory(x, True, True)), - site_packages)): - if path.exists(path.join(modDir, "backup_%s.py" % module)): - if not 
"calculate-%s" % module in ret_list: - ret_list.append("calculate-%s" % module) - cl_backup = ret_list - for pack in cl_backup: - if pack: - module_name = '%s.backup_%s' % (pack.replace("-", "."), - pack.rpartition("-")[2]) - import importlib - - try: - backup_module = importlib.import_module(module_name) - backup_obj = backup_module.Backup(self, self.clVars) - yield backup_obj - except ImportError: - sys.stderr.write(_("Unable to import %s") % module_name) - - def display_changed_configs(self): - """ - Отобразить список восстановленных файлов - :return: - """ - t = Template(self.clVars, printWARNING=self.printWARNING, - printERROR=self.printERROR, printSUCCESS=self.printSUCCESS) - t.verboseOutput(sorted(list(self.apply_files | self.unlink_autorun))) - return True - - def display_backup_configs(self, archfile): - """ - Отобразить список помещённых в резервную копию файлов - :return: - """ - with tarfile.open(archfile, 'r:bz2') as f: - self.printWARNING(_("Calculate Utilities have backuped files") - + _(":")) - for fn in sorted("/%s" % x.path.partition('/')[2] for x in - f.getmembers() if (not x.isdir() and ( - x.path.startswith("root") or - x.path.startswith("ldap")))): - self.printSUCCESS(" " * 5 + fn) - return True - - def run_openrc(self, command): - p = process("/sbin/openrc", "default") - p.success() - return True - - passwd_fn = '/etc/passwd' - group_fn = '/etc/group' - shadow_fn = '/etc/shadow' - - def save_accounts(self, backup_path): - accounts_path = path.join(backup_path, "accounts") - for source_fn in (self.passwd_fn, self.group_fn, self.shadow_fn): - self.backup_file(source_fn, accounts_path, prefix="/etc") - return True - - def restore_accounts(self, backup_path): - accounts_path = path.join(backup_path, "accounts") - backup_passwd_fn = pathJoin(accounts_path, - path.basename(self.passwd_fn)) - backup_group_fn = pathJoin(accounts_path, path.basename(self.group_fn)) - backup_shadow_fn = pathJoin(accounts_path, - path.basename(self.shadow_fn)) - - if 
any(not path.exists(x) for x in (backup_passwd_fn, - backup_group_fn, - backup_shadow_fn)): - return "skip" - # пользователи - passwd = Passwd(readFile(self.passwd_fn)) - backup_passwd = Passwd(readFile(backup_passwd_fn)) - added_users = [x.name for x in passwd.new_users(backup_passwd)] - keep_users = [x.name for x in backup_passwd.new_users(passwd)] - if self.clVars.GetBool('cl_backup_verbose_set') and added_users: - self.printSUCCESS( - _("Restored users:") + " " + ", ".join(added_users)) - self.uid_map = backup_passwd.get_uid_map(passwd) - passwd.join(backup_passwd) - with open(makeCfgName(self.passwd_fn), 'w') as f: - passwd.write(f) - os.chown(self.passwd_fn, 0, 0) - os.chmod(self.passwd_fn, - FilePermission.OtherRead | - FilePermission.GroupRead | - FilePermission.UserRead | - FilePermission.UserWrite) - - # группы - groups = Group(readFile(self.group_fn)) - backup_groups = Group(readFile(backup_group_fn)) - added_groups = [x.name for x in groups.new_groups(backup_groups)] - if self.clVars.GetBool('cl_backup_verbose_set') and added_groups: - self.printSUCCESS(_("Restored groups:") + " " - + ", ".join(added_groups)) - self.gid_map = backup_groups.get_gid_map(groups) - groups.join(backup_groups, keep_users=keep_users) - with open(makeCfgName(self.group_fn), 'w') as f: - groups.write(f) - os.chown(self.group_fn, 0, 0) - os.chmod(self.group_fn, - FilePermission.OtherRead | - FilePermission.GroupRead | - FilePermission.UserRead | - FilePermission.UserWrite) - - # пароли - shadow = Shadow(readFile(self.shadow_fn)) - backup_shadow = Shadow(readFile(backup_shadow_fn)) - changed_shadow = [x.name - for x in shadow.changed_passwords(backup_shadow)] - if self.clVars.GetBool('cl_backup_verbose_set') and changed_shadow: - self.printSUCCESS(_("Restored user passwords:") + " " - + ", ".join(changed_shadow)) - shadow.join(backup_shadow) - with open(makeCfgName(self.shadow_fn), 'w') as f: - shadow.write(f) - os.chown(self.shadow_fn, 0, 0) - os.chmod(self.shadow_fn, - 
FilePermission.UserRead | - FilePermission.UserWrite) - return True diff --git a/libs_crutch/core/client/__init__.py b/libs_crutch/core/client/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/core/core_main.py b/libs_crutch/core/core_main.py deleted file mode 100644 index ffed785..0000000 --- a/libs_crutch/core/core_main.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# from server.cl_server import main -# print(main) - -from __future__ import print_function -from __future__ import absolute_import -def core_main(): - import sys - - if hasattr(sys, "setdefaultencoding"): - sys.setdefaultencoding("utf-8") - from calculate.lib.cl_lang import setLocalTranslate - - _ = lambda x: x - setLocalTranslate('cl_core', sys.modules[__name__]) - from traceback import print_exc - from os import path - - if not path.exists('/dev/urandom'): - sys.stderr.write("/dev/urandom not found\n") - sys.exit(1) - - try: - from .server.cl_server import main - - reload(sys) - from calculate.lib.datavars import CriticalError, DataVarsError - - try: - sys.exit(main()) - except (CriticalError, DataVarsError) as e: - sys.stderr.write("%s\n" % str(e)) - sys.exit(1) - except ImportError as e: - print_exc() - cannot_import = 'cannot import name ' - no_module = 'No module named ' - if e.message.startswith(cannot_import): - print (_('Failed to import %s') - % e.message.rpartition(cannot_import)[2]) - elif e.message.startswith(no_module): - print (_('No module named %s') % - e.message.rpartition(no_module)[2]) - else: - print(e.message) - sys.exit(1) - except KeyboardInterrupt: - print() - print(_("Task interrupted")) - - -if (__name__ == "__main__"): - core_main() \ No newline at end of file diff --git a/libs_crutch/core/datavars.py b/libs_crutch/core/datavars.py deleted file mode 100644 index b4892f3..0000000 --- a/libs_crutch/core/datavars.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -__version__ = "3.1.8" -__app__ = "calculate-core" - -from calculate.lib.datavars import DataVars - - -class DataVarsCore(DataVars): - """Variable class for core""" - - def importCore(self, **args): - """ - Import lib and core variables - """ - self.importVariables() - self.importVariables('calculate.core.variables') - self.defaultModule = "core" diff --git a/libs_crutch/core/result_viewer.py b/libs_crutch/core/result_viewer.py deleted file mode 100644 index 3b994d5..0000000 --- a/libs_crutch/core/result_viewer.py +++ /dev/null @@ -1,692 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import print_function -from itertools import cycle -from calculate.lib.utils.colortext import get_terminal_print, Terminal, \ - TextState, convert_xml_to_terminal, Print -from calculate.lib.cl_progressbar import get_progress_bar -import sys -from calculate.lib.utils.files import getch, set_active_tty, get_active_tty -from calculate.lib.utils.text import tableReport -import threading -from calculate.lib.utils.tools import classificate - -Colors = TextState.Colors -from calculate.lib.cl_lang import setLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -class Spinner(threading.Thread): - def __init__(self, *args, **kwargs): - self.__halt = threading.Event() - self.__main_thread = threading.currentThread() - threading.Thread.__init__(self, *args, **kwargs) - self.start() - - def run(self): - Terminal().cursor = False - try: - sys.stdout.write(" |") - for c in cycle('/-\|'): - sys.stdout.write('\b' + c) - sys.stdout.flush() - self.__halt.wait(0.2) - sys.stdout.flush() - if self.__halt.is_set(): - sys.stdout.write('\b\b \b\b') - return - if not self.__main_thread.is_alive(): - return - finally: - Terminal().cursor = True - - def stop(self): - self.__halt.set() - self.join() - - -class Table(tableReport): - def __init__(self, *args, **kwargs): - self.res = [] - tableReport.__init__(self, *args, **kwargs) - - def printFunc(self, s): - self.res.append(s) - - def printTable(self): - self.setAutosize() - self.printReport(printRows=False) - return "".join(self.res) - - -def printTable(data, header=None): - try: - if any(data): - return Table(None, header, data, colSpan=0).printTable() - else: - return "" - except Exception: - # print str(e) - raise - - -def echo_on(f): - def wrapper(self, *args, **kw): - oldecho = self.parent.terminal_info.echo - self.parent.terminal_info.echo = True - try: - return f(self, *args, **kw) - finally: - self.parent.terminal_info.echo = oldecho - - return wrapper - - -class 
TaskState(object): - """ - Текущее состояние вывода сообщений - """ - - def __init__(self, parent): - self.parent = parent - - @property - def state(self): - return self.parent.task_state - - def process_tags(self, s): - """ - Выполнить текстовое преобразование - """ - s = s or "" - return convert_xml_to_terminal(s).replace(" ", " ") - - def display_asterisk(self, color): - """ - Отобразить маркер - """ - self.parent.printer(" ") - self.parent.printer.foreground(color).bold("*") - self.parent.printer(" ") - - def _right_indent(self, indent, width=-1): - """ - Выполнить выравнивание от правого края - """ - if width > 0: - self.parent.printer('\r') - self.parent.printer.right(width - indent) - else: - self.parent.printer(" ") - - def _change_asterisk(self, color, width=-1): - if width > 0: - self.parent.printer('\r') - self.display_asterisk(color) - - def dotting(self): - if self.parent.spinner: - self.parent.spinner.stop() - self.parent.printer(" ...") - self.parent.printer.flush() - - def _print_result(self, text, color): - width = self.parent.terminal_info.width - self._change_asterisk(color, width) - self._right_indent(len(text) + 4, width) - self.parent.printer.bold.foreground(TextState.Colors.BLUE)("[ ") - self.parent.printer.bold.foreground(color)(text) - self.parent.printer.bold.foreground(TextState.Colors.BLUE)(" ]") - self.parent.printer("\n") - - def _print_ok(self): - self._print_result("ok", TextState.Colors.GREEN) - - def _print_failed(self): - self._print_result("!!", TextState.Colors.RED) - - def _print_skip(self): - self._print_result("skip", TextState.Colors.YELLOW) - - def display_result(self, result): - func_map = {"skip": self._print_skip, - False: self._print_failed} - func_map.get(result, self._print_ok)() - self.parent.printer.flush() - - def startTask(self, message, progress, num): - pass - - def endTask(self, result, progress_message=None): - pass - - def breakTask(self): - pass - - def printMessage(self, color, message): - for i, line in 
classificate(self.process_tags(message).split('\n')): - self.display_asterisk(color) - self.parent.printer(line) - if not i.last: - self.parent.printer('\n') - try: - self.parent.printer.flush() - except IOError: - pass - - def printERROR(self, message): - self.printMessage(Colors.RED, message) - - def printSUCCESS(self, message): - self.printMessage(Colors.GREEN, message) - - def printWARNING(self, message): - self.printMessage(Colors.YELLOW, message) - - def startGroup(self, message): - self.parent.printer.foreground(Colors.WHITE)(self.process_tags(message)) - self.parent.printer('\n') - - def endGroup(self): - pass - - def beginFrame(self, message): - self.parent.terminal_info.echo = False - - def endFrame(self): - self.parent.terminal_info.echo = True - - def addProgress(self, message): - pass - - def setProgress(self, percent, short_message, long_message): - pass - - @echo_on - def askConfirm(self, message, default): - self.parent.printer("\n") - while True: - try: - _print = Print(output=self.parent.printer.output) - if default in "yes": - yes_color, no_color = Colors.GREEN, Colors.LIGHT_RED - else: - yes_color, no_color = Colors.LIGHT_RED, Colors.GREEN - yes = _print.foreground(yes_color)("Yes") - no = _print.foreground(no_color)("No") - white_message = _print.foreground(Colors.WHITE)(message) - ask = raw_input(white_message + ' (%s/%s): ' % (yes, no)) - except (EOFError, KeyboardInterrupt): - ask = 'no' - print() - if ask.lower() in ['n', 'no']: - return "no" - if ask.lower() in ['y', 'yes']: - return "yes" - if ask == '': - return default - - def printPre(self, message): - self.parent.printer(self.process_tags(message)) - self.parent.printer('\n') - - def printDefault(self, message): - self.parent.printer(self.process_tags(message)) - self.parent.printer('\n') - - @echo_on - def askChoice(self, message, answers): - self.parent.printer("\n") - Colors = TextState.Colors - printer = self.parent.printer - _print = Print(output=printer.output) - # ability 
answer by first letter - firstletter = 0 - i_value, i_comment = 0, 1 - answerByChar = map(lambda x: x[i_value][firstletter], answers) - - if filter(lambda x: answerByChar.count(x) > 1, answerByChar): - use_getch = False - sa = slice(0, None) - else: - use_getch = True - sa = slice(1) - message = _print.foreground(Colors.WHITE)(message) - full_message = (message + - ' (%s): ' % ("/".join(map( - lambda x: "%s[%s]" % (x[i_comment], x[i_value][sa]), - answers)))) - while True: - CTRC_C = chr(3) - if use_getch: - printer(full_message) - ask = getch() - printer("\n") - if ask in (CTRC_C, ""): - raise KeyboardInterrupt - else: - try: - ask = raw_input(full_message) - except (EOFError, KeyboardInterrupt): - printer("\n") - raise KeyboardInterrupt - ask = ask.lower() - like_answers = filter(lambda x: x[i_value].startswith(ask), - answers) - if not like_answers: - self.state.printERROR(_('The answer is uncertain')) - continue - if len(like_answers) == 1: - return like_answers[i_value][firstletter] - else: - self.state.printERROR(_('Ambiguous answer:') + - ",".join(map(lambda x: x[i_comment], - like_answers))) - - @echo_on - def askQuestion(self, message): - self.parent.printer("\n") - return raw_input(message + _(":")) - - def askPassword(self, message, twice): - from calculate.lib.utils.common import getpass - - old_tty = None - try: - if self.parent.terminal_info.is_boot_console(): - old_tty = get_active_tty() - set_active_tty(1) - - text1 = _("%s: ") % message - if not twice: - return getpass.getpass(text1) - text2 = _('Repeat: ') - pass1 = 'password' - pass2 = 'repeat' - try: - while pass1 != pass2: - pass1 = getpass.getpass(text1) - pass2 = getpass.getpass(text2) - if pass1 != pass2: - self.state.printERROR(_('Passwords do not match')) - except KeyboardInterrupt: - return None - passwd = pass1 if (pass1 and pass1 == pass2) else None - return passwd - finally: - if old_tty and old_tty.isdigit(): - set_active_tty(int(old_tty)) - - def printTable(self, table_name, head, 
body): - self.state.printSUCCESS(message=table_name) - self.parent.printer(printTable(body, head)) - - -class CleanState(TaskState): - """ - Ожидается вывод - """ - - def startTask(self, message, progress, num): - self.printMessage(Colors.GREEN, message) - self.parent.spinner = Spinner() - self.parent.set_state('start') - if progress: - self.parent.addProgress() - - def printERROR(self, message): - super(CleanState, self).printERROR(message) - self.parent.printer('\n') - - def printSUCCESS(self, message): - super(CleanState, self).printSUCCESS(message) - self.parent.printer('\n') - - def printWARNING(self, message): - super(CleanState, self).printWARNING(message) - self.parent.printer('\n') - - -class CleanStateNoProgress(CleanState): - """ - ... без отображения прогрессов - """ - - def startTask(self, message, progress, num): - self.display_asterisk(Colors.GREEN) - self.parent.printer(message) - self.dotting() - self.parent.set_state('start') - - -class StartState(TaskState): - """ - Выполняется задача (отображается spinner) - """ - - def startTask(self, message, progress, num): - self.parent.endTask(True) - self.parent.startTask(message, progress, num) - - def endTask(self, result, progress_message=None): - self.dotting() - self.parent.set_state('clean') - self.display_result(result) - - def breakTask(self): - self.dotting() - self.parent.set_state('clean') - self.parent.printer('\n') - - def printERROR(self, message): - self.dotting() - self.parent.printer('\n') - self.parent.set_state('clean') - self.state.printERROR(message) - - def printSUCCESS(self, message): - self.dotting() - self.parent.set_state('breaked') - self.state.printSUCCESS(message) - - def printWARNING(self, message): - self.dotting() - self.parent.set_state('breaked') - self.state.printWARNING(message) - - def startGroup(self, message): - self.state.endTask(True) - self.state.startGroup(message) - - def endGroup(self): - self.state.endTask(True) - self.state.endGroup() - - def beginFrame(self, 
message): - self.state.endTask(True) - self.state.beginFrame(message) - - def endFrame(self): - self.state.endTask(True) - self.state.endFrame() - - def addProgress(self, message): - self.parent.set_state("pre-progress") - self.state.addProgress(message) - - def printPre(self, message): - self.parent.endTask(True) - self.state.printPre(message) - - def printDefault(self, message): - self.state.endTask(True) - self.state.printDefault(message) - - def askChoice(self, message, answers): - self.breakTask() - return self.state.askChoice(message, answers) - - def askQuestion(self, message): - self.breakTask() - return self.state.askQuestion(message) - - def askPassword(self, message, twice): - self.breakTask() - return self.state.askPassword(message, twice) - - def askConfirm(self, message, default): - self.breakTask() - return self.state.askConfirm(message, default) - - def printTable(self, table_name, head, body): - self.breakTask() - self.state.printTable(table_name, head, body) - - -class StartStateNoProgress(StartState): - """ - ... 
без прогресса - """ - - def startTask(self, message, progress, num): - self.parent.endTask(True) - self.parent.startTask(message, progress, num) - - def endTask(self, result, progress_message=None): - self.parent.set_state('clean') - self.display_result(result) - - def breakTask(self): - self.parent.printer('\n') - - def printERROR(self, message): - self.breakTask() - self.parent.set_state('clean') - self.state.printERROR(message) - - def printSUCCESS(self, message): - self.breakTask() - self.parent.set_state('clean') - self.state.printSUCCESS(message) - - def printWARNING(self, message): - self.breakTask() - self.parent.set_state('clean') - self.state.printWARNING(message) - - def addProgress(self, message): - pass - - -class BreakedState(StartState): - """ - Во время выполнения задачи выведено сообщение - """ - - def stop_spinner_newline(self): - self.parent.spinner.stop() - self.parent.printer('\n') - - def startTask(self, message, progress, num): - self.state.endTask(True) - self.state.startTask(message, progress, num) - - def breakTask(self): - self.stop_spinner_newline() - self.parent.set_state('clean') - - def endTask(self, result, progress_message=None): - self.breakTask() - - def printERROR(self, message): - self.parent.endTask(True) - self.state.printERROR(message) - - def printSUCCESS(self, message): - self.stop_spinner_newline() - TaskState.printSUCCESS(self, message) - self.parent.spinner = Spinner() - - def printWARNING(self, message): - self.stop_spinner_newline() - TaskState.printWARNING(self, message) - self.parent.spinner = Spinner() - - -class PreProgressState(StartState): - """ - Задача запрошена как с прогрессом но проценты еще не обрабатывались - """ - - def addProgress(self, message): - pass - - def setProgress(self, percent, short_message, long_message): - self.parent.set_state("progress") - self.dotting() - self.parent.printer("\n") - self.parent.add_progressbar() - self.parent.terminal_info.cursor = False - self.state.setProgress(percent, 
short_message, long_message) - - -class ProgressState(StartState): - """ - Отображается progressbar - """ - - def finish_and_clean(self): - self.parent.printer('\r') - self.parent.printer.flush() - self.parent.progress.finish() - self.parent.terminal_info.cursor = True - self.parent.set_progressbar(None) - self.parent.printer.up(1).clear_line("") - self.parent.printer.up(1)("") - - def setProgress(self, percent, short_message, long_message): - if not 0 <= percent <= 100: - self.breakTask() - else: - self.parent.progress.update(percent) - - def breakTask(self): - self.finish_and_clean() - self.parent.set_state('clean') - self.parent.printer('\n') - - def endTask(self, result, progress_message=None): - self.finish_and_clean() - self.parent.set_state('clean') - self.display_result(result) - - def printERROR(self, message): - self.finish_and_clean() - self.parent.printer.down(1)("") - self.parent.set_state('clean') - self.state.printERROR(message) - - def printSUCCESS(self, message): - self.finish_and_clean() - self.parent.set_state('breaked') - self.state.printSUCCESS(message) - - def printWARNING(self, message): - self.finish_and_clean() - self.parent.set_state('breaked') - self.state.printWARNING(message) - - -class ResultViewer(object): - """ - Просмотрщик результатов - """ - - def __init__(self): - self.printer = \ - get_terminal_print(sys.stdout) - self.terminal_info = Terminal() - self.states = {'clean': CleanState(self), - 'breaked': BreakedState(self), - 'pre-progress': PreProgressState(self), - 'progress': ProgressState(self), - 'start': StartState(self)} - self.task_state = self.states['clean'] - self.spinner = None - self.progress = None - self.no_questions = False - - def set_no_progress(self): - self.states = {'clean': CleanStateNoProgress(self), - 'start': StartStateNoProgress(self)} - self.set_state('clean') - - def set_no_questions(self): - self.no_questions = True - - def set_state(self, state): - self.task_state = self.states[state] - - def 
add_progressbar(self): - self.set_progressbar(get_progress_bar()) - - def set_progressbar(self, pb): - self.progress = pb - - def endTask(self, result=None, progress_message=None): - self.task_state.endTask(result, progress_message) - - def startTask(self, message, progress=False, num=1): - self.task_state.startTask(message, progress, num) - - def printERROR(self, message, onlyShow=None): - if onlyShow != 'gui': - self.task_state.printERROR(message) - - def printSUCCESS(self, message, onlyShow=None): - if onlyShow != 'gui': - self.task_state.printSUCCESS(message) - - def printWARNING(self, message, onlyShow=None): - if onlyShow != 'gui': - self.task_state.printWARNING(message) - - def startGroup(self, message): - self.task_state.startGroup(message) - - def endGroup(self): - self.task_state.endGroup() - - def beginFrame(self, message=None): - self.task_state.beginFrame(message) - - def endFrame(self): - self.task_state.endFrame() - - def addProgress(self, message=None): - self.task_state.addProgress(message) - - def setProgress(self, percent, short_message=None, long_message=None): - self.task_state.setProgress(percent, short_message, long_message) - - def printPre(self, message, onlyShow=None): - if onlyShow != 'gui': - self.task_state.printPre(message) - - def printDefault(self, message='', onlyShow=None): - if onlyShow != 'gui': - self.task_state.printDefault(message) - - def askConfirm(self, message, default="yes"): - if self.no_questions: - return default - return self.task_state.askConfirm(message, default) - - def askChoice(self, message, answers=(("yes", "Yes"), ("no", "No"))): - return self.task_state.askChoice(message, answers) - - def askPassword(self, message, twice=False): - return self.task_state.askPassword(message, twice) - - def askQuestion(self, message): - return self.task_state.askQuestion(message) - - def printTable(self, table_name, head, body, fields=None, - onClick=None, addAction=None, step=None, records=None): - 
self.task_state.printTable(table_name, head, body) diff --git a/libs_crutch/core/result_viewer_gui.py b/libs_crutch/core/result_viewer_gui.py deleted file mode 100644 index e88067c..0000000 --- a/libs_crutch/core/result_viewer_gui.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -from calculate.lib.cl_progressbar import get_progress_bar, get_message_box -from calculate.lib.utils.colortext import TextState -import sys -from .result_viewer import PreProgressState, ProgressState - -Colors = TextState.Colors -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -class PreProgressStateGui(PreProgressState): - """ - Задача запрошена как с прогрессом но проценты еще не обрабатывались - """ - - def addProgress(self, message): - self.dotting() - self.parent.printer("\n") - self.parent.add_progressbar() - self.parent.set_state("progress") - - -class ProgressStateGui(ProgressState): - """ - Отображение для gui прогресса - """ - - def finish_and_clean(self): - self.parent.progress.finish() - self.parent.printer.up(1)("") - self.parent.set_progressbar(None) - - -class ResultViewerDecorator(object): - def __init__(self, rv): - self.rv = rv - for v in self.rv.states.values(): - v.parent = self - - def __getattr__(self, item): - return getattr(self.rv, item) 
- - -class ProgressGui(ResultViewerDecorator): - """ - Отображение прогресса в Qt диалогах - """ - - def __init__(self, rv): - super(ProgressGui, self).__init__(rv) - self.rv.states['pre-progress'] = PreProgressStateGui(self) - self.rv.states['progress'] = ProgressStateGui(self) - self.progress_title = "" - - def add_progressbar(self): - self.set_progressbar(get_progress_bar("gui", self.progress_title)) - - def startTask(self, message, progress=False, num=1): - self.rv.startTask(message, progress, num) - self.progress_title = message - - -class ErrorGui(ResultViewerDecorator): - """ - Отображение ошибок через gui - """ - - def __init__(self, rv): - super(ErrorGui, self).__init__(rv) - self.messages = [] - - def show_messages(self): - get_message_box().critical("\n".join(self.messages).decode('utf-8')) - - def printERROR(self, message, onlyShow=None): - self.rv.printERROR(message, onlyShow) - if onlyShow != 'gui': - if message: - self.messages.append(message) - - def endFrame(self): - self.rv.task_state.endFrame() - if self.messages: - self.show_messages() - - -class WarningGui(ResultViewerDecorator): - """ - Отображение предупреждений через gui - """ - - def __init__(self, rv): - super(WarningGui, self).__init__(rv) - self.warnings = [] - - def show_messages(self): - get_message_box().warning("\n".join(self.warnings).decode('utf-8')) - - def printWARNING(self, message, onlyShow=None): - self.rv.printWARNING(message, onlyShow) - if onlyShow != 'gui': - if message: - self.warnings.append(message) - - def endFrame(self): - self.rv.task_state.endFrame() - if not self.messages and self.warnings: - self.show_messages() - elif self.messages: - self.messages.extend(self.warnings) - self.rv.show_messages() diff --git a/libs_crutch/core/server/__init__.py b/libs_crutch/core/server/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/core/server/admin.py b/libs_crutch/core/server/admin.py deleted file mode 100644 index 3dceed6..0000000 --- 
a/libs_crutch/core/server/admin.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2017 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.lib.configparser import ConfigParser -from calculate.lib.utils.files import pathJoin, writeFile, readFileEx -from calculate.lib.utils.text import _u8 -import re -from os import path -from calculate.lib.cl_lang import getLazyLocalTranslate, setLocalTranslate -from collections import MutableMapping - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class Admins(MutableMapping): - """ - Объект работы с информацией о локальных администраторах - """ - section = "admin" - - def __init__(self, dv, chroot=False): - self.dv = dv - self.cp = ConfigParser(strict=False) - if chroot: - self.ini_path = pathJoin( - dv.Get('cl_chroot_path'), - dv.Get('core.cl_core_admin_path')) - else: - self.ini_path = dv.Get('core.cl_core_admin_path') - self.cert_database = dv.Get('core.cl_core_database') - - if path.exists(self.ini_path): - self.cp.read(self.ini_path, encoding="utf-8") - - def __getitem__(self, item): - return _u8(self.cp.get(self.section, item, fallback="")) - - def __contains__(self, item): - return self.cp.has_option(self.section, item) - - def __len__(self): - return len(list(iter(self))) - - def __iter__(self): - if not self.cp.has_section(self.section): - return iter(()) - else: - return 
iter(_u8(x) for x in self.cp[self.section]) - - def has_certificate(self, user): - """ - Проверить: выдавался ли сертификат указанному пользователю - """ - certdata = readFileEx(self.cert_database, grab=True) - return bool(re.search("^\S+\s+(?:\S+\s+){6}%s\s*$" % user, - certdata, flags=re.M)) - - def __setitem__(self, item, value): - if not self.cp.has_section(self.section): - self.cp.add_section(self.section) - self.cp[self.section][item] = value - - def __delitem__(self, item): - if self.cp.has_section(self.section): - self.cp.remove_option(self.section, item) - - def save(self): - with writeFile(self.ini_path) as f: - self.cp.write(f) - - def clear(self): - self.cp.remove_section(self.section) diff --git a/libs_crutch/core/server/api_types.py b/libs_crutch/core/server/api_types.py deleted file mode 100644 index 29a85b7..0000000 --- a/libs_crutch/core/server/api_types.py +++ /dev/null @@ -1,874 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import -from calculate.core.server.loaded_methods import LoadedMethods - -import calculate.contrib -from spyne import String, Integer, Boolean -from spyne.model.primitive import string_encoding -from spyne import Array, ComplexModel -# from spyne.model import Mandatory -from spyne import rpc - - - -import sys -import pickle -import os -from calculate.lib.datavars import SourceReadonlyVariable -from calculate.core.server.core_interfaces import CoreServiceInterface - -_ = lambda x: x -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_core3', sys.modules[__name__]) -String.Attributes.encoding = "utf-8" -##### -# API -##### -class CommonInfo(ComplexModel): - """ - Common info for all Info classes - - cl_templates_locate - templates location - cl_dipatch_conf - update config method - cl_verbose_set - display appling templates - """ - cl_templates_locate = Array(String) - cl_dispatch_conf = String - cl_verbose_set = Boolean - - Default = Array(String) - CheckAll = Boolean - -class LazyString(object): - pass - - -class DataVarsSerializer(ComplexModel): - """Serializer for datavars types""" - - class Attributes(ComplexModel.Attributes): - default = None - min_occurs = 1 - - def elementByType(self, typeobj): - """Get element by variable type, given for table or not""" - elementMap = {'table': 'table', - "string": "input", - "bool": "check", - "boolauto": "check_tristate", - "bool3": "check_tristate", - "choice": "combo", - "choiceedit": "comboEdit", - "choiceedit-list": "multichoice_add", - "choice-list": "multichoice", - "bool-list": "multichoice", - "list": "multichoice_add", - "password": "password", - "onepassword": "password", - "need-onepassword": "password", - "need-password": "password"} - return elementMap.get(typeobj, 'input') - - def arrayString(self, value): - if type(value) in (list, tuple): - return [""] + map(lambda x: (",".join(x) - if type(x) in (list, tuple) - else x), - value) - else: - return 
value - - def arrayArrayString(self, value): - if type(value) in (list, tuple): - return map(self.arrayString, value) - else: - return value - - def getChoice(self, var_obj): - if ("choice" in var_obj.type or "file" in var_obj.type or - var_obj.element == "radio"): - choice, comments = var_obj.parent.ChoiceAndComments(var_obj.name) - return self.arrayString(choice), self.arrayString(comments) - else: - return None, None - - -class ChoiceValue(DataVarsSerializer): - typefield = String(default=None, min_occurs=1) - values = Array(String, default=None, min_occurs=1) - comments = Array(String, default=None, min_occurs=1) - onChanged = Array(String, default=None, min_occurs=1) - - def __init__(self, dv=None, varObj=None, readOnly=False, **kwargs): - if dv: - super(ChoiceValue, self).__init__() - if not readOnly: - self.values, self.comments = self.getChoice(varObj) - elif isinstance(varObj, SourceReadonlyVariable): - items = varObj.getMapHumanReadable().items() - if items: - self.values, self.comments = zip(*items) - else: - self.values, self.comments = [], [] - self.values = self.arrayString(self.values) - self.comments = self.arrayString(self.comments) - if varObj.mode == 'w': - self.typefield = (varObj.element or - self.elementByType(varObj.type)) - else: - if readOnly: - self.typefield = "readonly" - else: - self.typefield = "text" - else: - super(ChoiceValue, self).__init__(**kwargs) - - def elementByType(self, typeobj): - """Get element by variable type, given for table or not""" - if "-list" in typeobj: - typeobj = typeobj.replace("-list", "", 1) - elif "list" in typeobj: - typeobj = typeobj.replace("list", "", 1) - return DataVarsSerializer.elementByType(self, - typeobj) - - -class Table(DataVarsSerializer): - # head = Mandatory(Array(String(default=None, min_occurs=1), min_occurs=1)) - head = Array(String(default=None, min_occurs=1), min_occurs=1) - fields = Array(String(default=None, min_occurs=1), min_occurs=1) # name fields in params object - body = 
Array(Array(String), default=None, min_occurs=1) - values = Array(ChoiceValue, default=None, min_occurs=1) - onClick = String(default=None, min_occurs=1) - addAction = String(default=None, min_occurs=1) - # шаг на который нужно прейти при выполнении onClick - step = String(default=None, min_occurs=1) - # количество записей в таблице - records = String(default=None, min_occurs=1) - - def __init__(self, dv=None, briefmode=None, - varObj=None, head=None, body=None, values=None, - fields=None, onClick=None, addAction=None, step=None, - records=None): - super(Table, self).__init__() - # print("DEBUG Table init") - - if dv: - self.head = [] - self.body = [] - self.values = [] - # get information about all table columns - self.writable = True - for i, col in enumerate(varObj.source): - # get columns variable obj - varSource = dv.getInfo(col) - # invalidate columns vars for uncompatible table - if varSource.uncompatible(): - dv.Invalidate(col, onlySet=True) - # header label - self.head.append(varSource.label or varSource.name) - # if column writable then add ChoiceValue info - if varSource.mode == 'w' or i == 0: - self.values.append(ChoiceValue(dv, varSource)) - if varSource.mode == 'r' and i == 0: - self.writable = False - else: - self.values.append( - ChoiceValue(dv, varSource, readOnly=True)) - # get table body - # empty value at start add for fix data transfer - self.body = self.arrayArrayString( - dv.Get(varObj.name, humanreadable=True if briefmode else False)) - else: - self.head = head - self.fields = fields - self.body = body - self.values = values - self.onClick = onClick - self.addAction = addAction - self.step = step or "0" - self.records = records or "0" - - -class Option(DataVarsSerializer): - shortopt = String(default=None, min_occurs=1) - longopt = String(default=None, min_occurs=1) - metavalue = String(default=None, min_occurs=1) - syntax = String(default=None, min_occurs=1) - help = String(default=None, min_occurs=1) - - def __init__(self, optlist, 
metaval, helpval, syntax=""): - super(Option, self).__init__() - self.help = helpval - self.metavalue = metaval - self.syntax = syntax - for val in optlist: - if val.startswith('--'): - self.longopt = val - else: - self.shortopt = val - - -class Field(DataVarsSerializer): - name = String(default=None, min_occurs=1) # varname from Datavars - label = String(default=None, min_occurs=1) # label for GUI - type = String(default=None, min_occurs=1) # data type of variable - opt = Option # opt for cmdline - help = String(default=None, min_occurs=1) # help for cmdline (GUI?) - element = String(default=None, min_occurs=1) # type element - guitype = String(default=None, min_occurs=1) # addon guitype - choice = Array(String, default=None, min_occurs=1) # value (combobox,comboedit) - listvalue = Array(String, default=None, min_occurs=1) # current listvalue - default = Boolean(default=None, min_occurs=1) # default value or False - value = String(default=None, min_occurs=1) # current value - tablevalue = Table # current table value - uncompatible = String(default=None, min_occurs=1) # message for uncompatibility variable - comments = Array(String, default=None, min_occurs=1) # comments for choice - - def __init__(self, dv=None, varObj=None, expert=False, briefmode=False, - inbrief=False, groupVars=(), onlyhelp=False, **kwargs): - """ - dv - datavars, varObj - current variable, expert - expert variable flag, - briefmode - view request for brief, inbrief - variable palced in brief, - """ - if dv: - super(Field, self).__init__() - self.name = varObj.name - self.type = varObj.type - if varObj.opt: - self.opt = Option(varObj.opt, varObj.metavalue, varObj.help, - varObj.syntax) - self.help = varObj.help - self.element = varObj.element or self.elementByType(self.type) - self.label = str(varObj.label or varObj.name) - self.guitype = varObj.guitype - if (not onlyhelp or varObj.syntax or self.type == "table" or - self.type == "bool"): - if inbrief: - self.uncompatible = "" - else: - 
self.uncompatible = dv.Uncompatible(varObj.name) - if self.uncompatible: - # блок используется для отмены пометки о несовместимости - # если несовместимая переменная, и та из-за которой - # она является несовместимой находятся на одной и той же - # странице (например доступность MBR зависит от опции UEFI, - # они находятся на одной и той же странице) - for var in varObj.reqUncompat: - if not briefmode and var in groupVars: - if not dv.Uncompatible(var.name): - self.uncompatible = "" - break - else: - dv.Invalidate(varObj.name, onlySet=True) - if expert: - self.default = not varObj.wasSet - self.choice, self.comments = self.getChoice(varObj) - if self.type == "table": - # print("DEBUG_table creation") - - self.tablevalue = Table(dv=dv, briefmode=briefmode, - varObj=varObj) - if self.tablevalue.writable: - self.type = "writable" - else: - full_varname = "%s.%s" % (varObj.section, varObj.name) - value = dv.Get(full_varname) - if type(value) in (list, tuple): - if briefmode and "choice" not in self.type: - self.value = dv.Get(full_varname, - humanreadable=True) - self.listvalue = self.arrayString(value) - else: - if briefmode: # and not "choice" in self.type: - self.value = dv.Get(full_varname, - humanreadable=True) - else: - self.value = value - # if self.value: - # self.default = self.value - else: - super(Field, self).__init__(**kwargs) - - -class GroupField(DataVarsSerializer): - name = String(default=None, min_occurs=1) - fields = Array(Field, default=None, min_occurs=1) - prevlabel = String(default=None, min_occurs=1) - nextlabel = String(default=None, min_occurs=1) - last = Boolean(default=None, min_occurs=1) - - def __init__(self, name="", fields=list(), prevlabel="", - nextlabel="", last=False, dv=None, info=None, - expert=False, brief=False, onlyhelp=False): - super(GroupField, self).__init__() - self.last = last - if dv: - self.name = info['name'] - if not onlyhelp: - self.nextlabel = info['next_label'] - self.prevlabel = _("Previous") - else: - 
self.nextlabel = None - self.prevlabel = None - self.fields = [] - - # get all variables with deps which using in this group - groupDepVars = [] - from itertools import chain - - for varname in chain(info['normal'], info['expert']): - for var in dv.getRequired(varname): - if var not in groupDepVars: - groupDepVars.append(var) - - if brief: - uniqBrief = list(info['brief']) + [x for x in - info['brief_force'] - if x not in info['brief']] - for varname in uniqBrief: - varObj = dv.getInfo(varname) - inbrief = varname in info['brief_force'] - self.fields.append(Field(dv=dv, inbrief=inbrief, - briefmode=brief, - varObj=varObj, - groupVars=groupDepVars, - onlyhelp=onlyhelp)) - else: - uniqBrief = [] - for varname in (x for x in info['normal'] if x not in uniqBrief): - inbrief = brief and varname in info['brief_force'] - if brief and 'hide' in info and varname in info['hide']: - continue - varObj = dv.getInfo(varname) - self.fields.append(Field(dv=dv, inbrief=inbrief, - briefmode=brief, - varObj=varObj, - groupVars=groupDepVars, - onlyhelp=onlyhelp)) - if info['expert']: - for varname in (x for x in info['expert'] if - x not in uniqBrief): - if expert is True or expert is None and dv.getInfo( - varname).wasSet: - self.fields.append(Field(name="expert", - element="expert", - label=str(info['expert_label']), - value="open")) - for varname_ in info['expert']: - inbrief = brief and varname_ in info['brief_force'] - if (brief and 'hide' in info and - varname_ in info['hide']): - continue - varObj = dv.getInfo(varname_) - self.fields.append(Field(dv=dv, expert=True, - inbrief=inbrief, - briefmode=brief, - varObj=varObj, - groupVars=groupDepVars, - onlyhelp=onlyhelp)) - break - else: - if expert is False: - for varname in info['expert']: - vn = varname.rpartition('.')[2] - dv.Invalidate(vn, True) - if not onlyhelp: - self.fields.append(Field(name="expert", - element="expert", - label=str(info['expert_label']), - value="close", - onlyhelp=onlyhelp)) - else: - self.name = name - 
self.fields = fields - self.nextlabel = nextlabel - - -class ViewInfo(DataVarsSerializer): - groups = Array(GroupField, default=None, min_occurs=1) - has_brief = Boolean(default=None, min_occurs=1) - - def __init__(self, datavars=None, step=None, expert=None, allsteps=False, - brief=None, brief_label=None, has_brief=False, groups=list(), - viewparams=None): - super(ViewInfo, self).__init__() - onlyhelp = False - if viewparams: - # for compatible specifing step by param - step = viewparams.step if step is None else step - expert = viewparams.expert if expert is None else expert - brief = viewparams.brief if brief is None else brief - onlyhelp = viewparams.onlyhelp - self.has_brief = has_brief - - if datavars: - self.groups = [] - varGroups = datavars.getGroups() - lastGroup = len(varGroups) - 1 - # interate all vars group - if step in (0, -1, None) or allsteps: - briefData = datavars.getBrief() - # print("DEBUG viewInfo") - # print("var groups: %s " % bool(varGroups)) - # print(datavars) - self.groups.append(self.stepGroup(varGroups, brief_label, - help_value=briefData.get( - "help", None), - next_value=briefData.get( - "next", None), - image_value=briefData.get( - "image", ""))) - for i, groupInfo in enumerate(varGroups): - if step in (None, -1) or step == i: - # print("viewInfo step %s " % i) - - self.groups.append(GroupField(dv=datavars, info=groupInfo, - last=(lastGroup == i), - expert=expert, - brief=brief, - onlyhelp=onlyhelp)) - if groupInfo['custom_buttons']: - for but in groupInfo['custom_buttons']: - if len(but) > 4: - listval = but[4] - else: - listval = None - if len(but) > 5: - f = but[5] - if f is None or callable(f) and f(datavars.Get): - enable = True - else: - enable = False - else: - enable = True - self.groups[-1].fields.append(Field( - name=but[0], label=but[1], value=but[2], - element=but[3], listvalue=listval, - guitype=None if enable else "readonly")) - - else: - self.groups = groups - - def stepGroup(self, groupInfo, brief_label, 
help_value=None, - next_value=None, image_value=""): - """Step group""" - # print("DEBUG what are you doing stepGroup") - # print(groupInfo) - # print(brief_label) - return GroupField(fields=[ - Field(name="Steps", - element="table", - label=brief_label, - type='steps', - help=help_value, - value=next_value, - tablevalue=Table( - head=[i['name'] for i in groupInfo], - fields=[i.get('image', '') - for i in groupInfo] + [image_value], - body=[self.arrayString( - list(i['normal']) + [""] + list(i['expert'])) - for i in groupInfo]))]) - - # element = ['table', 'radio', 'combo', 'comboEdit', 'multichoice', \ - # 'multichoice_add', 'check', 'check_tristate', 'expert', 'input'] - - -class ViewParams(ComplexModel): - """ - Struct for _view methods - """ - step = Integer(default=None, min_occurs=1) # number of group variables - expert = Boolean(default=None, min_occurs=1) # request expert variables - brief = Boolean(default=None, min_occurs=1) # request brief variables - onlyhelp = Boolean(default=None, min_occurs=1) # request params for only help - help_set = Boolean(default=None, min_occurs=1) # set cl_help_set to on - conargs = Array(String, default=None, min_occurs=1) # set cl_console_args - dispatch_usenew = Boolean(default=None, min_occurs=1) # set cl_dispatch_conf to usenew - clienttype = String(default=None, min_occurs=1) # type of client "gui","console" - - -######### -# MESSAGE -######### -class ReturnedMessage(ComplexModel): - type = String(default=None, min_occurs=1) - field = String(default=None, min_occurs=1) - message = String(default=None, min_occurs=1) - expert = Boolean(default=None, min_occurs=1) - field_obj = Field - - def __init__(self, type=None, field=None, message=None, - expert=False, field_obj=None): - super(ReturnedMessage, self).__init__() - self.type = type - self.field = field - self.message = message - self.expert = expert - if field_obj: - self.field_obj = Field(dv=field_obj.parent, varObj=field_obj) - else: - self.field_obj = None - - -class 
Message(ComplexModel): - type = String(default=None, min_occurs=1) - message = String(default=None, min_occurs=1) - id = Integer(default=None, min_occurs=1) - result = Boolean(default=None, min_occurs=1) - onlyShow = String(default=None, min_occurs=1) - default = String(default=None, min_occurs=1) - - def __init__(self, type='normal', message=None, id=None, - result=None, onlyShow=None, default=None): - super(Message, self).__init__() - self.type = type - self.message = message - self.id = id - self.result = result - self.onlyShow = onlyShow - self.default = default - - -class ReturnProgress(ComplexModel): - percent = Integer(default=None, min_occurs=1) - short_message = String(default=None, min_occurs=1) - long_message = String(default=None, min_occurs=1) - control = Integer(default=None, min_occurs=1) - - def __init__(self, percent=0, short_message=None, long_message=None, - control=None): - super(ReturnProgress, self).__init__() - self.percent = percent - self.short_message = short_message - self.long_message = long_message - self.control = control - - -class Frame(ComplexModel): - values = Array(Message, default=None, min_occurs=1) - - -# get and send client messages -class CoreWsdl(CoreServiceInterface): - perm_denied = [Message(type='error', message='403 Forbidden')] - @staticmethod - def callAction(cls, sid, info, logicClass=None, - method_name=None, actionClass=None, - callbackRefresh=lambda sid, dv: True, - invalidators=None, depend_methods=()): - """ - Общий алгоритм вызова действия - """ - if not logicClass: - logicClass = {} - dv = cls.get_cache(sid, method_name, "vars") - try: - if not dv: - dv = getattr(cls, "%s_vars" % method_name)() - else: - callbackRefresh(cls, sid, dv) - dv.processRefresh() - checkonly = False - checkall = False - if info and hasattr(info, "CheckOnly"): - checkonly = info.CheckOnly - if info and not hasattr(info, "CheckOnly"): - checkall = True - elif info and hasattr(info, "CheckAll"): - checkall = info.CheckAll - errors = 
map(lambda x: ReturnedMessage(**x), - dv.checkGroups(info, - allvars=checkall or not checkonly, - invalidators=invalidators)) - # if dv.Get('cl_env_debug_set') == 'on': - # dv.printGroup(info) - if errors: - return errors - if checkonly: - returnmess = ReturnedMessage(type='', message=None) - return [returnmess] - if not actionClass: - return [] - objs = {} - from calculate.core.server.func import CommonLink - - if isinstance(logicClass, dict): - for k, v in logicClass.items(): - objs[k] = type("Logic", (CommonLink, v, object), {})() - install_meth = type("CommonCore", (cls.Common, - actionClass, object), {}) - pid = cls.startprocess(cls, sid, target=install_meth, - method="run", - method_name=method_name, - args_proc=(objs, dv,)) - returnmess = ReturnedMessage(type='pid', message=pid) - returnmess.type = "pid" - returnmess.message = pid - cls.clear_cache(sid, method_name) - cache_cleared = [method_name] - clear_list = list(LoadedMethods.methodDepends[method_name]) - while clear_list: - method_clear_name = clear_list.pop() - if method_clear_name not in cache_cleared: - clear_list.extend( - LoadedMethods.methodDepends[method_clear_name]) - cls.clear_cache_method(method_clear_name) - cache_cleared.append(method_clear_name) - dv = None - return [returnmess] - finally: - if dv: - # print "Set cache", dv.Get('ur_unix_group_name_exists') - cls.set_cache(sid, method_name, "vars", dv, smart=False) - - @staticmethod - def fixInstallLocalization(cls, sid, dv): - """ - Метод смены локализации интерфейса на лету (во время выбора - параметров метода) - """ - # print("DEBUG fixInstallLocalization") - # print(dv) - if "--start" not in sys.argv: - return False - import threading - - curThread = threading.currentThread() - curThread.lang = dv.Get('install.os_install_locale_lang') - # print("LOCALE DEBUG: thread lang: %s" % curThread.lang) - currentLang = cls.get_cache(sid, "install", "lang") - # print("LOCALE DEBUG: cached lang: %s" % currentLang) - if currentLang != 
curThread.lang: - dv.clearGroups() - cls.install_vars(cls, dv) - dv.reinit() - return True - else: - return False - - # verification of compliance certificate and process (pid) - @staticmethod - def check_cert_pid(cls, sid, pid): - import threading - - curThread = threading.currentThread() - cert = curThread.client_cert - - from .cert_cmd import find_cert_id - - cert_id = find_cert_id(cert, cls.data_path, cls.certbase) - cert_id = int(cert_id) - if cert_id == 0: - return 0 - - # session file - if not os.path.exists(cls.sids): - os.system('mkdir %s' % cls.sids) - - check = 0 - try: - fd = open(cls.sids_file, 'r') - except IOError: - return 0 - while 1: - try: - # read all on one record - list_sid = pickle.load(fd) - except (EOFError, KeyError, IOError): - break - # find session id in sids file - if cert_id == int(list_sid[1]): - if sid == int(list_sid[0]): - check = 1 - - if check == 0: - return 0 - fd = open(cls.sids_pids, 'r') - while 1: - try: - # read out on 1 record - list_pid = pickle.load(fd) - except (EOFError, KeyError, IOError): - break - if sid == int(list_pid[0]): - if pid == int(list_pid[1]): - fd.close() - return 1 - fd.close() - return 0 - - # send to client all new message - @staticmethod - def process_messages(cls, pid, client_type): - result = [] - while len(cls.glob_frame_list[pid]) > \ - cls.glob_process_dict[pid]['counter']: - item = cls.glob_process_dict[pid]['counter'] - only_show = cls.glob_frame_list[pid][item].onlyShow - if (not client_type or - not only_show or only_show == client_type): - result.append(cls.glob_frame_list[pid][item]) - cls.glob_process_dict[pid]['counter'] += 1 - return result - - # send to client new message from frame - @staticmethod - def client_get_frame(cls, sid, pid, client_type): - if cls.check_cert_pid(cls, sid, pid): - return cls.process_messages(cls, pid, client_type) - return CoreWsdl.perm_denied - - # send to client new message from frame - @staticmethod - def client_get_entire_frame(cls, sid, pid): - if 
cls.check_cert_pid(cls, sid, pid): - try: - results = cls.glob_frame_list[pid] - except (AttributeError, KeyError, IndexError): - return CoreWsdl.perm_denied - len_glob_frame_list = len(cls.glob_frame_list[pid]) - cls.glob_process_dict[pid]['counter'] = len_glob_frame_list - return results - return CoreWsdl.perm_denied - - @staticmethod - def client_get_table(cls, sid, pid, id): - if cls.check_cert_pid(cls, sid, pid): - return cls.glob_table_dict[pid][id] - return CoreWsdl.perm_denied - - @staticmethod - def client_get_progress(cls, sid, pid, id): - if cls.check_cert_pid(cls, sid, pid): - return cls.glob_progress_dict[pid][id] - return ReturnProgress(0, control=403) - - @staticmethod - # get message from client - def client_send_message(cls, sid, pid, text): - if cls.check_cert_pid(cls, sid, pid): - cls.glob_process_dict[pid]['answer'] = text - return Message(type='normal', message="") - return CoreWsdl.perm_denied - - - -class WsdlAdapter(object): - adapted_class = None - - def __init__(self, source): - self.source = source - - @classmethod - def from_detect(cls, source): - if isinstance(source, (cls.adapted_class, WsdlAdapter)): - return source - else: - return cls(source) - - def __getattr__(self, item): - return getattr(self.source, item) - - @staticmethod - def Array(field, field_type): - def wrapper(self): - if getattr(self.source, field): - return [field_type(x) - for x in getattr(getattr(self.source, field), - field_type.__name__[:-7])] - else: - return [] - - return property(wrapper) - - @staticmethod - def StringArray(field): - def wrapper(self): - source_field = getattr(self.source, field) - return source_field.string if source_field else [] - - return property(wrapper) - - -class ChoiceValueAdapter(WsdlAdapter): - adapted_class = ChoiceValue - - values = WsdlAdapter.StringArray("values") - - comments = WsdlAdapter.StringArray("comments") - - onChanged = WsdlAdapter.StringArray("onChanged") - - -class TableAdapter(WsdlAdapter): - adapted_class = Table - 
- fields = WsdlAdapter.StringArray("fields") - head = WsdlAdapter.StringArray("head") - - @classmethod - def get_matrix(cls, value): - if hasattr(value, 'stringArray'): - return [row.string - for row in value.stringArray - if hasattr(row, "string")] - elif isinstance(value, list): - return value - return [] - - @property - def body(self): - return self.get_matrix(self.source.body) - - values = WsdlAdapter.Array("values", ChoiceValueAdapter) - - -class FieldAdapter(WsdlAdapter): - adapted_class = Field - - choice = WsdlAdapter.StringArray("choice") - listvalue = WsdlAdapter.StringArray("listvalue") - comments = WsdlAdapter.StringArray("comments") - - @property - def tablevalue(self): - return TableAdapter(self.source.tablevalue) - - -class GroupFieldAdapter(WsdlAdapter): - adapted_class = GroupField - - fields = WsdlAdapter.Array("fields", FieldAdapter) - - -class ViewInfoAdapter(WsdlAdapter): - adapted_class = ViewInfo - - groups = WsdlAdapter.Array("groups", GroupFieldAdapter) - - -class ArrayReturnedMessage(WsdlAdapter): - @classmethod - def from_detect(cls, source): - if isinstance(source, (list, tuple)): - return source - else: - return source.ReturnedMessage diff --git a/libs_crutch/core/server/baseClass.py b/libs_crutch/core/server/baseClass.py deleted file mode 100644 index 4a24aac..0000000 --- a/libs_crutch/core/server/baseClass.py +++ /dev/null @@ -1,406 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function -from __future__ import absolute_import -import os -import sys - -import calculate.contrib -from spyne import rpc -from spyne import Service -from spyne import String, Integer, Array - -from calculate.core.server.core_interfaces import CoreServiceInterface -from calculate.lib.utils.files import readFile - -_ = lambda x: x -from calculate.lib.cl_lang import getLazyLocalTranslate, setLocalTranslate - -setLocalTranslate('cl_core3', sys.modules[__name__]) - -__ = getLazyLocalTranslate(_) - -from .clean import sid_monitor, monitor -from .tasks import restart, dbus_stop -from .loaded_methods import LoadedMethods -from .api_types import Message, ReturnProgress, Table -from . import post_cert -from . import post_request -from . import send_cert - -String.Attributes.encoding = "utf-8" - -# class Basic(CoreServiceInterface): -class Basic(Service, CoreServiceInterface): - """ Basic server class """ - - SERV_VERS = 0.11 - glob_process_dict = {} - process_pid = {} - glob_progress_dict = {} - glob_table_dict = {} - glob_frame_list = {} - manager = None - - data_path = None - certbase = None - rights = None - sids = None - pids = None - sids_file = None - ssl_certificate = None - ssl_private_key = None - cachedict = {} - - #workaround: Spyne won't let us use self ref in @rpc, - # and ctx leads to original service class (Basic in this case) - # but some methods are gathered from CoreWsdl classes, so we need - # a ref to combined class - comb_class_ref = None - - # function getting object from cache - @staticmethod - def get_cache(sid, meth_name, obj_name): - if sid in Basic.cachedict: - if meth_name in Basic.cachedict[sid]: - if obj_name in Basic.cachedict[sid][meth_name]: - return Basic.cachedict[sid][meth_name][obj_name] - return None - - # function placing object in cache - @staticmethod - def set_cache(sid, meth_name, obj_name, obj, 
smart=True): - try: - if sid not in Basic.cachedict: - Basic.cachedict[sid] = {} - if meth_name not in Basic.cachedict[sid]: - Basic.cachedict[sid][meth_name] = {} - if obj_name not in Basic.cachedict[sid][meth_name]: - Basic.cachedict[sid][meth_name][obj_name] = obj - return True - if smart: - for var_name, var_value in obj.__dict__.viewitems(): - if var_value is not None: - setattr(Basic.cachedict[sid][meth_name][obj_name], - var_name, var_value) - else: - Basic.cachedict[sid][meth_name][obj_name] = obj - return True - except Exception as e: - if isinstance(e, SyntaxError): - raise - return False - - - - @staticmethod - def clear_cache(sid, meth_name=None, obj_name=None): - if sid not in Basic.cachedict: - return True - if meth_name: - if meth_name not in Basic.cachedict[sid]: - return True - - if obj_name: - if obj_name not in Basic.cachedict[sid][meth_name]: - return True - else: - obj = Basic.cachedict[sid][meth_name].pop(obj_name, None) - if hasattr(obj, 'close'): - obj.close() - else: - method_dict = Basic.cachedict[sid].pop(meth_name, None) - for val_obj in method_dict.values(): - if hasattr(val_obj, 'close'): - val_obj.close() - else: - session_dict = Basic.cachedict.pop(sid, None) - for method_dict in session_dict.values(): - for val_obj in method_dict.values(): - if hasattr(val_obj, 'close'): - val_obj.close() - - @staticmethod - def clear_cache_method(method=None): - for k, v in Basic.cachedict.items(): - Basic.clear_cache(k, method) - - @staticmethod - def set_paths(data_path, certbase, serv_certbase, rights, - group_rights, sids, pids, sids_pids, sids_file, pids_file, - max_sid, max_pid, cert_path, log_filename, - cert="server.crt", key="server.key"): - """ set system path for main class """ - Basic.data_path = data_path - Basic.certbase = certbase - Basic.serv_certbase = serv_certbase - Basic.rights = rights - Basic.group_rights = group_rights - Basic.sids = sids - Basic.pids = pids - Basic.sids_pids = sids_pids - Basic.sids_file = sids_file - 
Basic.pids_file = pids_file - Basic.ssl_certificate = cert - Basic.ssl_private_key = key - Basic.cert_path = cert_path - Basic.max_sid = int(max_sid) - Basic.max_pid = int(max_pid) - Basic.log_filename = log_filename - - - @staticmethod - def set_comb_class_ref(comb_class_ref): - Basic.comb_class_ref = comb_class_ref - - @staticmethod - def run_tasks(): - """ - Запуситить регулярные задачи - """ - import threading - # start monitor and sid_monitor threads - monitoring = threading.Thread(target=monitor, - args=(Basic.certbase, Basic.sids_file)) - sid_mon = threading.Thread(target=sid_monitor, - args=(Basic.sids_file, Basic.sids, Basic.comb_class_ref)) - restart_watchdog = threading.Thread(target=restart, - args=(Basic.comb_class_ref.glob_process_dict,)) - dbus_stop_mon = threading.Thread(target=dbus_stop, - args=(Basic.comb_class_ref.glob_process_dict, Basic.comb_class_ref.sids, Basic.comb_class_ref)) - threads = [] - for thread, success, failed in ( - (monitoring, _("General monitoring started"), - _("Monitoring error")), - (sid_mon, _("Session monitoring started"), - _("Session monitoring failed")), - (restart_watchdog, _("Restart watchdog started"), - _("Restart watchdog failed")), - (dbus_stop_mon, _("Inactive watchdog started"), - _("Inactive watchdog failed"))): - try: - threads.append(thread) - thread.daemon = True - thread.start() - print(success) - except Exception: - print(failed) - print() - - @staticmethod - def killall(): - sys.stdout.write('\n' + _('Closing all processes') + '...') - sys.stdout.flush() - import time - # Waiting for closing - for pid in Basic.process_pid.keys(): - try: - os.kill(Basic.process_pid[pid].pid, 2) - except OSError: - pass - while True: - num_active_process = 0 - for pid in Basic.process_pid.keys(): - if Basic.process_pid[pid].is_alive(): - num_active_process += 1 - - if num_active_process: - sys.stdout.write('.') - sys.stdout.flush() - else: - print('\n' + _('All processes are closed.')) - return 0 - time.sleep(0.5) - - - 
@rpc(_returns=Array(Integer)) - def post_cert(ctx): - # import post_cert - returns = post_cert.serv_post_cert(Basic.comb_class_ref) - return returns - - @rpc(Integer, _returns=Integer) - def clear_session_cache(ctx, sid): - check_sid = Basic.comb_class_ref.check_sid_cert(Basic.comb_class_ref, sid) - if not check_sid: - return 1 - # clear cache - Basic.clear_cache(sid) - return 0 - - @rpc(Integer, String, _returns=Integer) - def clear_method_cache(ctx, sid, method_name): - check_sid = Basic.comb_class_ref.check_sid_cert(Basic.comb_class_ref, sid) - if not check_sid: - return 1 - # clear cache - Basic.clear_cache(sid, method_name) - return 0 - - @rpc(Integer, Integer, _returns=Integer) - def clear_pid_cache(ctx, sid, pid): - if not Basic.comb_class_ref.check_sid_cert(Basic.comb_class_ref, sid): - return 1 - - if pid in Basic.comb_class_ref.find_sid_pid_file(Basic.comb_class_ref, sid): - # clear pid cache - Basic.comb_class_ref.delete_pid(Basic.comb_class_ref, sid, pid) - return 0 - - return 2 - - @rpc(Integer, Integer, String, _returns=Array(Integer)) - def post_sid(self, sid, cert_id, lang): - return Basic.comb_class_ref.sid_cmp(Basic.comb_class_ref, sid, cert_id, lang) - - @rpc(Integer, String, _returns=(Array(Integer), Array(Integer))) - def init_session(ctx, sid, lang): - return Basic.comb_class_ref.serv_init_session(Basic.comb_class_ref, sid, lang) - - @rpc(Integer, _returns=Array(String)) - # @Dec.check_permissions(['del_sid']) - def del_sid(self, sid): - flag = Basic.comb_class_ref.del_sid_pid(Basic.comb_class_ref, sid) - Basic.comb_class_ref.clear_cache(int(sid)) - if not flag: - return Basic.comb_class_ref.del_sid_from_file(Basic.comb_class_ref, - sid) - else: - return ['-1'] - - @rpc(Integer, Integer, _returns=Integer) - def pid_kill(ctx, pid, sid): - return Basic.comb_class_ref.serv_pid_kill(Basic.comb_class_ref, pid, sid) - - @rpc(Integer, _returns=Array(Integer)) - def list_pid(ctx, sid): - return 
Basic.comb_class_ref.find_sid_pid_file(Basic.comb_class_ref, sid) - - @rpc(Integer, String, _returns=Array(Array(String))) - def get_methods(ctx, sid, client_type): - return map(lambda x: map(str, x), - Basic.comb_class_ref.serv_get_methods(Basic.comb_class_ref, client_type)) - - @rpc(Integer, _returns=Array(String)) - @LoadedMethods.core_method(rights=["get-sessions"]) - # @Dec.console('list-session') - def get_sessions(ctx, sid): - if not Basic.comb_class_ref.check_sid_cert(Basic.comb_class_ref, sid): - return [''] - return Basic.comb_class_ref.serv_get_sessions(Basic.comb_class_ref) - - @rpc(Integer, Integer, _returns=Array(String)) - # @Dec.check_permissions(["pid_info"]) - def pid_info(ctx, sid, pid): - return Basic.comb_class_ref.serv_pid_info(Basic.comb_class_ref, sid, pid) - - @rpc(Integer, _returns=Array(String)) - @LoadedMethods.core_method(rights=["session_info"]) - def sid_info(ctx, sid): - return Basic.comb_class_ref.serv_sid_info(Basic.comb_class_ref, sid) - - @rpc(Integer, String, _returns=Array(String)) - @LoadedMethods.check_permissions(["view_cert_right"]) - # @Dec.console('view-cert-right') - # @Dec.gui('System') - def view_cert_right(ctx, cert_id, client_type): - return Basic.comb_class_ref.serv_view_cert_right(Basic.comb_class_ref, - cert_id, Basic.data_path, client_type) - - @rpc(Integer, _returns=Integer) - def active_client(ctx, sid): - return Basic.comb_class_ref.active_clients(Basic.comb_class_ref, sid) - - @rpc(String, String, String, String, _returns=String) - def post_client_request(ctx, request, ip, mac, client_type): - res = post_request.serv_post_client_request(request, Basic.data_path, - ip, mac, client_type, - Basic.certbase, - Basic.cert_path) - return res - - @rpc(String, String, _returns=Array(String)) - def get_client_cert(ctx, req_id, request): - res = post_request.serv_get_client_cert(req_id, request, - Basic.data_path, Basic.certbase, - Basic.cert_path) - return res - - @rpc(String, String, String, _returns=String) - def 
post_server_request(ctx, request, ip, mac): - res = post_request.serv_post_server_request(request, Basic.data_path, - ip, mac, Basic.serv_certbase, - Basic.cert_path) - return res - - @rpc(String, String, _returns=Array(String)) - def get_server_cert(ctx, req_id, request): - res = post_request.serv_get_server_request(req_id, request, - Basic.data_path, - Basic.serv_certbase, - Basic.cert_path) - return res - - @rpc(_returns=String) - def get_crl(self): - if os.path.exists(Basic.data_path + '/server_certs/ca.crl'): - return readFile(Basic.data_path + '/server_certs/ca.crl') - return ' ' - - @rpc(_returns=String) - def get_server_host_name(ctx): - import OpenSSL - - cert = readFile(Basic.ssl_certificate) - cert_obj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, cert) - subject = cert_obj.get_subject().get_components() - for subj in subject: - if subj[0] == 'CN': - return subj[1] - return '' - - @rpc(_returns=String) - def get_ca(ctx): - return send_cert.get_ca(Basic.cert_path) - - #moved here from api_types: - - @rpc(Integer, Integer, String, _returns=Array(Message)) - # @Dec.check_permissions() - def get_frame(ctx, sid, pid, client_type): - return Basic.comb_class_ref.client_get_frame(Basic.comb_class_ref, sid, pid, client_type) - - @rpc(Integer, Integer, _returns=Array(Message)) - def get_entire_frame(self, sid, pid): - return Basic.comb_class_ref.client_get_entire_frame(Basic.comb_class_ref, sid, pid) - - @rpc(Integer, Integer, Integer, _returns=ReturnProgress) - # @Dec.check_permissions() - def get_progress(self, sid, pid, id): - return Basic.comb_class_ref.client_get_progress(Basic.comb_class_ref, sid, pid, id) - - @rpc(Integer, Integer, Integer, _returns=Table) - # @Dec.check_permissions() - def get_table(self, sid, pid, id): - return Basic.comb_class_ref.client_get_table(Basic.comb_class_ref, sid, pid, id) - - # get messages, transferred from client - @rpc(Integer, Integer, String, _returns=Message) - # @Dec.check_permissions() - def 
send_message(self, sid, pid, text): - return Basic.comb_class_ref.client_send_message(Basic.comb_class_ref, sid, pid, text) diff --git a/libs_crutch/core/server/bootstrap.py b/libs_crutch/core/server/bootstrap.py deleted file mode 100644 index f13a729..0000000 --- a/libs_crutch/core/server/bootstrap.py +++ /dev/null @@ -1,501 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function -from __future__ import absolute_import -from . import cert_cmd -from . 
import post_request -import datetime -import subprocess -import shutil - -from calculate.core.client.cert_func import new_key_req -from calculate.core.client.function import get_ip_mac_type -from calculate.core.datavars import DataVarsCore -from calculate.lib.utils.files import (makeDirectory, pathJoin, readFile, - writeFile, readFileEx) -from calculate.lib.utils.mount import isMount -from calculate.core.server.admin import Admins -import os -import hashlib -import pwd -import socket -import sys -import re - -_ = lambda x: x -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -def parse_cert_date(date): - year = int(date[:4]) - month = int(date[4:6]) - day = int(date[6:8]) - hour = int(date[8:10]) - minute = int(date[10:12]) - sec = int(date[12:14]) - return datetime.datetime(year, month, day, hour, minute, sec) - - -def check(cert, key): - error_flag = 0 - if not os.path.isfile(cert): - error_flag = 1 - print(_('Certificate %s not found') % cert) - print(key, cert) - if not os.path.isfile(key): - error_flag = 1 - print(_('Private key %s not found') % key) - if os.path.isfile(cert) and os.path.isfile(key): - import OpenSSL - # check correspondence certificate and private key - cmd_cert = 'openssl x509 -noout -modulus -in ' + cert - cmd_key = 'openssl rsa -noout -modulus -in ' + key - p_cert = subprocess.Popen(cmd_cert.split(), stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - p_key = subprocess.Popen(cmd_key.split(), stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - if not p_cert.stdout.read().strip() == p_key.stdout.read().strip(): - print(_('The certificate does not match the private key')) - error_flag = 1 - - # check certificate date - cert_data = readFile(cert) - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, cert_data) - certobj.get_notBefore() - try: - not_after = parse_cert_date(certobj.get_notAfter()) - not_before = parse_cert_date(certobj.get_notBefore()) - - date = 
datetime.datetime.now() - - if not_before > date: - print(_('Certificate creation date later than current date')) - error_flag = 1 - elif not_after < date: - print(_('Certificate expired')) - error_flag = 1 - except ValueError: - print(_('Failed to get certificate work date')) - error_flag = 1 - - sys.exit(error_flag) - - -def init(cert, key, cert_path, data_path, certbase, args, port): - if args.remove_certificates: - key_force(cert_path, data_path) - - new_serv_cert = False - if not check_serv_cert(cert_path): - print(_('Generating the server certificate')) - for step in range(2): - args = change_args(args, step) - create_server_cert(cert, key, cert_path, args, port) - new_serv_cert = True - else: - print(_('Server certificate now exists.')) - - os.chmod(data_path, 0o700) - -def force_user_cert(server_cert, cert_path, data_path, cert_base, user_name, - dv=None): - def is_crypthome_notmount(dv, username): - dv.Set('ur_login', user_name, force=True) - homedir = dv.Get('ur_home_path') - if (dv.GetBool('ur_home_crypt_set') and - '.Private' not in isMount(homedir)): - return True - return False - - if not check_client_cert(user_name, server_cert=server_cert): - print(_('Generating the client certificate')) - else: - print(_('Regenerating the client certificate')) - group = "all" - if dv: - admins = Admins(dv) - if user_name not in admins: - admins[user_name] = group - admins.save() - group = admins[user_name] - if is_crypthome_notmount(dv, user_name): - print(_("User profile is encrypted. 
Please perform user login for " - "complete of certificate generation")) - return - create_client_cert(server_cert, cert_path, data_path, cert_base, - user_name, group) - - -def check_serv_cert(cert_path): - if os.path.isfile(os.path.join(cert_path, 'server.crt')) and \ - os.path.isfile(os.path.join(cert_path, 'server.key')): - return True - return False - - -def check_client_cert(user_name, server_cert=None): - client_cert_path = check_user_path(user_name) - if server_cert: - server_host_name = get_certificate_dn(server_cert) - else: - server_host_name = socket.getfqdn() - crt_fn = os.path.join(client_cert_path, server_host_name + '.crt') - key_fn = os.path.join(client_cert_path, server_host_name + '.key') - if os.path.isfile(crt_fn) and os.path.isfile(key_fn): - return True - return False - - -def change_args(args, step=None): - if step == 0: - args.host = False - args.gen_root_cert = True - args.root_host = False - args.use_root_cert = False - elif step == 1: - args.gen_root_cert = False - args.use_root_cert = True - - return args - - -def create_server_cert(cert, key, cert_path, args, port): - cert_cmd.check_server_certificate(cert, key, cert_path, args, port, - auto=True) - - -def create_client_cert(server_cert, cert_path, data_path, certbase, user_name, - group="all"): - client_cert_path = check_user_path(user_name) - if not client_cert_path: - print(_('no path to the client certificate')) - return 1 - req_id = create_request(server_cert, cert_path, data_path, certbase, - client_cert_path, user_name) - sign_certificate(req_id, cert_path, data_path, group) - get_certificate(cert_path, data_path, certbase, client_cert_path, user_name, - server_cert=server_cert) - - -def check_user_path(user_name): - try: - pwdObj = pwd.getpwnam(user_name) - except KeyError as e: - print(e) - return None - - home_dir = pwdObj.pw_dir - if not os.path.isdir(home_dir): - if not makeDirectory(home_dir): - return None - os.chown(home_dir, pwdObj.pw_uid, pwdObj.pw_gid) - 
os.chmod(home_dir, 0o700) - calc_dir = os.path.join(home_dir, '.calculate') - cert_dir = os.path.join(calc_dir, 'client_cert') - for directory in [calc_dir, cert_dir]: - if not os.path.isdir(directory): - if not makeDirectory(directory): - return None - os.chown(directory, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(directory, 0o755) - - for path in os.walk(cert_dir): - os.chown(path[0], pwdObj.pw_uid, pwdObj.pw_gid) - for _file in path[2]: - fn = pathJoin(path[0], _file) - if os.path.isfile(fn): - os.chown(fn, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(fn, 0o644) - return cert_dir - - -def create_request(server_cert, cert_path, data_path, certbase, - client_cert_path, user_name): - server_host_name = get_certificate_dn(server_cert) - - key = os.path.join(client_cert_path, server_host_name + '.key') - - client_req_file = new_key_req(key, client_cert_path, server_host_name, - auto=True) - - try: - pwdObj = pwd.getpwnam(user_name) - except KeyError as e: - print(e) - return None - for files in [client_req_file, key + '_pub']: - if os.path.exists(files): - os.chown(files, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(files, 0o644) - if os.path.exists(key): - os.chown(key, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(key, 0o600) - - ip, mac, client_type = get_ip_mac_type() - data = readFile(client_req_file) - - req_id = post_request.serv_post_client_request( - data, data_path, ip, mac, client_type, certbase, cert_path) - - fc = open(os.path.join(client_cert_path, 'req_id'), 'w') - fc.write(req_id) - fc.close() - return req_id - - -def sign_certificate(req_id, cert_path, data_path, group="all"): - cert_cmd.sing_req_by_server(req_id, cert_path, data_path, auto=True, - group_name=group) - - -def get_certificate_dn(cert_file): - cert_data = readFile(cert_file) - if cert_data: - import OpenSSL - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, cert_data) - cert_info = dict(certobj.get_subject().get_components()) - return cert_info["CN"] - return "localhost" - - -def 
clear_localuser_certificates(certbase): - """ - Удалить все пользовательские сертификаты, создаваемые для локальных - пользователей - """ - certdata = readFileEx(certbase, grab=True) - certdn = os.path.dirname(certbase) - # оставляем только сертификаты, которые не содержат отметки - # для какого локального пользователя они созданы - writedata = "\n".join(x[0] for x in re.findall("^((\S+\s+){6}\S+)\s*$", - certdata, flags=re.M)) - with writeFile(certbase) as f: - f.write("%s\n"%writedata) - - # удаляем физически сертификаты, созданные для локальных пользователей - for localcert in re.finditer("^(\S+)\s+(\S+\s+){6}\S+\s*$", - certdata, flags=re.M): - cert_fn = "%s/%s.crt"%(certdn, localcert.group(1)) - try: - os.unlink(cert_fn) - except OSError: - print(_("Failed to remove local client certificate") % cert_fn) - - -def get_certificate(cert_path, data_path, certbase, client_cert_path, - user_name, server_cert=None): - req_id_file = os.path.join(client_cert_path, 'req_id') - if not os.path.exists(req_id_file): - print(_("request not sent or file %s deleted") % req_id_file) - return 1 - fc = open(req_id_file, 'r') - req_id = fc.read() - fc.close() - - if server_cert: - server_host_name = get_certificate_dn(server_cert) - else: - server_host_name = socket.getfqdn() - - req_file = os.path.join(client_cert_path, server_host_name + '.csr') - if not os.path.exists(req_file): - print(_('Request %s not found') % req_file) - return 1 - request = readFile(req_file) - md5 = hashlib.md5() - md5.update(request) - md5sum = md5.hexdigest() - - result = post_request.serv_get_client_cert( - req_id, md5sum, data_path, certbase, cert_path, - localuser=user_name) - - cert = result[0] - if len(result) > 1: - ca_root = result[1] - else: - return None - - if cert == '1': - print(_('The signature request was rejected!')) - return 1 - elif cert == '2': - print(_("The signature request has not been examined yet.")) - print(_("Your request ID = %s") % req_id) - return 1 - elif cert == '3': - 
print(_("The signature request does not match earlier data.")) - return 1 - elif cert == '4': - print(_("The request was sent from another IP.")) - return 1 - cert_file = os.path.join(client_cert_path, server_host_name + '.crt') - fc = open(cert_file, 'w') - fc.write(cert) - fc.close() - - try: - pwdObj = pwd.getpwnam(user_name) - except KeyError as e: - print(e) - return None - os.chown(cert_file, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(cert_file, 0o600) - - os.unlink(req_id_file) - print(_('Certificate saved. Your certificate ID: %s') % req_id) - - if ca_root: - clVars = DataVarsCore() - clVars.importCore() - clVars.flIniFile() - system_ca_db = clVars.Get('cl_glob_root_cert') - if os.path.exists(system_ca_db): - if ca_root in readFile(system_ca_db): - return 0 - - ca_dir = os.path.join(client_cert_path, 'ca') - if not os.path.isdir(ca_dir): - os.makedirs(ca_dir) - os.chown(ca_dir, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(ca_dir, 0o755) - root_cert_md5 = os.path.join(ca_dir, "cert_list") - - md5 = hashlib.md5() - md5.update(ca_root) - md5sum = md5.hexdigest() - - if not os.path.exists(root_cert_md5): - fc = open(root_cert_md5, "w") - fc.close() - - filename = None - with open(root_cert_md5) as fd: - t = fd.read() - # for each line - for line in t.splitlines(): - # Split string into a words list - words = line.split(' ', 1) - if words[0] == md5sum: - filename = words[1] - if not filename: - import OpenSSL - - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, ca_root) - issuer = certobj.get_issuer().get_components() - for item in issuer: - if item[0] == 'CN': - filename = item[1] - - fc = open(root_cert_md5, "a") - fc.write('%s %s\n' % (md5sum, filename)) - fc.close() - os.chown(root_cert_md5, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(root_cert_md5, 0o644) - - if not filename: - print(_('Field "CN" not found in the certificate!')) - return 1 - - ca_cert = os.path.join(ca_dir, filename) - fd = open(ca_cert, 'w') - fd.write(ca_root) - fd.close() - 
os.chown(ca_cert, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(ca_cert, 0o644) - - user_root_cert = os.path.join(ca_dir, 'ca_root.crt') - fa = open(user_root_cert, 'a') - fa.write(ca_root) - fa.close() - os.chown(user_root_cert, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(user_root_cert, 0o644) - # print _("Certificate added") - # else: - # print _("file with the CA certificate now exists") - - trust_dir = os.path.join(client_cert_path, 'trusted') - if not os.path.isdir(trust_dir): - os.makedirs(trust_dir) - os.chown(trust_dir, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(trust_dir, 0o755) - - ca_certs = os.path.join(trust_dir, "cert.list") - - if not os.path.exists(ca_certs): - fc = open(ca_certs, "w") - fc.close() - os.chown(ca_certs, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(ca_certs, 0o644) - - host = 'localhost' - filename = host - cert_file_trust = os.path.join(trust_dir, filename) - fc = open(cert_file_trust, "w") - fc.write(ca_root) - fc.close() - os.chown(cert_file_trust, pwdObj.pw_uid, pwdObj.pw_gid) - os.chmod(cert_file_trust, 0o644) - with open(ca_certs) as fd: - t = fd.read() - # for each line - for line in t.splitlines(): - # Split string into a words list - words = line.split() - if len(words) > 1: - # if first word... 
- if words[0] == host: - return 0 - - # Open file with compliance server certificates and server hostname - fcl = open(ca_certs, "a") - fcl.write(host + ' ' + filename + '\n') - fcl.close() - return 0 - - -def key_force(cert_path, data_path): - while True: - try: - resp = raw_input(_('Do you really want to remove all ' - 'certificates, requests and config files from ' - 'the server?') + ' (yes/no): ') - except KeyboardInterrupt: - resp = 'no' - if resp.lower() in ['n', 'no']: - return 0 - elif resp.lower() in ['y', 'yes']: - break - - if os.path.isdir(cert_path): - shutil.rmtree(cert_path) - remove_dirs = ['conf', 'server_certs', 'client_certs', 'pids', 'sids'] - for rm_dir in remove_dirs: - remove_dir = os.path.join(data_path, rm_dir) - if os.path.isdir(remove_dir): - shutil.rmtree(remove_dir) - - remove_files = ['sid.db', 'sid_pid'] - for rm_file in remove_files: - remove_file = os.path.join(data_path, rm_file) - if os.path.isfile(remove_file): - os.unlink(remove_file) diff --git a/libs_crutch/core/server/cert_cmd.py b/libs_crutch/core/server/cert_cmd.py deleted file mode 100644 index b7fc968..0000000 --- a/libs_crutch/core/server/cert_cmd.py +++ /dev/null @@ -1,1252 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Job with Command Line and Certificates -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import print_function -from __future__ import absolute_import -import os -import glob -import socket -import subprocess -import hashlib -import sys -from .func import new_key_req, uniq -from calculate.lib.utils import ip as ip -from calculate.core.datavars import DataVarsCore -from calculate.lib.utils.files import readFile -from calculate.lib.utils.text import _u8 -# import urllib2 as u2 -import urllib.request as u2 - -if hasattr(u2, "ssl"): - u2.ssl._create_default_https_context = u2.ssl._create_unverified_context - -_ = lambda x: x -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -def getIpLocal(): - for interface in ip.getInterfaces(): - return ip.getIp(interface) - else: - return "" - - -def getHwAddr(): - """ get MAC adress for interface """ - for interface in ip.getInterfaces(): - return ip.getMac(interface) - else: - return "" - - -# method for generating server certificates -def check_server_certificate(cert, key, cert_path, args, port, auto=False): - if not os.path.isdir(cert_path): - os.makedirs(cert_path) - # generate a root certificate - if args.gen_root_cert: - if auto: - c = 'n' - else: - c = raw_input(_("Enter the certificate date manually? 
[y]/n: ")) - from M2Crypto import X509 - - name = X509.X509_Name() - - ob = DataVarsCore() - ob.importCore() - if not ob.flIniFile(): - sys.exit(1) - - lang = ob.Get('os_locale_locale')[:2] - - host_name = socket.getfqdn() - from .create_cert import (passphrase_callback, generateRSAKey, - makePKey, CreateCertError, create_selfsigned_ca) - # Generating public key - rsa = generateRSAKey() - rsa.save_key(cert_path + '/root.key' + '_pub', cipher=None, - callback=passphrase_callback) - - # Generating private key - pkey = makePKey(rsa) - pkey.save_key(cert_path + '/root.key', cipher=None, - callback=passphrase_callback) - - name = {} - - if c.lower() in ['n', 'no']: - name['CN'] = host_name # (Common Name); - name['OU'] = 'www.calculate-linux.ru' # (Organization Unit); - name['O'] = 'calculate-linux' # (Organization Name); - name['L'] = host_name + ':' + str(port) # (Locality Name); - name['ST'] = 'Spb' # (State Name); - name['C'] = lang # (Country); - else: - print(_('Do not use spaces or tabs.')) - host_name = socket.getfqdn() - name['CN'] = raw_input(_('Hostname [%s] : ') % host_name) - if name['CN'] in ['', None]: - name['CN'] = host_name - name['OU'] = raw_input(_('Organization unit: ')) - if not name['OU']: - name['OU'] = '' - else: - name['OU'] = name['OU'].replace(' ', '_').replace('\t', '_') - name['O'] = raw_input(_('Organization name: ')) - if not name['O']: - name['O'] = '' - else: - name['O'] = name['O'].replace(' ', '_').replace('\t', '_') - network = _('Full network address (host:port)') - name['L'] = raw_input(network + ' [%s:%d]: ' % (host_name, port)) - if name['L'] in ['', None]: - name['L'] = host_name + ':' + str(port) - name['ST'] = raw_input(_('City: ')) - if not name['ST']: - name['ST'] = '' - else: - name['ST'] = name['ST'].replace(' ', '_').replace('\t', '_') - name['C'] = raw_input(_('Country (two letters only!) 
[%s]: ') % lang) - if not name['C']: - name['C'] = lang - - try: - create_selfsigned_ca(name, cert_path + '/root.key', cert_path + '/root.crt') - except CreateCertError: - print (_('Failed to create root certificate')) - - # add certificate in trusted - try: - with open(cert_path + '/root.crt', 'r') as fd_r: - with open(cert_path + '/ca_root.crt', 'a') as fd_w: - fd_w.write(fd_r.read()) - except IOError: - print (_('error writing to (reading from) files in directory %s') - % cert_path) - print(_("OK")) - - # use self root certificate as server certificate - elif args.use_root_cert: - if not os.path.exists(cert_path + '/root.crt'): - print(_('root certificate not found (use cl-core with ' - 'option --gen-root-cert)')) - return 1 - - print(_('Using the root certificate as the server certificate')) - # use root certificate as server certificate - ft = open(cert_path + '/root.crt', 'rb') - fd = open(cert_path + '/server.crt', 'wb') - ft.seek(0) - fd.write(ft.read()) - ft.close() - fd.close() - - ft = open(cert_path + '/root.key', 'rb') - fd = open(cert_path + '/server.key', 'wb') - ft.seek(0) - fd.write(ft.read()) - ft.close() - fd.close() - - print(_("OK")) - return 0 - - # send a certificate signing request to another server - elif args.host: - url = "https://%s:%d/?wsdl" % (args.host, port) - print(url + '\n' + _("connecting...")) - import calculate.contrib - from suds.client import Client - from .client_class import HTTPSClientsCertTransport - from urllib2 import URLError - - try: - client = Client(url, transport=HTTPSClientsCertTransport( - None, None, None)) - except (KeyboardInterrupt, URLError): - print('\n' + _("Close. 
Connection Error.")) - return 1 - - serv_host_name = client.service.get_server_host_name() - - if os.path.exists(key) and os.path.exists(cert_path + '/server.csr'): - print(_("the private key and request now exist")) - ask = raw_input(_("Create a new private key and request?") + - " y/[n]: ") - if ask.lower() in ['y', 'yes']: - new_key_req(key, cert_path, serv_host_name, port) - else: - new_key_req(key, cert_path, serv_host_name, port) - - local_ip = getIpLocal() - mac = getHwAddr() - data = readFile(cert_path + '/server.csr') - res = client.service.post_server_request(request=data, ip=local_ip, - mac=mac) - if int(res) < 0: - print(_("This server is not enabled to sign certificates!")) - return 1 - with open(cert_path + '/req_id', 'w') as fc: - fc.write(res) - print(_("Your request ID = %s") % _u8(res)) - return 0 - - # get a signed certificate from another server - elif args.root_host: - if not os.path.exists(cert_path + '/req_id'): - print (_("request not sent or file %s deleted") - % (cert_path + '/req_id')) - return 1 - req_id = readFile(cert_path + '/req_id') - - url = "https://%s:%d/?wsdl" % (args.root_host, port) - print(url + '\n' + _("connecting...")) - - import calculate.contrib - from suds.client import Client - from .client_class import HTTPSClientsCertTransport - - try: - client = Client( - url, transport=HTTPSClientsCertTransport(None, None, None)) - except KeyboardInterrupt: - print('\n' + _("Close. 
Connection Error.")) - return 1 - - request = readFile(cert_path + '/server.csr') - md5 = hashlib.md5() - md5.update(request) - md5sum = md5.hexdigest() - - result = client.service.get_server_cert(req_id, md5sum) - cert = result[0][0] - if cert == '1': - print(_('The signature request was rejected!')) - return 1 - elif cert == '2': - print(_("The signature request has not been examined yet.")) - print(_("Your request ID = %s") % req_id) - return 1 - elif cert == '3': - print(_('The signature request does not match earlier data.')) - return 1 - elif cert == '4': - print(_("The request was sent from another IP.")) - return 1 - with open(cert_path + '/server.crt', 'w') as fc: - fc.write(cert) - ca_root = result[0][1] - os.unlink(cert_path + '/req_id') - print(_('Certificate saved. Your certificate ID = %s') % req_id) - with open(cert_path + '/ca_root.crt', 'w') as fd: - if ca_root: - fd.write(ca_root) - # fd.write(cert) - if os.path.exists(cert_path + '/ca_root.crt'): - fd.write(readFile(cert_path + '/ca_root.crt')) - return 0 - - -def create_path(data_path, certbase, rights, group_rights, local_data_path): - if not os.path.exists(certbase): - if not os.path.exists(data_path + '/client_certs'): - try: - os.makedirs(data_path + '/client_certs') - except OSError: - print (_("cannot create directory %s") - % (data_path + '/client_certs')) - open(certbase, 'w').close() - - if not os.path.exists(data_path + '/conf'): - try: - os.makedirs(data_path + '/conf') - except OSError: - print(_("cannot create directory %s") % (data_path + '/conf')) - - if not os.path.exists(local_data_path + '/conf'): - try: - os.makedirs(local_data_path + '/conf') - except OSError: - print(_("cannot create directory %s") % (local_data_path + '/conf')) - - if not os.path.isfile(rights): - rights_text = '# example of content:\n' + \ - '# certificate number 2 has right to run method ' + \ - '"pid_info", and the certificate\n' + \ - '# number 1 does not have rights. 
Use key --right-add ' + \ - 'and --right-del. See man.\n' + \ - '#pid_info 2 -1\n' - fr = open(rights, 'w') - fr.write(rights_text) - fr.close() - - if not os.path.isfile(group_rights): - group_rights_text = ('# example of content:\n' - '#manager pid_info,list_pid,cl_template,install\n' - 'system_update update,setupsystem,configure') - fgr = open(group_rights, 'w') - fgr.write("%s\n"%group_rights_text) - fgr.close() - # if not os.path.exists(data_path+'/server_certs/CRL'): - # open(data_path+'/server_certs/CRL', 'w') - - -# find a id by certificate -def find_cert_id(certificate, data_path, certbase): - # Open database - if not os.path.exists(certbase): - if not os.path.exists(data_path + '/client_certs'): - try: - os.makedirs(data_path + '/client_certs') - except OSError: - print (_("cannot create directory %s") - % (data_path + '/client_certs')) - pass - temp = open(certbase, 'w') - temp.close() - - md5 = hashlib.md5() - md5.update(certificate) - md5sum = md5.hexdigest() - - cert_id = [] - with open(certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - if len(words) > 1: - # if in line present certificate id - if words[1] == md5sum: - cert_id.append(words[0]) - - cert_path = data_path + '/client_certs/' - for certId in cert_id: - if os.path.isfile(cert_path + certId + '.crt'): - fp = open(cert_path + certId + '.crt', 'r') - cert = fp.read() - fp.close() - else: - cert = 'no cert' - if certificate == cert: - return certId - return 0 - - -# find a certificate by id -def find_id_cert(cert_id, data_path): - cert_file = data_path + '/client_certs/%s.crt' % str(cert_id) - if os.path.exists(cert_file): - fp = open(cert_file, 'r') - cert = fp.read() - fp.close() - return cert - return 0 - - -# delete selected clients certificate -def del_cert(certbase, data_path, cert_id): - cert_id = str(cert_id) - ft = open(certbase + '_temp', 'w') - with open(certbase) as fd: - t = fd.read() - # See each line 
- for line in t.splitlines(): - # and each word in line - words = line.split() - # if in line present certificate id - if not words[0] == cert_id: - ft.write(line + '\n') - ft.close() - fd.close() - - ft = open(certbase + '_temp', 'rb') - fc = open(certbase, 'wb') - ft.seek(0) - fc.write(ft.read()) - ft.close() - fc.close() - os.unlink(certbase + '_temp') - try: - os.unlink(data_path + '/client_certs/' + cert_id) - except OSError: - print(_("failed to delete the certificate!")) - - -def add_right(cert_id, method, rights): - ft = open(rights + "_temp", 'w') - # open file with rights - find_perm_flag = False - with open(rights) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - if not line: - continue - flag = 0 - # and each word in line - words = line.split() - if words[0] == method: - find_perm_flag = True - for word in words: - # if in line present certificate id - if cert_id == word: - flag = 1 - if '-' + cert_id == word: - line = line.replace(' ' + word, '') - if not flag: - line += "%s " % cert_id - print("id %s - add %s" % (cert_id, method)) - ft.write(line + '\n') - fd.close() - if not find_perm_flag: - ft.write('%s %s \n' % (method, str(cert_id))) - print("id %s - add %s" % (cert_id, method)) - ft.close() - - # copy all from temp file - ft = open(rights + '_temp', 'rb') - fd = open(rights, 'wb') - ft.seek(0) - fd.write(ft.read()) - ft.close() - fd.close() - # delete temp file - os.unlink(rights + '_temp') - - -def del_right(cert_id, method, rights): - ft = open(rights + "_temp", 'w') - find_perm_flag = False - # open file with rights - with open(rights) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - flag = 0 - # and each word in line - words = line.split() - if words[0] == method: - find_perm_flag = True - for word in words: - # if in line present certificate id - if '-' + cert_id == word: - flag = 1 - if cert_id == word: - line = line.replace(' ' + word, '') - if not flag: - line += "-%s " % cert_id - 
print("id %s - remove %s" % (cert_id, method)) - ft.write(line + '\n') - fd.close() - if not find_perm_flag: - ft.write('%s -%s \n' % (method, str(cert_id))) - print("id %s - remove %s" % (cert_id, method)) - ft.close() - - # copy all from temp file - ft = open(rights + '_temp', 'rb') - fd = open(rights, 'wb') - ft.seek(0) - fd.write(ft.read()) - ft.close() - fd.close() - # delete temp file - os.unlink(rights + '_temp') - - -# add or delete rights a selected certificate -def change_rights_cert(cert_id, right_add, right_del, - rights, group_rights, certbase): - list_id = [] - if not cert_id == 'all': - try: - int(cert_id) - list_id.append(cert_id) - except ValueError: - ls_id = cert_id.split(',') - try: - for i in ls_id: - int(i) - list_id.append(i) - except ValueError: - print(_('to change permissions, the certificate number must ' - 'be integer')) - return 1 - - elif cert_id == 'all': - with open(certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - # if in line present certificate id - list_id.append(words[0]) - - for cert_id in list_id: - cert_id = str(cert_id) - - if right_add: - if not os.path.exists(rights): - print(_('file %s not found!') % rights) - return 1 - ls_rig_add = right_add.split(',') - for meth in ls_rig_add: - add_right(cert_id, meth, rights) - - if right_del: - if not os.path.exists(rights): - print(_('file %s not found!') % rights) - return 1 - ls_rig_del = right_del.split(',') - for meth in ls_rig_del: - del_right(cert_id, meth, rights) - - -# Detailed view clients certificates -def view_cert_info(cert, cert_id, rights, group_rights): - import OpenSSL - - certobj = OpenSSL.crypto.load_certificate(OpenSSL.SSL.FILETYPE_PEM, cert) - print(certobj.get_extension(certobj.get_extension_count() - 1)) - print(_("Fingerprint = "), certobj.digest('SHA1')) - print(_("Serial number = "), certobj.get_serial_number()) - issuer = certobj.get_issuer().get_components() - print('\n' + 
_("Issuer")) - for i in issuer: - print(" %s : %s" % (i[0], i[1])) - subject = certobj.get_subject().get_components() - print('\n' + _("Subject")) - for item in subject: - print(" %s : %s" % (item[0], item[1])) - - print('\n' + _("Permissions: ")) - - certobj = OpenSSL.crypto.load_certificate(OpenSSL.SSL.FILETYPE_PEM, cert) - com = certobj.get_extension(certobj.get_extension_count() - 1).get_data() - groups = com.split(':')[1] - groups_list = groups.split(',') - - results = [] - if not os.path.exists(group_rights): - open(group_rights, 'w').close() - - with open(group_rights) as fd: - t = fd.read() - fd.close() - for line in t.splitlines(): - if not line: - continue - words = line.split(' ', 1) - # first word in line equal name input method - if words[0] in groups_list: - methods = words[1].split(',') - for i in methods: - results.append(i.strip()) - - results = uniq(results) - - add_list_rights = [] - del_list_rights = [] - - with open(rights) as fr: - t = fr.read() - for line in t.splitlines(): - words = line.split() - meth = words[0] - for word in words: - try: - word = int(word) - except ValueError: - continue - # compare with certificat number - if cert_id == word: - # if has right - add_list_rights.append(meth) - if cert_id == -word: - del_list_rights.append(meth) - - if 'all' in groups_list: - sys.stdout.write(' ' + _('all methods')) - sys.stdout.flush() - if del_list_rights: - sys.stdout.write(_(', except:') + '\n') - sys.stdout.flush() - for meth in del_list_rights: - print(' ' + meth) - return - - results += add_list_rights - results = uniq(results) - - for method in results: - if method in del_list_rights: - results.remove(method) - - if not results: - print(_("No methods available")) - else: - for meth in results: - print(' ' + meth) - - -# View, change rights, delete clients certificates on server -def view_cert(args, certbase, data_path, rights, group_rights): - cert_id = args.Id - dump = args.dump - remove = args.remove - right_add = args.right_add - 
right_del = args.right_del - - for i in [right_add, right_del]: - if i: - change_rights_cert(cert_id, right_add, right_del, - rights, group_rights, certbase) - return 0 - - if not os.path.exists(certbase): - fc = open(certbase, "w") - fc.close() - if cert_id == 'all': - count = 0 - with open(certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - count += 1 - cert = find_id_cert(words[0], data_path) - if cert == 0: - count -= 1 - continue - print(_("Certificate ID = %s") % words[0]) - if dump: - cert = find_id_cert(words[0], data_path) - if cert == 0: - count -= 1 - continue - view_cert_info(cert, words[0], rights, group_rights) - print("#############################################\n") - print(_("Total: %d certificates.") % count) - - if remove: - answer = raw_input( - _("Are you sure? Delete all client certificates?") + ' y/[n]: ') - if answer.lower() in ['y', 'yes']: - fc = open(certbase, "w") - fc.close() - return 0 - try: - cert_id = int(cert_id) - except ValueError: - print(_("certificate number not int and not 'all'")) - return 1 - - cert = find_id_cert(cert_id, data_path) - - if not cert: - print(_("Certificate not found")) - return 1 - if dump: - print(cert) - else: - view_cert_info(cert, cert_id, rights, group_rights) - - if remove: - answer = raw_input( - _("Delete the client certificate with ID %d? 
y/[n]: ") % cert_id) - if answer.lower() in ['y', 'yes']: - del_cert(certbase, data_path, cert_id) - print(_("Deleted")) - return 0 - - -# Sign client request by server certificate -def sing_req_by_server(id_client_req, cert_path, data_path, auto=False, - group_name="all"): - server_cert = cert_path + '/root.crt' - server_key = cert_path + '/root.key' - - if id_client_req: - try: - int(id_client_req) - except ValueError: - print(_("The certificate number must be int")) - return 1 - cl_req = data_path + '/client_certs/%s.csr' % id_client_req - cl_cert = data_path + '/client_certs/%s.crt' % id_client_req - if not os.path.exists(cl_req): - print(_("Signature request %s not found") % cl_req) - return 1 - - if os.path.exists(cl_cert): - print(_("Certificate %s now exists") % cl_cert) - return 1 - - if not auto: - group_name = "" - while not group_name.split(): - group_name = "%s" % raw_input( - _("Enter the group of the new certificate " - "(group name or 'all'): ")) - config = data_path + '/client_certs/ssl-client.cfg' - if os.path.exists(config): - os.unlink(config) - - from .create_cert import (sign_client_certifacation_request, - CreateCertError) - - try: - sign_client_certifacation_request( - server_key, server_cert, cl_req, cl_cert, group_name) - except CreateCertError: - print (_('Failed to sign client certificate')) - - # print 'startdate = ', startdate - # cfg_text = (#"[ ssl_client ]\n" - # "basicConstraints = CA:FALSE\n" - # "nsCertType = client\n" - # "keyUsage = digitalSignature, keyEncipherment\n" - # "extendedKeyUsage = clientAuth\n" - # "nsComment = %s\n" %group) - # print 'config = ', cfg_text - # fc = open(config, 'w') - # fc.write(cfg_text) - # fc.close() - - # cmd=("openssl x509 -req -days 11000 -CA %s -CAkey %s -CAcreateserial " - # "-extfile %s -extensions ssl_client -in %s -out %s") \ - # %(server_cert, server_key, config, cl_req, cl_cert) - # print cmd - # PIPE = subprocess.PIPE - # p = subprocess.Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, - 
# stderr=subprocess.STDOUT, close_fds=True) - # p.wait() - if os.path.exists(cl_cert): - print(_("Certificate %s is signed") % cl_cert) - else: - print(_("Certificate %s has not been signed") % cl_cert) - return 0 - - -# Sign server request by root certificate -def sing_req_by_root(args, cert_path, data_path): - root_cert = cert_path + '/root.crt' - root_key = cert_path + '/root.key' - - if not os.path.exists(root_cert) or not os.path.exists(root_key): - print(_("Root certificate or private key not found")) - print(_("see %s") % cert_path) - return 1 - - if args.id_server_req: - try: - int(args.id_server_req) - except ValueError: - print(_("The certificate number must be int")) - return 1 - sign_req = data_path + '/server_certs/%s.csr' % args.id_server_req - sign_cert = data_path + '/server_certs/%s.crt' % args.id_server_req - if not os.path.exists(sign_req): - print(_("Signature request %s not found") % sign_req) - return 1 - - if os.path.exists(sign_cert): - print(_("Certificate %s now exists") % sign_cert) - return 1 - - # config = cert_path + '/ssl-server-ca.cfg' - # if not os.path.exists(config): - # cfg_text = ("[ ssl_server_ca ]\nsubjectKeyIdentifier=hash\n" - # "authorityKeyIdentifier = keyid, issuer\n" - # "basicConstraints = CA:true\n" - # "nsCertType = server\n" - # "keyUsage = keyCertSign, digitalSignature, keyEncipherment, " - # "cRLSign, nonRepudiation\n" - # "extendedKeyUsage = serverAuth, clientAuth, nsSGC, msSGC") - # fc = open(config, 'w') - # fc.write(cfg_text) - # fc.close() - - # cmd=("openssl x509 -req -days 11000 -CA %s -CAkey %s -CAcreateserial " - # "-extfile %s -extensions ssl_server_ca -in %s -out %s") \ - # %(root_cert, root_key, config, sign_req, sign_cert) - cmd = ("openssl x509 -req -days 11000 -CA %s -CAkey %s -CAcreateserial " - "-in %s -out %s") % (root_cert, root_key, sign_req, sign_cert) - - print(cmd) - PIPE = subprocess.PIPE - p = subprocess.Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, - stderr=subprocess.STDOUT, 
close_fds=True) - p.wait() - print(_("Certificate %s is signed") % sign_cert) - return 0 - - -# Detailed view server signed certificates -def view_signed_cert_info(cert_id, serv_certbase, data_path, mid_path): - cert_file = data_path + '/%s/%d.crt' % (mid_path, cert_id) - print(cert_file) - import OpenSSL - - if os.path.exists(cert_file): - fp = open(cert_file, 'r') - cert = fp.read() - fp.close() - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, cert) - if mid_path == 'client_certs': - print(certobj.get_extension(certobj.get_extension_count() - 1)) - print(_("Fingerprint = "), certobj.digest('SHA1')) - print(_("Serial number = "), certobj.get_serial_number()) - issuer = certobj.get_issuer().get_components() - print('\n' + _("Issuer")) - for i in issuer: - print("%s : %s" % (i[0], i[1])) - subject = certobj.get_subject().get_components() - print('\n' + _("Subject")) - for item in subject: - print("%s : %s" % (item[0], item[1])) - - if not os.path.exists(serv_certbase): - fc = open(serv_certbase, "w") - fc.close() - - with open(serv_certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - # if in line present certificate id - if words[0] == str(cert_id): - print('\n' + _('request sent from:')) - print('IP: %s' % words[4]) - print('MAC: %s' % words[5]) - print(_('date') + ' - %s %s' % (words[2], words[3])) - break - - else: - print(_("Certificate not found!")) - - print("\n###################################################\n") - - # Detailed view server request - req_file = data_path + '/%s/%d.csr' % (mid_path, cert_id) - - print(req_file) - - if os.path.exists(req_file): - fp = open(req_file, 'r') - request = fp.read() - fp.close() - - reqobj = OpenSSL.crypto.load_certificate_request( - OpenSSL.SSL.FILETYPE_PEM, request) - subject = reqobj.get_subject().get_components() - print('\n' + _("Subject")) - for item in subject: - print(" %s : %s" % (item[0], item[1])) - - if 
not os.path.exists(serv_certbase): - fc = open(serv_certbase, "w") - fc.close() - - with open(serv_certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - # if in line present certificate id - if words[0] == str(cert_id): - print('\n' + _('request sent from:')) - print('IP: %s' % words[4]) - print('MAC: %s' % words[5]) - print(_('date') + ' - %s %s' % (words[2], words[3])) - break - else: - print(_("Request for signature not found!")) - return 0 - - -# view servers signed certificates -def view_signed_cert(args, serv_certbase, data_path): - cert_id = args.cert_id - # dump = args.dump - # remove = args.remove - - if cert_id == 'all': - if not os.path.exists(serv_certbase): - fc = open(serv_certbase, "w") - fc.close() - count = 0 - with open(serv_certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - cert = '%s/server_certs/%s.crt' % (data_path, words[0]) - req = '%s/server_certs/%s.csr' % (data_path, words[0]) - if os.path.exists(cert): - print('Certificate \t', cert) - count += 1 - if os.path.exists(req): - if not os.path.exists(cert): - print(_('Request \t%s: not signed') % req) - else: - print(_('Request \t%s') % req) - count += 1 - if not count: - print(_("Certificate or request not found!")) - return 0 - - try: - cert_id = int(cert_id) - except ValueError: - print(_("certificate (request) number not int and not 'all'")) - return 1 - view_signed_cert_info(cert_id, serv_certbase, data_path, 'server_certs') - return 0 - - -# View clients requests on server -def view_client_request(args, client_certbase, data_path): - req_id = args.req_id - if req_id == 'all': - if not os.path.exists(client_certbase): - fc = open(client_certbase, "w") - fc.close() - count = 0 - with open(client_certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - cert = 
'%s/client_certs/%s.crt' % (data_path, words[0]) - req = '%s/client_certs/%s.csr' % (data_path, words[0]) - if os.path.exists(cert): - print(_('Certificate \t'), cert) - count += 1 - if os.path.exists(req): - if not os.path.exists(cert): - print(_('Request \t%s: not signed') % req) - else: - print(_('Request \t%s') % req) - count += 1 - if not count: - print(_("Certificate or request not found!")) - return 0 - - try: - req_id = int(req_id) - except ValueError: - print(_("certificate (request) number not int and not 'all'")) - return 1 - view_signed_cert_info(req_id, client_certbase, data_path, 'client_certs') - return 0 - - -# refuse to sign request -def del_request(id_del_serv_req, id_del_client_req, serv_certbase, - client_certbase, data_path): - if id_del_serv_req: - sub_path = 'server_certs' - id_del_req = id_del_serv_req - certbase = serv_certbase - else: - sub_path = 'client_certs' - id_del_req = id_del_client_req - certbase = client_certbase - try: - int(id_del_req) - except ValueError: - print(_("The ID must be int.")) - return 1 - - request = os.path.join(data_path, sub_path, '%s.csr' % id_del_req) - cert = os.path.join(data_path, sub_path, '%s.crt' % id_del_req) - - # chect exists request and certificate files - print(request) - if not os.path.exists(request) and not os.path.exists(cert): - print(_("Request or certificate with ID = %s not found!") % id_del_req) - return 1 - - if not os.path.exists(request): - print(_("request %s not found!") % request) - - if os.path.exists(cert): - print(_("Request duly signed")) - ask = raw_input( - _("Delete the certificate and the signature request? 
y/[n]: ")) - if not ask.lower() in ['y', 'yes']: - print(_("Not deleted")) - return 0 - - # create temp file - if not os.path.isfile(certbase): - fd = open(certbase, 'w') - fd.close() - ft = open(certbase + '_temp', 'w') - with open(certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - # if in line present certificate id - if not words[0] == id_del_req: - ft.write(line + '\n') - ft.close() - fd.close() - - ft = open(certbase + '_temp', 'rb') - fc = open(certbase, 'wb') - ft.seek(0) - fc.write(ft.read()) - ft.close() - fc.close() - os.unlink(certbase + '_temp') - try: - if os.path.exists(request): - os.unlink(request) - print(_("signature request deleted")) - if os.path.exists(cert): - os.unlink(cert) - print(_("certificate deleted")) - except OSError: - print(_("failed to delete the file!")) - - -def revoke_signed_cert(revoke_cert_id, data_path, cert_path): - CRL = data_path + '/server_certs/ca.crl' - CRL_mid_dir = "/server_certs/CRL/" - CRL_db_dir = data_path + CRL_mid_dir - - if revoke_cert_id == 'rm': - if os.path.exists(CRL_db_dir): - for filename in glob.glob(CRL_db_dir + "*"): - os.unlink(filename) - - if os.path.exists(CRL): - os.unlink(CRL) - print(_("CRL deleted")) - return 0 - print(_("CRL does not exist")) - return 0 - - try: - int(revoke_cert_id) - except ValueError: - print(_("The ID of a certificate to be revoked must be integer!")) - return 1 - - cert_file = data_path + "/server_certs/%s.crt" % revoke_cert_id - if not os.path.exists(cert_file): - print(_("Certificate %s not found") % cert_file) - return 1 - - if not os.path.exists(CRL_db_dir): - os.makedirs(CRL_db_dir) - - index_file = CRL_db_dir + 'index' - if not os.path.exists(index_file): - open(index_file, 'w').close() - - serial_file = CRL_db_dir + 'serial' - if not os.path.exists(serial_file): - open(serial_file, 'w').close() - - default_crl_days = 14 - - conf_file = data_path + "/server_certs/ca.config" - # if not 
os.path.exists (conf_file): - content_conf = ("[ ca ]\ndefault_ca = CA_CLIENT\n" - "[ CA_CLIENT ]\n" - "dir = %s\n" - "database = %s\n" - "serial = %s\n" - "certificate = %s\n" - "private_key = %s\n" - # "default_days = 365" - "default_crl_days = %d\n" - "default_md = md5\n") % ( - CRL_db_dir, index_file, - serial_file, cert_path + '/root.crt', - cert_path + '/root.key', - default_crl_days) - fd = open(conf_file, 'w') - fd.write(content_conf) - fd.close() - - # server_cert = open (cert_file, 'r').read() - - cmd_rev_cert = "openssl ca -config %s -revoke %s" % (conf_file, cert_file) - os.system(cmd_rev_cert) - - cmd_create_crl = "openssl ca -gencrl -config %s -out %s" % (conf_file, CRL) - os.system(cmd_create_crl) - - print() - cmd_show_crl = "openssl crl -text -noout -in %s" % CRL - os.system(cmd_show_crl) - - -################################################################# -def parse(full=False): - import argparse - from .methods_func import RawAndDefaultsHelpFormatter - - parser = argparse.ArgumentParser( - add_help=False, formatter_class=RawAndDefaultsHelpFormatter) - parser.add_argument( - '-h', '--help', action='store_true', default=False, - dest='help', help=_("show this help message and exit")) - if full: - parser.add_argument( - '--start', action='store_true', default=False, dest='start', - help=_('server started')) - parser.add_argument( - '--port', type=int, dest='port', - help=_('port number')) - parser.add_argument( - '--pid-file', type=str, dest='pidfile', - help=_('Specify the location of the PID file')) - parser.add_argument( - '--close-on-inactive', action='store_true', dest='inactiveclose', - default=False, - help=_('stop the server if there are no active sessions')) - parser.add_argument( - '--cert', type=str, dest='Id', - help=_('operations with certificates (number or "all"). 
Server not' - ' running.')) - parser.add_argument( - '--show-request', type=str, dest='req_id', - help=_("view client requests (number or \"all\")")) - parser.add_argument( - '--sc', '--server-cert', type=str, dest='cert_id', - help=_('view servers certificates (number or "all"). ' - 'Server not running.')) - parser.add_argument( - '-b', '--bootstrap', action='store_true', default=False, - dest='bootstrap_user_name', help=_('bootstrap action')) - parser.add_argument( - '-u', '--create-localuser-cert', type=str, dest='cert_user_name', - help=_('create certificate for local user')) - parser.add_argument( - '--clear-localuser-cert', action='store_true', default=False, - dest='clear_user_cert', - help=_('remove all local user certificates')) - parser.add_argument( - '--remove-certs', action='store_true', default=False, - dest='remove_certificates', - help=_('use with option --bootstrap to remove ' - 'all certificates, requests and config files on the server')) - parser.add_argument( - '--dump', action='store_true', default=False, dest='dump', - help=_('dump (use with option -c [ID])')) - parser.add_argument( - '--db', '--debug', action='store_true', default=False, dest='debug', - help=_('debug')) - if full: - parser.add_argument( - '--rm', '--remove', action='store_true', default=False, - dest='remove', - help=_('remove the selected certificate (use with option -c [ID])')) - parser.add_argument( - '--right-add', type=str, dest='right_add', - help=_('add permissions for a certificate (or certificates, ' - 'comma-separated) (use with option -c [ID])')) - parser.add_argument( - '--right-del', type=str, dest='right_del', help=_( - 'remove permissions for a certificate ' - '(use with option -c [ID])')) - parser.add_argument( - '--gen-root-cert', action='store_true', default=False, - dest='gen_root_cert', help=_('generate a root (or CA) certificate')) - parser.add_argument( - '--gen-cert-by', type=str, dest='host', - help=_("send a signature request for the root 
certificate")) - parser.add_argument( - '--get-cert-from', type=str, dest='root_host', - help=_('fetch the signed certificate from the server')) - parser.add_argument( - '--use-root-as-server', action='store_true', default=False, - dest='use_root_cert', help=_('use the root certificate as the ' - 'server certificate')) - parser.add_argument( - '--sign-client', type=str, dest='id_client_req', - help=_("sign the client's request with the server certificate")) - parser.add_argument( - '--sign-server', type=str, dest='id_server_req', - help=_("sign the server's request with the root certificate")) - parser.add_argument( - '--del-server-req', type=str, dest='id_del_req', - metavar='id_req', help=_("refuse to sign the certificate at " - "the server's request")) - parser.add_argument( - '--del-client-req', type=str, dest='id_del_client_req', - metavar='id_req', help=_("decline client's request for signature")) - parser.add_argument( - '--rv', '--revoke-cert', type=str, dest='revoke_cert_id', help=_( - 'revoke an earlier signed server (or CA) certificate. 
' - 'Run rm to remove the CRL')) - parser.add_argument( - '--method', type=str, dest='method', help=_('call method')) - parser.add_argument( - '--list-methods', action='store_true', default=False, - dest='list_methods', help=_('display all available methods')) - parser.add_argument( - '--no-progress', action='store_true', default=False, - dest='no_progress', help=_('do not display the progress bar')) - parser.add_argument( - '--gui-progress', action='store_true', default=False, - dest='gui_progress', help=_("display the GUI progress bar")) - parser.add_argument( - '--gui-warning', action='store_true', default=False, - dest='gui_warning', help=_('display warnings at the end')) - parser.add_argument( - '-f', '--force', action='store_true', default=False, - dest='no_questions', help=_('silent during the process')) - parser.add_argument( - '-P', action='store_true', default=False, - dest='stdin_passwd', - help=_('use passwords from standard input for users accounts')) - if full: - parser.add_argument( - '--check', action='store_true', default=False, dest='check', - help=_('configuration check')) - parser.add_argument( - '--create-symlink', action='store_true', default=False, - dest='create_symlink', help=_("Create symlinks for methods")) - parser.add_argument( - '--log-path', type=str, dest='log_path', - help=_('path to log files')) - parser.add_argument( - '--version', action='store_true', default=False, - dest='version', help=_('print the version number, then exit')) - return parser diff --git a/libs_crutch/core/server/certificate.py b/libs_crutch/core/server/certificate.py deleted file mode 100644 index 9caed80..0000000 --- a/libs_crutch/core/server/certificate.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.core.server.func import MethodsInterface - -from calculate.lib.cl_lang import getLazyLocalTranslate, setLocalTranslate -from calculate.lib.utils.common import getPagesInterval - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class Certificate(MethodsInterface): - """ - Объект работы с сертификатами - """ - - def show_certs_meth(self, page_count, page_offset): - """ - Отобразить таблицу с сертификатами - """ - dv = self.clVars - list_cert_id = dv.Get('cl_list_cert_id') - try: - list_cert_id.sort(key=lambda x: int(x)) - except ValueError: - list_cert_id.sort() - - if not list_cert_id: - self.printSUCCESS(_("No certificates")) - - head = [_('Certificates'), _('Groups'), _('Permissions')] - body = [] - fields = ['cl_cert_id', ''] - - for cert in list_cert_id[page_offset:page_offset + page_count]: - dv.Set('cl_cert_id', cert) - group_rights = ', '.join(dv.Get('cl_cert_perms')) - cert_groups = ', '.join(dv.Get('cl_cert_groups')) - body.append([str(cert), cert_groups, group_rights]) - - if body: - self.printTable(_("List of certificates"), head, - body, fields=fields, - onClick='core_detail_view_cert') - num_page, count_page = getPagesInterval( - page_count, page_offset, - len(list_cert_id)) - self.printSUCCESS(_('page %d from ') % num_page + str(count_page)) - return True diff --git a/libs_crutch/core/server/cl_server.py b/libs_crutch/core/server/cl_server.py deleted file mode 100644 index daab782..0000000 --- a/libs_crutch/core/server/cl_server.py +++ /dev/null @@ -1,384 
+0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function -from __future__ import absolute_import -import sys -import os -from calculate.lib.utils.tools import unpack_single_opts -from . import cert_cmd -import pwd -from calculate.core.server.func import clearDataVars - -from calculate.lib.cl_lang import setLocalTranslate - -import calculate.contrib -# from spyne.protocol.http import HttpRpc -# from spyne.protocol.xml import XmlDocument -# from spyne.protocol.json import JsonDocument -from spyne.protocol.soap import Soap11 -from spyne import Application -# from spyne.server.wsgi import WsgiApplication - -from .spyne_adapter import CoreInnerWsdl, make_service - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - -_("User must be root") -_('Failed to import %s') -_('No module named %s') - - -@clearDataVars -def main(*args, **keywords): - _args = list(unpack_single_opts(sys.argv[1:])) - if os.path.basename(sys.argv[0]) != 'cl-core': - parser = cert_cmd.parse(full=False) - args, unknown_args = parser.parse_known_args(_args) - args.method = '_temp_' - else: - parser = cert_cmd.parse(full=True) - args, unknown_args = parser.parse_known_args(_args) - if args.method: - parser = cert_cmd.parse(full=False) - args, unknown_args = parser.parse_known_args(_args) - if not args.method: - if unknown_args: - args = 
parser.parse_args(_args) - - if args.help and not args.method: - parser.print_help() - return 0 - - from calculate.core.datavars import DataVarsCore - - ob = DataVarsCore() - ob.importCore() - - # set var env - if not ob.flIniFile(): - sys.exit(1) - # cl_wsdl = ob.Get('cl_wsdl') - cl_wsdl = ob.Get('cl_wsdl_available') - data_path = ob.Get('cl_core_data') - local_data_path = ob.Get('cl_core_local_data') - certbase = ob.Get('cl_core_database') - serv_certbase = ob.Get('cl_core_serv_database') - rights = ob.Get('cl_core_rights') - group_rights = ob.Get('cl_core_group_rights_path') - sids = ob.Get('cl_core_sids_path') - pids = ob.Get('cl_core_pids_path') - sids_pids = ob.Get('cl_core_sids_pids') - sids_file = ob.Get('cl_core_sids_file') - pids_file = ob.Get('cl_core_pids_file') - max_sid = ob.Get('cl_core_max_sid') - max_pid = ob.Get('cl_core_max_pid') - cert_path = ob.Get('cl_core_cert_path') - cert = ob.Get('cl_core_cert') - key = ob.Get('cl_core_key') - cl_ver = ob.Get('cl_ver') - log_path_var = ob.Get('cl_log_path') - cl_core_port = ob.GetInteger('cl_core_port') - port = cl_core_port - file_logger = None - - # создать симлинки на команды - if not args.method and args.create_symlink: - from .func import create_symlink, initialization - - initialization(cl_wsdl) - create_symlink(local_data_path, data_path) - return 0 - - if args.version: - print(cl_ver) - return 0 - - log_filename = None - - if ob.Get('cl_ebuild_phase') == '' and os.getuid() == 0: - import logging - import logging.handlers - - # logging.raiseExceptions = 0 - - log_path = args.log_path if args.log_path else log_path_var - - if not os.path.exists(log_path): - os.makedirs(log_path) - log_filename = os.path.join(log_path, 'logging_cl_core.out') - file_logger = logging.getLogger('MyLogger') - file_logger.setLevel(logging.DEBUG) - - # Add the log message handler to the logger - try: - handler = logging.handlers.RotatingFileHandler( - log_filename, maxBytes=10000000, backupCount=3) - - 
file_logger.addHandler(handler) - - # debug - if args.debug: - logging.basicConfig(level=logging.DEBUG) - logger = logging.getLogger('spyne.server.wsgi') - logger.setLevel(logging.DEBUG) - except IOError: - pass - - from urllib2 import URLError - - from traceback import print_exc - - ob.close() - if not args.method: - try: - port = args.port or cl_core_port - if args.check: - from . import bootstrap - - bootstrap.check(cert, key) - return 0 - - if args.bootstrap_user_name: - from . import bootstrap - bootstrap.init(cert, key, cert_path, data_path, certbase, args, - port) - if not args.cert_user_name: - return 0 - if args.clear_user_cert: - from . import bootstrap - bootstrap.clear_localuser_certificates(certbase) - if not args.cert_user_name: - return 0 - if args.cert_user_name: - cert_user_name = args.cert_user_name - try: - pwd.getpwnam(cert_user_name) - except KeyError: - print(_("User %s does not exist") % cert_user_name) - return 1 - - from . import bootstrap - bootstrap.force_user_cert(cert, cert_path, data_path, - certbase, cert_user_name, dv=ob) - return 0 - if args.revoke_cert_id: - cert_cmd.revoke_signed_cert(args.revoke_cert_id, data_path, - cert_path) - return 0 - if (args.host or args.gen_root_cert or args.root_host or - args.use_root_cert): - cert_cmd.check_server_certificate(cert, key, cert_path, args, - port) - return 0 - if args.id_client_req: - cert_cmd.sing_req_by_server(args.id_client_req, cert_path, - data_path) - return 0 - if args.Id: - cert_cmd.view_cert(args, certbase, data_path, rights, - group_rights) - return 0 - if args.cert_id: - cert_cmd.view_signed_cert(args, serv_certbase, data_path) - return 0 - if args.req_id: - cert_cmd.view_client_request(args, certbase, data_path) - return 0 - # Sign request by root certificate - if args.id_server_req: - cert_cmd.sing_req_by_root(args, cert_path, data_path) - return 0 - if args.id_del_req or args.id_del_client_req: - cert_cmd.del_request(args.id_del_req, args.id_del_client_req, - serv_certbase, 
certbase, data_path) - return 0 - except BaseException as e: - from urllib2 import URLError - - if isinstance(e, URLError) and log_filename: - if file_logger: - fd = open(log_filename, 'a') - file_logger.debug(print_exc(file=fd)) - fd.close() - print(e) - else: - raise - params_list = ["start", "create_symlink", "method", - "list_methods"] - for param in params_list: - if hasattr(args, param) and getattr(args, param): - break - else: - parser.print_help() - return 0 - ##################### - # importing other modules - from .func import initialization - - outer_wsdl_classes = initialization(cl_wsdl) - - pack = "calculate.core.server" - import importlib - - func_metaclass = importlib.import_module('%s.func_metaclass' % pack) - core_wsdl_classes = [] - core_wsdl_classes.append(func_metaclass.Func_MetaClass) - - from calculate.core.server.baseClass import Basic - # make server metaclass - if args.method or args.list_methods: - from .local_call import local_method, LocalCall - - ClService = CoreInnerWsdl("ClService", - tuple([LocalCall] + outer_wsdl_classes + [Basic] + core_wsdl_classes), - { - "__metaclass__" : CoreInnerWsdl - }) - tc = ClService() - tc.set_comb_class_ref(tc) - return local_method(tc, args, unknown_args) - - ClService = make_service(Basic, core_wsdl_classes, outer_wsdl_classes, "ClService") - - from .server_class import ClApplication, OpenSSLAdapter - #do we even need this anymore? 
- tc = ClService() - App = Application([ClService], 'tns', - name="ClApplication", - in_protocol=Soap11(), - out_protocol=Soap11(), - ) - # delete all sid and pid informations file - wsgi_application = ClApplication(App, log=file_logger) - cert_cmd.create_path(data_path, certbase, rights, group_rights, - local_data_path) - - # set all path - tc.set_paths(data_path, certbase, serv_certbase, - rights, group_rights, sids, pids, sids_pids, - sids_file, pids_file, max_sid, - max_pid, cert_path, log_filename, - cert, key) - - tc.set_comb_class_ref(tc) - tc.run_tasks() - - - - max_num = 99999999 - import calculate.contrib - from cherrypy.wsgiserver import CherryPyWSGIServer, WSGIPathInfoDispatcher - - dispatcher = WSGIPathInfoDispatcher({'': wsgi_application}) - server = CherryPyWSGIServer(('0.0.0.0', port), dispatcher, - numthreads=10, max=max_num, - request_queue_size=max_num) - - # logger = logging.getLogger("spyne.application") - # logger.setLevel(0) - print(_("listening to https://0.0.0.0:%d") % port) - print(_("wsdl is located at: https://0.0.0.0:%d/?wsdl") % port) - - - - - ca_cert = cert_path + "/ca_root.crt" - if not os.path.exists(ca_cert): - ca_cert = None - - ssl_adapter = OpenSSLAdapter(cert, key, ca_cert) - - ssl_adapter.certbase = certbase - - server.ssl_adapter = ssl_adapter - server.certbase = certbase - server.serv_certbase = serv_certbase - server.rights = rights - server.group_rights = group_rights - server.sids = sids - server.pids = pids - server.sids_file = sids_file - server.pids_file = pids_file - server.data_path = data_path - server.cert_path = cert_path - - server.ssl_certificate = cert - server.ssl_private_key = key - from OpenSSL.SSL import Error as SSLError - import socket - - try: - if args.pidfile: - try: - with open(args.pidfile, "w") as f: - f.write(str(os.getpid())) - except OSError: - sys.stderr.write(_("failed to create PID file %s") - % args.pidfile + "\n") - sys.exit(1) - # For cleaning of sessions at server reboot - from .clean 
import clean - from .gen_pid import clear_finished_pids - from calculate.lib.utils.files import writeFile - - clean(sids_file, pids_file, sids_pids, sids, pids) - clear_finished_pids(ob) - print(_("Server started")) - dbus_stop_file = ob.Get('cl_core_dbus_stop_path') - if args.inactiveclose: - try: - writeFile(dbus_stop_file).close() - except (OSError, IOError): - pass - else: - if os.path.exists(dbus_stop_file): - os.unlink(dbus_stop_file) - server.start() - except KeyboardInterrupt: - try: - ClService.killall() - except KeyboardInterrupt: - pass - print('\n' + _("Server stopped")) - server.stop() - sys.exit(0) - except socket.error as e: - if e.message == "No socket could be created": - print(_("No socket could be created")) - print(_('Port %d already in use') % port) - else: - if file_logger: - fd = open(log_filename, 'a') - file_logger.debug(print_exc(file=fd)) - fd.close() - print(e) - except SSLError: - print('\n', _('Server certificate not found')) # , e - print(_("use cl-core with option --gen-cert-by HOST " - "(--get-cert-from HOST) or --use-root-as-server)")) - except Exception: - if file_logger: - fd = open(log_filename, 'a') - file_logger.debug(print_exc(file=fd)) - fd.close() - - server.stop() - if args.pidfile: - if os.path.exists(args.pidfile): - os.unlink(args.pidfile) - sys.exit(0) diff --git a/libs_crutch/core/server/clean.py b/libs_crutch/core/server/clean.py deleted file mode 100644 index c2bcb4d..0000000 --- a/libs_crutch/core/server/clean.py +++ /dev/null @@ -1,191 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function -import os -import glob -import sys -import time -import datetime -import pickle -from calculate.core.datavars import DataVarsCore -from calculate.core.server.core_interfaces import CoreServiceInterface -from calculate.lib.cl_lang import setLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -def clean(sid_file, pid_file, sid_pid, sids_dir, pids_dir): - """ - Удалить все файлы сервера после перезапуска - """ - for fn in (sid_file, pid_file, sid_pid): - if os.path.exists(fn): - try: - os.unlink(fn) - except OSError: - pass - - for dn in (sids_dir, pids_dir): - if os.path.isdir(dn): - for filename in glob.glob(os.path.join(dn, "*.sid")): - try: - os.unlink(filename) - except OSError: - pass - - -class CoreWsdl(CoreServiceInterface): - # watch for process - @staticmethod - def watcher_pid_proc(cls, sid, pid): - period = 2 - time.sleep(period) - try: - # while process status "Active" - while cls.glob_process_dict[pid]['status'] == 1: - # frequency check - time.sleep(period) - - cls.delete_pid(cls, sid, pid) - except IOError as e: - print('Except IOError', str(e)) - except Exception: - print(_("PID %d watcher error") % pid) - try: - cls.delete_pid(cls, sid, pid) - except Exception: - pass - time.sleep(0.1) - - @staticmethod - def delete_pid(cls, sid, pid): - while len(cls.glob_frame_list[pid]) > \ - cls.glob_process_dict[pid]['counter']: - time.sleep(1) - methodname = cls.glob_process_dict[pid]['method_name'] - if methodname: - cls.clear_cache(sid, methodname) - cls.del_pid(cls, 
pid) - cls.del_pid_from_sid_pid(cls, pid) - - -def monitor(certbase, sid_file): - """ function to delete old session """ - # Get value of period and lifetime session from DataVars - try: - ob = DataVarsCore() - ob.importCore() - - if not ob.flIniFile(): - sys.exit(1) - period = float(ob.Get('cl_core_monitor_period')) - sid_live = float(ob.Get('cl_core_sid_live')) - except Exception: - print(_("Variable cl_core_monitor_period or cl_core_sid_live not " - "found")) - raise - # Check lifetime. if necessary, remove - while True: - # check session - try: - sid_file_t = sid_file + '_temp' - fd = open(sid_file, 'r') - ft = open(sid_file_t, 'w') - while 1: - try: - # read all on one record - list_sid = pickle.load(fd) - except (EOFError, KeyError, IOError): - break - # how time exists session - delta = datetime.datetime.now() - list_sid[2] - # if not outdated, then leave - if delta.seconds < sid_live * 60: - pickle.dump(list_sid, ft) - fd.close() - ft.close() - - # copy all from temp file - ft = open(sid_file_t, 'rb') - fd = open(sid_file, 'wb') - ft.seek(0) - fd.write(ft.read()) - ft.close() - fd.close() - - # Delete temp file - os.unlink(sid_file_t) - except (IOError, OSError): - return 1 - - # Частота проверки - time.sleep(60 * period) - - -# check client's presence -def sid_monitor(sid_fn, sids_dn, cls): - # check interval - period = 21 - while True: - try: - sids = [] - # create, if file not exists - if not os.path.exists(sid_fn): - temp = open(sid_fn, 'w') - temp.close() - fd = open(sid_fn, 'r') - while 1: - try: - # read all on one record - list_sid = pickle.load(fd) - except (EOFError, KeyError, IOError): - break - # add session id in sesession list - sids.append(list_sid[0]) - fd.close() - except (IOError, OSError): - print(_("Error reading SID files")) - return - - try: - # for each session - for filename in sids: - # find file this session - sid_path = sids_dn + "/%d.sid" % filename - with cls.sid_locker: - if os.path.isfile(sid_path): - with open(sid_path) as 
fd: - # read information about session - sid_inf = pickle.load(fd) - # if number of missed inspections more 3 - if sid_inf[1] > 3: - # flag client absence - sid_inf[2] = 1 - fd.close() - if os.path.isfile(sid_path): - # add to digit missed inspections - # client constantly nulls this value! - ft = open(sid_path, 'w') - if sid_inf[1] < 4: - sid_inf[1] += 1 - pickle.dump(sid_inf, ft) - ft.close() - except (IOError, OSError, KeyError): - pass - # check period - time.sleep(period) diff --git a/libs_crutch/core/server/client_class.py b/libs_crutch/core/server/client_class.py deleted file mode 100644 index 0296d19..0000000 --- a/libs_crutch/core/server/client_class.py +++ /dev/null @@ -1,248 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import print_function -import urllib2 as u2 -import os -import sys -import calculate.contrib -from suds.transport.http import HttpTransport -from httplib import HTTPConnection, HTTPSConnection -import socket -from calculate.core.datavars import DataVarsCore -from calculate.lib.cl_lang import setLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -class clientHTTPSConnection(HTTPSConnection): - def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None, cert_path=None): - HTTPConnection.__init__(self, host, port, strict, timeout, - source_address) - self.key_file = key_file - self.cert_file = cert_file - self.cert_path = cert_path - - # get filename store cert server - def cert_list(self, host, ca_certs, server_cert): - if not os.path.exists(self.ca_path): - try: - os.makedirs(self.ca_path) - except OSError: - pass - if not os.path.exists(ca_certs): - fc = open(ca_certs, "w") - fc.close() - filename = None - try: - with open(ca_certs) as fd: - t = fd.read() - # for each line - for line in t.splitlines(): - # Split string into a words list - words = line.split() - if len(words) > 1: - # if first word... 
- if words[0] == host: - filename = words[1] - if not filename: - return None - except (IOError, IndexError): - print(_("Certificate not found on the client's side")) - return None - try: - fd = open(self.ca_path + filename, 'r') - store_cert = fd.read() - fd.close() - if store_cert == server_cert: - return filename - except IOError as e: - print(_("Failed to open the file") + "%s%s %s" % (self.ca_path, - filename, str(e))) - return None - - # add certificate server in trusted - def add_server_cert(self, cert): - print(_("Untrusted server certificate!")) - import OpenSSL - - certobj = OpenSSL.crypto.load_certificate(OpenSSL.SSL.FILETYPE_PEM, - cert) - print('\n' + _("Fingerprint = %s") % certobj.digest('SHA1')) - print(_("Serial number = "), certobj.get_serial_number()) - Issuer = certobj.get_issuer().get_components() - print('\n' + _("Issuer")) - for i in Issuer: - print("%s : %s" % (i[0], i[1])) - Subject = certobj.get_subject().get_components() - print('\n' + _("Subject")) - for item in Subject: - print("%s : %s" % (item[0], item[1])) - - choice = raw_input( - _("add this certificate to trusted and continue? y/[n]: ")) - if choice in ['y', 'yes', 'Y', 'YES']: - ca_certs = self.ca_path + "cert.list" - - if not os.path.exists(ca_certs): - fc = open(ca_certs, "w") - fc.close() - - filename = self.host - fc = open(self.ca_path + filename, "w") - fc.write(cert) - fc.close() - with open(ca_certs) as fd: - t = fd.read() - # for each line - for line in t.splitlines(): - # Split string into a words list - words = line.split() - if len(words) > 1: - # if first word... 
- if words[0] == self.host: - return 0 - # Open file with compliance server certificates and server hostname - fcl = open(ca_certs, "a") - fcl.write(self.host + ' ' + filename + '\n') - fcl.close() - - else: - sys.exit() - - def connect_trusted_server(self, sock): - import ssl - - if hasattr(ssl, "PROTOCOL_TLSv1_2"): - ssl_version = ssl.PROTOCOL_TLSv1_2 - else: - print(_("SSL library is not support TLSv1_2")) - return 1 - self.ca_path = self.cert_path + "ca/" - ca_certs = self.ca_path + "cert.list" - server_cert = ssl.get_server_certificate(addr=(self.host, self.port)) - if (not hasattr(HTTPSClientsCertTransport, 'filename') or - HTTPSClientsCertTransport.filename is None): - HTTPSClientsCertTransport.filename = self.cert_list( - self.host, ca_certs, server_cert) - if HTTPSClientsCertTransport.filename: - try: - self.sock = ssl.wrap_socket(sock, - certfile=self.cert_file, - keyfile=self.key_file, - ssl_version=ssl_version, - cert_reqs=ssl.CERT_NONE) - - dercert_after_connect = self.sock.getpeercert(True) - cert_after_connect = ssl.DER_cert_to_PEM_cert( - dercert_after_connect) - filename2 = self.cert_list(self.host, ca_certs, - cert_after_connect) - - if not HTTPSClientsCertTransport.filename == filename2: - print('\n' + _("WARNING!!! %s trying to replace the " - "certificate!") % self.host + '\n') - self.sock.close() - return 2 - return 0 - except Exception: - print(_("Error. 
The server certificate and the private " - "key are probably invalid!")) - HTTPSClientsCertTransport.filename = None - return 1 - else: - self.sock = ssl.wrap_socket(sock) - self.add_server_cert(server_cert) - - def connect(self): - """Connect to a host on a given (SSL) port.""" - timeout = 15 - sock = socket.create_connection((self.host, self.port), - timeout, self.source_address) - if self._tunnel_host: - self.sock = sock - self._tunnel() - - clVars = DataVarsCore() - clVars.importCore() - if not clVars.flIniFile(): - sys.exit(1) - import ssl - - if hasattr(ssl, "PROTOCOL_TLSv1_2"): - ssl_version = ssl.PROTOCOL_TLSv1_2 - else: - print(_("SSL library is not support TLSv1_2")) - sys.exit(1) - - self.sock = ssl.wrap_socket(sock, - certfile=self.cert_file, - keyfile=self.key_file, - ssl_version=ssl_version, - cert_reqs=ssl.CERT_NONE) - - -class HTTPSClientAuthHandler(u2.HTTPSHandler): - def __init__(self, key, cert, cert_path): - u2.HTTPSHandler.__init__(self) - self.key = key - self.cert = cert - self.cert_path = cert_path - - def https_open(self, req): - # Rather than pass in a reference to a connection class, we pass in - # a reference to a function which, for all intents and purposes, - # will behave as a constructor - return self.do_open(self.getConnection, req) - - def getConnection(self, host, timeout=300): - return clientHTTPSConnection(host, key_file=self.key, - cert_file=self.cert, - cert_path=self.cert_path) - - -class HTTPSClientsCertTransport(HttpTransport): - def __init__(self, key, cert, path_to_cert, *args, **kwargs): - HttpTransport.__init__(self, *args, **kwargs) - self.key = key - self.cert = cert - self.cert_path = path_to_cert - - def u2open(self, u2request): - """ - Open a connection. - @param u2request: A urllib2 request. - @type u2request: urllib2.Requet. - @return: The opened file-like urllib2 object. 
- @rtype: fp - """ - tm = self.options.timeout - url = u2.build_opener(HTTPSClientAuthHandler(self.key, self.cert, - self.cert_path)) - - # from urllib2 import URLError - # try: - if hasattr(self, "u2ver"): - if self.u2ver() < 2.6: - socket.setdefaulttimeout(tm) - return url.open(u2request) - else: - return url.open(u2request, timeout=tm) - else: - return url.open(u2request, timeout=tm) diff --git a/libs_crutch/core/server/core_interfaces.py b/libs_crutch/core/server/core_interfaces.py deleted file mode 100644 index dd47c39..0000000 --- a/libs_crutch/core/server/core_interfaces.py +++ /dev/null @@ -1,423 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2015-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from calculate.lib.datavars import DataVars -import calculate.contrib -from spyne import Service - -class CoreServiceInterface(object): - ######## - # Fields - ######## - - ############## - # baseClass.py - ############## - Common = object - glob_process_dict = None - process_pid = None - glob_progress_dict = None - glob_table_dict = None - glob_frame_list = None - manager = None - - data_path = None - certbase = None - rights = None - sids = None - pids = None - sids_file = None - ssl_certificate = None - ssl_private_key = None - cachedict = None - - # set_paths - sids_pids = None - cert_path = None - serv_certbase = None - group_rights = None - max_sid = None - max_pid = None - log_filename = None - pids_file = None - - #workaround: Spyne won't let us use self ref in @rpc, - # and ctx leads to original service class (Basic in this case) - # but some methods are gathered from CoreWsdl classes, so we need - # a ref to combined class - comb_class_ref = None - - ############ - # gen_sid.py - ############ - sid_locker = None - - ######### - # Methods - ######### - - ############## - # baseClass.py - ############## - def get_cache(sid, meth_name, obj_name): - """ - Получить значение кэша - :param sid: id сессии - :param meth_name: имя метода - :param obj_name: имя объекта - :return: - """ - raise NotImplementedError - - def clear_cache(sid, meth_name=None, obj_name=None): - """ - Удалить объект из кэша, если не указан метод - очищается весь кэш - для указанной сессий, если не указано имя объекта - очищается весь - кэш для метода сессии - :param sid: id сессии - :param meth_name: имя метода - :param obj_name: имя объекта - :return: - """ - raise NotImplementedError - - def clear_cache_method(method=None): - """ - Удалить из всех сессий для указанного метода - :param meth_name: имя метода - :return: - """ - raise NotImplementedError - - def set_cache(sid, meth_name, obj_name, obj, smart=True): - """ - Установить кэш для метода, если используется smart, то происходит - 
корриктровка атрибутов предыдущего объекта - :param sid: id сессии - :param meth_name: имя метода - :param obj_name: имя объекта - :param obj: кэшируемый объект - :param smart: корректировка предыдущего значения - :return: - """ - raise NotImplementedError - - def set_comb_class_ref(cls, comb_class_ref): - cls.comb_class_ref = comb_class_ref - - ######### - # func.py - ######### - def startprocess(cls, sid, target=None, method=None, method_name=None, - auto_delete=False, args_proc=()): - """ - Запустить задачу - :param sid: id сессии - :param target: объект с методом - :param method: метод - :param method_name: название метода - :param auto_delete: отслеживание процесса (нигде не используется) - :param args_proc: параметры для метода - :return: - """ - raise NotImplementedError - - def install_vars(cls, dv=None): - """ - Метод описания параметров метода install (создаётся автоматически - через метакласс (datavarsConstructor.wrapper) - :param dv: объект переменных - :return: - """ - raise NotImplementedError - - def serv_get_methods(cls, client_type): - """ - Получить доступные методы - :param client_type: тип клиента (console или gui или None) - :return: - """ - raise NotImplementedError - - def serv_get_sessions(cls): - """ - Получить текущие сессии - :return: - """ - raise NotImplementedError - - def active_clients(cls, sid): - """ - Зафиксировать активность клиента и проверить рабочая ли сессия - :param sid: id сессии - :return: - """ - raise NotImplementedError - - def serv_view_cert_right(cls, cert_id, data_path, client_type=None): - """ - Получить права указанного сертификата - :param cert_id: id сертификата - :param data_path: база сертификатов - :param client_type: тип клиента (console или gui, или None) - :return: - """ - raise NotImplementedError - - ############ - # gen_pid.py - ############ - - def check_sid_cert(cls, sid): - """ - Проверить сертификат в указанной сессии - :param sid: id сессии - :return: - """ - raise NotImplementedError - - def 
find_sid_pid_file(cls, sid): - """ - Найти id процессов указанной сессии - :param sid: id сессии - :return: список pid - """ - raise NotImplementedError - - def serv_pid_kill(cls, pid, sid): - """ - Заверишить процесс - :param pid: - :param sid: - :return: - """ - raise NotImplementedError - - def del_pid(cls, pid): - """ - Удалить процесс из списка процессов - :param pid: id процесса - :return: - """ - raise NotImplementedError - - def gen_pid(cls): - """ - Сгенерировать идентификатор процесса - :return: - """ - - ########## - # clean.py - ########## - def delete_pid(cls, sid, pid): - """ - Удалить сохранённые данные указанного процесса у сессии - :param sid: id сессии - :param pid: id процесса - :return: - """ - raise NotImplementedError - - def watcher_pid_proc(cls, sid, pid): - """ - Функция для потока для наблюдение за процессом сессии - :param sid: id сессии - :param pid: id процесса - :return: - """ - - ########### - # gen_sid.py - ########### - def sid_cmp(cls, sid, cert_id, lang): - """ - Найти указанную сессию в сервере или создать новую - :param sid: 0 или id сессии - :param cert_id: id сертификата - :param lang: язык - :return: - """ - raise NotImplementedError - - def serv_init_session(cls, sid, lang): - """ - Инициализация сессии - :param sid: 0 или id сессии - :param lang: язык - :return: - """ - raise NotImplementedError - - def del_sid_from_file(cls, sid): - """ - Удалить сессию из файла (закрыть сессию) - :param sid: id сессии - :return: - """ - raise NotImplementedError - - def serv_sid_info(cls, sid): - """ - Получить информацию о сессии - :param sid: id сессии - :return: - """ - raise NotImplementedError - - def find_sid_in_file(cls, sid): - """ - Найти идентификатор сессии в файле - :param sid: id сессии - :return: - """ - - ################# - # sid_pid_file.py - ################# - def del_sid_pid(cls, sid): - """ - Удалить сессию и все её процессы - :param sid: id сессии - :return: - """ - raise NotImplementedError - - def 
serv_pid_info(cls, sid, pid): - """ - Получить информацию о процессе - :param sid: id сессии - :param pid: id процесса - :return: - """ - raise NotImplementedError - - def del_pid_from_sid_pid(cls, pid): - """ - Удалить информацию о процессе из sid_pid файла - :param pid: id процесса - :return: - """ - raise NotImplementedError - - def add_sid_pid(cls, sid, pid): - """ - Добавить запись соответивия процесса сессии - :param sid: id сессии - :param pid: id процесса - :return: - """ - raise NotImplementedError - - -class MethodsInterface(object): - clVars = DataVars() - method_name = None - - def applyTemplates(self, *args, **kw): - raise NotImplementedError - - def dispatchConf(self, *args, **kw): - raise NotImplementedError - - def pauseProcess(self): - raise NotImplementedError - - def resumeProcess(self): - raise NotImplementedError - - def writeFile(self): - raise NotImplementedError - - def setProgress(self, perc, short_message=None, long_message=None): - raise NotImplementedError - - def setStatus(self, stat): - raise NotImplementedError - - def setData(self, dat): - raise NotImplementedError - - def getStatus(self): - raise NotImplementedError - - def getProgress(self): - raise NotImplementedError - - def getAnswer(self): - raise NotImplementedError - - def addProgress(self, message=""): - raise NotImplementedError - - def printTable(self, table_name, head, body, fields=None, - onClick=None, addAction=None, step=None, - records=None): - raise NotImplementedError - - def addMessage(self, type='normal', message=None, id=None, - onlyShow='', default=None): - raise NotImplementedError - - def printSUCCESS(self, message='', onlyShow=None): - raise NotImplementedError - - def printPre(self, message='', onlyShow=None): - raise NotImplementedError - - def printDefault(self, message='', onlyShow=None): - raise NotImplementedError - - def printWARNING(self, message, onlyShow=None): - raise NotImplementedError - - def printERROR(self, message='', onlyShow=None): - raise 
NotImplementedError - - def startTask(self, message, progress=False, num=1): - raise NotImplementedError - - def setTaskNumber(self, number=None): - raise NotImplementedError - - def endTask(self, result=None, progress_message=None): - raise NotImplementedError - - def askConfirm(self, message, default="yes"): - raise NotImplementedError - - def isInteractive(self): - raise NotImplementedError - - def askChoice(self, message, answers=(("yes", "Yes"), ("no", "No"))): - raise NotImplementedError - - def askQuestion(self, message): - raise NotImplementedError - - def askPassword(self, message, twice=False): - raise NotImplementedError - - def beginFrame(self, message=None): - raise NotImplementedError - - def endFrame(self): - raise NotImplementedError - - def startGroup(self, message): - raise NotImplementedError - - def endGroup(self): - raise NotImplementedError diff --git a/libs_crutch/core/server/create_cert.py b/libs_crutch/core/server/create_cert.py deleted file mode 100644 index 279e7dd..0000000 --- a/libs_crutch/core/server/create_cert.py +++ /dev/null @@ -1,190 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import print_function -import sys -from M2Crypto import RSA, X509, EVP, m2 -from calculate.lib.cl_lang import setLocalTranslate -from binascii import hexlify -import hashlib -from calculate.lib.utils.text import _u8 - -from M2Crypto import m2 -from M2Crypto.X509 import X509_Extension -from calculate.lib.utils.files import writeFile, readFile -from ctypes import * - - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -def passphrase_callback(v): - return "" - - -def generateRSAKey(): - return RSA.gen_key(2048, m2.RSA_F4) - - -def makePKey(key): - pkey = EVP.PKey() - pkey.assign_rsa(key) - return pkey - - -def makeRequest(pubkey, pkey, serv_host, port): - """ create query to the signing on server """ - req = X509.Request() - # Seems to default to 0, but we can now set it as well, so just API test - req.set_version(req.get_version()) - req.set_pubkey(pkey) - name = X509.X509_Name() - c = raw_input(_("Enter the certificate date manually? [y]/n: ")) - if c.lower() in ['n', 'no']: - name.CN = 'root_cert' # (Common Name); - name.OU = 'www.calculate-linux.ru' # (Organization Unit); - name.O = 'calculate-linux' # (Organization Name); - name.L = '' # (Locality Name); - name.ST = 'Spb' # (State Name); - name.C = 'En' # (Country); - else: - import socket - - print(_('Do not use spaces or tabs.')) - host_name = socket.getfqdn() - # if serv_host == host_name: - # print '\n'+_("Want to create self-signed certificate?\n" - # "Use key --gen-cert-self") - # return None - if serv_host in host_name: - host_name = host_name.replace('.' + serv_host, '') - list_host_name = host_name.split('.') - print('list_host_name = ', list_host_name) - result_host_name = \ - list_host_name[len(list_host_name) - 1] + "." + serv_host - else: - host_name = socket.getfqdn() - list_host_name = host_name.split('.') - result_host_name = list_host_name[0] + "." 
+ serv_host - - def cleardata(x): - if x: - return x.replace(' ', '_').replace('\t', '_') - return "" - - _CN = raw_input(_('Hostname [%s] : ') % _u8(result_host_name)) - name.CN = _CN or result_host_name or "" - _OU = raw_input(_('Organization unit: ')) - name.OU = cleardata(_OU) - _O = raw_input(_('Organization name: ')) - name.O = cleardata(_O) - network = _('Full network address (host:port)') - _L = raw_input(network + ' [%s:%d]: ' % (_u8(host_name), port)) - name.L = cleardata(_L) or (_u8(host_name) + ':' + str(port)) - _ST = raw_input(_('City: ')) - name.ST = cleardata(_ST) - _C = raw_input(_('Country (two letters only!): ')) - name.C = _C or "C" - - req.set_subject_name(name) - ext1 = X509.new_extension('nsComment', 'Auto Generated') - extstack = X509.X509_Extension_Stack() - extstack.push(ext1) - req.add_extensions(extstack) - req.sign(pkey, 'md5') - return req - -class CreateCertError(Exception): - pass - -def create_selfsigned_ca(dn_data, keyfile, certfile): - from OpenSSL import crypto - - certpem = readFile(keyfile) - if not certpem: - raise CreateCertError(_("Key file {} not found").format(keyfile)) - - try: - pkey = crypto.load_privatekey( - crypto.FILETYPE_PEM, certpem) - ca = crypto.X509() - ca.set_version(2) - subject = ca.get_subject() - subject.countryName = dn_data['C'] - subject.commonName = dn_data['CN'] - subject.stateOrProvinceName = dn_data['ST'] - subject.localityName = dn_data['L'] - subject.organizationName = dn_data['O'] - subject.organizationalUnitName = dn_data['OU'] - - ca.gmtime_adj_notBefore(-60*60*24) - ca.gmtime_adj_notAfter(60*60*24*365*20) - ca.set_issuer(subject) - ca.set_pubkey(pkey) - ca.add_extensions([ - crypto.X509Extension(b'basicConstraints', True, b'CA:TRUE'), - #crypto.X509Extension(b'keyUsage', False, b'keyCertSign, cRLSign'), - crypto.X509Extension(b'subjectKeyIdentifier', False, b'hash', subject=ca)]) - ca.add_extensions([crypto.X509Extension(b'authorityKeyIdentifier', False, b'keyid:always',issuer=ca)]) - 
ca.sign(pkey, 'sha1') - - with writeFile(certfile) as f: - f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, ca)) - except crypto.Error as e: - raise CreateCertError(str(e)) - -def sign_client_certifacation_request(ca_keyfile, ca_certfile, requestfile, out_cert, group): - from OpenSSL import crypto - cakeyfilepem = readFile(ca_keyfile) - cacertpem = readFile(ca_certfile) - requestpem = readFile(requestfile) - if not cakeyfilepem: - raise CreateCertError( - _("Key file {} not found").format(ca_keyfile)) - if not cacertpem: - raise CreateCertError( - _("CA certitficate file {} not found").format(ca_certfile)) - if not requestpem: - raise CreateCertError( - _("Request file {} not found").format(requestfile)) - - try: - pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, cakeyfilepem) - ca = crypto.load_certificate(crypto.FILETYPE_PEM, cacertpem) - req = crypto.load_certificate_request(crypto.FILETYPE_PEM, requestpem) - - cert = crypto.X509() - cert.set_version(2) - cert.gmtime_adj_notBefore(-60*60*24) - cert.gmtime_adj_notAfter(60*60*24*365*20) - cert.set_issuer(ca.get_subject()) - cert.set_subject(req.get_subject()) - cert.set_pubkey(req.get_pubkey()) - - cert.add_extensions([ - crypto.X509Extension(b'basicConstraints', False, b'CA:FALSE'), - crypto.X509Extension(b'nsCertType', False, b'client'), - crypto.X509Extension(b'keyUsage', False, b'digitalSignature, keyEncipherment'), - crypto.X509Extension(b'extendedKeyUsage', False, b'clientAuth'), - crypto.X509Extension(b'nsComment', False, 'group:{}'.format(group)), - ]) - cert.sign(pkey, 'sha1') - - with writeFile(out_cert) as f: - f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) - except crypto.Error as e: - raise CreateCertError(str(e)) diff --git a/libs_crutch/core/server/func.py b/libs_crutch/core/server/func.py deleted file mode 100644 index 9366daf..0000000 --- a/libs_crutch/core/server/func.py +++ /dev/null @@ -1,1918 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. 
http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function -from __future__ import absolute_import -import pickle -import random -import threading -import sys -import os -import re -from os import path -import glob -import traceback -from traceback import print_exc -from calculate.core.server.core_interfaces import (CoreServiceInterface, - MethodsInterface) -from calculate.install.distr import Distributive -from calculate.lib.cl_log import log -from calculate.lib.utils.colortext import convert_console_to_xml -from .api_types import ReturnProgress -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - -from calculate.lib.utils.files import (process, readFile, processProgress, - readLinesFile, - makeDirectory, getProgPath) -from calculate.lib.datavars import DataVarsError, CriticalError, Variable -from calculate.lib.utils.content import getCfgFiles -from itertools import * - -import calculate.contrib -from spyne import String, Integer, Boolean, Array - -from calculate.core.server.api_types import ReturnedMessage, CommonInfo -from calculate.core.server.api_types import (Field, - GroupField, ViewInfo, ViewParams) -from calculate.lib.cl_template import Template -from calculate.lib.datavars import DataVars -from .loaded_methods import LoadedMethods - - -class 
CommonMethods(MethodsInterface): - def dispatchConf(self, filesApply=None, prefix="/"): - """ - Common dispatch conf. Using if ._cfg files created. - """ - - def normalize_config(text): - """ - Нормализовать конфигурационный файл для сравнения: - * удалить calculate заголовок - * добавить перевод строки в конец если файл без перевода строки - """ - if text.endswith('\n'): - return Template.removeComment(text) - else: - return "%s\n" % Template.removeComment(text) - - i_orig, i_data = 0, 1 - i_mime, i_cfgname = 0, 1 - cfg_files = getCfgFiles(prefix=prefix).items() - info = filter(lambda x: (filesApply is None or - x[i_data][0][i_cfgname] in filesApply), - cfg_files) - max_info = len(info) - for ind, data in enumerate(info): - out = [] - orig, data = data - data = data[0] - - origdata = readFile(orig) - newdata = readFile(data[i_cfgname]) - pattern = "%s/._cfg????_%s" % (os.path.dirname(orig), - os.path.basename(orig)) - answ_map = {'usenew': 'use new', 'skip': 'next'} - dispatch_var = self.clVars.Get('cl_dispatch_conf') - for fn in glob.glob(pattern): - try: - if fn == data[i_cfgname]: - continue - os.unlink(fn) - except (OSError, IndexError): - pass - if (self.clVars.Get('cl_autoupdate_set') == 'on' or - origdata == newdata): - answ = "use new" - elif dispatch_var in answ_map: - answ = answ_map.get(dispatch_var) - else: - orig_content = normalize_config(readFile(orig)) - new_content = normalize_config(readFile(data[i_cfgname])) - if orig_content == new_content: - answ = "use new" - else: - for i, s in enumerate(list(process("diff", "-Nu", - orig, data[i_cfgname]))): - s = convert_console_to_xml(s) - if s.startswith('+') and i > 1: - out.append('%s' % s) - elif s.startswith('-') and i > 1: - out.append('%s' % s) - else: - out.append(s) - self.printPre("
".join(out)) - self.printSUCCESS(_("({one} of {_all}) -- {fname}").format( - one=ind + 1, _all=max_info, fname=orig)) - answ = self.askChoice(_("Choose a configuration action:"), - answers=(("zap new", _("Zap new")), - ("use new", _("Use new")), - ("next", _("Next")))) - if answ == "next": - continue - elif answ == "use new": - try: - with open(orig, 'w') as fd: - fd.write(readFile(data[i_cfgname])) - os.unlink(data[i_cfgname]) - if filesApply: - try: - i = filesApply.index(data[i_cfgname]) - filesApply[i] = orig - except Exception as e: - print(str(e)) - except Exception as e: - print(str(e)) - self.printERROR( - _("Failed to copy {ffrom} to {fto}").format( - ffrom=data[i_cfgname], fto=orig)) - continue - elif answ == "zap new": - try: - os.unlink(data[i_cfgname]) - if filesApply: - try: - filesApply.remove(data[i_cfgname]) - except Exception as e: - print(str(e)) - except OSError: - self.printERROR( - _("Failed to remove %s") % data[i_cfgname]) - return True - - def setVariable(self, varname, varvalue, force=False): - """ - Установить значение переменной - """ - self.clVars.Set(varname, varvalue, force=force) - return True - - def invalidateVariables(self, *variables): - for varname in (x.rpartition('.')[2] for x in variables): - self.clVars.Invalidate(varname, force=True) - return True - - def applyTemplates(self, target=None, useClt=None, cltFilter=False, - root=None, useDispatch=True, critical=False): - """ - Применить шаблоны. 
- - Args: - target: дистрибутив, куда необходимо выполнить шаблоны (/ по умолчанию) - useClt: использовать clt шаблоны - cltFilter: применять фильтр на clt шаблоны - root: каталог, куда будут наложны шаблоны (cl_root_path) - """ - from calculate.lib.cl_template import (TemplatesError, ProgressTemplate) - - if target is None: - chroot = '/' - elif isinstance(target, Distributive): - chroot = target.getDirectory() - else: - chroot = target - if root is None: - root = '/' - elif isinstance(root, Distributive): - root = root.getDirectory() - clt_filter = True if cltFilter in (True, "on") else False - self.clVars.Set("cl_chroot_path", chroot, True) - self.clVars.Set("cl_root_path", root, True) - # определение каталогов содержащих шаблоны - use_clt = useClt in ("on", True) - self.addProgress() - null_progress = lambda *args, **kw: None - dispatch = self.dispatchConf if useDispatch else None - cl_templ = ProgressTemplate(null_progress, self.clVars, - cltObj=use_clt, - cltFilter=clt_filter, - printSUCCESS=self.printSUCCESS, - printWARNING=self.printWARNING, - askConfirm=self.askConfirm, - dispatchConf=dispatch, - printERROR=self.printERROR, - critical=critical) - try: - cl_templ.applyTemplates() - if cl_templ.hasError(): - if cl_templ.getError(): - raise TemplatesError(cl_templ.getError()) - finally: - if cl_templ: - if cl_templ.cltObj: - cl_templ.cltObj.closeFiles() - cl_templ.closeFiles() - return True - - -class CommonLink(object): - """ - Объект-связка объектов тип Install,Client,Action с Common объектом - """ - com = None - - @staticmethod - def link_object(source, target): - for fn in (x for x in dir(CoreWsdl.Common) if not x.startswith("_")): - if hasattr(source, fn): - setattr(target, fn, getattr(source, fn)) - - def set_link(self, com): - """ - Установить связь с Common объектом - """ - self.com = com - self.link_object(com, self) - - -class ActionError(Exception): - pass - - -class Tasks(object): - """ - Класс для создания проверок необходимости запуска задачи в 
зависимости - от результатра работы предыдущих задач - """ - - def __init__(self, check): - self.check = check - - def __call__(self, result, all_result): - return self.check(result, all_result) - - def __or__(self, y): - return Tasks(lambda result, all_result: self(result, all_result) or y(result, all_result)) - - def __ror__(self, y): - return Tasks(lambda result, all_result: y(result, all_result) or self(result, all_result)) - - def __and__(self, y): - return Tasks(lambda result, all_result: self(result, all_result) and y(result, all_result)) - - def __rand__(self, y): - return Tasks(lambda result, all_result: y(result, all_result) and self(result, all_result)) - - def __invert__(self): - return Tasks(lambda result, all_result: not self(result, all_result)) - - @classmethod - def _result(self, result, all_result): - return result - - @classmethod - def success_all(cls, *tasks): - """ - Все указанные задачи выполнены и выполнены без ошибок - """ - return cls( - lambda *args: all(x in cls._result(*args) and cls._result(*args)[x] for x in tasks)) - - @classmethod - def success_one_of(cls, *tasks): - """ - Хотя бы одна из задач выполнена и хотя бы одна из выполненных без ошибок - """ - return cls( - lambda *args: any(cls._result(*args)[x] for x in tasks if x in cls._result(*args))) - - @classmethod - def success(cls, inessential=()): - """ - Все ранее запущенные задачи успешно завершены, результат задач - inessential не важен - """ - return cls(lambda *args: all(cls._result(*args)[x] for x in cls._result(*args) - if x not in inessential)) - - @classmethod - def failed(cls, inessential=()): - """ - Хотя бы одна из задач завершилась неудачно, результат задач - inessential не важен - """ - return cls(lambda *args: any(not cls._result(*args)[x] for x in cls._result(*args) - if x not in inessential)) - - @classmethod - def failed_all(cls, *tasks): - """ - Выполнена хотя бы одна задача и все те, которые выполнены с ошибкой - """ - def not_empty_all(l): - l = list(l) - 
return bool(l and all(l)) - return cls( - lambda *args: not_empty_all(not cls._result(*args)[x] for x in tasks if x in cls._result(*args))) - - @classmethod - def failed_one_of(cls, *tasks): - """ - Хотя бы одна из указанных задач выполнена и выполнена с ошибкой - """ - return cls( - lambda *args: any(x in cls._result(*args) and not cls._result(*args)[x] for x in tasks)) - - @classmethod - def has(cls, *tasks): - """ - Был запуск всех перечисленных задач - """ - return cls(lambda *args: all(x in cls._result(*args) for x in tasks)) - - @classmethod - def hasnot(cls, *tasks): - """ - Не было запуска ни одной из перечисленных задач - """ - return cls(lambda *args: all(x not in cls._result(*args) for x in tasks)) - - @classmethod - def result(cls, task, eq=None, ne=None): - if eq: - wrapper = lambda *args: task in cls._result(*args) and cls._result(*args)[task] == eq - elif ne: - wrapper = lambda *args: task not in cls._result(*args) or cls._result(*args)[task] != ne - else: - wrapper = lambda *args: task in cls._result(*args) and cls._result(*args)[task] - return cls(wrapper) - - @classmethod - def has_any(cls, *tasks): - """ - Был запуск любой из задач - """ - return cls(lambda *args: any(x in cls._result(*args) for x in tasks)) - - -class AllTasks(Tasks): - @classmethod - def _result(cls, result, all_result): - return all_result - -class Action(MethodsInterface): - """ - Класс для реализации выполнения действия - # default = {'depend':Tasks.success(), - # # прятать вывод - # 'hideout':False, - # # задача важна, в случае False результат - # # не сохраняется в self.result - # 'essential':True} - """ - eachvar = None - - # список выполняемых задач - tasks = [] - # список исключений, которые выводятся в сокращенном формате - # (ожидаемые ошибки) - # остальные выводятся с именем модуля и номером строки - native_error = () - - # сообщение об удачном завершении действия - successMessage = None - # сообщение при ошибке - failedMessage = None - # сообщение о прерывании - 
interruptMessage = None - - # добавить стандартные сообщения в конце - finishMessage = True - - def __init__(self): - if self.finishMessage: - tasks = [] - if self.failedMessage: - tasks.append( - # вывести сообщение в случае ошибки - {'name': 'failed', - 'error': self.failedMessage, - 'depend': (Tasks.failed() & Tasks.hasnot("interrupt"))}) - if self.successMessage: - tasks.append( - # вывести сообщение в случае успеха - {'name': 'success', - 'message': self.successMessage, - 'depend': (Tasks.success() & Tasks.hasnot("failed"))}) - if self.interruptMessage: - tasks.append( - # вывести сообщение о том, что действие прервано - {'name': 'intmessage', - 'error': self.interruptMessage, - 'depend': (Tasks.has("interrupt"))}) - self.tasks = self.tasks + tasks - self.group_name = "" - self.clVars = None - - @classmethod - def program(cls, progName): - """ - Проверить наличие программы - """ - return lambda dv: bool(getProgPath(progName)) - - @classmethod - def packageInstalled(cls, pkg): - """ - Проверить было ли обновление пакета - """ - return lambda dv: False - - @classmethod - def variables(cls, *varnames): - """ - Передать переменные как аргументы, поддерживается True,False - """ - return lambda dv: [dv.Get(x) if x not in (True, False) else x - for x in varnames] - - reMethod = re.compile("^([A-Za-z]+)\.([A-Za-z0-9_]+)\(([^)]*)\)$") - reMessageVars = re.compile("\{([^}]+)\}") - - def parseMethod(self, objs, dv, s, task): - """ - Разобрать строку метода, на объект, метод, аргументы - """ - result = self.reMethod.search(s) - if not result: - raise ActionError(_("Wrong method for task %s") % task) - objname, methodname, args = result.groups() - if objname not in objs: - raise ActionError(_("Object %s not found") % objname) - obj = objs[objname] - if not hasattr(obj, methodname): - raise ActionError(_("Method {method} for {obj} not found"). 
- format(method=methodname, obj=objname)) - - def _convertMethodArg(param): - """ - Конвертировать аргумент для метода, взять по словарю, - либо строка - имя переменной - """ - param = param.strip() - mapstd = {'True': True, - 'False': False, - 'None': None, - '""': "", - "''": ""} - if param in mapstd: - return mapstd[param] - if param.isdigit(): - return int(param) - if param.startswith('"') and param.endswith('"'): - return param.strip('"') - if param == 'eachvar': - return self.eachvar - _type = dv.getInfo(param).type - if _type == "int": - return dv.GetInteger(param) - if _type in ("bool", "boolauto"): - return dv.GetBool(param) - return dv.Get(param) - - if args: - args = map(_convertMethodArg, args.split(',')) - else: - args = () - return getattr(obj, methodname), args - - def formatMessage(self, dv, message): - """ - Вставить значения переменных в текст сообщения - """ - - class TextTrasformer(object): - - @staticmethod - def first_letter_upper(s): - return "%s%s" % (s[0].upper(), s[1:]) - - tt = TextTrasformer() - - def replace_value(match): - var = match.group(1) - if ":" in var: - var, func = var.split(':') - else: - func = None - if var == "eachvar": - val = self.eachvar - else: - val = dv.Get(var) - if type(val) in (list, tuple): - val = ", ".join(val) - if func: - if hasattr(tt, func): - val = getattr(tt, func)(val) - else: - val = getattr(val, func)() - return "{0}".format(val) - - return self.reMessageVars.sub(replace_value, str(message)) - - def runCondition(self, func_condition): - """ - Запустить метод проверки условия (если аргумент называется Get, - то передавать в него не объект DataVars а метод Get, - если у нет аргументов, то не передавать туда аргументы - """ - args = [] - arg_count = func_condition.__code__.co_argcount - for param_name in func_condition.__code__.co_varnames[:arg_count]: - if param_name in ('Get', 'GetBool', 'Select', 'ZipVars'): - args.append(getattr(self.clVars, param_name)) - elif param_name == 'eachvar': - 
args.append(self.eachvar) - else: - args.append(self.clVars) - return func_condition(*args) - - def getFormatMessage(self, action, *fields): - """ - Получить сообщение для вывода среди нескольких с приоритетом и - метод вывода - """ - for field in (x for x in fields if x in action): - if "error" in field: - print_func = self.printERROR - elif "warning" in field: - print_func = self.printWARNING - else: - print_func = self.printSUCCESS - return print_func, self.formatMessage(self.clVars, action[field]) - return None, None - - def get_tasks(self, tasks, result, all_result): - """ - Герератор задач (поддержка линейной обработки задач в группах) - """ - for task in tasks: - if "group" in task or "tasks" in task: - if all(self.get_condition_context(task, result, - all_result).values()): - self.group_name = task.get("group", "") - if "while" in task: - depend = task.get("while", []) - depend = (depend - if type(depend) in (list, tuple) else [depend]) - depend.append(~Tasks.has_any("interrupt")) - while all([x(result, all_result) for x in depend]): - for action in self.get_tasks(task["tasks"], - result, all_result): - yield action - else: - for action in self.get_tasks(task["tasks"], result, - all_result): - yield action - if not self.group_name: - self.endGroup() - else: - self.group_name = "" - else: - yield task - - def get_condition_context(self, action, result, all_result): - """ - Получить результаты проверки по зависимосятм и условиям - """ - group, op, name = action.get("name", - "").rpartition(':') - # проверить по результатам - # если указанно группа к имени с '!', то проверяется - # только условие принадлежности задачи к группе - if group and group.endswith('!'): - group = group.strip('!') - depend = [Tasks.success_all(group)] - else: - depend = action.get("depend", Tasks.success()) - depend = (depend - if type(depend) in (list, tuple) else [depend]) - if group: - depend.append(Tasks.success_all(group)) - depend_result = all([x(result, all_result) for x in 
depend]) - # проверить по условиям - if depend_result: - condition_funcs = action.get("condition", lambda dv: True) - condition_funcs = (condition_funcs - if type(condition_funcs) in (list, tuple) - else [condition_funcs]) - condition_result = all( - [self.runCondition(x) for x in condition_funcs]) - else: - condition_result = True - return {'condition': condition_result, 'depend': depend_result} - - def run(self, objs, dv): - """Запустить список действий""" - - class StubLogger(object): - def info(self, s): - pass - - result = {} - all_result = {} - - self.group_name = "" - self.clVars = dv - if dv.Get('cl_env_debug_set') == 'off' or \ - dv.Get('cl_root_readonly') == 'on' or \ - dv.Get('cl_ebuild_phase') or os.getuid(): - logger = StubLogger() - else: - logger = log("core-action.log", - filename="/var/log/calculate/core-action.log", - formatter="%(asctime)s - %(levelname)s - %(message)s") - for obj in objs.values(): - obj.set_link(self) - obj.clVars = dv - if hasattr(obj, "init"): - obj.init() - try: - self.beginFrame() - logger.info("Start {methodname}".format( - methodname=self.method_name)) - for action in self.get_tasks(self.tasks, result, all_result): - foreach = action.get("foreach", "") - if foreach: - foreach = self.clVars.Get(foreach) - else: - foreach = [""] - self.eachvar = "" - for eachvar in foreach: - self.eachvar = eachvar - group, op, name = action.get("name", - "").rpartition(':') - res = True - task = False - self.clVars.Set('cl_task_name', name, force=True) - try: - run_context = self.get_condition_context(action, result, - all_result) - actinfo = "Run" if all(run_context.values()) else "Skip" - logger.info( - "{action} {name}: condition: {condition}, " - "depend: {depend}".format( - action=actinfo, - name=name, - condition=run_context['condition'], - depend=run_context['depend'])) - - elsePrint, elseMessage = ( - self.getFormatMessage(action, "else_error", - "else_warning", - "else_message")) - if (run_context['depend'] and - not 
run_context['condition'] and elseMessage): - if "else_error" in action: - all_result[name] = False - if action.get("essential", True): - result[name] = False - elsePrint(elseMessage) - if all(run_context.values()): - self.writeFile() - if self.group_name: - self.startGroup(str(self.group_name)) - self.group_name = None - printFunc, message = self.getFormatMessage( - action, "error", "warning", "message") - if "confirm" in action and message: - all_result[name] = \ - self.askConfirm(str(message), - action["confirm"]) - result[name] = all_result[name] - continue - elif message: - # если действие с командой - if ("error" not in action and - "method" in action or - "command" in action): - self.startTask(str(message)) - task = True - # действие содержит только сообщение - else: - if "error" in action: - res = False - printFunc(message) - # запустить метод объекта - if "method" in action: - try: - method, args = self.parseMethod( - objs, dv, action["method"], name) - if "decoration" in action: - decfunc, decargs = self.parseMethod( - objs, dv, action["decoration"], - name) - method = decfunc(*decargs)(method) - res = method(*args) - if res is None: - res = False - except CriticalError as e: - self.printERROR(str(e)) - self.endFrame() - return False - except self.native_error as e: - if action.get('essential', True): - printerror = self.printERROR - else: - printerror = self.printWARNING - if hasattr(e, "addon") and e.addon: - printerror(str(e.addon)) - printerror(str(e)) - res = False - except Exception: - error = shortTraceback(*sys.exc_info()) - self.printERROR(error) - res = False - # запустить системную команду - if "command" in action: - hideout = action.get("hideout", False) - cmdParam = map(lambda x: x.strip('"\''), - re.findall( - '["\'][^"\']+["\']|\S+', - action["command"])) - cmd = processProgress(*cmdParam) - for line in cmd.progress(): - if not hideout: - self.printSUCCESS(line) - if cmd.failed(): - lineCmd = cmd.pipe.stderr.read().split('\n') - for line in 
filter(None, lineCmd): - self.printERROR(line) - res = cmd.success() - all_result[name] = res - if action.get("essential", True): - result[name] = res - failedPrint, failedMessage = ( - self.getFormatMessage(action, "failed_error", - "failed_warning", - "failed_message")) - if not res and failedPrint: - failedPrint(failedMessage) - if task and res in (True, False, "skip"): - self.endTask(res) - logger.info("{name}: Result is {result}".format( - name=name, result=res)) - if res is True: - on_success = action.get('on_success', None) - if on_success: - on_success() - # else: - # print "[-] Skip ",name - except KeyboardInterrupt: - all_result[name] = False - if action.get("essential", True): - result[name] = False - self.endTask(False) - self.printWARNING(_("Task interrupted")) - all_result["interrupt"] = False - result["interrupt"] = False - logger.info("{name}: Interrupeted".format(name=name)) - except self.native_error as e: - if action.get('essential', True): - printerror = self.printERROR - else: - printerror = self.printWARNING - if hasattr(e, "addon") and e.addon: - printerror(str(e.addon)) - printerror(str(e)) - result[name] = False - all_result[name] = False - logger.info("{name}: Native error".format(name=name)) - except CriticalError as e: - self.printERROR(str(e)) - self.endFrame() - return False - except BaseException as e: - result[name] = False - all_result[name] = False - error = shortTraceback(*sys.exc_info()) - self.printERROR("%s:%s" % (name, error)) - logger.info("{name}: Unknown exception {exp}".format( - name=name, exp=e.__class__.__name__)) - finally: - dv.close() - self.endFrame() - if any(x in ("failed", "interrupt") for x in result): - return False - return True - -# Never used. Why do we need it? 
-# def commonView(self, sid, params, arg): -# dv = self.get_cache(sid, arg, "vars") -# if not dv: -# dv = getattr(self, "%s_vars" % arg)() -# else: -# dv.processRefresh() -# view = ViewInfo(dv, viewparams=params) -# self.set_cache(sid, arg, "vars", dv, smart=False) -# return view - - -def catchExcept(*skipException): - class wrapper: - def __init__(self, f_static): - f = f_static.__func__ - self.f = f - self.__name__ = f.__name__ - self.__code__ = f.__code__ - self.__doc__ = f.__doc__ - self.__name__ = f.__name__ - - def __call__(self, *args, **kwargs): - try: - return self.f(*args, **kwargs) - except BaseException as e: - from calculate.core.server.api_types import ViewInfo, \ - GroupField, Field - - if isinstance(e, KeyboardInterrupt): - error = _("Task interrupted") - else: - error = str(e) - view = ViewInfo(groups=[]) - group = GroupField(name=_("Error"), last=True) - group.fields = [] - group.fields.append(Field( - name="error", - label=error, - default='color:red;', - element="error")) - view.groups.append(group) - - if not any(isinstance(e, x) - for x in chain(skipException, (KeyboardInterrupt,))): - print(shortTraceback(*sys.exc_info())) - - return view - - return wrapper - - -def shortTraceback(e1, e2, e3): - """ - Return short traceback - """ - frame = e3 - for i in traceback.format_exception(*(e1, e2, e3)): - print(i, end=' ') - while frame.tb_next: - frame = frame.tb_next - module, part = os.path.split(frame.tb_frame.f_code.co_filename) - if part.endswith('.py'): - part = part[:-3] - fallbackmod = part - modname = [part] - while module != '/' and not module.endswith('site-packages'): - module, part = os.path.split(module) - modname.insert(0, part) - if module.endswith('site-packages'): - modname = ".".join(modname) - else: - modname = fallbackmod - return "%s:%s(%s:%s)" % (e1.__name__, str(e2), modname, frame.tb_lineno) - - -class ActiveClientStatus(object): - Success = 0 - Failed = 1 - WrongSID = 2 - - -class CoreWsdl(CoreServiceInterface): - # client 
signals about presence - @staticmethod - def active_clients(cls, sid): - # curThread = threading.currentThread() - # REMOTE_ADDR = curThread.REMOTE_ADDR - - #why is this here? - # cls.get_lang(cls, sid, "from active clients") - if 0 < sid < cls.max_sid: - try: - # open file its session - sid_file = cls.sids + "/%d.sid" % sid - if not os.path.isfile(sid_file): - return ActiveClientStatus.Failed - # check sid in sid.db - if not (os.path.isfile(cls.sids_file) and - cls.find_sid_in_file(cls, sid)): - try: - os.unlink(sid_file) - except (OSError, IOError): - pass - return ActiveClientStatus.Failed - with cls.sid_locker: - with open(sid_file) as fd: - # read information about session - sid_inf = pickle.load(fd) - # reset counters - sid_inf[1] = 0 - sid_inf[2] = 0 - fd.close() - if not os.path.isfile(sid_file): - return ActiveClientStatus.Failed - fd = open(sid_file, "w") - pickle.dump(sid_inf, fd) - fd.close() - return ActiveClientStatus.Success - except Exception: - return ActiveClientStatus.Failed - else: - return ActiveClientStatus.WrongSID - - @staticmethod - def serv_get_methods(cls, client_type): - curThread = threading.currentThread() - certificate = curThread.client_cert - from .cert_cmd import find_cert_id - - cert_id = find_cert_id(certificate, cls.data_path, cls.certbase) - - rights = cls.serv_view_cert_right(cert_id, cls.data_path, client_type) - return_list = [] - if client_type == "console": - for meth in cls.return_conMethod(): - right_flag = True - for right in LoadedMethods.rightsMethods[meth[1]]: - if right not in rights: - right_flag = False - if right_flag: - return_list.append(meth) - if not len(return_list): - return [['0', '0']] - return return_list - else: - for meth in cls.return_guiMethod(): - right_flag = True - for right in LoadedMethods.rightsMethods[meth[1]]: - if right not in rights: - right_flag = False - if right_flag: - return_list.append(meth) - if not len(return_list): - return [['0', '0']] - return return_list - - # return a list of 
methods for the console as list - @staticmethod - def return_conMethod(): - from .loaded_methods import LoadedMethods - - results = [] - for item in LoadedMethods.conMethods: - temp = [item] - for i in LoadedMethods.conMethods[item]: - temp.append(i) - results.append(temp) - return results - - # return a list of methods for the GUI as list - @staticmethod - def return_guiMethod(): - from .loaded_methods import LoadedMethods - - results = [] - dv = DataVars() - dv.importVariables() - - for item in LoadedMethods.guiMethods: - for i in range(0, len(LoadedMethods.guiMethods[item]), 4): - if LoadedMethods.guiMethods[item][i + 3]: - method_on = LoadedMethods.guiMethods[item][i + 3](dv.Get) - else: - method_on = True - if method_on: - temp = [item] - for j in range(3): - temp.append(LoadedMethods.guiMethods[item][i + j]) - results.append(temp) - dv.close() - return results - - # get available sessions - @staticmethod - def serv_get_sessions(cls): - result = [] - fd = open(cls.sids_file, 'r') - while 1: - try: - # read all on one record - list_sid = pickle.load(fd) - except (KeyError, IOError, EOFError): - break - # if session id found - result.append(str(list_sid[0])) - fd.close() - return result - - # check client alive - @staticmethod - def client_alive(cls, sid, SIDS_DIR): - sid_path = SIDS_DIR + "/%d.sid" % sid - if not os.path.isfile(sid_path): - return 1 - with cls.sid_locker: - with open(sid_path) as fd: - # read information about session - sid_inf = pickle.load(fd) - # flag absence client - fd.close() - if sid_inf[2] == 1: - return 0 - else: - return 1 - - class Common(CommonMethods, MethodsInterface): - """ class to interact with the processes """ - - def __init__(self, process_dict, progress_dict, table_dict, - frame_list, pid): - self.process_dict = process_dict - self.progress_dict = progress_dict - self.progress_dict['id'] = 0 - self.table_dict = table_dict - self.frame_list = frame_list - self.pid = pid - self.Num = 100000 - - def pauseProcess(self): - from 
    def resumeProcess(self):
        """Mark the paused task as running again and persist the change."""
        from calculate.core.server.gen_pid import ProcessStatus

        self.method_status = ProcessStatus.Worked
        self.writeFile()

    def writeFile(self):
        """ write data in file """
        # Persists this task's metadata (name, mode, OS pid, status and an
        # optional builder id) as a pickle into <pids>/<task-pid>.pid.
        from .baseClass import Basic
        from calculate.core.server.gen_pid import ProcessMode

        if not os.path.exists(Basic.pids):
            makeDirectory(Basic.pids)
        build_id = ""
        try:
            from calculate.builder.variables.action import Actions

            # only builder actions carry a build id; anything else leaves it ""
            if self.clVars.Get('cl_action') in Actions.All:
                build_id = self.clVars.Get('builder.cl_builder_id')
        except Exception as e:
            # best-effort: a missing builder package is not an error here
            if isinstance(e, KeyboardInterrupt):
                raise

        pid_file = path.join(Basic.pids, '%d.pid' % self.pid)
        try:
            with open(pid_file, 'w') as f:
                d = {'name': self.process_dict['method_name'],
                     'mode': ProcessMode.CoreDaemon,
                     'os_pid': os.getpid(),
                     'status': self.process_dict['status'],
                     'id': build_id
                     }
                pickle.dump(d, f)
        except (IOError, OSError) as e:
            print(str(e))
            print(_("Failed to write the PID file %s!") % pid_file)

    def setProgress(self, perc, short_message=None, long_message=None):
        """Publish progress for the bar registered under progress_dict['id']."""
        try:
            id = self.progress_dict['id']
            self.progress_dict[id] = ReturnProgress(perc, short_message,
                                                    long_message)
        except IOError:
            # shared manager dict may already be gone at shutdown
            pass

    def setStatus(self, stat):
        """Set the task status code in the shared process dict."""
        self.process_dict['status'] = stat

    def setData(self, dat):
        """Replace the task's attached data list."""
        self.data_list = dat

    def getStatus(self):
        """Return the task status code, or -1 if the shared dict is gone."""
        try:
            return self.process_dict['status']
        except IOError:
            return -1

    def getProgress(self):
        """Return the current progress percentage (0 when unavailable)."""
        try:
            id = self.progress_dict['id']
            if id in self.progress_dict:
                return self.progress_dict[id].percent
        except IOError:
            pass
        return 0

    def getAnswer(self):
        """Block until the client posts an answer, then consume and return it."""
        import time

        # polled slot: the server side fills process_dict['answer'] for us
        while self.process_dict['answer'] is None:
            time.sleep(0.5)
        res = self.process_dict['answer']
        self.process_dict['answer'] = None
        # drop the question frame and the pending-question counter
        self.frame_list.pop(len(self.frame_list) - 1)
        self.process_dict['counter'] -= 1

        return res

    def addProgress(self, message=""):
        """Register a new progress bar and announce it to the client."""
        # draw an unused random id
        id = random.randint(1, self.Num)
        while id in self.progress_dict:
            id = random.randint(1, self.Num)
        self.progress_dict['id'] = id
        self.progress_dict[id] = ReturnProgress(0, '', '')
        self.addMessage(type='progress', id=id)

    def printTable(self, table_name, head, body, fields=None,
                   onClick=None, addAction=None, step=None,
                   records=None):
        """Send a table message to the client; body cells are stringified."""
        id = random.randint(1, self.Num)
        while id in self.table_dict:
            id = random.randint(1, self.Num)

        from .api_types import Table

        # NOTE(review): nested map() is lazy under Python 3 — confirm the
        # Table consumer materializes it (this is Python-2 era code)
        table = Table(head=head, body=map(lambda x: map(str, x), body),
                      fields=fields,
                      onClick=onClick, addAction=addAction, step=step,
                      values=None, records=records)
        self.table_dict[id] = table
        self.addMessage(type='table', message=table_name, id=id)

    def addMessage(self, type='normal', message=None, id=None,
                   onlyShow='', default=None):
        """Append a Message frame; ANSI colors and control chars are stripped."""
        from .api_types import Message

        # strips ANSI color escapes such as "[1;32m"
        re_clean = re.compile('\[(?:\d+;)?\d+m')
        # NOTE(review): filter() over a str is Python-2 semantics; under
        # Python 3 re_clean.sub would receive an iterator — confirm targets
        messageObj = Message(
            type=type,
            message=(
                None if message in (None, True, False)
                else re_clean.sub('', filter(lambda x: x >= ' ', message))),
            result=message if message in (True, False) else None,
            id=id, onlyShow=onlyShow, default=default)
        try:
            self.frame_list.append(messageObj)
        except BaseException as e:
            if isinstance(e, KeyboardInterrupt):
                raise
            # fall back to stdout when the shared frame list is unusable
            print(_(("%s:" % type) + str(message)))

    def dropProgress(self):
        """Finish or cancel the current progress bar before plain output."""
        perc = self.getProgress()
        if perc == 0:
            self.setProgress(100)
        elif self.getProgress() > 0:
            # a negative value tells the client the bar was aborted
            self.setProgress(0 - self.getProgress())
        else:
            # self.setProgress(-100)
            self.setProgress(perc)

    def printSUCCESS(self, message='', onlyShow=None):
        """Emit a normal (success) message frame."""
        self.dropProgress()
        self.addMessage(type='normal', message=message,
                        onlyShow=onlyShow)

    def printPre(self, message='', onlyShow=None):
        """Emit a preformatted-text message frame."""
        self.dropProgress()
        self.addMessage(type='pre', message=message,
                        onlyShow=onlyShow)

    def printDefault(self, message='', onlyShow=None):
        """Emit a plain message frame."""
        self.dropProgress()
        self.addMessage(type='plain', message=message,
                        onlyShow=onlyShow)

    def printWARNING(self, message, onlyShow=None):
        """Emit a warning message frame."""
        self.dropProgress()
        self.addMessage(type='warning', message=message,
                        onlyShow=onlyShow)

    def printERROR(self, message='', onlyShow=None):
        """Emit an error message frame."""
        self.dropProgress()
        self.addMessage(type='error', message=message,
                        onlyShow=onlyShow)

    def startTask(self, message, progress=False, num=1):
        """Announce the start of task *num*, optionally with a progress bar."""
        if progress:
            self.addMessage(type='startTask', message=message, id=num)
            self.addProgress()
        else:
            self.addMessage(type='startTask', message=message, id=num)

    def setTaskNumber(self, number=None):
        """Tell the client how many tasks to expect."""
        self.addMessage(type='taskNumber', message=str(number))

    def endTask(self, result=None, progress_message=None):
        """Announce task completion and close its progress bar."""
        self.addMessage(type='endTask', message=result)
        self.setProgress(100, progress_message)

    def askConfirm(self, message, default="yes"):
        """Ask a yes/no confirmation; an empty answer yields *default*."""
        self.addMessage(type='confirm', message=message, default=default)
        ret = self.getAnswer()
        if ret == "":
            return default
        return ret

    def isInteractive(self):
        """This front-end can ask the user questions."""
        return True

    def askChoice(self, message, answers=(("yes", "Yes"), ("no", "No"))):
        """Ask the client to pick one of *answers* (value(label) pairs)."""
        self.addMessage(type='choice', message="%s|%s" % (
            message,
            ",".join(map(lambda x: "%s(%s)" % (x[0], x[1]), answers))))
        return self.getAnswer()

    def askQuestion(self, message):
        """Ask a free-form question and wait for the reply."""
        self.addMessage(type='question', message=message)
        return self.getAnswer()

    def askPassword(self, message, twice=False):
        """Ask for a password; id carries how many prompts (2 when *twice*)."""
        pas_repeat = 2 if twice else 1
        self.addMessage(type='password', message=message,
                        id=pas_repeat)
        return self.getAnswer()

    def beginFrame(self, message=None):
        """Open a new output frame on the client."""
        self.addMessage(type='beginFrame', message=message)

    def endFrame(self):
        """Close the current output frame."""
        self.addMessage(type='endFrame')

    def startGroup(self, message):
        """Open a named group of messages."""
        self.addMessage(type='startGroup', message=message)

    def endGroup(self):
        """Close the current message group."""
        self.addMessage(type='endGroup')

    # def cache(self, param):
    #     sid = self.process_dict['sid']
    #     self.args[sid] = collections.OrderedDict()
    @staticmethod
    def startprocess(cls, sid, target=None, method=None, method_name=None,
                     auto_delete=False, args_proc=()):
        """ start process """
        # Allocate a task pid, build the shared (Manager-backed) state, then
        # run target.<method> in a separate OS process via target_helper.
        pid = cls.gen_pid(cls)
        cls.add_sid_pid(cls, sid, pid)

        import multiprocessing

        # a single shared Manager is lazily created and reused for all tasks
        if cls.manager is None:
            cls.manager = multiprocessing.Manager()

        # Manager for sending glob_process_dict between watcher and process
        # manager = multiprocessing.Manager()
        cls.glob_process_dict[pid] = cls.manager.dict()
        cls.glob_process_dict[pid]['sid'] = sid
        cls.glob_process_dict[pid]['status'] = 0
        cls.glob_process_dict[pid]['time'] = ""
        cls.glob_process_dict[pid]['answer'] = None
        cls.glob_process_dict[pid]['name'] = ""
        cls.glob_process_dict[pid]['flag'] = 0
        cls.glob_process_dict[pid]['counter'] = 0

        cls.glob_frame_list[pid] = cls.manager.list()
        cls.glob_progress_dict[pid] = cls.manager.dict()
        cls.glob_table_dict[pid] = cls.manager.dict()

        # create object Common and send parameters
        com = target(cls.glob_process_dict[pid],
                     cls.glob_progress_dict[pid],
                     cls.glob_table_dict[pid],
                     cls.glob_frame_list[pid], pid)

        # mixin layout: run the second base class' initializer as well
        if len(com.__class__.__bases__) > 1 and \
                hasattr(com.__class__.__bases__[1], '__init__'):
            com.__class__.__bases__[1].__init__(com)
        # start helper
        com.method_name = method_name
        p = multiprocessing.Process(target=cls.target_helper,
                                    args=(cls, com, getattr(com, method)) +
                                         (method_name,) + args_proc)

        cls.process_pid[pid] = p
        p.start()
        if auto_delete:
            # start watcher (for kill process on signal)
            watcher = threading.Thread(target=cls.watcher_pid_proc,
                                       args=(cls, sid, pid))

            watcher.start()
        # callers expect the task id as a string
        return str(pid)
method_name: - com.process_dict['method_name'] = method_name - com.process_dict['name'] = target_proc.__func__.__name__ - - try: - result = target_proc(*args_proc) - except Exception: - result = False - print_exc() - fd = open(cls.log_filename, 'a') - print_exc(file=fd) - fd.close() - try: - if result is True: - com.setStatus(0) - com.writeFile() - elif result is False: - if com.getStatus() == 1: - com.setStatus(2) - com.writeFile() - else: - if com.getStatus() == 1: - com.setStatus(2) - else: - com.setStatus(0) - com.writeFile() - try: - if 0 < com.getProgress() < 100: - com.setProgress(0 - com.getProgress()) - if len(com.frame_list): - last_message = com.frame_list[len(com.frame_list) - 1] - if last_message.type != 'endFrame': - com.endFrame() - else: - com.endFrame() - except IOError: - pass - - except Exception: - print_exc() - fd = open(cls.log_filename, 'a') - print_exc(file=fd) - fd.close() - com.endFrame() - - def serv_view_cert_right(cls, cert_id, data_path, client_type=None): - """ rights for the selected certificate """ - try: - cert_id = int(cert_id) - except ValueError: - return ["-2"] - cert_file = data_path + '/client_certs/%s.crt' % str(cert_id) - if not os.path.exists(cert_file): - return ["-1"] - cert = readFile(cert_file) - - # try: - import OpenSSL - - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, cert) - com = certobj.get_extension( - certobj.get_extension_count() - 1).get_data() - groups = com.split(':')[1] - groups_list = groups.split(',') - # except: - # return ['-1'] - results = [] - find_flag = False - # if group = all and not redefined group all - if 'all' in groups_list: - fd = open(cls.group_rights, 'r') - t = fd.read() - # find all in group_rights file - for line in t.splitlines(): - if not line: - continue - if line.split()[0] == 'all': - find_flag = True - break - if not find_flag: - result = [] - if client_type == 'console': - for meth_list in cls.return_conMethod(): - for right in 
LoadedMethods.rightsMethods[meth_list[1]]: - result.append(right) - else: - for meth_list in cls.return_guiMethod(): - for right in LoadedMethods.rightsMethods[meth_list[1]]: - result.append(right) - result = uniq(result) - results = result - - if 'all' not in groups_list or find_flag: - if not os.path.exists(cls.group_rights): - open(cls.group_rights, 'w').close() - with open(cls.group_rights) as fd: - t = fd.read() - for line in t.splitlines(): - if not line: - continue - try: - words = line.split(' ', 1) - if len(words) < 2: - continue - # first word in line equal name input method - if words[0] in groups_list: - methods = words[1].split(',') - for i in methods: - results.append(i.strip()) - except IndexError: - print('except IndexError in serv_view_cert_right') - continue - results = uniq(results) - - add_list_rights = [] - del_list_rights = [] - - with open(cls.rights) as fr: - t = fr.read() - for line in t.splitlines(): - words = line.split() - meth = words[0] - for word in words: - try: - word = int(word) - except ValueError: - continue - # compare with certificat number - if cert_id == word: - # if has right - add_list_rights.append(meth) - if cert_id == -word: - del_list_rights.append(meth) - - results += add_list_rights - results = uniq(results) - - for method in results: - if method in del_list_rights: - results.remove(method) - - if not results: - results.append("No Methods") - return results - - @staticmethod - def get_lang(cls, sid, method_name=""): - """ get clients lang """ - # print("-------------------------------------") - # print("DEBUG get_lang") - - lang = None - SIDS_DIR = cls.sids - with cls.sid_locker: - - sid_file = SIDS_DIR + "/%d.sid" % int(sid) - if os.path.exists(sid_file): - # fd = open(sid_file, 'r') - - with open(sid_file, 'r') as fd: - try: - # print("file exists: %s " % sid_file) - # print("sid: %s" % sid) - list_sid = pickle.load(fd) - list_sid_sid = int(list_sid[0]) - # print(list_sid) - # print(list_sid_sid) - # print(sid) - # 
print(sid == list_sid_sid) - # print(type(list_sid_sid)) - # print(type(sid)) - if sid == list_sid_sid: - # print("SID FOUND") - lang = list_sid[3] - except IOError: - print("some io error") - pass - except KeyError: - print("Key error") - pass - except EOFError: - print("EOF error") - pass - try: - # print(lang) - if lang.lower() not in ('uk', 'fr', 'ru', 'en'): - lang = "en" - except AttributeError: - # print("Attr error") - lang = "en" - import locale - - try: - lang = locale.locale_alias[lang.lower()] - # print("lang got from locale: %s" % lang) - except (TypeError, AttributeError, IndexError, KeyError): - lang = locale.locale_alias['en'] - # print("Setting lang: %s " % lang) - return lang - - -def create_symlink(data_path, old_data_path): - meths = LoadedMethods.conMethods - path_to_link = '/usr/bin' - core_wrapper = "/usr/libexec/calculate/cl-core-wrapper" - #path_to_user_link = '/usr/bin' - old_symlinks_file = os.path.join(old_data_path, 'conf/symlinks') - symlinks_file = os.path.join(data_path, 'conf/symlinks') - if not os.path.exists(os.path.join(data_path, 'conf')): - try: - os.makedirs(os.path.join(data_path, 'conf')) - except OSError: - print (_("cannot create directory %s") - % (os.path.join(data_path, 'conf'))) - if os.path.exists(old_symlinks_file) and not os.path.exists(symlinks_file): - with open(symlinks_file, 'w') as fd: - fd.write(readFile(old_symlinks_file)) - os.unlink(old_symlinks_file) - with open(symlinks_file, 'a') as fd: - for link in meths: - link_path = os.path.join(path_to_link, link) - if os.path.islink(link_path): - continue - if os.path.isfile(link_path): - red = '\033[31m * \033[0m' - print(red + link_path + _(' is a file, not a link!')) - continue - try: - if (os.path.islink(link_path) and - os.readlink(link_path) != core_wrapper): - os.unlink(link_path) - os.symlink(core_wrapper, link_path) - fd.write(link_path + '\n') - except OSError as e: - print(e.message) - print(_('Symlink %s created') % link_path) - - temp_text_file = '' - 
def initialization(cl_wsdl):
    """ find modules for further added in server class """
    # Import calculate.<pkg>.wsdl_<suffix> for every configured package name
    # and collect each module's Wsdl class.
    import importlib

    cl_apis = []
    for pack in cl_wsdl:
        if not pack:
            continue
        module_name = '%s.wsdl_%s' % (pack.replace("-", "."),
                                      pack.rpartition("-")[2])
        cl_wsdl_core = importlib.import_module(module_name)
        try:
            cl_apis.append(cl_wsdl_core.Wsdl)
        except ImportError:
            # NOTE(review): attribute access cannot raise ImportError, so this
            # handler looks unreachable — confirm the intended exception type
            sys.stderr.write(_("Unable to import %s") % module_name)
    return cl_apis


# Creation of secret key of the client
def new_key_req(key, cert_path, serv_host_name, port):
    """Generate an RSA key pair and write a CSR to <cert_path>/server.csr."""
    from .create_cert import (generateRSAKey, makePKey, makeRequest,
                              passphrase_callback)

    rsa = generateRSAKey()
    rsa.save_key(key + '_pub', cipher=None, callback=passphrase_callback)

    pkey = makePKey(rsa)
    pkey.save_key(key, cipher=None, callback=passphrase_callback)

    req = makeRequest(rsa, pkey, serv_host_name, port)
    if not req:
        sys.exit()
    with open(cert_path + '/server.csr', 'w') as crtfile:
        crtfile.write(req.as_pem())


# delete dublicate from list
def uniq(seq):
    """Return *seq* as a list without duplicates, keeping first-seen order."""
    seen = set()
    unique_items = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            unique_items.append(item)
    return unique_items


class WsdlMeta(type):
    """
    Metaclass that synthesizes service methods from the ``methods`` attribute.
    """
    datavars = {}

    def __new__(mcs, name, bases, attrs):
        # For every entry of "methods" create the <name>, <name>_vars and
        # <name>_view class attributes from the declarative description.
        if "methods" in attrs:
            for method in attrs["methods"]:
                base_name = method['method_name']
                attrs[base_name] = mcs.caller_constructor(**method)
                attrs["%s_vars" % base_name] = mcs.datavars_constructor(
                    **method)
                attrs["%s_view" % base_name] = mcs.view_constructor(**method)
        return type.__new__(mcs, name, bases, attrs)
@classmethod - def close_datavars(mcs): - for dv in WsdlMeta.datavars.values(): - dv.close() - - @classmethod - def create_info_obj(mcs, **kwargs): - """ - Создание передаваемой структуры данных для WSDL - """ - def type_convert(s): - if "bool3" in s: - return String - if "bool" in s: - return Boolean - elif "table" in s: - return Array(Array(String)) - elif "list" in s: - return Array(String) - else: - return String - - d = {} - d["cl_console_args"] = Array(String) - if kwargs['datavars'] in WsdlMeta.datavars: - dv = WsdlMeta.datavars[kwargs['datavars']] - else: - dv = DataVars() - dv.importVariables() - dv.importVariables('calculate.%s.variables' % kwargs['datavars']) - dv.defaultModule = kwargs['datavars'] - WsdlMeta.datavars[kwargs['datavars']] = dv - - def group(*args, **kwargs): - for v in chain(kwargs.get('normal', ()), kwargs.get('expert', ())): - varname = v.rpartition(".")[2] - d[varname] = type_convert(dv.getInfo(v).type) - - for gr in kwargs['groups']: - gr(group) - # if "brief" in kwargs: - #if "cl_page_count" not in d: - if True: - d["CheckOnly"] = Boolean - return d - - @classmethod - def caller_constructor(mcs, **kwargs): - """ - Конструктор для создания метода-вызова для действия - """ - # @staticmethod - def wrapper(cls, sid, info): - # костыль для локализации install - callback_refresh = ( - cls.fixInstallLocalization - if kwargs['method_name'] == 'install' else lambda cls, dv, sid: True) - return cls.callAction(cls, sid, info, logicClass=kwargs['logic'], - actionClass=kwargs['action'], - method_name=kwargs['method_name'], - callbackRefresh=callback_refresh, - invalidators=kwargs.get('invalidators', None) - ) - - wrapper.__name__ = kwargs['method_name'] - wrapper = staticmethod(wrapper) - func = LoadedMethods.core_method(category=kwargs.get('category', None), - title=kwargs['title'], - image=kwargs.get('image', None), - gui=kwargs['gui'], - user=kwargs.get('user', False), - command=kwargs.get('command', None), - rights=kwargs['rights'], - 
depends=kwargs.get('depends', ()), - static_method=True)( - wrapper) - if "--start" in sys.argv: - info_obj = mcs.create_info_obj(**kwargs) - info_class = type("%sInfo" % kwargs["method_name"], (CommonInfo,), - info_obj) - - #total hack: carry over info_class for later use - func.__func__.info_class = info_class - # return func - # else: - return func - - @classmethod - def modify_datavars(mcs, dv, data): - """ - Поменять значения в datavars согласно data - """ - # установить заданные значения (!) принудительная установка - for k, v in data.items(): - # если значение функция - if callable(v): - v = v(dv) - else: - if isinstance(v, (str, unicode)): - v = Variable._value_formatter.format(v, dv.Get) - dv.Set(k.strip('!'), v, force=k.endswith('!')) - - @classmethod - def view_constructor(mcs, **kwargs): - """ - Конструктор для создания метода-представления - """ - # @staticmethod - def wrapper(cls, sid, params): - # print("DEBUG view constructor") - dv = cls.get_cache(sid, kwargs["method_name"], "vars") - # print("got dv from cache: %s " % dv) - lang_changed = False - if kwargs["groups"]: - def group(*args, **kwargs): - if isinstance(kwargs.get('normal', ()), (unicode, str)): - raise DataVarsError(_("Wrong normal varaiables list")) - if isinstance(kwargs.get('expert', ()), (unicode, str)): - raise DataVarsError(_("Wrong expert varaiables list")) - - for gr in kwargs['groups']: - gr(group) - if not dv: - dv = getattr(cls, "%s_vars" % kwargs["method_name"])(cls, - params=params) - # print("got dv from cls: %s" % dv) - if hasattr(params, "clienttype"): - if params.clienttype == 'gui' and "guivars" in kwargs: - mcs.modify_datavars(dv, kwargs['guivars']) - if params.clienttype != 'gui' and "consolevars" in kwargs: - mcs.modify_datavars(dv, kwargs['consolevars']) - dv.Set('main.cl_client_type', params.clienttype, force=True) - else: - # костыль для метода install, который меняет локализацию - # интрефейса в зависимости от выбранного параметра lang - # print("dv groups: %s" % 
bool(dv.getGroups())) - if kwargs["method_name"] == 'install': - lang_changed = cls.fixInstallLocalization(cls, sid, dv) - lang = dv.Get('install.os_install_locale_lang') - cls.set_cache(sid, "install", "lang", lang, smart=False) - # print("dv before refresh groups: %s" % bool(dv.getGroups())) - dv.processRefresh() - # print("dv after refresh groups: %s" % bool(dv.getGroups())) - - cls.set_cache(sid, kwargs["method_name"], "vars", dv, smart=False) - if "brief" in kwargs and "name" in kwargs['brief']: - brief_label = str(kwargs['brief']['name']) - else: - brief_label = None - - if kwargs["groups"]: - # print("group view info creation") - # print(params) - # print(dv) - # print(brief_label) - # print(dv.getGroups()) - # print("dv groups: %s" % bool(dv.getGroups())) - view = ViewInfo(dv, viewparams=params, - has_brief="brief" in kwargs, - allsteps=lang_changed, - brief_label=brief_label) - else: - # print("creating empty viewInfo") - view = ViewInfo() - return view - - wrapper.__name__ = "%s_view" % kwargs['method_name'] - wrapper = staticmethod(wrapper) - - return catchExcept(kwargs.get("native_error", ()))(wrapper) - - @classmethod - def datavars_constructor(mcs, **kwargs): - """ - Конструктор для создания метода описания параметров - """ - @staticmethod - def wrapper(cls, dv=None, params=None): - if not dv: - _dv = DataVars() - _dv.importVariables() - _dv.importVariables( - 'calculate.%s.variables' % kwargs['datavars']) - _dv.defaultModule = kwargs['datavars'] - _dv.flIniFile() - if params and params.help_set: - _dv.Set('cl_help_set', "on", force=True) - if params and params.dispatch_usenew: - _dv.Set('cl_dispatch_conf', "usenew", force=True) - if params: - if params.conargs: - conargs = list(params.conargs) - else: - conargs = [] - if params.dispatch_usenew: - conargs.append("--force") - _dv.Set('cl_console_args', conargs, force=True) - else: - _dv = dv - # созданием группы переменных из datavars согласно параметрам groups - for groupfunc in kwargs['groups']: - 
class CustomButton(object):
    """Factory helpers describing custom UI buttons as plain tuples."""

    @classmethod
    def sort_argv(cls, args):
        """Split *args* into (behavior list or None, last callable or None)."""
        condition = None
        behavior = []
        for candidate in args:
            if callable(candidate):
                condition = candidate
            else:
                behavior.append(candidate)
        if not behavior:
            behavior = None
        return behavior, condition

    @classmethod
    def run_method(cls, method_name, id, label, *args):
        """Describe a button that runs *method_name* directly."""
        behavior, condition = cls.sort_argv(args)
        return id, label, method_name, "button", behavior, condition

    @classmethod
    def open_method(cls, method_name, id, label, *args):
        """Describe a button that opens the view of *method_name*."""
        behavior, condition = cls.sort_argv(args)
        return id, label, method_name, "button_view", behavior, condition

    @classmethod
    def next_button(cls, id=None, label=None):
        """Describe the wizard's 'next' button."""
        return id, label, None, "button_next"

    class Behavior(object):
        """Encoded field-interaction rules attached to a button."""

        @classmethod
        def link(cls, source=None, target=None):
            """Copy the value of *source* into *target*."""
            return "%s=%s" % (target, source)

        @classmethod
        def linkerror(cls, source=None, target=None):
            """Propagate the error state of *source* to *target*."""
            return "%s->%s" % (source, target)

        @classmethod
        def setvalue(cls, variable=None, value=None):
            """Force *variable* to *value*."""
            return "%s!=%s" % (variable, value)
import importlib
import logging
# logger = logging.getLogger(__name__)

# Each listed module contributes a CoreWsdl mixin class implementing one
# slice of the server's functionality.
list_module = ["clean", "gen_pid",
               "sid_pid_file", "gen_sid", "func", "api_types"]

imported_modules = []
pack = "calculate.core.server"
for module_name in list_module:
    imported_modules.append(importlib.import_module(
        '%s.%s' % (pack, module_name)).CoreWsdl)


imported_modules.append(object)
imported = tuple(imported_modules)

# create metaclass, including all methods server class
# (built with type(name, bases, dict): a class combining all CoreWsdl mixins)
Func_MetaClass = type("Func_MetaClass", imported, {})
class ProcessStatus(object):
    """Numeric task states stored in the *.pid status files."""
    SuccessFinished = 0
    Worked = 1
    FailedFinished = 2
    NotFound = 3
    Paused = 4


class ProcessMode(object):
    """How a task was launched: by the core daemon or as a local call."""
    CoreDaemon = "core"
    LocalCall = "local"


def get_symlink_commands():
    """
    Yield the paths of command-line utilities served by cl-core.
    """
    symlinks = "/var/lib/calculate/calculate-core/conf/symlinks"
    for line in readLinesFile(symlinks):
        yield line.strip()
    yield "/usr/bin/cl-core"
    yield "/usr/bin/cl-update"
    yield "/usr/bin/cl-core-patch"


def _load_pid_status(pidfile):
    """Read one pickled *.pid status dict.

    Fix over the original: the file is opened in binary mode (pickle
    requires it under Python 3) and closed deterministically instead of
    being leaked by ``pickle.load(open(pidfile))``.
    """
    with open(pidfile, 'rb') as f:
        return pickle.load(f)


def search_worked_process(method_name, clVars,
                          statuses=(ProcessStatus.Worked,)):
    """
    Find all running tasks.

    Returns the OS pids of tasks whose status is in *statuses*, whose
    method name matches *method_name* (or any, when None), and whose OS
    process still exists and was started from a known cl-* command.
    """

    def generator():
        pids = clVars.Get('core.cl_core_pids_path')
        for pidfile in listDirectory(pids, fullPath=True):
            try:
                status = _load_pid_status(pidfile)
                if ((method_name is None or status['name'] == method_name) and
                        status['status'] in statuses):
                    pid_path = path.join("/proc", str(status['os_pid']))
                    if path.exists(pid_path):
                        cmdline = readFile(path.join(pid_path, "cmdline"))
                        if cmdline and any(x in cmdline
                                           for x in get_symlink_commands()):
                            yield status['os_pid']
            except (socket.error, ValueError, KeyError, EOFError, OSError):
                # unreadable or malformed pid file: skip it
                pass

    return list(generator())


def worked_filter(status):
    """True for status dicts describing a still-running task."""
    return status['status'] == ProcessStatus.Worked
def get_pid_info(clVars, statuses=(ProcessStatus.Worked,)):
    """
    Collect the status dicts of tasks whose status is in *statuses* and
    whose OS process still exists.

    Fix over the original: pid files are opened in binary mode (pickle
    requires it under Python 3) and closed deterministically instead of
    being leaked by ``pickle.load(open(pidfile))``.
    """

    def generator():
        pids = clVars.Get('core.cl_core_pids_path')
        for pidfile in listDirectory(pids, fullPath=True):
            try:
                with open(pidfile, 'rb') as f:
                    status = pickle.load(f)
                if status['status'] in statuses:
                    if path.exists(path.join("/proc", str(status['os_pid']))):
                        yield status
            except (socket.error, ValueError, KeyError, EOFError, OSError):
                # unreadable or malformed pid file: skip it
                pass

    return list(generator())


def clear_finished_pids(clVars):
    """
    Remove the pid files of tasks whose OS process no longer exists.
    """
    pids = clVars.Get('core.cl_core_pids_path')
    for pidfile in listDirectory(pids, fullPath=True):
        try:
            with open(pidfile, 'rb') as f:
                d = pickle.load(f)
            # still running: keep its pid file
            if path.exists(path.join("/proc", str(d['os_pid']))):
                continue
        except Exception:
            # unreadable pid file: treat as stale and remove below
            pass
        with ignore(OSError):
            os.unlink(pidfile)
with join we lose the last newline char - fd.close() - cls.glob_process_dict.pop(pid) - cls.glob_progress_dict.pop(pid) - cls.glob_table_dict.pop(pid) - cls.glob_frame_list.pop(pid) - with ignore(OSError): - rm_fn = path.join(cls.pids, "%d.pid" % pid) - if path.exists(rm_fn): - os.unlink(rm_fn) - return 0 - except Exception: - return 1 - - # find process id in file processes, 1 - yes, 0 - none - @staticmethod - def find_pid_in_file(cls, find_pid): - temp_line = '' - - # create, if file not exists - if not os.path.exists(cls.pids_file): - temp = open(cls.pids_file, 'w') - temp.close() - with open(cls.pids_file) as fd: - t = fd.read() - # for each line - for line in t.splitlines(): - try: - temp_line = int(line) - except ValueError: - pass - # if process id found - if temp_line == find_pid: - return 1 - fd.close() - return 0 - - # add process id in file - @staticmethod - def add_pid_in_file(cls, pid): - pid_t = str(pid) - fd = open(cls.pids_file, 'a') - fd.write(pid_t) - fd.write('\n') - fd.close() - return 0 - - # issue new pid for created process - @staticmethod - def gen_pid(cls): - while True: - new_pid = random.randint(1, cls.max_pid) - # flag = 1 - exists, 0 - missing in PID_FILE - if cls.find_pid_in_file(cls, new_pid) == 0: - cls.add_pid_in_file(cls, new_pid) - return new_pid - - @staticmethod - def check_sid_cert(cls, sid): - curThread = threading.currentThread() - certificate = curThread.client_cert - cert_id = find_cert_id(certificate, cls.data_path, cls.certbase) - - # if certificate not found in database - if cert_id == 0: - return -1 - # check, This certificate is launched session - # Data taken from sid.db - flag = 0 - # create, if file not exists - if not os.path.exists(cls.sids_file): - return 0 - # temp = open(cls.sids_file, 'w') - # temp.close() - fd = open(cls.sids_file, 'r') - while 1: - try: - # read all on one record - list_sid = pickle.load(fd) - except (EOFError, KeyError, IOError): - break - # when session id equal readable... 
- if int(sid) == int(list_sid[0]): - # ... and certificate id equal launched this session... - if int(cert_id) == int(list_sid[1]): - # ... set flag - flag = 1 - fd.close() - # if validation fails - return flag - - @staticmethod - def serv_pid_kill(cls, pid, sid): - """ Set flag to complete the process """ - check_sid = cls.check_sid_cert(cls, sid) - if not check_sid: - return -2 - - # write complete flag (pid_list[6] = 1) in process file - if not os.path.exists(cls.pids): - os.mkdir(cls.pids) - if pid not in cls.process_pid: - return 3 - meth = cls.process_pid[pid] - if meth.is_alive(): - try: - os.kill(meth.pid, 2) - except OSError as e: - print('No such process %d' % meth.pid, e) - return 1 - return 0 diff --git a/libs_crutch/core/server/gen_sid.py b/libs_crutch/core/server/gen_sid.py deleted file mode 100644 index 2d64d4e..0000000 --- a/libs_crutch/core/server/gen_sid.py +++ /dev/null @@ -1,260 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Session management -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
from __future__ import absolute_import
import os
import datetime
import threading
import random
import pickle
from calculate.core.server.core_interfaces import CoreServiceInterface
from .cert_cmd import find_cert_id


class CoreWsdl(CoreServiceInterface):
    # guards every read/write of the pickled session files below
    sid_locker = threading.Lock()

    # delete client session from file (close session)
    @staticmethod
    def del_sid_from_file(cls, sid):
        """Remove session *sid* from cls.sids_file.

        Rewrites the stream of pickled session records through a temporary
        file and copies it back.  Returns ['0'] on success, ['1'] on error.
        NOTE(review): pickling over text-mode files is Python-2 era and
        breaks under Python 3 — confirm before porting.
        """
        try:
            # temp file
            sid_file = cls.sids_file
            sid_file_t = sid_file + 'temp'

            with cls.sid_locker:
                fd = open(sid_file, 'r')
                ft = open(sid_file_t, 'w')
                while True:
                    try:
                        # read all on one record
                        list_sid = pickle.load(fd)
                    except (EOFError, IOError, KeyError):
                        break
                    # Leave all but removed
                    if sid != list_sid[0]:
                        pickle.dump(list_sid, ft)
                fd.close()
                ft.close()

                # copy all from temp file
                ft = open(sid_file_t, 'rb')
                fd = open(sid_file, 'wb')
                ft.seek(0)
                fd.write(ft.read())
                ft.close()
                fd.close()
                # delete temp file
                os.unlink(sid_file_t)
            return ['0']
        except (IOError, EOFError, KeyError, OSError):
            return ['1']

    # find session id in file
    @staticmethod
    def find_sid_in_file(cls, sid):
        """Return 1 when *sid* is recorded in cls.sids_file, else 0."""
        sid_file = cls.sids_file
        # create, if file not exists
        with cls.sid_locker:
            if not os.path.exists(sid_file):
                temp = open(sid_file, 'w')
                temp.close()
            fd = open(sid_file, 'r')
            while True:
                try:
                    # read all on one record
                    list_sid = pickle.load(fd)
                except (EOFError, IOError, KeyError):
                    break
                # if session id found
                if sid == list_sid[0]:
                    fd.close()
                    return 1
            fd.close()
            return 0
[sid, 0, 0, lang] - pickle.dump(sid_list, fp) - fp.close() - - # add session in list sessions - fd = open(cls.sids_file, 'a') - pickle.dump(list_sid, fd) - fd.close() - return 0 - - @staticmethod - def set_sid_lang(cls, sid, lang): - sids_dir = cls.sids - sid_file = os.path.join(sids_dir, "%d.sid" % sid) - with cls.sid_locker: - if not os.path.isfile(sid_file): - fp = open(sid_file, 'w') - fp.close() - fd = open(sid_file, 'r') - try: - list_sid = pickle.load(fd) - except (EOFError, KeyError, IOError): - list_sid = [sid, 0, 0, lang] - fd.close() - - fp = open(sid_file, 'w') - list_sid[3] = lang - pickle.dump(list_sid, fp) - fp.close() - - # issue number of new session (and registered its) - @staticmethod - def sid_cmp(cls, sid, cert_id, lang): - if sid < 0 or sid > cls.max_sid: - sid = 0 - session = 1 - # if session is new - if sid == 0: - while True: - new_sid = random.randint(1, cls.max_sid) - # flag = 1 - exists, 0 - missing in SID_FILE - if cls.find_sid_in_file(cls, new_sid) == 0: - cls.add_sid_in_file(cls, new_sid, cert_id, lang) - sid = new_sid - break - # if session is old - else: - # find number in file registered - # if not registered - if not cls.find_sid_in_file(cls, sid): - # add session id in file - cls.add_sid_in_file(cls, sid, cert_id, lang) - else: - cls.set_sid_lang(cls, sid, lang) - # set - old session - session = 0 - - # session id and flag (new or old) session - return [sid, session] - - @staticmethod - def serv_init_session(cls, sid, lang): - day_cert = 600 - cur_thread = threading.currentThread() - certificate = cur_thread.client_cert - if certificate is None: - return [-3], [0] - - checked_id = find_cert_id(certificate, cls.data_path, cls.certbase) - try: - if int(checked_id) < 1: - return [-4], [0] - except ValueError: - return [-4], [0] - results = [] - cert_id = checked_id - with open(cls.certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - # if in line present 
certificate id - if len(words) > 3: - if words[0] == checked_id: - results.append(checked_id) - date = datetime.datetime.strptime( - words[2] + ' ' + words[3], '%Y-%m-%d %H:%M:%S.%f') - d = datetime.datetime.now() - date - v = day_cert - d.days # How many days left certificate - if v < 0: - # Method deleted certificate - v = -2 # expiry date has passed - elif v > 60: # For a long time, is not displayed to - # the client - v = -1 - results.append(v) - # return results - if not results: - return [-4], [0] - - return results, cls.sid_cmp(cls, sid, cert_id, lang) - - @staticmethod - def serv_sid_info(cls, sid): - """ Get information about sid """ - cert_id = 0 - results = [] - sid_file = cls.sids_file - with cls.sid_locker: - fd = open(sid_file, 'r') - while 1: - try: - # read all on one record - list_sid = pickle.load(fd) - except (IOError, KeyError, EOFError): - break - # if sid found - if sid == list_sid[0]: - cert_id = list_sid[1] - fd.close() - - # Get information about sid - if cert_id == 0: - return ["-1"] - - with cls.sid_locker: - with open(cls.certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - # if in line present certificate id - if words[0] == str(cert_id): - # certificate id - results.append(words[0]) - # Date issue certificate - results.append(words[2] + ' ' + words[3]) - # ip - results.append(words[4]) - # mac - results.append(words[5]) - # client type - results.append(words[6]) - if not os.path.exists(cls.sids): - os.makedirs(cls.sids) - sid_path = cls.sids + "/%d.sid" % sid - with open(sid_path) as fs: - # read info about session - sid_inf = pickle.load(fs) - # flag absence client - results.append(str(sid_inf[2])) - - return results - return ["-2"] diff --git a/libs_crutch/core/server/groups.py b/libs_crutch/core/server/groups.py deleted file mode 100644 index 95a7d9a..0000000 --- a/libs_crutch/core/server/groups.py +++ /dev/null @@ -1,133 +0,0 @@ -# -*- coding: utf-8 -*- 
- -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.core.server.core_interfaces import MethodsInterface - -from calculate.lib.utils.files import readLinesFile -from calculate.lib.utils.common import getPagesInterval -from calculate.lib.cl_lang import getLazyLocalTranslate, setLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class Groups(MethodsInterface): - """ - Объект работы с группами прав - """ - - def show_groups_meth(self, page_count, page_offset): - """ - Отобразить таблицу с группами - """ - dv = self.clVars - list_group_name = sorted(dv.Choice('cl_core_group')) - - if not list_group_name: - self.printSUCCESS(_("No groups")) - - head = [_('Groups'), _('Permissions')] - body = [] - fields = ['cl_core_group', ''] - - for group in list_group_name[page_offset:page_offset + page_count]: - dv.Set('cl_core_group', group) - group_rights = ', '.join(dv.Get('cl_core_group_rights')) - body.append([group, group_rights]) - - if body: - self.printTable(_("List of available groups"), head, body, - fields=fields, onClick='core_detail_group', - addAction='core_group_add') - num_page, count_page = getPagesInterval(page_count, page_offset, - len(list_group_name)) - self.printSUCCESS(_('page %d from ') % num_page + str(count_page)) - return True - - def change_group_meth(self, cl_group_name, cl_group_rights, - 
group_rights_file): - """ - Изменить группу - """ - changed_flag = False - result = [] - - for line in readLinesFile(group_rights_file): - if line.startswith('#') or not line: - result.append(line) - continue - words = line.split(' ', 1) - # first word in line equal name input method - if words[0] == cl_group_name: - line = cl_group_name + ' ' + ','.join(cl_group_rights) - changed_flag = True - result.append(line) - - if cl_group_name == 'all' and not changed_flag: - result.append(cl_group_name + ' ' + ','.join(cl_group_rights)) - - fd = open(group_rights_file, 'w') - for lines in result: - fd.write(lines + '\n') - fd.close() - return True - - def add_group_meth(self, cl_group_name, cl_group_rights, group_rights_file): - """ - Добавить группу - """ - result = [] - for line in readLinesFile(group_rights_file): - if line.startswith('#') or not line: - result.append(line) - continue - words = line.split(' ', 1) - # first word in line equal name input method - if words[0] == cl_group_name: - self.printERROR(_('Group %s already exists!') - % cl_group_name) - return False - result.append(line) - - result.append(cl_group_name + ' ' + ",".join(cl_group_rights)) - - fd = open(group_rights_file, 'w') - for lines in result: - fd.write(lines + '\n') - fd.close() - return True - - def del_group_meth(self, cl_group_name, group_rights_file): - """ - Удалить группу - """ - result = [] - for line in readLinesFile(group_rights_file): - if line.startswith('#') or not line: - result.append(line) - continue - words = line.split(' ', 1) - # first word in line equal name input method - if words[0] != cl_group_name: - result.append(line) - - fd = open(group_rights_file, 'w') - for lines in result: - fd.write(lines + '\n') - fd.close() - return True diff --git a/libs_crutch/core/server/loaded_methods.py b/libs_crutch/core/server/loaded_methods.py deleted file mode 100644 index 186d820..0000000 --- a/libs_crutch/core/server/loaded_methods.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: 
utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from collections import defaultdict - - -class LoadedMethods(object): - conMethods = {} - guiMethods = {} - rightsMethods = {} - methodDepends = defaultdict(list) - - # add needed rights for call method - @classmethod - def check_permissions(cls, add_right=(), static_method = False): - def wrapper(function): - name = function.__func__.__name__ if static_method else function.__name__ - cls.rightsMethods[name] = add_right - return function - - return wrapper - - # add needed rights for call method - @classmethod - def core_method(cls, gui=False, category=None, title=None, image=None, - command=None, rights=(), user=False, depends=(), static_method = False): - def wrapper(function): - name = function.__func__.__name__ if static_method else function.__name__ - if gui: - if category not in cls.guiMethods.keys(): - cls.guiMethods[category] = [] - cls.guiMethods[category].append(name) - cls.guiMethods[category].append(title) - cls.guiMethods[category].append(image) - if callable(gui): - cls.guiMethods[category].append(gui) - else: - cls.guiMethods[category].append(None) - - if command: - if command not in cls.conMethods.keys(): - cls.conMethods[command] = [] - cls.conMethods[command].append(name) - cls.conMethods[command].append(user) - if title: - cls.conMethods[command].append(title) - for depend in depends: - 
cls.methodDepends[depend].append(name) - - if rights: - cls.rightsMethods[name] = rights - if not name.endswith('_view'): - cls.rightsMethods[name + '_view'] = rights - return function - - return wrapper diff --git a/libs_crutch/core/server/local_call.py b/libs_crutch/core/server/local_call.py deleted file mode 100644 index 0148658..0000000 --- a/libs_crutch/core/server/local_call.py +++ /dev/null @@ -1,678 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from __future__ import print_function -from __future__ import absolute_import -import pickle - -import sys -import termios -import os -from os import path -from fcntl import ioctl -from array import array -import threading -import argparse -import re - -from calculate.lib.utils.colortext import get_terminal_print -from calculate.lib.utils.colortext.palette import TextState -from calculate.lib.utils.text import tableReport -from calculate.lib.cl_print import color_print -from calculate.lib.datavars import VariableError, CriticalError -from calculate.lib.cl_lang import setLocalTranslate -from calculate.core.server.api_types import FieldAdapter -from calculate.lib.utils.tools import ignore -from calculate.lib.utils.files import makeDirectory -from calculate.core.result_viewer import ResultViewer -from calculate.core.server.loaded_methods import LoadedMethods -from calculate.core.result_viewer_gui import ProgressGui, ErrorGui, WarningGui -from .gen_pid import ProcessStatus - -from .methods_func import (get_method_argparser, collect_object, - check_result_msg, get_param_pwd, _print, - display_error) -from .api_types import ViewInfo, ViewParams -from .cert_cmd import parse -from .methods_func import GotErrorField -from .func import shortTraceback, CommonMethods, CommonLink - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -class LocalCall(object): - method_status = ProcessStatus.NotFound - no_progress = None - gui_progress = None - gui_warning = None - no_questions = None - - @staticmethod - def startprocess(cls, sid, target=None, method=None, method_name=None, - auto_delete=False, args_proc=()): - """ start process """ - if "LANG" in os.environ: - curThread = threading.currentThread() - curThread.lang = os.environ["LANG"] - com = target(cls.no_progress, cls.gui_progress, cls.gui_warning, - cls.no_questions) - if len(com.__class__.__bases__) > 1 and \ - hasattr(com.__class__.__bases__[1], '__init__'): - com.__class__.__bases__[1].__init__(com) 
- com.method_name = method_name - com.method_status = ProcessStatus.Worked - if getattr(com, method)(*args_proc): - cls.method_status = ProcessStatus.SuccessFinished - else: - cls.method_status = ProcessStatus.FailedFinished - com.method_status = cls.method_status - cls.del_pid_file(cls, os.getpid(), com.clVars) - return 0 - - @staticmethod - def del_pid_file(cls, pid, clVars=None): - if clVars: - pids = clVars.Get('core.cl_core_pids_path') - else: - pids = '/tmp' - pid_file = path.join(pids, '%d.pid' % pid) - with ignore(OSError): - if path.exists(pid_file): - os.unlink(pid_file) - - class Common(CommonMethods, CommonLink): - """ class to interact with the processes """ - - def __init__(self, no_progress, gui_progress, gui_warning, - no_questions): - self.pid = 0 - self.method_name = "" - self.method_status = ProcessStatus.Worked - self.color_print = color_print() - self.result_viewer = ResultViewer() - if no_questions: - self.result_viewer.set_no_questions() - if no_progress: - self.result_viewer.set_no_progress() - if gui_progress: - self.result_viewer = ErrorGui(ProgressGui(self.result_viewer)) - if gui_warning: - self.result_viewer = WarningGui(self.result_viewer) - self.set_link(self.result_viewer) - - def pauseProcess(self): - self.method_status = ProcessStatus.Paused - self.writeFile() - - def resumeProcess(self): - self.method_status = ProcessStatus.Worked - self.writeFile() - - def writeFile(self): - """ write data in file """ - from calculate.core.server.gen_pid import ProcessMode - - if os.getuid(): - return - pid = os.getpid() - pids = self.clVars.Get('core.cl_core_pids_path') - # пропустить создание файла если идет сборка пакета - if self.clVars.Get('cl_ebuild_phase'): - return - - if self.clVars.Get('cl_root_readonly') == 'on': - return - build_id = "" - try: - from calculate.builder.variables.action import Actions - - if self.clVars.Get('cl_action') in Actions.All: - build_id = self.clVars.Get('builder.cl_builder_id') - except Exception: - pass - if 
not os.path.exists(pids): - makeDirectory(pids) - pid_file = path.join(pids, '%d.pid' % pid) - try: - with open(pid_file, 'w') as f: - d = {'name': self.method_name, - 'mode': ProcessMode.LocalCall, - 'os_pid': pid, - 'status': self.method_status, - 'id': build_id} - pickle.dump(d, f) - except (IOError, OSError) as e: - print(str(e)) - print(_("Failed to write the PID file %s!") % pid_file) - - def isInteractive(self): - """ - Check interactive ability - """ - return sys.stdin.isatty() - - -def cout_progress(string=None): - try: - h, w = array('h', ioctl(sys.stderr, termios.TIOCGWINSZ, '\0' * 8))[:2] - except IOError: - return - sys.stdout.write('\r' + (' ' * w)) - if string: - sys.stdout.write('\r' + string) - else: - sys.stdout.write('\r') - sys.stdout.flush() - - -def local_method(metaObject, args, unknown_args): - """ - Call method from metaclass, check method existing. - - Generate help, for method, run method by 'call_method'. - """ - import os - - sym_link = os.path.basename(sys.argv[0]) - if sym_link != 'cl-core': - if sym_link in LoadedMethods.conMethods.keys(): - args.method = LoadedMethods.conMethods[sym_link][0] - else: - _print(_("Method not found for %s") % sym_link) - sys.exit(1) - - if args.list_methods: - for k, v in sorted(LoadedMethods.conMethods.items(), - key=lambda x: x[1]): - name, user, title = v - print("%s - %s" % (name, title)) - return 0 - - colorPrint = color_print() - # metaObject = metaclass() - method_name = args.method - method_view_name = method_name + '_view' - if args.method and args.help: - force_param = args.no_questions or has_force_arg(unknown_args) - while True: - view_obj = ViewParams() - view_obj.step = None - view_obj.expert = True - view_obj.brief = None - view_obj.onlyhelp = True - view_obj.help_set = True - view_obj.conargs = [x[0] for x in args._get_kwargs() if x[1] is not None] - view_obj.dispatch_usenew = \ - force_param - try: - view = getattr(metaObject, method_view_name)(metaObject, 0, view_obj) - except 
AttributeError: - colorPrint.printERROR(_('Method not found: ') + method_view_name) - return 1 - try: - method_parser = get_method_argparser(view, args, cl_core=True) - except Exception: - # import traceback - # for i in apply(traceback.format_exception, sys.exc_info()): - # sys.stderr.write(i) - # sys.stderr.flush() - metaObject.clear_cache(0, method_name) - return 1 - _unknown_args = method_parser.fixBoolVariables(unknown_args) - _args, _unknown_args = method_parser.parse_known_args(_unknown_args) - if (view_obj.dispatch_usenew == _args.no_questions - or args.no_questions): - method_parser.print_help() - break - else: - force_param = _args.no_questions - metaObject.clear_cache(0, method_name) - else: - try: - call_method(metaObject, args, unknown_args, colorPrint) - metaObject.clear_cache(0, method_name) - return metaObject.method_status - except (VariableError) as e: - colorPrint.printERROR(str(e)) - # colorPrint.printERROR(shortTraceback(*sys.exc_info())) - except (ValueError, CriticalError) as e: - colorPrint.printERROR(str(e)) - # colorPrint.printERROR(shortTraceback(*sys.exc_info())) - except (KeyboardInterrupt, EOFError): - colorPrint.printERROR(_('Manually interrupted')) - except (GotErrorField,): - pass - except Exception: - colorPrint.printERROR(shortTraceback(*sys.exc_info())) - pass - # print 'Error: ', e - metaObject.clear_cache(0, method_name) - - -def call_method(metaObject, args, unknown_args, colorPrint): - """ - Function for call method through metaObject and args - """ - method_name = args.method - stdin_passwd = args.stdin_passwd - method_view_name = method_name + '_view' - metaObject.no_progress = args.no_progress - metaObject.gui_progress = args.gui_progress - metaObject.gui_warning = args.gui_warning - metaObject.no_questions = False - view = None - method_parser = None - - dispatch_usenew = args.no_questions or has_force_arg(unknown_args) - while True: - view_obj = ViewInfo() - view_obj.step = None - view_obj.expert = True - view_obj.brief 
= None - view_obj.onlyhelp = True - view_obj.help_set = False - view_obj.conargs = [x for x in unknown_args] - view_obj.dispatch_usenew = dispatch_usenew - - try: - view = getattr(metaObject, method_view_name)(metaObject, 0, view_obj) - except AttributeError: - colorPrint.printERROR(_('Method not found: ') + method_name) - return None - method_parser = get_method_argparser(view, args, cl_core=True) - - _unknown_args = method_parser.fixBoolVariables(unknown_args) - _args, _unknown_args = method_parser.parse_known_args(_unknown_args) - if (view_obj.dispatch_usenew == _args.no_questions or - args.no_questions): - break - else: - dispatch_usenew = _args.no_questions - metaObject.clear_cache(0, method_name) - - no_questions = dispatch_usenew - - param_object = create_param_object(view) - try: - unknown_args = method_parser.fixBoolVariables(unknown_args) - args, unknown_args = method_parser.parse_known_args(unknown_args) - metaObject.no_questions = no_questions - except SystemExit: - return 1 - except Exception: - import traceback - - for i in traceback.format_exception(*sys.exc_info()): - sys.stderr.write(i) - sys.stderr.flush() - raise - for i in unknown_args: - if i.startswith('-'): - if i in parse(True).parse_known_args()[1]: - _print(_('Unknown parameter'), i) - return 1 - else: - _print(_('Unknown argument'), i) - return 1 - - param_object, steps = collect_object(None, param_object, view, args, - stdin_passwd=stdin_passwd) - if view.has_brief: - setattr(param_object, 'CheckOnly', True) - check_res = {} - while True: - method_result = getattr(metaObject, method_name)(metaObject, 0, param_object) - if not method_result: - print(_('Method not available')) - return None - - if method_result[0].type and method_result[0].type != "pid": - check_res = check_result_msg(method_result, view, check_res, - args) - if not check_res: - return None - else: - param_object = get_param_pwd(check_res, view, - param_object, - stdin_passwd=stdin_passwd) - else: - break - - view_obj = 
ViewInfo() - view_obj.step = None - view_obj.expert = True - view_obj.brief = True - view_obj.onlyhelp = False - view_obj.help_set = False - view_obj.conargs = [x[0] for x in args._get_kwargs() if x[1] is not None] - view_obj.dispatch_usenew = dispatch_usenew - try: - view = getattr(metaObject, method_view_name)(metaObject, 0, view_obj) - except AttributeError: - colorPrint.printERROR(_('Method not found: ') + method_name) - - print_brief(view, steps.label) - for group in view.groups: - for field in group.fields: - if "error" in field.name: - return None - if not no_questions: - if stdin_passwd: - colorPrint.printERROR("Could not use the interactive mode. " - "Use option '-f' for run the process.") - return None - try: - ask = ResultViewer().askConfirm(_("Run process?")) - except KeyboardInterrupt: - ask = "no" - if ask.lower() in ['n', 'no']: - colorPrint.printERROR(_('Manually interrupted')) - return None - - setattr(param_object, 'CheckOnly', False) - method_result = [] - try: - check_res = {} - while True: - method_result = getattr(metaObject, method_name)(metaObject, 0, param_object) - if not method_result: - colorPrint.printERROR(_('method unavailable')) - return None - - if method_result[0].type and method_result[0].type != "pid": - check_res = check_result_msg(method_result, view, check_res, - args) - if not check_res: - return None - else: - param_object = get_param_pwd(check_res, view, - param_object, - stdin_passwd=stdin_passwd) - else: - break - except VariableError as e: - _print(e) - return None - #for ReturnedMessage in method_result: - # if ReturnedMessage.type and ReturnedMessage.type != "pid": - # display_error(ReturnedMessage, args, view.groups) - # # params_text = '' - # # for Group in view.groups: - # # for field in Group.fields: - # # if field.name == ReturnedMessage.field: - # # params_text += getErrorOnParam(args, field) - # # colorPrint.printERROR('\r' + params_text % \ - # # str(ReturnedMessage.message)) - # return None - return 
method_result - - -def create_param_object(view): - param_object = type('collect_object', (object,), {}) - param_object.CheckAll = True - param_object._type_info = {} - for Group in view.groups: - if not Group.fields: - continue - for field in Group.fields: - setattr(param_object, field.name, None) - param_object._type_info[field.name] = None - return param_object - - -def print_brief(view, brief_label): - for Group in view.groups: - if Group.name: - if not Group.fields: - continue - print_brief_group(Group.fields, Group.name) - - -class ColorTable(tableReport): - def __init__(self, head, body, printer, head_printer=None, - line_printer=None, body_printer=None): - super(ColorTable, self).__init__(None, head, body, colSpan=0) - self.default_printer = printer - self.line_printer = line_printer or printer - self.head_printer = head_printer or printer - self.body_printer = body_printer or printer - self.head = head - self.body = body - - -class Display(object): - def __init__(self): - self._print = get_terminal_print(color_print().defaultPrint) - - def print_info(self, label, value): - GREEN = TextState.Colors.GREEN - self.display_asterisk(GREEN) - self._print(_("%s: ") % label) - WHITE = TextState.Colors.WHITE - self._print.foreground(WHITE)(value) - self._print("\n") - - def print_label(self, label): - GREEN = TextState.Colors.GREEN - self.display_asterisk(GREEN) - self._print(_("%s: ") % label) - self._print("\n") - - def display_asterisk(self, color): - self._print(" ") - self._print.foreground(color).bold("*") - self._print(" ") - - def print_error(self, message): - RED = TextState.Colors.RED - self.display_asterisk(RED) - self._print(message) - self._print("\n") - - def print_warning(self, message): - YELLOW = TextState.Colors.YELLOW - self.display_asterisk(YELLOW) - self._print(message) - self._print("\n") - - def print_table(self, data, head): - WHITE = TextState.Colors.WHITE - ColorTable(head, data, self._print, - body_printer=self._print.foreground( - 
WHITE).clone()).printReport(False) - # sys.stdout.write('%s\n' % printTable(data, head)) - - def print_group(self, label): - self._print(label) - self._print("\n") - - -class InformationElement(object): - def __init__(self, field, display): - self.value = "" - self.label = "" - self.display = display - - @classmethod - def from_field(cls, field, display): - if field.type == 'steps': - return None - map_elements = {'input': ValueInfo, - 'openfile': ValueInfo, - 'combo': ChoiceInfo, - 'comboEdit': ChoiceInfo, - 'radio': ChoiceInfo, - 'file': ChoiceInfo, - 'multichoice': MultiChoiceInfo, - 'multichoice_add': MultiChoiceInfo, - 'selecttable': MultiChoiceInfo, - 'selecttable_add': MultiChoiceInfo, - 'error': ErrorInfo, - 'check': CheckInfo, - 'check_tristate': CheckInfo, - 'table': TableInfo - } - if field.element in map_elements: - return map_elements[field.element](field, display) - return None - - def show(self): - self.display.print_info(self.label, self.value) - - -class ValueInfo(InformationElement): - def __init__(self, field, display): - super(ValueInfo, self).__init__(field, display) - self.value = field.value or '' - self.label = field.label - - -class CheckInfo(InformationElement): - def __init__(self, field, display): - super(CheckInfo, self).__init__(field, display) - self.label = field.label - map_answer = {'on': _('yes'), 'off': _("no"), 'auto': _('auto')} - self.value = map_answer.get(field.value, field.value) - - -class ChoiceInfo(InformationElement): - def __init__(self, field, display): - super(ChoiceInfo, self).__init__(field, display) - self.label = field.label or '' - if field.choice and field.comments: - map_comment = dict(zip(field.choice, field.comments)) - self.value = map_comment.get(field.value, field.value) or '' - else: - self.value = field.value if field.value else '' - - -class MultiChoiceInfo(InformationElement): - def __init__(self, field, display): - super(MultiChoiceInfo, self).__init__(field, display) - self.label = field.label or '' 
- if field.listvalue: - value = field.listvalue - # удалить пустой первый элемент (особенности wsdl) - if value and not value[0]: - value.pop(0) - if field.choice and field.comments: - map_comment = dict(zip(field.choice, field.comments)) - else: - map_comment = {} - self.value = ", ".join([map_comment.get(x, x) or '' for x in value]) - else: - self.value = field.value or "" - - -class ErrorInfo(InformationElement): - def __init__(self, field, display): - super(ErrorInfo, self).__init__(field, display) - self.label = field.label - - def show(self): - self.display.print_error(self.label) - - -class TableInfo(InformationElement): - """ - Табличная информация - """ - - def map_row(self, row, typedata): - map_answer = {'on': _('yes'), 'off': _("no"), 'auto': _('auto')} - for cell, typefield in zip(row, typedata): - if typefield in ['check', 'check_tristate']: - yield map_answer.get(cell, cell) or "" - elif "password" in typefield: - yield "***" - else: - yield cell or "" - - def __init__(self, field, display): - super(TableInfo, self).__init__(field, display) - self.label = field.label - self.head = field.tablevalue.head - - # удаление первого элемента строки (для wsdl) - body = [x[1:] if x and not x[0] else x for x in field.tablevalue.body] - - if not filter(None, map(lambda x: x, body)): - self.body = None - else: - type_values = [x.typefield for x in field.tablevalue.values] - self.body = [list(self.map_row(x, type_values)) for x in body] - - def show(self): - if self.body: - self.display.print_label(self.label) - self.display.print_table(self.body, self.head) - - -def print_brief_group(Fields, group_name): - display = Display() - show_group = True - try: - for element in filter(None, - (InformationElement.from_field( - FieldAdapter.from_detect(x), - display) - for x in Fields if not x.uncompatible)): - if show_group: - display.print_group(group_name) - show_group = False - element.show() - except Exception: - import traceback - - traceback.print_exc() - raise - - 
-class Methods(LocalCall.Common, object): - _instance = None - - def __new__(cls, *args, **kwargs): - if not cls._instance: - cls._instance = super(Methods, cls).__new__( - cls, *args, **kwargs) - return cls._instance - - def __init__(self): - LocalCall.Common.__init__(self, False, False, False, False) - - -def has_force_arg(args): - """ - Содержат ли аргумент force. Предварительное определение, так как на 100% - невозможно определить является ли -sf двумя опциями -s,-f или это одна - опция -s со значением "f" - :param args: - :return: - """ - force_parser = argparse.ArgumentParser(add_help=False) - force_parser.add_argument( - '-f', '--force', default=False, dest='force', action="store_true") - _args, _drop = force_parser.parse_known_args(args) - - if _args.force: - return True - - re_force = re.compile("^--force|-[a-zA-Z0-9]*f[a-zA-Z0-9]*$") - for arg in args: - if re_force.search(arg): - return True - else: - return False diff --git a/libs_crutch/core/server/methods_func.py b/libs_crutch/core/server/methods_func.py deleted file mode 100644 index 7c60662..0000000 --- a/libs_crutch/core/server/methods_func.py +++ /dev/null @@ -1,787 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import print_function -from __future__ import absolute_import -import sys -import os -import argparse -import re -from calculate.lib.utils.common import getpass -from calculate.lib.cl_print import color_print -from calculate.lib.cl_lang import setLocalTranslate -from calculate.core.server.api_types import TableAdapter -from functools import reduce - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -from itertools import * -from .api_types import ViewInfoAdapter, ArrayReturnedMessage, FieldAdapter -from calculate.lib.utils.colortext import get_terminal_print, TextState - -colorPrint = color_print() - - -class GotErrorField(Exception): - """ - Исключение о получение поля error среди - элементов view - """ - pass - - -class BoolAction(argparse.Action): - reTrue = re.compile("^(?:on|yes)$", re.I) - reFalse = re.compile("^(?:off|no)$", re.I) - available_values = ("on", "off","yes", "no") - - def __init__(self, option_strings, dest, nargs="?", - const=None, default=None, type=None, choices=None, - required=False, help=None, metavar=None): - super(BoolAction, self).__init__( - option_strings=option_strings, dest=dest, - nargs=nargs, const=const, default=default, - type=type, choices=choices, required=required, - help=help, metavar=metavar) - - def get_values(self, value): - value = value.lower() - for v in self.available_values: - if value == v: - return v - else: - return "" - - def __call__(self, parser, ns, values, option_string=None): - if values is None: - values = "on" - else: - if self.reTrue.match(values): - values = "on" - if self.reFalse.match(values): - values = "off" - values = self.get_values(values) - if not values: - msg = _('the value may be {0}').format( - formatListOr(self.available_values)) - parser.error(msg) - setattr(ns, self.dest, values) - -class BoolAutoAction(BoolAction): - available_values = ("on", "off", "yes", "no", "auto") - -def _print(*args): - print(" ".join(map(lambda x: _u8(x), args))) - - -def 
get_password(text1=None, text2=None, getfromstdin=False, - needrepeat=True): - if getfromstdin: - try: - passwd = '' - while not passwd: - passwd = sys.stdin.readline() - if not passwd: - return None - passwd = passwd.rstrip('\n') - return passwd - except BaseException: - return None - if not text1: - text1 = _('Password: ') - if not text2: - text2 = _('Repeat: ') - try: - pass1 = 'password' - pass2 = 'repeat' - while pass1 != pass2: - pass1 = getpass.getpass(text1) - if not needrepeat: - return pass1 - pass2 = getpass.getpass(text2) - if pass1 != pass2: - print(_('Passwords do not match')) - except KeyboardInterrupt: - return None - passwd = pass1 if (pass1 and pass1 == pass2) else '' - return passwd - - -def listToArray(client, _list, _type='string'): - if not client: - return _list - array = client.factory.create('%sArray' % _type) - for i in _list: - array['%s' % _type].append(i) - return array - - -def listToArrayArray(client, _list, _type='string'): - if not client: - return _list - array_array = client.factory.create('%sArrayArray' % _type) - for i in _list: - array = client.factory.create('%sArray' % _type) - for j in i: - array[_type].append(j) - array_array['%sArray' % _type].append(array) - return array_array - - -def _getattr(obj, attr): - return getattr(obj, attr) if hasattr(obj, attr) else None - - -import argparse -import textwrap as _textwrap -from calculate.lib.utils.text import get_term_size, _u, _u8, _uu8, formatListOr - - -class RawAndDefaultsHelpFormatter(argparse.HelpFormatter): - def __init__(self, prog, max_help_position=24, **kwargs): - # Use the whole terminal width - height, width = get_term_size() - argparse.HelpFormatter.__init__(self, prog, width=width, - max_help_position=max_help_position, - **kwargs) - - def _split_lines(self, text, width): - text = self._whitespace_matcher.sub(' ', _u(text)).strip() - return _uu8(*_textwrap.wrap(text, width)) - - -def get_method_argparser(view, args, cl_core=False): - """ - Get argparser by 
ViewInfo get from WSDL server (or stub) - - cl_core - argparser for cl_core (local call) - """ - error_flag = False - method = args.method - if cl_core: - progr = os.path.basename(sys.argv[0]) - else: - progr = 'cl-console --method ' + method - - bool_vars = ["f"] - - def fix_bool_variables(args): - prevlen = 0 - local_args = args - while prevlen != len(local_args): - prevlen = len(local_args) - local_args = reduce(lambda x, y: ( - x + [y[:2], "-%s" % y[2:]] - if (len(y) > 2 and y[:1] == "-" and y[1:2] in bool_vars and - not y[2:].lower().startswith("on") and - not y[2:].lower().startswith("off")) - else x + [y]), local_args, []) - return local_args - - def get_list(data): - if data is not None: - for entry in data if cl_core else data[0]: - yield entry - - parser = argparse.ArgumentParser( - prog=progr, add_help=False, formatter_class=RawAndDefaultsHelpFormatter) - parser.fixBoolVariables = fix_bool_variables - - for Group in ifilter(lambda x: x.fields, get_list(view.groups)): - group = parser.add_argument_group(Group.name) - for field in get_list(Group.fields): - if field.element == 'error': - error_flag = True - colorPrint.printERROR(field.label) - elif field.opt: - opt = field.opt - data = {'dest': field.name, 'help': opt.help} - if "choice" in field.type and hasattr(field.opt, "syntax") and \ - field.opt.syntax and "{" in field.opt.syntax: - lgroup = group.add_mutually_exclusive_group() - help = dict(map(lambda x: (x[0].strip("'"), x[2]), - map(lambda x: x.partition(' - '), - field.help.split(',\n')))) - choice = field.choice - if not type(choice) in (list, tuple): - choice = choice.string - for val in filter(None, choice): - data['action'] = 'store_const' - # data['nargs'] = '?' 
- data['const'] = val - data['metavar'] = "" - if hasattr(field, "value") and field.value == val: - data['help'] = (help.get(val, field.help) + - " " + _("(by default)")) - else: - data['help'] = help.get(val, field.help) - lgroup.add_argument(field.opt.syntax.format(choice=val), - **data) - continue - if "password" in field.type: - data['action'] = "store_true" - if "need" in field.type: - data['help'] = argparse.SUPPRESS - else: - data['type'] = str - if field.element in ['check', 'check_tristate']: - if field.element == "check_tristate": - data['action'] = BoolAutoAction - else: - data['action'] = BoolAction - if field.value == 'on': - data['help'] = data['help'] + " " + _( - "(enabled by default)") - if opt.shortopt: - bool_vars.append(opt.shortopt[1]) - elif field.element == 'radio' and field.type == 'bool': - data['action'] = BoolAction - if field.value == 'on': - data['help'] = data['help'] + " " + _( - "(enabled by default)") - if opt.shortopt: - bool_vars.append(opt.shortopt[1]) - if field.element == 'table' and field.type != 'steps': - data['action'] = 'append' - - if data.get('action') != "store_true": - data['metavar'] = opt.metavalue if opt.metavalue \ - else field.name.upper() - # if ':' in data['metavar']: - # data['metavar'] = field.name.upper() - if "choice" in field.type: - if "list" in field.type: - data['help'] = "%s (%s)" % ( - data['help'], - _("'list' for displaying possible values, " - "'none' is never one")) - else: - data['help'] = "%s (%s)" % ( - data['help'], - _("'list' for displaying possible values")) - if "boolauto" in field.type: - data['metavar'] = "ON/OFF/AUTO" - elif "bool3" in field.type: - data['metavar'] = "ON/OFF/AUTO" - elif "bool" in field.type: - data['metavar'] = "ON/OFF" - try: - opts = filter(None, [opt.shortopt, opt.longopt]) - if any("-" not in x for x in opts): - data.pop('dest') - data['nargs'] = '?' 
- group.add_argument(*opts, **data) - except argparse.ArgumentError: - continue - group = parser.add_argument_group(_("Common arguments")) - group.add_argument( - '-f', '--force', action='store_true', default=False, - dest='no_questions', help=_('silent during the process')) - if error_flag: - raise GotErrorField - return parser - - -def set_obj_item(client, param_object, field_name, value): - """ - Set value for Info object. By client detect (local or WSDL call) - """ - if client: - param_object.__setitem__(field_name, value) - else: - setattr(param_object, field_name, value) - return param_object - - -def set_table_pwd(client, param_object, field, value): - if type(field.tablevalue.values) in [list, tuple]: - choice_values = field.tablevalue.values - # column = len(field.tablevalue.head) - else: - choice_values = field.tablevalue.values.ChoiceValue - # column = len(field.tablevalue.head.string) - # print ChoiceValue, column - found_flag = False - changed_string = None - if not getattr(param_object, field.name): - for column in range(len(choice_values)): - if found_flag: - break - if "password" in choice_values[column].typefield: - if hasattr(field.tablevalue.body, 'stringArray'): - body = field.tablevalue.body.stringArray - else: - body = field.tablevalue.body - for _row in body: - if hasattr(_row, 'string'): - row = _row.string - else: - if not _row[0]: - row = _row[1:] - else: - row = _row - if not row[column]: - row[column] = value - temp = [] - for item in row: - temp.append(item) if item else temp.append('') - changed_string = temp - found_flag = True - break - else: - user_table = getattr(param_object, field.name) - if hasattr(user_table, 'stringArray'): - user_table = user_table.stringArray - for column in range(len(choice_values)): - if found_flag: - break - if "password" in choice_values[column].typefield: - for _row in user_table: - if hasattr(_row, 'string'): - row = _row.string - else: - if not _row[0]: - row = _row[1:] - else: - row = _row - if not 
row[column]: - row[column] = value - temp = [] - for item in row: - temp.append(item) if item else temp.append('') - changed_string = temp - found_flag = True - break - - # код выполняется через сетевую консоль - if client: - if not getattr(param_object, field.name): - setattr(param_object, field.name, field.tablevalue.body) - object_array = TableAdapter.get_matrix(param_object[field.name]) - # код выполняется локально - else: - if not getattr(param_object, field.name): - setattr(param_object, field.name, - map(lambda x: x[1:] if not x[0] else x, - field.tablevalue.body)) - object_array = getattr(param_object, field.name) - - result = [] - for _array in object_array: - temp = [] - for item in _array: - temp.append(item) if item else temp.append('') - result.append(temp) - if changed_string: - for item in result: - if str(item[0]) == str(changed_string[0]): - result.remove(item) - - result.append(changed_string) - - if client: - param_object[field.name] = listToArrayArray(client, result) - else: - setattr(param_object, field.name, result) - return param_object - - -def display_error(error, args, groups): - params_text = '' - sys.stdout.write('\r') - sys.stdout.flush() - list_answer = False - varname, comments, values, value, list_value = None, None, None, None, [] - if error.type != "commonerror": - for group in groups: - for field in group.fields: - if field.name == error.field: - if args is not None: - if (getattr(args, field.name) == "list" and - "choice" in field.type): - if error.field_obj: - field_obj = FieldAdapter.from_detect(error.field_obj) - list_answer = True - varname = (field_obj.label or - field_obj.name) - comments = field_obj.comments - values = field_obj.choice - value = field_obj.value - list_value = error.field_obj.listvalue or [] - params_text += getErrorOnParam(args, field) - else: - if field.opt.shortopt or field.opt.longopt: - params_text += _('Wrong option ') - params_text += ' ' + ', '.join( - filter(None, [field.opt.shortopt, - 
field.opt.longopt])) + '. %s' - if list_answer: - __print = get_terminal_print(colorPrint.defaultPrint) - __print.foreground(TextState.Colors.WHITE)( - _("%s values:") % varname) - __print("\n") - if comments and values: - maxlen = len(max(values, key=len)) - for v, c in zip(values, comments): - if not v and not c: - continue - __print(" ") - __print.bold("[" + v + "]") - __print(" " * (maxlen - len(v))) - __print(" ") - __print(c) - if value == v or v in list_value: - __print(" ") - __print.bold.foreground(TextState.Colors.WHITE)("*") - __print("\n") - if not comments: - for v in filter(None, values): - __print(" ") - __print.bold("[" + v + "]") - if value == v: - __print(" ") - __print.bold.foreground(TextState.Colors.WHITE)("*") - __print("\n") - elif error.type != "commonerror": - colorPrint.printERROR(params_text % error.message) - else: - colorPrint.printWARNING(error.message) - - -def check_result_msg(method_result, view, input_error_dict=None, args=None): - if not input_error_dict: - input_error_dict = {} - password_errors = {} - method_result = ArrayReturnedMessage.from_detect(method_result) - view = ViewInfoAdapter.from_detect(view) - - for error in method_result: - if error.type == 'pwderror': - password_errors[error.field] = error.message - continue - - display_error(error, args, view.groups) - - # если все ошибки связаны с паролем - if len(password_errors) == len(method_result): - if (not dict([x for x in input_error_dict.items() - if x not in password_errors.items()]) and - not dict([x for x in password_errors.items() - if x not in input_error_dict.items()])): - return None - return password_errors - else: - return None - - -def get_param_pwd(check_res, view, param_object, client=None, - stdin_passwd=False): - view = ViewInfoAdapter.from_detect(view) - for pwd_field in check_res: - if not stdin_passwd: - _print(check_res[pwd_field]) - for group in view.groups: - for field in group.fields: - if field.name == pwd_field: - if field.element == 'table': - 
value = get_password(getfromstdin=stdin_passwd) - if value is None: - _print(check_res[pwd_field]) - raise KeyboardInterrupt - set_table_pwd(client, param_object, field, value) - else: - value = get_password(getfromstdin=stdin_passwd) - if value is None: - _print(check_res[pwd_field]) - raise KeyboardInterrupt - setattr(param_object, pwd_field, value) - return param_object - - -def collect_object(client, param_object, view, args, wait_thread=None, - stdin_passwd=False): - """ - Collect Info object by args - """ - steps = None - view = ViewInfoAdapter.from_detect(view) - for group in view.groups: - for field in group.fields: - #if field.uncompatible: - # continue - if (field.element in ['check', 'check_tristate'] and - field.type in ('bool', 'boolauto', 'bool3')) or ( - field.element == 'radio' and field.type == 'bool'): - value = _getattr(args, field.name) - if field.type == 'bool3': - pass - else: - if value: - if _getattr(args, field.name).lower() in ['on', 'yes']: - value = True - elif _getattr(args, - field.name).lower() in ['off', 'no']: - value = False - else: - value = None - else: - value = None - param_object = set_obj_item(client, param_object, - field.name, value) - elif (field.element == 'input' and - field.name in ['cl_page_offset', 'cl_page_count']): - val = _getattr(args, field.name) - if not val: - val = 0 - if wait_thread: - wait_thread.stop() - # param_object[field.name] = val - param_object = set_obj_item(client, param_object, field.name, - val) - - elif field.element in ['input', 'openfile', - 'file', 'radio', 'combo', 'comboEdit']: - param_object = set_obj_item(client, param_object, field.name, - _getattr(args, field.name)) - elif 'passw' in field.element and _getattr(args, field.name) \ - or field.type and "need" in field.type: - if wait_thread: - wait_thread.pause() - label = field.label or _("Password") - password = get_password(label + _(": "), - _('Repeat password: '), - getfromstdin=stdin_passwd, - needrepeat="one" not in field.type) - if 
password is None: - raise KeyboardInterrupt - param_object = set_obj_item(client, param_object, field.name, - password) - if wait_thread: - wait_thread.resume() - elif field.element in ['multichoice', 'multichoice_add', - 'selecttable', 'selecttable_add']: - val = _getattr(args, field.name) - if val in ['off', 'none']: - if client: - value = listToArray(client, [None]) - else: - value = [] - else: - value = listToArray(client, - val.split(',')) if val else None - param_object = set_obj_item(client, param_object, field.name, - value) - - elif field.element == 'table' and field.type != 'steps': - val = _getattr(args, field.name) - value = collect_table(field, val, client, wait_thread, - stdin_passwd) - param_object = set_obj_item(client, param_object, field.name, - value) - - elif field.element == 'table' and field.type == 'steps': - steps = field - if hasattr(param_object, 'CheckAll'): - param_object = set_obj_item(client, param_object, 'CheckAll', True) - - return param_object, steps - - -def convertArgDictToList(argDict): - """ - Convert list of dict like {'arg_1':None,'arg_2':'value2','arg_5':'value5'} - to [..,['','value2','','','value5']..] 
(iterator) - """ - for row in argDict: - yield [row[i] or '' for i in sorted(row.keys())] - - -def collect_table(field, val_list, client, wait_thread=None, - stdin_passwd=False): - def split_param(l, delimeter=","): - for x in l: - for y in x.split(delimeter): - yield y - - if not val_list: - return None - # if specified syntax for table row - if hasattr(field.opt, "syntax") and field.opt.syntax: - reArgs = re.compile(field.opt.syntax) - # check for correct sentense - for wrong in ifilterfalse(reArgs.search, val_list): - # raise Error on wrong - raise ValueError(_("Wrong %s value syntax") % - (field.opt.shortopt or field.opt.longopt) - + " " + wrong) - # create groupdict from all vals - argDict = map(lambda x: x.groupdict(), map(reArgs.search, val_list)) - # convert groupdicts to val_table - val_table = list(convertArgDictToList(argDict)) - # standard syntax - else: - reduced_list = list(val_list) - val_table = map(lambda x: x.split(':'), reduced_list) - - if type(field.tablevalue.values) in [list, tuple]: - choiceValue = field.tablevalue.values - else: - choiceValue = field.tablevalue.values.ChoiceValue - choiceValue = filter(lambda x: x.typefield != 'readonly', choiceValue) - lenChoiceValue = len(choiceValue) - for wrong in ifilter(lambda x: len(x) > lenChoiceValue, - val_table): - if type(wrong) in (list, tuple): - wrong = ":".join(wrong) - raise ValueError(_("Wrong %s value syntax") % - (field.opt.shortopt or field.opt.longopt) - + " " + wrong) - - # obj_body = [] - - # if type(field.tablevalue.body) == list: - # temp_body = field.tablevalue.body - # else: - # temp_body = field.tablevalue.body.stringArray - # for obj_row in temp_body: - # if type(obj_row) != list: - # obj_row = obj_row.string - # for item in range(len(obj_row)): - # if not obj_row[item]: - # obj_row[item] = '' - # else: - # if obj_row: - # if obj_row[0] == '': - # obj_row.pop(0) - # obj_body.append(obj_row) - # - # obj_body = collect_obj_body(obj_body, field) - - is_password_get = 
any('password' in x.typefield for x in choiceValue) - obj_body = [] - for line in val_table: - received_password = None - if is_password_get: - if len(line) > 0 and line[0].lower() != '': - if wait_thread: - wait_thread.stop() - sys.stdout.write('\r') - sys.stdout.flush() - received_password = get_password( - _('Password for %s: ') % line[0], - _('Repeat password for %s: ') % line[0], - getfromstdin=stdin_passwd) - if received_password is None: - raise KeyboardInterrupt - temp_row = [] - for val, choice_value in izip_longest( - line, filter(lambda x: x.typefield != 'readonly', choiceValue), - fillvalue=''): - typefield = choice_value.typefield - # if readonly, except first column - if typefield in ['check', 'check_tristate']: - if BoolAction.reTrue.match(val): - temp_row.append('on') - elif BoolAction.reFalse.match(val): - temp_row.append('off') - else: - temp_row.append(val) - elif typefield in ['input', 'combo', 'comboEdit', 'openfile', - 'file', 'radio', 'text', 'multichoice', - 'multichoice_add']: - temp_row.append(val) - elif typefield == 'password': - if received_password is not None: - temp_row.append(received_password) - else: - temp_row.append(val) - obj_body.append(temp_row) - - if not obj_body: - obj_body = [[None]] - return listToArrayArray(client, obj_body) - - -def collect_obj_body(body, field): - field = FieldAdapter.from_detect(field) - column = len(field.tablevalue.head) - result_table = [] - choice_value = field.tablevalue.values - for i in range(len(body)): - temp_row = [] - for j in range(column): - # not adding if readonly - if j > (len(choice_value) + 1): - continue - typefield = choice_value[j].typefield - if typefield == 'readonly' and j > 0: - continue - elif typefield in ['check', 'check_tristate']: - if len(body[i]) < j + 1: - temp_row.append('') - continue - if not body[i][j]: - temp_row.append('') - elif body[i][j].lower() in ['on', 'yes']: - temp_row.append('on') - elif body[i][j].lower() in ['off', 'no']: - temp_row.append('off') - 
else: - temp_row.append(body[i][j]) - - elif typefield in ['input', 'combo', 'comboEdit', 'openfile', - 'file', 'password', 'radio', 'text']: - if len(body[i]) < j + 1: - temp_row.append('') - elif not body[i][j]: - temp_row.append('') - else: - temp_row.append(body[i][j]) - elif typefield in ['multichoice', 'multichoice_add']: - if len(body[i]) < j + 1: - temp_row.append('') - elif not body[i][j]: - temp_row.append('') - else: - temp_row.append(body[i][j]) - result_table.append(temp_row) - return result_table - - -def getErrorOnParam(args, field): - """ - Get errors for param - """ - params_text = "" - if any("-" in x - for x in filter(None, (field.opt.longopt, field.opt.shortopt))): - param_name = ', '.join(filter(None, - [field.opt.shortopt, field.opt.longopt])) - else: - param_name = field.opt.metavalue - if getattr(args, field.name) is None and "need" not in field.type: - params_text += "%s. " + _("Use the parameter") + " " - params_text += param_name + '. ' - else: - if "need" in field.type: - params_text += \ - _('Error in field \'%s\'. ') % field.label + " %s" - elif getattr(args, field.name) == "list" and "choice" in field.type: - params_text += "%s" - else: - params_text += _('Error in parameter ') - params_text += param_name + '. %s' - return params_text diff --git a/libs_crutch/core/server/post_cert.py b/libs_crutch/core/server/post_cert.py deleted file mode 100644 index 909a340..0000000 --- a/libs_crutch/core/server/post_cert.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import datetime -import threading -from .cert_cmd import find_cert_id -# Time life certificate in days -DAY_CERT = 600 - - -def serv_post_cert(cls): - """ transfer the client certificate """ - cur_thread = threading.currentThread() - certificate = cur_thread.client_cert - if certificate is None: - return [-3] - - checked_id = find_cert_id(certificate, cls.data_path, cls.certbase) - try: - if int(checked_id) < 1: - return [-2] - except ValueError: - return [-4] - results = [] - with open(cls.certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - # if in line present certificate id - if len(words) > 3: - if words[0] == checked_id: - results.append(checked_id) - date = datetime.datetime.strptime( - words[2] + ' ' + words[3], '%Y-%m-%d %H:%M:%S.%f') - d = datetime.datetime.now() - date - v = DAY_CERT - d.days # How many days left certificate - if v < 0: - # Method deleted certificate - v = -2 # expiry date has passed - # For a long time, is not displayed to the client - elif v > 60: - v = -1 - results.append(v) - return results - return [-4] diff --git a/libs_crutch/core/server/post_request.py b/libs_crutch/core/server/post_request.py deleted file mode 100644 index 32b9111..0000000 --- a/libs_crutch/core/server/post_request.py +++ /dev/null @@ -1,271 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. 
http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import threading -import os -import hashlib -import datetime -from calculate.lib.utils.files import readFile - -MAX = 10000 - -# Sign client certificate -def serv_post_client_request(request, data_path, - ip, mac, client_type, client_certbase, cert_path): - if not os.path.exists(cert_path + '/root.crt') or \ - not os.path.exists(cert_path + '/root.key'): - return '-1' - - if not os.path.exists(client_certbase): - if not os.path.exists(data_path + '/client_certs/'): - os.makedirs(data_path + '/client_certs/') - fp = open(client_certbase, 'w') - fp.close() - - # get ip - cur_thread = threading.currentThread() - try: - ip = cur_thread.REMOTE_ADDR - except AttributeError: - ip = 'localhost' - - # Finding Id for the current certificate - id_file = data_path + '/client_certs/id.int' - - if os.path.exists(id_file): - i = int(readFile(id_file)) - else: - with open(client_certbase) as fd: - t = fd.read() - count = len(t.splitlines()) + 1 - with open(id_file, 'w') as fi: - fi.write(str(count)) - i = count - - req_file = data_path + '/client_certs/' + str(i) + '.csr' - - # Record of request of the client in file req_file - with open(req_file, 'w') as f: - f.write(request) - - md5 = hashlib.md5() - md5.update(request) - md5sum = md5.hexdigest() - date = datetime.datetime.now() - - # record - with open(client_certbase, "a") as fc: - fc.write("%d %s %s %s %s %s\n" % - (i, md5sum, date, ip, mac, 
client_type)) - - # record next Id in id.int file - i += 1 - with open(id_file, 'w') as fi: - temp = str(i) - fi.write(temp) - return str(i - 1) - - -def serv_get_client_cert(req_id, request, data_path, client_certbase, - cert_path, localuser=None): - req_file = data_path + '/client_certs/' + req_id + '.csr' - if not os.path.exists(req_file): - return '1' - - cert_file = data_path + '/client_certs/' + req_id + '.crt' - if not os.path.exists(cert_file): - return '2' - - # read client certificate in buffer - fp = open(cert_file, 'r') - cert = fp.read() - fp.close() - - md5 = hashlib.md5() - md5.update(cert) - md5sum = md5.hexdigest() - date = datetime.datetime.now() - - ft = open(client_certbase + '_temp', 'w') - # open file with server certificate certbase - with open(client_certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split(' ') - if not words: - continue - if words[0] == req_id: - try: - cur_thread = threading.currentThread() - ip = cur_thread.REMOTE_ADDR - except AttributeError: - ip = 'localhost' - if not request == words[1]: - fd.close() - ft.close() - os.unlink(client_certbase + '_temp') - return '3' - mac = words[5] - client_type = words[6] - if localuser: - line = ("%s %s %s %s %s %s %s" % (req_id, md5sum, date, ip, mac, - client_type, localuser)) - else: - line = ("%s %s %s %s %s %s" % (req_id, md5sum, date, ip, mac, - client_type)) - ft.write(line + '\n') - - # copy all from temp file - ft = open(client_certbase + '_temp', 'rb') - fd = open(client_certbase, 'wb') - ft.seek(0) - fd.write(ft.read()) - ft.close() - fd.close() - # delete temp file - os.unlink(client_certbase + '_temp') - os.unlink(req_file) - - cert_file_path = os.path.join(cert_path, 'root.crt') - if not os.path.exists(cert_file_path): - open(cert_file_path, 'w').close() - ca_root = readFile(cert_file_path) - return [cert, ca_root] - - -# sign server certificate -def serv_post_server_request(request, data_path, - ip, 
mac, serv_certbase, cert_path): - if (not os.path.exists(cert_path + '/root.crt') or - not os.path.exists(cert_path + '/root.key')): - return '-1' - - if not os.path.exists(serv_certbase): - if not os.path.exists(data_path + '/server_certs/'): - os.makedirs(data_path + '/server_certs/') - fp = open(serv_certbase, 'w') - fp.close() - - # get ip - cur_thread = threading.currentThread() - try: - ip = cur_thread.REMOTE_ADDR - except AttributeError: - # print "EXCEPT ip = curThread.REMOTE_ADDR!!!!!!" - ip = 'not_defined' - - # Finding Id for the current certificate - id_file = data_path + '/server_certs/id.int' - - if os.path.exists(id_file): - with open(id_file, 'r') as fi: - temp = fi.read() - i = int(temp) - else: - with open(serv_certbase) as fd: - t = fd.read() - count = len(t.splitlines()) - - count += 1 - with open(id_file, 'w') as fi: - fi.write(str(count)) - i = count - - req_file = data_path + '/server_certs/' + str(i) + '.csr' - - # Record of request of the client in file REQ_FILE - with open(req_file, 'w') as f: - f.write(request) - - md5 = hashlib.md5() - md5.update(request) - md5sum = md5.hexdigest() - date = datetime.datetime.now() - - # record - with open(serv_certbase, "a") as fc: - fc.write("%d %s %s %s %s\n" % (i, md5sum, date, ip, mac)) - - # record next Id in id.int file - with open(id_file, 'w') as fi: - fi.write(str(i + 1)) - return str(i) - - -def serv_get_server_request(req_id, request, data_path, serv_certbase, - cert_path): - req_file = data_path + '/server_certs/' + req_id + '.csr' - if not os.path.exists(req_file): - return '1' - - cert_file = data_path + '/server_certs/' + req_id + '.crt' - if not os.path.exists(cert_file): - return '2' - - # fp = open(REQ_FILE, 'r') - # req = fp.read() - # fp.close() - - # read client certificate in buffer - fp = open(cert_file, 'r') - cert = fp.read() - fp.close() - - md5 = hashlib.md5() - md5.update(cert) - md5sum = md5.hexdigest() - date = datetime.datetime.now() - - ft = open(serv_certbase + '_temp', 
'w') - # open file with server certificate certbase - with open(serv_certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split(' ') - if words[0] == req_id: - cur_thread = threading.currentThread() - try: - ip = cur_thread.REMOTE_ADDR - except AttributeError: - return '3' - if not request == words[1]: - fd.close() - ft.close() - os.unlink(serv_certbase + '_temp') - return '3' - mac = words[5] - line = ("%s %s %s %s %s" % (req_id, md5sum, date, ip, mac)) - ft.write(line + '\n') - - # copy all from temp file - ft = open(serv_certbase + '_temp', 'rb') - fd = open(serv_certbase, 'wb') - ft.seek(0) - fd.write(ft.read()) - ft.close() - fd.close() - # delete temp file - os.unlink(serv_certbase + '_temp') - os.unlink(req_file) - - if not os.path.exists(cert_path + '/ca_root.crt'): - open(cert_path + '/ca_root.crt', 'w').close() - ca_root = readFile(cert_path + '/ca_root.crt') - return [cert, ca_root] diff --git a/libs_crutch/core/server/request.py b/libs_crutch/core/server/request.py deleted file mode 100644 index f902731..0000000 --- a/libs_crutch/core/server/request.py +++ /dev/null @@ -1,160 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -import os -import subprocess -from calculate.core.server.core_interfaces import MethodsInterface - -from calculate.lib.utils.files import pathJoin, readFile -from calculate.lib.utils.common import getPagesInterval -from calculate.lib.cl_lang import getLazyLocalTranslate, setLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - -__ = getLazyLocalTranslate(_) - - -class Request(MethodsInterface): - """ - Объект работы с запросами на создание сертификатов - """ - - def show_request_meth(self, page_count, page_offset): - """ - Отобразить таблицу с текущими запросами - """ - dv = self.clVars - list_req_id = dv.Get('cl_list_req_id') - try: - list_req_id.sort(key=lambda x: int(x)) - except ValueError: - list_req_id.sort() - - if not list_req_id: - self.printSUCCESS(_('No requests')) - - head = ['Id', _('UserName'), 'IP', 'MAC', _('Date'), _('Location'), - _('Group')] - body = [] - fields = ['cl_req_id', '', '', '', '', '', ''] - - for req in list_req_id[page_offset:page_offset + page_count]: - dv.Set('cl_req_id', req) - mac = dv.Get('cl_req_mac') - ip = dv.Get('cl_req_ip') - date = dv.Get('cl_req_date') - username = dv.Get('cl_req_user_name') - location = dv.Get('cl_req_location') - group = dv.Get('cl_req_group') - if not group: - group = _('Not signed') - body.append([str(req), username, ip, mac, date, location, group]) - - if body: - self.printTable(_("List of requests"), head, body, - fields=fields, onClick='core_detail_request') - num_page, count_page = getPagesInterval( - page_count, page_offset, - len(list_req_id)) - self.printSUCCESS(_('page %d from ') % num_page + str(count_page)) - return True - - def confirm_request_meth(self, client_certs, cert_path, - cl_req_csr_path, - cl_req_crt_path, cl_req_group): - """ - Подтвердить запрос на сертификат и создать его - """ - server_cert = cert_path + '/root.crt' - server_key = cert_path + '/root.key' - - if not os.path.exists(cl_req_csr_path): - 
self.printERROR(_("Signature request %s not found") % - cl_req_csr_path) - return False - - if os.path.exists(cl_req_crt_path): - self.printWARNING(_("Certificate %s has been signed") % - cl_req_crt_path) - return False - - group = "group:%s" % cl_req_group - config = pathJoin(client_certs, 'ssl-client.cfg') - if os.path.exists(config): - os.unlink(config) - - cfg_text = ("[ ssl_client ]\n" - "basicConstraints = CA:FALSE\n" - "nsCertType = client\n" - "keyUsage = digitalSignature, keyEncipherment\n" - "extendedKeyUsage = clientAuth\n" - "nsComment = %s") % group - fc = open(config, 'w') - fc.write(cfg_text) - fc.close() - - cmd = ("openssl x509 -req -days 11000 -CA %s -CAkey %s " - "-CAcreateserial " - "-extfile %s -extensions ssl_client -in %s -out %s" - % (server_cert, server_key, config, - cl_req_csr_path, cl_req_crt_path)) - PIPE = subprocess.PIPE - p = subprocess.Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, - stderr=subprocess.STDOUT, close_fds=True) - p.wait() - return True - - def del_request_meth(self, certbase, request, cert, id_del_req): - - # chect exists request and certificate files - if not os.path.exists(request) and not os.path.exists(cert): - self.printERROR(_('Request or certificate with ID = %s not ' - 'found!') % id_del_req) - return False - - if not os.path.exists(request): - self.printERROR(_("Request %s not found!") % request) - - if os.path.exists(cert): - self.printWARNING(_("Request signed")) - - certbase_temp = certbase + '_temp' - # create temp file - ft = open(certbase_temp, 'w') - with open(certbase) as fd: - t = fd.read() - # See each line - for line in t.splitlines(): - # and each word in line - words = line.split() - # if in line present certificate id - if not words[0] == id_del_req: - ft.write(line + '\n') - ft.close() - fd.close() - - with open(certbase, 'w') as f: - f.write(readFile(certbase_temp)) - os.unlink(certbase + '_temp') - if os.path.exists(request): - os.unlink(request) - self.printSUCCESS(_("Signature request 
deleted")) - if os.path.exists(cert): - os.unlink(cert) - self.printSUCCESS(_("Certificate deleted")) - return True diff --git a/libs_crutch/core/server/send_cert.py b/libs_crutch/core/server/send_cert.py deleted file mode 100644 index 7e61762..0000000 --- a/libs_crutch/core/server/send_cert.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re -import threading -import hashlib -import datetime -from calculate.lib.utils.files import readFile, readLinesFile - - -def add_cert(mac, client_type, client_counter, client_certname, - client_certbase): - cur_thread = threading.currentThread() - try: - ip = cur_thread.REMOTE_ADDR - except AttributeError: - ip = "localhost" - - cert = cur_thread.client_cert - - # получить идентификатор из файла-счетчика или посчитать количество строк - try: - i = int(readFile(client_counter)) - except ValueError: - i = len(list(readLinesFile(client_certbase))) + 1 - - with open(client_certname, 'w') as f: - f.write(cert) - - md5 = hashlib.md5() - md5.update(cert) - md5sum = md5.hexdigest() - date = datetime.datetime.now() - - with open(client_certbase, 'a') as f: - f.write("%s %s %s %s %s %s\n" % (i, md5sum, date, ip, mac, client_type)) - - with open(client_counter, 'w') as f: - f.write(str(i + 1)) - return str(i) - - -def get_ca(cert_path): - import OpenSSL - - server_cert = readFile(cert_path + '/server.crt') 
- ca_certs = readFile(cert_path + '/ca_root.crt') - - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, server_cert) - - Issuer = certobj.get_issuer().get_components() - issuer_CN = None - for item in Issuer: - if item[0] == 'CN': - issuer_CN = item[1] - - if issuer_CN is None: - return '1' - - p = re.compile('[-]+[\w ]+[-]+\n+[\w\n\+\\=/]+[-]+[\w ]+[-]+\n?') - ca_certs_list = p.findall(ca_certs) - for ca in ca_certs_list: - certobj = OpenSSL.crypto.load_certificate(OpenSSL.SSL.FILETYPE_PEM, ca) - - Subject = certobj.get_subject().get_components() - for subj in Subject: - if subj[0] == 'CN' and subj[1] == issuer_CN: - return ca - return '2' diff --git a/libs_crutch/core/server/server_class.py b/libs_crutch/core/server/server_class.py deleted file mode 100644 index cf14b8e..0000000 --- a/libs_crutch/core/server/server_class.py +++ /dev/null @@ -1,414 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import print_function -from __future__ import absolute_import -from spyne.server.wsgi import WsgiApplication - -import re -import logging -import os -#cStringIO was moved to io in python3 -import cStringIO as io - -logger = logging.getLogger(__name__) - -import datetime -import pickle -from .loaded_methods import LoadedMethods - -# for OpenSSLAdapter -import calculate.contrib -from cherrypy.wsgiserver.ssl_pyopenssl import pyOpenSSLAdapter - -HTTP_500 = '500 Internal server error' -HTTP_200 = '200 OK' -HTTP_405 = '405 Method Not Allowed' -HTTP_403 = '403 Forbidden' -not_log_list = ['post_server_request', 'post_client_request', 'del_sid', - 'get_server_cert', 'get_client_cert', 'get_entire_frame', - 'get_crl', 'get_server_host_name', 'get_ca', 'get_table', - 'post_cert', 'post_sid', 'active_client', 'list_pid', - 'get_methods', 'get_frame', 'get_progress', 'pid_info'] - - -class ClApplication(WsgiApplication): - def __init__(self, app, log=None): - super(ClApplication, self).__init__(app) - # add object logging - self.log = logger - - #verification of compliance certificate and session (sid) - def check_cert_sid(self, sid, server): - import threading - - curthread = threading.currentThread() - cert = curthread.client_cert - from .cert_cmd import find_cert_id - - cert_id = find_cert_id(cert, server.data_path, server.certbase) - cert_id = int(cert_id) - if cert_id == 0: - return 0 - - # session file - if not os.path.exists(server.sids): - os.system('mkdir %s' % server.sids) - - if not os.path.isfile(server.sids_file): - open(server.sids_file, 'w').close() - with open(server.sids_file, 'r') as fd: - while 1: - try: - # read all on one record - list_sid = pickle.load(fd) - except (IOError, EOFError, KeyError): - break - # find session id in sids file - if cert_id == int(list_sid[1]): - if int(sid) == int(list_sid[0]): - return 1 - return 0 - - # input parameters - certificate and name method - def check_rights(self, method_name, req_env, sid): - """ 
check right client certificate for the method """ - import OpenSSL - - # rmethod = re.compile('[{\w]+[}]') - # method_rep = rmethod.findall(method_name) - # method_name = method_name.replace(method_rep[0], '') - import threading - - curthread = threading.currentThread() - - cert = curthread.client_cert - server_cert = curthread.server.ssl_certificate - server_key = curthread.server.ssl_private_key - certbase = curthread.server.certbase - rights = curthread.server.rights - group_rights = curthread.server.group_rights - data_path = curthread.server.data_path - permitted_methods = ['post_server_request', 'post_client_request', - 'get_server_cert', 'get_client_cert', - 'get_crl', 'get_server_host_name', 'get_ca'] - - if method_name in permitted_methods: - return 1 - if cert is None: - if method_name not in permitted_methods: - return 0 - return 1 - - if (sid and - (method_name in LoadedMethods.rightsMethods or - method_name.endswith('_view') and - method_name[:-5] in LoadedMethods.rightsMethods)): - if not self.check_cert_sid(sid, curthread.server): - return 0 - - with open(server_cert, 'r') as f: - data_server_cert = f.read() - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, data_server_cert) - - with open(server_key, 'r') as f: - data_server_key = f.read() - Pkey = OpenSSL.crypto.load_privatekey(OpenSSL.SSL.FILETYPE_PEM, - data_server_key, 'qqqq') - signature = OpenSSL.crypto.sign(Pkey, cert, 'SHA1') - try: - OpenSSL.crypto.verify(certobj, signature, cert, 'SHA1') - except Exception as e: - print(e) - return 0 - if method_name == 'cert_add': - return 0 - certobj_cl = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, cert) - try: - com = certobj_cl.get_extension(certobj_cl.get_extension_count() - 1) - groups = com.get_data().split(':')[1] - except IndexError: - groups = "" - except Exception: - return 0 - groups_list = groups.split(',') - # open certificates database - if not os.path.exists(certbase): - open(certbase, "w").close() - 
from .cert_cmd import find_cert_id - - checked_id = find_cert_id(cert, data_path, certbase) - cert_id = int(checked_id) - count = 0 - find_flag = False - # if certificate found - if cert_id > 0: - if method_name not in LoadedMethods.rightsMethods: - return 1 - - # if group = all and not redefined group all - if 'all' in groups_list: - find_flag = False - with open(group_rights, 'r') as fd: - t = fd.read() - # find all in group_rights file - for line in t.splitlines(): - if not line: - continue - if line.split()[0] == 'all': - find_flag = True - break - # if not find_flag: - # return 1 - - for right_param in LoadedMethods.rightsMethods[method_name]: - flag = 0 - try: - # check rights - if not os.path.exists(rights): - open(rights, 'w').close() - with open(rights) as fr: - t = fr.read() - for line in t.splitlines(): - words = line.split() - # first word in line equal name input method - if words[0] == right_param: - for word in words: - try: - word = int(word) - except ValueError: - continue - # compare with certificat number - if cert_id == word: - # if has right - count += 1 - flag = 1 - break - if cert_id == -word: - return 0 - if flag: - break - - if flag: - break - # open file with groups rights - if not os.path.exists(group_rights): - open(group_rights, 'w').close() - with open(group_rights) as fd: - t = fd.read() - for line in t.splitlines(): - if not line: - continue - words = line.split(' ', 1) - # first word in line equal name input method - if words[0] in groups_list: - methods = words[1].split(',') - for word in methods: - # compare with certificat number - if right_param == word.strip(): - # if has right - count += 1 - flag = 1 - break - if flag: - break - except Exception: - return 0 - if count == len(LoadedMethods.rightsMethods[method_name]): - return 1 - if not find_flag and 'all' in groups_list: - return 1 - elif method_name in ['post_cert', 'init_session']: - return 1 - return 0 - - def create_path(self): - """ create paths for server files """ - 
import threading - - curthread = threading.currentThread() - data_path = curthread.server.data_path - sids = curthread.server.sids - pids = curthread.server.pids - cert_path = curthread.server.cert_path - if not os.path.exists(sids): - if not os.path.exists(data_path): - os.makedirs(data_path) - os.makedirs(sids) - if not os.path.exists(pids): - if not os.path.exists(data_path): - os.makedirs(data_path) - os.makedirs(pids) - if not os.path.exists(data_path + '/conf'): - if not os.path.exists(data_path): - os.makedirs(data_path) - os.makedirs(data_path + '/conf') - - if not os.path.exists(data_path + '/conf/right.conf'): - open(data_path + '/conf/right.conf', 'w').close() - - if not os.path.exists(data_path + '/conf/group_right.conf'): - open(data_path + '/conf/group_right.conf', 'w').close() - - if not os.path.exists(data_path + '/client_certs'): - os.makedirs(data_path + '/client_certs') - - if not os.path.exists(data_path + '/server_certs'): - os.makedirs(data_path + '/server_certs') - - if not os.path.exists(cert_path): - os.makedirs(cert_path) - - - def get_method_name_from_http(self, http_req_env): - retval = None - # check HTTP_SOAPACTION - retval = http_req_env.get("HTTP_SOAPACTION") - - if retval is not None: - if retval.startswith('"') and retval.endswith('"'): - retval = retval[1:-1] - - if retval.find('/') >0: - retvals = retval.split('/') - retval = '{%s}%s' % (retvals[0], retvals[1]) - - logger.debug("\033[92m" - "Method name from HTTP_SOAPACTION: %r" - "\033[0m" % retval) - if(not retval): - logger.critical("Couldn't get method name from HTTP_SOAPACTION") - return retval - - def get_sid_from_soap(self, http_req_env): - """ - rips sid param from soap request (if there is one) - """ - if (not "wsgi.input" in http_req_env): - return None - length = http_req_env.get("CONTENT_LENGTH") - input = http_req_env["wsgi.input"] - body = input.read(int(length)) - res = re.search("(.*?)<\/ns.:sid>", body) - #horrbile hack: - #cherrypy provides rfile in req_env 
which is consumed upon .read() without - # workarounds, and both we and spyne need the data on it - #so we pass a dummy with the data and read() method on to spyne - http_req_env["wsgi.input"] = io.StringIO(body) - - if(res): - return int(res.group(1)) - else: - return None - - - def handle_rpc(self, req_env, start_response): - """ - Overriding spyne.wsgiApplication method - """ - import OpenSSL - import threading - http_resp_headers = { - 'Content-Type': 'text/xml', - 'Content-Length': '0', - } - curthread = threading.currentThread() - curthread.REMOTE_ADDR = req_env.get('REMOTE_ADDR') - curthread.REMOTE_PORT = req_env.get('REMOTE_PORT') - ip = req_env.get('REMOTE_ADDR') - self.create_path() - sid = self.get_sid_from_soap(req_env) - method_name = self.get_method_name_from_http(req_env) - if method_name is None: - resp = "Could not extract method name from the request!" - http_resp_headers['Content-Length'] = str(len(resp)) - start_response(HTTP_500, http_resp_headers.items()) - return [resp] - - service = self.app.services[0] - import threading - - curthread = threading.currentThread() - # check if client certificate exists - if not hasattr(curthread, 'client_cert'): - curthread.client_cert = None - # check rights client certificate for the method - check = self.check_rights(method_name, req_env, sid) - if not check: - if curthread.client_cert: - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, curthread.client_cert) - finger = certobj.digest('SHA1') - if self.log: - self.log.debug('%s %s %s forbidden %s' - % (datetime.datetime.now().__str__(), - finger, ip, - method_name[5:])) - resp = "Permission denied: " + method_name - http_resp_headers['Content-Length'] = str(len(resp)) - start_response(HTTP_403, http_resp_headers.items()) - return [resp] - - - if sid: - curthread.lang = service.get_lang(service, sid, method_name) - if curthread.client_cert: - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, curthread.client_cert) 
- finger = certobj.digest('SHA1') - if (not method_name[5:] in not_log_list and - not method_name[5:].endswith('_view')): - if self.log: - self.log.debug('%s %s %s allowed %s' - % (datetime.datetime.now().__str__(), - finger, ip, - method_name[5:])) - - return super(ClApplication, self).handle_rpc(req_env, start_response) - -class OpenSSLAdapter(pyOpenSSLAdapter): - def verify_func(self, connection, x509, errnum, errdepth, ok): - # get client certificate - import OpenSSL - import threading - - curthread = threading.currentThread() - if errdepth == 0: - curthread.client_cert = OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_PEM, x509) - else: - curthread.client_cert = None - return ok - - def get_context(self): - """Return an SSL.Context from self attributes.""" - # See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473 - import OpenSSL - - c = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD) - c.set_options(OpenSSL.SSL.OP_NO_SSLv2 | OpenSSL.SSL.OP_NO_SSLv3) - - # c.set_passwd_cb(lambda *unused: 'qqqq') - c.use_privatekey_file(self.private_key) - c.set_verify(OpenSSL.SSL.VERIFY_PEER, self.verify_func) - - if self.certificate_chain: - c.load_verify_locations(self.certificate_chain) - - c.use_certificate_file(self.certificate) - return c diff --git a/libs_crutch/core/server/sid_pid_file.py b/libs_crutch/core/server/sid_pid_file.py deleted file mode 100644 index a1e5064..0000000 --- a/libs_crutch/core/server/sid_pid_file.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import time -from os import path -import pickle -from threading import Lock -from calculate.core.server.core_interfaces import CoreServiceInterface -from calculate.lib.utils.tools import ignore - - -class CoreWsdl(CoreServiceInterface): - sid_pid_locker = Lock() - - @staticmethod - def del_sid_pid(cls, sid): - # delete conformity pid and sid of sid_pid file - if not os.path.exists(cls.sids_pids): - temp = open(cls.sids_pids, 'w') - temp.close() - sid_pid_t = cls.sids_pids + '_temp' - - with cls.sid_pid_locker: - fd = open(cls.sids_pids, 'r') - ft = open(sid_pid_t, 'w') - while 1: - try: - # read out on 1 record - list_sid = pickle.load(fd) - except (IOError, KeyError, EOFError): - break - if sid != list_sid[0]: - pickle.dump(list_sid, ft) - else: - # end process pid = list_sid[1] - cls.serv_pid_kill(list_sid[1], sid) - # delete this of process file - while cls.glob_process_dict[list_sid[1]]['status'] == 1: - time.sleep(0.1) - cls.del_pid(cls, list_sid[1]) - # delete process file - rm_fn = path.join(cls.pids, "%d.pid" % list_sid[1]) - if path.exists(rm_fn): - with ignore(OSError): - os.unlink(rm_fn) - fd.close() - ft.close() - - ft = open(sid_pid_t, 'rb') - fd = open(cls.sids_pids, 'wb') - ft.seek(0) - fd.write(ft.read()) - ft.close() - fd.close() - - # delete sid file - sid_file = os.path.join(cls.sids, "%d.sid" % sid) - if os.path.exists(sid_file): - os.unlink(sid_file) - os.unlink(sid_pid_t) - return 0 - - @staticmethod - def del_pid_from_sid_pid(cls, pid): - # delete conformity pid and sid of sid_pid file - if not 
os.path.exists(cls.sids_pids): - temp = open(cls.sids_pids, 'w') - temp.close() - sid_pid_t = cls.sids_pids + '_temp' - - with cls.sid_pid_locker: - fd = open(cls.sids_pids, 'r') - ft = open(sid_pid_t, 'w') - while 1: - try: - # read out on 1 record - list_sid = pickle.load(fd) - except (KeyError, EOFError, IOError): - break - if pid != list_sid[1]: - pickle.dump(list_sid, ft) - fd.close() - ft.close() - - ft = open(sid_pid_t, 'rb') - fd = open(cls.sids_pids, 'wb') - ft.seek(0) - fd.write(ft.read()) - ft.close() - fd.close() - - # delete temp file - os.unlink(sid_pid_t) - return 0 - - @staticmethod - def find_sid_pid_file(cls, sid): - results = [] - with cls.sid_pid_locker: - if not os.path.exists(cls.sids_pids): - temp = open(cls.sids_pids, 'w') - temp.close() - fd = open(cls.sids_pids, 'r') - while 1: - try: - # read out on 1 record - list_sid = pickle.load(fd) - except (IOError, KeyError, EOFError): - break - if sid == list_sid[0]: - results.append(list_sid[1]) - if not results: - results.append(0) - fd.close() - return results - - @staticmethod - def serv_pid_info(cls, sid, pid): - f = 0 - results = [] - # Check pid presence and conformity sid - - with cls.sid_pid_locker: - fd = open(cls.sids_pids, 'r') - while 1: - try: - # read out on 1 record - list_sid = pickle.load(fd) - except (IOError, KeyError, EOFError): - break - if sid == list_sid[0]: - if pid == list_sid[1]: - f = 1 - fd.close() - - # Get information about pid - if f == 1: - _t = cls.glob_process_dict[pid] - # process id - results.append(str(pid)) - # current state - results.append(str(cls.glob_process_dict[pid]['status'])) - # start time - results.append(str(cls.glob_process_dict[pid]['time'])) - # process (function) name - results.append(str(cls.glob_process_dict[pid]['name'])) - # process soap method name - results.append(str(cls.glob_process_dict[pid]['method_name'])) - return results - - @staticmethod - def add_sid_pid(cls, sid, pid): - """ - add conformity pid and sin in sid_pid file - """ - 
with cls.sid_pid_locker: - if not os.path.exists(cls.sids_pids): - with open(cls.sids_pids, 'w'): - pass - try: - with open(cls.sids_pids, 'a') as fd: - pickle.dump([sid, pid], fd) - return 0 - except (OSError, IOError, KeyError, EOFError): - return 1 diff --git a/libs_crutch/core/server/spyne_adapter.py b/libs_crutch/core/server/spyne_adapter.py deleted file mode 100644 index f21ca88..0000000 --- a/libs_crutch/core/server/spyne_adapter.py +++ /dev/null @@ -1,106 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2021 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import -import calculate.contrib -from spyne.service import Service, ServiceMeta -from spyne import String, Integer, Array, rpc -from .func import WsdlMeta -from .api_types import ReturnedMessage, ViewInfo, ViewParams -from spyne.protocol._outbase import OutProtocolBase -from .api_types import LazyString - -#monkey patch: -def to_unicode(self, cls, value, *args, **kwargs): - if value is None: - return None - - #### PATCH #################### - if isinstance(value, LazyString): - value = str(value) - ############################### - - handler = self._to_unicode_handlers[cls] - retval = handler(cls, value, *args, **kwargs) - - return retval - -OutProtocolBase.to_unicode = to_unicode - - -class ServiceMetaAdapter(ServiceMeta): - #ref to the created class - global_class_ref = None - -#this is used for a localCall -class CoreInnerWsdl(ServiceMeta, WsdlMeta): - pass - -# Nevermind, following is deprecated: - -#the idea is to decorate methods that need rpc with this -#and then find them in make_service and wrap them in funcs without self ref -# def rpc_a(): -# def func(f): -# f.wrap_rpc = True -# return f -# return func - -#wraps a func in rpc decorator -#this is needed because Spyne service does not allow self ref in function calls -def make_rpc_func_view(func_to_call): - def _function(ctx, sid, params): - return func_to_call(ServiceMetaAdapter.global_class_ref, sid, params) - _function.__name__ = func_to_call.__name__ - return rpc(Integer, ViewParams, _returns=ViewInfo)(_function) - -def make_rpc_func_vars(func_to_call): - def _function(ctx, dv=None, params=None): - return func_to_call(ServiceMetaAdapter.global_class_ref, dv, params) - _function.__name__ = func_to_call.__name__ - return staticmethod(_function) - -def make_rpc_func_caller(func_to_call): - def _function(ctx, sid, params): - return func_to_call(ServiceMetaAdapter.global_class_ref, sid, params) - _function.__name__ = func_to_call.__name__ - info_class = 
func_to_call.info_class - return rpc(Integer, info_class, _returns=Array(ReturnedMessage))(_function) - -def make_service(basic_class, wsdl_core_class_list, outer_wsdl_class_list, service_name): - #for Core, rpc methods only present in basic_class, we need others just to have a mono class - #for outer wsdl classes, we have to add stateless wrap methods on our own - - saved_pub_methods = basic_class.public_methods - - functions_to_add = {} - for klass in outer_wsdl_class_list: - for meth in klass.__dict__: - if not meth.startswith("__") and not meth == "methods": - if meth.endswith("_view"): - functions_to_add[meth] = make_rpc_func_view(getattr(klass, meth)) - elif meth.endswith("_vars"): - functions_to_add[meth] = make_rpc_func_vars(getattr(klass, meth)) - else: - functions_to_add[meth] = make_rpc_func_caller(getattr(klass, meth)) - - functions_to_add.update({"__metaclass__" : ServiceMetaAdapter}) - ClService = ServiceMetaAdapter(service_name, tuple([basic_class] + wsdl_core_class_list), functions_to_add) - ClService.public_methods.update(saved_pub_methods) - #TODO replace with a single ref - ServiceMetaAdapter.global_class_ref = ClService - basic_class.set_comb_class_ref(ClService) - return ClService diff --git a/libs_crutch/core/server/tasks.py b/libs_crutch/core/server/tasks.py deleted file mode 100644 index 0feb3ab..0000000 --- a/libs_crutch/core/server/tasks.py +++ /dev/null @@ -1,100 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2014-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import sys -from os import path -import time -import glob -import pickle -from calculate.core.datavars import DataVarsCore -from calculate.lib.cl_lang import setLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -def restart(process_dict): - """ - Функция отслеживает необходимость перезапуска демона и - перезапускает init.d/calculate-core - """ - # cert_live = 10080 - # Get value of period and lifetime session from DataVars - ob = DataVarsCore() - ob.importCore() - - if not ob.flIniFile(): - return - - restart_file = ob.Get('cl_core_restart_path') - stop_file = ob.Get('cl_core_dbus_stop_path') - service_name = "calculate-core" - if path.exists(restart_file): - os.unlink(restart_file) - - while True: - if path.exists(restart_file) and not process_dict: - # Частота проверки - if path.exists(stop_file): - kill_server() - else: - os.system('/etc/init.d/%s restart &>/dev/null &' % service_name) - return - time.sleep(1) - -def kill_server(): - import os - os.kill(os.getpid(), 2) - - -def dbus_stop(process_dict, sids_dn, base_obj): - """ - Функция отслеживает необходимость остнова демона если - нет активной сессии и демон запускался через dbus - """ - # cert_live = 10080 - # Get value of period and lifetime session from DataVars - ob = DataVarsCore() - ob.importCore() - - if not ob.flIniFile(): - return - - stop_file = ob.Get('cl_core_dbus_stop_path') - stop_server = False - - while True: - if path.exists(stop_file) and not process_dict: - with base_obj.sid_locker: - for fn in glob.glob("%s/*.sid" % sids_dn): - if os.path.isfile(fn): - with open(fn) as fd: - sid_inf = pickle.load(fd) - # обнаружена рабочая сессия - if sid_inf[2] != 1: - stop_server = True - break - else: - # не выключаем сессию до того пока не пройдёт хотя бы одна сессия - if stop_server: - # остановить сервер - try: - os.unlink(stop_file) - 
except: - pass - kill_server() - time.sleep(5) diff --git a/libs_crutch/core/set_vars.py b/libs_crutch/core/set_vars.py deleted file mode 100644 index b2ccbe9..0000000 --- a/libs_crutch/core/set_vars.py +++ /dev/null @@ -1,113 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import sys -import re -from calculate.core.server.core_interfaces import MethodsInterface - -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - -VARIABLE, MODE, LOCATION, VALUE = 0, 1, 2, 3 - - -class Variables(MethodsInterface): - def writeVariables(self, vardata): - """ - Write variable to env files, or delete from env files - """ - dv = self.clVars - data = filter(lambda x: x[LOCATION] or - not x[LOCATION] and - dv.isFromIni(x[VARIABLE]), - vardata) - if data: - head = [_("Variable"), _("Mode"), _("Location"), _("Value")] - self.printTable(_("List of variables"), head, data) - for varname, mode, location, value in data: - if location: - value = dv.unserialize(dv.getInfo(varname).type, str(value)) - section, op, varname = varname.rpartition('.') - if not location: - for env_location in dv.Get('main.cl_env_location'): - if not dv.Delete(varname, env_location, header=section): - self.printWARNING( - _("Failed to delete variable {var} from " - "{location}").format( 
- var=varname, location=env_location)) - else: - if varname in dv.iniCache: - oldValue = dv.unserialize( - dv.getInfo(varname).type, - str(dv.iniCache[varname]['value'])) - else: - oldValue = None - if value != oldValue: - dv.Write(varname, value, location=location, - header=section) - else: - self.printSUCCESS("Nothing to set") - return True - - def showVariables(self, showVal, filterVal, vardata): - """ - Show variables by cl_variable_filter - """ - dv = self.clVars - removeQuotes = lambda x: x if x != "''" else "" - reIndex = re.compile("((?:\w+\.)?(\w+))(?:\[(\d+)\])") - if showVal: - index = reIndex.search(showVal) - if index: - varname = index.group(1) - index = int(index.group(3)) - else: - varname = showVal - prevVal = str(dv.Select('cl_variable_value', - where='cl_variable_fullname', - eq=varname, limit=1)) - if index is not None: - typeVar = dv.getInfo(varname).type - val = dv.unserialize(typeVar, prevVal) - if index < len(val): - self.printDefault(removeQuotes(val[index])) - else: - self.printDefault("") - else: - self.printDefault(removeQuotes(prevVal)) - return True - filter_names = {'all': None, - 'userset': lambda x: x[LOCATION], - 'writable': lambda x: x[MODE].startswith("w"), - 'system': lambda x: x[LOCATION] == "system", - 'local': lambda x: x[LOCATION] == "local", - 'remote': lambda x: x[LOCATION] == "remote"} - filterFunc = filter_names.get(filterVal, - lambda x: filterVal in x[VARIABLE]) - body = filter(filterFunc, vardata) - dv.close() - if body: - head = [_("Variable"), _("Mode"), - _("Location"), _("Value")] - self.printTable(_("List of variables"), head, body) - return True - else: - self.printWARNING(_("No such variables")) - return True diff --git a/libs_crutch/core/setup_cache.py b/libs_crutch/core/setup_cache.py deleted file mode 100644 index 6b95a64..0000000 --- a/libs_crutch/core/setup_cache.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2015-2016 Mir Calculate. 
http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re -import os -from os import path -from itertools import chain, groupby -import time - -from calculate.lib.datavars import DataVars -from calculate.lib.cl_template import templateFunction -from calculate.lib.utils.files import readFile, writeFile - -get_pkgname_by_filename = templateFunction.get_pkgname_by_filename - - -class Cache(object): - reMerge = re.compile("(merge|mergepkg)\(([-\w/]*)(?:\[[^\]]\])?\)[-!=<>]") - rePatch = re.compile("^#\s*Calculate.*ac_install_patch==on") - - PATCH_TYPE = "patch" - MERGE_TYPE = "merge" - DIRECTORY_TEMPLATE = ".calculate_directory" - CLT_SUFFIX = ".clt" - - def __init__(self, dv=None): - if dv is None: - dv = DataVars() - dv.importData() - dv.flIniFile() - self.dv = dv - self.base_dn = "/var/lib/calculate/calculate-core/cache" - self.fn_mtime = path.join(self.base_dn, "merge.mtime") - self.fn_patch = path.join(self.base_dn, "merge-patch.list") - self.fn_setup = path.join(self.base_dn, "merge-setup.list") - - def search_merge(self, dn): - """ - Сканировать директорию с шаблонами - """ - patch_dirs = [] - for root, dirs, files in os.walk(dn): - for fn in (path.join(root, x) for x in files): - data = readFile(fn) - if self.rePatch.search(data): - if path.basename(fn) == self.DIRECTORY_TEMPLATE: - patch_dirs.append(path.dirname(fn)) - patch_template = True - else: - if any(fn.startswith(x) for x in patch_dirs): - patch_template = True - else: - 
patch_template = False - for fname, pkg in self.reMerge.findall(data): - pkg = pkg or get_pkgname_by_filename(fn) - yield (self.PATCH_TYPE if patch_template - else self.MERGE_TYPE, pkg) - - def search_merge_clt(self, dn): - """ - Сканировать clt шаблоны - """ - for root, dirs, files in os.walk(dn): - for fn in (path.join(root, x) for x in files - if x.endswith(self.CLT_SUFFIX)): - data = readFile(fn) - for fname, pkg in self.reMerge.findall(data): - pkg = pkg or get_pkgname_by_filename(fn) - yield (self.MERGE_TYPE, pkg) - - @staticmethod - def check_new_that(mtime_fn, dirs, fn_filter=None): - """ - Проверить появились ли новые файлы после последней проверки - """ - if not path.exists(mtime_fn): - return True - check_mtime = os.stat(mtime_fn).st_mtime - for dn in dirs: - for root, dirs, files in os.walk(dn): - for fn in (path.join(root, x) for x in files - if fn_filter is None or fn_filter(x)): - if os.stat(fn).st_mtime > check_mtime: - return True - return False - - def update(self, force=False): - template_path = self.dv.Get('main.cl_template_path') - if (force or - self.check_new_that( - self.fn_mtime, ['/etc'], - fn_filter=lambda x: x.endswith(self.CLT_SUFFIX)) or - self.check_new_that(self.fn_mtime, template_path)): - all_packages = chain(self.search_merge_clt('/etc'), - *[self.search_merge(x) for x in template_path]) - for _type, pkgs in groupby(sorted(all_packages), lambda x: x[0]): - list_packages = sorted(set(y for x, y in pkgs)) - if _type == self.MERGE_TYPE: - with writeFile(self.fn_setup) as f: - f.write("\n".join(list_packages)) - if _type == self.PATCH_TYPE: - with writeFile(self.fn_patch) as f: - f.write("\n".join(list_packages)) - with writeFile(self.fn_mtime) as f: - f.write(str(time.time())) diff --git a/libs_crutch/core/setup_package.py b/libs_crutch/core/setup_package.py deleted file mode 100644 index 5a28078..0000000 --- a/libs_crutch/core/setup_package.py +++ /dev/null @@ -1,414 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir 
Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import sys -import os -from os import path - -from calculate.core.datavars import DataVars -from calculate.core.server.core_interfaces import MethodsInterface -from calculate.core.server.gen_pid import get_pid_info, ProcessMode -from calculate.lib.datavars import Variable -from calculate.lib.cl_log import log -from calculate.lib.utils.common import getPasswdUsers -from calculate.lib.utils.portage import isPkgInstalled, reVerSplitToPV -from calculate.lib.utils.colortext import get_color_print -import pwd -import glob -import calculate.lib.cl_template as cl_template - -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - -__ = getLazyLocalTranslate(_) - - -class SetupPackageError(Exception): - """ - Исключение вызванное во время настройки пакета - """ - - -class UpdateLogger(MethodsInterface): - """ - Логгер для обновления настроек системы - """ - - logger = log("apply-templates", - filename="/var/log/calculate/update_config.log", - formatter="%(asctime)s - %(levelname)s - %(message)s") - - def ERROR(self, *arg, **argv): - """ - Вывести ошибку в лог и на экран - """ - self.logger.error(arg[0]) - self.printERROR(*arg, **argv) - - def SUCCESS(self, *arg, **argv): - """ - Вывести сообщение в лог и на экран - """ - self.logger.info(arg[0]) - self.printSUCCESS(*arg, **argv) - - def 
WARNING(self, *arg, **argv): - """ - Вывести предупреждение в лог и на экран - """ - self.logger.warn(arg[0]) - self.printWARNING(*arg, **argv) - - -class ChainProgressTemplate(cl_template.ProgressTemplate): - """ - Наложение шаблонов с определением перенастройки зависимых пакетов - """ - - def __init__(self, startTask, endTask, *args, **kwargs): - self.startTask = startTask - self.endTask = endTask - cl_template.ProgressTemplate.__init__(self, *args, **kwargs) - - def changeMergePackage(self, packages): - """ - Изменился настраиваемый пакет (по зависимостям) - """ - self.endTask() - packages = filter(isPkgInstalled, - packages) - self.startTask(_("Configuring dependencies: %s") % - ",".join(packages)) - return True - - -class StubVariable(Variable): - """ - Переменная-заглушка используется при обновлении настроек пакета - в emerge. Если переменная не найдена, то будет возвращена пустая - строка. - """ - value = "" - - -class UpdateConfigs(UpdateLogger): - """ - Обновить настройки пакета в пользовательских профилях - """ - - def init(self): - self.color_print = get_color_print() - - def getXUsers(self): - """ - Получить пользователей в X сессии - """ - return list( - self.clVars.Get('desktop.cl_desktop_online_user')) + ["root"] - - def getConfiguredPasswdUsers(self): - """ - Получить пользоватлей, которые есть в /etc/passwd (UID>=1000) - и при этом у них есть настройка профиля (.calculate/ini.env) - """ - user, dn = 0, 1 - iniEnv = ".calculate/ini.env" - return map(lambda x: x[user], - filter(lambda x: path.exists(path.join(x[dn], iniEnv)), - map(lambda x: (x, pwd.getpwnam(x).pw_dir), - getPasswdUsers()))) - - def _setClMergePkg(self, clVars, category, nameProgram, slot=None): - """ - Установить переменную cl_merge_pkg в зависимости от category и - nameProgram - """ - # выбрана перенастройка всех пакетов, установленных в системе - if nameProgram == "all": - clVars.Set("cl_merge_pkg", - map(lambda x: "{CATEGORY}/{PN}".format(**x), - filter(None, - 
map(reVerSplitToPV, - glob.glob('/var/db/pkg/*/*')))), - True) - else: - if slot: - clVars.Set("cl_merge_pkg", ["%s/%s:%s" % (category, - nameProgram, slot)], True) - else: - clVars.Set("cl_merge_pkg", ["%s/%s" % (category, nameProgram)], - True) - clVars.Set("cl_merge_set", "on", True) - - def updateDesktopConfig(self, nameProgram, version, slot, category, - configPath, - rootSet, verbose, dispatchConf, templates_locate, - ebuildPhase, useClt, arch_machine): - """ - Настроить пакеты в профилях пользователей - """ - # настраиватся будут пользователи из активных X сессии - # и сконфигурированные - xUsers = filter(lambda x: not "(unknown)" in x, - self.getXUsers()) - if not xUsers: - self.logger.info(_("Package %s") % nameProgram) - self.logger.warn(_("X session users not found")) - return True - self.logger.info(_("Package %s") % nameProgram) - self.logger.info(_("Updating user configuration files")) - firstValue = True - - clVars = DataVars() - try: - clVars.importData() - clVars.flIniFile() - setupable_users = set(xUsers + self.getConfiguredPasswdUsers()) - for userName in list(setupable_users): - clVars.Set("cl_root_path", '/', True) - clVars.Set("ur_login", userName, True) - clVars.Set("cl_action", "desktop", True) - clVars.Set("cl_verbose_set", verbose, True) - clVars.Set("cl_protect_use_set", "off", True) - clVars.Set("cl_template_path_use", templates_locate, True) - clVars.Set("install.os_install_arch_machine", arch_machine, - True) - - self._setClMergePkg(clVars, category, nameProgram) - - clTempl = ChainProgressTemplate(self.startTask, - self.endTask, - self.setProgress, - clVars, cltObj=False, - printSUCCESS=self.printSUCCESS, - printERROR=self.printERROR, - askConfirm=self.askConfirm, - printWARNING=self.printWARNING, - printWarning=False) - clTempl.onFirstValue = lambda *args: \ - self.startTask( - _("User configuring the {nameProgram} package by " - "Calculate Utilities").format( - nameProgram=nameProgram)) - clTempl.firstValue = firstValue - 
clTempl.applyTemplates() - firstValue = clTempl.firstValue - nofastlogin_users = set(getPasswdUsers()) - setupable_users - fastlogin_path = self.clVars.Get( - 'desktop.cl_desktop_fastlogin_path') - for user in nofastlogin_users: - fastlogin_user = path.join(fastlogin_path, user) - if path.exists(fastlogin_user): - try: - os.unlink(fastlogin_user) - except OSError: - pass - finally: - clVars.close() - self.endTask() - return True - - def updateSystemConfig(self, nameProgram, version, slot, category, - configPath, - rootSet, verbose, dispatchConf, templates_locate, - ebuildPhase, useClt, arch_machine): - """ - Обновить конфигурационные файлы системы - """ - self.logger.info(_("Package %s") % nameProgram) - self.logger.info(_("Updating system cofiguration files")) - if not os.path.exists(configPath): - self.ERROR(_("Path '%s' does not exist") % configPath) - return False - - clVars = DataVars() - try: - clVars.importData() - clVars.flIniFile() - clVars.Set("cl_root_path", configPath, True) - - # если конфигурирование пакета происходит не в корне - if rootSet: - # остальные пакеты настраиваются в корень - clVars.Set("cl_root_path_next", '/', True) - - if self.clVars.Get('core.cl_core_pkg_slot_opt') != "all": - self._setClMergePkg(clVars, category, nameProgram, slot) - else: - self._setClMergePkg(clVars, category, nameProgram) - clVars.Set("cl_action", 'merge', True) - clVars.Set("cl_verbose_set", verbose, True) - clVars.Set("cl_dispatch_conf", dispatchConf, True) - clVars.Set("cl_template_path_use", templates_locate, True) - clVars.Set("core.cl_core_pkg_slot", slot, True) - clVars.Set("install.os_install_arch_machine", arch_machine, True) - useClt = useClt in (True, "on") - - dictVer = {slot: version} - cl_template.templateFunction.installProg.update( - {"%s/%s" % (category, nameProgram): dictVer, - "%s" % nameProgram: dictVer}) - - # используем объект шаблонов - # с clt шаблонами, clt фильтром, без использования postDispatchConf - clTempl = ChainProgressTemplate( - 
self.startTask, - self.endTask, - self.setProgress, - clVars, cltObj=useClt, - cltFilter=True, - printSUCCESS=self.printSUCCESS, - printERROR=self.printERROR, - printWARNING=self.printWARNING, - askConfirm=self.askConfirm, - dispatchConf=self.dispatchConf - if not ebuildPhase and self.isInteractive() else None, - printWarning=False) - # выводим сообщение о настройке пакета только если действительно - # менялись файлы - clTempl.onFirstValue = lambda *args: \ - self.startTask(_("System configuring for {nameProgram} " - "package by Calculate Utilities").format( - nameProgram=nameProgram)) - clTempl.applyTemplates() - finally: - clVars.close() - self.endTask() - return True - - def patchPackage(self, configPath, nameProgram, arch_machine): - """ - Наложить патчи на пакет - """ - self.clVars.Set("cl_root_path", configPath, True) - self.clVars.Set("install.os_install_arch_machine", arch_machine, True) - clTempl = ChainProgressTemplate(self.startTask, - self.endTask, - self.setProgress, - self.clVars, cltObj=False, - printSUCCESS=self.printSUCCESS, - printERROR=self.printERROR, - askConfirm=self.askConfirm, - printWARNING=self.printWARNING, - printWarning=False) - clTempl.onFirstValue = lambda *args: self.startTask( - _("Using patches for the {nameProgram} package by " - "Calculate Utilities").format( - nameProgram=nameProgram), - progress=True) - clTempl.applyTemplates() - return True - - def checkRunning(self): - """ - Проверить наличие запущенных процессов в cl-core - """ - from calculate.core.server.loaded_methods import LoadedMethods - - cur_pid = os.getpid() - pid_list = [pid for pid in get_pid_info(self.clVars) - if (pid.get("mode", '') == ProcessMode.CoreDaemon and - pid.get("os_pid", '') != cur_pid)] - if pid_list: - _print = self.color_print - method_names = {value[0]: value[2] for key, value in - LoadedMethods.conMethods.items()} - self.printSUCCESS( - _("Calculate core is executing the following tasks")) - mult = _print.bold("*") - for pid in pid_list: - name 
= pid['name'] - method_name = method_names.get(name, name) - self.printDefault( - " {mult} {title} ({name})".format(mult=mult, - title=method_name, - name=name)) - answer = self.askConfirm( - _("Would you like to terminate these tasks?"), "no") - if answer == "no": - raise KeyboardInterrupt - return True - - def restartService(self, service_name): - """ - Перезапустить указанный сервис - """ - import time - - time.sleep(1) - os.system('/etc/init.d/%s restart &>/dev/null &' % service_name) - return True - - def processConfig(self, nameProgram, version, slot, category, - verbose, dispatchConf, templates_locate, - ebuildPhase, useClt, arch_machine): - """ - Обновить конфигурационные файлы системы - """ - self.logger.info(_("Package %s") % nameProgram) - self.logger.info(_("Updating system cofiguration files")) - - clVars = DataVars() - try: - clVars.importData() - clVars.flIniFile() - clVars.Set("cl_root_path", "/", True) - - if self.clVars.Get('core.cl_core_pkg_slot_opt') != "all": - self._setClMergePkg(clVars, category, nameProgram, slot) - else: - self._setClMergePkg(clVars, category, nameProgram) - clVars.Set("cl_action", 'config', True) - clVars.Set("cl_verbose_set", verbose, True) - clVars.Set("cl_dispatch_conf", dispatchConf, True) - clVars.Set("cl_template_path_use", templates_locate, True) - clVars.Set("core.cl_core_pkg_slot", slot, True) - useClt = False - - #dictVer = {slot: version} - #cl_template.templateFunction.installProg.update( - # {"%s/%s" % (category, nameProgram): dictVer, - # "%s" % nameProgram: dictVer}) - - # используем объект шаблонов - # с clt шаблонами, clt фильтром, без использования postDispatchConf - clTempl = ChainProgressTemplate( - self.startTask, - self.endTask, - self.setProgress, - clVars, cltObj=useClt, - cltFilter=True, - printSUCCESS=self.printSUCCESS, - printERROR=self.printERROR, - printWARNING=self.printWARNING, - askConfirm=self.askConfirm, - dispatchConf=self.dispatchConf - if not ebuildPhase and self.isInteractive() else 
None, - printWarning=False) - # выводим сообщение о настройке пакета только если действительно - # менялись файлы - clTempl.onFirstValue = lambda *args: \ - self.startTask(_("System configuring for {nameProgram} " - "package by Calculate Utilities").format( - nameProgram=nameProgram)) - clTempl.applyTemplates() - finally: - clVars.close() - self.endTask() - return True diff --git a/libs_crutch/core/utils/__init__.py b/libs_crutch/core/utils/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/core/utils/cl_backup.py b/libs_crutch/core/utils/cl_backup.py deleted file mode 100644 index e795fb6..0000000 --- a/libs_crutch/core/utils/cl_backup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from calculate.core.server.func import Action, Tasks -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.utils.files import FilesError -from calculate.lib.datavars import VariableError, DataVarsError -from calculate.core.backup import BackupError -from calculate.lib.cl_template import TemplatesError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClBackupAction(Action): - """ - Создание резервной копии настроек - """ - # ошибки, которые отображаются без подробностей - native_error = (BackupError, FilesError, DataVarsError, VariableError, - TemplatesError) - successMessage = __("Backup successfully completed!") - failedMessage = __("Failed to perform backup!") - interruptMessage = __("Backup manually interrupted") - - tasks = [ - {'name': 'prepare_dir', - 'method': 'Backup.prepare_backup(core.cl_backup_path,' - 'core.cl_backup_root_name)' - }, - {'name': 'backup_marked', - 'message': __("Backing up files configured by templates"), - 'method': 'Backup.backup_marked("/",core.cl_backup_path,' - '"etc",core.cl_backup_root_name)' - }, - {'name': 'templates', - 'message': __("Templates preparing for backup"), - 'method': 'Backup.applyTemplates(install.cl_source,' - 'False,True,None,True,True)', - }, - {'name': 'special_backup', - 'method': 'Backup.special_backup(core.cl_backup_path)' - }, - {'name': 'accounts_backup', - 'message': __("Backing up accounts info"), - 'method': 'Backup.save_accounts(core.cl_backup_path)' - }, - {'name': 'prepare_content', - 'message': __("Calculating checksums"), - 'method': 'Backup.prepare_contents(core.cl_backup_path,' - 'core.cl_backup_file_contents,core.cl_backup_root_name)', - }, - {'name': 'save_initd', - 'method': 'Backup.save_initd(core.cl_backup_path,' - 'core.cl_backup_root_name)', - }, - {'name': 'pack_backup', - 'message': __("Packing backup"), - 'method': 'Backup.create_archive(core.cl_backup_path,' - 
'core.cl_backup_file)' - }, - {'name': 'remove_dir', - 'message': __("Clearing temporary files"), - 'method': 'Backup.remove_directory(core.cl_backup_path)', - 'depend': Tasks.success_one_of("prepare_dir") - }, - {'name': 'display_verbose', - 'method': 'Backup.display_backup_configs(core.cl_backup_file)', - 'condition': lambda Get: Get('core.cl_backup_verbose_set') == 'on' - }, - {'name': 'display_arch', - 'message': __("Archive created: {core.cl_backup_file}") - } - ] - diff --git a/libs_crutch/core/utils/cl_backup_restore.py b/libs_crutch/core/utils/cl_backup_restore.py deleted file mode 100644 index 16e62b0..0000000 --- a/libs_crutch/core/utils/cl_backup_restore.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from calculate.core.server.func import Action, Tasks -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.utils.files import FilesError -from calculate.lib.datavars import VariableError, DataVarsError -from calculate.core.backup import BackupError -from calculate.lib.cl_template import TemplatesError -from tarfile import ReadError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClBackupRestoreAction(Action): - """ - Восстановление настроек из резервной копии - """ - # ошибки, которые отображаются без подробностей - native_error = (BackupError, FilesError, DataVarsError, VariableError, - TemplatesError, ReadError) - successMessage = __("Files successfully restored from backup!") - failedMessage = __("Failed to restore from backup!") - interruptMessage = __("Restoration from backup manually interrupted") - - restore_tasks = [] - - tasks = [ - {'name': 'unpack_backup', - 'message': __("Unpacking backup"), - 'method': 'Backup.open_archive(core.cl_backup_path,' - 'core.cl_backup_file)' - }, - {'name': 'restore_accounts', - 'message': __("Restoring user accounts"), - 'method': 'Backup.restore_accounts(core.cl_backup_path)' - }, - {'name': 'restore_network', - 'message': __("Restoring network"), - 'method': 'Backup.restore_network(core.cl_backup_path)' - }, - {'name': 'special_restore', - 'method': 'Backup.special_restore(core.cl_backup_path)' - }, - {'name': 'templates', - 'message': __("Restoring services"), - 'method': 'Backup.applyTemplates(install.cl_source,' - 'False,True,None,True,True)', - }, - {'name': 'clear_autorun', - 'method': 'Backup.clear_autorun()', - }, - {'name': 'restore_configs', - 'message': __("Unpacking configuration files"), - 'method': 'Backup.restore_configs(core.cl_backup_file,"/",' - 'core.cl_backup_contents_name,core.cl_backup_root_name)' - }, - {'name': 'restore_content', - 'message': __("Restoring file owners"), - 
'method': 'Backup.restore_contents(core.cl_backup_file_contents,' - '"/")', - }, - {'name': 'display_verbose', - 'method': 'Backup.display_changed_configs()', - 'condition': lambda Get: Get('core.cl_backup_verbose_set') == 'on' - }, - {'name': 'set_service_mode', - 'method': 'Backup.set_service_action()', - }, - {'name': 'templates_service', - 'message': __("Configuration after restoring from backup"), - 'method': 'Backup.applyTemplates(install.cl_source,' - 'False,True,None,True,True)', - }, - {'name': 'remove_dir', - 'method': 'Backup.remove_directory(core.cl_backup_path)', - 'depend': Tasks.success_one_of("unpack_backup") - }, - {'name': 'dispatch_conf', - 'message': __("Updating configuration files"), - 'method': 'Backup.dispatchConf()', - 'condition': lambda Get: Get('cl_dispatch_conf') != 'skip' - }, - {'name': 'openrc_default', - 'message': __("Running stopped services"), - 'method': 'Backup.run_openrc("default")', - } - ] diff --git a/libs_crutch/core/utils/cl_config.py b/libs_crutch/core/utils/cl_config.py deleted file mode 100644 index 5150bcb..0000000 --- a/libs_crutch/core/utils/cl_config.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from calculate.core.server.func import Action -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.utils.files import FilesError -from calculate.lib.datavars import VariableError, DataVarsError -from calculate.lib.cl_template import TemplatesError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClConfigAction(Action): - """ - Действие настройка пакета для пользователя и системы - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError, TemplatesError, VariableError, DataVarsError) - templateTaskMessage = __("The system is being configured") - successMessage = None - failedMessage = __("Failed to configure the system!") - interruptMessage = __("Configuration manually interrupted") - - tasks = [ - {'name': 'process_config', - # наложить шаблоны настройки пакета - 'method': 'UpdateConfigs.processConfig(cl_core_pkg_name,' - 'cl_core_pkg_version,cl_core_pkg_slot,' - 'cl_core_pkg_category,cl_verbose_set,cl_dispatch_conf,' - 'cl_template_path_use,cl_ebuild_phase,' - 'cl_template_clt_set,cl_core_arch_machine)', - }, - ] diff --git a/libs_crutch/core/utils/cl_core_custom.py b/libs_crutch/core/utils/cl_core_custom.py deleted file mode 100644 index 6a63c50..0000000 --- a/libs_crutch/core/utils/cl_core_custom.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2013-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.core.server.func import Action -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.datavars import VariableError -from calculate.lib.cl_template import TemplatesError -from calculate.lib.utils.files import FilesError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClCoreCustomAction(Action): - """ - Действие для настройки параметров видео - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError, TemplatesError, VariableError) - - successMessage = __("Action successfully completed!") - failedMessage = __("Failed to perform action!") - interruptMessage = __("Action manually interrupted") - - def __init__(self): - # список задач для действия - self.tasks = [ - {'name': 'set_vars', - 'method': ( - 'UpdateConfigs.setVariable("install.os_install_arch_machine",' - 'cl_core_arch_machine, True)') - }, - {'name': 'apply_templates', - # наложить шаблоны на текущий дистрибутив, включая clt шаблоны - # без использования фильтров по clt шаблонам - 'method': 'UpdateConfigs.applyTemplates(None,False,' - 'None,None)', - }, - {'name': 'failed_action', - 'error': __("Action {ac_custom_name} not found"), - 'condition': lambda Get: not filter( - lambda x: (x and x[0] == 'ac_custom_name' and - x[1] == Get('ac_custom_name')), - Get('cl_used_action')) - } - ] - - Action.__init__(self) diff --git a/libs_crutch/core/utils/cl_core_dispatch.py b/libs_crutch/core/utils/cl_core_dispatch.py deleted file mode 100644 index 60e9e36..0000000 --- a/libs_crutch/core/utils/cl_core_dispatch.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. 
http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.core.server.func import Action -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.utils.files import FilesError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClCoreDispatchAction(Action): - """ - Действие обновление конфигурационных файлов - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError,) - - successMessage = __("Dispatch complete!") - failedMessage = __("Failed to dispatch configuration files!") - interruptMessage = __("Dispatching manually interrupted") - - # список задач для действия - tasks = [ - {'name': 'dispatch', - 'method': 'UpdateConfigs.dispatchConf()', - }] diff --git a/libs_crutch/core/utils/cl_core_group.py b/libs_crutch/core/utils/cl_core_group.py deleted file mode 100644 index c008191..0000000 --- a/libs_crutch/core/utils/cl_core_group.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.core.server.func import Action -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.utils.files import FilesError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClCoreGroupShow(Action): - """ - Отображение групп - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError,) - successMessage = None - failedMessage = None - interruptMessage = __("Viewing manually interrupted") - - tasks = [ - {'name': 'view_group', - 'method': 'Groups.show_groups_meth(cl_page_count,cl_page_offset)' - }] - - -class ClCoreGroupMod(Action): - """ - Изменение группы - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError,) - successMessage = __("Group {cl_core_group} changed") - failedMessage = __("Failed to change {cl_core_group} group") - interruptMessage = __("Modifying manually interrupted") - - tasks = [ - {'name': 'mod_group', - 'method': ( - 'Groups.change_group_meth(cl_core_group,cl_core_group_rights,' - 'cl_core_group_rights_path)') - }] - - -class ClCoreGroupAdd(Action): - """ - Добавление группы - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError,) - successMessage = __("Group {cl_core_group} added") - failedMessage = __("Failed to add {cl_core_group} group") - interruptMessage = __("Adding manually interrupted") - - tasks = [ - {'name': 'add_group', - 'method': 'Groups.add_group_meth(cl_core_group,cl_core_group_rights,' - 
'cl_core_group_rights_path)' - }] - - -class ClCoreGroupDel(Action): - """ - Удаление группы - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError,) - successMessage = __("Group {cl_core_group} deleted") - failedMessage = __("Failed to delete {cl_core_group} group") - interruptMessage = __("Deleting manually interrupted") - - tasks = [ - {'name': 'del_group', - 'method': 'Groups.del_group_meth(cl_core_group,' - 'cl_core_group_rights_path)' - }] diff --git a/libs_crutch/core/utils/cl_core_patch.py b/libs_crutch/core/utils/cl_core_patch.py deleted file mode 100644 index 2dbe807..0000000 --- a/libs_crutch/core/utils/cl_core_patch.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from calculate.core.server.func import Action -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.utils.files import FilesError -from calculate.lib.datavars import VariableError, DataVarsError -from calculate.lib.cl_template import TemplatesError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClCorePatchAction(Action): - """ - Действие наложния патчей - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError, TemplatesError, VariableError, DataVarsError) - templateTaskMessage = __("Appling patches to package sources") - successMessage = None - failedMessage = __("Failed to patch package sources!") - interruptMessage = __("Patching manually interrupted") - - tasks = [ - {'name': 'patch_package', - # наложить патчи для пакета - 'method': 'UpdateConfigs.patchPackage(cl_core_pkg_path,' - 'cl_core_pkg_name,cl_core_arch_machine)' - } - ] diff --git a/libs_crutch/core/utils/cl_core_request.py b/libs_crutch/core/utils/cl_core_request.py deleted file mode 100644 index a7f7487..0000000 --- a/libs_crutch/core/utils/cl_core_request.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from calculate.core.server.func import Action -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.utils.files import FilesError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClCoreRequestShow(Action): - """ - Отображение запросов - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError,) - successMessage = None - failedMessage = None - interruptMessage = __("Viewing manually interrupted") - - tasks = [ - {'name': 'view_req', - 'method': 'Request.show_request_meth(cl_page_count,cl_page_offset)' - }] - - -class ClCoreRequestConfirm(Action): - """ - Подтверждение запроса - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError,) - successMessage = __("Certificate {cl_req_crt_path} is signed") - failedMessage = __("Failed to sign {cl_req_csr_path} request") - interruptMessage = __("Signing manually interrupted") - - tasks = [ - {'name': 'mod_req', - 'method': 'Request.confirm_request_meth(cl_core_client_certs_path,' - 'cl_core_cert_path,cl_req_csr_path,cl_req_crt_path,' - 'cl_req_group)' - }] - - -class ClCoreRequestDel(Action): - """ - Удаление запроса - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError,) - successMessage = None - failedMessage = __("Failed to delete the request with ID={cl_req_id}") - interruptMessage = __("Deleting manually interrupted") - - tasks = [ - {'name': 'del_req', - 'method': 'Request.del_request_meth(cl_core_database,cl_req_csr_path,' - 'cl_req_crt_path,cl_req_id)' - }] diff --git a/libs_crutch/core/utils/cl_core_restart.py b/libs_crutch/core/utils/cl_core_restart.py deleted file mode 100644 index e620312..0000000 --- a/libs_crutch/core/utils/cl_core_restart.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2013-2016 Mir Calculate. 
http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.core.server.func import Action -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.datavars import VariableError -from calculate.lib.cl_template import TemplatesError -from calculate.lib.utils.files import FilesError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClCoreRestartAction(Action): - """ - Действие для настройки параметров видео - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError, TemplatesError, VariableError) - - successMessage = __("Action successfully completed!") - failedMessage = __("Failed to perform action!") - interruptMessage = __("Action manually interrupted") - - def __init__(self): - # список задач для действия - self.tasks = [ - {'name': 'check_running', - # проверить запущенные процессы - 'method': 'UpdateConfigs.checkRunning()', - }, - {'name': 'restart', - # перезапустить calculate-core - 'message': _("Restarting calculate-core"), - 'method': 'UpdateConfigs.restartService("calculate-core")', - }] - - Action.__init__(self) diff --git a/libs_crutch/core/utils/cl_core_setup.py b/libs_crutch/core/utils/cl_core_setup.py deleted file mode 100644 index 119b022..0000000 --- a/libs_crutch/core/utils/cl_core_setup.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir 
Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.core.server.func import Action -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.utils.files import FilesError -from calculate.lib.datavars import VariableError, DataVarsError -from calculate.lib.cl_template import TemplatesError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClCoreSetupAction(Action): - """ - Действие настройка пакета для пользователя и системы - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError, TemplatesError, VariableError, DataVarsError) - templateTaskMessage = __("The system is being configured") - successMessage = None - failedMessage = __("Failed to configure the system!") - interruptMessage = __("Configuration manually interrupted") - - tasks = [ - {'name': 'system_rm_package', - # наложить шаблоны удаления пакета - 'method': 'UpdateConfigs.updateSystemConfig(cl_core_pkg_name,' - '"",cl_core_pkg_slot,cl_core_pkg_category,cl_core_pkg_path,' - 'cl_core_pkg_root_set,cl_verbose_set,cl_dispatch_conf,' - 'cl_template_path_use,cl_ebuild_phase,' - 'cl_template_clt_set,cl_core_arch_machine)', - 'condition': lambda dv: ( - dv.Get('cl_core_pkg_system_set') == 'on' and - dv.Get('cl_ebuild_phase') in ('prerm', 'postrm')) - }, - {'name': 'system_setup_package', - # наложить шаблоны настройки 
пакета - 'method': 'UpdateConfigs.updateSystemConfig(cl_core_pkg_name,' - 'cl_core_pkg_version,cl_core_pkg_slot,' - 'cl_core_pkg_category,cl_core_pkg_path,' - 'cl_core_pkg_root_set,cl_verbose_set,cl_dispatch_conf,' - 'cl_template_path_use,cl_ebuild_phase,' - 'cl_template_clt_set,cl_core_arch_machine)', - 'condition': lambda dv: ( - dv.Get('cl_core_pkg_system_set') == 'on' and - dv.Get('cl_ebuild_phase') not in ('prerm', 'postrm')) - }, - {'name': 'user_setup_package', - # наложить шаблоны настройки пакета - 'method': 'UpdateConfigs.updateDesktopConfig(cl_core_pkg_name,' - 'cl_core_pkg_version,cl_core_pkg_slot,' - 'cl_core_pkg_category,cl_core_pkg_path,' - 'cl_core_pkg_root_set,cl_verbose_set,cl_dispatch_conf,' - 'cl_template_path_use,cl_ebuild_phase,' - 'cl_template_clt_set,cl_core_arch_machine)', - 'condition': lambda dv: ( - dv.Get('cl_core_pkg_desktop_set') == 'on' and - dv.isModuleInstalled('desktop') and - dv.Get('cl_ebuild_phase') not in ("preinst", "prerm")) - } - ] diff --git a/libs_crutch/core/utils/cl_core_variables.py b/libs_crutch/core/utils/cl_core_variables.py deleted file mode 100644 index 019fc82..0000000 --- a/libs_crutch/core/utils/cl_core_variables.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from calculate.core.server.func import Action -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.utils.files import FilesError -from calculate.lib.datavars import VariableError, DataVarsError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClCoreVariables(Action): - """ - Отображение сертификатов - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError, DataVarsError, VariableError) - successMessage = None - failedMessage = None - interruptMessage = __("Modification of variables manually interrupted") - - tasks = [ - {'name': 'write_vars', - # записать переменные - 'method': 'Variables.writeVariables(cl_variable_data)' - } - ] - - -class ClCoreVariablesShow(Action): - """ - Отображение сертификатов - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError, DataVarsError, VariableError) - successMessage = None - failedMessage = None - interruptMessage = __("Viewing manually interrupted") - - tasks = [ - {'name': 'view_vars', - # отобразить переменные - 'method': 'Variables.showVariables(cl_variable_show,' - 'cl_variable_filter,cl_variable_data)' - } - ] diff --git a/libs_crutch/core/utils/cl_core_view_cert.py b/libs_crutch/core/utils/cl_core_view_cert.py deleted file mode 100644 index 6237aea..0000000 --- a/libs_crutch/core/utils/cl_core_view_cert.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.core.server.func import Action -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate -from calculate.lib.utils.files import FilesError - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClCoreViewCert(Action): - """ - Отображение сертификатов - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError,) - successMessage = None - failedMessage = None - interruptMessage = __("Viewing manually interrupted") - - tasks = [ - {'name': 'view_cert', - # наложить патчи для пакета - 'method': 'Certificate.show_certs_meth(cl_page_count,cl_page_offset)' - } - ] diff --git a/libs_crutch/core/variables/__init__.py b/libs_crutch/core/variables/__init__.py deleted file mode 100644 index bbb1a1e..0000000 --- a/libs_crutch/core/variables/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from . import action -from . import core -from . import request -from . import certificate -from . import groups -from . import setup_package -from . import variable -from . import backup - -section = "core" diff --git a/libs_crutch/core/variables/action.py b/libs_crutch/core/variables/action.py deleted file mode 100644 index 6188b44..0000000 --- a/libs_crutch/core/variables/action.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from calculate.lib.datavars import ActionVariable, Variable, VariableError - -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -class Actions(object): - Restore = "restore" - Service = "service" - - Backup = "backup" - BackupRestore = "restore" - - -class VariableAcBackupCreate(ActionVariable): - """ - Создание архива резвервной копии - """ - - def action(self, cl_action): - if cl_action == Actions.Backup: - return "on" - return "off" - - -class VariableAcBackupRestore(ActionVariable): - """ - Пошаговое восстановление сервисов и настроек из архива резервной копии - """ - - def action(self, cl_action): - if (cl_action == Actions.BackupRestore and self.Get( - 'cl_backup_action') == Actions.Restore): - return "on" - return "off" - -class VariableAcBackupService(ActionVariable): - """ - Шаблоны выполняемые после восстановленных конфигурационных файлов - """ - - def action(self, cl_action): - if (cl_action == Actions.BackupRestore and self.Get( - 'cl_backup_action') == Actions.Service): - return "on" - return "off" - - -class VariableClBackupAction(Variable): - """ - Подтип действия связанного с резервной копией настроек - """ - value = "" - - def check(self, value): - """ - Проверка наличия архива резервных настроек для восстановления - :param value: - :return: - """ - if value in (Actions.Restore, Actions.Service): - if not self.Get('cl_backup_file'): - raise VariableError(_("Failed to find backup")) diff --git a/libs_crutch/core/variables/backup.py b/libs_crutch/core/variables/backup.py deleted file mode 100644 index e1dbdfd..0000000 --- a/libs_crutch/core/variables/backup.py +++ /dev/null @@ -1,168 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import sys -from os import path -from calculate.lib.datavars import (Variable, ReadonlyVariable) -from calculate.lib.utils.files import get_free_dirname, listDirectory - -from calculate.lib.cl_lang import setLocalTranslate -import datetime -from .action import Actions - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -class VariableClBackupPath(Variable): - """ - Путь до каталога, где будет подготавливаться backup - """ - preferred_dn = '/var/calculate/tmp/backup_prepare' - - def get(self): - return get_free_dirname(self.preferred_dn) - - -class VariableClBackupContentsName(Variable): - """ - Названия файла CONTENTS в архиве - """ - value_format = "CONTENTS" - - -class VariableClBackupRootName(Variable): - """ - Названия каталога, куда сохраняются конфигурационные файлы - """ - value_format = "root" - - -class VariableClBackupFileContents(ReadonlyVariable): - """ - CONTENTS в архиве настроек - """ - value_format = "{cl_backup_path}/{cl_backup_contents_name}" - - -class VariableClBackupIniEnv(Variable): - """ - Названия файла CONTENTS в архиве [install] init = - """ - value_format = "{cl_backup_path}/ini.env" - - -class VariableClBackupBasePath(Variable): - """ - Директория в которой будет создан архив - """ - value = "/var/calculate/backup" - - -class VariableClBackupTime(ReadonlyVariable): - """ - Переменная содержащая информацию: время создания backup - """ - - def init(self): - self.label = _("Backup created") - - def get(self): - backup_fn = self.Get('cl_backup_file') - if backup_fn 
and path.exists(backup_fn): - try: - dt = datetime.datetime.strptime( - path.basename(backup_fn).rpartition("-")[2], - "%Y%m%d%H%M%S.tar.bz2") - return dt.strftime("%s") - except ValueError: - pass - return "" - - def humanReadable(self): - ts = self.Get() - if ts: - ret = [] - backup_tm = datetime.datetime.fromtimestamp(int(ts)) - now_tm = datetime.datetime.now() - backup_diff = now_tm - backup_tm - if backup_diff.days: - ret.append(_("%d days") % backup_diff.days) - minute = 60 - hour = 60 * minute - times = backup_diff.seconds - if times >= hour: - ret.append(_("%d hours") % (times / hour)) - times %= hour - if times >= minute: - ret.append(_("%d minutes") % (times / minute)) - times %= minute - if times: - ret.append(_("%d seconds") % times) - return _("%s ago") % (" ".join(ret)) - - return _("Unknown") - - -class VariableClBackupFile(Variable): - """ - Путь до создаваемое архива резервной копии - """ - - def init(self): - self.label = _("Backup file") - - def get_backup(self): - """ - Получить путь для подготовки архива настроек - :return: - """ - dn = self.Get('cl_backup_base_path') - dt = datetime.datetime.now().strftime( - "calculate-backup-%Y%m%d%H%M%S.tar.bz2") - return path.join(dn, dt) - - def get_restore(self): - """ - Получить путь для распаковки архива настроек - :return: - """ - dn = self.Get('cl_backup_base_path') - for fn in sorted((x for x in listDirectory(dn, fullPath=True) - if x.endswith(".tar.bz2")), - reverse=True): - # получить самый свежий файл - return fn - return "" - - def get(self): - action = self.Get('cl_action') - if action == Actions.Backup: - return self.get_backup() - elif action == Actions.BackupRestore: - return self.get_restore() - -class VariableClBackupVerboseSet(Variable): - """ - Verbose output for backup - """ - type = "bool" - opt = ["-v", "--verbose"] - value = "off" - - def init(self): - self.help = _("verbose output") - self.label = _("Verbose output") diff --git a/libs_crutch/core/variables/certificate.py 
b/libs_crutch/core/variables/certificate.py deleted file mode 100644 index 96a3004..0000000 --- a/libs_crutch/core/variables/certificate.py +++ /dev/null @@ -1,225 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from calculate.lib.datavars import Variable, ReadonlyVariable, VariableError -import os -import sys - -from calculate.lib.cl_lang import setLocalTranslate -from calculate.lib.utils.files import pathJoin, listDirectory, readFile -from calculate.core.server.loaded_methods import LoadedMethods -from calculate.core.server.func import uniq - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -class VariableClListCertId(ReadonlyVariable): - def get(self): - cert_dir = self.Get('cl_core_client_certs_path') - return map(lambda x: x[:-4], - filter(lambda x: x.endswith('.crt'), - listDirectory(cert_dir))) - - -class VariableClCertId(Variable): - """ - Certificate Identification - """ - type = "choice" - opt = ["-c"] - metavalue = "CERT_ID" - - def init(self): - self.help = _("Certificate identifier") - self.label = _("Certificate identifier") - - def choice(self): - return self.Get('cl_list_cert_id') - - def check(self, cert_id): - # if cert_id != 'all': - try: - int(cert_id) - except ValueError: - raise VariableError(_("The certificate ID must be int")) - - list_certs_id = self.Get('cl_list_cert_id') - if cert_id not in list_certs_id: - raise 
VariableError(_("The certificate with ID %s not exists") - % cert_id) - - -class VariableClCertPerms(Variable): - """ - Certificate Permissions - """ - type = "choice-list" - opt = ["--cert-perm"] - metavalue = "perm[,perm2[..]]" - - def init(self): - self.help = _("Certificate permissions") - self.label = _("Certificate permissions") - - def choice(self): - right_list = [] - for key in LoadedMethods.rightsMethods.keys(): - right_list += LoadedMethods.rightsMethods[key] - - uniq_right_list = uniq(right_list) - uniq_right_list.sort() - return uniq_right_list - - def get(self): - cert_id = self.Get('cl_cert_id') - groups_list = self.Get('cl_cert_groups') - - group_rights = self.Get('cl_core_group_rights_path') - rights = self.Get('cl_core_rights') - # if group = all and not redefined group all - results = [] - if 'all' in groups_list: - find_flag = False - fd = open(group_rights, 'r') - t = fd.read() - # find all in group_rights file - for line in t.splitlines(): - if not line: - continue - if line.split()[0] == 'all': - find_flag = True - break - if not find_flag: - right_list = [] - for key in LoadedMethods.rightsMethods.keys(): - right_list += LoadedMethods.rightsMethods[key] - - uniq_right_list = uniq(right_list) - uniq_right_list.sort() - return uniq_right_list - - else: - if not os.path.exists(group_rights): - return ["No Methods"] - with open(group_rights) as fd: - t = fd.read() - for line in t.splitlines(): - if not line: - continue - words = line.split(' ', 1) - # first word in line equal name input method - if words[0] in groups_list: - methods = words[1].split(',') - for i in methods: - results.append(i.strip()) - - results = uniq(results) - - add_list_rights = [] - del_list_rights = [] - - t = readFile(rights) - for line in t.splitlines(): - words = line.split() - meth = words[0] - for word in words: - try: - word = int(word) - except ValueError: - continue - # compare with certificat number - if cert_id == word: - # if has right - 
add_list_rights.append(meth) - if cert_id == -word: - del_list_rights.append(meth) - - results += add_list_rights - results = uniq(results) - - for method in results: - if method in del_list_rights: - results.remove(method) - - if not results: - results.append("No Methods") - - return results - - def uncompatible(self): - return _('You cannot change the certificate permissions') - - -class VariableClCertGroups(Variable): - """ - Certificate Groups - """ - type = "choice-list" - # opt = ["--cert-group"] - # metavalue = "perm[,perm2[..]]" - - def init(self): - self.help = _("Certificate groups") - self.label = _("Certificate groups") - - def choice(self): - group_rights = self.Get('cl_core_group_rights_path') - - with open(group_rights, 'r') as f: - t = f.read() - result = [] - for line in t.splitlines(): - words = line.split() - if words and len(words): - if not words[0].startswith('#'): - result.append(words[0]) - if 'all' not in result: - result.append('all') - return result - - def get(self): - try: - import OpenSSL - - try: - cert_file = self.Get('cl_cert_crt_path') - - with open(cert_file, 'r') as f: - cert = f.read() - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, cert) - com = certobj.get_extension( - certobj.get_extension_count() - 1).get_data() - groups = com.rpartition(':')[2] - groups_list = groups.split(',') - return groups_list - except OpenSSL.crypto.Error: - return [] - except (IOError, ImportError): - return [] - - def uncompatible(self): - return _('You cannot change the certificate permissions') - - -class VariableClCertCrtPath(Variable): - """ - Путь до сертификата (при указании иденификатора сертификата) - """ - - def get(self): - return pathJoin(self.Get('cl_core_client_certs_path'), - "%s.crt" % self.Get('cl_cert_id')) diff --git a/libs_crutch/core/variables/core.py b/libs_crutch/core/variables/core.py deleted file mode 100644 index 5b6c440..0000000 --- a/libs_crutch/core/variables/core.py +++ /dev/null @@ -1,284 +0,0 
@@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from calculate.lib.datavars import Variable -import sys -from os import path - -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -class VariableClCoreMonitorPeriod(Variable): - """ - Variable store period monitoring session - """ - type = "int" - value = "30" - - -class VariableClCoreSidLive(Variable): - """ - Variable store time session live - """ - type = "int" - value = "1440" - - -class VariableClCoreRights(Variable): - """ - Variable store path to file with rights - """ - - def get(self): - return path.join(self.Get('cl_core_data'), "conf/right.conf") - - -class VariableClCoreGroupRightsPath(Variable): - """ - Variable store path to file with group rights - """ - - def get(self): - return path.join(self.Get('cl_core_data'), "conf/group_right.conf") - - -class VariableClCoreLocalData(Variable): - """ - Variable store path to data files - """ - value = '/var/lib/calculate/calculate-core' - - -class VariableClCoreData(Variable): - """ - Variable store path to data files - """ - value = '/var/calculate/server' - - -class VariableClCoreClientCertsPath(Variable): - """ - Переменная хранит путь до клиентских сертификатов - """ - - def get(self): - return path.join(self.Get('cl_core_data'), "client_certs") - - -class VariableClCoreClientNewIdPath(Variable): - """ 
- Переменная хранит путь до счетчика клиентских сертификатов - """ - - def get(self): - return path.join(self.Get('cl_core_client_certs_path'), "id.int") - - -class VariableClCoreDatabase(Variable): - """ - Variable store name files containing clients certificates - """ - - def get(self): - return path.join(self.Get('cl_core_client_certs_path'), - "Database") - - -class VariableClCoreServDatabase(Variable): - """ - Variable store name files containing signed servers certificates - """ - - def get(self): - return path.join(self.Get('cl_core_data'), "server_certs/Database") - - -class VariableClCoreSidsPath(Variable): - """ - Variable store path to sessions id files - """ - - def get(self): - return path.join(self.Get('cl_core_data'), "sids") - - -class VariableClCorePidsPath(Variable): - """ - Variable store path to process id files - """ - - def get(self): - return path.join(self.Get('cl_core_data'), "pids") - - -class VariableClCoreSidsFile(Variable): - """ - Variable store name sessions database files - """ - - def get(self): - return path.join(self.Get('cl_core_data'), "sid.db") - - -class VariableClCorePidsFile(Variable): - """ - Variable store name process id database files - """ - - def get(self): - return path.join(self.Get('cl_core_data'), "pid.db") - - -class VariableClCoreSidsPids(Variable): - """ - Variable store name file comparison sessions and process - """ - - def get(self): - return path.join(self.Get('cl_core_data'), "sid_pid") - - -class VariableClCoreMaxSid(Variable): - """ - Variable store maximum session id - """ - value = "10000" - - -class VariableClCoreMaxPid(Variable): - """ - Variable store maximum process id - """ - value = "100000" - - -class VariableClCoreCert(Variable): - """ - Server certificate - """ - - def get(self): - return path.join(self.Get('cl_core_cert_path'), "server.crt") - - -class VariableClCoreCertPath(Variable): - """ - Server certificate path - """ - - def get(self): - return path.join(self.Get('cl_core_data'), "ca") - 
- -class VariableClCoreKey(Variable): - """ - Private server key - """ - - def get(self): - return path.join(self.Get('cl_core_cert_path'), "server.key") - - -class VariableClCoreCertLive(Variable): - """ - Max time live sessions (in minutes) - """ - type = "int" - value = "1576800" - - -class VariableClCoreClientActivePeriod(Variable): - """ - Period client activity (in seconds) - """ - type = "int" - value = "15" - - -class VariableClCoreGetFramePeriod(Variable): - """ - Period request frames (in seconds) - """ - type = "int" - value = "2" - - -class VariableClClientCertDir(Variable): - """ - Client certificates directory - """ - value = '~/.calculate/client_cert/' - - -class VariableClLogPath(Variable): - """ - Server log file path certificates directory - """ - value = '/var/log/calculate' - - -class VariableClUserRootCert(Variable): - """ - Trusted certificates added by client - """ - - def get(self): - return path.join(self.Get('cl_client_cert_dir'), "ca/ca_root.crt") - - -class VariableClGlobRootCert(Variable): - """ - Trusted certificates installed in system - """ - - def get(self): - return path.join(self.Get('cl_core_cert_path'), "sys_ca.crt") - - -class VariableClCorePort(Variable): - """ - Port for cl-core WSDL server - """ - type = "int" - #was 8888 - value = "2007" - - -class VariableClCoreRestartPath(Variable): - """ - Файл-флаг необходимости перезапуска сервера утилит - """ - - def get(self): - return path.join(self.Get('cl_core_local_data'), "restart") - - -class VariableClCoreDbusStopPath(Variable): - """ - Файл-флаг необходимости остонова сервера утилит при отсутствии клиента - """ - - def get(self): - return path.join(self.Get('cl_core_local_data'), "close_on_inactive") - - -class VariableClCoreAdminPath(Variable): - """ - Путь до ini.env сервера, который содержит информацию о локальных - администраторах - """ - value_format = "{core.cl_core_data}/ini.env" diff --git a/libs_crutch/core/variables/groups.py b/libs_crutch/core/variables/groups.py 
deleted file mode 100644 index 002feab..0000000 --- a/libs_crutch/core/variables/groups.py +++ /dev/null @@ -1,127 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from calculate.lib.datavars import Variable, VariableError -import sys -import re - -from calculate.lib.cl_lang import setLocalTranslate -from calculate.core.server.loaded_methods import LoadedMethods -from calculate.core.server.func import uniq -from calculate.lib.utils.files import readFile - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -class VariableClCoreGroup(Variable): - """ - Certificate Group - """ - type = "choiceedit" - opt = ["cl_core_group"] - metavalue = "GROUP_NAME" - untrusted = True - - def init(self): - self.help = _("Set the certificate group") - self.label = _("Group name") - - def choice(self): - group_rights = self.Get('cl_core_group_rights_path') - - t = readFile(group_rights) - result = [] - for line in t.splitlines(): - words = line.split() - if words and len(words): - if not words[0].startswith('#'): - result.append(words[0]) - if 'all' not in result: - result.append('all') - return result - - def check(self, group): - if not group: - raise VariableError(_("Group name is a required parameter")) - name_re = re.compile("^[a-zA-Z_0-9]{2,20}$") - if not name_re.findall(group): - raise VariableError( - _('The group name may only contain words, ' - 'digits 
and underline symbols') + '\n' + - _('The group name must consist of 2 to 20 symbols')) - group_rights = self.Get('cl_core_group_rights_path') - - if group == 'all': - return - t = readFile(group_rights) - find = False - for line in t.splitlines(): - words = line.split() - if words[0].startswith('#'): - continue - if group == words[0]: - find = True - if self.Get('cl_action') == "add" and find: - raise VariableError(_('Group %s already exists!') % group) - elif self.Get('cl_action') != "add" and not find: - raise VariableError(_("Group %s does not exist") % group) - - -class VariableClCoreGroupRights(Variable): - """ - Certificate Group - """ - type = "choice-list" - opt = ["--group-rights"] - metavalue = "right[,right2[..]]" - - def init(self): - self.help = _("Group permissions") - self.label = _("Group permissions") - - def choice(self): - right_list = [] - for key in LoadedMethods.rightsMethods.keys(): - right_list += LoadedMethods.rightsMethods[key] - - uniq_right_list = uniq(right_list) - uniq_right_list.sort() - return uniq_right_list - - def get(self): - group_name = self.Get('cl_core_group') - group_rights = self.Get('cl_core_group_rights_path') - - t = readFile(group_rights) - results = [] - for line in t.splitlines(): - words = line.split(' ', 1) - if len(words) > 1: - if words[0] == group_name: - methods = words[1].split(',') - for i in methods: - results.append(i.strip()) - - if group_name == 'all' and results == []: - right_list = [] - for key in LoadedMethods.rightsMethods.keys(): - right_list += LoadedMethods.rightsMethods[key] - - uniq_right_list = uniq(right_list) - uniq_right_list.sort() - return uniq_right_list - - return results diff --git a/libs_crutch/core/variables/request.py b/libs_crutch/core/variables/request.py deleted file mode 100644 index 178b561..0000000 --- a/libs_crutch/core/variables/request.py +++ /dev/null @@ -1,364 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. 
http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from calculate.lib.datavars import Variable, ReadonlyVariable, VariableError -import os -import glob -import sys - -from calculate.lib.cl_lang import setLocalTranslate -from calculate.lib.utils.files import readLinesFile, pathJoin, readFile - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - - -class VariableClListReqId(Variable): - def get(self): - data_path = self.Get('cl_core_data') - result = [] - cert_dir = data_path + '/client_certs/' - for filename in glob.glob(cert_dir + "*"): - if filename.endswith('.csr'): - temp = filename.split('.')[0].split('/') - id = temp[len(temp) - 1] - result.append(id) - return result - - -class VariableClReqId(Variable): - """ - Certificate Identification - """ - type = "choice" - opt = ["-r"] - metavalue = "REQ_ID" - untrusted = True - - def init(self): - self.help = _("request identifier") - self.label = _("Request identifier") - - def choice(self): - return self.Get('cl_list_req_id') - - def check(self, req_id): - if not req_id: - raise VariableError(_("The request ID is a required parameter")) - try: - int(req_id) - except (ValueError, TypeError): - raise VariableError(_("The request ID must be int")) - - def raiseNothingValue(self): - raise VariableError(_("Not found any requests")) - - def get(self): - # if not self.choice(): - # self.raiseNothingValue() - return "" - - -class VariableClPageLimit(Variable): - """ - 
Ограничение ???? - """ - type = "int" - opt = ["--page-limit"] - metavalue = "PAGE_LIMIT" - element = 'input' - - def init(self): - self.help = _("set the page limit value") - self.label = _("Page limit") - - def check(self, limit): - try: - int(limit) - except ValueError: - raise VariableError(_("The limit number must be int")) - - -class VariableClPageCount(Variable): - """ - Количество записей на страницу - """ - type = "int" - opt = ["--page-count"] - metavalue = "PAGE_COUNT" - element = 'input' - - def init(self): - self.help = _("set the page count value") - self.label = _("Page count") - - def get(self): - return self.Get('cl_page_max') - - def set(self, value): - if not value or int(value) <= 0: - value = self.Get('cl_page_max') - return value - - -class VariableClPageOffset(Variable): - """ - Смещение по записям относительно начала - """ - type = "int" - opt = ["--page-offset"] - metavalue = "PAGE_OFFSET" - element = 'input' - value = "0" - - def init(self): - self.help = _("set the page offset value") - self.label = _("Page offset") - - def set(self, value): - try: - max_id = int(self.Get('cl_page_max')) - value = int(value) - if value < 0: - value = max_id + value - return str(max(0, min(int(value), max_id - 1))) - except ValueError: - return str(0) - - -class VariableClPageMax(ReadonlyVariable): - """ - Максимальное количество записей - """ - type = "int" - - def get(self): - return "0" - - -class VariableClReqBaseData(Variable): - """ - """ - - def get(self): - req_id = self.Get('cl_req_id') - certbase = self.Get('cl_core_database') - - for line in readLinesFile(certbase): - data = line.strip().split() - if len(data) < 6: - continue - data += [""] * (8-len(data)) - if data[0] == str(req_id): - return data - return [''] * 8 - - -class VariableClReqData(Variable): - """ - Данные о запросах - """ - - def get(self): - try: - import OpenSSL - - req_file = self.Get('cl_req_csr_path') - if os.path.exists(req_file): - fp = open(req_file, 'r') - request = 
fp.read() - fp.close() - reqobj = OpenSSL.crypto.load_certificate_request( - OpenSSL.SSL.FILETYPE_PEM, request) - subject = reqobj.get_subject().get_components() - return subject - except ImportError: - OpenSSL = None - return [['', '']] * 6 - - -class VariableClReqIp(ReadonlyVariable): - """ - Ip Request - """ - - def init(self): - self.help = _("request IP address") - self.label = _("Request IP address") - - def uncompatible(self): - return 'Ip adress' - - def get(self): - return self.Get('cl_req_base_data')[4] - - -class VariableClReqMac(ReadonlyVariable): - """ - Mac Adress Request - """ - - def init(self): - self.help = _("request MAC adress") - self.label = _("Request MAC address") - - def uncompatible(self): - return 'Mac adress' - - def get(self): - return self.Get('cl_req_base_data')[5] - - -class VariableClReqDate(ReadonlyVariable): - """ - Date send Request - """ - - def init(self): - self.help = _("request date") - self.label = _("Request date") - - def uncompatible(self): - return 'Request Date' - - def get(self): - words = self.Get('cl_req_base_data') - if words: - return '%s %s' % (words[2], words[3]) - else: - return "" - - -class VariableClReqUserName(ReadonlyVariable): - """ - UserName Owner Request - """ - - def init(self): - self.help = _("request owner username") - self.label = _("Request owner username") - - def uncompatible(self): - return 'User name request owner' - - def get(self): - subject = self.Get('cl_req_data') - for item in subject: - if item[0] == 'OU': - return item[1] - return '' - - -class VariableClReqLocation(ReadonlyVariable): - """ - Location Owner Request - """ - - def init(self): - self.help = _("request location") - self.label = _("Request location") - - def uncompatible(self): - return 'Location' - - def get(self): - subject = self.Get('cl_req_data') - for item in subject: - if item[0] == 'L': - return item[1] - return '' - - -class VariableClReqGroup(Variable): - """ - Certificate Group - """ - type = "choice" - opt = 
["-g"] - metavalue = "REQ_GROUP" - untrusted = True - - def init(self): - self.help = _("set the certificate group") - self.label = _("Certificate group") - - def choice(self): - group_rights = self.Get('cl_core_group_rights_path') - - t = readFile(group_rights) - result = [] - for line in t.splitlines(): - words = line.split() - - if not words[0].startswith('#'): - result.append(words[0]) - if 'all' not in result: - result.append('all') - return result - - def get(self): - try: - import OpenSSL - - try: - cert_file = self.Get('cl_req_crt_path') - fp = open(cert_file, 'r') - cert = fp.read() - fp.close() - certobj = OpenSSL.crypto.load_certificate( - OpenSSL.SSL.FILETYPE_PEM, cert) - com = certobj.get_extension( - certobj.get_extension_count() - 1).get_data() - return com.split(':')[1] - except OpenSSL.crypto.Error: - return "" - except (IOError, ImportError): - return "" - - def check(self, group): - if not group: - raise VariableError(_("Group permissions is a required parameter")) - group_rights = self.Get('cl_core_group_rights_path') - - if group == 'all': - return - t = readFile(group_rights) - for line in t.splitlines(): - words = line.split() - if words[0].startswith('#'): - continue - if group == words[0]: - return - raise VariableError(_("Group %s does not exist") % group) - - -class VariableClReqCrtPath(ReadonlyVariable): - """ - Путь до сертификата при использовании запроса на сертификат - """ - - def get(self): - return pathJoin(self.Get('cl_core_client_certs_path'), - '%s.crt' % self.Get('cl_req_id')) - - -class VariableClReqCsrPath(ReadonlyVariable): - """ - Путь до запроса - """ - - def get(self): - return pathJoin(self.Get('cl_core_client_certs_path'), - '%s.csr' % self.Get('cl_req_id')) diff --git a/libs_crutch/core/variables/setup_package.py b/libs_crutch/core/variables/setup_package.py deleted file mode 100644 index 122c34d..0000000 --- a/libs_crutch/core/variables/setup_package.py +++ /dev/null @@ -1,403 +0,0 @@ -# -*- coding: utf-8 -*- - -# 
Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from calculate.lib.datavars import Variable, VariableError -import sys -import os -from os import path - -from calculate.lib.cl_lang import setLocalTranslate -import pwd - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - -from calculate.lib.utils.files import listDirectory -from calculate.lib.utils.portage import (isPkgInstalled, getPkgSlot, - getInstalledAtom) -from calculate.lib.utils.text import formatListOr -from calculate.lib.utils.common import getTupleVersion -from calculate.lib.variables.user import VariableUrLogin -from itertools import * -import glob - - -class VariableClCorePkgName(Variable): - """ - Package name - """ - type = "choiceedit" - opt = ["--pkg-name"] - metavalue = "PN" - untrusted = True - - def init(self): - self.label = _("Package name") - self.help = _("package name") - - def check(self, value): - if not value: - raise VariableError(_("Please choose a package")) - if not os.environ.get( - "EBUILD_PHASE", "") and value not in self.choice(): - raise VariableError(_("Package not found")) - - def choice(self): - if self.Get('cl_ebuild_phase'): - return [""] - return [""] + ["all"] + sorted( - set(chain(*map(lambda x: map( - lambda y: (y[0].rpartition('-')[0] - if y[2].startswith('r') else y[0]), - imap(lambda y: y.rpartition('-'), - listDirectory(x))), - listDirectory('/var/db/pkg', onlyDir=True, - 
fullPath=True))))) - - -class VariableClCorePkgVersionOpt(Variable): - """ - Package version - """ - type = "choiceedit" - opt = ["--pkg-version"] - metavalue = "PVR" - untrusted = True - - def init(self): - self.label = _("Package version") - self.help = _("package version number with the revision") - - def get(self): - return "all" - - def choice(self): - pkg = getInstalledAtom("%s/%s" % (self.Get('cl_core_pkg_category'), - self.Get('cl_core_pkg_name'))) - pkg = list(sorted(pkg)) - if pkg: - return ["all"] + [x['PVR'] for x in pkg] - return ["all"] - - def check(self, value): - pkg_name = self.Get('cl_core_pkg_name') - if not value and pkg_name and pkg_name != "all": - raise VariableError(_("Please choose the version")) - if value != "all": - pkg_name = self.Get('cl_core_pkg_name') - if not value and pkg_name and pkg_name != "all": - raise VariableError(_("Please choose the version")) - - if self.Get('cl_action') != "patch": - pkgs = getInstalledAtom("%s/%s" % (self.Get('cl_core_pkg_category'), - self.Get('cl_core_pkg_name'))) - pkgs = list(sorted(pkgs)) - if pkgs: - versions = [x['PVR'] for x in pkgs] - else: - versions = [] - if value not in versions and pkg_name and pkg_name != "all": - raise VariableError(_("Please choose the version") + _(": ") + - formatListOr(versions)) - - -class VariableClCorePkgVersion(Variable): - """ - Package version - """ - type = "choiceedit" - opt = ["--pkg-version"] - metavalue = "PVR" - untrusted = True - - def init(self): - self.label = _("Package version") - self.help = _("package version number with the revision") - - def get(self): - vers = self.Get('cl_core_pkg_version_opt') - if vers != "all": - return vers - pkg = isPkgInstalled("%s/%s" % (self.Get('cl_core_pkg_category'), - self.Get('cl_core_pkg_name'))) - if pkg: - return sorted(pkg, - key=lambda x: getTupleVersion(x['PVR']))[-1]['PVR'] - return "" - - def check(self, value): - pkg_name = self.Get('cl_core_pkg_name') - if not value and pkg_name and pkg_name != "all": - 
raise VariableError(_("Please choose the version")) - - -class VariableClCorePkgSlot(Variable): - """ - Package Slot - """ - type = "choiceedit" - opt = ["--pkg-slot"] - metavalue = "SLOT" - untrusted = True - - def init(self): - self.label = _("Package slot") - self.help = _("package slot") - - def get(self): - slot = self.Get('cl_core_pkg_slot_opt') - if slot != "all": - return slot - pkg = getPkgSlot("%s/%s" % (self.Get('cl_core_pkg_category'), - self.Get('cl_core_pkg_name'))) - if pkg: - return sorted(pkg)[-1] - return "" - - def check(self, value): - pkg_name = self.Get('cl_core_pkg_name') - if not value and pkg_name and pkg_name != "all": - raise VariableError(_("Please choose the slot")) - -class VariableClCorePkgSlotOpt(Variable): - """ - Selected Package Slot - """ - type = "choiceedit" - opt = ["--pkg-slot"] - metavalue = "SLOT" - untrusted = True - - def init(self): - self.label = _("Package slot") - self.help = _("package slot") - - def get(self): - veropt = self.Get('cl_core_pkg_version_opt') - if veropt != 'all': - pkgs = getInstalledAtom( - "=%s/%s-%s" % (self.Get('cl_core_pkg_category'), - self.Get('cl_core_pkg_name'), veropt)) - pkgs = list(pkgs) - if pkgs: - return pkgs[0]["SLOTONLY"] - elif self.Get('cl_action') == 'config': - pkgs = getInstalledAtom( - "%s/%s" % (self.Get('cl_core_pkg_category'), - self.Get('cl_core_pkg_name'))) - pkgs = list(pkgs) - if len(pkgs) == 1: - return pkgs[0]["SLOTONLY"] - return "" - return "all" - - def choice(self): - veropt = self.Get('cl_core_pkg_version_opt') - if veropt != "all": - pkg = getInstalledAtom("=%s/%s-%s" % (self.Get('cl_core_pkg_category'), - self.Get('cl_core_pkg_name'), - veropt)) - else: - pkg = getInstalledAtom("%s/%s" % (self.Get('cl_core_pkg_category'), - self.Get('cl_core_pkg_name'))) - pkg = list(sorted(pkg)) - if self.Get('cl_action') == 'config': - allvalue = [] - else: - allvalue = ["all"] - if pkg: - return allvalue + [x['SLOTONLY'] for x in pkg] - return allvalue - - - def check(self, 
value): - if self.Get('cl_action') == 'config' and value == "all": - raise VariableError(_("Please choose the slot")) - if value != "all": - pkg_name = self.Get('cl_core_pkg_name') - if not value and pkg_name and pkg_name != "all": - raise VariableError(_("Please choose the slot")) - veropt = self.Get('cl_core_pkg_version_opt') - if veropt == 'all': - pkgs = getInstalledAtom( - "=%s/%s" % (self.Get('cl_core_pkg_category'), - self.Get('cl_core_pkg_name'))) - slots = [x["SLOTONLY"] for x in sorted(pkgs)] - else: - pkgs = getInstalledAtom( - "=%s/%s-%s" % (self.Get('cl_core_pkg_category'), - self.Get('cl_core_pkg_name'), veropt)) - slots = [x["SLOTONLY"] for x in sorted(pkgs)] - if self.Get('cl_action') != "patch" and \ - value not in slots and pkg_name and pkg_name != "all": - raise VariableError(_("Please choose the slot") + _(": ") + - formatListOr(slots)) - - -class VariableClCorePkgCategory(Variable): - """ - Package category - """ - type = "choiceedit" - opt = ["--pkg-category"] - metavalue = "CATEGORY" - untrusted = True - pkgCategories = {} - - def init(self): - self.label = _("Package category") - self.help = _("package category name") - - def choice(self): - if self.Get('cl_ebuild_phase'): - return [] - pkgname = self.Get('cl_core_pkg_name') - if pkgname not in self.pkgCategories: - self.pkgCategories[pkgname] = \ - glob.glob('/usr/portage/*/%s' % pkgname) - return map(lambda x: path.split(path.split(x)[0])[1], - self.pkgCategories[pkgname]) - - def get(self): - category = isPkgInstalled(self.Get('cl_core_pkg_name')) - if category: - if type(category[0]) == dict: - category = "" - else: - category = category[0].rpartition('/')[2] - else: - choice = self.choice() - if len(choice) == 1: - return choice[0] - else: - category = "" - return category - - def check(self, value): - pkg_name = self.Get('cl_core_pkg_name') - if not value and pkg_name and pkg_name != "all": - raise VariableError(_("Please choose the category")) - - -class 
VariableClCorePkgPath(Variable): - """ - Package configure path - """ - - value = "/" - opt = ["--pkg-path"] - metavalue = "PATH" - - def init(self): - self.label = _("Path for configuration") - self.help = _("root path for saving the updated configuration files") - - def check(self, value): - if not path.isdir(value): - raise VariableError(_("Directory %s not found") % value) - - -class VariableClCorePkgSystemSet(Variable): - """ - Package configure system - """ - - type = "bool" - value = "on" - opt = ["--system"] - metavalue = "ON/OFF" - - def init(self): - self.label = _("Configure the system") - self.help = _("updating system configuration files") - - -class VariableClCorePkgDesktopSet(Variable): - """ - Package configure desktop - """ - - type = "bool" - opt = ["--desktop"] - metavalue = "ON/OFF" - - def get(self): - if self.Get('cl_templates_locate') == ["clt"]: - return "off" - return "on" - - def check(self, value): - if self.Get('cl_templates_locate') == ["clt"] and value == "on": - raise VariableError( - _("You must not choose only clt location " - "for desktop templates")) - - def init(self): - self.label = _("Configure users") - self.help = _("updating desktop (user) configuration files") - - -class VariableClCorePkgRootSet(Variable): - """ - Other packages (specified by merge=) will configured in / - """ - type = "bool" - value = "on" - metavalue = "ON/OFF" - - opt = ["--depend-in-root"] - - def init(self): - self.label = _("Configure dependent packages in root") - self.help = _("configure the dependent packages in the same " - "directory as the specified package, instead of root") - - -class VariableUrCoreLogin(VariableUrLogin): - """ - User Login - """ - opt = ["--login"] - alias = "ur_login" - - def check(self, value): - """Пользователь существует""" - try: - pwd.getpwnam(value).pw_gid - except Exception: - raise VariableError(_("User %s does not exist") % value) - - def get(self): - return self.Get('ur_login') - - -class 
VariableClCoreArchMachine(Variable): - """ - Архитектура для настройки пакета - """ - type = 'choice' - opt = ['--march'] - metavalue = "ARCH" - available_arch = ["i686", "x86_64"] - - def init(self): - self.label = _("Machine architecture") - self.help = _("set machine architecture") - - def get(self): - return self.Get('os_arch_machine') - - def choice(self): - return [(x, x) for x in self.available_arch] diff --git a/libs_crutch/core/variables/variable.py b/libs_crutch/core/variables/variable.py deleted file mode 100644 index 8ded95f..0000000 --- a/libs_crutch/core/variables/variable.py +++ /dev/null @@ -1,303 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -import re -from os import path -from calculate.lib.datavars import (Variable, ReadonlyVariable, VariableError, - TableVariable) -from calculate.core.server.api_types import LazyString - -from calculate.lib.cl_lang import setLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) - -from calculate.lib.utils.files import listDirectory -from itertools import * - - -class VarHelper(object): - aliases = (('main', 'lib'),) - mapName = dict(aliases) - mapMethod = {'importLib': 'importVariables', - 'importConsolegui': 'importGui'} - mapObject = {'DataVarsLib': 'DataVars', - 'DataVarsConsolegui': 'DataVarsGui'} - mapSection = dict(map(lambda x: (x[1], x[0]), aliases)) - - -class VariableClVariableData(TableVariable): - """ - Table for modification variables - """ - type = "table" - opt = ["--set"] - metavalue = "VAR[=VALUE[:LOCATION]]" - # (arg-0:VAR)(arg-1:=(arg-2:VALUE):WRITE) - syntax = "^(?P[^=:]+)(?P=(?P[^=]*?)(:?:[a-z]+)?)?$" - # "(?:!=(?P\w*)))$") - source = ['cl_variable_fullname', - 'cl_variable_type', - 'cl_variable_location', - 'cl_variable_value'] - - def set(self, value): - """ - Fix data received from cmdline - """ - action = self.Get('cl_action') - - def fix_cmdline_params(fullname, location, v): - if location.startswith("="): - if re.match("^.*?:[a-z]+$", location): - location = location.rpartition(':')[2] - else: - location = "system" - return fullname, location, v - - def fix_value_for_append(fullname, location, v): - if action == 'append': - prevval = self.Get(fullname) - typevar = self.parent.getInfo(fullname).type - val = self.parent.unserialize(typevar, v) - prevval.extend(filter(lambda x: x not in prevval, val)) - else: - return fullname, location, v - - res = list(starmap(fix_value_for_append, - starmap(fix_cmdline_params, - value))) - return res - - def init(self): - self.label = _("Variables") - self.help = _("write to the variable. VAR is the variable name. " - "VALUE is the new variable value. 
LOCATION is where " - "the env file is located (system,local,remote). " - "Variable will be removed from all env files if " - "only VAR is specified.") - - def raiseReadonlyIndexError(self, fieldname="", variablename="", - value=""): - """ - Behavior on change readonly index - """ - raise VariableError(_("Variable %s not found") % value) - - -class VariableClVariableModuleName(ReadonlyVariable): - """ - Available modules - """ - type = "list" - - def get(self): - site_packages = map(lambda x: path.join(x, "calculate"), - filter(lambda x: (x.endswith('site-packages') and - x.startswith('/usr/lib')), - sys.path)) - retlist = [] - for module, modDir in chain( - *map(lambda x: map(lambda y: (path.basename(y), y), - listDirectory(x, True, True)), - site_packages)): - if path.exists(path.join(modDir, "datavars.py")): - retlist.append(module) - mod_map = {'lib': 0, 'install': 1, 'core': 2} - return sorted(retlist, key=lambda x: mod_map.get(x, x)) - - -class VariableClVariableFullname(VarHelper, ReadonlyVariable): - """ - Variable name - """ - type = "list" - - def init(self): - self.label = _("Name") - - def get_all_var_name(self): - """ - Init all variable modules and get names - """ - for moduleName in self.Get('cl_variable_module_name'): - module_section = self.mapSection.get(moduleName, moduleName) - if module_section not in self.parent.importedModules: - self.parent.importVariables( - 'calculate.%s.variables' % moduleName) - self.parent.flIniFile() - for varname, vardata in sorted(self.parent.allVars.items()): - if (vardata[0] == module_section and - not varname.startswith('cl_variable')): - yield "%s.%s" % (module_section, varname) - - def get(self): - return list(self.get_all_var_name()) - - -class VariableClVariableType(VarHelper, ReadonlyVariable): - """ - Variable type - """ - type = "list" - - def init(self): - self.label = _("Type") - - def fill_type(self, var): - varobj = self.parent.getInfo(var) - return "%s%s" % (varobj.mode, varobj.getCharType()) - - def 
get(self): - return map(self.fill_type, - self.Get('cl_variable_fullname')) - - -class VariableClVariableValue(VarHelper, Variable): - """ - Variable value - """ - type = "list" - - class LazyVal(LazyString): - """ - Lazy value return string value only before using - """ - - def __init__(self, dv, var): - self.var = var - self.dv = dv - self.dv.flIniFile() - - def __call__(self): - var_val = self.dv.Get(self.var) - type_var = self.dv.getInfo(self.var).type - return self.dv.serialize(type_var, var_val) - - def __str__(self): - return self() - - def init(self): - self.label = _("Value") - - def fill_var(self, var): - """ - Fill all value of variables only lazy call - """ - if var not in ('cl_variable_value', 'cl_variable_data'): - return self.LazyVal(self.parent, var) - else: - return "" - - def get(self): - return map(self.fill_var, - self.Get('cl_variable_fullname')) - - def check(self, value): - checklist = [] - for var, write, val in filter(lambda x: x[1] != "", - zip(self.Get('cl_variable_fullname'), - self.Get('cl_variable_location'), - value)): - val = str(val) - type_var = self.parent.getInfo(var).type - # convert list value to list (',' - separator) - val = self.parent.unserialize(type_var, val) - # increase verbosity of error on set variable - checklist.append((var, val)) - # double check - for i in [0, 1]: - for var, val in checklist: - try: - self.parent.Set(var, val) - except VariableError as e: - raise VariableError([VariableError( - _("Error writing to variable %s:") % var), e]) - - -class VariableClVariableLocation(VarHelper, Variable): - """ - Flag write variable to ini - """ - type = "choice-list" - - def init(self): - self.label = _("Location") - - def choice(self): - return [("", "Default")] + map(lambda x: (x, x), - self.Get('cl_env_location')) - - def fill_write(self, var): - return self.parent.isFromIni(var) - - def get(self): - return map(lambda x: self.parent.isFromIni(x), - self.Get('cl_variable_fullname')) - - -class 
VariableClVariableFilter(VarHelper, Variable): - """ - Display variables - """ - type = "choiceedit" - value = "all" - - opt = ["--filter"] - metavalue = "FILTER" - - def init(self): - self.label = _("Filter") - self.help = _("display variables ('userset' displays user set " - "variables, 'writable' displays only writable " - "variables, 'system' displays user set variables " - "from the system env file, 'local' displays user " - "set variables from the local env file, 'remote' " - "displays user set variables from the remote env " - "file, 'all' displays all variables or filters them " - "by part of name)") - - def choice(self): - return (("userset", _("User set")), - ("writable", _("Writable")), - ("system", _("System")), - ("local", _("Local")), - ("remote", _("Remote")), - ("all", _("All"))) - - -class VariableClVariableShow(VarHelper, Variable): - """ - Display only value - """ - type = "choice" - value = "" - - opt = ["--only-value"] - metavalue = "VARIABLE" - - def init(self): - self.label = _("Show the value") - self.help = _("show the variable value") - - def raiseWrongChoice(self, name, choice_val, val, error): - re_index = re.compile("((?:\w+\.)?(\w+))(?:\[(\d+)\])?") - varname = re_index.search(val) - if varname and not varname.group(1) in choice_val: - raise VariableError(_("Variable %s not found") % val) - - def choice(self): - return self.Get('cl_variable_fullname') diff --git a/libs_crutch/core/wsdl_core.py b/libs_crutch/core/wsdl_core.py deleted file mode 100644 index 1cb13af..0000000 --- a/libs_crutch/core/wsdl_core.py +++ /dev/null @@ -1,795 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2011-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import sys - -from calculate.lib.datavars import VariableError, DataVarsError - -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_core3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - -from . import setup_package -from . import server.certificate as certificate -from . import server.groups as groups -from . import server.request as request -from . import set_vars -from .backup import Backup, BackupError -from calculate.core.utils.cl_backup import ClBackupAction -from calculate.core.utils.cl_backup_restore import ClBackupRestoreAction -from calculate.core.server.func import WsdlBase -from calculate.core.utils.cl_core_setup import ClCoreSetupAction -from calculate.core.utils.cl_core_patch import ClCorePatchAction -from calculate.core.utils.cl_config import ClConfigAction -from calculate.core.utils.cl_core_dispatch import ClCoreDispatchAction -from calculate.core.utils.cl_core_view_cert import ClCoreViewCert -from calculate.core.utils.cl_core_group import ( - ClCoreGroupShow, ClCoreGroupMod, ClCoreGroupAdd, ClCoreGroupDel) -from calculate.core.utils.cl_core_request import ( - ClCoreRequestShow, ClCoreRequestConfirm, ClCoreRequestDel) -from calculate.core.utils.cl_core_variables import (ClCoreVariables, - ClCoreVariablesShow) -from calculate.core.utils.cl_core_custom import ClCoreCustomAction -from calculate.core.utils.cl_core_restart import ClCoreRestartAction -from calculate.core.variables.action import Actions - - -class Wsdl(WsdlBase): - 
methods = [ - # - # Настройка пакета во время установки (cl-core-setup) - # - { - # идентификатор метода - 'method_name': "core_setup", - # категория метода - 'category': __('Configuration'), - # заголовок метода - 'title': __("Configure a Package"), - # иконка для графической консоли - 'image': 'calculate-core-setup,' - 'preferences-desktop-default-applications', - # метод присутствует в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-setup', - # права для запуска метода - 'rights': ['setup_package'], - # объект содержащий модули для действия - 'logic': {'UpdateConfigs': setup_package.UpdateConfigs}, - # описание действия - 'action': ClCoreSetupAction, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError, - setup_package.SetupPackageError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': 'merge', 'cl_verbose_set': "on"}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Configure a package"), - normal=('cl_core_pkg_name',), - expert=('cl_core_pkg_category', - 'cl_core_pkg_version_opt', - 'cl_core_pkg_slot_opt', - 'cl_core_pkg_path', - 'cl_core_arch_machine', - 'cl_templates_locate', - 'cl_core_pkg_system_set', - 'cl_core_pkg_desktop_set', - 'cl_core_pkg_root_set', - 'cl_verbose_set', - 'cl_dispatch_conf'), - next_label=_("Run"))]}, - # - # Патч исходников пакета (cl-core-patch) - # - { - # идентификатор метода - 'method_name': "core_patch", - # категория метода - 'category': __('Configuration'), - # заголовок метода - 'title': __("Patch"), - # иконка для графической консоли - 'image': None, - # метода нет в графической консоли - 'gui': False, - # консольная команда - 'command': 'cl-core-patch', - # права для запуска метода - 'rights': ['configure'], - # объект содержащий модули для действия - 'logic': {'UpdateConfigs': setup_package.UpdateConfigs}, - # описание действия - 'action': ClCorePatchAction, - # объект переменных - 
'datavars': "core", - 'native_error': (VariableError, DataVarsError, - setup_package.SetupPackageError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': 'patch', - 'cl_protect_use_set!': 'off'}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Configure a package"), - normal=('cl_core_pkg_name',), - expert=('cl_core_pkg_category', - 'cl_core_pkg_version', - 'cl_core_pkg_slot', - 'cl_core_pkg_path', - 'cl_core_arch_machine', - 'cl_templates_locate', - 'cl_verbose_set'), - next_label=_("Run"))]}, - # - # Обновление конфигурационных файлов (cl-dispatch-conf) - # - { - # идентификатор метода - 'method_name': "core_dispatch", - # категория метода - 'category': __('Update '), - # заголовок метода - 'title': __("Update Settings"), - # иконка для графической консоли - 'image': 'calculate-core-dispatch,edit-find-replace,computer', - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-dispatch-conf', - # права для запуска метода - 'rights': ['configure'], - # объект содержащий модули для действия - 'logic': {'UpdateConfigs': setup_package.UpdateConfigs}, - # описание действия - 'action': ClCoreDispatchAction, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError, - setup_package.SetupPackageError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': 'dispatch'}, - # описание груп (список лямбда функций) - 'groups': []}, - # - # Отобразить сертификаты - # - { - # идентификатор метода - 'method_name': "core_view_cert", - # категория метода - 'category': __('Utilities'), - # заголовок метода - 'title': __("Show Certificates"), - # иконка для графической консоли - 'image': 'calculate-core-view-cert,certificate-server,' - 'application-certificate', - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-view-cert', - # права для запуска метода - 'rights': ['certificates'], - # 
объект содержащий модули для действия - 'logic': {'Certificate': certificate.Certificate}, - # описание действия - 'action': ClCoreViewCert, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': { - 'cl_page_max!': lambda dv: len(dv.Get('cl_list_cert_id'))}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Certificates"), - normal=('cl_page_count', 'cl_page_offset'), - next_label=_("Next"))]}, - # - # Отобразить детали сертификата - # - { - # идентификатор метода - 'method_name': "core_detail_view_cert", - # категория метода - # 'category':__('Utilities'), - # заголовок метода - 'title': __("Certificate Details"), - # иконка для графической консоли - 'image': None, - # метод в графической консоли - 'gui': True, - # права для запуска метода - 'rights': ['certificates'], - # объект содержащий модули для действия - 'logic': {}, - # описание действия - 'action': None, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Certificate details"), - normal=('cl_cert_id', 'cl_cert_groups', - 'cl_cert_perms'), - custom_buttons=[('but0', _("Back"), - "core_view_cert", - "button")])]}, - # - # Группы - # - { - # идентификатор метода - 'method_name': "core_group_show", - # категория метода - 'category': __('Utilities'), - # заголовок метода - 'title': __("Show Groups"), - # иконка для графической консоли - 'image': 'calculate-core-group-show,user-group-properties,' - 'view-certificate-import,application-certificate', - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-group-show', - # права для запуска метода - 'rights': ['core_group'], - # объект содержащий модули для действия - 'logic': {'Groups': 
groups.Groups}, - # описание действия - 'action': ClCoreGroupShow, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': { - 'cl_page_max!': lambda dv: len(dv.Choice('cl_core_group'))}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Groups"), - normal=('cl_page_count', 'cl_page_offset'), - next_label=_("Next"))]}, - # - # Отобразить детали группы - # - { - # идентификатор метода - 'method_name': "core_detail_group", - # категория метода - # 'category':__('Utilities'), - # заголовок метода - 'title': __("Group Details"), - # иконка для графической консоли - 'image': None, - # метод в графической консоли - 'gui': True, - # права для запуска метода - 'rights': ['core_group'], - # объект содержащий модули для действия - 'logic': {}, - # описание действия - 'action': None, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Group details"), - normal=( - 'cl_core_group', - 'cl_core_group_rights'), - custom_buttons=[('but0', _("Back"), - "core_group_show", - "button"), - ('but1', _("Change"), - "core_group_mod", - "button"), - ('but2', _("Delete"), - "core_group_del", - "button")])]}, - # - # Изменить группу - # - { - # идентификатор метода - 'method_name': "core_group_mod", - # категория метода - # 'category':__('Utilities'), - # заголовок метода - 'title': __("Modify Group"), - # иконка для графической консоли - 'image': None, - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-groupmod', - # права для запуска метода - 'rights': ['core_group'], - # объект содержащий модули для действия - 'logic': {'Groups': groups.Groups}, - # описание действия - 'action': ClCoreGroupMod, - # объект переменных - 
'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': 'modify'}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Modify group"), - normal=( - 'cl_core_group', - 'cl_core_group_rights'), - next_label=_("Done"), - custom_buttons=[('but2', _("Confirm"), - 'core_change_group', - "button")])]}, - # - # Добавить группу - # - { - # идентификатор метода - 'method_name': "core_group_add", - # категория метода - # 'category':__('Utilities'), - # заголовок метода - 'title': __("Add a Group"), - # иконка для графической консоли - 'image': None, - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-groupadd', - # права для запуска метода - 'rights': ['core_group'], - # объект содержащий модули для действия - 'logic': {'Groups': groups.Groups}, - # описание действия - 'action': ClCoreGroupAdd, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': 'add'}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Add a group"), - normal=( - 'cl_core_group', - 'cl_core_group_rights'), - next_label=_("Add"))]}, - # - # Удалить группу - # - { - # идентификатор метода - 'method_name': "core_group_del", - # категория метода - # 'category':__('Utilities'), - # заголовок метода - 'title': __("Delete the Group"), - # иконка для графической консоли - 'image': None, - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-groupdel', - # права для запуска метода - 'rights': ['core_group'], - # объект содержащий модули для действия - 'logic': {'Groups': groups.Groups}, - # описание действия - 'action': ClCoreGroupDel, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных 
этого метода - 'setvars': {'cl_action!': 'delete'}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Delete the group"), - normal=('cl_core_group',), - next_label=_("Delete"))]}, - # - # Запрос на сертификат - # - { - # идентификатор метода - 'method_name': "core_request_show", - # категория метода - 'category': __('Utilities'), - # заголовок метода - 'title': __("Show Requests"), - # иконка для графической консоли - 'image': 'calculate-core-request-show,view-certificate-import,' - 'application-certificate', - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-request-show', - # права для запуска метода - 'rights': ['request'], - # объект содержащий модули для действия - 'logic': {'Request': request.Request}, - # описание действия - 'action': ClCoreRequestShow, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': { - 'cl_page_max!': lambda dv: len(dv.Get('cl_list_req_id'))}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Show requests"), - normal=('cl_page_count', 'cl_page_offset'), - next_label=_("Next"))]}, - # - # Отобразить детали запроса - # - { - # идентификатор метода - 'method_name': "core_detail_request", - # категория метода - # 'category':__('Utilities'), - # заголовок метода - 'title': __("Request Details"), - # иконка для графической консоли - 'image': None, - # метод в графической консоли - 'gui': True, - # права для запуска метода - 'rights': ['request'], - # объект содержащий модули для действия - 'logic': {}, - # описание действия - 'action': None, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Group details"), - normal=('cl_req_id', 
'cl_req_user_name', - 'cl_req_ip', - 'cl_req_mac', 'cl_req_date', - 'cl_req_location', - 'cl_req_group'), - custom_buttons=[('but0', _("Back"), - "core_request_show", - "button"), - ('but1', _("Confirm"), - "core_request_confirm", - "button"), - ('but2', _("Delete"), - "core_request_del", - "button")])]}, - # - # Удалить запрос - # - { - # идентификатор метода - 'method_name': "core_request_del", - # категория метода - # 'category':__('Utilities'), - # заголовок метода - 'title': __("Delete the Request"), - # иконка для графической консоли - 'image': None, - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-request-del', - # права для запуска метода - 'rights': ['request'], - # объект содержащий модули для действия - 'logic': {'Request': request.Request}, - # описание действия - 'action': ClCoreRequestDel, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': 'delete'}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Delete the request"), - normal=('cl_req_id',), - next_label=_("Delete"))]}, - # - # Подтвердить запрос - # - { - # идентификатор метода - 'method_name': "core_request_confirm", - # категория метода - # 'category':__('Utilities'), - # заголовок метода - 'title': __("Confirm the Request"), - # иконка для графической консоли - 'image': None, - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-request-confirm', - # права для запуска метода - 'rights': ['request'], - # объект содержащий модули для действия - 'logic': {'Request': request.Request}, - # описание действия - 'action': ClCoreRequestConfirm, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': 'confirm'}, - # описание груп (список лямбда функций) 
- 'groups': [ - lambda group: group(_("Delete the request"), - normal=('cl_req_id', 'cl_req_group'), - next_label=_("Delete"))]}, - # - # установить переменные - # - { - # идентификатор метода - 'method_name': "core_variables", - # категория метода - 'category': __('Utilities'), - # заголовок метода - 'title': __("Setup Variables"), - # иконка для графической консоли - 'image': 'calculate-core-variables,applications-versioncontrol,' - 'text-x-preview,text-x-makefile', - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-variables', - # права для запуска метода - 'rights': ['setup_variables'], - # объект содержащий модули для действия - 'logic': {'Variables': set_vars.Variables}, - # описание действия - 'action': ClCoreVariables, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Setup variables"), - normal=('cl_variable_data',), - next_label=_("Save"))]}, - # - # отобразить переменные - # - { - # идентификатор метода - 'method_name': "core_variables_show", - # категория метода - 'category': __('Utilities'), - # заголовок метода - 'title': __("View Variables"), - # иконка для графической консоли - 'image': None, - # метод в графической консоли - 'gui': False, - # консольная команда - 'command': 'cl-core-variables-show', - # права для запуска метода - 'rights': ['configure'], - # объект содержащий модули для действия - 'logic': {'Variables': set_vars.Variables}, - # описание действия - 'action': ClCoreVariablesShow, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Setup variables"), - normal=( - 'cl_variable_filter', - 'cl_variable_show'), - 
next_label=_("Show"))]}, - # - # Выполнить настройку пакета (cl-config) - # - { - # идентификатор метода - 'method_name': "core_config", - # категория метода - 'category': __('Configuration'), - # заголовок метода - 'title': __("Config"), - # иконка для графической консоли - 'image': None, - # метода нет в графической консоли - 'gui': False, - # консольная команда - 'command': 'cl-config', - # права для запуска метода - 'rights': ['configure'], - # объект содержащий модули для действия - 'logic': {'UpdateConfigs': setup_package.UpdateConfigs}, - # описание действия - 'action': ClConfigAction, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError, - setup_package.SetupPackageError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': 'config', 'cl_verbose_set': "on"}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Configure a package"), - normal=('cl_core_pkg_name',), - expert=('cl_core_pkg_category', - 'cl_core_pkg_version_opt', - 'cl_core_pkg_slot_opt', - 'cl_templates_locate', - 'cl_verbose_set'), - next_label=_("Run"))]}, - # - # отобразить переменные - # - { - # идентификатор метода - 'method_name': "core_custom", - # категория метода - 'category': __('Utilities'), - # заголовок метода - 'title': __("Custom Action"), - # иконка для графической консоли - 'image': 'calculate-core-custom,gnome-desktop-config,desktop-config', - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-custom', - # права для запуска метода - 'rights': ['custom_configure'], - # объект содержащий модули для действия - 'logic': {'UpdateConfigs': setup_package.UpdateConfigs}, - # описание действия - 'action': ClCoreCustomAction, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_verbose_set': "on", 'cl_human_edit_set': "on"}, - # описание 
груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Custom action"), - normal=( - 'ac_custom_name', 'cl_human_edit_set', - 'cl_verbose_set'), - expert=( - 'ur_core_login', 'cl_core_arch_machine', - 'cl_templates_locate', - 'cl_dispatch_conf'), - next_label=_("Run"))]}, - # - # перезапустить сервис calculate core - # - { - # идентификатор метода - 'method_name': "core_restart", - # категория метода - 'category': __('Utilities'), - # заголовок метода - 'title': __("Restart calculate-core"), - # иконка для графической консоли - 'image': 'calculate-core-restart,view-refresh', - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-core-restart', - # права для запуска метода - 'rights': ['core_restart'], - # объект содержащий модули для действия - 'logic': {'UpdateConfigs': setup_package.UpdateConfigs}, - # описание действия - 'action': ClCoreRestartAction, - # объект переменных - 'datavars': "core", - 'native_error': (VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': 'restart'}, - # описание груп (список лямбда функций) - 'groups': []}, - # - # создание резервной копии настроек - # - { - # идентификатор метода - 'method_name': "backup", - # категория метода - 'category':__('Backup'), - # заголовок метода - 'title': __("Backup"), - # иконка для графической консоли - 'image': 'calculate-backup', - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-backup', - # права для запуска метода - 'rights': ['backup'], - # объект содержащий модули для действия - 'logic': {'Backup': Backup}, - # описание действия - 'action': ClBackupAction, - # объект переменных - 'datavars': "core", - 'native_error': (BackupError, VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': Actions.Backup}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("System backup"), - 
normal=('cl_backup_verbose_set',), - next_label=_("Run"))]}, - # - # восстановление настроек из резервной копии - # - { - # идентификатор метода - 'method_name': "backup_restore", - # категория метода - 'category':__('Backup'), - # заголовок метода - 'title': __("Restore"), - # иконка для графической консоли - 'image': 'calculate-backup-restore', - # метод в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-backup-restore', - # права для запуска метода - 'rights': ['backup'], - # объект содержащий модули для действия - 'logic': {'Backup': Backup}, - # описание действия - 'action': ClBackupRestoreAction, - # объект переменных - 'datavars': "core", - 'native_error': (BackupError, VariableError, DataVarsError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': Actions.BackupRestore, - 'core.cl_backup_action': Actions.Restore}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("System restore"), - normal=('cl_backup_verbose_set',), - brief=('cl_backup_file', 'cl_backup_time'), - next_label=_("Run"))], - 'brief': {'next': __("Run"), - 'name': __("System restore")}}, - ] diff --git a/libs_crutch/install/__init__.py b/libs_crutch/install/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/install/datavars.py b/libs_crutch/install/datavars.py deleted file mode 100644 index 8b4c1e4..0000000 --- a/libs_crutch/install/datavars.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -__app__ = 'calculate-install' -__version__ = '3.1.8' - -import sys -from calculate.lib.datavars import DataVars - -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class DataVarsInstall(DataVars): - """Variable class for installation""" - - def importInstall(self, **args): - """Import install variables""" - self.importVariables() - self.importVariables('calculate.install.variables') - self.defaultModule = "install" diff --git a/libs_crutch/install/distr.py b/libs_crutch/install/distr.py deleted file mode 100644 index 2e1fd53..0000000 --- a/libs_crutch/install/distr.py +++ /dev/null @@ -1,2096 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from os import path -from random import choice -import string -import os -from time import sleep -import datetime -import re -import sys -import operator -import json -from shutil import copyfile, copytree -from subprocess import Popen, PIPE -from itertools import * -from functools import partial -from calculate.lib.datavars import VariableError - -from calculate.lib.utils.mount import isMount, Btrfs, BtrfsError -from calculate.lib.utils.files import (removeDir, - processProgress, STDOUT, - typeFile, pathJoin, process, - listDirectory, checkUtils, - MAGIC_COMPRESS, MAGIC_SYMLINK, - MAGIC_CONTINUE, makeDirectory, - isEmpty, check_rw, calculate_exclude, - PercentProgress, getProgPath, xztaropen) -import calculate.lib.utils.device as device -from calculate.lib.utils.device import (detectDeviceForPartition, - countPartitions) -from calculate.lib.utils.tools import classproperty, Signal, traverse -from calculate.lib.utils.text import _u8 -from calculate.lib.variables.linux import LinuxDataVars, Linux - -from calculate.lib.cl_lang import setLocalTranslate, _ -from functools import reduce - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class DefaultMountPath(object): - """ - Пути по умолчанию для монтирования образов - """ - BaseMountPath = '/run/calculate/mount' - BuildDirectory = '/var/calculate/tmp/iso' - - @classproperty - def IsoImage(cls): - return path.join(cls.BaseMountPath, "iso") - - @classproperty - def SquashImage(cls): - return path.join(cls.BaseMountPath, "squash") - - @classproperty - def ArchiveImage(cls): - return path.join(cls.BaseMountPath, "tarball") - - @classproperty - def DefaultMount(cls): - return path.join(cls.BaseMountPath, "distro") - - @classproperty - def InstallMount(cls): - return path.join(cls.BaseMountPath, "install") - - -class cpProcessProgress(processProgress): - def init(self, *args, **kwargs): - self.maxfiles = kwargs.get('maxfiles', 1) - self.stderr = STDOUT - self.command.append("-dRv") - - def processInit(self): 
- self.value = 0 - self.percent = 0 - return 0 - - def processEnd(self): - self.value = self.maxfiles - self.percent = 100 - return 100 - - def processString(self, strdata): - if strdata.startswith("cp:") or strdata.startswith("/bin/cp:"): - self._cachedata.append(strdata.partition(": ")[2]) - if "->" in strdata: - self.value += 1 - if self.maxfiles: - percent = 100 * self.value / self.maxfiles - percent = min(percent, 99) - if percent > self.percent: - self.percent = percent - return percent - else: - return None - - -def progressCopyFile(source, dest): - """ - Copy file with progress - """ - size = int(os.lstat(source).st_size) - bufsize = (100 - (size % 100) + size) / 100 - with open(source, 'rb') as infile: - with open(dest, 'w') as outfile: - for i in xrange(1, 101): - outfile.write(infile.read(bufsize)) - yield i - - -class DistributiveError(Exception): - """Error for distributive operations""" - pass - - -class Distributive(object): - """ - Distributive object. Parent object for all distributive. 
- """ - - class Type(object): - Directory = "dir" - Partition = "part" - SquashFS = "squash" - Archive = "arch" - Iso = "iso" - Layered = "layered" - - mountError = _("Failed to mount") + " %s:\n%s" - reLive = re.compile(r"^live[^.]*\.squashfs(\.(\d+))?$", re.S) - contentCache = {} - needFormat = True - - def __init__(self, parent=None): - self.childs = [] - self.ref = 0 - self.locked = False - # if specified parent - if isinstance(parent, Distributive): - # save parent type for correct resource release - self.parent = parent - # append this object as child to specified parent - parent.childs.append(self) - else: - self.parent = None - - @classmethod - def fromFile(cls, filename): - """Get Distributive object by filename""" - # MAGIC_COMPRESS 0x000004 Check inside compressed files - tf = typeFile(magic=MAGIC_COMPRESS | MAGIC_SYMLINK | MAGIC_CONTINUE) - ftype = tf.getMType(filename) - if ftype: - if "block special" in ftype: - for distrType in (IsoDistributive, FlashDistributive, - PartitionDistributive): - distr = distrType(filename) - res = distr.probe() - distr.close() - if res: - return distr - if "ISO 9660 CD-ROM" in ftype: - return IsoDistributive(filename) - elif "7-zip" in ftype or \ - "POSIX tar archive" in ftype: - if "rootfs.tar" in filename: - return ContainerDistributive(path.dirname(filename)) - else: - return ArchiveDistributive(filename) - elif "Squashfs filesystem" in ftype: - return SquashDistributive(filename) - elif path.isdir(filename): - if path.isfile(path.join(filename, "rootfs.tar.xz")): - return ContainerDistributive(filename) - elif path.isfile(path.join(filename, "livecd")): - return IsoDistributive(filename) - else: - important_dirs = ("etc", "lib", "bin", "sbin", "var") - if all(path.exists(path.join(filename, x)) - for x in important_dirs): - return DirectoryDistributive(filename) - raise DistributiveError(_("Wrong distribution") + " '%s':\n%s" \ - % (filename, ftype)) - - def getType(self): - return _("empty") - - def detach(self): - 
"""Detach child distributive from parent. - - At example: ArchiveDistributive create child DirectoryDistributive by - unpacking to temporary directory and at close method remove it. If the - removing directroy do not need, then need call detach in - DirectoryDistributive object - - dist1 = ArchiveDistributive(file="/os.tar.bz2",mdirectory="/tmp/test") - dist2 = dist1.convertToDirectory() - dist2.detach() - dist1.close() - ... - """ - self.parent = None - - def reserve(self): - self.locked = True - - def release(self): - self.locked = False - - def __enter__(self): - self.ref += 1 - return self - - def __exit__(self, type, value, traceback): - self.ref -= 1 - if not self.ref: - self.close() - - def close(self): - """Release all child distributive and release himself. - - Need call this method at end work with object for right releasing - resources. - - Example: - dist1 = PartitionDistributive(partition="/dev/sda2") - dist2 = dist1.convertToDirectory() - dist1.close() - """ - # print "Close", self, self.locked - if self.locked: - return False - if self.ref: - self.ref -= 1 - return False - else: - try: - # close all child - if self.childs: - for child in reversed(self.childs): - # check detach - if child.parent: - child.close() - self.childs = [] - # if has parent - if self.parent: - self.parent.releaseChild(self) - finally: - self.parent = None - self.childs = [] - return True - - def releaseChild(self, child): - """Method of release child state of distributive - - At example: PartitionDistributive may be DirectoryDistributive by - mounting it to directory. 
But at end this directory must be - unmounted.""" - pass - - def convertToDirectory(self): - """Default c raise error about impossible convert object""" - raise DistributiveError(_("Failed to convert") + " '%s' " \ - % self.__class__.__name__ + _("to") + \ - " '%s'" % "DirectoryDistributive") - - # def __del__(self): - # """Uncomment this method for automaticaly release all distributive - # instance""" - # self.close() - - @staticmethod - def clear_empty_directories(dn): - dn = path.dirname(dn) - while dn and path.exists(dn) and isEmpty(dn) and not isMount(dn): - os.rmdir(dn) - dn = path.dirname(dn) - - def getDirectory(self): - """Get directory which contains distro""" - return self.convertToDirectory().directory - - def getBootDirectory(self): - """Get directory which contains boot""" - return path.join(self.getDirectory(), "boot") - - def getEfiDirectory(self): - """Get directory which contains boot/efi""" - return self.getEfiDirectories()[0] - - def getEfiDirectories(self): - return [path.join(self.getDirectory(), "boot/efi")] - - def _makeDirectory(self, pathname): - """Make directory and parent. 
- - If directory exists then return False else True""" - try: - parent = path.split(path.normpath(pathname))[0] - if not path.exists(parent): - self._makeDirectory(parent) - else: - if path.exists(pathname): - return False - os.mkdir(pathname) - return True - except Exception as e: - raise DistributiveError(_("Failed to create the directory") + - " '%s':\n%s" % (pathname, str(e))) - except KeyboardInterrupt as e: - raise DistributiveError(_("Failed to create the directory") + - " '%s':\n%s" % (pathname, str(e))) - - def get_squash_size(self): - """ - Получить размер squash образа - :return: - """ - raise DistributiveError(_("Squash size unsupported for %s") % - str(self.__class__.__name__)) - - def _removeDirectory(self, directory): - """Remove directory and files contained in it""" - try: - if path.exists(directory): - removeDir(directory) - except Exception as e: - raise DistributiveError(_("Failed to remove the directory from") + \ - " '%s':\n%s" % (directory, str(e))) - except KeyboardInterrupt as e: - raise DistributiveError(_("Failed to remove the directory from") + \ - " '%s':\n%s" % (directory, str(e))) - - def _copyfile(self, infile, outfile): - try: - copyfile(infile, outfile) - except Exception as e: - raise DistributiveError(_("Failed to copy") + " '%s' to '%s':\n%s" \ - % (infile, outfile, str(e))) - - def _copytree(self, indir, outdir): - try: - copytree(indir, outdir, symlinks=True) - except Exception as e: - raise DistributiveError(_("Failed to copy") + " '%s' to '%s':\n%s" \ - % (indir, outdir, str(e))) - except KeyboardInterrupt as e: - raise DistributiveError(_("Failed to copy") + " '%s' to '%s':\n%s" \ - % (indir, outdir, str(e))) - - def rsync(self, fromdir, todir, callbackProgress=None, - byfile=None, filesnum=0, noperm=False, **kwargs): - """Copy files from 'fromdir' directory to 'todir' directory""" - cpCmd = getProgPath('/bin/cp') - if not cpCmd: - raise DistributiveError(_("'%s' not found") % "cp") - try: - joinFrom = partial(path.join, 
fromdir) - params = [cpCmd, "-x"] + \ - (["--no-preserve=mode,ownership"] if noperm else ["-a"]) + \ - map(joinFrom, - ifilterfalse(byfile or operator.not_, - listDirectory(fromdir))) + \ - [todir] - cpProcess = cpProcessProgress(*params, - maxfiles=filesnum, stderr=STDOUT) - for perc in cpProcess.progress(): - if callbackProgress and not byfile: callbackProgress(perc) - res = cpProcess.success() - errmes = cpProcess.read() - # copy by one file - if byfile: - percFiles = filter(byfile, - listDirectory(fromdir)) - if len(percFiles) > 1: - maxPercOnFile = 100 / len(percFiles) - recountPerc = \ - lambda perc, num: perc / len( - percFiles) + maxPercOnFile * num - else: - recountPerc = lambda perc, num: perc - for i, copyfile in enumerate(percFiles): - for perc in progressCopyFile(joinFrom(copyfile), - path.join(todir, copyfile)): - if callbackProgress: - callbackProgress(recountPerc(perc, i)) - except Exception as e: - res = False - errmes = str(e) - if not res: - raise DistributiveError(_("Failed to copy files from") + \ - " '%s' " % fromdir + _("to") + \ - " '%s':\n%s" % (todir, errmes)) - - def _mountToBind(self, srcDirectory, destDirectory): - """Mount srcDirectory to destDirectory""" - mount = process('/bin/mount', "-o", "bind", srcDirectory, destDirectory, - stderr=STDOUT) - if mount.success(): - return True - else: - try: - os.rmdir(destDirectory) - except OSError: - pass - raise DistributiveError( - self.mountError % (srcDirectory, mount.read())) - - def performFormat(self): - pass - - def formatPartition(self, dev, format="ext4", label="", purpose=None): - pass - - def rndString(self): - """Get random string with len 8 char""" - return "".join([choice(string.ascii_letters + string.digits) - for i in xrange(0, 8)]) - - def _getSquashNum(self, reMatch): - if reMatch.groups()[1] and reMatch.groups()[1].isdigit(): - return int(reMatch.groups()[1]) - else: - return 0 - - def _getLastLive(self, directory): - """Get last live squashfs image from directory""" - 
squashfiles = filter(lambda x: x, - map(self.reLive.search, - listDirectory(directory))) - if squashfiles: - return max(squashfiles, key=self._getSquashNum).group() - return None - - def _mountToDirectory(self, file, directory, mountopts="", count=2): - """Mount squashfs to directory""" - # 2816 code return by mount if device is absent (update /dev by udev) - NO_SUCH_DEVICE = 2816 - if isMount(directory): - raise DistributiveError( - _("Failed to mount to the directory: %s\n") - % directory + _("Directory already mounted")) - mountopts_list = filter(lambda x: x, - mountopts.split(" ")) - mountProcess = process('/bin/mount', file, directory, *mountopts_list, - stderr=STDOUT) - if mountProcess.success(): - return True - else: - # try mount 3 times with interval 0.5 second - if mountProcess.returncode() == NO_SUCH_DEVICE and count: - sleep(0.5) - mountProcess.close() - return self._mountToDirectory(file, directory, mountopts, - count - 1) - try: - self._removeDirectory(directory) - except Exception: - pass - raise DistributiveError( - self.mountError % (file, mountProcess.read())) - - def _umountDirectory(self, directory): - """Umount directory""" - if isMount(directory): - processUmount = None - for wait in [0, 0.5, 2, 5]: - sleep(wait) - processUmount = process('/bin/umount', directory, stderr=STDOUT) - if processUmount.success(): - return True - else: - raise DistributiveError(_("Failed to umount") + " %s:\n%s" % - (directory, processUmount.read())) - else: - return True - - def _getMntDirectory(self, directory): - """Get directory name, which will use for mounting or unpacking - - If queried name is not free then to name append random string - """ - newDirectoryName = directory - for i in range(2, 9999): - if not path.exists(newDirectoryName): - return newDirectoryName - else: - newDirectoryName = "%s.%02d" % (directory, i) - return newDirectoryName - - @staticmethod - def getInfoFromDirectory(directory): - d = {} - try: - if path.lexists(path.join(directory, 
'lib64')): - d['os_arch_machine'] = 'x86_64' - elif path.lexists(path.join(directory, 'lib')): - d['os_arch_machine'] = 'i686' - else: - d['os_arch_machine'] = '' - dv = LinuxDataVars(systemRoot=directory) - d["os_linux_shortname"] = dv.Get('os_linux_shortname') - d['os_linux_ver'] = dv.Get('os_linux_ver') - d['os_linux_build'] = dv.Get('os_linux_build') - d['os_linux_name'] = dv.Get('os_linux_name') - d['os_linux_subname'] = dv.Get('os_linux_subname') - d['os_linux_system'] = dv.Get('os_linux_system') - d['cl_make_profile'] = dv.Get('cl_make_profile') - d['cl_profile_name'] = dv.Get('cl_profile_name') - # make lazy call - d['os_linux_files'] = partial(dv.Get, 'os_linux_files') - d['os_chrootable_set'] = "off" - try: - if process("/usr/bin/chroot", directory, "/bin/true").success(): - d['os_chrootable_set'] = "on" - except Exception: - pass - except VariableError: - pass - return d.copy() - - def getInfo(self, filename=None): - """Get info from content""" - image = None - try: - # get from cache - keyCache = None - if not filename: - for keyname in ("file", "partition", "directory"): - if hasattr(self, keyname): - keyCache = getattr(self, keyname) - elif filename in self.contentCache: - keyCache = filename - - if keyCache and keyCache in self.contentCache: - return Distributive.contentCache[keyCache].copy() - - distr = None - # may be directory is isodir (directory which contains iso image) - extname = "isodir" - try: - distr = self.fromFile(filename) if filename else self - mapExtName = {DirectoryDistributive: "dir", - IsoDistributive: "isodir", - FlashDistributive: "flash", - ContainerDistributive: "lxc", - PartitionDistributive: "partdir"} - extname = mapExtName.get(distr.__class__, "") - if isinstance(distr, ContainerDistributive): - d = distr.get_information() - d['ext'] = extname - return d.copy() - if isinstance(distr, ArchiveDistributive): - raise DistributiveError("Not for archive distributive") - image = distr.convertToDirectory() - except Exception as e: 
- # TODO: отладка почему образ не подходит - #print str(e) - if distr: - distr.close() - return {}.copy() - d = self.getInfoFromDirectory(image.directory) - d['ext'] = extname - if distr: - distr.close() - if keyCache and not path.isdir(keyCache): - Distributive.contentCache[keyCache] = d - return d.copy() - finally: - if image: - image.close() - - @classmethod - def unserialize(cls, data, parent=None): - class_mapper = {cls.Type.Directory: DirectoryDistributive, - cls.Type.Iso: IsoDistributive, - cls.Type.Partition: PartitionDistributive, - cls.Type.SquashFS: SquashDistributive, - cls.Type.Layered: LayeredDistributive} - if not data.get('type', '') in class_mapper: - raise DistributiveError(_("Failed to unserialize type %s") % - data.get('type', '')) - return class_mapper[data['type']].unserialize(data, parent=parent) - - @staticmethod - def required(*params): - def decor(f): - def wrapper(cls, data, **kw): - if any(not x in data for x in params): - raise DirectoryDistributive( - _("Failed to unserialize %s") % cls.__class__.__name__) - return f(cls, data, **kw) - - return wrapper - - return decor - - def restore(self): - raise DistributiveError(_("Recovery is not implemented")) - - def is_invalid(self): - try: - return not "cl_make_profile" in self.getInfo() - except DistributiveError: - return True - - def post_clear(self): - return True - - -class DirectoryDistributive(Distributive): - """ - Дистрибутив в директории - """ - data = [{'name': 'proc', - 'type': 'proc', - 'target': 'proc', - 'source': 'none'}, - {'name': 'sys', - 'type': 'sysfs', - 'target': 'sys', - 'source': 'none'}, - {'name': 'dev', - 'type': 'bind', - 'target': 'dev', - 'source': '/dev'}, - {'name': 'devpts', - 'type': 'bind', - 'target': 'dev/pts', - 'source': '/dev/pts'}, - {'name': 'remote', - 'type': 'bind', - 'target': 'var/calculate/remote', - 'source': '/var/calculate/remote'}, - ] - - def __init__(self, directory, parent=None, mdirectory=None): - Distributive.__init__(self, 
parent=parent) - self.no_unmount = False - self.directory = directory - self.mdirectory = mdirectory - self.system_mounted = False - if not parent: - self._makeDirectory(self.directory) - - def hasSystemDirectories(self): - return self.system_mounted or self.directory == '/' - - def mountSystemDirectories(self, skip=("remote",)): - """ - Подключить к дистрибутиву системые ресурсы (/proc, /sys) - :return: - """ - if not self.system_mounted: - for obj in filter(lambda x: x['name'] not in skip, self.data): - target_path = path.join(self.directory, obj['target']) - if obj['type'] == 'bind': - if not path.exists(target_path): - self._makeDirectory(target_path) - if not path.exists(obj['source']): - self._makeDirectory(obj['source']) - self._mountToBind(obj['source'], target_path) - else: - self._mountToDirectory(obj['source'], target_path, - "-t %s" % obj['type']) - self.system_mounted = True - - def umountSystemDirectories(self): - for obj in reversed(self.data): - target_path = path.join(self.directory, obj['target']) - if isMount(target_path): - self._umountDirectory(target_path) - self.system_mounted = False - - def getType(self): - return _("directory '%s'") % _u8(self.directory) - - def bindDirectory(self, mdirectory): - for child in self.childs: - if isinstance(child, DirectoryDistributive) and \ - mdirectory in child.directory: - return child - mdirectory = self._getMntDirectory(mdirectory) - self._makeDirectory(mdirectory) - self._mountToBind(self.directory, mdirectory) - return DirectoryDistributive(mdirectory, parent=self) - - def releaseChild(self, child): - """Remove child Directory distributive""" - if isinstance(child, DirectoryDistributive): - self._umountDirectory(child.directory) - self._removeDirectory(child.directory) - child.directory = None - - def close(self): - if not self.locked and self.system_mounted: - self.umountSystemDirectories() - Distributive.close(self) - - def convertToDirectory(self): - if self.mdirectory: - return 
self.bindDirectory(self.mdirectory) - else: - return self - - def performFormat(self): - """Format for directory - removing all files""" - rm_command = getProgPath("/bin/rm") - p = process(rm_command, "-rf", "--one-file-system", self.directory, - stderr=process.STDOUT) - if p.success(): - self._makeDirectory(self.directory) - return True - else: - raise DistributiveError(_("Failed to clean directory") + - " %s:\n%s" % (self.directory, p.read())) - - def post_clear(self): - if path.exists(self.directory): - rm_command = getProgPath("/bin/rm") - p = process(rm_command, "-rf", "--one-file-system", self.directory) - p.success() - self.clear_empty_directories(self.directory) - return True - - def installFrom(self, source, **kwargs): - """Install distributive to directory from source distributive""" - if isinstance(source, ArchiveDistributive): - source.unpackTo(self.directory) - else: - # get source distributive as directory distributive - dFrom = source.convertToDirectory() - # copy distributive from source to this - self.rsync(dFrom.directory, self.directory, **kwargs) - - def serialize(self): - d = {'type': Distributive.Type.Directory, - 'directory': self.directory, - 'system_mounted': self.system_mounted, - 'childs': [x.serialize() for x in self.childs]} - if self.mdirectory: - d['mdirectory'] = self.mdirectory - return d - - @classmethod - @Distributive.required("directory", "system_mounted") - def unserialize(cls, data, parent=None): - ld = DirectoryDistributive(_u8(data['directory']), parent=parent) - ld.system_mounted = _u8(data['system_mounted']) - if "mdirectory" in data: - ld.mdirectory = _u8(data['mdirectory']) - ld.childs = [Distributive.unserialize(x, parent=ld) for x in - data.get('childs', [])] - return ld - - -class DataPartition(object): - """Data partition""" - dev = None - mountPoint = None - fileSystem = "ext4" - isFormat = False - systemId = None - partitionTable = None - - -class MultiPartitions: - """Data partition list""" - - def __init__(self): 
- self.listPartitions = [] - - def addPartition(self, **argv): - """Add partition in data partition list""" - dictDataPart = reduce(lambda x, y: \ - x.update({y: getattr(DataPartition, y)}) or x, - filter(lambda x: not x.startswith('_'), - DataPartition.__dict__), {}) - updateAttrData = filter(lambda x: x[1] is not None, - dictDataPart.items()) - defaultAttr = [] - for attrName, attrValue in updateAttrData: - if not attrName in argv.keys(): - defaultAttr.append(attrName) - argv[attrName] = attrValue - if set(argv.keys()) != set(dictDataPart.keys()): - notFoundAttr = set(dictDataPart.keys()) - set(argv.keys()) - if notFoundAttr: - raise DistributiveError(_("The following attributes " - "are not specified: (%s)") \ - % ", ".join( - map(lambda x: "DataPartition.%s" % x, notFoundAttr))) - unnecessaryAttr = (set(dictDataPart.keys()) ^ set(argv.keys())) - \ - set(dictDataPart.keys()) - if unnecessaryAttr: - raise DistributiveError(_("Failed to use attributes (%s) ") \ - % ", ".join( - map(lambda x: "DataPartition.%s" % x, unnecessaryAttr))) - else: - partObj = DataPartition() - for attr, value in argv.items(): - if attr in defaultAttr: - continue - setattr(partObj, attr, value) - self.listPartitions.append(partObj) - - def getSystemId(self): - """Get systemID for change [None,82,...]""" - return map(lambda x: x.systemId, self.listPartitions) - - def getPartitionTable(self): - """Get systemID for change [dos,gpt,...]""" - return map(lambda x: x.partitionTable, self.listPartitions) - - def getIsFormat(self): - """Get list is format [True,...]""" - return map(lambda x: x.isFormat, self.listPartitions) - - def getFileSystems(self): - """Get list filesystems ["reiserfs",...]""" - return map(lambda x: x.fileSystem, self.listPartitions) - - def getPartitions(self): - """Get list partition ["/dev/sda",...]""" - return map(lambda x: x.dev, self.listPartitions) - - def getMountPoints(self): - """Get list mount point ["/boot",...]""" - return map(lambda x: x.mountPoint, 
self.listPartitions) - - -class FormatProcess(process): - format_util = "" - dos_id = "0" - gpt_id = "0" - - def __init__(self, dev, label=None, purpose=None, compression=None): - self.dev = dev - self._label = label - self.purpose = purpose - self.compression = compression - super(FormatProcess, self).__init__(*self.format_command()) - - def format_command(self): - cmd = getProgPath(self.format_util) - if not cmd: - raise DistributiveError(_("command '%s' not found") % cmd) - - params = (self.bootparam() - if self.purpose in ("root", "boot") - else self.param()) - return list(traverse( - [cmd] + [x for x in params if x])) - - def postaction(self): - return True - - def bootparam(self): - return self.param() - - def param(self): - return self.get_label(), self.dev - - def get_label(self): - if not self._label: - return () - else: - return self.label() - - def label(self): - return () - - -class FormatProcessGeneric(FormatProcess): - dos_id = "83" - gpt_id = "8300" - - def label(self): - return "-L", self._label - - def param(self): - return self.get_label(), self.dev - - -class FormatExt2(FormatProcessGeneric): - format_util = "/sbin/mkfs.ext2" - - -class FormatExt3(FormatExt2): - format_util = "/sbin/mkfs.ext3" - - -class FormatExt4(FormatExt2): - format_util = "/sbin/mkfs.ext4" - - -class FormatJfs(FormatProcessGeneric): - format_util = "/sbin/mkfs.jfs" - - def param(self): - return self.get_label(), "-f", self.dev - - -class FormatF2FS(FormatProcessGeneric): - format_util = "/usr/sbin/mkfs.f2fs" - - def label(self): - return "-l", self._label - - def param(self): - return self.get_label(), "-f", self.dev - -class FormatReiserfs(FormatProcessGeneric): - format_util = "/sbin/mkfs.reiserfs" - - def param(self): - return self.get_label(), "-f", self.dev - - def label(self): - return "-l", self._label - - -class FormatNilfs2(FormatProcessGeneric): - format_util = "/sbin/mkfs.nilfs2" - - -class FormatXfs(FormatProcessGeneric): - format_util = "/sbin/mkfs.xfs" - - def 
param(self): - return self.get_label(), "-f", self.dev - - def nosparse(self): - return "-i", "sparse=0" - - def bootparam(self): - return self.get_label(), self.nosparse(), "-f", self.dev - - -class FormatVfat(FormatProcess): - dos_id = "0b" - gpt_id = "0700" - format_util = "/usr/sbin/mkfs.vfat" - - def param(self): - return self.get_label(), "-F", "32", self.dev - - def label(self): - return "-n", self._label - - -class FormatNtfs(FormatProcessGeneric): - dos_id = "07" - gpt_id = "0700" - format_util = "/usr/sbin/mkfs.ntfs" - - def param(self): - return self.get_label(), "-FQ", self.dev - - -class FormatUefi(FormatVfat): - dos_id = "ef" - gpt_id = "EF00" - - def label(self): - return () - - -class FormatSwap(FormatProcess): - dos_id = "82" - gpt_id = "8200" - format_util = "/sbin/mkswap" - - def param(self): - return self.dev, - - -class FormatBtrfs(FormatProcessGeneric): - format_util = "/sbin/mkfs.btrfs" - - def param(self): - return "-f", self.get_label(), self.dev - - -class PartitionDistributive(Distributive): - format_map = { - 'ext2': FormatExt2, - 'ext3': FormatExt3, - 'ext4': FormatExt4, - 'jfs': FormatJfs, - 'f2fs': FormatF2FS, - 'reiserfs': FormatReiserfs, - 'btrfs': FormatBtrfs, - 'nilfs2': FormatNilfs2, - 'xfs': FormatXfs, - 'vfat': FormatVfat, - 'ntfs-3g': FormatNtfs, - 'ntfs': FormatNtfs, - 'uefi': FormatUefi, - 'btrfs': FormatBtrfs, - 'btrfs-compress': FormatBtrfs, - 'swap': FormatSwap - } - - def __init__(self, partition, parent=None, mdirectory=None, - check=False, multipartition=None, flagRemoveDir=True, - fileSystem="ext4", isFormat=True, systemId=None, - rootLabel="Calculate", partitionTable=None, - compression=None): - """Initialize partition distributive - - mdirectory - directory for mount - check - check partition name and raise DistributiveError if partition - has bad name - """ - Distributive.__init__(self, parent=parent) - self.partition = partition - self.fileSystem = fileSystem - self.mdirectory = mdirectory or 
DefaultMountPath.DefaultMount - self.multipartition = multipartition - self.flagRemoveDir = flagRemoveDir - self.isFormat = isFormat - self.DirectoryObject = None - self.systemId = systemId - self.rootLabel = rootLabel - self.compression = compression - self.partitionTable = partitionTable - - def getType(self): - return _("partition '%s'") % self.partition - - def probe(self): - """Check directory for flash content""" - try: - pathname = self.getDirectory() - except Exception: - return False - return Linux().detectOtherShortname(pathname) - - def _mountPartition(self, partition, directory, opts=""): - """Mount partition to directory""" - self._makeDirectory(directory) - if "ntfs" in opts: - source_dir = isMount(partition) - if source_dir: - self._mountToBind(source_dir, directory) - return - self._mountToDirectory(partition, directory, opts) - - def _umountPartition(self, directory): - """Umount partition and remove directory""" - self._umountDirectory(directory) - if self.flagRemoveDir: - self._removeDirectory(directory) - - def releaseChild(self, child): - """Umount child Directory distributive""" - if isinstance(child, DirectoryDistributive): - self._umountPartition(child.directory) - child.directory = None - - def _mountBind(self, srcDirectory, destDirectory): - """Mount directory to directory""" - self._makeDirectory(destDirectory) - self._makeDirectory(srcDirectory) - self._mountToBind(srcDirectory, destDirectory) - - def postinstallMountBind(self): - """Mount bind mount point and create mount dirs""" - if self.multipartition and self.DirectoryObject: - mulipartDataBind = filter(lambda x: x[2] == "bind", - self.getMultipartData()) - dirObj = self.DirectoryObject - mdirectory = dirObj.directory - for srcDir, destDir, fileSystem, isFormat, partTable \ - in mulipartDataBind: - realDestDir = pathJoin(mdirectory, destDir) - realSrcDir = pathJoin(mdirectory, srcDir) - self._mountBind(realSrcDir, realDestDir) - isFormat = False - partObj = 
PartitionDistributive(realSrcDir, flagRemoveDir=False, - fileSystem=fileSystem, - isFormat=isFormat, - parent=dirObj) - DirectoryDistributive(realDestDir, parent=partObj) - - def getEfiDirectories(self): - return [path.join(self.getDirectory(), x) - for x in self.multipartition.getMountPoints() - if x.startswith("/boot/efi") - ] - - def getMultipartData(self): - """Get multipartition data""" - mulipartData = zip(self.multipartition.getPartitions(), - self.multipartition.getMountPoints(), - self.multipartition.getFileSystems(), - self.multipartition.getIsFormat(), - self.multipartition.getPartitionTable()) - return mulipartData - - def convertToDirectory(self): - """Convert partition to directory by mounting""" - mapFS = {'btrfs-compress': 'btrfs'} - mapOpts = {'btrfs-compress': ' -o compress=%s' % self.compression} - mdirectory = self.mdirectory - for child in self.childs: - if isinstance(child, DirectoryDistributive) and \ - mdirectory in child.directory: - return child - mdirectory = self._getMntDirectory(mdirectory) - self._mountPartition(self.partition, mdirectory, - mapOpts.get(self.fileSystem,"")) - dirObj = DirectoryDistributive(mdirectory, parent=self) - if self.multipartition: - mulipartDataNotBind = filter(lambda x: x[2] != "bind", - self.getMultipartData()) - for dev, mountPoint, fileSystem, isFormat, partTable \ - in sorted(mulipartDataNotBind, key=lambda x: x[1]): - realMountPoint = None - if fileSystem != "swap": - realMountPoint = pathJoin(mdirectory, mountPoint) - self._mountPartition( - dev, realMountPoint, - "-t %s" % mapFS.get(fileSystem,fileSystem) + - mapOpts.get(fileSystem,"")) - partObj = PartitionDistributive(dev, flagRemoveDir=False, - fileSystem=fileSystem, - isFormat=isFormat, - parent=dirObj) - if realMountPoint is not None: - DirectoryDistributive(realMountPoint, parent=partObj) - self.DirectoryObject = dirObj - return dirObj - - def formatAllPartitions(self): - """Format all partitions""" - FS, DEV, NEEDFORMAT, NEWID, PARTTABLE, MP = 
0, 1, 2, 3, 4, 5 - # get all information to matrix - dataPartitions = zip(self.multipartition.getFileSystems() + \ - [self.fileSystem], - self.multipartition.getPartitions() + \ - [self.partition], - self.multipartition.getIsFormat() + \ - [self.isFormat], - self.multipartition.getSystemId() + \ - [self.systemId], - self.multipartition.getPartitionTable() + \ - [self.partitionTable], - self.multipartition.getMountPoints() + \ - ["/"]) - # get partition which need format - formatPartitions = map(lambda x: (x[FS], x[DEV], x[NEWID], x[MP]), - filter( - lambda x: x[NEEDFORMAT] and x[FS] != "bind", - dataPartitions)) - # if has separate /boot partition - bootmp = ["/boot", "/"] - purpose_map = {"/boot": "boot", - "/var/calculate": "calculate", - "/": "root"} - # format all get partition - for fileSystem, dev, newID, mp in formatPartitions: - if fileSystem == "swap": - self.formatSwapPartition(dev) - else: - if newID == "EF00": - fileSystem = "uefi" - if dev == self.partition: - self.formatPartition(dev, format=fileSystem, - label=self.rootLabel, - purpose=purpose_map.get(mp, None)) - else: - if mp == '/var/calculate': - self.formatPartition(dev, format=fileSystem, - label="Calculate") - else: - self.formatPartition(dev, format=fileSystem, - purpose=purpose_map.get(mp, None)) - # change system id for partitions - changeidPartitions = map(lambda x: (x[NEWID], x[DEV], x[PARTTABLE]), - filter(lambda x: x[NEWID], - dataPartitions)) - for systemid, dev, partTable in changeidPartitions: - self.changeSystemID(dev, systemid, partTable) - return True - - def _checkMount(self, dev): - """Checking mount point""" - if isMount(dev): - raise DistributiveError( - _("Failed to format %s: this partition is mounted") % dev) - - def formatPartition(self, dev, format="ext4", label="", purpose=None): - """Format partition""" - if not format in self.format_map: - raise DistributiveError( - _("The specified format of '%s' is not supported") % format) - with open("/proc/swaps") as f: - if dev 
in map(lambda y: y.split(" ")[0], - filter(lambda x: x.startswith("/"),f)): - raise DistributiveError( - _("Failed to format %s: this partition is used as swap") % dev) - self._checkMount(dev) - if not os.access(dev, os.W_OK): - raise DistributiveError(_("Failed to format the partition") + - " %s:\n%s" % (dev, _("Permission denied"))) - - format_process = self.format_map[format](dev, label=label, - purpose=purpose, compression=self.compression) - if format_process.failed(): - raise DistributiveError( - _("Failed to format the partition") + - " %s:\n%s" % (dev, format_process.readerr())) - format_process.postaction() - - def performFormat(self): - """Perform format for all partition of this distributive""" - if self.multipartition: - self.formatAllPartitions() - elif self.isFormat: - self.formatPartition(self.partition, format=self.fileSystem, - label=self.rootLabel, purpose="root") - if self.systemId: - self.changeSystemID(self.partition, self.systemId, - self.partitionTable) - - def changeSystemID(self, dev, systemid, parttable): - """Change partition id, specified by systemid""" - deviceName = detectDeviceForPartition(dev) - if deviceName is None: - raise DistributiveError( - _("Failed to determine the parent device for %s") % dev) - # device hasn't any partition - elif deviceName == "": - return True - fdiskProg, gdiskProg = checkUtils('/sbin/fdisk', '/usr/sbin/gdisk') - info = device.udev.get_device_info(name=dev) - partitionNumber = (info.get('ID_PART_ENTRY_NUMBER','') or - info.get('UDISKS_PARTITION_NUMBER', '')) - devicePartitionCount = countPartitions(deviceName) - if deviceName and not partitionNumber: - raise DistributiveError( - _("Failed to determine the partition number for %s") % dev) - if parttable in ("dos", "primary", "extended", "logical"): - fdisk = process(fdiskProg, deviceName, stderr=STDOUT) - pipe = Popen([fdiskProg, deviceName], - stdin=PIPE, stdout=PIPE, stderr=PIPE) - if devicePartitionCount > 1: - pipe.stdin.write("t\n%s\n%s\nw\n" % 
(partitionNumber, - systemid)) - else: - pipe.stdin.write("t\n%s\nw\n" % systemid) - pipe.stdin.close() - pipe.wait() - elif parttable == "gpt": - pipe = Popen([gdiskProg, deviceName], - stdin=PIPE, stdout=PIPE, stderr=PIPE) - if devicePartitionCount > 1: - pipe.stdin.write("t\n%s\n%s\nw\ny\n" % (partitionNumber, - systemid)) - else: - pipe.stdin.write("t\n%s\nw\ny\n" % systemid) - pipe.stdin.close() - pipe.wait() - for waittime in (0.1, 0.2, 0.5, 1, 2, 4): - if path.exists(dev): - return True - else: - sleep(waittime) - raise DistributiveError( - _( - "Failed to found partition %s after changing the system ID") % dev) - - def formatSwapPartition(self, dev): - """Format swap partition""" - with open("/proc/swaps") as f: - if dev in map(lambda y: y.split(" ")[0], - filter(lambda x: x.startswith("/"),f)): - raise DistributiveError( - _("Failed to execute 'mkswap %s': " - "the swap partition is used " - "by the current system") % dev) - if isMount(dev): - raise DistributiveError( - _("Failed to format %s: this partition is mounted") % dev) - format_process = self.format_map["swap"](dev) - if format_process.failed(): - raise DistributiveError( - _("Failed to format the swap partition") + - " %s:\n%s" % (dev, format_process.readerr())) - - def installFrom(self, source, **kwargs): - """Install distributive to partition from source distributive""" - # get currect partition as directory - distrTo = self.convertToDirectory() - # install into directroy distributive from source - distrTo.installFrom(source, **kwargs) - - def serialize(self): - d = {'type': Distributive.Type.Partition, - 'partition': self.partition, - 'mdirectory': self.mdirectory, - 'flag_remove_directory': self.flagRemoveDir, - 'childs': [x.serialize() for x in self.childs]} - return d - - @classmethod - @Distributive.required("partition", "mdirectory", "flag_remove_directory") - def unserialize(cls, data, parent=None): - ld = PartitionDistributive(_u8(data['partition']), parent=parent) - ld.mdirectory = 
_u8(data['mdirectory']) - ld.flagRemoveDir = _u8(data['flag_remove_directory']) - ld.childs = [Distributive.unserialize(x, parent=ld) for x in - data.get('childs', [])] - return ld - - -class ArchiveDistributive(Distributive): - def __init__(self, file, parent=None, - mdirectory="/var/calculate/tmp/stage"): - Distributive.__init__(self, parent=parent) - self.file = file - self.mdirectory = mdirectory - - def getType(self): - return _("archive %s") % self.file - - def _detectArchive(self, file): - """Detect archive by "/usr/bin/file" command - - Return bzip2,gzip,7z or None - """ - file_cmd = getProgPath('/usr/bin/file') - p_file = process(file_cmd, file, stderr=process.STDOUT) - if "bzip2 compressed data" in p_file.read(): - return "bzip2" - elif "gzip compressed data" in p_file.read(): - return "gzip" - elif "7-zip archive data" in p_file.read(): - return "7z" - elif "XZ compressed data" in p_file.read(): - return "xz" - elif file and file.endswith(".tar.lzma"): - if path.exists('/usr/bin/7za'): - return "7z" - else: - return "lzma" - return None - - def _unpackArchive(self, archfile, directory): - """Unpack archive""" - # archive is exists - if not path.exists(archfile): - raise DistributiveError(_("File '%s' not found") % archfile) - # detect type archive - archiveType = self._detectArchive(archfile) - # make directory if archive was detected normally - if archiveType: - self._makeDirectory(directory) - # unpack archive - - tar_cmd = getProgPath('/bin/tar') - if archiveType == "7z": - arch_cmd = getProgPath('/usr/bin/7za') - params = ["x", "-so"] - elif archiveType == "lzma": - arch_cmd = getProgPath('/usr/bin/lzma') - params = ["-dc"] - elif archiveType == "bzip2": - arch_cmd = getProgPath('/bin/bunzip2') - params = ["-dc"] - elif archiveType == "gzip": - arch_cmd = getProgPath('/bin/gzip') - params = ["-dc"] - elif archiveType == "xz": - arch_cmd = getProgPath('/bin/xz') - params = ["-dc"] - else: - raise DistributiveError(_("Unknown archive type '%s'") % - 
archfile) - if not arch_cmd or not tar_cmd: - raise DistributiveError(_("Archive type '%s' is not supported") % - archiveType) - arch_process = process(*([arch_cmd] + params + [archfile])) - tar_process = process(tar_cmd, "xf", "-", "-C", "%s/" % directory, - stdin=arch_process) - if tar_process.failed(): - arch_error = arch_process.readerr().strip() - tar_error = tar_process.readerr().strip() - if arch_error: - message = "%s\n%s" % (arch_error, tar_error) - else: - message = tar_error - raise DistributiveError(_("Unpacking error\n%s") % message) - - def unpackTo(self, directory): - """Unpack currect archive to directory""" - self._unpackArchive(self.file, directory) - - def convertToDirectory(self): - """Get archive as directory (unpack to directory)""" - # check may be the archive already unpacked - raise DistributiveError(_("Unsupported")) - mdirectory = self.mdirectory - for child in self.childs: - if isinstance(child, DirectoryDistributive) and \ - mdirectory in child.directory: - return child - # get temporary directory for unpacking - mdirectory = self._getMntDirectory(mdirectory) - dirdist = DirectoryDistributive(mdirectory, parent=self) - self._unpackArchive(self.file, mdirectory) - return dirdist - - def releaseChild(self, child): - """Remove child Directory distributive""" - if isinstance(child, DirectoryDistributive): - self._removeDirectory(child.directory) - child.directory = None - - def packToArchive(self, directory, file): - tar_command = getProgPath("/usr/bin/tar") - p = process(tar_command, "cf", file, "-C", directory, ".", - stderr=process.STDOUT) - if p.failed(): - raise DistributiveError(_("Failed to create the archive") + - " '%s':\n%s" % (file, p.read())) - - def installFrom(self, source, **kwargs): - """Install distributive to partition from source distributive""" - # get source distributive as directory distributive - dFrom = source.convertToDirectory() - # install into directroy distributive from source - 
self.packToArchive(dFrom.directory, self.file) - - def serialize(self): - d = {'type': Distributive.Type.Archive, - 'file': self.file, - 'mdirectory': self.mdirectory, - 'childs': [x.serialize() for x in self.childs]} - return d - - @classmethod - @Distributive.required("mdirectory", "file") - def unserialize(cls, data, parent=None): - ld = ArchiveDistributive(_u8(data['file']), - mdirectory=_u8(data['mdirectory']), - parent=parent) - ld.childs = [Distributive.unserialize(x, parent=ld) - for x in data.get('childs', [])] - return ld - - -class ContainerDistributive(ArchiveDistributive): - def __init__(self, basedir, parent=None, exclude=None, - include=None, - mdirectory="/var/calculate/tmp/stage"): - Distributive.__init__(self, parent=parent) - self.basedirectory = basedir - self.file = path.join(basedir, "rootfs.tar.xz") - self.meta = path.join(basedir, "meta.tar.xz") - self.lxd = path.join(basedir, "lxd.tar.xz") - self.exclude = [] if not exclude else exclude - self.include = [] if not include else include - self.mdirectory = mdirectory - - def mtime2build(self, fn): - if path.exists(fn): - build_time = datetime.datetime.fromtimestamp(os.stat(fn).st_mtime) - return build_time.strftime("%Y%m%d") - return "" - - def packToArchive(self, directory, file): - if not path.exists(self.basedirectory): - self._makeDirectory(self.basedirectory) - tar_command = getProgPath("/usr/bin/tar") - params = ["cf", file, "-J", "-C", directory] - if self.exclude: - exclude_list = list(calculate_exclude( - directory, exclude=self.exclude, include=self.include)) - params += map(lambda x: "--exclude=./%s" % x, exclude_list) - params += ["."] - - #debug_file = "/var/calculate/tmp/rootfs.tar.xz" - #if path.exists(debug_file): - # p = process("/bin/cp", debug_file, file) - #else: - p = process(tar_command, *params, stderr=process.STDOUT) - try: - if p.failed(): - raise DistributiveError(_("Failed to create the archive") + - " '%s':\n%s" % (file, p.read())) - except BaseException: - 
removeDir(self.basedirectory) - raise - - def get_information(self): - if path.exists(self.lxd): - with xztaropen(self.lxd) as f: - try: - metadata_yaml = f.getmember("metadata.yaml") - metadata = json.load(f.extractfile(metadata_yaml)) - return { - 'os_linux_build': - metadata["calculate"]['os_linux_build'], - 'os_arch_machine': - metadata["calculate"]['os_arch_machine'], - 'os_linux_shortname': - metadata["calculate"]['os_linux_shortname'], - 'os_linux_subname': - metadata["calculate"]['os_linux_subname'], - 'cl_profile_name': - metadata["calculate"]['cl_profile_name'], - 'os_linux_name': - metadata["calculate"]['os_linux_name'], - 'os_linux_ver': - metadata["calculate"].get('os_linux_ver', "17"), - } - except (KeyError, ValueError) as e: - pass - return { - 'os_linux_build': self.mtime2build(self.file), - 'os_arch_machine': "x86_64", - 'os_linux_shortname': 'Container', - 'os_linux_subname': "", - 'cl_profile_name': "", - 'os_linux_name': _('Unknown Container') - } - - -class SquashDistributive(Distributive): - def __init__(self, file, parent=None, mdirectory=None, exclude=None, - compress="", include=None): - Distributive.__init__(self, parent=parent) - self.file = file - self.mdirectory = mdirectory or DefaultMountPath.SquashImage - self.exclude = exclude or [] - self.include = include or [] - self.compress = compress if compress and compress != "gzip" else "" - - def getType(self): - return _("squash image %s") % self.file - - def _mountSquash(self, file, directory): - """Mount squashfs to directory""" - self._makeDirectory(directory) - self._mountToDirectory(file, directory, mountopts="-o loop -t squashfs") - - def _umountSquash(self, directory): - self._umountDirectory(directory) - self._removeDirectory(directory) - - def convertToDirectory(self): - mdirectory = self.mdirectory - for child in self.childs: - if isinstance(child, DirectoryDistributive) and \ - child.directory and \ - mdirectory in child.directory: - return child - mdirectory = 
self._getMntDirectory(mdirectory) - self._mountSquash(self.file, mdirectory) - return DirectoryDistributive(mdirectory, parent=self) - - def releaseChild(self, child): - """Umount child Directory distributive""" - if isinstance(child, DirectoryDistributive): - self._umountSquash(child.directory) - child.directory = None - - def packToSquash(self, directory, file, **kwargs): - mksquashfsUtil = '/usr/bin/mksquashfs' - if not path.exists(mksquashfsUtil): - raise DistributiveError( - _("Failed to create squash") + " : %s" % - _("command '%s' not found") % mksquashfsUtil) - cmd = [mksquashfsUtil, "%s/" % directory, file] - if self.compress: - cmd += ["-comp", self.compress] - cmd += ["-progress"] - if self.exclude: - exclude_list = list(calculate_exclude(directory, exclude=self.exclude, - include=self.include)) - cmd += ["-e"] + exclude_list - # возможность использовать заранее подготовленный livecd.squashfs - if path.exists('/var/calculate/tmp/livecd.squashfs'): - os.system('cp -L /var/calculate/tmp/livecd.squashfs %s' % file) - else: - callbackProgress = kwargs.get('callbackProgress', None) - processMkSquash = PercentProgress(*cmd, stderr=STDOUT, atty=True) - for perc in processMkSquash.progress(): - if callable(callbackProgress): - callbackProgress(perc) - if processMkSquash.failed(): - raise DistributiveError(_("Failed to create squashfs") + - " '%s':\n%s" % ( - file, processMkSquash.read())) - - def installFrom(self, source, **kwargs): - """Install distributive to partition from source distributive""" - # get source distributive as directory distributive - dFrom = source.convertToDirectory() - # install into directroy distributive from source - self.packToSquash(dFrom.directory, self.file, **kwargs) - - def serialize(self): - d = {'type': Distributive.Type.SquashFS, - 'file': self.file, - 'mdirectory': self.mdirectory, - 'childs': [x.serialize() for x in self.childs]} - return d - - @classmethod - @Distributive.required("mdirectory", "file") - def unserialize(cls, 
data, parent=None): - ld = SquashDistributive(_u8(data['file']), - mdirectory=_u8(data['mdirectory']), - parent=parent) - ld.childs = [Distributive.unserialize(x, parent=ld) - for x in data.get('childs', [])] - return ld - - -class IsoDistributive(Distributive): - def __init__(self, file, parent=None, mdirectory=None, - bdirectory=None, exclude=None, compress="gzip", - vol_id="CALCULATE", include=None): - if bdirectory is None: - bdirectory = DefaultMountPath.BuildDirectory - Distributive.__init__(self, parent=parent) - self.file = file - self.vol_id = vol_id - if path.isdir(self.file): - self.mdirectory = self.file - else: - self.mdirectory = mdirectory or DefaultMountPath.IsoImage - if file == bdirectory: - self.bdirectory = file - else: - self.bdirectory = self._getMntDirectory(bdirectory) - self.exclude = [] if not exclude else exclude - self.include = [] if not include else include - self.compress = compress - self.eventPrepareIso = Signal() - - def getType(self): - tf = typeFile(magic=MAGIC_COMPRESS | MAGIC_SYMLINK | MAGIC_CONTINUE) - ftype = tf.getMType(self.file) - if "block special" in ftype: - return _("live image %s") % self.file - if path.isdir(self.file): - return _("image directory %s") % self.file - else: - return _("ISO image %s") % self.file - - def probe(self): - """Check directory for iso content""" - try: - pathname = self.getIsoContentDirectory() - except Exception: - return False - return path.exists(path.join(pathname, "syslinux")) and \ - path.exists(path.join(pathname, "isolinux")) - - def get_squash_size(self): - try: - dn = self.getIsoContentDirectory() - fn = path.join(dn, "livecd.squashfs") - return path.getsize(fn) - except Exception: - raise DistributiveError(_("Failed to get size of the squash image")) - - def _mountIso(self, file, directory): - if self.file != self.mdirectory: - self._makeDirectory(directory) - tf = typeFile(magic=MAGIC_COMPRESS | MAGIC_SYMLINK | MAGIC_CONTINUE) - ftype = tf.getMType(file) - if "block special" in 
ftype: - mopts = "-o ro" - else: - mopts = "-o ro,loop" - self._mountToDirectory(file, directory, mountopts=mopts) - - def _umountIso(self, directory): - if self.file != self.mdirectory: - self._umountDirectory(directory) - self._removeDirectory(directory) - - def convertToSquash(self): - mdirectory = self.mdirectory - for child in self.childs: - if isinstance(child, SquashDistributive) and \ - mdirectory in child.file: - return child - if self.mdirectory != self.file: - mdirectory = self._getMntDirectory(mdirectory) - self._mountIso(self.file, mdirectory) - fileLive = self._getLastLive(mdirectory) - if not fileLive: - self._umountIso(mdirectory) - raise DistributiveError(_("ISO %s contains no live image") % - self.file) - return SquashDistributive(path.join(mdirectory, fileLive), - parent=self, exclude=self.exclude, - compress=self.compress, - include=self.include) - - def getIsoContentDirectory(self): - """Return directory with content of iso image""" - squash = self.convertToSquash() - return path.dirname(squash.file) - - def releaseChild(self, child): - """Umount child Directory distributive""" - if isinstance(child, SquashDistributive): - self._umountIso(path.dirname(child.file)) - child.directory = None - - def convertToDirectory(self): - return self.convertToSquash().convertToDirectory() - - def _get_iso_util(self): - mkisoUtil = '/usr/bin/mkisofs' - if not path.exists(mkisoUtil): - raise DistributiveError( - "{errmess} : {errdescr}".format( - errmess=_("Failed to create the ISO image"), - errdescr=_("command '%s' not found") % mkisoUtil)) - return mkisoUtil - - def getMkIso(self, output=None, source=None, volume_id=None, - efi_image=None): - """ - Параметры mkisofs при создании образа с поддержкой EFI и обычного обарза - :param output: - :param source: - :param volume_id: - :param efi_image: - :return: - """ - mkisoUtil = self._get_iso_util() - if efi_image is None: - params = ["-b", "isolinux/isolinux.bin", "-no-emul-boot", - "-V", volume_id, - 
"-boot-load-size", "4", - "-boot-info-table", "-iso-level", "4", - "-hide", "boot.catalog"] - else: - params = ["-J", "-R", "-l", "-no-emul-boot", "-boot-load-size", "4", - "-udf", "-boot-info-table", "-iso-level", "4", - "-b", "isolinux/isolinux.bin", - "-V", volume_id, - "-c", "isolinux/boot.cat", "-eltorito-alt-boot", - "-no-emul-boot", "-eltorito-platform", "efi", - "-eltorito-boot", efi_image] - return [mkisoUtil] + params + ["-o", output, "%s/" % source] - - def packToIso(self, directory, file, **kwargs): - # remove previous version of iso - try: - if path.exists(file): - os.unlink(file) - except (Exception, KeyboardInterrupt) as e: - raise DistributiveError(_("Failed to remove") + - " %s:\n%s" % (file, str(e))) - - if not path.exists(path.dirname(file)): - makeDirectory(path.dirname(file)) - efi_image = 'boot/grub/efi.img' - if path.exists(path.join(directory, efi_image)): - cmd = self.getMkIso(source=directory, output=file, - volume_id=self.vol_id, efi_image=efi_image) - else: - cmd = self.getMkIso(source=directory, output=file, - volume_id=self.vol_id) - callback_progress = kwargs.get('callbackProgress', None) - processMkIsoFs = PercentProgress(*cmd, stderr=STDOUT, atty=True) - for perc in processMkIsoFs.progress(): - if callable(callback_progress): - callback_progress(perc) - if processMkIsoFs.failed(): - raise DistributiveError(_("Failed to create the ISO image") + - " %s:\n%s" % (file, processMkIsoFs.read())) - else: - return True - - def installFrom(self, source, **kwargs): - """Install distributive to partition from source distributive""" - # make temporary directory for creating iso image - isoDirectory = self.bdirectory - self._makeDirectory(isoDirectory) - - try: - # getting squash from source - liveimage = self._getLastLive(isoDirectory) - if liveimage: - curnum = self._getSquashNum(self.reLive.search(liveimage)) - liveimage = "livecd.squashfs.%d" % (curnum + 1) - else: - liveimage = "livecd.squashfs" - if isinstance(source, SquashDistributive): - 
self._copyfile(source.file, - path.join(isoDirectory, liveimage)) - else: - distDirectory = source.convertToDirectory() - squashDistr = SquashDistributive( - path.join(isoDirectory, liveimage), - exclude=self.exclude, - include=self.include, - compress=self.compress) - squashDistr.installFrom(distDirectory, **kwargs) - # prepare iso - self.eventPrepareIso.emit(isoDirectory) - # pack iso - if self.bdirectory != self.file: - self.packToIso(isoDirectory, self.file, **kwargs) - except DistributiveError as e: - raise e - except KeyboardInterrupt as e: - raise DistributiveError(_("Keyboard interruption")) - - def close(self): - # close all child - if Distributive.close(self): - # remove temporary directory - if path.lexists(self.bdirectory) and self.file != self.bdirectory: - self._removeDirectory(self.bdirectory) - - def serialize(self): - d = {'type': Distributive.Type.Iso, - 'file': self.file, - 'mdirectory': self.mdirectory, - 'bdirectory': self.bdirectory, - 'childs': [x.serialize() for x in self.childs]} - return d - - @classmethod - @Distributive.required("mdirectory", "bdirectory", "file") - def unserialize(cls, data, parent=None): - ld = IsoDistributive(_u8(data['file']), - mdirectory=_u8(data['mdirectory']), - bdirectory=_u8(data['bdirectory']), - parent=parent) - ld.childs = [Distributive.unserialize(x, parent=ld) - for x in data.get('childs', [])] - return ld - - -class FlashDistributive(PartitionDistributive): - def _checkMount(self, dev): - """Checking mount point""" - mp = isMount(dev) - if mp: - if mp.startswith('/media'): - self._umountDirectory(mp) - else: - raise DistributiveError( - _("Failed to format %s: this partition is mounted") % dev) - - def performFormat(self): - """Perform format for all partition of this distributive""" - if not self.isFormat: - dn = self.getDirectory() - clear_match = re.compile( - r"^(boot|efi|isolinux|syslinux|id.*\.uefi|" - "ldlinux.c32|ldlinux.sys|livecd|livecd.squashfs)$") - for fn in filter(clear_match.match, 
listDirectory(dn)): - full_path = path.join(dn, fn) - try: - if path.isdir(full_path): - removeDir(full_path) - else: - os.unlink(full_path) - except (OSError, IOError): - raise DistributiveError( - _("Failed to remove %s") % fn) - else: - super(FlashDistributive, self).performFormat() - - def getType(self): - return _("USB flash %s") % self.partition - - def probe(self): - """Check directory for flash content""" - try: - pathname = self.getDirectory() - except Exception: - return False - return path.exists(path.join(pathname, "syslinux")) and \ - path.exists(path.join(pathname, "isolinux")) - - def writeable(self, mp): - """ - Перемонитровать в rw, при необходимости - """ - if not check_rw(mp): - # перемонитровать в rw - p = process('/bin/mount', '-o', 'remount,rw', mp, stderr=STDOUT) - if p.success() and check_rw(mp): - return True - return False - else: - return True - - def convertToDirectory(self): - mp = isMount(self.partition) - if mp and self.writeable(mp): - d = DirectoryDistributive(mp) - d.no_unmount = True - return d - return super(FlashDistributive, self).convertToDirectory() - - def releaseChild(self, child): - """Umount child Directory distributive""" - if isinstance(child, DirectoryDistributive): - if not child.no_unmount: - self._umountPartition(child.directory) - child.directory = None - - def installFrom(self, source, **kwargs): - """Install distributive to partition from source distributive""" - # make temporary directory for creating iso image - distrTo = self.convertToDirectory() - # getting squash from source - if isinstance(source, IsoDistributive): - self.rsync(source.getIsoContentDirectory(), distrTo.directory, - byfile=lambda x: x.startswith('livecd.'), - noperm=True, **kwargs) - else: - raise DistributiveError( - _("Flash install does not support %s") % - source.__class__.__name__) - - -class PxeDistributive(Distributive): - needFormat = False - - def __init__(self, directory, parent=None): - Distributive.__init__(self, parent=parent) - 
self.directory = path.join(directory, "calculate") - self.origdir = directory - - def getDirectory(self): - return self.origdir - - def installFrom(self, source, **kwargs): - # make temporary directory for creating iso image - distrTo = self.directory - # getting squash from source - if isinstance(source, IsoDistributive): - if path.exists(self.directory): - removeDir(self.directory) - self._makeDirectory(self.directory) - self.rsync(source.getIsoContentDirectory(), distrTo, - byfile=lambda x: x.startswith('livecd.'), - **kwargs) - else: - raise DistributiveError( - _("PXE install does not support %s" % - source.__class__.__name__)) - - -class LayeredDistributive(Distributive): - """ - Каталог дистрибутива для сборки - """ - needFormat = False - - def __init__(self, mdirectory, diff_directory, image_file=None, - parent=None): - """ - :param mdirectory: результирующий каталог - :param diff_directory: каталог содержит изменения от оригинала - :param image_file: образ оригинала - :param parent: родительский дистрибутив - """ - super(LayeredDistributive, self).__init__(parent=parent) - self.mdirectory = mdirectory - self.diff_directory = diff_directory - self.workdir = "%s-work" % self.diff_directory - self.image_mount_dir = None - self.image_distro = None - self.image_file = image_file - - def post_clear(self): - if path.exists(self.diff_directory): - self._removeDirectory(self.diff_directory) - self.clear_empty_directories(self.diff_directory) - return True - - def getType(self): - return _("layered '{file} {diff}'").format( - file=self.image_file, - diff=self.diff_directory) - - def clearDiff(self): - if path.exists(self.diff_directory): - self._removeDirectory(self.diff_directory) - - def _mountLayers(self, target): - """Mount squashfs to directory""" - self._makeDirectory(target) - if not path.exists(self.diff_directory): - self._makeDirectory(self.diff_directory) - if path.exists(self.workdir): - self._removeDirectory(self.workdir) - if not 
path.exists(self.workdir): - self._makeDirectory(self.workdir) - self._mountToDirectory("none", target, mountopts=( - "-t overlay -o lowerdir=%(static)s," - "upperdir=%(upper)s,workdir=%(workdir)s" % - {"upper": self.diff_directory, - "workdir": self.workdir, - "static": self.image_mount_dir})) - - def _umountLayers(self, directory): - self._umountDirectory(directory) - self._removeDirectory(directory) - self._removeDirectory(self.workdir) - - def _mountLiveImage(self): - """Mount squashfs to directory""" - self.image_distro = IsoDistributive(file=self.image_file, parent=self) - self.image_mount_dir = ( - self.image_distro.convertToDirectory().getDirectory()) - - def _umountLiveImage(self): - if self.image_distro: - self.image_distro.close() - self.image_distro = None - - def convertToDirectory(self): - """Convert scrach directories to one directory""" - mdirectory = self.mdirectory - for child in self.childs: - if isinstance(child, DirectoryDistributive) and \ - mdirectory in child.directory: - return child - mdirectory = self._getMntDirectory(mdirectory) - self._mountLiveImage() - self._mountLayers(mdirectory) - return DirectoryDistributive(mdirectory, parent=self) - - def releaseChild(self, child): - """Unmount child Directory distributive""" - if isinstance(child, DirectoryDistributive): - self._umountLayers(child.directory) - self._umountLiveImage() - child.directory = None - - def installFrom(self, source, **kwargs): - """Install distributive to partition from source distributive""" - # make temporary directory for creating iso image - if isinstance(source, (IsoDistributive, SquashDistributive)): - self.image_file = source.file - return True - raise DistributiveError( - _("Install with layers does not support %s") % - source.__class__.__name__) - - def serialize(self): - d = {'type': Distributive.Type.Layered, - 'mdirectory': self.mdirectory, - 'diff_directory': self.diff_directory, - 'image_file': self.image_file, - 'childs': [x.serialize() for x in 
self.childs]} - if self.image_mount_dir: - d['image_mount_dir'] = self.image_mount_dir - return d - - @classmethod - @Distributive.required("mdirectory", "diff_directory", "image_file") - def unserialize(cls, data, parent=None): - ld = LayeredDistributive(_u8(data['mdirectory']), - _u8(data['diff_directory']), - _u8(data['image_file']), - parent=parent) - ld.childs = [Distributive.unserialize(x, parent=ld) - for x in data.get('childs', [])] - return ld diff --git a/libs_crutch/install/fs_manager.py b/libs_crutch/install/fs_manager.py deleted file mode 100644 index 2a4fc11..0000000 --- a/libs_crutch/install/fs_manager.py +++ /dev/null @@ -1,213 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from os import path - -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class FileSystemManager(object): - """Convert dict install option""" - - defaultOpt = ['noatime'] - defaultBindOpts = ['bind'] - supportFS = { - 'ext2': {'defaultopt': defaultOpt, - 'format': '/sbin/mkfs.ext2', - 'formatparam': '{labelparam} {device}', - 'gpt': '8300', - 'label': '-L {labelname}', - 'ssd': [], - 'msdos': '83', - 'compress': None, - 'type': ['hdd', 'usb-hdd']}, - 'ext3': {'defaultopt': defaultOpt, - 'format': '/sbin/mkfs.ext3', - 'formatparam': '{labelparam} {device}', - 'gpt': '8300', - 'label': '-L {labelname}', - 'ssd': [], - 'msdos': '83', - 'compress': None, - 'type': ['hdd', 'usb-hdd']}, - 'ext4': {'defaultopt': defaultOpt, - 'format': '/sbin/mkfs.ext4', - 'formatparam': '{labelparam} {device}', - 'gpt': '8300', - 'label': '-L {labelname}', - 'ssd': ['discard'], - 'msdos': '83', - 'compress': None, - 'type': ['hdd', 'usb-hdd']}, - 'reiserfs': {'defaultopt': defaultOpt, - 'format': '/sbin/mkfs.reiserfs', - 'formatparam': '{labelparam} -f {device}', - 'gpt': '8300', - 'label': '-l {labelname}', - 'msdos': '83', - 'compress': None, - 'ssd': [], - 'type': ['hdd', 'usb-hdd']}, - 'btrfs': {'defaultopt': defaultOpt, - 'format': '/sbin/mkfs.btrfs', - 'formatparam': '{labelparam} -f {device}', - 'gpt': '8300', - 'label': '-L {labelname}', - 'msdos': '83', - 'ssd': ['ssd', 'discard', 'space_cache'], - 'type': ['hdd', 'usb-hdd'], - 'compress': None, - 'compatible': ['btrfs-compress']}, - 'btrfs-compress': {'defaultopt': defaultOpt, - 'format': '/sbin/mkfs.btrfs', - 'orig': 'btrfs', - 'compress': "compress=%s", - 'formatparam': '{labelparam} -f {device}', - 'gpt': '8300', - 'label': '-L {labelname}', - 'msdos': '83', - 'ssd': ['ssd', 'discard', 'space_cache'], - 'type': ['hdd', 'usb-hdd'], - 'compatible': ['btrfs']}, - 'jfs': {'defaultopt': defaultOpt, - 'format': '/sbin/mkfs.jfs', - 'formatparam': 
'{labelparam} -f {device}', - 'gpt': '8300', - 'label': '-L {labelname}', - 'msdos': '83', - 'ssd': ['discard'], - 'compress': None, - 'type': ['hdd', 'usb-hdd']}, - 'xfs': {'defaultopt': defaultOpt, - 'format': '/sbin/mkfs.xfs', - 'formatparam': '{labelparam} -f {device}', - 'gpt': '8300', - 'label': '-L {labelname}', - 'msdos': '83', - 'boot': '-i sparce=0', - 'ssd': ['discard'], - 'compress': None, - 'type': ['hdd', 'usb-hdd']}, - 'f2fs': {'defaultopt': defaultOpt, - 'format': '/usr/sbin/mkfs.f2fs', - 'formatparam': '{labelparam} -f {device}', - 'gpt': '8300', - 'label': '-l {labelname}', - 'msdos': '83', - 'ssd': [], - 'compress': None, - 'type': ['hdd', 'usb-hdd']}, -# 'nilfs2': {'defaultopt': defaultOpt, -# 'format': '/sbin/mkfs.nilfs2', -# 'formatparam': '{labelparam} {device}', -# 'gpt': '8300', -# 'label': '-L {labelname}', -# 'msdos': '83', -# 'ssd': [], -# 'type': ['hdd', 'usb-hdd']}, - 'swap': {'defaultopt': ['sw'], - 'format': '/sbin/mkswap', - 'formatparam': '{device}', - 'gpt': '8200', - 'label': '', - 'ssd': [], - 'auto': False, - 'compress': None, - 'msdos': '82'}, - 'uefi': {'defaultopt': defaultOpt, - 'format': '/usr/sbin/mkfs.vfat', - 'formatparam': '{labelparam} -F 32 {device}', - 'gpt': 'EF00', - 'label': '-n {labelname}', - 'msdos': '0b', - 'ssd': [], - 'auto': False, - 'compress': None, - 'type': ['hdd']}, - 'vfat': {'defaultopt': defaultOpt, - 'format': '/usr/sbin/mkfs.vfat', - 'formatparam': '{labelparam} -F 32 {device}', - 'gpt': '0700', - 'label': '-n {labelname}', - 'msdos': '0b', - 'auto': False, - 'ssd': ['discard'], - 'compress': None, - 'type': ['flash']}, - 'ntfs': {'defaultopt': defaultOpt, - 'format': '/usr/sbin/mkfs.ntfs', - 'formatparam': '{labelparam} -FQ {device}', - 'gpt': '8300', - 'label': '-L {labelname}', - 'msdos': '7', - 'auto': False, - 'ssd': [], - 'compress': None, - 'compatible': ['ntfs-3g']}, - 'ntfs-3g': {'defaultopt': defaultOpt, - 'format': '/usr/sbin/mkfs.ntfs', - 'formatparam': '{labelparam} -FQ {device}', - 
'gpt': '8300', - 'label': '-L {labelname}', - 'ssd': [], - 'auto': False, - 'msdos': '7', - 'compress': None, - 'compatible': ['ntfs']}} - - default_param = {'defaultopt': defaultOpt, - 'gpt': '8300', - 'msdos': '83', - 'compress': None, - 'ssd': []} - - @classmethod - def firstAvailable(cls, listFS): - for fs in listFS: - if path.exists(cls.supportFS['format']): - return fs - else: - return "" - - @classmethod - def get_default_fs(cls, dv, installtype): - if installtype == 'flash': - return 'vfat' - filesystems = dv.Get('install.cl_install_fs') - for fs in filesystems: - if fs in cls.supportFS and path.exists(cls.supportFS[fs]['format']): - return fs - return 'ext3' - - @classmethod - def getDefaultOpt(cls, fs, ssd=False, compress=None): - fsopts = cls.supportFS.get(fs, cls.default_param) - return ",".join(fsopts['defaultopt'] + - (fsopts['ssd'] if ssd else []) + - ([fsopts['compress'] % compress] - if fsopts['compress'] and compress else [])) - - @classmethod - def checkFSForTypeMount(cls, fs, roottype, mp): - if mp.startswith('/boot/efi'): - if fs not in ('uefi', 'vfat'): - return False - else: - return True - return roottype in cls.supportFS.get(fs, {}).get('type', []) diff --git a/libs_crutch/install/install.py b/libs_crutch/install/install.py deleted file mode 100644 index a4a83c1..0000000 --- a/libs_crutch/install/install.py +++ /dev/null @@ -1,765 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import os -import re -import sys -import time -from os import path -from random import choice -import string -import glob -import shutil -from time import sleep -from calculate.core.server.func import MethodsInterface -from calculate.core.server.admin import Admins -from calculate.lib.utils.mount import isMount -from calculate.lib.utils.system import SystemPath -from calculate.lib.utils.files import (pathJoin, - process, listDirectory, writeFile, - checkUtils, readFile, find, copyWithPath, - readLinesFile, getProgPath) -from collections import deque -import calculate.lib.utils.device as device -from calculate.lib.utils.device import (detectDeviceForPartition, - countPartitions) -from calculate.lib.utils.partition import DiskFactory -from .datavars import DataVarsInstall - -from .distr import DistributiveError, PartitionDistributive -from subprocess import Popen, PIPE, STDOUT -from itertools import * -from calculate.lib.utils.tools import traverse - - -class InstallError(Exception): - """Installation Error""" - - -from .migrate_users import migrate - -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate, _ - -setLocalTranslate('cl_install3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class Install(MethodsInterface): - """Primary class for templates applying and system installation""" - - def __init__(self): - self.clVars = None - # refresh information about LVM - device.lvm.refresh() - # refresh information about device in udevadm info - device.udev.refresh() - - def initVars(self, datavars=None): - """Primary initialization of variables""" - if not datavars: - self.clVars = DataVarsInstall() - self.clVars.importInstall() - self.clVars.flIniFile() - else: - self.clVars = datavars - - def canInstallGrub2(self, target): - """Check that system has grub2 in current and installed 
system""" - if self.clVars.Get('os_grub2_path'): - return bool( - filter(lambda x: (x.startswith('grub-1.99') or - x.startswith('grub-2')), - listDirectory('/var/db/pkg/sys-boot'))) - return False - - def prepareBoot(self, target_distr): - """Prepare system for boot""" - if self.clVars.Get('os_install_root_type') == "flash": - self.installSyslinuxBootloader(target_distr) - else: - if self.canInstallGrub2(target_distr): - self.installGrub2Bootloader(target_distr) - else: - self.installLegacyGrubBootloader(target_distr) - return True - - def setActivePartition(self, partition): - """ - Установка активного раздела для dos и gpt таблицы разделов - """ - device_name = detectDeviceForPartition(partition) - if device_name is None: - raise DistributiveError( - _("Failed to determine the parent device for %s") % partition) - # device hasn't any partition - elif device_name == "": - return True - fdisk_cmd, gdisk_cmd, parted_cmd = checkUtils('/sbin/fdisk', - '/usr/sbin/gdisk', - '/usr/sbin/parted') - - disk = self.clVars.Select('os_install_disk_parent', - where='os_install_disk_dev', eq=partition, - limit=1) - partition_table = self.clVars.Select('os_device_table', - where='os_device_dev', - eq=disk, limit=1) - info = device.udev.get_device_info(name=partition) - partition_number = ( - info.get('ID_PART_ENTRY_NUMBER', '') or - info.get('UDISKS_PARTITION_NUMBER', '')) - - device_partition_count = countPartitions(device_name) - if device_name and not partition_number: - raise DistributiveError( - _("Failed to determine the partition number for %s") - % partition) - boot_flag = "boot" if partition_table == "dos" else "legacy_boot" - if partition_table == "dos": - fdisk = process(fdisk_cmd, "-l", device_name) - DEVICENUM, AFLAG = 0, 1 - change_active = map( - lambda x: x[DEVICENUM], - filter( - lambda x: (x[DEVICENUM] != partition_number and - x[AFLAG] == "*" or - x[DEVICENUM] == partition_number and - not x[AFLAG] == "*"), - list(map( - lambda x: [str(x[0]), x[1][1].strip()], - 
# enumerate partitions - enumerate(filter(None, map( - lambda x: x.split()[:2], - # drop string before information about partitions - dropwhile( - lambda x: not x.lstrip().startswith( - "Device"), - fdisk.readlines()))))))[1:])) - else: - parted = process(parted_cmd, "-m", device_name, "print") - DEVICENUM, FLAGS = 0, 6 - change_active = map( - lambda x: x[DEVICENUM], filter( - lambda x: (x[DEVICENUM] != partition_number and - boot_flag in x[FLAGS].strip(';').split(', ') or - x[DEVICENUM] == partition_number and - boot_flag not in - x[FLAGS].strip(';').split(', ')), - filter(lambda x: len(x) >= 7, - map(lambda x: x.split(':'), - parted.readlines()[2:])))) - if not change_active: - return True - if partition_table == "dos": - pipe = Popen([fdisk_cmd, device_name], - stdin=PIPE, stdout=PIPE, stderr=PIPE) - for part_num in change_active: - pipe.stdin.write("a\n%s\n" % part_num) - pipe.stdin.write("w\n") - pipe.stdin.close() - pipe.wait() - elif partition_table == "gpt": - pipe = Popen([gdisk_cmd, device_name], - stdin=PIPE, stdout=PIPE, stderr=PIPE) - if device_partition_count > 1: - pipe.stdin.write("x\n") - for part_num in change_active: - pipe.stdin.write("a\n%s\n2\n\n" % part_num) - pipe.stdin.write("w\nY\n") - else: - pipe.stdin.write("x\na\n2\n\nw\nY\n") - pipe.stdin.close() - pipe.wait() - for wait_time in (0.1, 0.2, 0.5, 1, 2, 4): - if path.exists(partition): - return True - else: - sleep(wait_time) - raise InstallError( - _("Failed to find partition %s after changing the activity") % - partition) - - def installSyslinuxBootloader(self, target): - """ - Установить syslinux загрузчик (используется для flash) - """ - if not self.clVars.Get('os_install_mbr'): - return - # прописать MBR - dd_process = process("/bin/dd", "if=/usr/share/syslinux/mbr.bin", - "of=%s" % self.clVars.Get('os_install_mbr')[0], - stderr=STDOUT) - if dd_process.failed(): - raise DistributiveError( - _("Failed to write the master boot record\n%s") % - dd_process.read()) - target.close() - # 
выполнить установку syslinux загрузчика - install_root_dev = self.clVars.Get('os_install_root_dev') - syslinux_process = process("/usr/bin/syslinux", - install_root_dev, stderr=STDOUT) - if syslinux_process.failed(): - raise DistributiveError(_("Failed to install syslinux\n%s") % - syslinux_process.read()) - # установить загрузочный раздел активным - return self.setActivePartition(self.clVars.Get('os_install_root_dev')) - - def installGrub2Bootloader(self, target): - """ - Установка GRUB2 загрузчика - """ - # проверить наличие grub2 - cmd_grub_install = self.clVars.Get('os_grub2_path') - if not cmd_grub_install: - raise DistributiveError(_("Failed to install the bootloader")) - process("sync").success() - # если установка GRUB2 производится на текущую систему - # загруженную в builder режиме - if (self.clVars.Get('os_install_scratch') == "on" and - self.clVars.Get('cl_action') != "system"): - prefix_boot = "/mnt/scratch" - else: - prefix_boot = "/" - # установка UEFI - if self.clVars.GetBool('os_install_uefi_set'): - self.install_grub_uefi(cmd_grub_install, prefix_boot, target) - # не UEFI установка - else: - # необходимо удалить запись /boot/efi из текущей системы - # при выполнении cl-setup-boot --mbr - if self.clVars.Get('os_uefi'): - self.update_efi_fstab() - self.mount_efi_fstab() - self.install_grub_biosboot(cmd_grub_install, prefix_boot, target) - - def install_grub_biosboot(self, cmd_grub_install, prefix_boot, target): - """ - Установить GRUB, загрузчик MBR (GPT) - """ - # получить загрузочный раздел (если есть /boot, то - # он является загрузочным иначе корень) - for boot_path in ("/boot", "/"): - boot_disk = self.clVars.Select("os_install_disk_dev", - where="os_install_disk_mount", - eq=boot_path, limit=1) - if boot_disk: - self.setActivePartition(boot_disk) - break - - chroot_cmd = getProgPath('/usr/bin/chroot') - chroot_dn = target.getDirectory() - if chroot_dn == '/': - chrooting = [] - else: - chrooting = [chroot_cmd, chroot_dn] - # если GRUB2 версии 
2.00 и выше, обычная установка требует - # параметра --target=i386-pc, иначе GRUB2 может попытаться - # прописать себя как UEFI - if filter(lambda x: "2." in x, - process(*traverse([chrooting, - cmd_grub_install, '--version']))): - platform = ["--target=i386-pc"] - else: - platform = [] - # прописать GRUB2 на все указанные диски - targetdir = target.convertToDirectory() - if not targetdir.hasSystemDirectories(): - targetdir.mountSystemDirectories() - try: - for mbr_disk in self.clVars.Get('os_install_mbr'): - grub_process = process( - *traverse([chrooting, - cmd_grub_install, - "--boot-directory=/%s" % path.relpath(target.getBootDirectory(), - chroot_dn), - mbr_disk, "--force",platform]), - stderr=STDOUT, envdict=os.environ) - if grub_process.failed(): - raise DistributiveError( - _("Failed to install the bootloader")) - finally: - if targetdir.system_mounted: - targetdir.umountSystemDirectories() - - def update_efi_fstab(self): - """ - Обновляем (или удаляем) запись /boot/efi в текущем /etc/fstab - - Удаление используется в случае, если выполняется обновление загрузчика - в не EFI системе, хотя до этого она была установлена как EFI - """ - fstab_fn = pathJoin(self.clVars.Get('cl_chroot_path'), "/etc/fstab") - re_efi_record = re.compile("^(# /boot/efi|\S+\s/boot/efi).*\n", - flags=re.M) - - efidata = self.clVars.Get('os_install_fstab_efi_conf') - - if path.exists(fstab_fn): - data = readFile(fstab_fn) - m = re_efi_record.search(data) - newdata = re_efi_record.sub("", data) - if efidata: - if m: - newdata = "%s%s\n%s" % (newdata[:m.start()], - efidata, newdata[m.start():]) - else: - newdata = "%s%s\n" % (newdata, efidata) - if data != newdata: - with writeFile(fstab_fn) as f: - f.write(newdata) - - def format_efi(self): - formatdisk = [(dev, fs) - for dev, mp, fs, make in self.clVars.ZipVars( - 'os_install_disk_dev', - 'os_install_disk_mount', - 'os_install_disk_format', - 'os_install_disk_perform_format') - if mp.startswith('/boot/efi') and make == 'on' - ] - if 
formatdisk: - self.startTask(_("Formatting the partitions"), progress=True, - num=len(formatdisk)) - i = 1 - for dev, fs in formatdisk: - self.formatPartition(dev, format="vfat") - self.setProgress(i) - i += 1 - self.endTask(True) - - def umountDirectory(self, directory): - return PartitionDistributive(None)._umountDirectory(directory) - - def makeDirectory(self, directory): - return PartitionDistributive(None)._makeDirectory(directory) - - def mountToDirectory(self, dev, mp): - return PartitionDistributive(None)._mountToDirectory(dev, mp) - - def formatPartition(self, dev, format="ext4", label=""): - return PartitionDistributive(None).formatPartition(dev, format, label) - - def mount_efi_fstab(self): - """ - Подключить /boot/efi (или отключить) согласно новым данным - - Используется как для подключения /boot/efi, так и для отключения. - """ - oldmp_efi = self.clVars.select('os_disk_mount', - os_disk_mount__startswith="/boot/efi") - for dev, mp in self.clVars.ZipVars('os_install_disk_dev', - 'os_install_disk_mount'): - if mp.startswith("/boot/efi"): - curdev = isMount(mp) - if curdev != dev: - if curdev: - self.umountDirectory(mp) - self.makeDirectory(mp) - self.mountToDirectory(dev, mp) - if mp in oldmp_efi: - oldmp_efi.remove(mp) - for mp in oldmp_efi: - self.umountDirectory(mp) - - def install_grub_uefi(self, cmd_grub_install, prefix_boot, target): - if self.clVars.Get('cl_action') != "system": - self.format_efi() - self.update_efi_fstab() - self.mount_efi_fstab() - efidirs = [x for x in self.clVars.Get('os_install_disk_mount') - if x.startswith('/boot/efi')] - if len(efidirs) > 1: - labels = ["calculate%d" % i for i in range(1, len(efidirs) + 1)] - else: - labels = ["calculate"] - for efiname, efidir in reversed(zip(labels, efidirs)): - self._install_grub_uefi(cmd_grub_install, prefix_boot, target, - efidir, efiname) - # удаляем устаревшие - efi_boot_mgr = getProgPath('/usr/sbin/efibootmgr') - p_efibootmgr = process(efi_boot_mgr, "-v") - data = 
p_efibootmgr.read() - for num, label in re.findall(r"Boot(\d+).*(calculate\d*)", data): - if label not in labels: - process(efi_boot_mgr, "-b", num, "-B").success() - - def _install_grub_uefi( - self, cmd_grub_install, prefix_boot, target, efidir, efiname): - """ - Установить grub с UEFI загрузчиком - """ - chroot_cmd = getProgPath('/usr/bin/chroot') - chroot_dn = target.getDirectory() - if chroot_dn == '/': - chrooting = [] - else: - chrooting = [chroot_cmd, chroot_dn] - grub_params = [ - "--boot-directory=/%s" % path.relpath(target.getBootDirectory(), - chroot_dn), - "--bootloader-id=%s" % efiname, - "--target=x86_64-efi", - "--efi-directory=%s" % efidir, - "--force"] - # проверяем наличие в nv-ram нужной нам записи для исключения повтора - efi_boot_mgr = getProgPath('/usr/sbin/efibootmgr') - efi_disk = self.clVars.Select("os_install_disk_dev", - where="os_install_disk_mount", - eq=efidir, limit=1) - if efi_disk: - efi_uuid = device.udev.get_device_info( - name=efi_disk).get("ID_PART_ENTRY_UUID", "") - if efi_uuid: - p_efibootmgr = process(efi_boot_mgr, "-v") - data = p_efibootmgr.read() - if re.search(r"Boot.*{label}\s.*GPT,{uuid}.*{efipath}".format( - label=efiname, - uuid=efi_uuid, - efipath=r"\\EFI\\%s\\grubx64.efi" % efiname), - data, flags=re.M | re.I): - grub_params.append("--no-nvram") - # в случае установки на usb-hdd EFI загрузчик не прописывается - # в efivars - if self.clVars.Get('os_install_root_type') == 'usb-hdd': - grub_params.append("--removable") - targetdir = target.convertToDirectory() - if not targetdir.hasSystemDirectories(): - targetdir.mountSystemDirectories() - try: - grub_process = process( - *traverse([ - chrooting, - cmd_grub_install, - grub_params]), stderr=STDOUT, - envdict=os.environ) - if grub_process.failed(): - raise DistributiveError(_("Failed to install the bootloader")) - finally: - if targetdir.system_mounted: - targetdir.umountSystemDirectories() - # проверяем успешность создания загрузочной записи - # если среди загрузочных 
записей отсутствует запись - # calculate и dmesg содержит сообщение об ошибке efivars - - # запись создать не удалось - dmesg = getProgPath('/bin/dmesg') - if efi_boot_mgr and dmesg: - if not re.search('Boot.*%s\s' % efiname, - process(efi_boot_mgr).read(), re.M) and \ - re.search('efivars.*set_variable.*failed', - process(dmesg).read(), re.M): - raise DistributiveError( - _("Failed to create the UEFI boot record")) - - def installLegacyGrubBootloader(self, target): - """ - Install legecy grub boot loader - - Perform grub installation to disk, which has root partition - """ - cmd_grub = getProgPath('/sbin/grub') - if not cmd_grub: - raise DistributiveError(_("Failed to install the bootloader")) - grub_process = process( - cmd_grub, - "--device-map=%s/boot/grub/device.map" % target.getDirectory(), - "--batch", stderr=STDOUT) - boot_disk = self.clVars.Select('os_install_disk_grub', - where='os_install_disk_mount', - _in=('/', '/boot'), - sort='DESC', limit=1) - if not boot_disk: - raise DistributiveError(_("Failed to determine the boot disk")) - self.setActivePartition(boot_disk) - for mbr_disk in self.clVars.Get('os_install_mbr'): - mbr_disk_num = self.clVars.Select("os_device_map", - where="os_device_dev", - eq=mbr_disk) - if not mbr_disk_num and mbr_disk_num != 0: - raise DistributiveError(_("Failed to determine mbr")) - for line in ("root (hd%s)" % boot_disk, - "setup (hd%d)" % mbr_disk_num, - "quit"): - grub_process.write("%s\n" % line) - if grub_process.failed(): - raise DistributiveError(_("Failed to install the bootloader")) - - def checkVideoDriver(self): - """ - Проверить видео драйвер, и если это nvidia, то - обновить маску на пакет видеодрайвера - """ - if self.clVars.Get('hr_video') != 'nvidia': - return True - mask_file = '/etc/portage/package.mask' - nvidia_mask_file = path.join(mask_file, 'nvidia') - # если package.mask является файлом - делаем его директорией - if path.isfile(mask_file): - os.rename(mask_file, mask_file + "2") - os.mkdir(mask_file, 
mode=0o755) - os.rename(mask_file + "2", path.join(mask_file, "default")) - current_nvidia_mask = readFile(nvidia_mask_file).strip() - new_nvidia_mask = self.clVars.Get('os_nvidia_mask') - if new_nvidia_mask == current_nvidia_mask: - return True - with open(nvidia_mask_file, 'w') as f: - f.write(new_nvidia_mask) - return True - - def changeScheduler(self, scheduler): - """ - Изменить текущий IO планировщик - """ - root_dev = self.clVars.Select('os_disk_parent', - where='os_disk_mount', - eq='/', limit=1) - try: - sysname = device.udev.get_syspath(name=root_dev) - if device.sysfs.exists(sysname, device.sysfs.Path.BlockScheduler): - device.sysfs.write( - sysname, device.sysfs.Path.BlockScheduler, scheduler) - except Exception: - raise InstallError(_("Unable to change the I/O scheduler")) - return True - - def clearLvm(self, devices): - dv = self.clVars - - def get_vgs(): - for pv, vg, pvbase in dv.ZipVars('os_lvm_pvname', - 'os_lvm_vgname', - 'os_lvm_pvname_parent'): - if (pv in devices or - any(x in devices for x in pvbase.split(','))): - yield vg - - remove_vgs = set(get_vgs()) - remove_pvs = set(self.clVars.select('os_lvm_pvname', - os_lvm_vgname__in=remove_vgs)) - remove_lvs = set(self.clVars.select('os_lvm_vgname', 'os_lvm_lvname', - os_lvm_vgname__in=remove_vgs)) - - failed = False - for vg, lv in sorted(remove_lvs): - failed |= not device.lvm.remove_lv(vg, lv) - for vg in sorted(remove_vgs): - failed |= not device.lvm.remove_vg(vg) - for pv in sorted(remove_pvs): - failed |= not device.lvm.remove_pv(pv) - return not failed - - def clearRaid(self, devices): - dv = self.clVars - - def generate_remove_raids(): - for raid, parents in dv.select('os_device_dev', 'os_device_parent', - os_device_type__startswith="raid"): - parents = parents.split(',') - if any(x in devices for x in parents): - yield raid, set(parents) - - remove_raids = deque(generate_remove_raids()) - - failed = False - while remove_raids: - raid, parents = remove_raids.popleft() - # если среди 
прочих удаляемых RAID есть те, которые используют - # текущий - откладываем его в конец - if any(raid in _parents for _raid, _parents in remove_raids): - remove_raids.append((raid, parents)) - else: - failed |= not device.raid.remove_raid(raid) - - return not failed - - def wait_devices(self, disks): - """ - Ожидание одного из указанных устройств - """ - for waittime in (0.1, 0.2, 0.5, 1, 2, 4): - disks = [x for x in disks if x and not path.exists(x)] - if not disks: - break - else: - sleep(waittime) - - if disks: - raise InstallError( - _("Failed to found partition %s after creating " - "the partition table") - % ",".join(disks)) - - def autopartition(self, scheme_builder, devices, disks): - """ - Авторазметка диска входящая переменная - SchemeBuilder - """ - self.clearLvm(devices) - self.clearRaid(devices) - scheme_builder.process(DiskFactory()) - self.wait_devices(disks) - return True - - def format(self, target): - """ - Форматировать разделы для 'target' дистрибутива - """ - target.performFormat() - return True - - def unpack(self, source, target, files_num): - """ - Распаковать 'source' в 'target', 'filesnum' количество копируемых файлов - """ - self.addProgress() - if files_num.isdigit(): - files_num = int(files_num) - else: - files_num = 0 - target.installFrom(source, callbackProgress=self.setProgress, - filesnum=files_num) - return True - - def copyClt(self, source, target, cltpath): - """ - Скопировать clt шаблоны из 'cltpath' в 'target' дистрибутив из - 'source' дистрибутива - """ - target_dir = target.getDirectory() - source_dir = source.getDirectory() - for f in filter(lambda x: x.endswith('.clt'), - chain(*map(lambda x: find(pathJoin(source_dir, x), - filetype="f"), - cltpath))): - copyWithPath(f, target_dir, prefix=source_dir) - return True - - def copyOther(self, source, target): - """ - Скопировать прочие настройки из текущей системы в новую - """ - file_mask = re.compile("(/etc/ssh/ssh_host_.*|" - "/root/.ssh/(id_.*|known_hosts))") - target_dir = 
target.getDirectory() - source_dir = source.getDirectory() - for f in filter(file_mask.search, - chain(*map(lambda x: find(pathJoin(source_dir, x), - filetype="f"), - ["/etc", "/root/.ssh"]))): - copyWithPath(f, target_dir, prefix=source_dir) - return True - - def rndString(self): - """ - Получить произвольную строку из 8 символов - """ - """Get random string with len 8 char""" - return "".join([choice(string.ascii_letters + string.digits) - for i in xrange(0, 8)]) - - def _getFreeDirectory(self, directory): - """ - Получить название директории - """ - new_dir_name = directory - while path.exists(new_dir_name): - new_dir_name = "%s.%s" % (directory, self.rndString()) - return new_dir_name - - def remountNTFS(self): - """ - Перемонтировать NTFS разделы для работы os-prober - """ - res = True - for disk in self.clVars.Select('os_disk_dev', - where='os_disk_format', like='ntfs'): - mount_dir = self._getFreeDirectory('/var/lib/calculate/mount.ntfs') - try: - os.mkdir(mount_dir) - except (OSError, IOError): - continue - if process('/bin/mount', disk, mount_dir).success(): - for i in (0.2, 0.5, 1, 2, 4, 5): - if process('/bin/umount', mount_dir).success(): - break - time.sleep(i) - else: - self.printWARNING(_("Unable to umount %s") % mount_dir) - res = False - try: - os.rmdir(mount_dir) - except (OSError, IOError): - self.printWARNING( - _("Unable to remove directory %s") % mount_dir) - return False - return res - - def mountBind(self, target): - """ - Подключить bind точки монтирования у дистрибутива - """ - target.postinstallMountBind() - return True - - def userMigrate(self, target, migrate_data, root_pwd): - """ - Перенос текущих пользователей в новую систему, - установка пароля пользователя root - """ - migrator = migrate(target.getDirectory()) - - if not migrator.migrate([[x[0],x[2],x[3]] for x in migrate_data if x], - root_pwd, [], [], ): - raise InstallError(_("Failed to migrate users onto the new system")) - return True - - def umount(self, distr): - """ - 
Отключить дистрибутив - """ - distr.close() - return True - - def drop_xorg_logs(self): - """ - Сбросить логи загрузки xorg сервера - """ - for fn in glob.glob("/var/log/Xorg.*.log"): - new_name = "%s.old" % fn - if path.exists(new_name): - os.unlink(new_name) - shutil.move(fn, new_name) - return True - - def update_admin_ini(self): - """ - Обновить список локальных администраторов при установке - """ - aliases = { - 'update': 'system_update', - } - install_admin = Admins(self.clVars, chroot=True) - install_admin.clear() - for k,v in self.clVars.select('install.cl_migrate_user', - 'install.cl_migrate_admin', - install_cl_migrate_admin__ne=""): - install_admin[k] = aliases.get(v,v) - install_admin.save() - return True - - def init_themes(self): - self.clVars.Get('cl_splash_image_hash') - self.clVars.Get('cl_grub_image_hash') - return True diff --git a/libs_crutch/install/migrate_users.py b/libs_crutch/install/migrate_users.py deleted file mode 100644 index 4796836..0000000 --- a/libs_crutch/install/migrate_users.py +++ /dev/null @@ -1,580 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os, sys, re, time -from calculate.lib.encrypt import encrypt -from os import path -from calculate.lib.utils.files import pathJoin, FilePermission - -from calculate.lib.cl_lang import setLocalTranslate, _ -from functools import reduce - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class MigrationError(Exception): - pass - - -class _shareData(object): - """Share class""" - _reNumb = re.compile("^\d+$") - - def getDataInFile(self, fileName='', lenData=7): - """Get data list from file""" - with open(fileName) as f: - return map(lambda x: x[:lenData], - filter(lambda x: len(x) >= lenData, - map(lambda x: x.rstrip().split(":"), f))) - - -class migrateGroups(_shareData): - """Migrate group to new system""" - - maxGid = 65000 - minGid = 1000 - fileGroups = "/etc/group" - - def __init__(self, prefixNewSystem): - self.prefixNewSystem = prefixNewSystem - - def getData(self, fileName=''): - if not fileName: - fileName = self.fileGroups - return self.getDataInFile(fileName=fileName, lenData=4) - - def getThisData(self): - """Get data migrate groups in this system""" - return filter(lambda x: \ - self._reNumb.match(x[2]) and self.minGid <= int( - x[2]) <= self.maxGid, - self.getData()) - - def getNewData(self): - """Get data migrate groups in new system""" - fileName = pathJoin(self.prefixNewSystem, self.fileGroups) - return filter(lambda x: \ - self._reNumb.match(x[2]) and self.minGid <= int( - x[2]) <= self.maxGid, - self.getData(fileName=fileName)) - - def getNewDataSystemGroups(self): - """Get data system groups in new system""" - fileName = pathJoin(self.prefixNewSystem, self.fileGroups) - return filter(lambda x: \ - self._reNumb.match(x[2]) and \ - (int(x[2]) > self.maxGid or int(x[2]) < self.minGid), - self.getData(fileName=fileName)) - - def getNewProcessedData(self): - """Get processed data migrate groups in new system""" - # data this Group no users - dataThisGroupsNoUsers = map(lambda x: x[:3] + [""], self.getThisData()) - dataNewGroups = 
self.getNewData() - namesNewGroups = map(lambda x: x[0], dataNewGroups) - gidsNewGroups = map(lambda x: x[2], dataNewGroups) - for data in dataThisGroupsNoUsers: - nameGroup = data[0] - gid = data[2] - if nameGroup in namesNewGroups: - dataNewGroups = filter(lambda x: x[0] != nameGroup, - dataNewGroups) - namesNewGroups = map(lambda x: x[0], dataNewGroups) - gidsNewGroups = map(lambda x: x[2], dataNewGroups) - if gid in gidsNewGroups: - dataNewGroups = filter(lambda x: x[2] != gid, dataNewGroups) - namesNewGroups = map(lambda x: x[0], dataNewGroups) - gidsNewGroups = map(lambda x: x[2], dataNewGroups) - systemGroupsNewData = self.getNewDataSystemGroups() - return systemGroupsNewData, dataNewGroups, dataThisGroupsNoUsers - - -class migrateUsers(_shareData): - """Migrate users to new system""" - - maxId = 65000 - minId = 1000 - filePasswd = "/etc/passwd" - - def __init__(self, prefixNewSystem): - self.prefixNewSystem = prefixNewSystem - - def getData(self, fileName=''): - if not fileName: - fileName = self.filePasswd - return self.getDataInFile(fileName=fileName, lenData=7) - - def getThisData(self): - """Get data migrate users in this system""" - return filter(lambda x: \ - self._reNumb.match(x[2]) and self.minId <= int( - x[2]) <= self.maxId, - self.getData()) - - def getNewData(self): - """Get data migrate users in new system""" - fileName = pathJoin(self.prefixNewSystem, self.filePasswd) - return filter(lambda x: \ - self._reNumb.match(x[2]) and self.minId <= int( - x[2]) <= self.maxId, - self.getData(fileName=fileName)) - - def getNewDataSystemUsers(self): - """Get data system users in new system""" - fileName = pathJoin(self.prefixNewSystem, self.filePasswd) - return filter(lambda x: \ - self._reNumb.match(x[2]) and \ - (int(x[2] > self.maxId) or int(x[2]) < self.minId), - self.getData(fileName=fileName)) - - def getThisDataSystemUsers(self): - """Get data system users in this system""" - fileName = self.filePasswd - return filter(lambda x: \ - 
self._reNumb.match(x[2]) and \ - (int(x[2] > self.maxId) or int(x[2]) < self.minId), - self.getData(fileName=fileName)) - - def getNewProcessedData(self, migrateUsers=()): - """Get processed data migrate users in new system""" - dataThisUsers = self.getThisData() - if migrateUsers: - dataThisUsers = filter(lambda x: x[0] in migrateUsers, - dataThisUsers) - dataNewUsers = self.getNewData() - namesNewUsers = map(lambda x: x[0], dataNewUsers) - uidsNewUsers = map(lambda x: x[2], dataNewUsers) - for data in dataThisUsers: - nameUser = data[0] - uid = data[2] - if nameUser in namesNewUsers: - dataNewUsers = filter(lambda x: x[0] != nameUser, dataNewUsers) - namesNewUsers = map(lambda x: x[0], dataNewUsers) - uidsNewUsers = map(lambda x: x[2], dataNewUsers) - if uid in uidsNewUsers: - dataNewUsers = filter(lambda x: x[2] != uid, dataNewUsers) - namesNewUsers = map(lambda x: x[0], dataNewUsers) - uidsNewUsers = map(lambda x: x[2], dataNewUsers) - systemUsersNewData = self.getNewDataSystemUsers() - systemUsersNewNames = map(lambda x: x[0], systemUsersNewData) - systemUsersNewUids = map(lambda x: x[2], systemUsersNewData) - systemUsersThisData = [] - if migrateUsers: - # this users < minId - systemUsersThisData = filter(lambda x: int(x[2]) < self.minId and \ - x[0] in migrateUsers, - self.getThisDataSystemUsers()) - for data in systemUsersThisData: - nameUser = data[0] - uid = data[2] - if nameUser in systemUsersNewNames: - systemUsersNewData = filter(lambda x: x[0] != nameUser, - systemUsersNewData) - systemUsersNewNames = map(lambda x: x[0], - systemUsersNewData) - systemUsersNewUids = map(lambda x: x[2], - systemUsersNewData) - if uid in systemUsersNewUids: - systemUsersNewData = filter(lambda x: x[2] != uid, - systemUsersNewData) - systemUsersNewNames = map(lambda x: x[0], - systemUsersNewData) - systemUsersNewUids = map(lambda x: x[2], - systemUsersNewData) - return (systemUsersThisData, systemUsersNewData, - dataNewUsers, dataThisUsers) - - -class 
migrateShadow(_shareData): - """Migrate users to new system""" - - fileShadow = "/etc/shadow" - - def __init__(self, sysThisMigrateUsers, sysNewMigrateUsers, newMigrateUsers, - thisMigrateUsers, prefixNewSystem): - self.prefixNewSystem = prefixNewSystem - self.sysThisMigrateUsers = sysThisMigrateUsers - self.sysNewMigrateUsers = sysNewMigrateUsers - self.newMigrateUsers = newMigrateUsers - self.thisMigrateUsers = thisMigrateUsers - self.newFileName = pathJoin(self.prefixNewSystem, self.fileShadow) - - def getData(self, fileName=''): - if not fileName: - fileName = self.fileShadow - return self.getDataInFile(fileName=fileName, lenData=9) - - def getThisData(self): - """Get data migrate users in this system""" - return filter(lambda x: x[0] in self.thisMigrateUsers, self.getData()) - - def getNewData(self): - """Get data migrate users in new system""" - return filter(lambda x: x[0] in self.newMigrateUsers, - self.getData(fileName=self.newFileName)) - - def getNewDataSystemShadow(self): - """Get data system users in new system""" - return filter(lambda x: x[0] in self.sysNewMigrateUsers, - self.getData(fileName=self.newFileName)) - - def getThisDataSystemShadow(self): - """Get data system users in this system""" - return filter(lambda x: x[0] in self.sysThisMigrateUsers, - self.getData()) - - def getNewProcessedData(self): - """Get processed data migrate shadow in new system""" - dataThisShadow = self.getThisData() - dataNewShadow = self.getNewData() - namesNewShadow = map(lambda x: x[0], dataNewShadow) - for data in dataThisShadow: - nameUser = data[0] - if nameUser in namesNewShadow: - dataNewShadow = filter(lambda x: x[0] != nameUser, - dataNewShadow) - namesNewShadow = map(lambda x: x[0], dataNewShadow) - systemShadowNewData = self.getNewDataSystemShadow() - systemShadowThisData = self.getThisDataSystemShadow() - systemShadowNewNames = map(lambda x: x[0], systemShadowNewData) - for data in systemShadowThisData: - nameUser = data[0] - if nameUser in 
systemShadowNewNames: - systemShadowNewData = filter(lambda x: x[0] != nameUser, - systemShadowNewData) - systemShadowNewNames = map(lambda x: x[0], systemShadowNewData) - return (systemShadowThisData, systemShadowNewData, dataNewShadow, - dataThisShadow) - - -class migrate(object): - """Migrate users ang groups to new system""" - templateShadow = "%(user)s:%(hash)s:%(days)s:0:%(maxDays)s:%(warnDays)s:::" - templateUser = "%(user)s:x:%(id)s:%(gid)s::/home/%(user)s:/bin/bash" - templateGroup = "%(group)s:x:%(gid)s:" - dataUsers = [] - dataGroups = [] - dataShadow = [] - maxId = 65000 - minId = 1000 - maxGid = 65000 - minGid = 1000 - minSysId = 1000 - - newUserGroups = ["audio", "cdrom", "cdrw", "games", "lp", "lpadmin", - "plugdev", "scanner" "usb", "users", "video", "wheel"] - - def __init__(self, prefixNewSystem): - self.prefixNewSystem = prefixNewSystem - self.objGroups = migrateGroups(self.prefixNewSystem) - self.objUsers = migrateUsers(self.prefixNewSystem) - - def addThisUsersToGroups(self, users): - """Add users to groups""" - thisGroupsData = self.objGroups.getData() - thisGroupsData = map(lambda x: (x[0], x[3].split(',')), - thisGroupsData) - dataGroups = [] - for data in self.dataGroups: - groupName = data[0] - thisUsersInGroup = map(lambda x: x[1], - filter(lambda x: x[0] == groupName, - thisGroupsData)) - thisUsersInGroup = reduce(lambda x, y: x + y, thisUsersInGroup, []) - addUsers = list(set(thisUsersInGroup) & set(users)) - if addUsers: - newUsersInGroup = data[3].split(',') - for user in addUsers: - if not user in newUsersInGroup: - newUsersInGroup.append(user) - data[3] = ','.join(filter(lambda x: x, newUsersInGroup)) - dataGroups.append(data) - self.dataGroups = dataGroups - return self.dataGroups - - def getNextUid(self): - """get next uid""" - listUid = map(lambda x: int(x[2]), - filter(lambda x: \ - self.objUsers._reNumb.match(x[2]) and \ - self.minId <= int(x[2]) <= self.maxId, - self.dataUsers)) - if listUid: - return max(listUid) + 1 - return 
self.minId - - def getNextGid(self): - """get next gid""" - listGid = map(lambda x: int(x[2]), - filter(lambda x: \ - self.objGroups._reNumb.match(x[2]) and \ - self.minGid <= int(x[2]) <= self.maxGid, - self.dataGroups)) - if listGid: - return max(listGid) + 1 - return self.minGid - - def isSystemUser(self, userName): - if filter(lambda x: x[0] == userName and int(x[2]) <= self.minSysId, - self.dataUsers): - return True - return False - - def addUserToGroups(self, userName, userGroups): - """Add users to groups""" - - dataGroups = [] - for data in self.dataGroups: - groupName = data[0] - if groupName in userGroups: - usersInGroup = data[3].split(',') - if not userName in usersInGroup: - usersInGroup.append(userName) - data[3] = ','.join(filter(lambda x: x, usersInGroup)) - dataGroups.append(data) - self.dataGroups = dataGroups - return self.dataGroups - - def addUserToDefaultGroups(self, userName): - """Add users to default groups""" - return self.addUserToGroups(userName, self.newUserGroups) - - def changePassword(self, userName, pwdHash, maxDays="99999", warnDays="7"): - if not filter(lambda x: x[0] == userName, self.dataUsers): - raise MigrationError(_("User %s not found") % userName) - indexFoundUser = False - for i, data in enumerate(self.dataShadow): - if data[0] == userName: - indexFoundUser = i - break - if callable(pwdHash): - pwdHash = pwdHash(userName) - if pwdHash is False: - return False - shadowDict = {"user": userName, - "hash": pwdHash, - "days": str(int(time.time() / 86400)), - "maxDays": maxDays, - "warnDays": warnDays} - shadowLine = self.templateShadow % shadowDict - shadowList = shadowLine.split(":") - if indexFoundUser is False: - self.dataShadow.append(shadowList) - else: - self.dataShadow[indexFoundUser] = shadowList - return True - - def addUser(self, userName, userGroups, pwdHash): - """Add user""" - # find user - if filter(lambda x: x[0] == userName, self.dataUsers): - return "EXISTS" - else: - strUid = str(self.getNextUid()) - strGid = 
str(self.getNextGid()) - groupName = userName - dataExistGroup = filter(lambda x: x[0] == groupName, - self.dataGroups) - if dataExistGroup: - strGid = dataExistGroup[0][2] - else: - # add group - groupDict = {"group": groupName, "gid": strGid} - groupLine = self.templateGroup % groupDict - groupList = groupLine.split(":") - self.dataGroups.append(groupList) - # add user - userDict = {"user": userName, "id": strUid, "gid": strGid} - userline = self.templateUser % userDict - userList = userline.split(":") - self.dataUsers.append(userList) - # add shadow - if not self.changePassword(userName, pwdHash): - return False - # add user to default groups - self.addUserToGroups(userName, userGroups) - return True - - def checkPermFiles(self): - """Check permission files""" - checkThisFiles = [migrateGroups.fileGroups, migrateUsers.filePasswd, - migrateShadow.fileShadow] - checkNewFiles = map(lambda x: pathJoin(self.prefixNewSystem, x), - checkThisFiles) - parentDir = lambda x: "".join(os.path.split(x)[:-1]) - notRead = lambda x: not os.access(x, os.R_OK) - notWrite = lambda x: not os.access(x, os.W_OK) - filesNotRead = filter(notRead, checkThisFiles) - if filesNotRead: - raise MigrationError(_("Failed to read files") + _(": ") + - ", ".join(filesNotRead)) - filesNotWrite = filter(notWrite, checkNewFiles) - if filesNotWrite: - raise MigrationError(_("Failed to write to files") + _(": ") + - ", ".join(filesNotWrite)) - # Check permissions backup files - checkNewBackupFiles = map( - lambda x: pathJoin(self.prefixNewSystem, x + "-"), - checkThisFiles) - notWriteBackup = lambda x: not os.access(x, os.W_OK) and \ - (os.path.exists(x) or - not os.access(os.path.dirname(x), os.W_OK)) - filesNotWrite = filter(notWriteBackup, checkNewBackupFiles) - if filesNotWrite: - raise MigrationError(_("Failed to write to files") + _(": ") + - ", ".join(filesNotWrite)) - return True - - def saveNewFiles(self): - """Save /etc/passwd /etc/group /etc/shadow to new system""" - listFilesThisSystem = 
[migrateGroups.fileGroups, - migrateUsers.filePasswd, - migrateShadow.fileShadow] - listFiles = map(lambda x: (pathJoin(self.prefixNewSystem, x), - pathJoin(self.prefixNewSystem, x + "-")), - listFilesThisSystem) - listData = [self.dataGroups, self.dataUsers, self.dataShadow] - allData = zip(listFiles, listData) - for fileNames, data in allData: - buff = "\n".join(map(lambda x: ":".join(x), data)) + "\n" - for fileName in fileNames: - FD = open(fileName, "w+") - FD.write(buff) - FD.close() - - def createUserGuest(self): - if filter(lambda x: int(x[2]) >= self.minSysId, self.dataUsers): - return True - else: - # add user guest - pwd = "guest" - encryptObj = encrypt() - pwdHash = encryptObj.getHashPasswd(pwd, "shadow_ssha256") - if pwdHash is False: - return False - if not self.addUser("guest", "guest", pwdHash): - return False - return True - - def createHomeDirs(self, addUsersList, existsMigrateUsers): - """Create home directories for all migreate users""" - - def createHome(userdata): - perms = FilePermission.UserAll - if not userdata[5].startswith('/dev/'): - homedir = pathJoin(self.prefixNewSystem, userdata[5]) - if not path.exists(homedir): - os.mkdir(homedir) - os.chown(homedir, int(userdata[2]), int(userdata[3])) - os.chmod(homedir, perms) - users = list( - set(map(lambda x: x[0], - addUsersList) + existsMigrateUsers) - {"root"}) - try: - map(createHome, filter(lambda x: x[0] in users, self.dataUsers)) - except Exception as e: - raise MigrationError( - _("Failed to create the user's home directory")) - - def migrate(self, addUsersList=None, rootPwd="", - pwdUsersList=None, existsMigrateUsers=None): - """Migrate users ang groups to new system""" - if addUsersList is None: - addUsersList = [] - elif not any(addUsersList): - addUsersList = [] - if pwdUsersList is None: - pwdUsersList = [] - if existsMigrateUsers is None: - existsMigrateUsers = [] - if not self.checkPermFiles(): - return False - migrateUsers = (["root"] + - map(lambda x: x[0], addUsersList + 
pwdUsersList)) - for existMigrUser in existsMigrateUsers: - if existMigrUser not in migrateUsers: - migrateUsers.append(existMigrUser) - # add root to migrate users - dataUsers = self.objUsers.getNewProcessedData(migrateUsers) - dataGroups = self.objGroups.getNewProcessedData() - thisSystemUsers, newSystemUsers, newUsers, thisUsers = \ - map(lambda x: map(lambda y: y[0], x), dataUsers) - objShadow = migrateShadow(thisSystemUsers, newSystemUsers, newUsers, - thisUsers, self.prefixNewSystem) - dataShadow = objShadow.getNewProcessedData() - self.dataGroups = reduce(lambda x, y: x + y, dataGroups, []) - self.dataUsers = reduce(lambda x, y: x + y, dataUsers, []) - self.dataShadow = reduce(lambda x, y: x + y, dataShadow, []) - self.addThisUsersToGroups(thisUsers) - for userName, pwdHash, maxDays, warnDays in pwdUsersList: - if not self.changePassword(userName, pwdHash, - maxDays=maxDays, - warnDays=warnDays): - return False - for userName, userGroups, pwdHash in [ - ["root", [], rootPwd]] + addUsersList: - # if self.isSystemUser(userName): - # raise MigrationError(_("%s is a system user") %userName) - ret = self.addUser(userName, userGroups, pwdHash) - if not ret: - return False - elif ret == "EXISTS": - if not self.changePassword(userName, pwdHash): - return False - if not newUsers or not thisUsers: - # add user guest - if not self.createUserGuest(): - return False - self.saveNewFiles() - self.createHomeDirs(addUsersList, existsMigrateUsers) - return True - - -class currentUsers(migrate): - """Current users""" - - def __init__(self): - super(currentUsers, self).__init__('/') - - def addUsers(self, *users_passwd): - """Added users and groups to current system""" - if not self.checkPermFiles(): - return False - getDataInFile = _shareData().getDataInFile - self.dataUsers = getDataInFile(fileName=migrateUsers.filePasswd, - lenData=7) - self.dataGroups = getDataInFile(fileName=migrateGroups.fileGroups, - lenData=4) - self.dataShadow = 
getDataInFile(fileName=migrateShadow.fileShadow, - lenData=9) - getHash = encrypt().getHashPasswd - for userName, pwd in zip(users_passwd[0::2], - users_passwd[1::2]): - pwdHash = getHash(pwd, "shadow_ssha256") - if not self.addUser(userName, userName, pwdHash): - return False - self.saveNewFiles() - return True - - def hasUsers(self, *users): - """Is users in system""" - if not self.checkPermFiles(): - return False - getDataInFile = _shareData().getDataInFile - self.dataUsers = map(lambda x: x[0], - getDataInFile(fileName=migrateUsers.filePasswd, - lenData=7)) - return set(self.dataUsers) >= set(users) diff --git a/libs_crutch/install/utils/__init__.py b/libs_crutch/install/utils/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/install/utils/cl_install.py b/libs_crutch/install/utils/cl_install.py deleted file mode 100644 index f04c6db..0000000 --- a/libs_crutch/install/utils/cl_install.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from calculate.core.server.func import Action, Tasks -from calculate.install.distr import DistributiveError -from calculate.install.migrate_users import MigrationError -from calculate.install.variables.autopartition import AutopartitionError -from calculate.lib.utils.partition import VolumesError -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate, _ -from calculate.lib.cl_template import TemplatesError -from calculate.lib.utils.files import FilesError -from calculate.install.install import InstallError - -setLocalTranslate('cl_install3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClInstallAction(Action): - """ - Установка системы - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError, MigrationError, TemplatesError, VolumesError, - InstallError, AutopartitionError, DistributiveError) - successMessage = None - failedMessage = None - interruptMessage = None - - # список задач для действия - tasks = [ - # авторазметка диска - {'name': 'autopartition', - 'message': __("Creating a new partition table"), - 'method': "Install.autopartition(cl_autopartition_scheme_builder," - "cl_autopartition_device," - "cl_autopartition_disk_dev)", - 'condition': lambda dv: dv.Get('cl_autopartition_set') == 'on'}, - # форматирование разделов на которые устанавливается дистрибутив - {'name': 'format', - 'message': __("Formatting the partitions"), - 'method': 'Install.format(cl_target)', - 'condition': lambda dv: dv.Get('cl_target').needFormat}, - # распаковка дистрибутива - {'name': 'unpack', - 'message': __("Unpacking the system image to the target"), - 'method': 'Install.unpack(cl_image,cl_target,os_install_linux_files)', - }, - # отметка что установка идет на HDD - {'name': 'hdd', - 'condition': lambda dv: dv.Get( - 'os_install_root_type') != 'flash' and - dv.Get('os_install_pxe') == 'off'}, - # копирование clt шаблонов - {'name': 'hdd:copy_clt', - 'message': __("Copying clt templates to the new 
system"), - 'method': 'Install.copyClt(cl_source,cl_target,cl_template_clt_path)' - }, - # копирование прочих файлов - {'name': 'hdd:copy_other', - 'message': __("Copying other settings to the new system"), - 'method': 'Install.copyOther(cl_source,cl_target)', - 'condition': lambda dv: dv.Get('os_root_type') != "livecd", - }, - # перемонтирование ntfs для определения windows - {'name': 'hdd:remount_ntfs', - 'method': 'Install.remountNTFS()', - 'essential': False, - }, - # наложение шаблонов при установке на жесткий диск - {'name': 'hdd:apply_templates', - 'message': __("Updating the configuration"), - # наложить шаблоны в установленный дистрибутив, включая clt шаблоны - # без использования фильтров по clt шаблонам - 'method': 'Install.applyTemplates(cl_target,True,False,None)', - }, - # наложение шаблонов при PXE установке - {'name': 'apply_templates_pxe', - 'message': __("Configuring PXE install"), - # наложить шаблоны в установленный дистрибутив, исключая clt - # без использования фильтров по clt шаблонам - 'method': 'Install.applyTemplates(None,False,False,None)', - 'condition': lambda dv: dv.Get('os_install_pxe') == 'on' - }, - # наложение шаблонов при установке на flash диск - {'name': 'apply_templates_flash', - 'message': __("Configuring Flash install"), - # наложить шаблоны в установленный дистрибутив, исключая clt - # без использования фильтров по clt шаблонам - 'method': 'Install.applyTemplates(None,False,False,cl_target)', - 'condition': lambda dv: dv.Get('os_install_root_type') == "flash" - }, - # подключить точки монтирования bind - {'name': 'hdd:mount_bind', - 'message': __("Post-install configuration"), - 'method': "Install.mountBind(cl_target)", - }, - # перенос пользователей - {'name': 'hdd:user_migrate', - 'message': __("Migrating users"), - 'method': 'Install.userMigrate(cl_target,cl_migrate_data,' - 'cl_migrate_root_pwd)', - }, - # прописывание локальных администраторов - {'name': 'hdd:write_admins', - 'method': 'Install.update_admin_ini()', - }, 
- # подготовка загрузчика - {'name': 'prepare_boot', - 'message': __("Preparing the system for reboot"), - 'method': 'Install.prepareBoot(cl_target)', - 'condition': lambda dv: (dv.Get('os_install_mbr') or - dv.Get('os_install_uefi_set') == 'on') and - dv.Get('os_install_pxe') == 'off'}, - # отключение исходного дистрибутива - {'name': 'umount_source', - 'message': __("Letting go the source distribution"), - 'method': 'Install.umount(cl_image)', - 'condition': lambda dv: dv.Get('cl_image') and dv.Get( - 'cl_image').childs, - 'depend': Tasks.has("unpack")}, - # отключение установленного дистрибутива - {'name': 'umount_target', - 'message': __("Unmounting the target system volume"), - 'method': 'Install.umount(cl_target)', - 'condition': lambda dv: dv.Get('cl_target') and dv.Get( - 'cl_target').childs, - 'depend': Tasks.has("unpack")}, - # вывести сообщение в случае успеха - {'name': 'success', - 'message': __("System successfully installed!")}, - # вывести сообщение в случае ошибки - {'name': 'failed', - 'message': __("Failed to install the system!"), - 'depend': (Tasks.failed() & Tasks.hasnot("interrupt"))}, - # вывести сообщение о том, что установка прервана пользователем - {'name': 'intmessage', - 'message': __("Installation manually interrupted"), - 'depend': Tasks.has("interrupt")}, - # подтверждение на перезагрузку - {'name': 'ask_reboot', - 'message': __("Would you like to reboot your computer " - "now to complete the installation?"), - 'confirm': 'no', - 'condition': lambda Get: (Get('os_install_pxe') == 'off' and - Get('os_install_root_type') != "flash") - }, - # перезагрузить компьютер - {'name': 'reboot', - 'message': __("System reboot"), - 'command': '/sbin/reboot', - 'depend': Tasks.result("ask_reboot", eq='yes') - } - ] diff --git a/libs_crutch/install/utils/cl_setup.py b/libs_crutch/install/utils/cl_setup.py deleted file mode 100644 index f419567..0000000 --- a/libs_crutch/install/utils/cl_setup.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 
-*- - -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.core.server.func import Action -from calculate.install.distr import DistributiveError -from calculate.install.migrate_users import MigrationError -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate, _ -from calculate.lib.cl_template import TemplatesError -from calculate.lib.utils.files import FilesError -from calculate.lib.utils.portage import isPkgInstalled -from calculate.install.install import InstallError -from calculate.install.variables.autopartition import AutopartitionError - -setLocalTranslate('cl_install3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - - -class ClSetupVideoAction(Action): - """ - Действие для настройки параметров видео - """ - # ошибки, которые отображаются без подробностей - native_error = (FilesError, MigrationError, TemplatesError, - InstallError, AutopartitionError, DistributiveError) - - templateTaskMessage = __("Video settings are being configured") - successMessage = __("Video settings configured!") - failedMessage = __("Failed to configure the video settings!") - interruptMessage = __("Configuration manually interrupted") - - prev_tasks = [ - # проверить и настроить параметры для nvidia драйвера - {'name': 'drop_xorg_log', - 'method': 'Install.drop_xorg_logs()', - 'condition': lambda Get: ( - Get('os_install_x11_video_drv_prev') - != 
Get('os_install_x11_video_drv')) - }, - ] - - addon_tasks = [ - {'name': 'check_video', - 'message': __("Checking the video driver"), - 'method': 'Install.checkVideoDriver()', - 'condition': lambda: isPkgInstalled('xorg-server') - }, - {'name': 'setupvideo', - 'condition': lambda Get: Get('cl_setup') == 'video' - }, - {'name': 'setupvideo:reboot', - 'warning': __("To apply the changes, reboot the system"), - 'condition': lambda Get: ((Get('os_x11_video_drv') != - Get('os_install_x11_video_drv') and - (Get('os_x11_video_drv') in Get( - 'os_x11_kms_video_drv') - or Get('os_install_x11_video_drv') - in Get('os_x11_kms_video_drv'))) - and Get('os_install_root_type') != 'livecd') - }, - {'name': 'setupvideo:restart', - 'warning': __("To apply the changes, restart the X server"), - 'condition': lambda Get: ((Get('os_x11_video_drv') != - Get('os_install_x11_video_drv') and - (not Get('os_x11_video_drv') in - Get('os_x11_kms_video_drv') and - not Get('os_install_x11_video_drv') - in Get('os_x11_kms_video_drv'))) - and Get('os_install_root_type') != 'livecd') - } - ] - - def __init__(self): - # список задач для действия - self.tasks = self.prev_tasks + [ - {'name': 'apply_templates', - 'message': self.templateTaskMessage, - # наложить шаблоны на текущий дистрибутив, включая clt шаблоны - # без использования фильтров по clt шаблонам - 'method': 'Install.applyTemplates(cl_source,cl_template_clt_set,' - 'cl_merge_set,None)', - }] - # выполнить дополнительные задачи - self.tasks.extend(self.addon_tasks) - Action.__init__(self) - - -class ClSetupSystemAction(ClSetupVideoAction): - """ - Объект настройки всех параметров системы - """ - prev_tasks = [] - templateTaskMessage = __("The system is being configured") - successMessage = __("System configured!") - failedMessage = __("Failed to configure the system!") - - -class ClSetupAudioAction(ClSetupSystemAction): - """ - Действие для настройки аудио параметров - """ - addon_tasks = [] - prev_tasks = [] - templateTaskMessage = 
__("The audio settings are being configured") - successMessage = __("Audio settings configured!") - failedMessage = __("Failed to configure the audio parameters!") - - -class ClSetupThemesAction(ClSetupSystemAction): - """ - Действие для настройки тем - """ - addon_tasks = [] - prev_tasks = [ - {'name': 'init_themes', - 'method': 'Install.init_themes()' - }, - ] - templateTaskMessage = __("The themes are being configured") - successMessage = __("Themes configured!") - failedMessage = __("Theme configuration failed!") - - -class ClSetupLocaleAction(ClSetupSystemAction): - """ - Действие для настройки языковых параметров - """ - addon_tasks = [] - prev_tasks = [] - templateTaskMessage = \ - __("The localization and time options are being configured") - successMessage = __("System configured!") - failedMessage = __("Failed to configure the system!") - - -class ClSetupNetworkAction(ClSetupSystemAction): - """ - Действие для настройки аудио параметров - """ - addon_tasks = [] - templateTaskMessage = __("The network settings are being configured") - successMessage = __("Network settings configured!") - failedMessage = __("Failed to configure the network settings!") - - -class ClSetupSessionAction(ClSetupSystemAction): - """ - Действие для настройки пользовательских параметров - """ - addon_tasks = [] - templateTaskMessage = __("The session settings are being configured") - successMessage = __("Session settings configured!") - failedMessage = __("Failed to configure the session settings!") - - -class ClSetupBootAction(ClSetupSystemAction): - """ - Действие для настройки параметров загрузки - """ - templateTaskMessage = __("The boot parameters are being configured") - successMessage = __("Boot parameters configured!") - failedMessage = __("Failed to configure the boot parameters!") - - prev_tasks = [] - addon_tasks = [ - # установить загрузчик - {'name': 'prepare_bootloader', - 'message': _("Installing the bootloader"), - 'method': 'Install.prepareBoot(cl_image)', - 
'condition': (lambda Get: (Get('os_install_mbr') or - Get('os_install_uefi_set') == 'on') and - Get('os_root_type') != 'livecd' and - Get('os_install_scratch') == 'off') - }, - {'name': 'no_scratch', - 'warning': _("The builder mode is no longer supported"), - 'condition': lambda Get: Get('os_install_scratch') == 'on' - }, - ] diff --git a/libs_crutch/install/variables/X11.py b/libs_crutch/install/variables/X11.py deleted file mode 100644 index 467b7a9..0000000 --- a/libs_crutch/install/variables/X11.py +++ /dev/null @@ -1,554 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import sys -import re -from os import path -import hashlib -import glob -from calculate.lib.datavars import Variable, VariableError, ReadonlyVariable -from calculate.lib.utils.portage import isPkgInstalled -from calculate.lib.utils.files import readFile, readFileEx -from calculate.lib.utils.tools import get_best_nearest_resolution -import calculate.lib.utils.device as device -from calculate.lib.utils.common import (getVideoFromXorgLog, - getVideoFromXorgConf, - getVideoFromCmdLine, - getAvailableVideo, getValueFromCmdLine, - getCompositeFromXorgconf, - getVideoFromModules, - getVideoFromVendor, getInstalledVideo, - CmdlineParams) -from calculate.lib.utils.video import get_edid_data, EdidInfoError, EdidInfo -from calculate.install.distr import DistributiveError -import fcntl -import struct -from collections import OrderedDict - -from calculate.lib.cl_lang import setLocalTranslate, _ - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class VideoVariable(Variable): - """ - Video variables not using for flash installation - """ - xorg_need = True - default_video = "default" - driver_names = OrderedDict([ - ('default', _("Auto detection")), - ('radeon', _("AMD Radeon (radeon)")), - ('amdgpu', _("AMD AMDGPU (amdgpu)")), - ('fglrx', _("AMD Catalyst (fglrx)")), - ('modesetting', _("Framebuffer device (modesetting)")), - ('vesa', _("Generic VESA (vesa)")), - ('intel', _("Intel (intel)")), - ('nouveau', _("Nvidia Nouveau (nouveau)")), - ('nvidia', _("Nvidia Graphics Driver (nvidia)")), - ]) - - def uncompatible(self): - """ - Video setting up unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return \ - _("Video configuration unavailable for Flash install") - if (self.Get('install.os_install_x11_server_set') == 'off' and - self.xorg_need): - return \ - _("This distribution does not provide a Xorg server") - return "" - - -class ResolutionVariable(VideoVariable): - """ - Abstract resolution variable - """ - 
fbres = False - - def choice(self): - resolutions = ["640x480", "800x480", "800x600", "1024x576", "1024x600", - "1024x768", "1200x800", "1280x800", "1280x720", - "1280x768", "1280x1024", "1360x768", "1366x768", - "1368x768", "1400x1050", "1440x900", "1680x945", - "1680x1050", "1920x1080", "1920x1200", "1600x768", - "1600x900", "1600x1200", "2048x1152", "2560x1440", - "2560x1600"] - if self.fbres: - return map(lambda x: "%s-32" % x, - resolutions) - else: - return resolutions - - def check(self, value): - """ - Check resolution format 1234x567 - """ - if not re.match('^\d+x\d+(-\d+(@\d+)?)?$', value): - raise VariableError( - _("Wrong resolution {resolution} {example}").format( - resolution=value, - example="(%s:%s)" % (_("Example"), "1024x768"))) - - -class VariableOsInstallX11ResolutionPreferred(ResolutionVariable): - """ - X.org resolution - """ - type = 'choiceedit' - opt = ['-X'] - metavalue = "x" - # разрешение по умолчанию пустое - это нужно для livecd - # для автоопределения разрешения xorg сервером - preferred_resolution = "" - - def init(self): - self.help = _("set the Xorg resolution") - self.label = _("Screen resolution") - - def get(self): - # get resolution from xorg.log - res = self.Get('os_x11_resolution') - if res: - return res - else: - return self.preferred_resolution - -class VariableOsInstallX11Resolution(ResolutionVariable): - """ - X.org resolution - """ - fallback_resolution = "1024x768" - FBIOGET_VSCREENINFO = 0x4600 - - def framebuffer_resolution(self): - try: - fbdev = os.open('/dev/fb0', os.O_RDONLY) - data = fcntl.ioctl(fbdev, self.FBIOGET_VSCREENINFO, " " * 8) - res = struct.unpack("II", data) - return "%sx%s" % (res[0], res[1]) - except (IOError, OSError): - pass - return "" - - def get(self): - # get resolution from xorg.log - res = self.Get('install.os_install_x11_resolution_preferred') - if res: - return res - res = self.framebuffer_resolution() - if res: - return res - return self.fallback_resolution - -class 
VariableOsInstallX11VideoAvailable(VideoVariable): - """ - Get available (already installed or installable drivers - """ - type = "list" - # supported = ["nvidia", "fglrx", "amdgpu", "nouveau", "intel", "radeon"] - supported = ["nvidia", "fglrx", "amdgpu", - "modesetting", - "nouveau", "intel", "radeon", "vesa"] - - def get(self): - image = self.Get('cl_image') - if image: - with image: - try: - distrPath = image.getDirectory() - if isPkgInstalled('xorg-server', prefix=distrPath): - return (sorted(filter(self.supported.__contains__, - getAvailableVideo( - prefix=distrPath))) + - [self.default_video]) - except DistributiveError: - pass - return [] - - def humanReadable(self): - return map(lambda x: self.driver_names.get(x, x), self.Get()) - - -class VariableOsX11KmsVideoDrv(ReadonlyVariable): - """ - Список KMS драйверов - """ - type = "list" - value = ["radeon", "intel", "nouveau", "amdgpu", "modesetting"] - - -class VariableOsInstallX11VideoDrv(VideoVariable): - """ - Video driver used by xorg - """ - type = 'choiceedit' - opt = ['--video'] - metavalue = "VIDEODRV" - - def init(self): - self.help = _("set the video driver") - self.label = _("Video driver") - - def nox_video_drivers(self): - values = self.Get('os_x11_kms_video_drv') - for drv, drvinfo in self.pkgDrvMap.items(): - _, pkgdrv = drvinfo - if isPkgInstalled(pkgdrv, prefix=self.Get('cl_chroot_path')): - values.append(drv) - return [self.default_video] + list(sorted(values)) - - def choice(self): - """Get available (already installed or installable drivers""" - if self.Get('os_install_x11_server_set') == 'on': - values = self.Get('os_install_x11_video_available') - else: - values = self.nox_video_drivers() - return map(lambda x: (x, self.driver_names.get(x, x)), - (x for x in self.driver_names.keys() if x in values)) - - def get(self): - if self.Get('os_install_x11_server_set') == 'on': - # get available videodriver list from install or configure distributive - list_video = 
self.Choice('os_install_x11_video_drv') - if not list_video: - return self.default_video - # if type system is usb-hdd then get detect video driver - if self.Get('os_install_root_type') == 'usb-hdd': - methods = ((getVideoFromModules, ()), - (getVideoFromCmdLine, ()), - (getVideoFromVendor, - (self.Get('hr_video'), list_video))) - else: - # test current video driver for install system - methods = ((getVideoFromXorgLog, ('/', list_video)), - (getVideoFromXorgConf, ('/',)), - (getVideoFromCmdLine, ()), - (getVideoFromModules, ()), - (getVideoFromVendor, - (self.Get('hr_video'), list_video))) - for func, args in methods: - drv = func(*args) - if drv in list_video: - return drv - return self.default_video - else: - for drv in map(lambda x: x[0], self.choice()): - refcnt = device.sysfs.read( - device.sysfs.Path.Module, drv, "refcnt").strip() - if refcnt.isdigit() and int(refcnt) > 0: - return {'i915': 'intel'}.get(drv, drv) - else: - return self.default_video - - pkgDrvMap = {'nvidia': ('NVidia', 'x11-drivers/nvidia-drivers'), - 'fglrx': ('ATI', 'x11-drivers/ati-drivers'), - 'vboxdrv': ('VirtualBox', 'x11-drivers/xf86-video-virtualbox')} - - def check(self, value): - if self.Get('os_install_x11_server_set') == 'on': - if self.Get('cl_action') == 'system': - availDrvs = self.Get('os_install_x11_video_available') - if not value in availDrvs: - raise VariableError(_("Only %s drivers are available") % - ",".join(availDrvs)) - else: - if not value in getInstalledVideo(prefix="/") and \ - not value in ("auto", self.default_video): - error = _("video driver %s is unavailable") % value - if value in self.pkgDrvMap: - error += ". 
" + (_("Install driver %s with:") - % self.pkgDrvMap[value][0]) - error += "\n" + ("emerge %s" % self.pkgDrvMap[value][1]) - raise VariableError(error) - else: - availDrivers = self.nox_video_drivers() - if not value in availDrivers: - raise VariableError("Only %s drivers are available" % - ",".join(availDrivers)) - - def uncompatible(self): - """ - Video setting up unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return \ - _("Video configuration unavailable for Flash install") - return "" - - -class VariableOsInstallX11VideoDrvPrev(VariableOsInstallX11VideoDrv): - """ - Предыдущее значение os_install_x11_videodrv - """ - - -class VariableHrVideoId(ReadonlyVariable): - """ - BusID of video card - TODO: need realization - """ - value = "" - - -class VariableOsInstallX11Composite(VideoVariable): - """ - on/off composite - """ - type = 'bool' - opt = ['--composite'] - - def init(self): - self.help = _("toggle composite") - self.label = _("Composite") - - def get(self): - """On/off composite""" - defaultCompositeOn = ("nvidia", "intel", "fglrx", "amdgpu", - "modesetting", - "nouveau", "radeon", "default") - composite = getValueFromCmdLine(CmdlineParams.Calculate, - CmdlineParams.Composite) - videodrv = getValueFromCmdLine(CmdlineParams.Calculate, - CmdlineParams.Video) - if videodrv != "auto": - composite = {'nocomposite': 'off', - 'off': 'off', - 'on': 'on', - 'composite': 'on'}.get(composite) - else: - composite = None - - if self.Get('os_install_x11_video_drv') in defaultCompositeOn: - defaultComposite = "on" - elif self.Get('hr_virtual') == 'vmware': - defaultComposite = "on" - else: - defaultComposite = "off" - if self.Get('os_install_x11_video_drv') == self.Get('os_x11_video_drv'): - state = getCompositeFromXorgconf() - else: - state = None - return composite or state or defaultComposite - -class VariableOsInstallFbResolutionPreferred(ResolutionVariable): - """ - Framebuffer resolution - """ - type = 'choiceedit' - opt 
= ['--fb'] - metavalue = "x" - xorg_need = False - fbres = True - value = "auto" - - def init(self): - self.help = _("set the framebuffer resolution") - self.label = _("Framebuffer resolution") - - def choice(self): - yield ("auto", _("Auto")) - for i in ResolutionVariable.choice(self): - yield (i,i) - - def check(self, value): - if value == "auto": - return - ResolutionVariable.check(self, value) - -class VariableOsInstallFbResolution(ResolutionVariable): - """ - Framebuffer resolution - """ - type = 'choiceedit' - opt = ['--fb'] - metavalue = "x" - xorg_need = False - fbres = True - fallback_resolution = "1024x768" - - def init(self): - self.help = _("set the framebuffer resolution") - self.label = _("Framebuffer resolution") - - def using_kms(self): - drv = self.Get('install.os_install_x11_video_drv') - kms = self.Get('install.os_x11_kms_video_drv') - return drv in kms - - def using_uefi(self): - return self.GetBool('install.os_install_uefi_set') - - def get(self): - custom = self.Get('os_install_fb_resolution_preferred') - if custom != "auto": - return custom - x11res = self.Get('os_install_x11_resolution') - if self.using_kms() or self.using_uefi(): - return x11res - hwinfo = device.Hwinfo() - try: - return get_best_nearest_resolution( - x11res, hwinfo.resolutions()) or self.fallback_resolution - except device.HwinfoError: - return self.fallback_resolution - -class VariableClGrubImageHash(ReadonlyVariable): - """ - Контрольная сумма изображения для grub - """ - grub_image = "/boot/grub/grub-calculate.png" - theme_data = "/etc/grub.d/05_theme" - - def get_image_md5(self, source): - return hashlib.md5(readFile(source)).hexdigest() - - def get_config_md5(selfself, source): - return hashlib.md5(readFileEx(source, grab=True)).hexdigest() - - def get_checksum(self): - data = [] - if path.exists(self.grub_image): - data.append(self.get_image_md5(self.grub_image)) - else: - data.append("-") - if path.exists(self.theme_data): - 
data.append(self.get_config_md5(self.theme_data)) - else: - data.append("-") - return "".join(data) - - def get(self): - if self.Get('cl_setup') == 'themes': - return self.get_checksum() - return "" - -class VariableClGrubImageUpdateSet(VariableClGrubImageHash): - """ - Изображение для grub обновлилось - """ - - def get(self): - if self.Get('cl_setup') == 'themes': - newmd5 = self.get_checksum() - return "on" if newmd5 != self.Get('cl_grub_image_hash') else "off" - return "off" - -class VariableClSplashImageHash(ReadonlyVariable): - """ - Контрольные суммы изображений для splashutils - """ - hash_files = ("/etc/splash/calculate/images/verbose.md5", - "/etc/splash/calculate/images/silent.md5", - "/usr/share/plymouth/themes/calculate/boot.md5", - "/usr/share/plymouth/themes/calculate/boot/md5sum", - "/usr/share/plymouth/themes/calculate/calculate.plymouth") - - cfg_files = "/etc/splash/calculate/*.cfg" - - def get_config_md5(selfself, source): - return hashlib.md5(readFileEx(source, grab=True)).hexdigest() - - def get_hash_data(self, sources): - data = [] - for fn in sources: - data.append(self.get_config_md5(fn)) - for fn in glob.glob(self.cfg_files): - data.append(self.get_config_md5(fn)) - break - return "".join(data) - - def get(self): - if self.Get('cl_setup') == 'themes': - return self.get_hash_data(self.hash_files) - return "" - -class VariableClSplashImageUpdateSet(VariableClSplashImageHash): - """ - Изображение для splash dracut обновлилось - """ - def get(self): - if self.Get('cl_setup') == 'themes': - newmd5 = self.get_hash_data(self.hash_files) - return "on" if newmd5 != self.Get('cl_splash_image_hash') else "off" - return "off" - -class VariableClInstallEdidData(ReadonlyVariable): - type = Variable.Types.Object - - def get(self): - edid_data = get_edid_data() - if not edid_data: - return {} - try: - ei = EdidInfo() - ei.set_data(edid_data) - return { - "resolution": ei.resolution, - "ratio": ei.ratio, - "screensize": ei.screensize - } - except 
EdidInfoError as e: - return {} - -class VariableClInstallEdidResolution(ReadonlyVariable): - def get(self): - return self.Get('cl_install_edid_data').get('resolution','') - -class VariableClInstallEdidScreensize(ReadonlyVariable): - def get(self): - return self.Get('cl_install_edid_data').get('screensize','') - -class VariableClInstallCalculateDpi(Variable): - def get(self): - inch = 25.4 - screensize = self.Get('cl_install_edid_screensize') - resolution = self.Get('os_install_x11_resolution') - if screensize and resolution: - cx = screensize.partition("x")[0] - cxres = resolution.partition("x")[0] - if cx.isdigit() and cxres.isdigit(): - cx = float(cx) - cxres = float(cxres) - return str(int(inch * cxres / cx)) - return "" - -class VariableClInstallDpi(Variable): - def get(self): - calculate_dpi = self.Get('cl_install_calculate_dpi') - try: - if calculate_dpi: - calculate_dpi = int(calculate_dpi) - if calculate_dpi > 100: - return "108" - except ValueError: - pass - return "96" - -class VariableClInstallScaling(Variable): - def get(self): - dpi = self.Get('cl_install_dpi') - try: - if dpi: - dpi = int(dpi) - if dpi > 100: - return "hi" - except ValueError: - pass - return "normal" diff --git a/libs_crutch/install/variables/__init__.py b/libs_crutch/install/variables/__init__.py deleted file mode 100644 index ea5023d..0000000 --- a/libs_crutch/install/variables/__init__.py +++ /dev/null @@ -1,47 +0,0 @@ -#-*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -#-*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from . import action -from . import disk -from . import locale -from . import linux -from . import distr -from . import kernel -from . import net -from . import system -from . import X11 -from . import lvm -from . import autopartition -from . import audio - -section = "install" diff --git a/libs_crutch/install/variables/action.py b/libs_crutch/install/variables/action.py deleted file mode 100644 index 704c125..0000000 --- a/libs_crutch/install/variables/action.py +++ /dev/null @@ -1,144 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from calculate.lib.datavars import ActionVariable - -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_install3', sys.modules[__name__]) - -ServerSetup = "server_setup" - -class VariableAcInstallMerge(ActionVariable): - """ - Action variable which has value "on" - in ebuild phase preinst or reconfigure system - """ - - def action(self, cl_action): - if (cl_action in ("system", "merge") and - self.Get('os_install_root_type') != 'flash' and - self.Get('os_install_pxe') == 'off' and - self.Get('cl_live') == 'off' or - cl_action in (ServerSetup, "sync", - "domain", "undomain",) - and self.Get('cl_merge_pkg')): - return "on" - return "off" - - -class VariableAcInstallLive(ActionVariable): - """ - Action variable which has value "on" - in configure system, install system hdd, and postinst ebuild phase - only not chroot - """ - nonchroot = True - - def action(self, cl_action): - if (cl_action in ("system", "merge") and - self.Get('os_install_root_type') != 'flash' and - self.Get('os_install_pxe') == 'off' or - cl_action in (ServerSetup, - "sync", "domain", "undomain",) - and self.Get('cl_merge_pkg')): - return "on" - return "off" - - -class VariableAcInstallConfig(ActionVariable): - """ - Action variable which has value "on" for emerge --config - cl-config - """ - def action(self, cl_action): - if cl_action in ("config",): - return "on" - return "off" - - -class VariableAcInstallDisk(ActionVariable): - """ - Action variable which has value "on" for installation on hdd - """ - - def action(self, cl_action): - if (cl_action == 'system' and - self.Get('os_install_root_type') != "flash" and - self.Get('os_install_pxe') != "on"): - return "on" - else: - return "off" - - -class VariableAcInstallFlash(ActionVariable): - """ - Action variable which has value "on" for USB flash - """ - - def action(self, cl_action): - if (cl_action == 'system' and - self.Get('os_install_root_type') == 'flash'): - return "on" - return "off" - - -class 
VariableAcInstallPxe(ActionVariable): - """ - Action variable which has value "on" for PXE installation - """ - - def action(self, cl_action): - if cl_action == 'system' and self.Get('os_install_pxe') == 'on': - return "on" - return "off" - - -class VariableAcInstallConfigure(ActionVariable): - """ - Action variable which has value "up" for configuration - """ - - def action(self, cl_action): - cl_setup = self.Get('cl_setup') - if cl_action == "merge" and cl_setup: - return "on" - return "off" - - -class VariableAcInstallUnmerge(ActionVariable): - """ - Action variable which has value "up" on prerm ebuild phase - """ - - def action(self, cl_action): - if (cl_action == "merge" and - self.Get('cl_ebuild_phase') in ('prerm', 'postrm')): - return "on" - return "off" - - -class VariableAcInstallPatch(ActionVariable): - """ - Action variable which has value "on" - in ebuild phase preinst or reconfigure system - """ - - def action(self, cl_action): - if cl_action in ("patch",): - return "on" - return "off" diff --git a/libs_crutch/install/variables/audio.py b/libs_crutch/install/variables/audio.py deleted file mode 100644 index 4305b1f..0000000 --- a/libs_crutch/install/variables/audio.py +++ /dev/null @@ -1,276 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -import re -from calculate.lib.datavars import (Variable, ReadonlyVariable, - ReadonlyTableVariable, FieldValue, - HumanReadable) -from calculate.lib.utils.common import getValueFromCmdLine, CmdlineParams -from calculate.lib.utils.portage import isPkgInstalled -from calculate.lib.utils.files import readFile, readLinesFile -from calculate.install.distr import DistributiveError -import glob - -from calculate.lib.cl_lang import setLocalTranslate, _ - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class VariableOsAudio(Variable): - """ - Выбранная аудиосистема - """ - type = "choice" - opt = ['--audio'] - metavalue = "AUDIO" - - def init(self): - self.label = _("Audio system") - self.help = _("set the audio system") - - def get(self): - """ - pulseaudio по умолчанию если доступно или вписано в /etc/asound.conf - """ - avail = [x[0] for x in self.Get('os_audio_available')] - if "pulseaudio" in avail: - audio = getValueFromCmdLine(CmdlineParams.Calculate, - CmdlineParams.Audio) - - if audio and audio == "alsa": - return "alsa" - else: - return "pulseaudio" - return "alsa" - - def choice(self): - return self.Get('os_audio_available') - - - def uncompatible(self): - """ - Audio setting up unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return _("Audio configuration unavailable for Flash install") - if self.Get('os_install_alsa_set') == 'off': - return _("This distribution does not provide the ALSA sound") - return "" - - -class VariableOsAudioAvailable(Variable): - """ - Доступные звуковые системы - """ - type = "list" - - def get(self): - mapAudioConf = ( - ('alsa', None, _('ALSA')), - ('pulseaudio', 'media-sound/pulseaudio', _("PulseAudio")), - ) - image = self.Get('cl_image') - if image: - with image as distr: - try: - distrPath = image.getDirectory() - return map(lambda x: x[0::2], - filter(lambda x: not x[1] or isPkgInstalled(x[1], - prefix=distrPath), - mapAudioConf)) - except DistributiveError 
as e: - pass - return sorted(map(lambda x: x[0::2], mapAudioConf[-1:]), - key=lambda x: x[1]) - - -class VariableOsAudioCardMap(ReadonlyVariable): - """ - Соответствие номеров звуковых карт именам - """ - type = Variable.Types.Table - - def get(self): - return [(cardid[17:-3], readFile(cardid).strip()) - for cardid in glob.glob('/proc/asound/card[0-9]*/id')] - - -class VariableOsAudioData(ReadonlyTableVariable): - """ - Information about audio cards - """ - source = ['os_audio_id', - 'os_audio_name'] - - def generate_cards(self, cards): - for card_id, card_name in cards: - for playback_info in glob.glob( - "/proc/asound/card%s/pcm[0-9]p/info" % card_id): - dInfo = (x.partition(":")[::2] - for x in readLinesFile(playback_info)) - dInfo = {x.strip(): y.strip() for x, y in dInfo} - if all(x in dInfo for x in ('card', 'device', 'name')): - if card_name == dInfo['name']: - yield ("%s,%s" % (dInfo['card'], dInfo['device']), - "%s" % card_name) - else: - yield ("%s,%s" % (dInfo['card'], dInfo['device']), - "%s, %s" % (card_name, dInfo['name'])) - - def get(self, hr=HumanReadable.No): - # /proc/asound/card*/pcm*p/info - data = readFile('/proc/asound/cards') - cards = re.findall('^\s*(\d+).*\s-\s(.+)\n\s+\S.* at .*$', - data, re.M) - if cards: - return list(self.generate_cards(cards)) - else: - return [[]] - - setValue = Variable.setValue - - -class VariableOsAudioId(FieldValue, ReadonlyVariable): - """ - Order Id of audio card - """ - type = "list" - source_variable = "os_audio_data" - column = 0 - - -class VariableOsAudioName(FieldValue, ReadonlyVariable): - """ - Name of audio card - """ - type = "list" - source_variable = "os_audio_data" - column = 1 - - -class VariableOsAudioCardDefault(Variable): - """ - Идентификатор карты по умолчанию - """ - def get(self): - audio_default = self.Get('os_audio_default') - if audio_default and audio_default != "none": - cardmap = dict(self.Get("os_audio_card_map")) - cardnum = audio_default.split(',')[0] - if cardnum in cardmap: - 
return cardmap[cardnum] - return "0" - return "" - -class VariableOsAudioDeviceDefault(Variable): - """ - Номер устройства по умолчанию - """ - def get(self): - audio_default = self.Get('os_audio_default') - if audio_default and audio_default != "none": - return self.Get('os_audio_default').split(',')[1] - return "" - - -class VariableOsAudioCardNameDefault(Variable): - """ - Название карты используемое в настройках KDE - """ - def get(self): - try: - audio_default = self.Get('os_audio_default') - if audio_default and audio_default != "none": - cardnum = int(audio_default.split(',')[0]) - audionames = self.Get('os_audio_name') - if cardnum < len(audionames): - return audionames[cardnum].split(',')[0] - except ValueError: - pass - return "" - - -class VariableOsAudioHw(Variable): - """ - Current default audio card - """ - - def get_deprecated(self): - asound_data = readFile('/etc/asound.conf') - default_card_re = re.compile('defaults.ctl.card\s+(\d+)') - entry = default_card_re.search(asound_data) - if entry and entry.groups()[0] in self.Get('os_audio_id'): - return "%s,0" % entry.groups()[0] - default_card_re = re.compile( - 'pcm.!default {[^}]+card\s+(\d+)[^}]+device\s+(\d+)[^}]+}') - entry = default_card_re.search(asound_data) - if entry: - entry = "%s,%s" % entry.groups() - if entry in self.Get('os_audio_id'): - return entry - return "" - - def get(self): - cardmap = dict(self.Get("os_audio_card_map")) - value = self.get_deprecated() - if not value: - value = self.Select('os_audio_id', where='os_audio_name', - notlike='HDMI', limit=1) or "0,0" - cardnum, devicenum = value.split(",") - if cardnum in cardmap: - return "{},{}".format(cardmap[cardnum], devicenum) - return "" - -class VariableOsAudioDefault(Variable): - """ - Current default audio card - """ - type = "choice" - opt = ['--card'] - metavalue = "CARD" - - def init(self): - self.label = _("Default audio card") - self.help = _("set the default audio") - - def get(self): - current = self.Get('os_audio_hw') 
- if current and "," in current: - cardmap = {y:x for x, y in self.Get("os_audio_card_map")} - cardid, devicenum = current.split(",") - if cardid in cardmap: - return "{},{}".format(cardmap[cardid], devicenum) - data = self.Get('os_audio_data') - if data and data[0]: - return "0,0" - return "none" - - def choice(self): - data = self.Get('os_audio_data') - if data and data[0]: - return self.Get('os_audio_data') - return [("none", _("Not available"))] - - def uncompatible(self): - """ - Audio setting up unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return _("Audio configuration unavailable for Flash install") - if self.Get('os_install_alsa_set') == 'off': - return _("This distribution does not provide the ALSA sound") - return "" diff --git a/libs_crutch/install/variables/autopartition.py b/libs_crutch/install/variables/autopartition.py deleted file mode 100644 index fac2d62..0000000 --- a/libs_crutch/install/variables/autopartition.py +++ /dev/null @@ -1,1030 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -import re -from calculate.lib.datavars import (Variable, VariableError, FieldValue, - VariableInterface, HumanReadable, - ReadonlyVariable, ReadonlyTableVariable) -import calculate.lib.utils.device as device -from calculate.lib.utils.device import humanreadableSize -from calculate.lib.utils import partition -from calculate.lib.utils.files import (readLinesFile) -from calculate.lib.utils.mount import isMount, try_umount -from calculate.install.fs_manager import FileSystemManager -from calculate.lib.utils.tools import Sizes, traverse -from itertools import * - -from calculate.lib.cl_lang import setLocalTranslate, _ -from calculate.lib.variables.system import RootType - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class SizeHelper(VariableInterface): - """ - Объект помогает переменной преобразовывать размер с суффиксами, - ведённый пользователями к "числовому" необходимому размеру - """ - default_size = Sizes.M - - def set(self, value): - # convert table from value to MB - sizeMap = {'kB': Sizes.kB, - 'K': Sizes.K, - 'M': Sizes.M, - 'Mb': Sizes.Mb, - 'G': Sizes.G, - 'Gb': Sizes.Gb, - 'T': Sizes.T, - 'Tb': Sizes.Tb} - value = value.strip() - reSizeValue = re.compile('^(\d+)\s*(%s)?' 
% "|".join(sizeMap.keys())) - res = reSizeValue.search(value) - if not res: - return "0" - intValue = int(res.group(1)) - if res.group(2): - intValue = intValue * sizeMap[res.group(2)] - else: - intValue = intValue * self.default_size - return str(intValue) - - -class AutopartitionError(Exception): - """ - Autopartition error - """ - pass - - -class SchemeOpt(object): - Swap = "swap" - Update = "update" - UEFI = "uefi" - LVM = "lvm" - Calculate = "data" - - -class AutopartitionHelper(VariableInterface): - """ - Helper for autopartiton device and mount point creating - """ - def uncompatible(self): - if self.Get('cl_autopartition_set') == "off": - return _("Autopartition options are not available with manual " - "partitioning") - return "" - - -class VariableHrMemorySize(ReadonlyVariable): - """ - Memory size in bytes - """ - type = "int" - - def get(self): - reMemTotal = re.compile(r'^MemTotal:\s*(\d+)\s*kB$') - totalMemList = filter(lambda x: x, - map(reMemTotal.search, - readLinesFile('/proc/meminfo'))) - if totalMemList: - size = int(totalMemList[0].group(1)) * Sizes.K - return str(size) - return "0" - - def humanReadable(self): - return humanreadableSize(self.Get()) - - -class VariableClAutopartitionSwapSize(SizeHelper, AutopartitionHelper, - Variable): - """ - Swap size - """ - type = "size-m" - opt = ["--swap-size"] - metavalue = "SIZE" - untrusted = True - - def init(self): - self.label = _("Swap partition size") + " (MB)" - self.help = _("set the swap partition size for autopartition") - - def get(self): - size = int(self.Get('hr_memory_size')) - if size < Sizes.G: - size = Sizes.G - return str(size) - - def check(self, value): - if value.isdigit() and int(value) <= Sizes.M: - raise VariableError( - _("Swap size should not be less that 1MiB")) - - def humanReadable(self): - return humanreadableSize(int(self.Get())) - - -class VariableClAutopartitionDeviceData(ReadonlyTableVariable): - """ - Список устройств, которые могут выбраны в качестве дисков под 
разметку - """ - - source = ['cl_autopartition_device_dev', - 'cl_autopartition_device_type', - 'cl_autopartition_device_size', - 'cl_autopartition_device_name'] - - re_raid = re.compile("raid[1-9]") - - def get(self, hr=HumanReadable.No): - def generator(): - for dev, _type, fulltype, size, name in self.ZipVars( - 'os_device_dev', 'os_device_type', - 'os_device_fulltype', - 'os_device_size', 'os_device_name'): - if not _type.startswith('raid'): - _type = "device" - if not name: - name = _("Unknown") - else: - name = _type.upper() - if self.re_raid.search(fulltype): - _type = "raid" - else: - _type = "raid0" - - yield [dev, _type, size, name] - - return list(generator()) or [[]] - - -class VariableClAutopartitionDeviceDev(FieldValue, ReadonlyVariable): - type = "list" - source_variable = "cl_autopartition_device_data" - column = 0 - - -class VariableClAutopartitionDeviceType(FieldValue, ReadonlyVariable): - type = "list" - source_variable = "cl_autopartition_device_data" - column = 1 - - -class VariableClAutopartitionDeviceSize(FieldValue, ReadonlyVariable): - type = "list" - source_variable = "cl_autopartition_device_data" - column = 2 - - -class VariableClAutopartitionDeviceName(FieldValue, ReadonlyVariable): - type = "list" - source_variable = "cl_autopartition_device_data" - column = 3 - - -class VariableClAutopartitionDevice(AutopartitionHelper, Variable): - """ - Device for autopartition - """ - type = "choice-list" - element = "selecttable" - opt = ["-D"] - metavalue = "DEVICE" - untrusted = True - - def init(self): - self.help = _("set the device for autopartition") - self.label = _("Devices for install") - - def get(self): - choiceVal = map(lambda x: x[0], self.choice()) - devicesTypes = self.Select(['os_device_dev','os_device_type'], - where='os_device_dev', _in=choiceVal) - notFlashDevices = [x[0] for x in devicesTypes if x[1] != 'flash'] - if len(notFlashDevices) == 1: - return [notFlashDevices[0]] - return [] - - def choice(self): - getname = lambda 
dev, name, size: "%s (%s, %s)" % ( - dev, name, humanreadableSize(size) - ) - return [(dev, getname(dev, name, size)) - for dev, name, size in self.ZipVars( - 'cl_autopartition_device_dev', 'cl_autopartition_device_name', - 'cl_autopartition_device_size')] - - def checkNeeded(self, valuelist, usedDevices, agregationType): - needDevices = list(set(usedDevices) - set(valuelist)) - if needDevices: - raise VariableError( - _("Disks {selecteddisk} are part of " - "{agrtype}\nYou need to use {needdisk} as well or " - "clear {agrtype} manually").format( - selecteddisk=",".join( - list(set(usedDevices) & set(valuelist))), - needdisk=",".join(needDevices), - agrtype=agregationType)) - - def checkOnLvm(self, valuelist): - disks = self.Select('os_disk_dev', - where='os_disk_parent', _in=valuelist) - vgroups = self.Select('os_lvm_vgname', - where='os_lvm_pvname', _in=disks) - lvmDisks = self.Select('os_lvm_pvname', - where='os_lvm_vgname', _in=vgroups) - lvmDevices = self.Select('os_disk_parent', where='os_disk_dev', - _in=lvmDisks) - self.checkNeeded(valuelist, lvmDevices, "LVM") - - def checkOnRaid(self, valuelist): - disks = self.Select('os_disk_dev', - where='os_disk_parent', _in=valuelist) - raids = filter(None, self.Select('os_disk_raid', - where='os_disk_dev', _in=disks)) - raidDisks = self.Select('os_disk_dev', where='os_disk_raid', _in=raids) - raidDevices = self.Select('os_disk_parent', - where='os_disk_dev', - _in=raidDisks) - self.checkNeeded(valuelist, raidDevices, "RAID") - - def checkOnSelectParentAndChild(self, valuelist): - """ - Проверка на попытку одновременно выбрать RAID и диски из которых - он состоит - """ - for device in valuelist: - parents = self.select('os_device_parent', - os_device_dev=device, limit=1).split(',') - common = set(parents) & set(valuelist) - if common: - if len(common) > 1: - raise VariableError( - _("Devices {devices} are used for {selected}").format( - devices=",".join(sorted(common)), - selected=device)) - else: - raise 
VariableError( - _("Device {device} is used for {selected}").format( - device=",".join(common), - selected=device)) - - def checkSelectedRaid(self, valuelist): - """ - Проверить схемы RAID, чтобы исключить базирование их на lvm - """ - typecheck = re.compile("lvm.*raid") - for dev, fulltype in self.ZipVars("os_device_dev", - "os_device_fulltype"): - if dev in valuelist: - if typecheck.search(fulltype): - raise VariableError(_("RAID %s is wrong") % dev) - - def is_force_param(self): - return "--force" in self.Get("cl_console_args") - - def get_mounted_devices(self): - mountedData = [(x,y) for x,y in self.ZipVars( - "os_disk_parent", "os_disk_dev") if y and isMount(y)] - mountedDevices = {} - for devices, disk in mountedData: - if not isMount(disk): - continue - if self.is_force_param() and try_umount(disk): - continue - for _device in traverse(devices.split(',')): - if _device not in mountedDevices: - mountedDevices[_device] = set() - mountedDevices[_device].add(disk) - return mountedDevices - - def check(self, valuelist): - if self.Get('cl_autopartition_set') == "on": - if not valuelist: - raise VariableError( - _("For autopartition, please select the device")) - - useDisks = set(traverse(x.split(',') for x in self.Select( - 'os_disk_parent', where='os_disk_mount', ne=''))) - - mountedDevices = self.get_mounted_devices() - - for value in chain(valuelist): - for disk in set(chain(self.select( - 'os_device_parent', os_device_dev=value, - limit=1).split(","), [value])): - if disk in useDisks: - raise VariableError( - _("Device %s is already in use by the current " - "system") % value) - if disk in mountedDevices: - disks = ",".join(sorted(mountedDevices[disk])) - raise VariableError( - _("Please unmount {disks}, as {device} will be used for " - "installation").format(device=disk, disks=disks)) - self.checkSelectedRaid(valuelist) - self.checkOnSelectParentAndChild(valuelist) - self.checkOnLvm(valuelist) - self.checkOnRaid(valuelist) - try: - df = 
partition.VariableFactory(device.udev) - sb = self.Get('cl_autopartition_scheme_builder') - if sb and sb.devices: - sb.process(df) - except (partition.SchemeError, - partition.VirtualDiskError) as e: - raise VariableError(str(e)) - if self.GetBool('cl_autopartition_uefi_set'): - if not self.Get('cl_autopartition_efi') and \ - self.Get('cl_autopartition_table') == 'gpt': - raise VariableError( - _("Selected devices have not EFI partition")) - if not self.GetBool('cl_autopartition_uefi_set'): - if not self.Get('cl_autopartition_mbr'): - raise VariableError( - _("Selected devices have not BIOS boot partition")) - - -class VariableClAutopartitionSchemeBuilder(ReadonlyVariable): - def get(self): - if not self.GetBool('cl_autopartition_set'): - return "" - sb = partition.SchemeBuilder() - if self.Get('cl_autopartition_table') == "dos": - sb.partition_table = sb.PartitionTable.DOS - if self.Get('cl_autopartition_table') == "gpt": - sb.partition_table = sb.PartitionTable.GPT - sb.vgname = self.Get('cl_autopartition_lvm_vgname') - sb.lvm = self.GetBool('cl_autopartition_lvm_set') - sb.efi = self.GetBool('cl_autopartition_uefi_set') - - sb.efi_size = int(self.Get('cl_autopartition_uefi_size')) - sb.biosboot_size = int(self.Get('cl_autopartition_bios_grub_size')) - sb.root_size = int(self.Get('cl_autopartition_root_size')) - sb.swap_size = int(self.Get('cl_autopartition_swap_size')) - - scheme = self.Get('cl_autopartition_scheme') - if SchemeOpt.Swap in scheme: - sb.swap = True - if SchemeOpt.Update in scheme: - sb.update = True - if SchemeOpt.Calculate in scheme: - sb.calculate = True - - deviceSizeAll = int(self.Get('cl_autopartition_device_size_all')) - if not sb.update and not sb.calculate and deviceSizeAll == sb.root_size: - sb.rootall = True - - sb.minimal_calculate_size = \ - int(self.Get('cl_autopartition_calculate_size')) - - used_devices = self.Get('cl_autopartition_device') - for dev, _type, size in self.ZipVars("cl_autopartition_device_dev", - 
"cl_autopartition_device_type", - "cl_autopartition_device_size"): - if dev in used_devices: - if _type.startswith("raid"): - device_type = partition.VolumesBuilder.DeviceType.RAID - else: - device_type = partition.VolumesBuilder.DeviceType.Device - sb.add_device(dev, device_type, int(size)) - - sb.default_format = self.Get('cl_autopartition_default_format') - - return sb - -class VariableClAutopartitionFactory(ReadonlyVariable): - """ - Объект для вычисления параметров/переменных выбранной конфигурации - """ - - def get(self): - df = partition.VariableFactory(device.udev) - sb = self.Get('cl_autopartition_scheme_builder') - if sb and sb.devices: - df.default_format = sb.default_format - root_purpose = partition.VolumesBuilder.Purpose.MainRoot - data_purpose = partition.VolumesBuilder.Purpose.Calculate - df.purpose_format[root_purpose] = \ - self.Get('cl_autopartition_root_format') - df.purpose_format[data_purpose] = \ - self.Get('cl_autopartition_calculate_format') - try: - sb.process(df) - except (partition.SchemeError, partition.VirtualDiskError): - return partition.VariableFactory(device.udev) - return df - - def __str__(self): - return "AutopartitionObject" - - -class VariableClAutopartitionDefaultFormat(Variable): - """ - Формат файловой системы - """ - type = "choice" - - def get(self): - return FileSystemManager.get_default_fs(self, 'hdd') - - def choice(self): - allfs = set([k for k,v in FileSystemManager.supportFS.items() - if v.get('auto',True)]) - availFS = set(self.Select('os_format_type', - where='os_format_use', - eq='yes')) - return list(sorted(allfs & availFS)) - - -class VariableClAutopartitionRootFormat(VariableClAutopartitionDefaultFormat): - """ - Формат файловой системы для корня - """ - opt = ["--root-fs"] - metavalue = "FS" - - def init(self): - self.label = _("Filesystem for root partition") - self.help = _("set filesystem for root partititon") - - def get(self): - return self.Get('cl_autopartition_default_format') - - -class 
VariableClAutopartitionCalculateFormat(VariableClAutopartitionDefaultFormat): - """ - Формат файловой системы для calculate - """ - opt = ["--data-fs"] - metavalue = "FS" - - def init(self): - self.label = _("Filesystem for data partition") - self.help = _("set filesystem for data partititon") - - def get(self): - return self.Get('cl_autopartition_default_format') - - -class VariableClAutopartitionCalculateSize(Variable): - """ - Минимальный размер для раздела /var/calculate - """ - value = str(Sizes.G * 1) - - -class VariableClAutopartitionSet(Variable): - """ - Using autopartition - """ - type = "bool" - element = "radio" - - def init(self): - self.label = _("Allocate drive space") - self.help = _("use the autopartition") - - def choice(self): - name = "Calculate Linux" - return [("on", _("Erase disk and install %s") % name), - ("off", _("Use current partitions"))] - - def get(self): - if self.is_console_set("os_location_data"): - return "off" - elif self.is_console_set("cl_autopartition_device"): - return "on" - if self.Get('os_root_type_ext') in (RootType.Value.LiveCD, - RootType.Value.LiveFlash, - RootType.Value.IsoScanFlash, - RootType.Value.NetBoot): - return "on" - else: - return "off" - - -class VariableClAutopartitionBriefSet(VariableClAutopartitionSet): - def get(self): - return self.Get('cl_autopartition_set') - - def uncompatible(self): - if self.Get('os_install_root_type') == 'flash': - return _("This option not used for Flash install") - - -class VariableClAutopartitionScheme(AutopartitionHelper, Variable): - """ - Autopartition scheme - """ - type = "choice-list" - element = "selecttable" - opt = ["--auto-scheme", "-S"] - metavalue = "AUTOPARTOPTS" - check_after = ["cl_autopartition_table"] - untrusted = True - - def init(self): - self.help = _("autopartition options") - self.label = _("Partitions options") - - def get(self): - default_value = [ - SchemeOpt.Update, - SchemeOpt.Calculate - ] - if self.Get('os_uefi_set') == 'on': - return 
[SchemeOpt.UEFI] + default_value - else: - return default_value - - def choice(self): - return [ - (SchemeOpt.Swap, _("Swap partition")), - (SchemeOpt.Update, _("The partition for the update")), - (SchemeOpt.Calculate, _("Data partition")), - (SchemeOpt.UEFI, _("Use the UEFI bootloader")), - (SchemeOpt.LVM, _("Use LVM")), - ] - - def check(self, value): - if SchemeOpt.UEFI in value: - if self.Get('os_uefi_set') == 'off': - raise VariableError( - _("Your system must be loaded in UEFI for using this " - "bootloader")) - if self.Get('os_install_arch_machine') != 'x86_64': - raise VariableError( - _("Architecture of the target system must be x86_64 " - "for using the UEFI bootloader")) - if self.Get('cl_autopartition_table') != 'gpt': - raise VariableError( - _("The partition table must be GPT for using " - "UEFI bootloader")) - - -class VariableClAutopartitionRootSizeDefault(Variable): - """ - Размер root раздела при авторазметке - """ - value = str(Sizes.G * 15) - - -class VariableClAutopartitionRootSizeMin(Variable): - """ - Минимальнй размер root раздела - """ - value = str(Sizes.G * 7) - - -class VariableClAutopartitionDeviceSizeAll(ReadonlyVariable): - """ - Общий объем дискового пространства - """ - def get(self): - deviceSize = sum( - int(x) for x in self.Get('cl_autopartition_device_size')) - scheme = self.Get('cl_autopartition_scheme') - if SchemeOpt.Swap in scheme: - return deviceSize - int(self.Get('cl_autopartition_swap_size')) - return deviceSize - -class VariableClAutopartitionRootSize(SizeHelper, AutopartitionHelper, - Variable): - """ - Root partition size for autopartition - """ - type = "size-m" - opt = ["--root-size"] - metavalue = "SIZE" - untrusted = True - - def init(self): - self.label = _("Root partition size") + " (MB)" - self.help = _("set the root partition size for autopartition") - - def use_rootall(self): - scheme = self.Get('cl_autopartition_scheme') - return SchemeOpt.Update not in scheme and SchemeOpt.Calculate not in scheme - - def 
get(self): - size = int(self.Get('cl_autopartition_root_size_default')) - deviceSize = int(self.Get('cl_autopartition_device_size_all')) - minRootSize = int(self.Get('cl_autopartition_root_size_min')) - if self.use_rootall(): - size = max(deviceSize, minRootSize) - return str(size) - - def check(self, value): - minRootSize = int(self.Get('cl_autopartition_root_size_min')) - if (self.Get('cl_autopartition_device') and - self.Get('cl_autopartition_set') == "on"): - if int(value) < minRootSize: - raise VariableError( - _("The root partition should be at least {size}").format( - size="%s Gb" % (Sizes().to_G(minRootSize)))) - - def humanReadable(self): - return humanreadableSize(int(self.Get())) - - -class VariableClAutopartitionTable(AutopartitionHelper, Variable): - """ - Partition table for autopartition - """ - type = "choice" - value = "gpt" - opt = ["--partition-table", "-T"] - metavalue = "TABLE" - - def init(self): - self.label = _("Partition table") - self.help = _("set the partition table for autopartition") - - def choice(self): - return [("dos", "DOS-type Partition Table"), - ("gpt", "GUID Partition Table (GPT)")] - - -class VariableClAutopartitionLvmSet(ReadonlyVariable): - """ - Using LVM for autopartition - """ - type = "bool" - - def get(self): - return ("on" if SchemeOpt.LVM in self.Get('cl_autopartition_scheme') - else "off") - - -class VariableClAutopartitionUefiSet(ReadonlyVariable): - """ - Using UEFI bootloader - """ - type = "bool" - - def get(self): - return ("on" if SchemeOpt.UEFI in self.Get('cl_autopartition_scheme') - else "off") - - -class VariableClAutopartitionLvmVgname(Variable): - """ - Volume group name for LVM autopartition - """ - - def get(self): - def generateName(startName): - yield startName - for i in count(20): - yield "%s%d" % (startName, i) - - for name in generateName("calculate"): - disks = self.Select('os_lvm_pvname', where='os_lvm_vgname', eq=name) - devices = self.Select('os_disk_parent', - where='os_disk_dev', _in=disks) 
- if set(devices) <= set(self.Get('cl_autopartition_device')): - return name - - -class VariableClAutopartitionDiskData(ReadonlyTableVariable): - """ - Серия переменных содержит список устройств, используемых для построения - переменных os_location_source и т.д. - """ - source = ['cl_autopartition_disk_dev', - 'cl_autopartition_disk_mount', - 'cl_autopartition_disk_format', - 'cl_autopartition_disk_size', - 'cl_autopartition_disk_part', - 'cl_autopartition_disk_type'] - - -class DiskFilter(VariableInterface): - field = "" - - def get(self): - factory = self.Get('cl_autopartition_factory') - return [str(x) for x, mount in zip(getattr(factory, self.field), - factory.disk_mount) - if mount and not mount.startswith("/boot/efi")] - -class VariableClAutopartitionDiskDev(DiskFilter, ReadonlyVariable): - """ - Autopartition virtual disk on device - """ - type = "list" - field = "disk_dev" - - -class VariableClAutopartitionDiskMount(DiskFilter, ReadonlyVariable): - """ - Autopartition mount points - """ - type = "list" - field = "disk_mount" - - -class VariableClAutopartitionDiskFormat(DiskFilter, ReadonlyVariable): - """ - Autopartition disk filesystem - """ - type = "list" - field = "disk_format" - - -class VariableClAutopartitionDiskPart(DiskFilter, ReadonlyVariable): - """ - Autopartition partition type (primary,extended,logical,gpt) - """ - type = "list" - field = "disk_part" - - -class VariableClAutopartitionDiskType(DiskFilter, ReadonlyVariable): - """ - Autopartition partition scheme (simple - disk-partition) - """ - type = "list" - field = "disk_type" - - -class VariableClAutopartitionDiskSize(DiskFilter, ReadonlyVariable): - """ - Autopartition disk size - """ - type = "list" - field = "disk_size" - - def get(self): - return map(str, super(VariableClAutopartitionDiskSize, self).get()) - - def humanReadable(self): - return map(humanreadableSize, self.Get()) - - -class VariableClAutopartitionDiskDataFull(ReadonlyTableVariable): - """ - Серия переменных содержит 
полный список созданной разметки - """ - source = ['cl_autopartition_disk_dev_full', - 'cl_autopartition_disk_mount_full', - 'cl_autopartition_disk_format_full', - 'cl_autopartition_disk_perform_format_full', - 'cl_autopartition_disk_size_full', - 'cl_autopartition_disk_part_full', - 'cl_autopartition_disk_type_full'] - - -class VariableClAutopartitionDiskDevFull(ReadonlyVariable): - """ - Autopartition virtual disk on device - """ - type = "list" - - def get(self): - var_factory = self.Get('cl_autopartition_factory') - return var_factory.disk_dev - - -class VariableClAutopartitionDiskMountFull(ReadonlyVariable): - """ - Autopartition mount points - """ - type = "list" - - def get(self): - var_factory = self.Get('cl_autopartition_factory') - return var_factory.disk_mount - - -class VariableClAutopartitionDiskFormatFull(ReadonlyVariable): - """ - Autopartition disk filesystem - """ - type = "list" - - def get(self): - var_factory = self.Get('cl_autopartition_factory') - return var_factory.disk_format - - -class VariableClAutopartitionDiskPerformFormatFull(ReadonlyVariable): - """ - Autopartition disk filesystem - """ - type = "bool-list" - - def get(self): - return ["on" if mp else "off" - for mp in self.Get('cl_autopartition_disk_mount_full')] - -class VariableClAutopartitionDiskPartFull(ReadonlyVariable): - """ - Autopartition partition type (primary,extended,logical,gpt) - """ - type = "list" - - def get(self): - var_factory = self.Get('cl_autopartition_factory') - return var_factory.disk_part - - -class VariableClAutopartitionDiskTypeFull(ReadonlyVariable): - """ - Autopartition partition scheme (simple - disk-partition) - """ - type = "list" - - def get(self): - var_factory = self.Get('cl_autopartition_factory') - return var_factory.disk_type - - -class VariableClAutopartitionDiskSizeFull(ReadonlyVariable): - """ - Autopartition disk size - """ - type = "list" - - def get(self): - var_factory = self.Get('cl_autopartition_factory') - return map(str, 
var_factory.disk_size) - - def humanReadable(self): - return map(humanreadableSize, self.Get()) - - -class VariableClAutopartitionRaid(ReadonlyVariable): - """ - Список выбранных для разметки RAID устройств - """ - type = "list" - - def get(self): - selected_dev = self.Get('cl_autopartition_device') - return [ - dev for dev, _type in self.ZipVars( - "cl_autopartition_device_dev", "cl_autopartition_device_type") - if dev in selected_dev and _type.startswith("raid")] - -class VariableClAutopartitionRaidParent(ReadonlyVariable): - """ - Список физических устройств, из которых построены выбранные RAID - """ - type = "list" - - def get(self): - selected_raid = set(self.Get('cl_autopartition_raid')) - return list(set(traverse( - parents.split(',') - for parents in self.select('os_device_parent', - os_device_dev__in=selected_raid)))) - -class VariableClAutopartitionMbr(ReadonlyVariable): - """ - Диски на которые будет установлен загрузчик - """ - type = "list" - - def get(self): - if self.GetBool('cl_autopartition_uefi_set'): - return [] - - selected_raid_devices = self.Get('cl_autopartition_raid_parent') - - mbr = (mbr for mbr in self.select( - 'os_device_mbr', os_device_dev__in=selected_raid_devices) - if mbr) - - var_factory = self.Get('cl_autopartition_factory') - return sorted(set(mbr) | set(var_factory.mbr)) - - -class VariableClAutopartitionEfi(ReadonlyVariable): - """ - Диски на которые будет установлен загрузчик - """ - type = "list" - - def get(self): - if not self.GetBool('cl_autopartition_uefi_set'): - return [] - - var_factory = self.Get('cl_autopartition_factory') - selected_dev = self.Get('cl_autopartition_device') - selected_data = ((dev, _type) for dev, _type in self.ZipVars( - "cl_autopartition_device_dev", "cl_autopartition_device_type") - if dev in selected_dev) - for dev, _type in selected_data: - if _type == "device": - if var_factory.efi: - return var_factory.efi[:1] - elif _type == "raid0": - parents = self.select('os_device_parent', - 
os_device_dev=dev, limit=1).split(',') - efidev = self.select('os_device_efi', os_device_dev__in=parents, - limit=1) - if efidev: - return [efidev] - # прочие raid - else: - parents = self.select('os_device_parent', - os_device_dev=dev, limit=1).split(',') - efidevs = self.select('os_device_efi', - os_device_dev__in=parents) - if efidevs: - return sorted(set(filter(None, efidevs))) - return [] - - -class VariableClAutopartitionParent(ReadonlyVariable): - """ - disk_parent для install переменных - """ - def get(self): - def generate(): - for device in self.Get('cl_autopartition_raid_parent'): - yield device - selected_dev = self.Get('cl_autopartition_device') - for dev, _type in self.ZipVars( - "cl_autopartition_device_dev", - "cl_autopartition_device_type"): - if dev in selected_dev and not _type.startswith("raid"): - yield dev - - return ",".join(sorted(set(generate()))) - -class VariableClAutopartitionUefiSize(Variable): - """ - Size of EF00 partition - """ - value = str(200 * Sizes.M) - - -class VariableClAutopartitionBootSize(Variable): - """ - Size of boot partition - """ - value = str(512 * Sizes.M) - - -class VariableClAutopartitionBiosGrubSize(Variable): - """ - Размер раздела bios_grub для авторазметки - """ - value = str(50 * Sizes.M) - - -class VariableClAutopartitionBindData(ReadonlyTableVariable): - """ - Autopartition bind data - """ - source = ['cl_autopartition_bind_path', - 'cl_autopartition_bind_mountpoint'] - - def get(self, hr=HumanReadable.No): - def generator(): - scheme = self.Get('cl_autopartition_scheme') - if (self.Get('cl_autopartition_set') == "on" and - SchemeOpt.Calculate in scheme): - yield ["/var/calculate/home", "/home"] - return list(generator()) or [[]] - - -class VariableClAutopartitionBindPath(FieldValue, ReadonlyVariable): - """ - Autopartition bind points - """ - type = "list" - source_variable = "cl_autopartition_bind_data" - column = 0 - - def get(self): - return list(super(VariableClAutopartitionBindPath, self).get()) - 
-class VariableClAutopartitionBindMountpoint(FieldValue, ReadonlyVariable): - """ - Autopartition bind points - """ - type = "list" - source_variable = "cl_autopartition_bind_data" - column = 1 - - def get(self): - return list(super(VariableClAutopartitionBindMountpoint, self).get()) diff --git a/libs_crutch/install/variables/disk.py b/libs_crutch/install/variables/disk.py deleted file mode 100644 index b7efc00..0000000 --- a/libs_crutch/install/variables/disk.py +++ /dev/null @@ -1,2638 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import sys -import re -import pty -import fcntl -from subprocess import Popen -from os import path -from itertools import * -from calculate.install.distr import (FlashDistributive, DistributiveError, - IsoDistributive) -from calculate.lib.datavars import (TableVariable, Variable, VariableError, - ReadonlyVariable, ReadonlyTableVariable, - SourceReadonlyVariable, VariableInterface, - HumanReadable) -import calculate.lib.utils.device as device -from calculate.lib.utils.device import (getPartitionSize, - humanreadableSize, - getUUIDDict) -from calculate.install.variables.autopartition import Sizes -from calculate.lib.utils.files import getProgPath -from calculate.lib.utils.mount import isMount, FStab, DiskSpace, Btrfs, \ - BtrfsError, try_umount -from calculate.install.fs_manager import FileSystemManager - -from calculate.lib.cl_lang import setLocalTranslate, _ -from calculate.lib.variables.system import RootType - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class DeviceHelper(VariableInterface): - rePassDevice = re.compile("^/block/(?!%s)" % "|".join(['sr', 'fd', - 'ram', 'loop'])) - - def getBlockDevices(self): - """Get interest devices from sys block path""" - return filter(self.rePassDevice.search, - device.udev.get_block_devices()) - - def separateDevice(self, dev): - """ - Separate device word and number on tuple - - Using for sort. 
(Example: sda2 ("sda",2), md5p1 ("md",5,"p",1) - """ - return map(lambda x: int(x) if x.isdigit() else x, - re.findall('\d+|\D+', dev)) - - def mapUdevProperty(self, var, prop, default): - """Get each element from var through udev [prop]""" - return [device.udev.get_device_info(name=x).get(prop, default) - for x in self.Get(var)] - - def getPerfectName(self, dev, defaultValue=None): - """ - Get dev name or human-readable lvm name - """ - info = device.udev.get_device_info(name=dev) - if 'DM_VG_NAME' in info and 'DM_LV_NAME' in info: - lvmDeviceName = '/dev/{vg}/{lv}'.format(vg=info['DM_VG_NAME'], - lv=info['DM_LV_NAME']) - if path.exists(lvmDeviceName): - return lvmDeviceName - if defaultValue is None: - return info.get('DEVNAME', '') - else: - return defaultValue - - def getLvmName(self, dev): - """ - Get lvm name - """ - return self.getPerfectName(dev, defaultValue="") - - -####################################################### -# Devices variables -####################################################### -class VariableOsDeviceData(ReadonlyTableVariable): - """ - Information about disk devices - """ - type = 'table' - source = ['os_device_dev', - 'os_device_table', - 'os_device_type', - 'os_device_parent', - 'os_device_ssd_set', - 'os_device_virtual_set', - 'os_device_map', - 'os_device_syspath', - 'os_device_name', - 'os_device_size', - 'os_device_mbr', - 'os_device_efi', - 'os_device_fulltype'] - - -class VariableOsDeviceInvalidator(ReadonlyVariable): - """ - Переменная используемая для автоматического сброса значений переменных - если во время работы программы произошли изменения среди блочных устройств - """ - master = None - - def get(self): - """Get device /dev name""" - if self.master is None and not self.Get('cl_ebuild_phase'): - try: - self.master, slave = pty.openpty() - except OSError: - raise VariableError('Failed to create PTY') - udevAdm = getProgPath('/sbin/udevadm') - self.monitor = Popen([udevAdm, "monitor", "--kernel", - 
"--subsystem-match=block"], stdout=slave, - close_fds=True) - os.close(slave) - fl = fcntl.fcntl(self.master, fcntl.F_GETFL) - fcntl.fcntl(self.master, fcntl.F_SETFL, fl | os.O_NONBLOCK) - return "Device invalidator" - - def close(self): - try: - if self.monitor: - self.monitor.kill() - self.monitor.wait() - except Exception: - pass - - def refresh(self): - try: - if self.monitor: - res = os.read(self.master, 65535) - if res: - while len(res) == 65535: - res = os.read(self.master, 65535) - self.parent.Invalidate(self.name) - self.parent.Invalidate('os_install_disk_uuid') - self.parent.Invalidate('os_install_disk_partuuid') - self.parent.Invalidate('os_disk_dev') - device.udev.clear_cache() - except OSError as e: - pass - - -class VariableOsDeviceDev(DeviceHelper, ReadonlyVariable): - """ - Disk devices - """ - type = "list" - re_disk_raid = re.compile("^disk-.*-raid\d+$", re.I) - - def init(self): - pass - - def get(self): - """Get device /dev name""" - self.Get('os_device_invalidator') - - # get devices from block sys directories(discard mem,sr,loop and other) - devices = (x for x in self.getBlockDevices() if x.count('/') == 2) - devnames = device.udev.syspath_to_devname( - x for x in devices - if device.udev.is_device(device.udev.get_device_info(x)) or - self.re_disk_raid.match(device.udev.get_device_type(path=x))) - - return list(sorted((x for x in devnames), - key=self.separateDevice)) - -class VariableOsDeviceFulltype(ReadonlyVariable): - """ - Полный тип - """ - type = "list" - - def get(self): - """Get device /dev name""" - return [ - device.udev.get_device_type(x) - for x in self.Get('os_device_syspath') - ] - - -class VariableOsDeviceType(ReadonlyVariable): - """ - Device type (hdd,cdrom,usb-flash) - """ - type = "list" - - def getType(self, dev): - info = device.udev.get_device_info(name=dev) - if device.udev.is_raid(info): - return info["MD_LEVEL"] - device_name = path.basename(dev) - if device_name in self.usbdevices: - if 
device.sysfs.read(device.sysfs.Path.Block, device_name, - "removable").strip() == "1": - return "flash" - else: - return "usb-hdd" - else: - return "hdd" - - def get(self): - # get usb device by '/dev/disk/by-id'(usb devices contain 'usb' in name) - diskIdPath = '/dev/disk/by-id' - if device.devfs.exists(diskIdPath): - self.usbdevices = \ - map(lambda x: \ - device.devfs.realpath(diskIdPath, x).rpartition('/')[2], - filter(lambda x: x.startswith('usb-'), - device.devfs.listdir(diskIdPath, fullpath=False))) - else: - self.usbdevices = [] - return map(self.getType, - self.Get('os_device_dev')) - - -class VariableOsDeviceParent(ReadonlyVariable): - """ - Базовые устройства RAID массива - """ - type = "list" - - def get(self): - """Get disk parent""" - return [",".join(device.udev.get_all_base_devices(name=dev)) - if "raid" in _type - else "" - for dev, _type in self.ZipVars( - 'os_device_dev', 'os_device_type')] - -class MbrEfiHelper(VariableInterface): - boottype = "" - - def get_boot_partition(self, sysdevice): - basename = path.basename(sysdevice) - for devname in device.udev.syspath_to_devname( - device.sysfs.glob(sysdevice, "%s*" % basename), - dropempty=True): - partid = self.select('os_disk_id', os_disk_dev=devname, - limit=1) - if partid.upper() == self.boottype: - return devname - return "" - - -class VariableOsDeviceMbr(MbrEfiHelper, ReadonlyVariable): - """ - Разделы на устройстве, которые могут быть bios_boot - """ - type = "list" - boottype = "EF02" - - def get(self): - def generator(): - for dev, sysdevice, _type, table in self.ZipVars( - 'os_device_dev', 'os_device_syspath', 'os_device_type', - 'os_device_table'): - if "raid" in _type: - yield "" - elif table == "dos": - yield dev - else: - if self.get_boot_partition(sysdevice): - yield dev - else: - yield "" - return list(generator()) - - -class VariableOsDeviceEfi(MbrEfiHelper, ReadonlyVariable): - """ - Разделы на устройстве, которые могут быть EFI - """ - boottype = "EF00" - type = "list" - - def 
get(self): - def generator(): - for dev, sysdevice, _type, table in self.ZipVars( - 'os_device_dev', 'os_device_syspath', 'os_device_type', - 'os_device_table'): - if "raid" in _type or table == "dos": - yield "" - else: - yield self.get_boot_partition(sysdevice) or "" - return list(generator()) - - -class VariableOsDeviceMap(ReadonlyVariable): - """ - Map number for grub - - Using for legecy grub (DEPRECATATED) - """ - type = "list" - - def get(self): - return map(lambda x: str(x[0]), - enumerate(self.Get('os_device_dev'))) - - -class VariableOsDeviceArraySet(ReadonlyVariable): - """ - Диски массивы (при создании разделов на таких дисках перед - номером раздела добавляется "p": nvme0n1p1 вместо nvme0n11 - """ - type = "list" - devnames = ("nvme", "mmcblk") - - def get(self): - """Get device partition table""" - - def isArray(device, name): - if any(x in device for x in self.devnames): - return "on" - else: - return "off" - - return map(lambda x: isArray(*x), - zip(self.Get('os_device_dev'), - self.Get('os_device_name'))) - - -class VariableOsDeviceSsdSet(ReadonlyVariable): - """ - Ssd property - """ - type = "list" - ssd_names = ("SSD", "OCZ", "PLEXTOR") - udev_property = 'ID_ATA_ROTATION_RATE_RPM' - # считаем, что nvme диски - SSD - devnames = ("nvme",) - - def get(self): - """Get device partition table""" - - def isSsd(dev, name): - prop = device.udev.get_device_info(name=dev) - rpm = prop.get(self.udev_property, None) - if (any(x in dev for x in self.devnames) - or rpm == "0" or any(x in name for x in self.ssd_names)): - return "on" - else: - return "off" - - return map(lambda x: isSsd(*x), - zip(self.Get('os_device_dev'), - self.Get('os_device_name'))) - - -class VariableOsDeviceSyspath(ReadonlyVariable): - """ - Table on device - """ - type = "list" - udev_property = 'DEVPATH' - - def get(self): - """Get device partition table""" - - def getSysPath(dev): - prop = device.udev.get_device_info(name=dev) - syspath = prop.get(self.udev_property, "") - return 
syspath - - return [getSysPath(x) for x in self.Get('os_device_dev')] - - -class VariableOsDeviceVirtualSet(ReadonlyVariable): - """ - Table on device - """ - type = "list" - virtual_names = ("VBOX", "VMWare", "QEMU") - virtual_syspath = ("virtio",) - - def get(self): - """Get device partition table""" - - def isVirtual(device, name, syspath): - if any(x in name for x in self.virtual_names): - return "on" - elif any(x in syspath for x in self.virtual_syspath): - return "on" - else: - return "off" - - return map(lambda x: isVirtual(*x), - zip(self.Get('os_device_dev'), - self.Get('os_device_name'), - self.Get('os_device_syspath'))) - - -class VariableOsDeviceTable(ReadonlyVariable): - """ - Table on device - """ - type = "list" - - def getTableByChild(self, dev): - """Get table by child partitions""" - syspath = device.udev.get_syspath(name=dev) - shortname = path.basename(dev) - for child in device.sysfs.glob(syspath, "%s*" % shortname): - udevinfo = device.udev.get_device_info(path=child) - map_names = {'mbr': 'dos', - 'msdos': 'dos'} - table = (udevinfo.get('ID_PART_ENTRY_SCHEME', '') or - udevinfo.get('UDISKS_PARTITION_SCHEME', '')) - return map_names.get(table, table) - return "" - - def get(self): - """Get device partition table""" - autopartition = self.Get('cl_autopartition_set') == 'on' - autoDevice = self.Get('cl_autopartition_device') - - def getTable(dev): - prop = device.udev.get_device_info(name=dev) - return prop.get('ID_PART_TABLE_TYPE', - self.getTableByChild(dev)) - - def getByAutopartition(dev): - if autopartition and autoDevice == dev: - return self.Get('cl_autopartition_table') - else: - return getTable(dev) - - return map(getByAutopartition, - self.Get('os_device_dev')) - - -class VariableOsDeviceName(ReadonlyVariable): - """ - Name of device - """ - type = "list" - nameless_devices = { - 'nvme': 'NVME', - 'mmcblk': 'Multimedia Card' - } - - def getName(self, dev): - devicepath = device.udev.get_syspath(name=dev) - if devicepath: - vendor = 
device.sysfs.read(devicepath, "device/vendor").strip() - model = device.sysfs.read(devicepath, "device/model").strip() - if vendor or model: - return ("%s %s" % - (vendor, model)).strip() - else: - for k, v in self.nameless_devices.items(): - if k in devicepath: - return v - return "" - else: - return "" - - def get(self): - return map(self.getName, - self.Get('os_device_dev')) - - -class VariableOsDeviceSize(ReadonlyVariable): - """ - Name of device - """ - type = "list" - - def get(self): - """Get device size""" - return map(lambda x: getPartitionSize(name=x, inBytes=True), - self.Get('os_device_dev')) - - def humanReadable(self): - return map(humanreadableSize, - self.Get()) - - -############################################# -# Disk variables -############################################# -class VariableOsDiskData(ReadonlyTableVariable): - """ - Information about current system partition and mounts - """ - source = ['os_disk_dev', - 'os_disk_uuid', - 'os_disk_partuuid', - 'os_disk_name', - 'os_disk_size', - 'os_disk_part', - 'os_disk_format', - 'os_disk_type', - 'os_disk_raid', - 'os_disk_lvm', - 'os_disk_parent', - 'os_disk_id', - 'os_disk_grub'] - - -class VariableOsDiskDev(DeviceHelper, ReadonlyVariable): - """ - List of available partition devices - """ - type = "list" - - def get(self): - # получить блочные утсройства, в списке устройства с таблицей раздела - # разделены '/' - re_parent = re.compile("^/block/[^/]+") - disks = self.getBlockDevices() - parents = {re_parent.search(x).group() - for x in disks if x.count("/") > 2} - dev_names = device.udev.syspath_to_devname( - (x for x in disks if x not in parents), - dropempty=False) - return list(sorted((x for x in dev_names), key=self.separateDevice)) - - def humanReadable(self): - return map(self.getPerfectName, - self.Get()) - - -class VariableOsDiskMount(DeviceHelper, ReadonlyVariable): - """ - List mounted points for current operation system - """ - type = "list" - - def get(self): - disk_hash = 
self.Get('os_disk_dev') - fstab = FStab('/etc/fstab', devs=disk_hash) - rootdev = self.Get('os_root_dev') - return map(lambda x: '/' if x == rootdev else fstab.getBy(eq=x) or "", - self.Get('os_disk_dev')) - - -class VariableOsDiskContent(ReadonlyVariable): - """ - Partition content - """ - type = "list" - - def get(self): - """ - TODO: need to write - """ - return map(lambda x: "", - self.Get('os_disk_dev')) - -class VariableOsDiskFormat(ReadonlyVariable): - """ - Filesystem on device partitions - """ - type = "list" - - def get(self): - """Get current disk filesystem""" - fstab = FStab('/etc/fstab', devs=self.Get('os_disk_dev')) - - def getFormat(dev): - prop = device.udev.get_device_info(name=dev) - fs = prop.get('FSTAB_TYPE') or \ - fstab.getBy(what=fstab.TYPE, eq=dev) or \ - prop.get('ID_FS_TYPE', '') - if fs == "btrfs": - if "compress" in fstab.getBy(what=fstab.OPTS, - eq=dev): - return "btrfs-compress" - try: - if Btrfs(dev).compression != "": - return "btrfs-compress" - except BtrfsError: - pass - return fs - - return map(getFormat, - self.Get('os_disk_dev')) - - -class VariableOsDiskType(ReadonlyVariable): - """ - List type (lvm,raid,partition,disk) - """ - type = "list" - re_raid = re.compile("-raid\d+$") - re_raid_partition = re.compile("-raid\d+-partition$") - - def get(self): - """Get partition scheme""" - types = map(lambda x: (x, device.udev.get_device_type(name=x)), - self.Get('os_disk_dev')) - lvmUsedDisks = {} - raidUsedDisks = {} - - def forMember(typeInfo): - diskName, diskType = typeInfo - if diskName in raidUsedDisks: - diskType = "%s-raidmember(%s)" % (diskType, - raidUsedDisks[diskName]) - if diskName in lvmUsedDisks: - diskType = "%s-lvmmember(%s)" % (diskType, - ",".join( - lvmUsedDisks[diskName])) - return diskName, diskType - - for dev, diskType in types: - prop = device.udev.get_device_info(name=dev) - if self.re_raid.search(diskType): - raiddevice = prop.get('DEVPATH', '') - elif self.re_raid_partition.search(diskType): - raiddevice = 
path.dirname(prop.get('DEVPATH', '')) - else: - raiddevice = None - if raiddevice: - raiddev = device.udev.get_devname(raiddevice) - for x in device.raid.devices(raiddevice): - raidUsedDisks[x] = raiddev - if diskType.endswith("lvm"): - for x in device.lvm.used_partitions(prop.get('DM_VG_NAME', ''), - prop.get('DM_LV_NAME', '')): - if x in lvmUsedDisks: - lvmUsedDisks[x].append(dev) - else: - lvmUsedDisks[x] = [dev] - return map(lambda x: x[1], - map(forMember, - types)) - - -class VariableOsDiskRaid(ReadonlyVariable): - """ - Raids which this partition constructed - """ - type = "list" - - def generateRaid(self): - for disktype in self.Get('os_disk_type'): - if "raidmember" in disktype: - yield disktype.rpartition('(')[2][:-1] - else: - yield "" - - def get(self): - return list(self.generateRaid()) - - -class VariableOsDiskLvm(DeviceHelper, ReadonlyVariable): - """ - LVM vgname and lvname - """ - type = "list" - - def get(self): - """Get each element from var through udev [prop]""" - return map(self.getLvmName, - self.Get('os_disk_dev')) - - -class VariableOsDiskUuid(DeviceHelper, ReadonlyVariable): - """ - List uudi for partition devices - """ - - type = "list" - - def get(self): - return self.mapUdevProperty('os_disk_dev', 'ID_FS_UUID', '') - -class VariableOsDiskPartuuid(DeviceHelper, ReadonlyVariable): - """ - List uudi for partition devices - """ - - type = "list" - - def get(self): - return self.mapUdevProperty('os_disk_dev', 'ID_PART_ENTRY_UUID', '') - - -class VariableOsDiskParent(ReadonlyVariable): - """ - List parent deivces for partition - """ - type = "list" - - def get(self): - """Get disk parent""" - return [",".join(device.udev.get_disk_devices(name=x)) - for x in self.Get('os_disk_dev')] - - -class VariableOsDiskId(DeviceHelper, ReadonlyVariable): - """ - Partition's system id - """ - type = "list" - - def get(self): - """Get disk id""" - mapTypeUUID = {'ebd0a0a2-b9e5-4433-87c0-68b6b72699c7': '0700', - '0657fd6d-a4ab-43c4-84e5-0933c84b4f4f': 
'8200', - 'a19d880f-05fc-4d3b-a006-743f0f84911e': 'FD00', - '21686148-6449-6e6f-744e-656564454649': 'EF02', - 'c12a7328-f81f-11d2-ba4b-00a0c93ec93b': 'EF00', - '0fc63daf-8483-4772-8e79-3d69d8477de4': '8300'} - return map(lambda x: mapTypeUUID.get(x, x), - map(lambda x: x.rpartition("x")[2], - self.mapUdevProperty('os_disk_dev', 'ID_PART_ENTRY_TYPE', - ''))) - - -class VariableOsDiskGrub(ReadonlyVariable): - """ - List grub id for partition devices - """ - type = "list" - - def get(self): - """Get disk grub map""" - devicesMap = dict(zip(self.Get('os_device_dev'), - self.Get('os_device_map'))) - - def getGrubMap(devParent): - dev, disktype, parent = devParent - # grub id вычисляем только для разделов расположенных на диске - # (исключаются lvm, raid и прочие абстракции) - if disktype != "disk-partition": - return "" - prop = device.udev.get_device_info(name=dev) - partnum = int(prop.get('ID_PART_ENTRY_NUMBER', 0)) - if parent in devicesMap.keys() and partnum: - return "%s,%d" % (devicesMap[parent], partnum - 1) - else: - return "" - - return map(getGrubMap, - zip(self.Get('os_disk_dev'), - self.Get('os_disk_type'), - self.Get('os_disk_parent'))) - - -class VariableOsDiskPart(ReadonlyVariable): - """ - Type of partition devices - If msdos then(primary, extended or logical) - If gpt then gpt - """ - type = "list" - - def get(self): - def generator(): - for disk_dev, disk_type in self.ZipVars( - 'os_disk_dev', 'os_disk_type'): - if disk_type.endswith("-partition"): - yield device.udev.get_partition_type(name=disk_dev) - else: - yield "" - return list(generator()) - - -class VariableOsDiskSize(ReadonlyVariable): - """ - Partition size - """ - type = "list" - - def get(self): - """Get disk size""" - return map(lambda x: getPartitionSize(name=x, inBytes=True), - self.Get('os_disk_dev')) - - def humanReadable(self): - return map(humanreadableSize, - self.Get()) - - -class VariableOsDiskName(DeviceHelper, ReadonlyVariable): - """ - Label of partitions - """ - type = "list" 
- - def get(self): - """Get disk label""" - return self.mapUdevProperty('os_disk_dev', 'ID_FS_LABEL', '') - - -class VariableOsDiskOptions(ReadonlyVariable): - """ - List mount options - """ - type = "list" - - def get(self): - fstab = FStab('/etc/fstab', devs=self.Get('os_disk_dev')) - - def getFormat(dev): - return fstab.getBy(what=fstab.OPTS, eq=dev) - - return map(getFormat, - self.Get('os_disk_dev')) - - -################################################ -# Bind mount points -################################################ -class VariableOsBindData(ReadonlyTableVariable): - """ - Table of bind mount points - """ - source = ['os_bind_path', - 'os_bind_mountpoint'] - - -class VariableOsBindPath(ReadonlyVariable): - """ - List source bind path - """ - type = "list" - - def get(self): - fstab = FStab('/etc/fstab', devs=self.Get('os_disk_dev')) - return fstab.getBy(what=fstab.NAME, where=fstab.OPTS, - contains="bind", allentry=True) - - -class VariableOsBindMountpoint(ReadonlyVariable): - """ - Mountpoints for directories bind - """ - type = "list" - - def get(self): - fstab = FStab('/etc/fstab', devs=self.Get('os_disk_dev')) - return fstab.getBy(what=fstab.DIR, where=fstab.OPTS, - contains="bind", allentry=True) - - -###################################################################### -# Userselect partion parameters -###################################################################### -class LocationHelper(VariableInterface): - """ - Location variable - """ - - def uncompatible(self): - """ - Network setting up unavailable for flash installation - """ - if self.Get('cl_autopartition_set') == "on": - return \ - _("The layout is not available with autopartitioning") - return "" - - -class VariableOsLocationBriefData(LocationHelper, TableVariable): - source = ["os_location_source", - "os_location_dest", - "os_location_format", - "os_location_perform_format", - "os_location_size"] - - orig_source = [('os_install_disk_dev', - 'os_install_disk_mount', - 
'os_install_disk_format', - 'os_install_disk_perform_format', - 'os_install_disk_size'), - ('os_install_bind_path', - 'os_install_bind_mountpoint', '', '', '')] - - def init(self): - self.label = _("Mount points") - - def get_autopartition(self, hr=HumanReadable.No): - # при авторазметке получаем только информацию о - # /boot/efi разделах - if self.GetBool('cl_autopartition_uefi_set'): - boot = [[dev, mp, fs, _format, size] - for dev, mp, fs, _format, size in self.ZipVars( - 'os_install_disk_dev', - 'os_install_disk_mount', - 'os_install_disk_format', - 'os_install_disk_perform_format', - 'os_install_disk_size', humanreadable=hr) - if mp.startswith('/boot/efi')] - else: - boot = [] - # исключаем из устройств авторазметки информацию о efi разделах - # так как она не достоверная - devices = [[dev, mp, fs, _format, size] - for dev, mp, fs, _format, size in self.ZipVars( - 'cl_autopartition_disk_dev_full', - 'cl_autopartition_disk_mount_full', - 'cl_autopartition_disk_format_full', - 'cl_autopartition_disk_perform_format_full', - 'cl_autopartition_disk_size_full', - humanreadable=hr) - if not mp.startswith('/boot/efi') - ] - binds = [[dev, mp, "", "", ""] - for dev, mp in self.ZipVars( - 'cl_autopartition_bind_path', - 'cl_autopartition_bind_mountpoint', humanreadable=hr)] - autodevs = self.Get('cl_autopartition_disk_dev_full') - bootdevs = [x[0] for x in boot] - - def keysort(dev): - if dev in autodevs: - return autodevs.index(dev), -1 - else: - return -1, bootdevs.index(dev) - - return sorted(boot + devices, - key=lambda x: keysort(x[0])) + binds or [[]] - - def get_manual(self, hr=HumanReadable.No): - devs = self.Get('os_disk_dev') - - def keysort(dev): - if dev in devs: - return devs.index(dev), -1 - else: - return -1, dev - - devices = map(list, self.ZipVars( - 'os_install_disk_dev', - 'os_install_disk_mount', - 'os_install_disk_format', - 'os_install_disk_perform_format', - 'os_install_disk_size', - humanreadable=hr)) - - binds = [[dev, mp, "", "", ""] - for dev, 
mp in self.ZipVars( - 'os_install_bind_path', - 'os_install_bind_mountpoint', humanreadable=hr)] - return sorted(devices, - key=lambda x: keysort(x[0])) + binds or [[]] - - def get(self, hr=HumanReadable.No): - if self.GetBool('cl_autopartition_set'): - return self.get_autopartition(hr) - else: - return self.get_manual(hr) - - - -class VariableOsLocationData(LocationHelper, TableVariable): - """ - Select installation disk variable - """ - opt = ["--disk", "-d"] - metavalue = 'DISK[[:MP[:FS[:FORMAT]]]]' - untrusted = True - source = ["os_location_source", - "os_location_dest", - "os_location_format", - "os_location_perform_format", - "os_location_size"] - - check_after = ["os_install_root_type"] - - def init(self): - self.help = (_("DISK bound for installation will be mounted to the " - "MP directory. To create a bind mount point, you have " - "to specify the source directory as DISK") + ". " + - _("To change the filesystem, you have to specify it as FS. " - "FORMAT is used for the specifying the need to format " - "partition or not")) - self.label = _("Mount points") - - def set(self, value): - return sorted(value, key=lambda x: x and x[0]) - - -class VariableOsLocationSource(LocationHelper, DeviceHelper, Variable): - """ - Source disk or directory - """ - type = "choiceedit-list" - - def init(self): - self.label = _("Disk or directory") - - def availDevs(self, choice=False): - """ - Available devices - """ - if self.Get('cl_install_type') == 'flash': - flashes = self.Select('os_device_dev', - where='os_device_type', - eq="flash") - return [disk_dev for disk_dev, disk_parent, disk_type in - self.ZipVars("os_disk_dev", "os_disk_parent", - "os_disk_type") - if disk_type == "disk-partition" and disk_parent in flashes] - else: - if choice: - return self.Get('os_disk_dev') + self.Get('os_bind_path') - else: - dev_from = self.Get('cl_install_dev_from') - return [ - disk_dev for disk_dev, disk_mount in self.ZipVars( - "os_disk_dev", "os_disk_mount") - if ((disk_mount not 
in ("", "/") - or disk_dev == dev_from) and - not disk_mount.startswith("/boot/efi")) - ] + self.Get('os_bind_path') - - def get(self): - if self.Get('cl_autopartition_set') == "on": - return ([device.udev.get_devname(name=x) - for x in self.Get('cl_autopartition_disk_dev')] + - self.Get('cl_autopartition_bind_path')) - else: - return self.availDevs() - - def set(self, value): - def normpath(val): - if type(val) == str and val: - return path.normpath(val) - return val - - return map(normpath, value) - - def choice(self): - return map(lambda x: (x, self.getPerfectName(x) or x), - self.fixOsDiskDev(self.availDevs(choice=True))) + [("", "")] - - def fixOsDiskDev(self, sourcelist=None): - """ - Fix os_disk_dev by autopartitions - """ - if not sourcelist: - sourcelist = self.Get('os_disk_dev') - scheme = self.Get('cl_autopartition_set') == "on" - if scheme: - autopartition_devices = self.Get('cl_autopartition_device') - exclude = { - disk_dev - for disk_dev, disk_parent in self.ZipVars("os_disk_dev", - "os_disk_parent") - if any(x in autopartition_devices - for x in disk_parent.split(',')) - } - appendDisks = (self.Get('cl_autopartition_disk_dev') + - self.Get('cl_autopartition_bind_path')) - return [x for x in sourcelist if x not in exclude] + appendDisks - else: - return sourcelist - - def check(self, value): - """Check set location source""" - ################################ - # check of device specifing - ################################ - if not value: - raise VariableError( - _("To install the system, you need to specify the root device")) - ########################### - # check wrong dev - ########################### - disks = filter(lambda x: x.startswith('/dev/'), value) - # get original /dev names - cnDisks = (device.udev.get_device_info(name=x).get('DEVNAME', x) - for x in disks) - wrongDevices = list(set(cnDisks) - - set(self.fixOsDiskDev())) - if wrongDevices: - raise VariableError(_("Wrong device '%s'") % wrongDevices[0]) - wrongSource = filter(lambda x: 
x and not x.startswith('/'), value) - if wrongSource: - raise VariableError( - _("Wrong bind mount point '%s'") % wrongSource[0]) - ########################## - # detect duplicate devices - ########################## - dupDevices = list(set(filter(lambda x: disks.count(x) > 1, - disks))) - if dupDevices: - raise VariableError( - _("Device '%s' is used more than once") % dupDevices[0]) - -class VariableClRootSizeMin(Variable): - """ - Минимальнй размер root раздела - """ - value_format = "{cl_autopartition_root_size_min}" - -class VariableOsLocationDest(LocationHelper, Variable): - """ - Desination directory of install disk data - """ - type = "choiceedit-list" - - def init(self): - self.label = _("Mount point") - - def get(self): - if self.Get('cl_autopartition_set') == "on": - return self.Get('cl_autopartition_disk_mount') + \ - self.Get('cl_autopartition_bind_mountpoint') - else: - source = self.Get('os_location_source') - installFrom = self.Get('cl_install_dev_from') - singleDevice = self.Get('os_install_disk_single') - - def installMountPoint(info): - dev, mount = info - if self.Get('cl_action') == 'system': - if self.Get('cl_install_type') == 'flash': - if dev == singleDevice: - return "/" - else: - return "" - else: - if dev == installFrom: - return "/" - elif mount == "/": - return "" - return mount - - return map(installMountPoint, - filter(lambda x: x[0] in source, - zip(self.Get('os_disk_dev'), - self.Get('os_disk_mount')) + \ - zip(self.Get('os_bind_path'), - self.Get('os_bind_mountpoint')))) - - def set(self, value): - """Add abilitiy not specify root""" - - def normpath(val): - if type(val) == str and val: - return path.normpath(val) - return val - - value = map(normpath, value) - return map(lambda x: x or "/", value) - - def choice(self): - if self.Get('cl_install_type') == 'flash': - return ["/", ""] - else: - return ['/', '/boot', '/var/calculate', '/home', - '/usr', '/var', '/tmp', 'swap', ''] - - def check(self, value): - """Check set location 
source""" - if self.Get('cl_autopartition_set') == "on": - return - ################################ - # check size for root device - ################################ - minroot = int(self.Get('cl_root_size_min')) - osInstallRootType = self.Get('os_install_root_type') - if osInstallRootType != "flash" and \ - not "/usr" in value: - for mp, size in filter(lambda x: x[0] == '/' and x[1].isdigit() and \ - int(x[1]) < minroot, - izip(value, - self.Get("os_location_size"))): - raise VariableError( - _("The root partition should be at least %s") % "7 Gb") - source = self.Get("os_location_source") - ################################ - # check of root device specifing - ################################ - if not source: - return - if not filter(lambda x: x == "/", value): - raise VariableError(_("To install the system, you need to " - "specify the root device")) - ################################ - disks = filter(lambda x: x[0].startswith('/dev/') and x[1], - zip(source, value)) - disksDevs = map(lambda x: x[0], disks) - binds = filter(lambda x: not x[0].startswith('/dev/') and x[1], - zip(source, value)) - ########################## - # detect efi specifing - ########################## - reEfi = re.compile("/u?efi", re.I) - if any(reEfi.search(x) for x in value): - if self.Get('cl_client_type') == 'gui': - raise VariableError( - _("Please specify EFI partition by UEFI parameter in " - "advanced options")) - else: - raise VariableError( - _("Please specify EFI partition by UEFI option")) - ########################## - # detect duplicate mps - ########################## - dupMP = list(set(filter(lambda x: value.count(x) > 1, - filter(lambda x: x and x != "swap", - value)))) - if dupMP: - raise VariableError( - _("Mount point '%s' is used more than once") % dupMP[0]) - ######################### - # detect wrong bind - ######################### - wrongBind = filter(lambda x: not x[0].startswith("/") or - not x[1].startswith("/"), - binds) - if wrongBind: - raise VariableError( - 
_("Incorrect mount point (bind '%(bindSrc)s' to " - "'%(bindDst)s')") \ - % {'bindSrc': wrongBind[0][0], - 'bindDst': wrongBind[0][1]}) - ######################################### - # Check '/' in start path of dest pointst - ######################################### - wrongMP = filter(lambda x: x and not x.startswith("/") and x != "swap", - value) - if wrongMP: - raise VariableError(_("Wrong mount point '%s'") % wrongMP[0]) - ######################################### - # Check using current root - ######################################### - rootDev = self.Get('os_root_dev') - if rootDev in self.Get('os_install_disk_dev'): - raise VariableError( - _("You may not use the current root partition %s for " - "installation") % rootDev) - ################################# - # detect using extended partition - ################################# - extendedPartitions = self.Select('os_install_disk_dev', - where='os_install_disk_part', - eq='extended', limit=1) - if extendedPartitions: - raise VariableError( - _("Unable to use extended partition %s for installation") % - extendedPartitions) - ########################## - # detect using CDROM disks - ########################## - cdromPartitions = self.Select('os_install_disk_dev', - where='os_install_disk_type', - like='cdrom', limit=1) - if cdromPartitions: - raise VariableError(_("Unable to use CDROM %s for installation") % - cdromPartitions) - ############################### - # check cross bind mount points - ############################### - DEVICE, MP = 0, 1 - srcMountPoints = map(lambda x: x[DEVICE], binds) - destMountPoints = map(lambda x: x[MP], binds) - wrongBind = filter(lambda x: x in destMountPoints, srcMountPoints) - if wrongBind: - incompBind = filter(lambda x: x[1] == wrongBind[0], - zip(srcMountPoints, destMountPoints)) - raise VariableError( - _("Source directory %(src)s is already used " - "for binding '%(bindSrc)s' to '%(bindDst)s'") \ - % {'src': wrongBind[0], - 'bindSrc': incompBind[0][0], - 'bindDst': 
incompBind[0][1]}) - ####################################### - # check multipart for flash and builder - ####################################### - osInstallRootType = self.Get('os_install_root_type') - if osInstallRootType == "flash": - if filter(lambda x: x and x != '/', value): - raise VariableError( - _("Flash install does not support multipartition mode")) - if filter(lambda x: x == "swap", value): - raise VariableError( - _("Flash install does not support swap disks")) - ######################################## - # check install on member of RAID or LVM - ######################################## - installTypes = zip(self.Get('os_install_disk_dev'), - self.Get('os_install_disk_type')) - for checkType in ("raid", "lvm"): - memberData = filter(lambda x: checkType + "member" in x[1], - installTypes) - if memberData: - raise VariableError( - _("Unable to use {part} partition used by active " - "{typepart} for installation").format( - typepart=checkType.upper(), - part=memberData[0][0])) - - -class VariableOsLocationFormat(LocationHelper, Variable): - type = "choice-list" - - def init(self): - self.label = _("Filesystem") - - def get(self): - if self.Get('cl_autopartition_set') == "on": - return self.Get('cl_autopartition_disk_format') + \ - map(lambda x: "", self.Get('cl_autopartition_bind_path')) - else: - mount = self.Get("os_location_dest") - source = self.Get("os_location_source") - value = [""] * len(source) - return map(self.defaultFormat(), - zip(source, mount, value)) - - def choice(self): - if self.Get('cl_install_type') == "flash": - return ["", "vfat"] - else: - return [""] + self.Get('os_format_type') - - def defaultFormat(self): - """Describe default value for filesystem""" - diskFormat = dict(zip(self.Get('os_disk_dev'), - (self.Get('os_disk_format')))) - osInstallRootType = self.Get('os_install_root_type') - availFS = set(self.Select('os_format_type', - where='os_format_use', - eq='yes')) - allAvailFS = self.Get('os_format_type') - - default_format = 
None - if self.Get('os_root_type_ext') in RootType.HDD: - root_format = self.select( - 'os_disk_format', os_disk_mount="/", limit=1) - autoformat = self.Get('cl_autopartition_default_format') - for _format in (root_format, autoformat): - if _format and self.select( - 'os_format_use', os_format_type=_format, limit=1) == "yes": - default_format = _format - break - else: - root_format = None - - def wrap(info): - dev, mount, fs = info - if mount and not fs and dev.startswith('/dev/'): - if mount == "swap": - return "swap" - elif mount.startswith('/boot/efi'): - return "vfat" - if mount == "/": - if root_format and root_format in allAvailFS: - return root_format - else: - if dev in diskFormat and diskFormat[dev] in allAvailFS: - if mount.count('/') == 1 or mount == "/var/calculate": - if FileSystemManager.checkFSForTypeMount( - diskFormat[dev], - osInstallRootType, mount): - return diskFormat[dev] - else: - return diskFormat[dev] - if default_format: - return default_format - return FileSystemManager.get_default_fs(self, osInstallRootType) - return fs - - return wrap - - def set(self, value): - value = map(lambda x: "vfat" if x == "uefi" else x, value) - mount = self.Get("os_location_dest") - source = self.Get("os_location_source") - return map(self.defaultFormat(), - zip(source, mount, value)) - - def check(self, value): - osInstallRootType = self.Get('os_install_root_type') - devMpFs = zip(self.Get('os_location_source'), - self.Get('os_location_dest'), value) - for dev, mp, fs in devMpFs: - if dev.startswith('/') and not dev.startswith('/dev/') and fs: - raise VariableError( - _("The bind mount point does not use filesystem")) - # check compatible fs for mount point only root dirs - if dev.startswith('/dev/') and mp and (mp.count('/') == 1 or - mp in ( - '/var/calculate', - '/boot/efi')): - if not FileSystemManager.checkFSForTypeMount(fs, - osInstallRootType, - mp): - raise VariableError( - _("The filesystem for '%(mp)s' should not be '%(opt)s'") - % {'mp': mp, 
'opt': fs} + " " + - _("for {typedisk} install").format( - typedisk=osInstallRootType)) - if mp == "swap" and fs != "swap": - raise VariableError( - _( - "The swap partition {dev} must be formatted as swap").format( - dev=dev)) - - -class VariableOsLocationPerformFormat(LocationHelper, Variable): - type = "boolauto-list" - - def init(self): - self.label = _("Format") - - def get(self): - if self.Get('cl_autopartition_set') == "on": - return map(lambda x: "on", - self.Get('cl_autopartition_disk_format')) + \ - map(lambda x: "", - self.Get('cl_autopartition_bind_path')) - else: - mount = self.Get("os_location_dest") - source = self.Get("os_location_source") - fs = self.Get("os_location_format") - value = [""] * len(source) - return map(self.defaultPerformFormat(), - zip(source, mount, fs, value)) - - fixNtfs = lambda self, x: {'ntfs-3g': 'ntfs'}.get(x, x) - - def is_force_param(self): - return "--force" in self.Get("cl_console_args") - - def check(self, value): - """Check perform format - - Check what format will perform for need partition. - At example on change filesystem on partition. 
- """ - DEV, MP, FS, FORMAT = 0, 1, 2, 3 - info = zip(self.Get('os_location_source'), - self.Get('os_location_dest'), - self.Get('os_location_format'), - value) - diskFormat = dict(zip(self.Get('os_disk_dev'), - (self.Get('os_disk_format')))) - diskMount = dict(zip(self.Get('os_disk_dev'), - (self.Get('os_disk_mount')))) - unavailFS = set(self.Select('os_format_type', - where='os_format_use', - eq="no")) - fixNtfs = self.fixNtfs - for dev, mp, fs, isformat in info: - # should format if change fs or partition is root, but non flash - partitionMustFormat = \ - fixNtfs(diskFormat.get(dev, fs)) != fixNtfs(fs) or \ - (mp == '/' and - self.Get('os_install_root_type') != 'flash') - # if entry has mount point AND - # partition must was formated - if mp and partitionMustFormat: - # partition use in current system - if diskMount.get(dev, ''): - raise VariableError( - _("{device} must but cannot be formatted, as it is " - "mounted to {mountpoint} on the current system").format( - device=dev, mountpoint=diskMount.get(dev, ''))) - if isMount(dev): - if not self.is_force_param() or not try_umount(dev): - raise VariableError( - _("Please unmount {device}, as it will be used for " - "installation").format(device=dev)) - # but user select non-format - if not self.isTrue(isformat): - raise VariableError( - _("{device} must be formatted").format( - device=dev)) - if self.isTrue(isformat): - if not mp: - raise VariableError( - _("No need to format unused device {dev}").format( - dev=dev)) - if fs in unavailFS: - raise VariableError( - _("Filesystem '%s' is not available") % fs) - if not dev.startswith('/dev/'): - raise VariableError( - _("Bind mount points should not be formatted")) - elif diskMount.get(dev, "") and isformat: - raise VariableError( - _( - "{device} must but cannot be formatted, as it is mounted to {mountpoint} on the current system" - ).format( - device=dev, mountpoint=diskMount.get(dev, ''))) - elif isMount(dev): - if not self.is_force_param() or not try_umount(dev): 
- raise VariableError( - _("Please unmount disk {device} to " - "use it for install").format(device=dev)) - - def defaultPerformFormat(self): - diskFormat = dict(zip(self.Get('os_disk_dev'), - (self.Get('os_disk_format')))) - - def wrap(info): - source, dest, fs, isformat = info - fixNtfs = self.fixNtfs - if not isformat and source.startswith('/dev/'): - if dest == '/': - return "on" - if dest and fixNtfs(diskFormat.get(source, fs)) != fixNtfs(fs): - return "on" - return isformat or ("off" if source.startswith('/dev/') else "") - - return wrap - - def set(self, value): - """Default values for perform format""" - value = Variable.set(self, value) - DEV, MP, FS, FORMAT = 0, 1, 2, 3 - info = zip(self.Get('os_location_source'), - self.Get('os_location_dest'), - self.Get('os_location_format'), - value) - return map(self.defaultPerformFormat(), - map(lambda x: [x[DEV], x[MP], x[FS], ""] \ - if x[FORMAT] == "off" and not x[DEV].startswith("/dev/") - else x, - info)) - - -class VariableOsLocationSize(LocationHelper, SourceReadonlyVariable): - """ - Location size - """ - type = "list" - indexField = "os_location_source" - - def init(self): - self.label = _("Size") - - def getMap(self): - mapDevSize = dict(self.ZipVars('os_disk_dev', 'os_disk_size')) - mapDevSize.update( - zip(self.Get('cl_autopartition_disk_dev'), - self.Get('cl_autopartition_disk_size'))) - return mapDevSize - - def get(self): - return self.get_sizes(self.getMap().get) - - def get_sizes(self, method): - devices = (device.udev.get_devname(name=x) - for x in self.Get(self.indexField)) - mapped = (method(x) for x in devices) - return [x or "" for x in mapped] - - def getMapHumanReadable(self): - mapDevSize = dict(zip(self.Get('os_disk_dev'), - self.Get('os_disk_size', humanreadable=True))) - mapDevSize.update( - zip(self.Get('cl_autopartition_disk_dev'), - self.Get('cl_autopartition_disk_size', humanreadable=True))) - return mapDevSize - - def humanReadable(self): - return 
self.get_sizes(self.getMapHumanReadable().get) - - -class VariableClUuidSet(Variable): - """ - Use or not UUID for /etc/fstab - """ - type = "bool" - opt = ["--uuid"] - value = "on" - - def init(self): - self.label = _("Use UUID") - self.help = _("use UUID") - - def uncompatible(self): - """ - Unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return _("Impossible to use UUID for Flash install") - - -############################################################# -# Install disk parameters -############################################################# - -class VariableOsInstallDiskData(ReadonlyTableVariable): - """ - Table of install disk params - """ - source = ["os_install_disk_dev", - "os_install_disk_mount", - "os_install_disk_format", - "os_install_disk_perform_format", - "os_install_disk_options", - "os_install_disk_id", - "os_install_disk_uuid", - "os_install_disk_use", - "os_install_disk_name", - "os_install_disk_size", - "os_install_disk_type", - "os_install_disk_part", - "os_install_disk_parent"] - - -class VariableOsInstallDiskParent(SourceReadonlyVariable): - """ - Partition parent devices using for install - """ - type = "list" - indexField = "os_install_disk_dev" - - def getMap(self): - diskParent = dict(self.ZipVars('os_disk_dev', 'os_disk_parent')) - # replace value for autopartition - if self.Get('cl_autopartition_set') == 'on': - disk_parent = self.Get('cl_autopartition_parent') - for disk_dev in self.Get('cl_autopartition_disk_dev'): - diskParent[disk_dev] = disk_parent - return diskParent - - humanReadable = Variable.humanReadable - - -class VariableOsInstallDiskDevBase(DeviceHelper, ReadonlyVariable): - """ - Variable using for resolv cyclic deps - """ - type = "list" - - def get(self): - if self.Get('cl_install_type') == 'flash': - disk = self.Get('os_install_disk_single') - if disk: - return [device.udev.get_devname(name=disk)] - return [] - - return [dev - for dev, mount in 
self.ZipVars('os_location_source', - 'os_location_dest') - if (dev.startswith("/dev") and mount and - not mount.startswith("/boot/efi"))] - - -class VariableOsInstallDiskParentBase(VariableOsInstallDiskParent): - """ - Partition parent devices using for install - """ - type = "list" - indexField = "os_install_disk_dev_base" - humanReadable = Variable.humanReadable - - -class VariableOsInstallDiskDev(ReadonlyVariable, DeviceHelper): - """ - Disks for installation - """ - type = "list" - - def get(self): - return (self.Get('os_install_uefi') + - self.Get('os_install_disk_dev_base')) - - def humanReadable(self): - return map(lambda x: self.getPerfectName(x, defaultValue=x), - self.Get()) - - -class VariableOsInstallDiskUuid(ReadonlyVariable): - """ - Uudi for install - """ - type = "list" - - def get(self): - diskDev = self.Get('os_install_disk_dev') - hashUUID = getUUIDDict(revers=True) - return map(lambda x: hashUUID.get(x, "")[5:], diskDev) - -class VariableOsInstallDiskPartuuid(ReadonlyVariable): - """ - Uudi for install - """ - type = "list" - - def mapUdevProperty(self, var, prop, default): - """Get each element from var through udev [prop]""" - return [device.udev.get_device_info(name=x).get(prop, default) - for x in self.Get(var)] - - def get(self): - diskDev = self.Get('os_install_disk_dev') - return self.mapUdevProperty('os_install_disk_dev', 'ID_PART_ENTRY_UUID', '') - -class VariableOsInstallDiskMountBase(ReadonlyVariable): - """ - List mounted points for installed system - - Variable use for resolv cyclic deps by UEFI vars - """ - type = "list" - - def get(self): - if self.Get('cl_install_type') == 'flash': - disk = self.Get('os_install_disk_single') - if disk: - return ["/"] - return [] - return [mount - for dev, mount in self.ZipVars('os_location_source', - 'os_location_dest') - if (dev.startswith("/dev") and mount and - not mount.startswith("/boot/efi"))] - - -class VariableOsInstallDiskMount(ReadonlyVariable): - """ - List mounted points for installed 
system - """ - type = "list" - - def generate_uefi_mountpoints(self): - yield "/boot/efi" - for i in range(2, 20): - yield "/boot/efi%d" % i - - def get(self): - """Get install disk dest""" - mps = self.generate_uefi_mountpoints() - return ([next(mps) for x in self.Get('os_install_uefi')] + - self.Get('os_install_disk_mount_base')) - - -class VariableOsInstallDiskUse(ReadonlyVariable): - """ - /dev/sd or UUID= list (by cl_uuid_set) - """ - type = "list" - - def get(self): - """Get real id (by cl_uuid_set) device""" - if self.Get('cl_uuid_set') == "on": - return map(lambda x: "UUID=%s" % x[0] if x[0] else x[1], - zip(self.Get('os_install_disk_uuid'), - self.Get('os_install_disk_dev'))) - else: - return self.Get('os_install_disk_dev') - - -class VariableOsInstallDiskOptions(ReadonlyVariable): - """ - List mount options of installed os - """ - type = "list" - - def get(self): - ssd_devices = { - dev for dev, ssd in self.ZipVars('install.os_device_dev', - 'install.os_device_ssd_set') - if ssd == 'on' - } - old_options = { - dev: options for dev, options in self.ZipVars('os_disk_dev', - 'os_disk_options') - if options - } - - def generator(): - for disk_dev, disk_format, disk_parent in self.ZipVars( - 'os_install_disk_dev', - 'os_install_disk_format', - 'os_install_disk_parent'): - if disk_dev in old_options: - yield old_options[disk_dev] - else: - all_ssd = all(x in ssd_devices - for x in disk_parent.split(',')) - compression = self.Get('os_install_btrfs_compression') - yield FileSystemManager.getDefaultOpt(disk_format, all_ssd, - compression) - - return list(generator()) - - -class VariableOsInstallDiskFormat(ReadonlyVariable): - """ - Install list filesystem for partition devices - """ - type = "choice-list" - - def get(self): - _format = [fs for dev, mp, fs in self.ZipVars('os_location_source', - 'os_location_dest', - 'os_location_format') - if dev.startswith('/dev/') and mp] - efiformat = ['vfat' for x in self.Get('os_install_uefi')] - return efiformat + _format - 
- -class VariableOsInstallDiskPerformFormat(ReadonlyVariable): - """ - List need for format - """ - type = "bool-list" - - def get(self): - _format = map(lambda x: x[2], - filter(lambda x: x[0].startswith('/dev/') and x[1], - zip(self.Get('os_location_source'), - self.Get('os_location_dest'), - self.Get('os_location_perform_format')))) - if self.GetBool('cl_autopartition_set'): - efiformat = ['on' for x in self.Get('os_install_uefi')] - res = efiformat + _format - else: - vfatdevs = self.select('os_disk_dev', os_disk_format="vfat") - res = ["off" if dv in vfatdevs else "on" - for dv in self.Get('os_install_uefi')] + _format - return res - - -class VariableOsInstallDiskId(ReadonlyVariable): - """ - Install partition's system id - """ - type = "list" - - def get(self): - def generator(): - for (disk_dev, disk_part, disk_mount, - disk_format) in self.ZipVars('os_install_disk_dev', - 'os_install_disk_part', - 'os_install_disk_mount', - 'os_install_disk_format'): - if disk_part in ("gpt", "primary", "extended", "logical"): - if disk_part == "gpt": - if disk_mount.startswith("/boot/efi"): - disk_format = "uefi" - else: - disk_part = "msdos" - fsinfo = FileSystemManager.supportFS.get( - disk_format, FileSystemManager.default_param) - yield fsinfo.get(disk_part) - else: - yield "" - - return list(generator()) - - -class VariableOsInstallDiskName(Variable): - """ - New labels for disk - """ - type = "list" - - def get(self): - diskLabel = dict(self.ZipVars('os_disk_dev', 'os_disk_name')) - - def changeLabel(info): - dev, mount = info - if mount == '/': - return "%s-%s" % (self.Get('os_install_linux_shortname'), - self.Get('os_install_linux_ver')) - else: - return diskLabel.get(dev, '') - - return map(changeLabel, - self.ZipVars('os_install_disk_dev', - 'os_install_disk_mount')) - - -class VariableOsInstallDiskSize(SourceReadonlyVariable): - """ - New partition sizes (for feature change partition) - """ - type = "list" - indexField = 'os_install_disk_dev' - - def 
getMap(self): - if self.GetBool("cl_autopartition_set"): - return { - dev: size for dev, size in chain( - self.ZipVars('os_disk_dev', 'os_disk_size'), - self.ZipVars('os_location_source', 'os_location_size'), - self.ZipVars('cl_autopartition_disk_dev_full', - 'cl_autopartition_disk_size_full')) - } - else: - return { - dev: size for dev, size in chain( - self.ZipVars('os_disk_dev', 'os_disk_size'), - self.ZipVars('os_location_source', 'os_location_size')) - } - - def getMapHumanReadable(self): - if self.GetBool("cl_autopartition_set"): - return { - dev: size for dev, size in chain( - self.ZipVars('os_disk_dev', 'os_disk_size', - humanreadable=True), - self.ZipVars('os_location_source', 'os_location_size', - humanreadable=True), - self.ZipVars('cl_autopartition_disk_dev_full', - 'cl_autopartition_disk_size_full', - humanreadable=True)) - } - else: - return { - device.udev.get_devname(name=dev): size - for dev, size in chain( - zip(self.Get('os_disk_dev'), - self.Get('os_disk_size', humanreadable=True)), - zip(self.Get('os_location_source'), - self.Get('os_location_size', humanreadable=True))) - } - - -class VariableOsInstallDiskType(SourceReadonlyVariable): - """ - New partition scheme (for feature change partition) - """ - type = "list" - indexField = "os_install_disk_dev" - - def getMap(self): - diskType = dict(self.ZipVars('os_disk_dev', 'os_disk_type')) - diskType.update(self.ZipVars('cl_autopartition_disk_dev', - 'cl_autopartition_disk_type')) - return diskType - - humanReadable = Variable.humanReadable - - -class VariableOsInstallDiskPart(SourceReadonlyVariable): - """ - Get new type partitions using for install - """ - type = "list" - indexField = "os_install_disk_dev" - - def getMap(self): - diskPart = dict(self.ZipVars('os_disk_dev', 'os_disk_part')) - diskPart.update(self.ZipVars('cl_autopartition_disk_dev', - 'cl_autopartition_disk_part')) - return diskPart - - humanReadable = Variable.humanReadable - - -class 
VariableOsInstallBindData(ReadonlyTableVariable): - """ - Table of install bind mount points - """ - source = ['os_install_bind_path', - 'os_install_bind_mountpoint'] - - -class VariableOsInstallBindPath(ReadonlyVariable): - """ - Install directories for bind - """ - type = "list" - - def get(self): - """Get install bind source""" - return self.Select('os_location_source', - where='os_location_dest', - func=lambda x: not x[1].startswith('/dev/') and x[0]) - - -class VariableOsInstallBindMountpoint(ReadonlyVariable): - """ - Mountpoint for install directories bind - """ - - def get(self): - return self.Select('os_location_dest', - where='os_location_source', - func=lambda x: not x[0].startswith('/dev/') and x[1]) - - -class VariableOsInstallBootloader(ReadonlyVariable): - """ - Bootloader for brief information - """ - - def init(self): - self.label = _("Bootloader") - - def get(self): - if self.Get('os_install_uefi_set') == 'on': - return "uefi" - else: - return ",".join(self.Get('os_install_mbr')) - - def humanReadable(self): - if self.Get('os_install_uefi_set') == 'on': - return _("UEFI") - else: - mbrs = self.Get('os_install_mbr', humanreadable=True) - if not mbrs: - return _("no") - return ",".join(mbrs) - - -class VariableOsInstallBootDevices(ReadonlyVariable): - """ - Физическое устройство с которого будет производиться загрузка системы на - котором находится /boot или /, т.е. 
если / находится на RAID, расположенном - на двух дисках - будет эти диски - """ - type = "list" - - def get(self): - bootDev = (self.Select('os_install_disk_parent', - where='os_install_disk_mount', - _in=('/', '/boot'), sort="DESC", limit=1) or - self.select('os_disk_parent', - os_disk_dev=self.Get('os_install_root_dev'), - limit=1)) - if bootDev: - devices = bootDev.split(',') - return [mbr - for dev, mbr in self.ZipVars('os_device_dev', - 'os_device_mbr') - if mbr and dev in devices] - return [] - -class VariableOsUefi(ReadonlyVariable): - """ - UEFI partitions from fstab - """ - def get(self): - return self.select('os_disk_dev', os_disk_mount__startswith="/boot/efi") - -class VariableOsInstallUefi(LocationHelper, Variable): - """ - UEFI partitions for install - """ - type = "choiceedit-list" - element = "selecttable" - opt = ["--uefi"] - metavalue = "EFI" - - re_not0_raid = re.compile("-raid[1-9]") - - def init(self): - self.label = _("UEFI boot") - self.help = _("set UEFI boot disks") - - @property - def install_to_not_x86_64(self): - return self.Get('os_install_arch_machine') != 'x86_64' - - @property - def install_without_uefiboot(self): - return self.Get('os_uefi_set') == 'off' - - @property - def install_to_flash(self): - return self.Get('os_install_root_type') == 'flash' - - def is_force_param(self): - return "--force" in self.Get("cl_console_args") - - def get(self): - # если используется авторазметка список разделов находится в ней - if self.GetBool('cl_autopartition_set'): - return self.Get('cl_autopartition_efi') - # исключаем определение UEFI если оно не может быть использовано - if (self.install_to_flash or self.install_to_not_x86_64 or - self.install_without_uefiboot): - return [] - # если происходит обновление загрузчика текущей системы - # для определения используем /etc/fstab - fstabefidevs = self.Get('os_uefi') - if self.Get('cl_action') != 'system': - return fstabefidevs - rootdev = self.Get('os_install_root_dev') - rootscheme = 
self.select('os_disk_type', - os_disk_dev=rootdev, limit=1) - # определяем список физических дисков на которых находится rootdev - parents = set(self.select('os_disk_parent', - os_disk_dev=rootdev, limit=1).split(',')) - efidev = [x for x in self.select('os_device_efi', - os_device_dev__in=parents) if x] - allefi = [x for x in self.select('os_device_efi', - os_device_type="hdd") if x] - # если корневое устройство расположено на ненулевом RAID - возвращаем - # полный список иначе только первое устройство - # если диски для установки не содержат EFI - берём efi из /etc/fstab - # если и там нет, то берём первый попавшийся EFI на любом из HDD - if self.re_not0_raid.search(rootscheme): - return efidev or fstabefidevs or allefi[:1] - # возвращаем первое найденное устройство - else: - return efidev[:1] or fstabefidevs or allefi[:1] - - def set(self, value): - def transform(efidev): - if efidev not in self.Get('os_device_efi'): - return self.select('os_device_efi', - os_device_dev=efidev, limit=1) or efidev - return efidev - return filter(lambda x: x != "off", map(transform, value)) - - def choice(self): - deviceParentMap = self.ZipVars('os_device_dev', - 'os_device_efi', 'os_device_name') - return [(efidisk, "%s (%s)" % (dev, name or _("Unknown"))) - for dev, efidisk, name in deviceParentMap - if efidisk] - - def check(self, value): - if value: - efi_boot_mgr = getProgPath('/usr/sbin/efibootmgr') - if not efi_boot_mgr: - raise VariableError( - _("UEFI installation is unavailable, because '%s' command " - "not found") % efi_boot_mgr) - if self.install_without_uefiboot: - raise VariableError( - _("Your system must be loaded in UEFI for using this " - "bootloader")) - if self.install_to_not_x86_64: - raise VariableError( - _("Architecture of the target system must be x86_64")) - efidevs = self.Get('os_device_efi') - badefi = [x for x in value if x not in efidevs] - if badefi: - raise VariableError( - _("Wrong EFI device %s") % badefi[0]) - - fstab_disks = [dev - for dev, mp 
in self.ZipVars('os_disk_dev', 'os_disk_mount') - if mp and not mp.startswith("/boot/efi") - ] - for disk in value: - if disk in fstab_disks: - raise VariableError( - _("Partition {disk} already used by " - "the current system").format(disk=disk)) - not_fat_efi = self.select('os_disk_dev', - os_disk_format__ne="vfat") - for efipart in value: - if efipart in not_fat_efi and isMount(efipart): - - if not self.is_force_param() or not try_umount(efipart): - raise VariableError( - _("Please unmount {device}, as it will be used for " - "installation").format(device=efipart)) - - for efipart in value: - if efipart in self.select('os_location_source', - os_location_dest__ne=""): - raise VariableError( - _("Partition {disk} already used for " - "installation").format(disk=efipart)) - - def uncompatible(self): - """ - Uncompatible with autopartition - """ - if self.Get('cl_autopartition_set') == "on": - return \ - _("The layout is not available with autopartitioning") - if self.Get('os_install_root_type') == 'flash': - return \ - _("This option not used for Flash install") - return "" - - -class VariableOsInstallMbr(LocationHelper, Variable): - """ - Disks for boot mbr - """ - type = "choiceedit-list" - element = "selecttable" - opt = ["--mbr"] - metavalue = "MBR" - untrusted = True - check_after = ["os_install_uefi"] - - def init(self): - self.label = _("Boot disk") - self.help = _("boot disk for the system bound for install") - - def get(self): - """Get default Master boot record install""" - if self.Get('os_install_uefi_set') == 'on': - return [] - if self.Get('cl_autopartition_set') == 'on': - return self.Get('cl_autopartition_mbr') - if self.Get('os_install_root_type') in ("flash", "usb-hdd"): - rootdev = self.Get('os_install_root_dev') - device = filter(lambda x: x in rootdev, - self.Get('os_device_dev')) - if device: - return [device[0]] - else: - return [] - bootdevices = self.Get('os_install_boot_devices') - # при установке с HDD также устанавливаем загрузчик на 
первый диск - # если есть возможность - if self.Get('os_root_type') == "hdd": - first_hdd = self.Select( - 'os_device_dev', where='os_device_type', eq='hdd', limit=1) - if self.select('os_device_mbr', os_device_dev=first_hdd, limit=1): - bootdevices.append(first_hdd) - return sorted(set(bootdevices)) - - def choice(self): - deviceParentMap = self.ZipVars('os_device_mbr', 'os_device_name') - return [(mbrdisk, "%s (%s)" % (mbrdisk, name or _("Unknown"))) - for mbrdisk, name in deviceParentMap - if mbrdisk] - - def set(self, value): - # support off value - return filter(lambda x: x != "off", value) - - def check(self, value): - if self.GetBool('cl_autopartition_set'): - return - rootType = self.Get('os_install_root_type') - if rootType == "flash": - if len(value) > 1: - raise VariableError( - _("For Flash install, you need only one disk")) - if value and self.Get('os_install_uefi_set') == "on": - raise VariableError(_("MBR is not used with the UEFI bootloader")) - useBtrfs = "btrfs" in self.Select('os_install_disk_format', - where='os_install_disk_mount', - _in=('/', '/boot'), - sort="DESC")[:1] - for mbrDisk in value: - if self.Get('cl_autopartition_set') == 'on': - tableOnBootDisk = self.Get('cl_autopartition_table') - else: - tableOnBootDisk = self.Select('os_device_table', - where="os_device_dev", eq=mbrDisk, - limit=1) - if not tableOnBootDisk: - raise VariableError( - _("Disk '%s' needs a partition table for the boot record") % - mbrDisk) - if rootType == "flash": - if tableOnBootDisk == "gpt": - raise VariableError(_("You need a disk with a dos " - "table for Flash install")) - if rootType in ("usb-hdd", "hdd") and tableOnBootDisk == "gpt": - bbsizes = ( - size - for size, disk_id, disk_parent in self.ZipVars( - 'os_disk_size', 'os_disk_id', 'os_disk_parent' - ) - if disk_id == 'EF02' and mbrDisk in disk_parent - ) - bios_grub_size = self.Get('cl_autopartition_bios_grub_size') - for bbsize in bbsizes: - minsize = "%dMb" % (int(bios_grub_size) / Sizes.M) - if 
not bbsize: - raise VariableError( - _("Your boot device must have a " - "BIOS Boot partition ({minsize})").format( - minsize=minsize)) - # проверка размера EF02 при установке на btrfs - elif useBtrfs: - if (bbsize.isdigit() and bios_grub_size.isdigit() and - round(float(bbsize) / Sizes.M) < round(float(bios_grub_size)/ Sizes.M)): - raise VariableError( - _("Your boot device must have a BIOS Boot " - "partition ({minsize})").format( - minsize=minsize)) - if mbrDisk not in self.Get('os_device_mbr'): - raise VariableError( - _("Device {device} has not BIOS Boot partition").format( - device=mbrDisk)) - if value: - if not self.Get('os_grub2_path'): - self.checkForLegacyGrub() - - def checkForLegacyGrub(self): - """Check current disk configuration for installation for install - legacy grub""" - bootDiskType, bootDiskFormat = \ - self.Select(['os_install_disk_type', - 'os_install_disk_format'], - where='os_install_disk_mount', - _in=('/', '/boot'), - sort="DESC", limit=1) - if "lvm" in bootDiskType or "raid" in bootDiskType: - raise ValueError( - _("Legacy grub requires a separate /boot partition " - "to support boot from a RAID or a LVM")) - if bootDiskFormat in ("btrfs", "nilfs2"): - raise ValueError( - _("To support booting from %s, legacy grub needs a " - "separate /boot partition") % bootDiskFormat) - - def uncompatible(self): - """ - Опция несовместима с использованием UEFI - """ - if self.Get('cl_autopartition_set') == "on": - return \ - _("The layout is not available with autopartitioning") - return "" - - -class VariableOsInstallRootType(LocationHelper, Variable): - """ - Type of installation - """ - opt = ["--type"] - metavalue = "DISKTYPE" - type = "choice" - - def init(self): - self.help = _("device type for the system bound for install") - self.label = _("Installation type") - - def get(self): - selectRootType = self.Get('cl_install_type') - if not selectRootType: - return self.Get('os_root_type') - if selectRootType == "flash": - return "flash" - else: - 
rootdev = self.Get('os_install_root_dev') - devs = list(device.udev.get_disk_devices(name=rootdev)) - if not devs: - return "hdd" - device_type = self.Select( - 'os_device_type', where='os_device_dev', eq=devs[0], limit=1) - if device_type in ("usb-hdd", "flash"): - return "usb-hdd" - return "hdd" - - def choice(self): - return [("hdd", _("Hard disk")), - ("flash", _("USB Flash")), - ("usb-hdd", _("USB Hard Disk"))] - - -class VariableOsInstallRootDev(ReadonlyVariable): - def get(self): - """Get install root device""" - if self.Get('cl_action') == 'system': - return self.Select('os_install_disk_dev_base', - where='os_install_disk_mount_base', - eq="/", limit=1) or '' - else: - return self.Get('os_root_dev') - - -class VariableOsInstallRootUuid(ReadonlyVariable): - def get(self): - """UUID корневого устройства""" - if self.Get('cl_action') == 'system': - root_dev = self.Get('os_install_root_dev') - return self.Select('os_install_disk_uuid', - where='os_install_disk_dev', - eq=root_dev, limit=1) or '' - else: - root_dev = self.Get('os_root_dev') - return self.Select('os_disk_uuid', - where='os_disk_dev', - eq=root_dev, limit=1) or '' - - -class VariableOsInstallFstabMountConf(DeviceHelper, ReadonlyVariable): - """ - FStab.conf contains for mount and bind points - """ - - def _commentFstab(self, s, mp, dev): - """Generate comment for /etc/fstab each line""" - if s.startswith("UUID"): - return "# %s was on %s during installation\n%s" % (mp, dev, s) - else: - return s - - def formatFstab(self, used, dev, mp, fs, opts, spec): - if fs in FileSystemManager.supportFS: - fs_orig = FileSystemManager.supportFS[fs].get('orig', fs) - else: - fs_orig = fs - ret = "{dev}\t{mp}\t{fs}\t{opts}\t{spec}".format( - dev=used, mp=mp, fs=fs_orig, opts=opts, spec=spec - ) - if used.startswith("UUID"): - return "# %s was on %s during installation\n%s" % (mp, dev, ret) - return ret - - def get(self): - devicesForFstab = self.Select([ - 'os_install_disk_use', - 'os_install_disk_mount', - 
'os_install_disk_format', - 'os_install_disk_options', - 'os_install_disk_dev'], - where='os_install_disk_mount', - func=lambda x: x[0] != "" and x[0] != "swap") - - devicesForFstab = sorted( - devicesForFstab, key=lambda x: self.separateDevice(x[1])) - - rootLine = "\n".join( - self.formatFstab(used, dev, mp, fs, opts, "0 1") - for used, mp, fs, opts, dev in devicesForFstab[:1] - ) - - otherLines = "\n".join( - self.formatFstab(used, dev, mp, fs, opts, "0 0") - for used, mp, fs, opts, dev in devicesForFstab[1:] - ) - - bindData = self.ZipVars('os_install_bind_path', - 'os_install_bind_mountpoint') - - bindLines = "\n".join(map(lambda x: "%s\t%s\tnone\tbind\t0 0" \ - % (x[0], x[1]), bindData)) - return "\n".join(filter(lambda x: x, [rootLine, otherLines, bindLines])) - - -class VariableOsInstallFstabEfiConf(VariableOsInstallFstabMountConf): - """ - Переменная содержит часть fstab в которой содержится описание - подключения /boot/efi - """ - - def get(self): - devicesForFstab = self.Select([ - 'os_install_disk_use', - 'os_install_disk_mount', - 'os_install_disk_format', - 'os_install_disk_options', - 'os_install_disk_dev'], - where='os_install_disk_mount', - func=lambda x: x[0].startswith("/boot/efi")) - - devicesForFstab = sorted( - devicesForFstab, key=lambda x: self.separateDevice(x[1])) - - efiLines = "\n".join( - self.formatFstab(used, dev, mp, fs, opts, "0 0") - for used, mp, fs, opts, dev in devicesForFstab - ) - - return "\n".join(filter(lambda x: x, [efiLines])) - - -class VariableOsInstallFstabSwapConf(VariableOsInstallFstabMountConf): - """ - FStab.conf contains swap partition - """ - - def get(self): - return "\n".join(map(lambda x: "%s\tnone\tswap\tsw\t0 0" % \ - self._commentFstab(x[0], "swap", x[2]), - self.Select(['os_install_disk_use', - 'os_install_disk_mount', - 'os_install_disk_dev'], - where='os_install_disk_mount', - eq='swap'))) - - -class VariableClInstallType(Variable): - """ - Installation type (extension variable describe, that - install 
must be to flash or hdd - """ - type = "choice" - value = "" - - def choice(self): - return ["", "flash", "hdd"] - - def check(self, value): - for dn in ("/proc", "/sys", "/dev", "/dev/pts"): - if not isMount(dn): - raise VariableError(_("%s is not mounted") %dn ) - check_fn = '/run/.calculate-rw-check-%d' % os.getpid() - try: - with open(check_fn,'w') as f: - pass - os.unlink(check_fn) - except (IOError,OSError) as e: - raise VariableError(_("Failed to create data in /run")) - - -class VariableOsInstallDiskSingle(Variable): - """ - Installation disk - """ - type = "choiceedit" - opt = ["--disk", "-d"] - metavalue = 'DISK' - untrusted = True - value = "" - - def init(self): - self.label = _("Installation disk") - self.help = _("set the USB Flash device") - - def choice(self): - def generator(): - device_names = dict(self.ZipVars('os_device_dev', - 'os_device_name')) - for disk_dev, disk_type, disk_parent in self.ZipVars( - 'os_disk_dev', 'os_disk_type', 'os_disk_parent'): - if disk_type == "disk-partition": - device_name = device_names.get(disk_parent, _("Unknown")) - yield disk_dev, "%s (%s)" % (disk_dev, device_name) - return list(generator()) - - def check(self, value): - # проверить, чтобы был выбран именно раздел - if value not in self.Get('os_disk_dev'): - raise VariableError( - _("Wrong device '%s'" % value) - ) - disktype = self.select('os_disk_type', os_disk_dev=value, limit=1) - if disktype and disktype != "disk-partition": - raise VariableError( - _("Wrong device '%s'" % value)) - # проверить, чтобы раздел не использовался системой (не описан в fstab) - mp = self.select('os_disk_mount', os_disk_dev=value, limit=1) - if mp: - raise VariableError( - _("The partition {dev} is already in use as {mp}").format( - dev=value, mp=mp)) - # если система загружена с флешки (не iso) - нельзя переустановить - # эту систему - root_type = self.Get('os_root_type_ext') - if root_type in RootType.LiveFlash: - if value == self.Get('os_root_flash_dev'): - raise 
VariableError( - _("You cannot install the new system instead current")) - # detect using extended partition - disk_part = self.select('os_disk_part', os_disk_dev=value, limit=1) - if disk_part == 'extended': - raise VariableError( - _("Unable to use extended partition %s for installation") % - value) - if "cdrom" in disk_part: - raise VariableError(_("Unable to use CDROM %s for installation") % - value) - if not disk_part or disk_part == 'gpt': - raise VariableError(_("You need a disk with a dos " - "table for Flash install")) - - -class VariableOsInstallFormatSingleSet(Variable): - """ - Форматировать Flash - """ - type = "bool" - opt = ["--format"] - untrusted = True - value = "off" - - def init(self): - self.label = _("Format the USB Flash") - self.help = _("perform the formatting of the USB Flash drive") - - def must_be_formatted(self, dev): - fs = self.select('os_disk_format', os_disk_dev=dev, limit=1) - if fs != "vfat": - return True - return False - - def cannot_be_formatted(self, dev): - flash_dev = self.Get('os_root_flash_dev') - return flash_dev and dev == flash_dev - - def check(self, value): - devs = self.Get('os_disk_dev') - dev = self.Get('os_install_disk_single') - if dev not in devs: - return - if value == "on": - if self.cannot_be_formatted(dev): - raise VariableError( - _("You cannot format the USB Flash which " - "contains the current system")) - else: - if self.must_be_formatted(dev): - raise VariableError( - _("{device} must be formatted").format(device=dev)) - if dev: - try: - with FlashDistributive(dev) as f: - dn = f.getDirectory() - df = DiskSpace() - free_size = df.get_free(dev) - squash_fn = path.join(dn, "livecd.squashfs") - if not path.exists(squash_fn): - source = self.Get('cl_image') - if isinstance(source, IsoDistributive): - image_size = source.get_squash_size() - if image_size > free_size: - raise VariableError( - _("Not enough free space on the " - "USB Flash")) - except DistributiveError: - pass - -class 
VariableOsInstallBtrfsCompression(Variable): - """ - Алгоритм сжатия для btrfs, в которых выбрано использовать сжатие - """ - type = "choiceedit" - value = "zstd" - - def choice(self): - return ["zlib","lzo","zstd"] - - def check(self, value): - if not re.search(r"^(zlib|lzo|zstd|(1[0-9]|1-9))$", value): - raise VariableError(_("Wrong btrfs compression")) diff --git a/libs_crutch/install/variables/distr.py b/libs_crutch/install/variables/distr.py deleted file mode 100644 index 1e7e5dc..0000000 --- a/libs_crutch/install/variables/distr.py +++ /dev/null @@ -1,660 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import sys -from os import path -import re -import operator -from operator import itemgetter -from calculate.lib.datavars import (Variable, VariableError, ReadonlyVariable, - CommonVariableError) -from calculate.lib.utils.common import (getSupportArch, getTupleVersion, - cmpVersion) -from calculate.lib.utils.files import listDirectory, pathJoin -from calculate.lib.variables.linux import Linux -from calculate.install.distr import (Distributive, PartitionDistributive, - DirectoryDistributive, DefaultMountPath, - DistributiveError, FlashDistributive, - ArchiveDistributive, - MultiPartitions, PxeDistributive) - -from calculate.lib.cl_lang import setLocalTranslate, _ -from calculate.install.fs_manager import FileSystemManager -from functools import reduce - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class DistroRepository(Linux): - contentCache = {} - - marches = ['i686', 'x86_64'] - - extensiton = ['iso', 'tar.bz2', 'tar.gz', 'tar.7z', 'tar.lzma'] - - reDistName = re.compile(""" - ^.*/(?P%(name)s) - -(?P%(ver)s) - (?:-(?P%(ser)s))? 
- -(?P%(march)s) - .(?P%(ext)s)$""" % - {'name': "[a-z0-9]+", - 'ver': r"(\d+\.)*\d+", - 'ser': r"\d+", - 'march': "|".join(marches), - 'ext': "|".join(extensiton) - }, re.X) - - def _getDistrInfo(self, filename): - """Get information by distributive""" - # if filename is directory - if not path.isfile(filename): - return Distributive().getInfo(filename) - else: - match = self.reDistName.match(filename) - if not match: - return {} - distdic = match.groupdict() - distdic["os_linux_build"] = "" - if "os_linux_ver" in distdic: - if re.match("^\d{8}$", distdic["os_linux_ver"]): - distdic["os_linux_build"] = distdic["os_linux_ver"] - distdic["os_linux_ver"] = "" - return distdic - - def getImage(self, scratch, rootType, imagePath, march=None, - shortName=None, linuxVer=None, linuxBuild=None): - """Get image by parameters""" - # exclude directory distributive for flash and scratch install - if scratch == "on" or rootType == "flash": - discardType = ["dir"] - else: - discardType = [] - return self.getBestDistributive(imagePath, - march=march, - shortname=shortName, - discardType=discardType, - version=linuxVer, - build=linuxBuild) - - def _getAvailableShortnames(self, dirs): - """Get available distributives shortnames""" - distros = filter(lambda x: x, - map(self.reDistName.search, - self._getAvailableDistributives(dirs))) - return sorted(list(set(map(lambda x: x.groupdict()['name'], distros)))) - - def opcompareByString(self, buf): - if buf: - reOp = re.compile("^(!=|=|==|<=|>=|>|<)?(\d+.*)$") - res = reOp.search(buf) - if res: - return ({'!=': operator.ne, - '=': operator.eq, - '==': operator.eq, - '>=': operator.ge, - '<=': operator.le, - '<': operator.lt, - '>': operator.gt}.get(res.group(1), operator.eq), - res.group(2)) - else: - return operator.eq, buf - return None, None - - def _getAvailableDistributives(self, dirs, system=None, shortname=None, - march=None, version=None, build=None): - """Get all distributives by filter""" - - def systemByName(name): - return 
self.dictNameSystem.get(name.upper(), "") - - verCmp, version = self.opcompareByString(version) - if version: - version = getTupleVersion(version) - buildCmp, build = self.opcompareByString(build) - if build and build.isdigit(): - build = int(build) - - def distfilter(dist): - d = self._getDistrInfo(dist) - if not d: - return False - # check filter conditions - if system and systemByName(d['os_linux_shortname']) != system: - return False - if "os_linux_shortname" not in d or "os_linux_ver" not in d: - return False - if (shortname and - d['os_linux_shortname'].lower() != shortname.lower()): - return False - if march and d['os_arch_machine'] != march: - return False - if version and \ - not verCmp(getTupleVersion(d['os_linux_ver']), version): - return False - if build and "os_linux_build" in d and \ - (not d['os_linux_build'].isdigit() or - not buildCmp(int(d['os_linux_build']), build)): - return False - return True - - def listdistr(pathname): - if path.exists(path.join(pathname, 'etc/make.profile')) or \ - path.exists( - path.join(pathname, 'etc/portage/make.profile')) or \ - path.exists(path.join(pathname, 'livecd')) or \ - pathname.startswith('/dev/'): - return [pathname] - else: - # discard inner directories - return filter(lambda x: not path.isdir(path.join(pathname, x)), - listDirectory(pathname)) - - # get lists files in directories - allFiles = map(lambda x: map(lambda y: path.join(x, y), - listdistr(x)), - dirs) - # filter distributives - return filter(distfilter, - # join files lists to one list - reduce(lambda x, y: x + y, - allFiles, [])) - - def extcomparator(self, *exts): - """Compare extensions""" - mapExts = {'iso': 0, - 'flash': -1, - 'isodir': -2, - 'partdir': -3, - 'dir': -4} - return cmp(mapExts.get(exts[0], -4), mapExts.get(exts[1], -4)) - - def sortdistrfunc(self, x, y): - """Func of comparing two distributive""" - ver1, ver2 = x[1].get('os_linux_ver', ""), y[1].get('os_linux_ver', "") - if ver1 and ver2 and ver1 != "0" and ver2 != "0" and ver1 
!= ver2: - return cmpVersion(ver1, ver2) - build1 = getTupleVersion(x[1].get('os_linux_build', "")) - build2 = getTupleVersion(y[1].get('os_linux_build', "")) - if build1 != build2: - return cmp(build1, build2) - else: - ser1, ser2 = (x[1].get('serial_id') or "0", - y[1].get('serial_id') or "0") - if ser1 != ser2: - return cmp(int(ser1), int(ser2)) - ext1 = x[1].get('ext', "") - ext2 = y[1].get('ext', "") - return self.extcomparator(ext1, ext2) - - def getAvailableDristibutives(self, dirs, system=None, shortname=None, - march=None, version=None, build=None, - discardType=()): - """Get list available distributives""" - if shortname: - shortname = shortname.lower() - availDistrs = self._getAvailableDistributives(dirs, system, shortname, - march, version, - build) - availDistrs = filter(lambda x: x[1] and "ext" in x[1] and - not x[1]["ext"] in discardType, - map(lambda x: (x, self._getDistrInfo(x)), - availDistrs)) - return map(lambda x: x[0], - sorted(availDistrs, self.sortdistrfunc, reverse=True)) - - def getBestDistributive(self, dirs, system=None, shortname=None, march=None, - version=None, build=None, discardType=()): - """Get the actualest distributive""" - availDistrs = self.getAvailableDristibutives(dirs, system, shortname, - march, version, build, - discardType) - if availDistrs: - return availDistrs[0] - else: - return None - - def _findLatestFile(self, dirs, reMatch, keyfunc): - """Find latest file in dirs, which match by reMatch, - comparable part get by keyfunc""" - existsdirs = filter(path.exists, dirs) - listimgs = reduce(lambda x, y: x + map( - lambda x: reMatch.search( - path.join(y, x)), - listDirectory(y)), - existsdirs, []) - listimgs = filter(lambda x: x, listimgs) - if listimgs: - return max(listimgs, key=keyfunc).group() - return "" - - def getBestStage(self, dirs, march=None, hardened=None): - """Get latest stage by march""" - if march: - march = {'x86_64': 'amd64'}.get(march, march) - else: - march = "[^-]+" - if hardened is None: - hardened = 
"(?:-hardened)?" - elif hardened is True: - hardened = "-hardened" - elif hardened is False: - hardened = "" - reStage = re.compile(r'^.*/stage3-%s%s-(\d+)\.tar\.bz2$' % - (march, hardened), re.S) - return self._findLatestFile(dirs, reStage, lambda x: x.groups()[0]) - - -class VariableClImage(ReadonlyVariable): - """ - System image for installation - """ - type = "object" - - def get(self): - """Get image file from distributive repository""" - try: - action = self.Get('cl_action') - if not action in ('system',): - return Distributive.fromFile('/') - filename = self.Get('cl_image_filename') - if filename: - filename = Distributive.fromFile(filename) - except DistributiveError as e: - return "" - return filename - - def humanReadable(self): - filename = self.Get('cl_image') - if filename: - return filename.getType() - return filename - - -class VariableClImageFilename(DistroRepository, Variable): - """ - Distributive image filename - """ - type = 'file' - element = 'file' - metavalue = "IMAGE" - opt = ['--iso'] - check_after = ["os_install_root_type"] - untrusted = True - - def init(self): - self.label = _("Installation image") - self.help = _("ISO image for installation") - - def get(self): - if self.Get('cl_action') != 'system': - return "" - arch = self.Get('cl_image_arch_machine') or self.Get('os_arch_machine') - shortname = self.Get('cl_image_linux_shortname') or \ - self.Get('os_linux_shortname') - ver = self.Get('cl_image_linux_ver') or None - build = self.Get('cl_image_linux_build') or None - return self.getImage(self.Get('os_install_scratch'), - self.Get('os_install_root_type'), - self.Get('cl_image_path'), - arch, shortname, ver, build) or "" - - def check(self, isoimage): - """Set image file""" - if self.Get('cl_action') == 'system' and not isoimage: - raise VariableError(_("You need to select a distribution image")) - try: - d = Distributive.fromFile(isoimage) - if isinstance(d, ArchiveDistributive): - raise VariableError(_("Wrong image file")) - except 
DistributiveError: - pass - - imageData = Distributive().getInfo(isoimage) - if not ("os_linux_shortname" in imageData and - imageData.get('os_linux_build', '') and - "os_arch_machine" in imageData): - raise VariableError(_("Wrong image file")) - if imageData["os_chrootable_set"] == 'off': - raise VariableError( - _("The image is not compatible with the current kernel")) - - def humanImageName(self, distroinfo, filepath): - if all(x in distroinfo for x in ("os_linux_shortname", - "os_arch_machine", - "os_linux_build")): - distroinfo['os_linux_shortname'] = \ - distroinfo['os_linux_shortname'].upper() - fullname = distroinfo.get('os_linux_name', - Linux.dictLinuxName.get( - distroinfo['os_linux_shortname'], - "Calculate")) - subname = distroinfo.get('os_linux_subname', - Linux.dictLinuxSubName.get( - distroinfo['os_linux_shortname'], "")) - if subname: - subname = " %s" % subname - build = distroinfo['os_linux_build'] or \ - distroinfo.get('os_linux_ver', '') - ver = distroinfo.get('os_linux_ver', '') - return "{fullname} {os_arch_machine} {build}".format( - fullname="%s%s" % (fullname, subname), filepath=filepath, - build=build, ver=ver, **distroinfo) - else: - return filepath - - def humanReadable(self): - fullname = self.Get('os_install_linux_name') - subname = self.Get('os_install_linux_subname') - if subname: - subname = " %s" % subname - arch = self.Get('os_install_arch_machine') - build = self.Get('os_install_linux_build') - ver = self.Get('os_install_linux_ver') - - return "{fullname} {ver} {arch} {build}".format( - fullname="%s%s" % (fullname, subname), - build=build, ver=ver, arch=arch) - - def choice(self): - scratch = self.Get('os_install_scratch') - rootType = self.Get('os_install_root_type') - imagePath = self.Get('cl_image_path') - if scratch == "on" or rootType == "flash" or \ - self.Get('cl_install_type') == 'flash': - discardType = ["dir"] - else: - discardType = [] - distros = self.getAvailableDristibutives(imagePath, - discardType=discardType) - 
if self.wasSet and not self.value in distros: - distros.append(self.value) - return sorted(map(lambda x: ( - x, self.humanImageName(self._getDistrInfo(x), x)), distros), - key=itemgetter(1)) - - -class VariableClImageArchMachine(DistroRepository, Variable): - """ - Filter by architecture - """ - value = "" - type = 'choice' - opt = ['--march'] - metavalue = "ARCH" - available_arch = ["i686", "x86_64"] - - def init(self): - self.label = "%s %s" % (_("Filter"), _("by processor architecture")) - self.help = _("select the processor architecture") - - def set(self, march): - if march == "auto": - march = getSupportArch()[-1] - return march - - def choice(self): - return [("", _("Not used"))] + \ - [("auto", _("Auto"))] + \ - [(x, x) for x in self.available_arch] - - def humanReadable(self): - return self.Get() or _("Not used") - - -class VariableClImageLinuxShortname(DistroRepository, Variable): - """ - Filter by shortname - """ - value = "" - type = 'choiceedit' - metavalue = "SYSTEM" - opt = ['--os', '-s'] - - - def init(self): - self.label = "%s %s" % (_("Filter"), _("by distribution")) - self.help = _("select the operation system") - - def choice(self): - return [("", _("Not used"))] + [ - ("CLD", "Calculate Linux Desktop KDE"), - ("CLDM", "Calculate Linux Desktop MATE"), - ("CLDX", "Calculate Linux Desktop XFCE"), - ("CLS", "Calculate Linux Scratch"), - ("CDS", "Calculate Directory Server"), - ("CSS", "Calculate Scratch Server"), - ("CMC", "Calculate Media Center"), - ] - - def humanReadable(self): - return self.Get() or _("Not used") - - -class VariableClImageLinuxVer(DistroRepository, Variable): - """ - Filter by version - """ - value = "" - - def init(self): - self.label = "%s %s" % (_("Filter"), _("by version")) - self.help = _("select the operation system by version") - - def humanReadable(self): - return self.Get() or _("Not used") - - -class VariableClImageLinuxBuild(DistroRepository, Variable): - """ - Filter by build - """ - value = "" - - def init(self): 
- self.label = "%s %s" % (_("Filter"), _("by build")) - self.help = _("select the operation system by build") - - def humanReadable(self): - return self.Get() or _("Not used") - - -class VariableClImagePath(ReadonlyVariable): - """ - Image search path - """ - type = "list" - - def get(self): - # if current distributive is live - if self.Get('os_root_type') == "livecd": - # if builder from flash then this source path '/mnt/flash' - # may be this path will be '/mnt/builder' for install - # modified system - if self.Get('os_scratch') == "on" and path.exists('/mnt/flash'): - livedistr = ['/mnt/flash'] - # if system boot with kernel param 'docache' - elif path.exists('/mnt/squash'): - livedistr = ['/mnt/livecd'] - # standard livecd - else: - if self.Get('os_install_root_type') == "flash": - livedistr = ['/run/initramfs/live', - '/run/initramfs/squashfs', - '/mnt/cdrom'] - else: - livedistr = ['/run/initramfs/squashfs', - '/run/initramfs/live', - '/mnt/cdrom'] - livedistr = filter(listDirectory, - livedistr)[:1] - else: - livedistr = [] - # search all partition for source installation distributive - rootDev = self.Get('os_install_root_dev') - livedistr += \ - map(lambda x: x[0], - filter(lambda x: " live" in x[1] and x[0] != rootDev, - zip(self.Get('os_disk_dev'), - self.Get('os_disk_content')))) - # add to standard path - return filter(path.exists, - ['/var/calculate/remote/linux', - '/var/calculate/linux'] + livedistr) - - -class VariableClSource(ReadonlyVariable): - """ - Дистрибутив текущей системы - """ - type = "object" - - def get(self): - return DirectoryDistributive('/') - - -class VariableClTarget(ReadonlyVariable): - """ - Target distributive - """ - type = "object" - - def get(self): - listVars = ['os_install_disk_dev', 'os_install_disk_mount', - 'os_install_disk_format', 'os_install_disk_perform_format', - 'os_install_disk_part', 'os_install_disk_id'] - rootLabel = "{short}-{ver}".format( - short=self.Get('os_install_linux_shortname'), - 
ver=self.Get('os_install_linux_ver')) - - if self.Get('os_install_root_type') == "flash": - flashLabel = "{short}-{build}".format( - short="CL", build=self.Get('os_install_linux_build')) - disk = self.Get('os_install_disk_single') - fileSystem = "vfat" - systemId = FileSystemManager.supportFS.get( - 'vfat', {}).get('msdos', '0b') - isFormat = self.GetBool('os_install_format_single_set') - partTable = self.select('os_disk_part', - os_disk_dev=disk, limit=1) - return FlashDistributive( - disk, mdirectory=DefaultMountPath.InstallMount, - check=True, fileSystem=fileSystem, - isFormat=isFormat, systemId=systemId, - rootLabel=flashLabel, - partitionTable=partTable) - osInstallScratch = self.isTrue(self.Get('os_install_scratch')) - mapDevId = dict(self.ZipVars('os_disk_dev', 'os_disk_id')) - disk, mount, fileSystem, isFormat, partTable, systemId = \ - self.Select(listVars, - where='os_install_disk_mount', - eq='/', limit=1) - if not systemId or mapDevId.get(disk, '') == systemId: - systemId = None - if osInstallScratch: - raise VariableError("Scratch is not supported") - if self.Get('os_install_pxe') == "on": - return PxeDistributive(self.Get('os_install_pxe_path')) - else: - target = PartitionDistributive( - disk, mdirectory=DefaultMountPath.InstallMount, - check=True, fileSystem=fileSystem, - rootLabel=rootLabel, - isFormat=self.isTrue(isFormat), - systemId=systemId, - partitionTable=partTable, - compression=self.Get('os_install_btrfs_compression')) - multiPartition = None - diskData = self.Select(listVars, - where='os_install_disk_mount', - ne='/') - bindData = self.Select(['os_install_bind_path', - 'os_install_bind_mountpoint'], - where='os_install_bind_mountpoint', - ne='') - if diskData or bindData: - multiPartition = MultiPartitions() - target.multipartition = multiPartition - for disk, mount, fileSystem, isFormat, partTable, systemId in diskData: - if not systemId or mapDevId.get(disk, '') == systemId: - systemId = None - multiPartition.addPartition(dev=disk, - 
mountPoint=mount, - fileSystem=fileSystem, - isFormat=self.isTrue(isFormat), - systemId=systemId, - partitionTable=partTable) - for source, dest in bindData: - multiPartition.addPartition(dev=source, - mountPoint=dest, - fileSystem='bind', - isFormat=False, - systemId=None, - partitionTable='') - return target - - -class VariableClImageNewOnly(Variable): - """ - Distributive image filename - """ - type = 'bool' - opt = ['-U', '--update'] - value = "off" - - def init(self): - self.label = _("Install the newer image only") - self.help = _("install the newer image only") - - def installedBuild(self): - """ - Get build already installed system - Need for check update - """ - rootDev = self.Get('os_install_root_dev') - if not rootDev: - return "" - try: - imageData = Distributive().getInfo(rootDev) - return imageData.get('os_linux_build', '') - except Exception: - pass - return "" - - def check(self, value): - if value == 'on': - try: - imageData = Distributive().getInfo( - self.Get('cl_image_filename')) - except Exception as e: - raise VariableError(_("Wrong image file")) - if imageData.get('os_linux_build', '') <= \ - self.Get('os_linux_build') or \ - imageData.get('os_linux_build', - '') <= self.installedBuild(): - raise CommonVariableError(_("The image for update not found")) - - -class VariableClInstallPathFrom(ReadonlyVariable): - """ - Путь из устанавливаемой системы до устанавливающий системы - """ - - def get(self): - template_path = pathJoin(self.Get('cl_chroot_path'), - self.Get('cl_root_path')) - return os.path.relpath("/", template_path) diff --git a/libs_crutch/install/variables/kernel.py b/libs_crutch/install/variables/kernel.py deleted file mode 100644 index 4f7ab6f..0000000 --- a/libs_crutch/install/variables/kernel.py +++ /dev/null @@ -1,510 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. 
http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import sys -import re -from os import path -from calculate.lib.datavars import (Variable, ReadonlyVariable, - ReadonlyTableVariable, FieldValue, - HumanReadable, VariableInterface) -from calculate.lib.utils.files import (readFile, - typeFile, process, listDirectory, - MAGIC_SYMLINK, MAGIC_COMPRESS) -from calculate.lib.utils.kernel import InitrdFile - -from calculate.lib.cl_lang import setLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_install3', sys.modules[__name__]) -from operator import itemgetter - -from calculate.lib.utils.files import readLinesFile -from calculate.lib.utils.common import (getKernelUid, getTupleVersion, - getValueFromCmdLine, CmdlineParams) -from itertools import * -from calculate.install.distr import DistributiveError - - -class VariableOsInstallKernelScheduler(Variable): - """ - Install scheduler opts (cfq,bfq,none,deadline) - """ - type = "choice" - opt = ["--scheduler"] - metavalue = "SCHEDULER" - - def init(self): - self.help = _("toggle the I/O scheduler") - self.label = _("I/O scheduler") - - def check_scheduler(self, scheduler): - return scheduler in self.Select('os_install_kernel_schedule_name', - where='os_install_kernel_schedule_set', - eq='on') - - def get_default(self): - root_devs = self.Select('os_install_disk_parent', - where='os_install_disk_mount', - eq='/', limit=1).split(',') - for root_dev in root_devs: - dev_ssd, dev_virtual 
= self.Select(['os_device_ssd_set', - 'os_device_virtual_set'], - where='os_device_dev', - eq=root_dev, limit=1) - if ((dev_ssd == 'on' or dev_virtual == 'on') and - self.check_scheduler("noop")): - return "noop" - return self.Get('os_install_kernel_schedule_default') - - def get(self): - """Get scheduler for install root device""" - if self.Get('os_root_type') == 'livecd': - return self.get_default() - else: - currentScheduler = getValueFromCmdLine( - CmdlineParams.IOScheduler) - if currentScheduler in map(lambda x: x[0], self.choice()): - return currentScheduler - return self.Get('os_install_kernel_schedule_default') - - def choice(self): - schedulers = {"deadline": "Deadline", - "cfq": "CFQ", - "noop": "No-op", - "bfq": "BFQ"} - return [(x, schedulers.get(x, x)) - for x in self.Select('os_install_kernel_schedule_name', - where='os_install_kernel_schedule_set', - eq='on')] + [("auto", _("Default"))] - - def set(self, value): - if value == "auto": - return self.get_default() - return value - - def uncompatible(self): - """ - Unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return _("I/O scheduler unavailable for Flash install") - - -class KernelConfig(object): - def __init__(self, kernel_config): - self.data = readFile(kernel_config).split('\n') - self.config = kernel_config - - def __iter__(self): - return iter(self.data) - - def __str__(self): - return "kernel config (%s)" % self.config - - def __len__(self): - return len(self.data) - - def __contains__(self, item): - if "=" in item: - if item.endswith("=n"): - key = "# %s is not set" % item[:-2] - else: - key = item - else: - key = "%s=" % item - return any(key in x for x in self) - - -class VariableOsInstallKernelConfig(ReadonlyVariable): - """ - Install config kernel filename - """ - - def get_kernel_src(self, distr_path): - """ - Get version of kernel from .config - """ - kernel_src = 'usr/src/linux' - makefile_path = path.join(distr_path, kernel_src, "Makefile") - 
- # get version from Makefile - re_makefile = re.compile("^VERSION = (\S+)\n" - "PATCHLEVEL = (\S+)\n" - "SUBLEVEL = (\S+)\n" - "EXTRAVERSION = (\S*)\n", re.M) - if path.exists(makefile_path): - with open(makefile_path) as f: - match = re_makefile.search(f.read(200)) - if match: - return "{0}.{1}.{2}{3}".format(*match.groups()) - return "" - - def configs(self, distr_path): - src_kernel_ver = self.get_kernel_src(distr_path) - if src_kernel_ver: - yield path.join("boot", "config-%s" % src_kernel_ver) - if self.Get('cl_chroot_path') == '/': - ver = process('/bin/uname', '-r').read().strip() - yield path.join("boot", "config-%s" % ver) - yield 'usr/src/linux/.config' - - def get(self): - image = self.Get('cl_image') - if image: - with image: - try: - distrPath = image.getDirectory() - for config in self.configs(distrPath): - config_name = path.join(distrPath, config) - if path.exists(config_name): - return KernelConfig(config_name) - except DistributiveError: - return "" - return "" - -class VariableOsKernelConfig(VariableOsInstallKernelConfig): - """ - Current config kernel - """ - def configs(self, distr_path): - ver = process('/bin/uname', '-r').read().strip() - yield path.join("boot", "config-%s" % ver) - yield 'usr/src/linux/.config' - - def get(self): - for config in self.configs("/"): - config_name = path.join("/", config) - if path.exists(config_name): - return KernelConfig(config_name) - return "" - - -class VariableOsInstallKernelScheduleDefault(Variable): - """ - IO планировщик по умолчанию - """ - type = "choice" - - def get(self): - for line in self.Get('os_install_kernel_config'): - if "CONFIG_DEFAULT_IOSCHED=" in line: - key, op, value = line.partition("=") - return value.strip('"') - return "cfq" - - def choice(self): - return self.Select('os_install_kernel_schedule_name', - where='os_install_kernel_schedule_set', - eq='on') - - -class VariableOsInstallKernelScheduleData(ReadonlyTableVariable): - """ - Information about kernel schedule - """ - source = 
['os_install_kernel_schedule_name', - 'os_install_kernel_schedule_set'] - - def get(self, hr=HumanReadable.No): - schedulers = {'CONFIG_IOSCHED_BFQ=y': 'bfq', - 'CONFIG_IOSCHED_NOOP=y': 'noop', - 'CONFIG_IOSCHED_CFQ=y': 'cfq', - 'CONFIG_IOSCHED_DEADLINE=y': 'deadline'} - installed = map(schedulers.get, - filter(lambda x: x in schedulers, - self.Get('os_install_kernel_config'))) or ['cfq'] - return [[x, "on" if x in installed else "off"] - for x in sorted(schedulers.values())] - - setValue = Variable.setValue - - -class VariableOsInstallKernelScheduleName(FieldValue, ReadonlyVariable): - """ - Schedule name - """ - type = "list" - source_variable = "os_install_kernel_schedule_data" - column = 0 - - -class VariableOsInstallKernelScheduleSet(FieldValue, ReadonlyVariable): - """ - Kernel has schedule - """ - type = "list-bool" - source_variable = "os_install_kernel_schedule_data" - column = 1 - - -class VariableOsInstallKernelTuxoniceSet(ReadonlyVariable): - """ - Available BFQ in kernel - """ - type = "bool" - - def get(self): - if any("CONFIG_TOI_CORE=y" in x - for x in self.Get('os_install_kernel_config')): - return "on" - return "off" - - -class VariableOsInstallKernelBfqSet(ReadonlyVariable): - """ - Available BFQ in kernel - """ - type = "bool" - - def get(self): - if any("CONFIG_IOSCHED_BFQ=y" in x - for x in self.Get('os_install_kernel_config')): - return "on" - return "off" - - -class VariableOsInstallNomodeset(Variable): - type = "bool" - def get(self): - cmdLine = '/proc/cmdline' - if 'nomodeset' in readFile(cmdLine): - return "on" - return "off" - -class VariableOsInstallKernelAttr(Variable): - """ - Install kernel attributes - """ - - def get(self): - def generate(): - # 5 sec for usb hdd boot - if self.GetBool('os_install_nomodeset'): - yield "nomodeset" - if self.Get('os_install_root_type') == 'usb-hdd': - yield "scandelay=5" - if (self.GetBool('os_install_mdadm_set') or - self.GetBool('os_install_lvm_set')): - yield "rd.auto" - yield "rd.retry=40" - - 
return " ".join(generate()) - - -class VariableOsInstallKernelResume(ReadonlyVariable): - """ - Install kernel resume - """ - - def get(self): - """install kernel resume parameter""" - for dev, partuuid, install in zip(self.Get('os_install_disk_use'), - self.Get('os_install_disk_partuuid'), - self.Get('os_install_disk_mount')): - if install == "swap": - if self.Get('os_install_kernel_tuxonice_set') == 'on': - return "tuxonice tuxonice_resume=%s real_resume=%s" % ( - dev, dev) - else: - if partuuid: - return "resume=PARTUUID=%s" % partuuid - else: - return "resume=%s" % dev - return "" - - -class KernelHelper(VariableInterface): - """ - Helper for kernel variables - """ - reFindVer = re.compile( - "(?<=version )(\d+\.?\d*\.?\d*\.?\d*)([^\d* ])*(\d*)") - - def getFilesByType(self, pathname, descr): - """Get files from "pathname" has "descr" in descriptions""" - filelist = map(lambda x: path.join(pathname, x), os.listdir(pathname)) - ftype = typeFile(magic=MAGIC_COMPRESS | MAGIC_SYMLINK).getMType - filesWithType = map(lambda x: (x, ftype(x)), - filter(path.exists, - filelist)) - return filter(lambda x: x[1] and descr in x[1], filesWithType) - - def getInitrdFiles(self, pathname): - filelist = map(lambda x: path.join(pathname, x), os.listdir(pathname)) - return [x for x in filelist if path.exists(x) and InitrdFile.is_cpio(x)] - - def getInitrd(self, arch, shortname, chroot, kernel, suffix="", - notsuffix=""): - """Get initrd for kernel""" - reInitrdVer = re.compile("(initrd|initramfs)-(.+?)(-install)?$", re.S) - - def initrd_version_by_name(filename): - resInitrdVer = reInitrdVer.search(filename) - if resInitrdVer: - return resInitrdVer.groups()[1] - return "" - - ftype = typeFile(magic=MAGIC_COMPRESS | MAGIC_SYMLINK).getMType - kernelfile = path.join(chroot, 'boot', kernel) - typeKernelFile = ftype(kernelfile) - if typeKernelFile is None: - return "" - resKernelVer = self.reFindVer.search(ftype(kernelfile)) - if resKernelVer: - kernelVersion = "%s-%s-%s" % \ - 
(resKernelVer.group().replace('-calculate', ''), - arch, shortname) - origKernelVer = resKernelVer.group() - - bootdir = path.join(chroot, 'boot') - initramfsFiles = self.getInitrdFiles(bootdir) - initramfsWithVer = \ - filter(lambda x: (kernelVersion in x[1] or - origKernelVer in x[1]) and \ - x[0].endswith(suffix) and \ - ( - not notsuffix or not x[0].endswith(notsuffix)), - map(lambda x: (x, initrd_version_by_name(x)), - initramfsFiles)) - if initramfsWithVer: - return path.split(min(initramfsWithVer, - key=itemgetter(0))[0])[-1] - return "" - - -class VariableOsInstallKernel(ReadonlyVariable, KernelHelper): - """ - Kernel filename - """ - - def get(self): - bootdir = path.join(self.Get('cl_chroot_path'), 'boot') - modulesdir = path.join(self.Get('cl_chroot_path'), 'lib/modules') - validKernel = listDirectory(modulesdir) - kernelFiles = self.getFilesByType(bootdir, "Linux kernel") - installMarch = self.Get('os_install_arch_machine') - kernelsWithVer = \ - map(lambda x: ( - x[0], (getTupleVersion("".join(x[1].groups()[0:3:2])), - path.getmtime(x[0]))), - # convert version to tuple( versionTuple, mtime) - # version detect, for this version lib contains moudules - # kernel arch equal install arch - ifilter(lambda x: x[1] and x[1].group() in validKernel and - installMarch in x[0].rpartition('/')[2], - # (filename,version) - imap(lambda x: (x[0], self.reFindVer.search(x[1])), - kernelFiles))) - if kernelsWithVer: - return path.split(max(kernelsWithVer, key=itemgetter(1))[0])[-1] - else: - return "vmlinuz" - - -class VariableOsInstallInitrd(ReadonlyVariable, KernelHelper): - """ - Optimized initramfs filename - """ - - def get(self): - return self.getInitrd(self.Get('os_install_arch_machine'), - self.Get('os_install_linux_shortname'), - self.Get('cl_chroot_path'), - self.Get('os_install_kernel'), - suffix="", notsuffix="-install") or \ - self.getInitrd(self.Get('os_install_arch_machine'), - self.Get('os_install_linux_shortname'), - self.Get('cl_chroot_path'), - 
self.Get('os_install_kernel'), - suffix="-install")[:-8] \ - or "initrd" - - -class VariableOsInstallInitrdInstall(ReadonlyVariable, KernelHelper): - """ - Install initramfs filename - """ - - def get(self): - return self.getInitrd(self.Get('os_install_arch_machine'), - self.Get('os_install_linux_shortname'), - self.Get('cl_chroot_path'), - self.Get('os_install_kernel'), - suffix="-install") or "initrd-install" - - -class VariableOsInstallSystemMap(ReadonlyVariable): - """ - Install system map filename - """ - - def get(self): - systemmapfile = self.Get('os_install_kernel').replace('vmlinuz', - 'System.map') - if systemmapfile.startswith('System.map') and path.exists( - path.join(self.Get('cl_chroot_path'), 'boot', systemmapfile)): - return systemmapfile - else: - return "" - - -class VariableOsInstallKernelCpufreq(ReadonlyVariable): - """ - Cpufreq modules - """ - - def get(self): - """Get cpufreq (and other from modules_3= param) from conf.d/modules""" - cpufreqmods = map(lambda x: x.partition('=')[2].strip("\n '\""), - filter(lambda x: x.startswith('modules_3'), - readLinesFile('/etc/conf.d/modules'))) - if cpufreqmods: - return cpufreqmods[0] - else: - return "" - - -class VariableClInstallKernelUid(ReadonlyVariable): - """ - Variable install kernel UID - """ - - def get(self): - return getKernelUid(self.Get('os_install_root_dev')) - - -class VariableClInstallKernelBuild(Variable): - """ - Переменная используемся для GRP дистрибутивов и сборки нескольких ядер - """ - value = "" - - -class VariableClInstallKernelVersion(VariableOsInstallKernelConfig): - """ - Версия ядра в /usr/src/linux - """ - def get(self): - image = self.Get('cl_image') - if image: - with image: - try: - distrPath = image.getDirectory() - return self.get_kernel_src(distrPath) - except DistributiveError as e: - return "" - return "" diff --git a/libs_crutch/install/variables/linux.py b/libs_crutch/install/variables/linux.py deleted file mode 100644 index 54f35c5..0000000 --- 
a/libs_crutch/install/variables/linux.py +++ /dev/null @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.lib.datavars import ReadonlyVariable, VariableInterface -from calculate.lib.variables.linux import Linux - -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class InstallLinux(Linux, VariableInterface): - def __getFromImageOrCurrent(self, currentVar): - """Get value from install image or current system""" - if self.Get('cl_action') == 'system': - image = self.Get('cl_image') - if image: - d = image.getInfo() - # support lazy values - res = d.get(currentVar, "") - return str(res()) if callable(res) else res - else: - return "" - else: - return self.Get(currentVar) - - # variable for get current system info (example: os_linux_shortname) - current_variable = "" - # field of distroinfo (name,ver,build,march and etc) - distroinfo_field = "" - - def get(self): - """Get by distroinfo or current info""" - return self.__getFromImageOrCurrent(self.current_variable) - -class VariableOsInstallChrootableSet(InstallLinux, ReadonlyVariable): - """Можно ли выполнить chroot в систему""" - current_variable = "os_chrootable_set" - -class VariableOsChrootableSet(ReadonlyVariable): - """Можно ли выполнить chroot в систему""" - type = "boolean" - value = "on" - -class 
VariableOsInstallLinuxShortname(InstallLinux, ReadonlyVariable): - """Shortname of system""" - current_variable = "os_linux_shortname" - - -class VariableOsInstallLinuxVer(InstallLinux, ReadonlyVariable): - """Version of system""" - current_variable = "os_linux_ver" - - -class VariableOsInstallLinuxBuild(InstallLinux, ReadonlyVariable): - """Build of system""" - current_variable = "os_linux_build" - - -class VariableOsInstallArchMachine(InstallLinux, ReadonlyVariable): - """Arch of system""" - current_variable = "os_arch_machine" - - -class VariableOsInstallLinuxFiles(InstallLinux, ReadonlyVariable): - """Files num in system""" - current_variable = "os_linux_files" - - -class VariableOsInstallLinuxName(InstallLinux, ReadonlyVariable): - """ - Install distro name - """ - current_variable = "os_linux_name" - - -class VariableOsInstallLinuxSystem(InstallLinux, ReadonlyVariable): - """ - Install system name - """ - current_variable = "os_linux_system" - - -class VariableOsInstallLinuxSubname(InstallLinux, ReadonlyVariable): - """ - Install subname - """ - current_variable = "os_linux_subname" diff --git a/libs_crutch/install/variables/locale.py b/libs_crutch/install/variables/locale.py deleted file mode 100644 index 08b654e..0000000 --- a/libs_crutch/install/variables/locale.py +++ /dev/null @@ -1,402 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -from os import path -from calculate.lib.datavars import Variable, VariableError, ReadonlyVariable -from calculate.lib.variables.locale import Locale -from calculate.lib.utils.files import readLinesFile, process -from calculate.lib.utils.portage import isPkgInstalled -from calculate.lib.utils.common import (getValueFromCmdLine, getValueFromConfig, - CmdlineParams, cmpVersion) -from pytz import timezone, country_timezones, UnknownTimeZoneError -from datetime import datetime - -from calculate.lib.cl_lang import setLocalTranslate, _ - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class LocaleVariable(ReadonlyVariable, Locale): - """ - Locale variables not using for flash installation - """ - - def uncompatible(self): - """ - Network setting up unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return \ - _("Locale configuration unavailable for Flash install") - return "" - - -class VariableOsInstallLinguas(LocaleVariable): - """ - Current LINGUAS value - """ - mode = "w" - - def get(self): - def get_linguas(lines): - linguas = map(lambda x: x.strip().rpartition('=')[-1].strip('"\''), - filter(lambda x: x.startswith("LINGUAS="), - lines)) - return linguas[-1] if linguas else "" - - makeconf = '/etc/make.conf' - - emerge_config = self.Get('cl_emerge_config') - if emerge_config and "LINGUAS" in emerge_config: - return emerge_config['LINGUAS'].encode('UTF-8') - - infocommand = ['emerge', '--info'] - defaultLinguas = "bg en de es fr it pl pt_BR nl ru uk" - # get linguas from make.conf, emerge --info or default - curlanguage = self.Get('os_install_locale_language') - return get_linguas(readLinesFile(makeconf)) or \ - " ".join(filter(lambda x: x == "en" or x == curlanguage, - get_linguas( - process( - *infocommand).readlines() or "").split())) or \ - defaultLinguas - - -class VariableOsInstallLocaleConsolefont(LocaleVariable): - """ - Consolefont for locale - """ - - def get(self): - return 
self.getConsolefont(self.Get('os_install_locale_keyboard_layout')) - -class VariableOsInstallLocaleKeymap(LocaleVariable): - """ - Keymap of locale (used for /etc/conf.d/keymaps) - """ - - def get(self): - return self.getKeymap(self.Get('os_install_locale_keyboard_layout')) - - -class VariableOsInstallLocaleDumpkeys(LocaleVariable): - """ - Dumpkeys_charset for keymap - """ - - def get(self): - # is specified keymap support by locale hash - if self.Get('os_install_locale_keymap') in self.getFields('keymap'): - return self.getDumpkeys_charset( - self.getKeyboardLayout( - keymap=self.Get('os_install_locale_keymap'))) - else: - return self.getDumpkeys_charset( - self.Get('os_install_locale_keyboard_layout')) - - -class VariableOsInstallLocaleLocale(LocaleVariable): - """ - Locale (at example: ru_RU.utf8) - """ - - def get(self): - """locale (example: ru_RU.utf8)""" - return self.getLocale(self.Get('os_install_locale_lang')) - - -class VariableOsInstallLocaleLang(LocaleVariable): - """ - Full language (at example: ru_RU) - """ - mode = 'w' - metavalue = "LOCALE" - type = 'choice' - opt = ["--locale", "-l"] - - def init(self): - self.label = _("Locale") - self.help = _("set the locale") - - def get(self): - """lang (example: ru_RU)""" - return self.Get('os_locale_lang') - - def choice(self): - return zip(self.Get('os_lang'), - map(str, self.Get('os_lang', humanreadable=True))) - - -class VariableOsInstallLocaleKeyboardLayout(LocaleVariable): - """ - Full language (at example: ru_RU) - """ - mode = 'w' - metavalue = "KEYMAP" - type = 'choice' - opt = ["--keymap", "-k"] - - def init(self): - self.label = _("Keyboard layout") - self.help = _("set the keyboard layout") - - def get(self): - """lang (example: ru_RU)""" - selected_lang = self.Get('os_install_locale_lang') - current_lang = self.Get('os_locale_lang') - if selected_lang != current_lang: - return self.getKeyboardLayout(lang=self.Get('os_install_locale_lang')) - else: - return self.Get('os_locale_keyboard_layout') 
- - def choice(self): - return zip(self.Get('os_keyboard_layout'), - map(str, self.Get('os_keyboard_layout', humanreadable=True))) - - -class VariableOsInstallLocaleLanguage(LocaleVariable): - """ - Short language (at example ru) - """ - - def get(self): - return self.getLanguage(self.Get('os_install_locale_lang')) - - -class VariableOsInstallLocaleXkb(LocaleVariable): - """ - Keyboard layout for X server - """ - - def get(self): - return self.getXkblayout(self.Get('os_install_locale_keyboard_layout')) - - -class VariableOsInstallLocaleXkbname(LocaleVariable): - """ - Keyboard layout name for X server - """ - - def get(self): - localeXkb = self.Get("os_install_locale_xkb") - if localeXkb: - return localeXkb.split("(")[0] - return "" - - -class VariableOsInstallClockTimezone(LocaleVariable): - """ - Installation timezone for clock - """ - mode = 'w' - type = 'choiceedit' - metavalue = "TIMEZONE" - opt = ["--timezone"] - locale_varname = 'os_install_locale_lang' - - def init(self): - self.label = _("Timezone") - self.help = _("set the timezone") - - def get(self): - return self.Get('os_clock_timezone') - - def check(self, value): - if not value or not path.isfile(path.join( - "/usr/share/zoneinfo", value)): - raise VariableError(_("Wrong timezone %s") % value) - - def generateComments(self, tzs): - """ - Generate comments by timezone names - """ - for tzname in tzs: - # add separator - if tzname == "---": - yield ("---", "---") - continue - try: - tz = timezone(tzname) - strinfo = tz.localize(datetime.now()).strftime('%z') - yield ( - tzname, "%s (%s:%s)" % (tzname, strinfo[:3], strinfo[-2:])) - except UnknownTimeZoneError: - pass - - def choice(self): - source = ["Etc/GMT-12", - "Pacific/Midway", - "Pacific/Honolulu", - "America/Anchorage", - "Canada/Pacific", - "America/Tijuana", - "America/Phoenix", - "America/Denver", - "America/Mazatlan", - "America/Monterrey", - "America/Regina", - "America/Mexico_City", - "Canada/Central", - "America/Bogota", - 
"America/New_York", - "America/Indiana/Indianapolis", - "America/Halifax", - "America/Caracas", - "America/Manaus", - "America/Santiago", - "America/St_Johns", - "America/Sao_Paulo", - "America/Argentina/Buenos_Aires", - "Etc/GMT+3", - "America/Montevideo", - "Atlantic/South_Georgia", - "Atlantic/Azores", - "Atlantic/Cape_Verde", - "UTC", - "Africa/Casablanca", - "Europe/Amsterdam", - "Europe/Belgrade", - "Europe/Brussels", - "Europe/Zagreb", - "Africa/Tunis", - "Europe/Kaliningrad", - "Asia/Amman", - "Europe/Athens", - "Europe/Istanbul", - "Asia/Beirut", - "Europe/Helsinki", - "Europe/Kiev", - "Europe/Sofia", - "Africa/Windhoek", - "Asia/Jerusalem", - "Africa/Cairo", - "Europe/Minsk", - "Africa/Harare", - "Europe/Moscow", - "Asia/Baghdad", - "Asia/Kuwait", - "Africa/Nairobi", - "Asia/Tbilisi", - "Asia/Tehran", - "Europe/Samara", - "Asia/Muscat", - "Asia/Baku", - "Asia/Yerevan", - "Asia/Kabul", - "Asia/Yekaterinburg", - "Asia/Karachi", - "Asia/Calcutta", - "Asia/Jayapura", - "Asia/Katmandu", - "Asia/Almaty", - "Asia/Omsk", - "Asia/Dhaka", - "Asia/Rangoon", - "Asia/Krasnoyarsk", - "Asia/Bangkok", - "Asia/Irkutsk", - "Asia/Hong_Kong", - "Asia/Singapore", - "Australia/Perth", - "Asia/Taipei", - "Asia/Yakutsk", - "Asia/Tokyo", - "Asia/Seoul", - "Australia/Adelaide", - "Australia/Darwin", - "Asia/Vladivostok", - "Australia/Brisbane", - "Pacific/Guam", - "Australia/Melbourne", - "Australia/Hobart", - "Asia/Srednekolymsk", - "Asia/Kamchatka", - "Pacific/Auckland", - "Etc/GMT-13"] - - source = list(set(source + Locale().getFields('timezone'))) - - def sortkey(s): - tz = timezone(s) - strinfo = tz.localize(datetime.now()).strftime('%z') - return int(strinfo[:3]), int("%s%s" % (strinfo[0], strinfo[-2:])) - - try: - lang = self.Get(self.locale_varname).split('_')[1] - nativeTZ = map(lambda x: x.encode('utf-8'), - country_timezones[lang]) - source = nativeTZ + ["---"] + \ - sorted(filter(lambda x: not x in nativeTZ, source), - key=sortkey) - except (KeyError, IndexError) as e: 
- pass - return list(self.generateComments(source)) - - -class VariableOsInstallClockType(Variable): - """ - Type of clock (UTC or local) - """ - mode = 'w' - type = 'choice' - opt = ["--hwclock"] - metavalue = "CLOCK" - fallback_value = "local" - - def init(self): - self.label = _("Hardware clock type") - self.help = _("set hardware clock type") - - def exclude_value(self): - """ - Исключения - """ - root_type = self.Get('os_root_type') - hr_virtual = self.Get('hr_virtual') - # oracle virtualbox по умолчанию для linux систем выставляет - # использование UTC - if root_type == "livecd" and hr_virtual == "virtualbox": - return "UTC" - return None - - def get(self): - """type of clock (UTC or local)""" - # в первую очередь смотрим на параметры загрузки системы - cmdtype = getValueFromCmdLine(CmdlineParams.Calculate, - CmdlineParams.Clock) - if cmdtype and cmdtype in self.choice(): - return cmdtype - - # во вторую очередь исключения (например для livecd и virtualbox) - clocktype = self.exclude_value() - if clocktype: - return clocktype - - # получаем значение из конфигурационных файлов hwclock - clockTypeFile = ['/etc/conf.d/clock', '/etc/conf.d/hwclock'] - for f in clockTypeFile: - clock = getValueFromConfig(f, "clock") - if clock: - if clock.upper() == 'UTC': - return clock.upper() - elif clock.lower() == 'local': - return clock.lower() - # запасное значение - return self.fallback_value - - def choice(self): - return ["local", "UTC"] diff --git a/libs_crutch/install/variables/lvm.py b/libs_crutch/install/variables/lvm.py deleted file mode 100644 index 42cd94b..0000000 --- a/libs_crutch/install/variables/lvm.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys -from calculate.lib.datavars import (Variable, VariableInterface, - ReadonlyVariable, ReadonlyTableVariable, - FieldValue, HumanReadable) -from calculate.lib.utils import device - -from calculate.lib.cl_lang import setLocalTranslate - -setLocalTranslate('cl_install3', sys.modules[__name__]) - -class LvmHelper(VariableInterface): - def getLvmData(self): - for vg, lv, pv in device.lvm.pvdisplay_full(): - yield lv, vg, pv - -####################################################### -# Devices variables -####################################################### -class VariableOsLvmData(ReadonlyTableVariable, LvmHelper): - """ - Information about LVM - """ - source = ['os_lvm_lvname', - 'os_lvm_vgname', - 'os_lvm_pvname', - 'os_lvm_pvname_parent' - ] - - def get(self, hr=HumanReadable.No): - """LVM hash""" - def generator(): - for lvname, vgname, pvname in self.getLvmData(): - all_base = device.udev.get_all_base_devices(name=pvname) - full_base = ",".join(all_base) - yield lvname, vgname, pvname, full_base - return list(generator()) or [[]] - - setValue = Variable.setValue - - -class VariableOsLvmLvname(FieldValue, ReadonlyVariable): - """ - Logical volumes names - """ - type = "list" - source_variable = "os_lvm_data" - column = 0 - - -class VariableOsLvmVgname(FieldValue, ReadonlyVariable): - """ - Volume groups names - """ - type = "list" - source_variable = "os_lvm_data" - column = 1 - - -class VariableOsLvmPvname(FieldValue, ReadonlyVariable): - """ - Phisical volumes names - """ - type = "list" - source_variable = "os_lvm_data" - column = 2 - 
-class VariableOsLvmPvnameParent(FieldValue, ReadonlyVariable): - """ - Phisical volumes names - """ - type = "list" - source_variable = "os_lvm_data" - column = 3 diff --git a/libs_crutch/install/variables/net.py b/libs_crutch/install/variables/net.py deleted file mode 100644 index a310c9d..0000000 --- a/libs_crutch/install/variables/net.py +++ /dev/null @@ -1,948 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -import re -from os import path, readlink -from calculate.lib.datavars import (Variable, VariableError, ReadonlyVariable, - READONLY, TableVariable, FieldValue, - VariableInterface, HumanReadable) - -from calculate.lib.cl_lang import setLocalTranslate, _ -from calculate.lib.variables.system import RootType - -setLocalTranslate('cl_install3', sys.modules[__name__]) - -from calculate.lib.utils.ip import (getInterfaces, getIp, getMask, getMac, - cidrToMask, maskToCidr, getIpNet, isDhcpIp, - checkIp, checkMask, isSlaveInterface, - getOperState, getPlugged) -import calculate.lib.utils.device as device -from calculate.lib.utils.device import lspci -from calculate.lib.utils.files import (listDirectory, readLinesFile) -from calculate.lib.utils import ip -from calculate.lib.utils.portage import isPkgInstalled -from operator import itemgetter -from itertools import * -from calculate.install.distr import DistributiveError - - -class NetHelper(VariableInterface): - """ - Network variables not using for flash installation - """ - routing = False - - def uncompatible(self): - """ - Network setting up unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return \ - _("Network configuration is unavailable for Flash install") - if self.routing and not self.Select('os_install_net_interfaces', - where='os_install_net_status', - _notin=('off', 'dhcp'), limit=1): - return _("Network routing configuration is not available if all " - "interfaces are set to DHCP") - if self.Get('cl_network_migrate_set') == 'on': - return _("Network settings unavailable with use settings migration") - return "" - - -class VariableClNetworkConfigureSet(Variable): - """ - Выполнять ли настройку сети шаблонами - """ - type = "bool" - opt = ["--network"] - - def init(self): - self.label = _("Reconfigure network") - self.help = _("reconfigure network") - - def get(self): - if self.GetBool("cl_network_migrate_set"): - return "off" - else: - return "on" - - 
-class VariableClNetworkMigrateSet(Variable): - """ - Использовать миграцию для переноса настроек - """ - type = "bool" - element = "radio" - - def init(self): - self.label = _("Network") - self.help = _("use the network migration") - - def choice(self): - return [("on", _("Migrate network settings")), - ("off", _("Manually network configuration"))] - - def get(self): - for manvar in ("os_install_net_conf", - "os_install_net_data", - "os_install_net_fqdn", - "os_install_ntp", - "os_install_net_dns", - "os_install_net_dns_search", - "os_install_net_route_data"): - if self.is_console_set(manvar): - return "off" - else: - return "on" - - -class VariableOsInstallNtp(NetHelper, Variable): - """ - NTP server for system - """ - opt = ['--ntp'] - metavalue = "NTP" - value = "ntp0.zenon.net" - - def init(self): - self.label = _("NTP server") - self.help = _("set the NTP server for the system") - - -class VariableOsInstallProxy(NetHelper, Variable): - """ - Proxy for system - """ - value = "" - - -class VariableOsInstallNetType(NetHelper, ReadonlyVariable): - """ - Тип сетевого устройства: сейчас eth или wlan - """ - type = "list" - - def _getType(self, iface): - if ip.isWireless(iface): - return "wlan" - return "eth" - - def get(self): - interfaces = self.Get('os_install_net_interfaces') - return [self._getType(x) for x in interfaces] - - -class VariableOsInstallNetInterfaces(NetHelper, ReadonlyVariable): - """ - Net interface devices - """ - type = "list" - - def init(self): - self.label = _("Interface") - - def get(self): - return sorted(getInterfaces()) - - -class VariableOsInstallNetInterfacesOrig(NetHelper, ReadonlyVariable): - """ - Net interface devices orig name from udev (enp0s2) - Depricated - """ - type = "list" - - def get(self): - return self.Get('os_install_net_interfaces') - - -class VariableOsNetInterfacesInfo(NetHelper, ReadonlyVariable): - """ - Inforamation about net interfaces - """ - - def get(self): - self.Get("os_net_interfaces") - listInterfacesInfo 
= [] - # Получена ли сеть по DHCP если нет to ip или off - for interface, ipaddr, dhcp in zip( - self.Get('os_install_net_interfaces'), - self.Get('os_install_net_ip'), - self.Get('os_install_net_dhcp_set')): - if dhcp == "on": - listInterfacesInfo.append((interface, _("DHCP"))) - else: - listInterfacesInfo.append((interface, - ipaddr if ipaddr else _("Off"))) - return ", ".join(map(lambda x: "%s (%s)" % (x[0], x[1]), - listInterfacesInfo)) - - -class VariableOsInstallNetData(NetHelper, TableVariable): - """ - Hash for information about net - """ - opt = ["--iface"] - metavalue = "IFACE_SETTINGS" - source = ["os_install_net_interfaces", - "os_install_net_status", - "os_install_net_mask", - "os_install_net_name", - "os_install_net_mac"] - - def init(self): - def defaultInterface(): - ifaces = getInterfaces() - if ifaces: - return ifaces[0] - else: - return "enp0s0" - - self.label = _("Addresses") - # self.help = _("IP address with network (example:%s)")%"192.168.1.1/24" - self.help = _("Network interface, DHCP or IP address and network mask " - "(example: %s)") % (" --iface %s:192.168.1.1:24" % - defaultInterface()) - - def raiseReadonlyIndexError(self, fieldname="", variablename="", - value=""): - """ - Behavior on change readonly index - """ - raise VariableError(_("Network interface %s not found") % value) - - -class VariableOsInstallNetHostname(NetHelper, Variable): - """ - Computer hostname - """ - - def get(self): - return self.Get('os_install_net_fqdn').partition('.')[0] - - -class VariableOsInstallNetFqdn(NetHelper, Variable): - """ - Full host name - """ - opt = ['--hostname'] - metavalue = "HOSTNAME" - - def init(self): - self.label = _("Hostname") - self.help = _("set either the short or the full hostname") - - def set(self, value): - if "." 
in value: - return value - else: - return "%s.%s" % (value, self.Get('os_install_net_domain')) - - def check(self, value): - maxfqdn = 254 - if " " in value: - raise VariableError(_("Wrong hostname")) - if len(value) > maxfqdn: - raise VariableError( - _("The hostname length should be less than %d") % maxfqdn) - - def get(self): - if path.exists('/proc/self/fd/1') and \ - readlink('/proc/self/fd/1') == '/dev/console' and \ - self.Get('os_root_dev') == '/dev/nfs': - return "calculate.local" - return self.Get('os_net_fqdn') - - -class VariableOsInstallNetDomain(NetHelper, Variable): - """ - Domain on install system - """ - - def get(self): - return self.Get('os_install_net_fqdn').partition('.')[2] - - -class VariableOsInstallNetAllow(NetHelper, Variable): - """ - Allowed network - """ - - def get(self): - """Allowed network""" - return self.Get("os_net_allow") - - -class VariableOsInstallNetName(NetHelper, ReadonlyVariable): - """ - Net device names - """ - type = "list" - - def init(self): - self.label = _("Name") - - def get(self): - rePci = re.compile(r"(\d\d:\d\d\.\d)(?:/[^/]+){2}$") - - def getPci(interface): - pathname = path.realpath( - device.sysfs.syspath(device.sysfs.Path.ClassNet, interface)) - - pci = rePci.search(pathname) - if pci: - return pci.group(1) - else: - return "" - - pciEthernet = lspci(shortInfo=True) - return map(lambda x: "{vendor} {name}".format(**x), - map(lambda x: pciEthernet.get(getPci(x), - {'vendor': _("Unknown"), - 'name': _("vendor")}), - self.Get('os_install_net_interfaces'))) - - -class VariableOsInstallNetMacType(NetHelper, ReadonlyVariable): - """ - Net devices mac (Example: local/OUI) - """ - type = "list" - reLocal = re.compile("^.[2367abef]:.*$", re.I) - - def init(self): - self.label = _("Mac type") - - def _mactype(self, mac): - if not mac: - return "" - if not self.reLocal.match(mac): - return "OUI" - else: - return "local" - - def get(self): - return map(self._mactype, self.Get('os_install_net_mac')) - - -class 
VariableOsInstallNetMac(NetHelper, ReadonlyVariable): - """ - Net devices mac (Example: 01:02:03:04:05:06) - """ - type = "list" - - def init(self): - self.label = _("MAC") - - def get(self): - return map(lambda x: getMac(x).lower(), - self.Get('os_install_net_interfaces')) - - -class VariableOsInstallNetStatus(NetHelper, Variable): - """ - Net status (dhcp,ip,or off) - """ - type = "choiceedit-list" - - def init(self): - self.label = _("IP address") - - def get(self): - return map(self.getDefaultValue, - self.Get('os_install_net_interfaces')) - - def getDefaultValue(self, iface): - def statusValue(ipaddr, dhcp): - if not getPlugged(iface): - return 'off' - if isSlaveInterface(iface): - return 'off' - if dhcp == "on": - return "dhcp" - elif ipaddr: - return ipaddr - else: - if getOperState(iface) == 'down': - return "off" - else: - return "dhcp" - - rootDevNfs = self.Get('os_root_dev') == '/dev/nfs' - return statusValue(getIp(iface), "on" \ - if rootDevNfs or isDhcpIp(iface) else "off") - - def set(self, value): - value = map(lambda x: x.lower() if x else x, value) - ifaces = self.Get('os_install_net_interfaces') - return map(lambda x: self.getDefaultValue(x[1]) \ - if x[0] == "auto" else x[0], - zip(value, ifaces)) - - def check(self, value): - for status in value: - if status not in map(lambda x: x[0], self.choice()) and \ - not checkIp(status): - raise VariableError(_("Wrong IP address %s") % status) - - def choice(self): - return (("dhcp", _("DHCP")), - ("off", _("Disabled")), - ("auto", _("Auto"))) - - -class VariableOsInstallNetIp(NetHelper, ReadonlyVariable): - """ - IP for all network interfaces - """ - type = "list" - - def init(self): - self.label = _("IP address") - - def get(self): - return map(lambda x: "" if x[1].lower() == "off" else - getIp(x[0]) if x[1].lower() == "dhcp" else x[1], - zip(self.Get('os_install_net_interfaces'), - self.Get('os_install_net_status'))) - - # def check(self,value): - # dhcps = self.Get('os_install_net_dhcp_set') - # 
wrongIp = filter(lambda x:x[0] and not checkIp(x[0]), - # zip(value,dhcps)) - # if wrongIp: - # if wrongIp[0][0]: - # raise VariableError(_("Wrong IP address %s")%wrongIp[0][0]) - - -class VariableOsInstallNetNetwork(NetHelper, ReadonlyVariable): - """ - Network for ip (Example:192.168.0.0/16) - """ - type = "list" - - def init(self): - self.label = _("Network") - - def get(self): - return map(lambda x: getIpNet(x[0], x[1]) if x[0] and x[1] else "", - zip(self.Get('os_install_net_ip'), - self.Get('os_install_net_mask'))) - - -class VariableOsInstallNetCidr(NetHelper, ReadonlyVariable): - """ - CIDR of interfaces - """ - type = "list" - - def init(self): - self.label = _("CIDR") - - def get(self): - """ - Get CIDR of ip,net (Example: 24) - """ - return map(lambda x: maskToCidr(x) if x else '', - self.Get('os_install_net_mask')) - - -class VariableOsInstallNetMask(NetHelper, Variable): - """ - Net mask of interfaces (Example:255.255.0.0) - """ - type = "choiceedit-list" - - def init(self): - self.label = _("Mask") - - def get(self): - return map(lambda x: cidrToMask(getMask(x)), - self.Get('os_install_net_interfaces')) - - def set(self, value): - """ - Convert to mask CIDR value - """ - - def convertCidrToMask(x): - if x and x.isdigit() and int(x) in range(0, 33): - return cidrToMask(int(x)) - else: - return x - - res = map(convertCidrToMask, value) - return res - - def check(self, value): - dhcps = self.Get('os_install_net_status') - wrongMask = filter(lambda x: (x[0] or not x[1] in ("off", "dhcp")) and \ - not checkMask(x[0]), - zip(value, dhcps)) - if wrongMask: - raise VariableError(_("Wrong mask %s") % wrongMask[0][0]) - - def choice(self): - return ["255.255.255.255", - "255.255.255.0", - "255.255.0.0", - "255.0.0.0", - "0.0.0.0"] - - -class VariableOsInstallNetDhcpSet(NetHelper, Variable): - """ - Describe ip was get by DHCP or manualy - """ - type = "boolauto-list" - - def init(self): - self.label = _("DHCP") - - def get(self): - return map(lambda x: "on" if 
x == "dhcp" else "off", - self.Get('os_install_net_status')) - - -class VariableOsInstallNetRouteData(NetHelper, TableVariable): - """ - Route table data - """ - opt = ["--route"] - metavalue = "NETROUTE" - source = ['os_install_net_route_network', - 'os_install_net_route_gw', - 'os_install_net_route_dev', - 'os_install_net_route_src'] - routing = True - - def humanReadable(self): - return self.Get() - - def init(self): - self.label = _("Routing") - self.help = \ - _("add a routing rule (specified as " - "NETWORK:GATEWAY[:DEV[:SOURCE]])") - - def get(self, hr=HumanReadable.No): - """Routing hash""" - interfaces = self.Get('os_install_net_interfaces') - interfaces_status = self.Get('os_install_net_status') - interfaces_network = self.Get('os_install_net_network') - staticInterface = \ - map(itemgetter(0, 2), - filter(lambda x: not x[1] in ("off", "dhcp"), - zip(interfaces, interfaces_status, interfaces_network))) - route_data = [] - if staticInterface: - staticInterface, skipNet = zip(*staticInterface) - return map(lambda x: [x[0], - x[1].get('via', ''), - x[1].get('dev', ''), - x[1].get('src', '')], - ifilter(lambda x: not x[0] in skipNet, - ip.getRouteTable(staticInterface))) or [[]] - return [[]] - - def getHumanReadableAuto(self): - return Variable.getHumanReadableAuto(self) - - def setValue(self, value, force=False): - """ - Standard action for set value - """ - interfaces = self.Get('os_install_net_interfaces') - if len(interfaces) == 1: - wrapper = lambda x: x if not x else [x[0],x[1],interfaces[0],x[3]] - else: - wrapper = lambda x: x - - self.value = self.set(map(wrapper,value)) - self.wasSet = True - self.invalid = False - # run check - if not force: - self._check() - - -class VariableOsInstallNetRouteNetwork(NetHelper, FieldValue, Variable): - """ - Net for route table record - """ - type = "choiceedit-list" - source_variable = "os_install_net_route_data" - column = 0 - - def init(self): - self.label = _("Network") - - def choice(self): - return 
["default"] # +self.Get('os_install_net_network') - - def check(self, value): - ########################## - # detect duplicate network - ########################## - for wrongnet in ifilterfalse(ip.checkNet, - ifilter("default".__ne__, - value)): - raise VariableError(_("Wrong network %s") % wrongnet) - dupNetwork = list(set(filter(lambda x: value.count(x) > 1, - value))) - if dupNetwork: - raise VariableError( - _("Network '%s' is used more than once") % dupNetwork[0]) - - -class VariableOsInstallNetRouteGw(NetHelper, FieldValue, Variable): - """ - Gateway for route table record - """ - source_variable = "os_install_net_route_data" - column = 1 - - def init(self): - self.label = _("Gateway") - - def check(self, value): - ############################# - # search unreachable gateways - ############################# - NET, GW = 0, 1 - netsGw = zip(self.Get('os_install_net_route_network'), - value) - nets = filter(lambda x: x and x != "default", - chain(self.Get('os_install_net_route_network'), - self.Get('os_install_net_network'))) - - for wrongip in ifilterfalse(ip.checkIp, value): - raise VariableError(_("Wrong gateway IP %s") % wrongip) - - wrongGws = map(lambda x: x[GW], - filter(lambda x: not ip.isIpInNet(x[GW], - *(set(nets) - set( - x[NET]))), - filter(lambda x: x[GW], - netsGw))) - if wrongGws: - raise VariableError(_("Gateways %s are unreachable") % - (",".join(wrongGws))) - - -class VariableOsInstallNetRouteDev(NetHelper, FieldValue, Variable): - """ - Device for route table record - """ - type = "choice-list" - source_variable = "os_install_net_route_data" - column = 2 - - def init(self): - self.label = _("Interface") - - def choice(self): - return self.Get('os_install_net_interfaces') - - -class VariableOsInstallNetRouteSrc(NetHelper, FieldValue, Variable): - """ - Source ip for route table record - """ - type = "choiceedit-list" - source_variable = "os_install_net_route_data" - column = 3 - - def init(self): - self.label = _("Source IP") - - def 
choice(self): - return [""] + self.Get('os_install_net_ip') - - def check(self, value): - for wrongip in ifilterfalse(ip.checkIp, - ifilter(None, value)): - raise VariableError(_("Wrong source IP %s") % wrongip) - ipAddrs = self.Get('os_install_net_ip') - wrongIps = filter(lambda x: x and not x in ipAddrs, - value) - if wrongIps: - raise VariableError( - _("Wrong IP address %s in the specified source IP") % - (",".join(wrongIps))) - - -class VariableOsInstallNetRoute(NetHelper, ReadonlyVariable): - """ - Data by route for conf.d/net - """ - - def performRouteData(self, performFunc): - routeMatrix = zip(self.Get('os_install_net_route_network'), - self.Get('os_install_net_route_gw'), - self.Get('os_install_net_route_dev'), - self.Get('os_install_net_route_src')) - DEV, IP, CIDR, NET = 0, 1, 2, 1 - return map(lambda x: performFunc(x[DEV], x[NET], routeMatrix), - # union ip and mask to ip/net - map(lambda x: (x[DEV], ip.getIpNet(x[IP], cidr=x[CIDR])) \ - if x[IP] and x[CIDR] else (x[DEV], ""), - # filter(lambda x:x[IP] and x[CIDR], - zip(self.Get('os_install_net_interfaces'), - self.Get('os_install_net_ip'), - self.Get('os_install_net_cidr')))) - - def get(self): - """Route info for conf.d/net""" - defaultDev = 0 - workIfaces = self.Select('os_install_net_interfaces', - where='os_install_net_status', - _notin="off") - if len(workIfaces) == 1: - defaultDev = workIfaces[0] - - def getRouteForInterfaceConf(interface, net, routeMatrix): - NET, GW, DEV, SRC = 0, 1, 2, 3 - # filter by interface and discard direct routes - # example: for 192.168.1.5/24 discard 192.168.1.0/24 net - route_list = filter(lambda x: (interface == x[DEV] or defaultDev and - interface == defaultDev) \ - and net != x[NET], routeMatrix) - - nets = [] - route_list_uniqnet = [] - for net, gw, dev, src in route_list: - if net not in nets: - route_list_uniqnet.append([net, gw, dev, src]) - nets.append(net) - - route_strs = map(lambda x: "{net}{gateway}{src}".format( - net=x[NET], - gateway=" via %s" % 
x[GW] if x[GW] else "", - src=" src %s" % x[SRC] if x[SRC] else ""), route_list_uniqnet) - # build string for route from net,gateway,dev and src - return "\n".join(route_strs) - - return self.performRouteData(getRouteForInterfaceConf) - - -class VariableOsInstallNetNmroute(VariableOsInstallNetRoute): - """ - Data by route for NetworkManager - """ - mode = READONLY - - def get(self): - """Route info for system-connections of NetworkManager""" - defaultDev = 0 - workIfaces = self.Select('os_install_net_interfaces', - where='os_install_net_status', - _notin="off") - if len(workIfaces) == 1: - defaultDev = workIfaces[0] - - def getRouteForInterfaceNM(interface, net, routeMatrix): - NET, GW, DEV, SRC = 0, 1, 2, 3 - defaultGw = map(lambda x: "%s;" % x[GW], - filter(lambda x: interface == x[DEV] and \ - x[NET] == "default", - routeMatrix)) - return "{0}\n".format(defaultGw[0] if defaultGw else "") + \ - "\n".join( - # build string for route from net,gateway,dev and src - map(lambda - x: "routes{num}={ip};{cidr};{gateway};0;".format( - num=x[0] + 1, - ip=x[1][NET].partition('/')[0], - cidr=x[1][NET].partition('/')[2], - gateway=x[1][GW] if x[1][GW] else "0.0.0.0"), - # filter by interface and discard direct routes - # example: for 192.168.1.5/24 discard 192.168.1.0/24 net - enumerate( - filter(lambda x: (interface == x[ - DEV] or defaultDev and - interface == defaultDev) and net != - x[ - NET] and \ - x[NET] != "default", - routeMatrix)))) - - return self.performRouteData(getRouteForInterfaceNM) - - -class VariableOsInstallNetConfAvailable(NetHelper, Variable): - """ - Available net configuration - """ - type = "list" - - def get(self): - mapNetConf = (('networkmanager', 'net-misc/networkmanager', - _("NetworkManager")), - ('openrc', '', _('OpenRC'))) - image = self.Get('cl_image') - if image: - with image as distr: - try: - distrPath = image.getDirectory() - return map(itemgetter(0, 2), - filter(lambda x: not x[1] or isPkgInstalled(x[1], - prefix=distrPath), - 
mapNetConf)) - except DistributiveError as e: - pass - return sorted(map(itemgetter(0, 2), mapNetConf[-1:]), key=itemgetter(1)) - - -class VariableOsInstallNetConf(NetHelper, Variable): - """ - Net setup (networkmanager or openrc) - """ - type = "choice" - opt = ["--netconf"] - metavalue = "NETMANAGER" - - def init(self): - self.label = _("Network manager") - self.help = _("network manager") - - def get(self): - """Net setup (networkmanager or openrc)""" - if filter(lambda x: x.lower() == "networkmanager", - listDirectory('/etc/runlevels/boot') + - listDirectory('/etc/runlevels/default')) \ - or self.Get('os_root_type') == "livecd": - nm = "networkmanager" - else: - nm = "" - for val, comment in self.Get('os_install_net_conf_available'): - if nm == val and not (self.Get('os_root_dev') == '/dev/nfs' and - self.Get( - 'os_install_root_type') == "livecd"): - return nm - else: - return "openrc" - - def choice(self): - return self.Get('os_install_net_conf_available') - - -class VariableOsInstallNetDnsSearch(NetHelper, Variable): - """ - Dns search - """ - opt = ["--domain-search"] - metavalue = "DOMAINS" - - def init(self): - self.label = _("Search domains") - self.help = _("search domains (comma-separated)") - - def isDNSByDHCP(self): - """ - If first interface get ip by DHCP dns must be DHCP - """ - statuses = self.Get('os_install_net_status') - if statuses: - if statuses[0] == "dhcp": - return True - return False - - def set(self, value): - return " ".join(re.split('[; ,]', value)) - - def get(self): - """Get current name servers""" - dnsSearch = " ".join( - map(lambda x: x.strip().partition("search")[2].strip(), - filter(lambda x: x.lstrip().startswith("search"), - readLinesFile('/etc/resolv.conf')))) - return "" if self.isDNSByDHCP() else dnsSearch - - def humanReadable(self): - return self.Get() or (_("Get via DHCP") - if self.isDNSByDHCP() - else _("Not used")) - - -class VariableOsInstallNetDns(VariableOsInstallNetDnsSearch): - """ - Dns servers - """ - opt = 
["--dns"] - metavalue = "DNS" - - def init(self): - self.label = _("Domain name server") - self.help = _("domain name server (comma-separated)") - - def set(self, value): - return " ".join(re.split('[; ,]', value)) - - def get(self): - dnsIps = filter(ip.checkIp, - map(lambda x: x.strip().partition("nameserver")[ - 2].strip(), - filter( - lambda x: x.lstrip().startswith("nameserver"), - readLinesFile('/etc/resolv.conf')))) - return "" if self.isDNSByDHCP() else " ".join(dnsIps) - - def check(self, value): - reIp = re.compile(ip.IP_ADDR) - if any(ifilterfalse(reIp.match, value.split(' '))): - raise VariableError(_("Wrong IP address for DNS")) - - def humanReadable(self): - return self.Get() or (_("Get via DHCP") - if self.isDNSByDHCP() - else _("Not used")) - - -class VariableOsInstallNetSettings(NetHelper, Variable): - """ - Net service configured - """ - type = "choice" - value = "" - - def choice(self): - return [("", "")] + self.Get('os_install_net_conf_available') - - -class VariableOsInstallPxeIp(Variable): - """ - IP адрес PXE сервера - """ - type = "choice" - opt = ["--ip"] - metavalue = "IP" - - def init(self): - self.label = _("PXE server IP") - self.help = "set IP address for PXE server" - - def get(self): - ips = self.Get('os_net_ip').split(',') - for ipaddr in ifilter(None, ips): - return ipaddr - else: - return "" - - def choice(self): - ips = self.Get('os_net_ip').split(',') - return filter(None, ips) diff --git a/libs_crutch/install/variables/system.py b/libs_crutch/install/variables/system.py deleted file mode 100644 index 4e03e85..0000000 --- a/libs_crutch/install/variables/system.py +++ /dev/null @@ -1,1119 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import sys -import re -from os import path -from calculate.lib.datavars import (Variable, VariableError, ReadonlyVariable, - TableVariable, PasswordError, - DataVarsError, VariableInterface) -from calculate.install.fs_manager import FileSystemManager -from calculate.lib.utils.files import (readFile, getProgPath, process, - readLinesFile, readFileEx) -from calculate.lib.utils.common import (getPasswdUsers, getUserGroups, - getGroups, CmdlineParams) -from calculate.lib.utils.common import getValueFromConfig, getValueFromCmdLine -from calculate.lib.utils.common import getUserPrimaryGroup -from calculate.lib.utils.portage import isPkgInstalled -import calculate.lib.utils.device as device -from calculate.lib.encrypt import sha256_crypt -from calculate.lib.variables.system import RootType -import calculate.lib.variables.env as libenv -from calculate.lib.encrypt import get_shadow_hash, get_grub_hash -from calculate.core.server.admin import Admins -import calculate.lib.cl_ini_parser as cl_ini_parser -from calculate.lib.cl_template import SystemIni - -from calculate.lib.cl_lang import setLocalTranslate, _ - -setLocalTranslate('cl_install3', sys.modules[__name__]) - - -class UserHelper(VariableInterface): - """ - Locale variables not using for flash installation - """ - xorg_need = False - stub_hash_value = "{SHA256}" - - def uncompatible(self): - """ - User setting up unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return _("User configuration unavailable for Flash install") - if 
self.Get('os_install_x11_server_set') == 'off' and self.xorg_need: - return _("Autologin is available for Xorg sessions only") - return "" - - -class GrubHelper(VariableInterface): - grub_passwd_file = "/etc/grub.d/07_passwd" - - def read_hash_from_passwd(self): - """ - Получить пароль из конфигурационного файла grub - """ - data = readFile(self.grub_passwd_file) - reRootPwd = re.compile("password_pbkdf2 root (\S+)") - pwd = reRootPwd.search(data) - if pwd: - return pwd.group(1) - return "" - - -class VariableOsInstallLibPath(ReadonlyVariable): - """ - Использовать lib или lib64 - """ - def get(self): - if self.Get('os_install_arch_machine') == 'x86_64': - return "lib64" - else: - return "lib" - - -class VariableOsInstallPython(libenv.VariableOsPython): - """ - Текущий python - """ - def get_root(self): - return self.Get('cl_chroot_path') - - def get_usrlib(self): - return "/usr/%s" % self.Get('os_install_lib_path') - - -class VariableOsInstallScratch(ReadonlyVariable): - """ - Install system in scratch mode - """ - type = "bool" - opt = ['--build'] - - def get(self): - # for installation default - normal system - if self.Get('cl_action') == 'system': - return "off" - else: - return self.Get('os_scratch') - - -class VariableOsFormatType(ReadonlyVariable): - """ - Filesystem format support by calcualte-install - """ - type = "list" - - def get(self): - """Filesystem format support by calcualte-install""" - return FileSystemManager.supportFS.keys() - - -class VariableOsFormatUse(ReadonlyVariable): - """ - Avialable format by mkfs utility - """ - type = "list" - # (on or off) autoupdate config from install program - cl_autoupdate_set = { - 'type': "bool", - 'value': "off"} - - def checkFunc(self, fs): - if "format" in FileSystemManager.supportFS[fs] and \ - path.exists(FileSystemManager.supportFS[fs]["format"]): - return "yes" - return "no" - - def get(self): - return map(self.checkFunc, self.Get('os_format_type')) - - -class VariableClMigrateRootPwdPlain(GrubHelper, 
UserHelper, Variable): - """ - Root password - """ - type = "password" - opt = ["--root-password"] - metavalue = 'PASSWORD' - untrusted = True - check_after = ["cl_grub"] - - def get(self): - shadow_pwd = self.Get('cl_migrate_root_shadow_pwd') - if shadow_pwd: - return self.stub_hash_value - return "" - - def init(self): - self.help = _("specify the root password") - self.label = _("Root password") - - def check(self, value): - if not value and not self.Get('cl_migrate_root_shadow_pwd'): - raise PasswordError(_("Enter password for user %s") % "root") - # если plain пароля нет (есть только хэш), но требуется установить - # пароль на grub (cl_grub_passwd_set), при этом нет 07_passwd - if (value == self.stub_hash_value and - not self.read_hash_from_passwd() and - self.GetBool('cl_grub_passwd_set')): - raise PasswordError(_("Please enter a root password for Grub")) - - -class VariableClMigrateRootPwd(ReadonlyVariable): - """ - Хэш root пароля - """ - def get(self): - value = self.Get('cl_migrate_root_pwd_plain') - - if value and value != UserHelper.stub_hash_value: - return get_shadow_hash().hash(value) - - return self.Get('cl_migrate_root_shadow_pwd') - - -class VariableClMigrateRootShadowPwd(ReadonlyVariable): - """ - Хэш root пароля из файла /etc/shadow. 
Если пароль root, то - содержит пустую строку - """ - def get(self): - rootPasswd = map(lambda x: x[1], - filter("root".__eq__, - map(lambda x: x.split(':')[0:2], - readLinesFile('/etc/shadow')))) - if rootPasswd: - rootPasswd = rootPasswd[0] - else: - rootPasswd = "" - # if root password is "root" - enc = get_shadow_hash() - if rootPasswd: - if enc.identify(rootPasswd): - if enc.verify("root", rootPasswd): - rootPasswd = "" - else: - rootPasswd = "" - return rootPasswd or "" - - -class VariableClGrubPasswdSet(GrubHelper, Variable): - """ - Использовать при установке системы пароль root как пароль для grub - """ - type = 'bool' - opt = ["--grub-passwd"] - - def init(self): - self.help = _("use the root password to edit Grub") - self.label = _("Use the root password to edit Grub") - - def get(self): - if (self.read_hash_from_passwd() or - self.Get('os_root_type_ext') in RootType.Live): - return Variable.On - else: - return Variable.Off - - -class VariableClGrubPwd(GrubHelper, Variable): - """ - Хэш пароля на grub - """ - opt = ["--passwd"] - type = "password" - - def init(self): - self.help = _("set grub password") - self.label = _("Grub password") - - def get(self): - system_action = self.Get('cl_action') == "system" - if self.GetBool('cl_grub_remove_pwd_set'): - return "" - use_grub_passwd = self.GetBool('cl_grub_passwd_set') - passwd_hash = self.read_hash_from_passwd() - if passwd_hash: - if not system_action or use_grub_passwd: - return passwd_hash - if use_grub_passwd: - value = self.Get('cl_migrate_root_pwd_plain') - if value and value != UserHelper.stub_hash_value: - enc = get_grub_hash() - return enc.hash(value) - return "" - - def set(self, value): - """ - Поддержка использвания как шифрованного хэша так и plain - """ - enc = get_grub_hash() - # используется hash - if enc.identify(value): - return value - # отключение - if value in ("", "none"): - return "" - # используется plain - return enc.hash(value) - - def check(self, value): - if value and 
self.GetBool('cl_grub_remove_pwd_set'): - raise VariableError( - _("You cannot set a password and remove the " - "existing password at a time")) - - -class VariableClGrubRemovePwdSet(Variable): - """ - Удалить пароль из grub - """ - type = "bool" - guitype = "hidden" - value = Variable.Off - - opt = ["--remove-passwd"] - - def init(self): - self.help = _("remove the password protection for editing the Grub menu") - self.label = _("Remove the password protection on Grub") - - -class VariableClInstallHomeCryptSet(UserHelper, Variable): - type = 'bool' - opt = ["--crypt-home", "-C"] - untrusted = True - - def init(self): - self.help = _("encrypt user profiles") - self.label = _("Encrypt user profiles") - - def get(self): - return ("off" if self.Get('cl_autologin') - else self.Get('cl_home_crypt_set')) - - def check(self, value): - if value == "on" and self.Get('cl_autologin'): - raise VariableError( - _("User profile encryption is uncompatible with autologin")) - - -class VariableClMigrateData(UserHelper, TableVariable): - """ - User migrate data table - """ - type = 'table' - opt = ["--user", "-u"] - metavalue = 'USER[:ADMIN[:GROUPS]]' - source = ['cl_migrate_user', 'cl_migrate_admin', - 'cl_migrate_user_groups', - 'cl_migrate_user_pwd'] - untrusted = True - - def init(self): - self.help = _("add a user to the installed system. USER is username. " - "ADMIN is administrator rights ({alllist}, " - "{none_value} by default). " - "GROUPS is list user supplimentary groups " - "(comma delimeter). 
" - "Use '{none_value}' value to discard user migration").format( - alllist="none, update, all", none_value="none") - self.label = _("Migrating users") - - def set(self, value): - value = [ - [x[0], - VariableClMigrateAdmin.pre_set(x[1]), - VariableClMigrateUserGroups.pre_set(x[2]), - x[3]] - for x in value - ] - if len(value) == 1: - if len(value[0]) > 1 and value[0][0] == 'none': - return [[]] - return value - - -class VariableClMigrateDataBrief(UserHelper, TableVariable): - """ - User migrate data table for brief view - """ - source = ['cl_migrate_user', 'cl_migrate_admin', 'cl_migrate_user_groups'] - - def init(self): - self.label = _("Migrating users") - - -class VariableClMigrateUser(UserHelper, Variable): - """ - Migrate users list - """ - type = 'list' - - def init(self): - self.label = _("User") - - def get(self): - """ - Migrating users (users above 1000 uid) - """ - cached_users = getPasswdUsers( - datafile="var/lib/calculate/calculate-client/cache/create_passwd") - return [x for x in getPasswdUsers() if x != "root" and x not in cached_users] - - def check(self, value): - """ - Проверка на корректное имя пользователя - """ - if any(not x for x in value): - raise VariableError(_("Username is missing")) - - -class VariableClMigrateAdmin(UserHelper, Variable): - """ - Migrate users list - """ - type = 'choice-list' - default_value = "" - aliases = {'system_update': 'update'} - - def init(self): - self.label = _("Administrator") - - def choice(self): - return [ - ("", ""), - ("update", _("System update")), - ("all", _("Full access")), - ] - - @classmethod - def pre_set(cls, value): - return {'none':''}.get(value, value) - - def get_alias(self, value): - return self.aliases.get(value, value) - - def get(self): - """ - Migrating users (users above 1000 uid) - """ - admins = Admins(self.parent) - return [self.get_alias(admins[x]) or self.default_value - for x in self.Get('cl_migrate_user')] - - def set(self, value): - return map(lambda x: x if x else 
self.default_value, value) - - -class VariableOsAvailableGroups(ReadonlyVariable): - """ - Список доступных в дистрибутиве групп - """ - type = 'list' - - def get(self): - image = self.Get('cl_image') - if image: - with image: - try: - distrPath = image.getDirectory() - return getGroups(distrPath) - except DistributiveError: - pass - return getGroups() - -class VariableClMigrateUserGroups(UserHelper, Variable): - """ - Migrate users groups - """ - type = 'choice-list-list' - defaultGroupList = sorted(["users", "audio", "cdrom", "video", - "cdrw", "usb", "plugdev", "games", "lp", "lpadmin", - "scanner", "uucp"]) - default_value = "default" - - @classmethod - def pre_set(cls, value): - """ - Обработать значение до передачи его из таблицы в поле - """ - if not any(value): - return [cls.default_value] - else: - return value - - def getDefaultGroups(self): - return list(set(self.defaultGroupList) & set(self.Get('os_available_groups'))) - - def init(self): - self.label = _("Groups") - - def process_groups(self, values): - groupslist = list(set(self.defaultGroupList) - & set(self.Get('os_available_groups'))) - for value in values: - if value == self.default_value: - for i in groupslist: - yield i - else: - yield value - - def set(self, value): - value = map(lambda x: sorted(list(set(self.process_groups(x)))), - value) - return value - - def getPrimaryGroup(self, username): - pg = getUserPrimaryGroup(username) - if pg: - return [pg] - return [] - - def get(self): - """ - User groups - """ - passwdList = getPasswdUsers() - return map(lambda x: sorted(self.getPrimaryGroup(x) + - (getUserGroups(x) - if x in passwdList else - self.getDefaultGroups())), - self.Get('cl_migrate_user')) - - def choice(self): - """ - Available groups - """ - return [(self.default_value, - _("Default"))] + sorted([(x, x) for x in getGroups()]) - - -class VariableClMigrateUserPwd(UserHelper, Variable): - """ - Migrate users who need to change passwords - """ - type = 'password-list' - - def 
init(self): - self.label = _("Password") - - def get(self): - """ - Migrating users passwords - """ - retList = [] - fileName = "/etc/shadow" - if os.access(fileName, os.R_OK): - migrateusers = self.Get("cl_migrate_user") - if migrateusers: - lenData = 9 - with open(fileName) as f: - shadowData = filter(lambda x: len(x) == lenData, - map(lambda x: x.rstrip().split(":"), f)) - shadowData = filter(lambda x: x[0] in migrateusers, shadowData) - shadowData = map(lambda x: (x[0], x[1]), shadowData) - shadowUsers = map(lambda x: x[0], shadowData) - for userName in migrateusers: - if userName in shadowUsers: - userData = filter(lambda x: x[0] == userName, - shadowData) - hashPwd = userData[0][1] - if (sha256_crypt.identify(hashPwd) and - sha256_crypt.verify("guest", hashPwd)): - retList.append("") - else: - retList.append(hashPwd) - else: - retList.append("") - return retList - - def check(self, value): - """ - Check exists password for all migrate users - """ - for user, pwd in zip(self.Get('cl_migrate_user'), value): - if not pwd: - raise PasswordError( - _("Enter password for user %s") % user) - - def set(self, value): - """ - Encrypt passwords - """ - shadow_hash = get_shadow_hash() - - return map(lambda x: x if shadow_hash.identify(x) or not x else \ - shadow_hash.hash(x), value) - - -class VariableClAutologin(UserHelper, Variable): - """ - Autologin variable (contains user name for autologin) or - - empty string if disable - """ - type = 'choiceedit' - opt = ["--autologin", '-A'] - metavalue = "USER" - xorg_need = True - - def init(self): - self.label = _("Autologin") - self.help = _("set an autologin for the user, 'off' for disable") - - def get(self): - # autologin enable for livecd and all install type CMC - cmdDomainSet = (getValueFromCmdLine( - "calculate", CmdlineParams.DomainPassword) or - getValueFromCmdLine( - "calculate", CmdlineParams.Domain) or "") - if (not cmdDomainSet and - self.Get('os_install_root_type') == "livecd") or \ - 
self.Get('os_install_linux_shortname') == "CMC": - nonRootUsers = filter(lambda x: x != "root", - self.Get('cl_migrate_user')) - if nonRootUsers: - return nonRootUsers[0] - else: - return "" - return "" - - def set(self, value): - return {'none': '', - 'off': ''}.get(value, value) - - def choice(self): - yield ("off", _("No autologin")) - for user in (x for x in self.Get('cl_migrate_user') - if x != "root"): - yield (user,user) - - def check(self, value): - """ - Autologin only for migrated non-root users - """ - if value and not value in self.Get('cl_migrate_user'): - raise VariableError(_("User %s does not exist") % value) - if value == "root": - raise VariableError( - _("Autologin is unavailable for user %s") % value) - - def humanReadable(self): - return self.Get() or _("Not used") - - def uncompatible(self): - """ - Network setting up unavailable for flash installation - """ - try: - if (self.Get('cl_action') == 'merge' and - self.Get('client.cl_remote_host')): - return \ - _("The autologin is not available with domain workstations") - except DataVarsError: - pass - return UserHelper.uncompatible(self) - - -class VariableClInstallAutoupdateSet(Variable): - """ - (on or off) autoupdate config from install program for install - """ - type = "bool" - value = "off" - - -class VariableOsInstallMakeopts(Variable): - """ - Make.conf makeopts - """ - - def get(self): - cpunum = self.Get('hr_cpu_num') - if cpunum == "1": - return "-j1" - else: - return "-j%d" % (int(cpunum) + 1) - - -class VariableOsGrubConf(ReadonlyVariable): - """ - DEPRICATED content of current grub.conf - """ - - -class VariableOsInstallGrubDevicemapConf(ReadonlyVariable): - """ - DEPRICATED content of device.map file for grub - """ - os_install_grub_devicemap_conf = {} - - -class VariableClDistfilesPath(Variable): - """ - DISTFILES path - """ - value = '/var/calculate/remote/distfiles' - - -class VariableClPkgdirPath(Variable): - """ - PKGDIR path - """ - - def get(self): - return 
"/var/calculate/remote/packages/%s/%s" % ( - self.Get('os_install_linux_shortname'), - self.Get('os_install_arch_machine')) - - -class VariableClInstallDevFrom(Variable): - """ - Root device of previous installed os - """ - - def set(self, value): - """ - If device in calculate3.env dev_from not exists set '' - """ - if value: - value = device.udev.get_device_info( - name=value).get('DEVNAME', value) - if value in self.Get('os_disk_dev'): - return value - else: - return "" - - def get(self): - if self.Get('cl_autopartition_set') == 'on': - return self.Get('cl_autopartition_factory').dev_from - return "" - - -class VariableOsNvidiaMask(ReadonlyVariable): - """ - Get nvidia card mask versions - """ - - def get_cards_id(self): - category = "0300" - vendor = "10de:" - lsPciProg = getProgPath("/usr/sbin/lspci") - nvidiacards = filter(lambda x: " %s: " % category in x, - process(lsPciProg, "-d", vendor, "-n")) - cardsid = \ - map(lambda x: x.groups()[0], - filter(lambda x: x, - map(lambda x: re.search( - "[0-9a-fA-F]{4}:([0-9a-fA-F]{4})", x), - nvidiacards))) - if not cardsid: - return set() - return set(cardsid) - - def get_legacy(self): - image = self.Get('cl_image') - try: - if image: - image = image.convertToDirectory() - chrootPath = image.getDirectory() - else: - chrootPath = self.Get("cl_chroot_path") - nvidiaeclass = path.join(chrootPath, - 'usr/portage/eclass/nvidia-driver.eclass') - if not os.access(nvidiaeclass, os.R_OK): - return "" - if not nvidiacards: - return "" - cardsid = self.get_cards_id() - if not cardsid: - return "" - eclassdata = readFile(nvidiaeclass) - reBlock = re.compile( - r"if has \$\{nvidia_gpu\}\s+\\([^;]+);\s*then(.*?)fi", re.S) - reMask = re.compile('>=x11-drivers/nvidia-drivers[^"]+') - masks = [] - for block in reBlock.findall(eclassdata): - nvidia_ids, mask_data = block - nvidia_ids = nvidia_ids.strip().replace('\\','') - nvidia_ids = {x for x in nvidia_ids.split() if x} - m = reMask.search(mask_data) - if m: - mask_str = m.group() - 
if cardsid & nvidia_ids: - return mask_str - finally: - if image: - image.close() - return "" - - def get_new(self, ini): - cardsid = self.get_cards_id() - for nvidia_serie in ini.getKeys('nvidia'): - nvidia_ids = set(ini.getVar('nvidia', nvidia_serie).split(',')) - if cardsid & nvidia_ids: - if nvidia_serie.isdigit(): - return ">=x11-drivers/nvidia-drivers-{}".format(int(nvidia_serie)+1) - return "" - - def get(self): - ini = SystemIni(self.parent) - if ini.getKeys('nvidia'): - return self.get_new(ini) - else: - return self.get_legacy() - - -class VariableOsInstallLvmSet(ReadonlyVariable): - """ - Using lvm - """ - type = "bool" - - def get(self): - for typeDisk in self.Get('os_install_disk_type'): - if "lvm" in typeDisk.lower(): - return "on" - else: - return "off" - - -class VariableOsInstallMdadmSet(ReadonlyVariable): - """ - Using mdadm - """ - type = "bool" - - def get(self): - for typeDisk in self.Get('os_install_disk_type'): - if "raid" in typeDisk.lower(): - return "on" - else: - return "off" - - -class VariableClChrootGrub(ReadonlyVariable): - """ - Chroot for grub-mkconfig - """ - - def get(self): - if self.Get('os_install_scratch') == "on": - if self.Get('cl_action') == 'system': - return self.Get('cl_target').mdirectory - else: - return path.join(self.Get('cl_chroot_path'), "mnt/scratch") - else: - return self.Get('cl_chroot_path') - - -class VariableOsGrub2Path(Variable): - """ - Get Grub2 Install cmd (grub-install or grub2-install) - """ - - def get(self): - # find grub2-install - chroot_path = self.Get('cl_chroot_path') - chroot_cmd = getProgPath('/usr/bin/chroot') - grubInstall = getProgPath('/usr/sbin/grub2-install', prefix=chroot_path) - if grubInstall: - return grubInstall - # find grub-install and check, that this is grub2-install (ver 1.99) - grubInstall = getProgPath('/usr/sbin/grub-install', prefix=chroot_path) - if grubInstall and filter(lambda x: "1.99" in x or "2." 
in x, - process(chroot_cmd, chroot_path, - grubInstall, '--version')): - return grubInstall - return "" - - -class VariableClSetup(Variable): - """ - Type of setup - """ - type = "choice" - value = "" - - def choice(self): - return ["audio", "network", "locale", "video", "boot", "users", - "session", "themes", ""] - - def humanReadable(self): - mapType = {'network': _("network settings"), - 'locale': _("localization and time options"), - 'video': _("video settings"), - 'boot': _("boot parameters"), - 'audio': _("audio parameters"), - 'themes': _("update themes"), - 'session': _("session settings"), - 'users': _("user settings")} - return mapType.get(self.Get(), "") - - def check(self, value): - if value == "boot" and self.Get('os_install_root_type') == 'livecd': - raise VariableError( - _("Boot configuration is not available on a LiveCD")) - - -class VariableClLive(Variable): - """ - Apply live templates - """ - value = "off" - type = "bool" - opt = ['--live'] - - def init(self): - self.label = _("Configure dynamic options only") - self.help = _("configure dynamic options only") - - -class VariableOsInstallPxe(Variable): - """ - Installation for PXE loading - """ - type = "boot" - value = "off" - untrusted = True - - def check(self, value): - if value == "on": - if self.Get('os_linux_system') != "server": - raise VariableError( - _("PXE install is available for Calculate " - "Directory Server only") + '.') - for pkg in ['net-misc/dhcp', 'net-ftp/tftp-hpa', - 'net-fs/nfs-utils']: - if not isPkgInstalled(pkg): - raise VariableError( - _("For PXE install, you need to install package %s") - % pkg) - for env_fn in ('/etc/calculate/calculate.env', - '/var/lib/calculate/calculate.env'): - try: - config = cl_ini_parser.iniParser(env_fn) - val = config.getVar('server', 'sr_dhcp_set') - if val.encode('utf-8') == "on": - return - except Exception: - pass - raise VariableError( - _("PXE install is only available if the DHCP " - "service has been configured first")) - - -class 
VariableOsInstallPxePath(Variable): - """ - Path to PXE installation - """ - value = "/var/calculate/pxe" - - opt = ['--pxe-path'] - - def init(self): - self.label = _("Installation path") - self.help = _("path for PXE install") - - -class VariableOsInstallUefiSet(Variable): - """ - Install in UEFI - """ - def get(self): - if self.Get('os_install_uefi'): - return "on" - else: - return "off" - -class VariableOsInstallGrubTerminal(Variable): - """ - Gfxmode - """ - type = "choice" - opt = ['--grub-terminal'] - metavalue = "TERMINAL" - - def init(self): - self.label = _("Grub terminal") - self.help = _("grub terminal") - - def get(self): - cmdLine = '/proc/cmdline' - if 'grub_nogfxmode' in readFile(cmdLine): - return 'console' - grubDefault = path.join(self.Get('cl_chroot_path'), - 'etc/default/grub') - if getValueFromConfig(grubDefault, 'GRUB_TERMINAL') == 'console': - return 'console' - grubCfg = '/boot/grub/grub.cfg' - if re.search('^terminal_output\s*console', readFile(grubCfg), re.M): - return 'console' - - return 'gfxterm' - - def choice(self): - return ['gfxterm', 'console'] - - def uncompatible(self): - """ - Grub setting up unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return _("Grub configuration unavailable for Flash install") - return "" - - -class PackageCheckVariable(ReadonlyVariable): - """ - Конструктор для переменных проверки установлен ли пакет - """ - image = False - package = None - type = "bool" - image_variable = "cl_image" - prefix_variable = "cl_chroot_path" - - def get(self): - try: - if self.image: - image = self.Get(self.image_variable) - if image: - with image as distr: - distrPath = image.getDirectory() - if isPkgInstalled(self.package, prefix=distrPath): - return "on" - else: - prefix = self.Get(self.prefix_variable) - if isPkgInstalled(self.package, prefix=prefix): - return "on" - except Exception: - pass - return "off" - - -class VariableOsInstallAlsaSet(PackageCheckVariable): - """ - 
Установлен ли media-sound/alsa-utils - """ - image = True - package = "media-sound/alsa-utils" - - -class VariableOsInstallX11ServerSet(PackageCheckVariable): - """ - Установлен ли x11-base/xorg-server - """ - image = True - package = "x11-base/xorg-server" - - -class VariableOsInstallSplashSet(Variable): - """ - Переменная отображать splash при загрузки - """ - type = "bool" - value = "on" - -class VariableClInstallFs(Variable): - """ - Preferred fs - """ - type = "list" - - value = ["btrfs", "ext4", "reiserfs", "ext3"] - -class FlashUncompatible(VariableInterface): - def uncompatible(self): - """ - Update setting up unavailable for flash installation - """ - if self.Get('os_install_root_type') == 'flash': - return \ - _("Update configuration unavailable for Flash install") - return "" - - -class VariableClInstallUpdatePkgSet(Variable): - """ - При установке системы создать сценарии запуска cl-update в конце - первой загрузки - """ - type = "bool" - opt = ["--update-pkg","-K"] - - def system_has_ip(self): - return bool(self.Get('os_net_ip')) - - def custom_set_has_packages(self): - install_short = self.Get('os_install_linux_shortname') - now_short = self.Get('os_linux_shortname') - setfile = "/etc/portage/sets/custom.{}".format(now_short.lower()) - if readFileEx(setfile, grab=True) and install_short == now_short: - return True - return False - - def get(self): - if self.system_has_ip() and self.custom_set_has_packages(): - return "on" - return "off" - - def init(self): - self.help = _("update packages at first boot") - self.label = _("Update packages at first boot") - - -try: - import calculate.update.variables.update as update - - class VariableClInstallAutocheckSet(FlashUncompatible, - update.VariableClUpdateAutocheckSet): - def get(self): - return self.Get('update.cl_update_autocheck_set') - - class VariableClInstallAutocheckInterval(FlashUncompatible, - update.VariableClUpdateAutocheckInterval): - def get(self): - return 
self.Get('update.cl_update_autocheck_interval') - - class VariableClInstallCleanpkgSet(FlashUncompatible, - update.VariableClUpdateCleanpkgSet): - def get(self): - return self.Get('update.cl_update_cleanpkg_set') - - class VariableClInstallOtherSet(FlashUncompatible, - update.VariableClUpdateOtherSet): - def get(self): - return self.Get('update.cl_update_other_set') - -except ImportError: - update = None - - class VariableClInstallAutocheckSet(FlashUncompatible, Variable): - value = "off" - - class VariableClInstallAutocheckInterval(FlashUncompatible, Variable): - value = "" - - class VariableClInstallCleanpkgSet(FlashUncompatible, Variable): - value = "off" - - class VariableClInstallOtherSet(FlashUncompatible, Variable): - value = "off" diff --git a/libs_crutch/install/wsdl_install.py b/libs_crutch/install/wsdl_install.py deleted file mode 100644 index 55e4ff7..0000000 --- a/libs_crutch/install/wsdl_install.py +++ /dev/null @@ -1,426 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import sys - -from calculate.lib.datavars import VariableError, DataVarsError -from calculate.install.distr import DistributiveError - -from . 
import install -from calculate.lib.cl_lang import setLocalTranslate, getLazyLocalTranslate, _ - -setLocalTranslate('cl_install3', sys.modules[__name__]) -__ = getLazyLocalTranslate(_) - -from calculate.core.server.func import WsdlBase -from calculate.lib.utils.partition import VolumesError -from calculate.install.utils.cl_install import ClInstallAction -from calculate.install.utils.cl_setup import ( - ClSetupLocaleAction, ClSetupVideoAction, ClSetupSystemAction, - ClSetupBootAction, ClSetupNetworkAction, ClSetupAudioAction, - ClSetupSessionAction, ClSetupThemesAction) - - -class Wsdl(WsdlBase): - methods = [{ - # идентификатор метода - 'method_name': "install", - # категория метода - 'category': __("Installation"), - # заголовок метода - 'title': __("System Install"), - # иконка для графической консоли - 'image': ('calculate-install,system-installer,applications-other,' - 'drive-harddisk'), - # метод присутствует в графической консоли - 'gui': True, - # консольная команда - 'command': 'cl-install', - # права для запуска метода - 'rights': ['install'], - # объект содержащий модули для действия - 'logic': {'Install': install.Install}, - # описание действия - 'action': ClInstallAction, - # объект переменных - 'datavars': "install", - 'native_error': (VariableError, DistributiveError, VolumesError, - DataVarsError, install.InstallError), - # значения по умолчанию для переменных этого метода - 'setvars': {'cl_action!': 'system', - 'cl_chroot_status!': 'off', 'cl_install_type': 'hdd', - 'cl_dispatch_conf': 'usenew'}, - # описание груп (список лямбда функций) - 'groups': [ - lambda group: group(_("Language and locale"), - image="welcome", - normal=('os_install_locale_lang', - 'os_install_clock_timezone'), - expert=('os_install_locale_keyboard_layout', - 'os_install_clock_type',)), - lambda group: group(_("Distribution"), - normal=('cl_image_filename',), - hide=('cl_image_linux_shortname', - 'cl_image_arch_machine', - 'cl_image_new_only'), - 
expert=('cl_image_linux_shortname', - 'cl_image_arch_machine', - 'cl_image_new_only')), - lambda group: group(_("Installation type"), - normal=('cl_autopartition_set',), - hide=('cl_autopartition_set', - 'cl_autopartition_root_size', - 'cl_autopartition_root_format', - 'cl_autopartition_calculate_format', - 'cl_autopartition_swap_size',), - brief=('cl_autopartition_brief_set',), - expert=('cl_autopartition_scheme', - 'cl_autopartition_root_format', - 'cl_autopartition_calculate_format', - 'cl_autopartition_table', - 'cl_autopartition_root_size', - 'cl_autopartition_swap_size', - 'cl_autopartition_device'), - expert_label=_( - "Click to select partitions to be created") - ), - lambda group: group(_("Layout"), - normal=('os_location_data',), - hide=('os_location_data', 'os_install_mbr', - 'os_install_uefi'), - brief_force=('os_location_brief_data', - 'os_install_bootloader'), - brief=('os_install_uefi',), - expert=('cl_uuid_set', - 'os_install_mbr', - 'os_install_uefi')), - lambda group: group(_("Network settings"), - normal=( - 'cl_network_migrate_set',), - expert=('os_install_net_conf', - 'os_install_net_data', - 'os_install_net_fqdn', 'os_install_ntp', - 'os_install_net_dns', - 'os_install_net_dns_search', - 'os_install_net_route_data'), - expert_label=_( - "Click to select network settings") - ), - lambda group: group(_("Users"), - normal=( - 'cl_migrate_root_pwd_plain', - 'cl_grub_passwd_set', - 'cl_migrate_data', - 'cl_autologin'), - expert=('cl_install_home_crypt_set',), - hide=('cl_migrate_data',), - brief=('cl_migrate_data_brief',)), - lambda group: group(_("Audio"), - normal=('os_audio',), - expert=('os_audio_default',)), - lambda group: group(_("Video"), - normal=('os_install_x11_video_drv', - 'os_install_x11_composite', - 'os_install_x11_resolution_preferred', - 'os_install_grub_terminal'), - expert=('os_install_fb_resolution_preferred',)), - lambda group: group(_("Update"), - normal=('cl_install_autocheck_set', - 'cl_install_autocheck_interval', - 
'cl_install_cleanpkg_set', - 'cl_install_other_set'), - expert=('cl_install_update_pkg_set',))], - # действие выводит информацию перед запуском - 'brief': {'next': __("Run"), - 'image': 'finish', - 'name': __("Start installing")}}, - # установка на Flash - { - 'method_name': "install_flash", - 'category': __("Installation"), - 'title': __("Flash Install"), - 'image': ('calculate-install-flash,' - 'drive-removable-media-usb-pendrive,' - 'drive-removable-media-usb,media-flash'), - 'command': 'cl-install-flash', - 'gui': True, - 'rights': ['install'], - 'logic': {'Install': install.Install}, - 'action': ClInstallAction, - 'datavars': "install", - 'native_error': (VariableError, DistributiveError, VolumesError, - DataVarsError, install.InstallError), - 'setvars': {'cl_action!': 'system', - 'cl_chroot_status!': 'off', - 'cl_install_type': 'flash', - 'cl_protect_use_set!': 'off', - 'cl_autopartition_set!': 'off', - 'cl_dispatch_conf': 'usenew'}, - 'groups': [ - lambda group: group(_("Flash install"), - normal=('cl_image_filename', - 'os_install_disk_single', - 'os_install_format_single_set'), - next_label=_("Run"))], - 'brief': {'next': __("Run"), - 'name': __("Start installing")}}, - # PXE установка - { - 'method_name': "install_pxe", - 'category': __("Installation"), - 'title': __("PXE Install"), - 'image': ('calculate-install-pxe,gnome-network-properties,' - 'network-server,' - 'preferences-desktop-remote-desktop'), - 'command': 'cl-install-pxe', - 'gui': True, - 'rights': ['installpxe'], - 'logic': {'Install': install.Install}, - 'action': ClInstallAction, - 'datavars': "install", - 'native_error': (VariableError, DistributiveError, - DataVarsError, install.InstallError), - 'setvars': {'cl_action!': 'system', 'os_install_pxe': 'on', - 'cl_protect_use_set!': 'off', - 'cl_dispatch_conf': 'usenew'}, - # действие выводит информацию перед запуском - 'brief': {'next': __("Installation"), - 'image': 'finish', - 'name': __("Start installing")}, - 'groups': [ - lambda group: 
group(_("PXE install"), - normal=('cl_image_filename',), - expert=('os_install_pxe_path', - 'os_install_pxe_ip'), - next_label=_("Run"))]}, - # настройка загрузки системы - { - 'method_name': "setup_boot", - 'category': __("Configuration"), - 'title': __("Boot"), - 'image': 'calculate-setup-boot,stock_save,drive-harddisk', - 'command': 'cl-setup-boot', - 'gui': True, - 'rights': ['setupboot'], - 'logic': {'Install': install.Install}, - 'action': ClSetupBootAction, - 'datavars': "install", - 'native_error': ( - VariableError, DataVarsError, install.InstallError), - 'setvars': {'cl_action!': 'merge', 'cl_merge_pkg!': [None], - 'cl_merge_set!': "on", 'cl_setup': 'boot'}, - 'groups': [ - lambda group: group(_("Boot"), - normal=( - 'os_install_mbr', - 'os_install_uefi', - 'os_install_grub_terminal', - 'cl_grub_pwd', - 'cl_grub_remove_pwd_set', - ), - expert=( - 'cl_templates_locate', - 'cl_dispatch_conf', - 'cl_verbose_set'), - next_label=_("Save"))]}, - { - # настройка сети - 'method_name': "setup_network", - 'category': __("Configuration"), - 'title': __("Network"), - 'image': 'calculate-setup-network,network-workgroup,' - 'network-idle,preferences-system-network', - 'command': 'cl-setup-network', - 'gui': True, - 'rights': ['setupnetwork'], - 'logic': {'Install': install.Install}, - 'action': ClSetupNetworkAction, - 'datavars': "install", - 'native_error': ( - VariableError, DataVarsError, install.InstallError), - 'setvars': {'cl_action!': 'merge', 'cl_merge_pkg!': [None], - 'cl_network_migrate_set': 'off', - 'cl_merge_set!': "on", 'cl_setup': 'network'}, - 'groups': [ - lambda group: group(_("Network"), - normal=('os_install_net_conf', - 'os_install_net_data', - 'os_install_net_fqdn', - 'os_install_ntp'), - expert=('os_install_net_dns', - 'os_install_net_dns_search', - 'os_install_net_route_data', - 'cl_templates_locate', - 'cl_dispatch_conf', - 'cl_verbose_set'), - next_label=_("Save"))]}, - { - # перенастройка системы - 'method_name': "setup_system", - 'category': 
__("Configuration"), - 'title': __("System"), - 'image': 'calculate-setup-system,run-build,applications-ide,' - 'system-run,system,computer', - 'command': 'cl-setup-system', - 'gui': True, - 'rights': ['setupsystem'], - 'logic': {'Install': install.Install}, - 'action': ClSetupSystemAction, - 'datavars': "install", - 'native_error': ( - VariableError, DataVarsError, install.InstallError), - 'setvars': {'cl_action!': 'merge', 'cl_live': 'off'}, - 'groups': [ - lambda group: group(_("Update system settings"), - normal=('cl_live', - 'cl_network_configure_set'), - expert=( - 'cl_templates_locate', - 'cl_dispatch_conf', - 'cl_verbose_set'), - next_label=_("Save"))]}, - { - # настройка видео - 'method_name': "setup_video", - 'category': __("Configuration"), - 'title': __("Video"), - 'image': 'calculate-setup-video,system-config-display,' - 'video-display,gnome-multimedia', - 'command': 'cl-setup-video', - 'gui': True, - 'rights': ['setupvideo'], - 'logic': {'Install': install.Install}, - 'action': ClSetupVideoAction, - 'datavars': "install", - 'native_error': ( - VariableError, DataVarsError, install.InstallError), - 'setvars': {'cl_action!': 'merge', 'cl_merge_pkg!': [None], - 'cl_merge_set!': "on", 'cl_setup': 'video'}, - 'groups': [ - lambda group: group( - _("Video"), - normal=('os_install_x11_video_drv', - 'os_install_x11_resolution_preferred', - 'os_install_x11_composite'), - expert=( - 'os_install_fb_resolution_preferred', - 'cl_templates_locate', - 'cl_dispatch_conf', - 'cl_verbose_set'), - next_label=_("Save"))]}, - { - # настройка звука - 'method_name': "setup_audio", - 'category': __("Configuration"), - 'title': __("Audio"), - 'image': 'calculate-setup-audio,audio-card', - 'command': 'cl-setup-audio', - 'gui': True, - 'rights': ['setupaudio'], - 'logic': {'Install': install.Install}, - 'action': ClSetupAudioAction, - 'datavars': "install", - 'native_error': ( - VariableError, DataVarsError, install.InstallError), - 'setvars': {'cl_action!': 'merge', 
'cl_merge_pkg!': [None], - 'cl_merge_set!': "on", 'cl_setup': 'audio'}, - 'groups': [ - lambda group: group(_("Audio"), - normal=('os_audio',), - expert=( - 'os_audio_default', - 'cl_templates_locate', - 'cl_dispatch_conf', - 'cl_verbose_set'), - next_label=_("Save"))]}, - { - # настройка локали - 'method_name': "setup_locale", - 'category': __("Configuration"), - 'title': __("Locale"), - 'image': 'calculate-setup-locale,locale,preferences-desktop-locale', - 'command': 'cl-setup-locale', - 'gui': True, - 'rights': ['setuplocale'], - 'logic': {'Install': install.Install}, - 'action': ClSetupLocaleAction, - 'datavars': "install", - 'native_error': ( - VariableError, DataVarsError, install.InstallError), - 'setvars': {'cl_action!': 'merge', 'cl_merge_pkg!': [None], - 'cl_merge_set!': "on", 'cl_setup': 'locale'}, - 'groups': [ - lambda group: group(_("Locale"), - normal=('os_install_locale_lang', - 'os_install_clock_timezone'), - expert=( - 'os_install_locale_keyboard_layout', - 'os_install_clock_type', - 'cl_templates_locate', - 'cl_dispatch_conf', - 'cl_verbose_set'), - next_label=_("Save"))]}, - { - # настройка локали - 'method_name': "setup_session", - 'category': __("Configuration"), - 'title': __("Session"), - 'image': 'calculate-setup-session,system-lock-screen', - 'command': 'cl-setup-session', - 'gui': True, - 'rights': ['setupsession'], - 'logic': {'Install': install.Install}, - 'action': ClSetupSessionAction, - 'datavars': "install", - 'native_error': ( - VariableError, DataVarsError, install.InstallError), - 'setvars': {'cl_action!': 'merge', 'cl_merge_pkg!': [None], - 'cl_merge_set!': "on", 'cl_setup': 'session'}, - 'groups': [ - lambda group: group(_("Session"), - normal=('cl_autologin', - 'cl_install_home_crypt_set'), - expert=( - 'cl_templates_locate', - 'cl_dispatch_conf', - 'cl_verbose_set'), - next_label=_("Save"))]}, - { - # настройка тем - 'method_name': "setup_theme", - 'category': None, - 'title': __("Themes"), - 'image': None, - 'command': 
'cl-setup-themes', - 'gui': True, - 'rights': ['setupthemes'], - 'logic': {'Install': install.Install}, - 'action': ClSetupThemesAction, - 'datavars': "install", - 'native_error': ( - VariableError, DataVarsError, install.InstallError), - 'setvars': {'cl_action!': 'merge', 'cl_merge_pkg!': [None], - 'cl_merge_set!': "on", 'cl_setup': 'themes'}, - 'groups': [ - lambda group: group(_("Session"), - normal=(), - expert=( - 'cl_templates_locate', - 'cl_dispatch_conf', - 'cl_verbose_set'), - next_label=_("Save"))]} - ] diff --git a/libs_crutch/lib/__init__.py b/libs_crutch/lib/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/libs_crutch/lib/cl_ini_parser.py b/libs_crutch/lib/cl_ini_parser.py deleted file mode 100644 index 303a2f5..0000000 --- a/libs_crutch/lib/cl_ini_parser.py +++ /dev/null @@ -1,418 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from __future__ import absolute_import -from contextlib import contextmanager - -import sys -import os -import errno -from calculate.lib.cl_xml import xpath -import time -import fcntl - -from .utils.common import _error -from .cl_template import FormatFactory, TemplatesError - -from calculate.lib.cl_lang import setLocalTranslate - -_ = lambda x: x -setLocalTranslate('cl_lib3', sys.modules[__name__]) - - -class iniParser(_error): - """Класс для работы с ini файлами - - """ - - def __init__(self, iniFile=None, text=None): - # название ini файла - self.iniFile = iniFile or "" - # права создаваемого ini-файла - self.mode = 0o640 - # Cоответствует ли формат файла нужному - self.checkIni = None - self.FD = None - self.readOnly = False - self.text = text - self.locked = False - self.formatFactory = FormatFactory(self) - - def joinText(self, iniObj, xmlNewDoc): - """Объединяет два документа""" - newRootNode = xmlNewDoc.documentElement - newBodyNode = xpath.Evaluate('child::body', newRootNode)[0] - newImportBodyNode = iniObj.doc.importNode(newBodyNode, True) - iniObj.docObj.joinBody(iniObj.docObj.body, newImportBodyNode) - # iniObj.docObj.insertBRtoBody(iniObj.docObj.body) - - def setMode(self, mode): - """установка прав создаваемого ini-файла""" - self.mode = mode - - def lockfile(self, fd, fn, timeout=5, readonly=False): - """ - Блокировка файла с таймаутом - """ - if self.locked: - return True - for i in range(0, timeout): - try: - fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) - return True - except IOError as e: - # dirty hack for lock ini.env on cifs - if e.errno == errno.EBADF and readonly: - return False - if e.errno != errno.EAGAIN: - raise e - time.sleep(1) - else: - raise TemplatesError(_("Lock timeout of %s") % fn) - - @property - def iniFile_lock(self): - return self.iniFile + ".lock~" - - def wait_global_lock(self, timeout=10): - lockfn = self.iniFile_lock - for i in range(0, timeout): - if self.locked or not os.path.exists(lockfn): - break - time.sleep(1) - 
- @contextmanager - def lock(self): - lockfn = self.iniFile_lock - lockf = open(lockfn, "w+") - self.lockfile(lockf.fileno(), lockfn) - self.locked = True - try: - yield - finally: - self.locked = False - os.unlink(lockfn) - - def _open(self, mode): - self.wait_global_lock() - return open(self.iniFile, mode) - - def openIniFile(self): - if not self.text is None: - return self.text - if not os.access(self.iniFile, os.R_OK): - return "" - self.FD = self._open("r") - self.lockfile(self.FD.fileno(), self.iniFile, readonly=True) - textIni = self.FD.read() - return textIni - - def openRWIniFile(self): - if not os.access(self.iniFile, os.R_OK): - return "" - try: - self.FD = self._open("r+") - except (IOError, OSError): - self.FD = self._open("r") - self.readOnly = True - self.lockfile(self.FD.fileno(), self.iniFile) - textIni = self.FD.read() - return textIni - - def writeIniFile(self, txtConfig): - if self.readOnly: - self.setError(_("Failed to write to file") - + _(": ") + self.iniFile) - return False - if not os.path.exists(self.iniFile): - try: - # Создание файла - self.FD = self._open("w+") - self.lockfile(self.FD.fileno(), self.iniFile) - os.chmod(self.iniFile, self.mode) - except Exception: - self.setError(_("Failed to create the file") + - _(": ") + self.iniFile) - return False - if not self.FD: - self.setError(_("Failed to write to file") - + _(": ") + self.iniFile) - return False - self.FD.truncate(0) - self.FD.seek(0) - self.FD.write(txtConfig) - self.FD.close() - self.FD = None - return True - - def setVar(self, strHeader, dictVar): - """Заменяет или добавляет область и переменные - - Добавляет область в ini-файл или объединяет с существующей - strHeader - имя области - dictVar - словарь переменных - """ - textIni = self.openRWIniFile() - nameFomat = self.checkIniFile(textIni) - if not nameFomat: - return False - if type(strHeader) in (tuple, list): - # формат plasma - classObj = self.formatFactory.getClassObj("plasma") - else: - if nameFomat == "plasma": - 
self.setError(_("Trying to write a variable of 'samba' " - "format to file %s ('plasma' format)") \ - % self.iniFile) - return False - # формат samba - classObj = self.formatFactory.getClassObj("samba") - # создаем объект - # и записываем в него содержимое ini-файла - objIni = classObj(textIni, self) - # создаем текст из строки заголовка и - # словаря переменных области - txtConfig = objIni.createTxtConfig(strHeader, dictVar) - # создаем объект и записываем в него текст - objIniAdd = classObj(txtConfig, self) - # объединяем объекты для получения результирующего текста - objIni.join(objIniAdd) - # получаем текст - txtConfig = objIni.getConfig().encode("UTF-8") - # записываем его в ini файл - if not self.writeIniFile(txtConfig): - return False - return True - - def isEmptyFile(self, textIni): - """Если файл пустой или содержит только комментарии - False - - иначе - True - """ - if textIni.strip(): - if filter(lambda x: x.strip(), - map(lambda x: x[0].split(";")[0], - map(lambda x: x.split("#"), - textIni.splitlines()))): - return False - else: - return True - else: - return True - - def checkIniFile(self, textIni): - """Проверка на правильность формата файла""" - if self.checkIni is None: - # Ошибка - if textIni is False: - self.checkIni = False - return False - self.checkIni = "samba" - # В файле есть данные - if not self.isEmptyFile(textIni): - try: - objIni = self.formatFactory.getClassObj( - "plasma")(textIni, self) - except Exception: - self.setError(_("Incorrect file format") + _(": ") + \ - self.iniFile) - self.checkIni = False - return self.checkIni - allAreas = objIni.docObj.getAllAreas() - for xmlArea in allAreas: - parentNode = xmlArea.parentNode - if parentNode and parentNode.tagName == "area": - self.checkIni = "plasma" - break - if self.checkIni == "samba": - objIni = self.formatFactory.getClassObj( - "samba")(textIni, self) - xmlBody = objIni.docObj.getNodeBody() - if not xmlBody.firstChild: - self.checkIni = False - return self.checkIni - - def 
delVar(self, strHeader, nameVar): - """Удаляем переменную из ini файла""" - delNameVar = "!%s" % nameVar - dictVar = {delNameVar: "del"} - res = self.setVar(strHeader, dictVar) - return res - - def delArea(self, strHeader): - """Удаляем область из ini файла""" - if type(strHeader) in (tuple, list): - # Формат plasma - delStrHeader = list(strHeader[:]) - delStrHeader[-1] = "!%s" % delStrHeader[-1] - else: - # Формат samba - delStrHeader = "!%s" % strHeader - dictVar = {"del": "del"} - res = self.setVar(delStrHeader, dictVar) - return res - - def getVar(self, strHeader, nameVar, checkExistVar=False): - """Получаем значение переменной из ini-файла""" - textIni = self.openIniFile() - if self.FD: - self.FD.close() - self.FD = None - nameFomat = self.checkIniFile(textIni) - if not nameFomat: - return False - formatPlasma = False - if type(strHeader) in (tuple, list): - # формат plasma - classObj = self.formatFactory.getClassObj("plasma") - formatPlasma = True - else: - if nameFomat == "plasma": - self.setError(_("Trying to fetch a variable of 'samba' " - "format from file %s ('plasma' format)") \ - % self.iniFile) - return False - # формат samba - classObj = self.formatFactory.getClassObj("samba") - # создаем объект и записываем в него содержимое ini-файла - objIni = classObj(textIni, self) - # получаем ноду body - xmlBody = objIni.docObj.getNodeBody() - flagFound, xmlBody = self.getLastNode(objIni, xmlBody, strHeader, - formatPlasma) - if flagFound and xmlBody: - if formatPlasma: - strHeader = strHeader[-1] - # находим в области переменную - res = objIni.docObj.getAreaFieldValues(strHeader, nameVar, xmlBody) - else: - res = False - if checkExistVar: - if res is False: - return False, "" - else: - return True, res - else: - if res is False: - return "" - else: - return res - - def getLastNode(self, objIni, xmlBody, strHeader, formatPlasma): - """Ищет область в XML в которой область с переменными""" - flagFound = True - if not strHeader: - flagFound = False - return 
flagFound, xmlBody - lenStrHeader = len(strHeader) - if formatPlasma and lenStrHeader > 0: - xmlAreas = [xmlBody] - for i in xrange(lenStrHeader - 1): - flagFound = False - for xmlArea in xmlAreas: - xmlAreas = objIni.docObj.getArea(strHeader[i], xmlArea) - if xmlAreas: - flagFound = True - break - if xmlAreas: - xmlBody = xmlAreas[0] - return flagFound, xmlBody - - def getAreaVars(self, strHeader): - """Получаем все переменнные области из ini-файла""" - textIni = self.openIniFile() - if self.FD: - self.FD.close() - self.FD = None - nameFomat = self.checkIniFile(textIni) - if not nameFomat: - return False - formatPlasma = False - if type(strHeader) in (tuple, list): - # формат plasma - classObj = self.formatFactory.getClassObj("plasma") - formatPlasma = True - else: - if nameFomat == "plasma": - self.setError(_("Trying to fetch a variable of 'samba' " - "format from file %s ('plasma' format)") \ - % self.iniFile) - return False - # формат samba - classObj = self.formatFactory.getClassObj("samba") - # создаем объект типа samba и записываем в него содержимое ini-файла - objIni = classObj(textIni, self) - # получаем ноду body - xmlBody = objIni.docObj.getNodeBody() - flagFound, xmlBody = self.getLastNode(objIni, xmlBody, strHeader, - formatPlasma) - if flagFound and xmlBody: - if formatPlasma: - strHeader = strHeader[-1] - # если находим область то выдаем словарем все переменные иначе False - res = objIni.docObj.getAreaFields(strHeader, xmlBody, allVars=True) - else: - res = False - if res is False: - return {} - else: - return res - - def getAllSectionNames(self): - """Получаем все имена секций определенных в ini файле - - Если формат ini файла plasma то имя секции - - имена нескольких секций через запятую - """ - textIni = self.openIniFile() - if self.FD: - self.FD.close() - self.FD = None - nameFomat = self.checkIniFile(textIni) - if not nameFomat: - return False - if nameFomat == "samba": - # создаем объект типа samba и записываем в него содержимое ini-файла - 
objIni = self.formatFactory.getClassObj("samba")(textIni, self) - elif nameFomat == "plasma": - # создаем объект типа plasma и записываем в него содержимое - # ini-файла - objIni = self.formatFactory.getClassObj("plasma")(textIni, self) - else: - return [] - xmlNodes = objIni.docObj.getAllAreas() - # Имена секций ini файла - namesSection = [] - if nameFomat == "plasma": - for xmlNode in xmlNodes: - nSect = objIni.docObj.getNameArea(xmlNode) - if nSect: - namesSect = [nSect] - parentNode = xmlNode.parentNode - while parentNode != objIni.docObj.body: - nameSect = objIni.docObj.getNameArea(parentNode) - if nameSect: - namesSect.append(nameSect) - parentNode = parentNode.parentNode - else: - return [] - namesSection.append(",".join(reversed(namesSect))) - elif nameFomat == "samba": - # получаем ноду body - for xmlNode in xmlNodes: - nSect = objIni.docObj.getNameArea(xmlNode) - if nSect: - namesSection.append(nSect) - return namesSection diff --git a/libs_crutch/lib/cl_lang.py b/libs_crutch/lib/cl_lang.py deleted file mode 100644 index 5f0090e..0000000 --- a/libs_crutch/lib/cl_lang.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import gettext -from gettext import gettext as _ -import threading -import types -import sys -import re -from gettext import Catalog -from importlib import reload - -class Lang: - """ - Multilanguage class - """ - _modnames = {} - GP = [""] - orig_gettext = _ - - def __init__(self): - self.nameDomain = self.GP[0] - self.__catalog = None - # translate language for all modules - self._translators = {} - - def __translate(self, message): - """Method return message without changes""" - return self.__gettranslate()(message) - - def setLanguage(self, module, glob=False): - """Set translate language for modules 'module'. - - module - export python module - if the module export other modules, then lang will be - set for them - Method must be call after module for translate""" - if glob: - for name, mod in vars(module).items(): - if (isinstance(mod, types.ModuleType) and - not name.startswith('__') and - name not in sys.builtin_module_names and - (hasattr(mod, '__file__') and ( - "calculate" in mod.__file__ or - not mod.__file__.startswith('/usr/lib')))): - self.__setLang(mod) - self.setLanguage(mod, True) - return self.__setLang(module) - - def __setLang(self, module): - """ Set translate language for module 'module'. 
- """ - if module.__name__ in self._modnames.keys(): - return True - - module._ = self.__translate - self._modnames[module.__name__] = module._ - - @staticmethod - def get_current_lang(): - """ - Получить текущий язык - """ - env = os.environ - cur_thread = threading.currentThread() - if hasattr(cur_thread, "lang"): - return cur_thread.lang - return env.get("LANG", "en_US.utf8").split('.')[0].split("_")[0] - - def __gettranslate(self): - l = self.get_current_lang() - if l in self._translators: - return self._translators[l] - if l == 'en': - trans = lambda x: x - else: - la = [l] - reload(gettext) - if gettext.find(self.nameDomain, self.__catalog, la): - """Если найден словарь то инициализируем переводчик""" - transl = gettext.translation(self.nameDomain, self.__catalog, - la) - trans = transl.gettext - else: - trans = lambda x: x - self._translators[l] = trans - return trans - - def getTranslatorByName(self, namemodule): - """Method for detect already imported translate modules - """ - return self._modnames.get(namemodule, 0) - - def setGlobalDomain(self, nameDomain): - """ Method for set global translate domain - """ - self.GP[0] = nameDomain - self.nameDomain = self.GP[0] - return True - - def setLocalDomain(self, nameDomain): - """ Method for set local translate domain - """ - self.nameDomain = nameDomain - return True - - -def setGlobalTranslate(domain, *modules): - _lang = Lang() - _lang.setGlobalDomain(domain) - for mod in modules: - _lang.setLanguage(mod, glob=True) - - -def setLocalTranslate(domain, *modules): - _lang = Lang() - _lang.setLocalDomain(domain) - for mod in modules: - _lang.setLanguage(mod) - - -def getLazyLocalTranslate(translateFunc): - class Translate: - def __init__(self, s): - self.s = s - self._format_args = None - - def __str__(self): - if self._format_args is None: - return translateFunc(self.s) - else: - return translateFunc(self.s).format(*self._format_args[0], - **self._format_args[1]) - - def __hash__(self): - return hash(self.s) - 
- def format(self, *args, **kwargs): - self._format_args = (args, kwargs) - return self - - return Translate - - -class RegexpLocalization(object): - def __init__(self, domain, languages=(Lang.get_current_lang())): - try: - self.set_translate_dict(Catalog(domain, - languages=languages)._catalog) - except IOError: - self._catalog = {} - - def set_translate_dict(self, d): - def create_key(k): - try: - return re.compile(k.replace("\\\\", "\\")) - except re.error: - return None - - self._catalog = filter(lambda x: x[0], - ((create_key(k), v) for k, v in - sorted(d.items(), reverse=True) if k)) - - def translate(self, s): - for k, v in self._catalog: - try: - s = k.sub(v, s) - except UnicodeDecodeError: - return s - return s diff --git a/libs_crutch/lib/cl_ldap.py b/libs_crutch/lib/cl_ldap.py deleted file mode 100644 index 9352008..0000000 --- a/libs_crutch/lib/cl_ldap.py +++ /dev/null @@ -1,424 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import print_function -from __future__ import absolute_import -import sys -import ldap -from .utils.common import _error -from collections import defaultdict -from ldif import LDIFParser, LDIFWriter -import cStringIO - -from calculate.lib.cl_lang import setLocalTranslate -from functools import reduce -_ = lambda x: x -setLocalTranslate('cl_lib3', sys.modules[__name__]) - - -class ldapFun(_error): - """ - Объект для работы с LDAP сервером - - подключение к серверу и поиск данных - """ - - def __init__(self, dnUser, password, host="localhost"): - self.conLdap = False - # Получаем соединение с LDAP - try: - self.conLdap = self.__ldapConnect(dnUser, password, host) - except ldap.LDAPError as e: - self.setError(e[0]['desc']) - - def __ldapConnect(self, dnUser, password, host): - """Соединение с LDAP сервером""" - con_ldap = ldap.initialize('ldap://%s' % host) - con_ldap.simple_bind_s(dnUser, password) - return con_ldap - - def ldapSearch(self, base_dn, search_scope, search_filter, retrieve_attrs): - try: - ldap_result_id = self.conLdap.search(base_dn, search_scope, - search_filter, - retrieve_attrs) - result_set = [] - while 1: - result_type, result_data = self.conLdap.result( - ldap_result_id, 0) - if isinstance(result_data, list) and not result_data: - break - else: - if result_type == ldap.RES_SEARCH_ENTRY: - result_set.append(result_data) - except ldap.NO_SUCH_OBJECT: - return [] - except Exception: - return False - return result_set - - -class LDAPConnectError(Exception): - pass - -class LDAPBadSearchFilter(LDAPConnectError): - pass - -class LDAPConnect(ldapFun): - """ - Объект работающий с исключениями - """ - - def setError(self, message): - raise LDAPConnectError(message) - - def ldap_search(self, base_dn, search_scope=ldap.SCOPE_BASE, - search_filter='(objectClass=*)', - retrieve_attrs=None): - try: - ldap_result_id = self.conLdap.search( - base_dn, search_scope, search_filter, retrieve_attrs) - while 1: - result_type, result_data = 
self.conLdap.result( - ldap_result_id, 0) - if isinstance(result_data, list) and not result_data: - break - else: - if result_type == ldap.RES_SEARCH_ENTRY: - yield result_data - except ldap.NO_SUCH_OBJECT: - pass - except ldap.LDAPError as e: - error = e[0]['desc'] - if "Bad search filter" in error: - raise LDAPBadSearchFilter(error) - raise LDAPConnectError(error) - except Exception as e: - raise LDAPConnectError(str(e)) - - def ldap_dump(self, fobj, base_dn, search_scope=ldap.SCOPE_SUBTREE, - search_filter='(objectClass=*)', retrieve_attrs=None): - """ - вернуть dump ldif - :param fobj: - :param base_dn: - :param search_scope: - :param search_filter: - :param retrieve_attrs: - :return: - """ - try: - dn_list = self.conLdap.search_s( - base_dn, search_scope, search_filter, retrieve_attrs) - writer = LDIFWriter(fobj) - for dn, f in dn_list: - writer.unparse(dn, f) - except ldap.NO_SUCH_OBJECT: - pass - except ldap.LDAPError as e: - error = e[0]['desc'] - if "Bad search filter" in error: - raise LDAPBadSearchFilter(error) - raise LDAPConnectError(error) - except Exception as e: - raise LDAPConnectError(str(e)) - - def ldap_simple_search(self, base_dn, search_filter, attr, first=True): - """ - Простой поиск аттрибутов - :param base_dn: - :param search_filter: - :param attr: - :param first: вернуть первый из списка - :return: - """ - for entry in self.ldap_search(base_dn, ldap.SCOPE_ONELEVEL, - search_filter, [attr]): - attrs = entry[0][1] - yield attrs[attr][0] - - def ldap_modify_attrs(self, base_dn, attrs): - """Модифицирует аттрибуты DN""" - attrs = list(attrs) - if attrs: - try: - self.conLdap.modify_s(base_dn, attrs) - except ldap.LDAPError as e: - raise LDAPConnectError(e[0]['desc']) - - def ldap_modify_dn(self, base_dn, new_dn): - """ - Изменить ветку - :param base_dn: предыдущее название - :param new_dn: новое название - :return: - """ - try: - self.conLdap.modrdn_s(base_dn, new_dn) - except ldap.LDAPError as e: - raise LDAPConnectError(e[0]['desc']) - - def 
ldap_remove_dn(self, base_dn): - """ - Удалить указанный dn - :param base_dn: удаляемый dn - :return: - """ - try: - self.conLdap.delete_s(base_dn) - except ldap.LDAPError as e: - raise LDAPConnectError(e[0]['desc']) - - - - # @adminConnectLdap - # def modifyElemDN(self, relDN, newFirstDn): - # """Изменяет основной элемент DN (uid, cn и др.)""" - # DN = self.addDN(relDN,self.baseDN) - # try: - # self.conLdap.modrdn_s(DN, newFirstDn) - # except ldap.LDAPError, e: - # self.printERROR(e[0]['desc']) - # return False - # return True - - # @adminConnectLdap - # def delDN(self, relDN): - # """Удаляет одиночный DN""" - # DN = self.addDN(relDN,self.baseDN) - # try: - # self.conLdap.delete_s(DN) - # except ldap.LDAPError, e: - # self.printERROR(e[0]['desc']) - # return False - # return True - - -class ldapUser(_error): - """Получение данных для пользователя из LDAP""" - # Данные из /etc/ldap.conf - _dictData = {} - # Объект LDAP - ldapObj = False - # Подключение к LDAP - conLdap = False - - def addDN(self, *arg): - """ - Append text DN elements - """ - return ",".join(x for x in arg if x) - - def getDataInLdapConf(self, bindData=True, cache=True): - """Получение данных из /etc/ldap.conf""" - data = [("host", 'host'), - ("usersDN", 'nss_base_passwd'), - ("groupsDN", 'nss_base_group')] - if bindData: - data += [("bindDn", 'binddn'), ("bindPw", 'bindpw')] - names_data = [x[0] for x in data] - # Данные из кеша, если он есть - if (cache and self._dictData and - set(names_data) <= set(self._dictData.keys())): - return self._dictData - file_name = "/etc/ldap.conf" - get_str_list = lambda x: reduce(lambda x, y: [x, y.upper()], ([x] * 2)) - workdata = map(lambda x: (x[0], get_str_list(x[1]), len(x[1])), data) - dict_data = defaultdict(list) - delimeter = (" ", "\t") - try: - for line in open(file_name): - for name, keys, lenKey in workdata: - if (name not in dict_data.keys() and - any(line.startswith(x) for x in keys) and - len(line) > lenKey): - spl = line[lenKey] - if spl in 
delimeter: - param_value = line.rpartition(spl)[2] - if name in ("usersDN", "groupsDN"): - dict_data[name].append( - param_value.partition('?')[0].strip()) - else: - dict_data[name].append(param_value.strip()) - except Exception: - # self.setError(_("Can not open %s")%fileName) - return False - if set(dict_data.keys()) == set(names_data): - # Кеширование данных - if cache: - self._dictData.clear() - self._dictData.update(dict_data) - return dict_data - else: - return {} - - def getBindConnectData(self): - """Получение данных для соединения с LDAP bind пользователем""" - configdata = self.getDataInLdapConf() - if configdata: - bind_dn = configdata["bindDn"][0] - bind_pw = configdata["bindPw"][0] - host = configdata["host"][0] - return bind_dn, bind_pw, host - return False - - def getUsersDN(self): - """Получение DN пользователей""" - configdata = self.getDataInLdapConf(bindData=False) - if configdata: - return self._dictData["usersDN"][0] - return False - - def getHost(self): - """Получение LDAP хоста""" - configdata = self.getDataInLdapConf(bindData=False) - if configdata: - return configdata["host"][0] - return False - - def getGroupsDN(self): - """Получение списка DN групп""" - configdata = self.getDataInLdapConf(bindData=False) - if configdata: - return self._dictData["groupsDN"] - return False - - def connectLdap(self): - """ - Connect to LDAP - """ - connectData = self.getBindConnectData() - if not connectData: - return {} - bindDn, bindPw, host = connectData - self.getUsersDN() - # Соединяемся с LDAP - return self.ldapConnect(bindDn, bindPw, host) - - def getUserLdapInfo(self, user_name, shadowAttr=False): - """Выдаем информацию о пользователе из LDAP""" - if not self.connectLdap(): - return False - users_dn = self.getUsersDN() - groups_dn = self.getGroupsDN() - search_user = self.ldapObj.ldapSearch(users_dn, ldap.SCOPE_ONELEVEL, - "uid=%s" % user_name, None) - if not search_user: - return False - convert_dict = {'uid': ('user', 'uidNumber'), - 'gid': 
('user', 'gidNumber'), - 'fullName': ('user', 'cn'), - 'mail': ('user', 'mail'), - 'jid': ('user', 'registeredAddress'), - 'home': ('user', 'homeDirectory'), - 'group': ('group', 'cn')} - if shadowAttr: - convert_dict.update({'loginShell': ('user', 'loginShell'), - 'shadowLastChange': ( - 'user', 'shadowLastChange'), - 'shadowMin': ('user', 'shadowMin'), - 'shadowMax': ('user', 'shadowMax'), - 'shadowWarning': ('user', 'shadowWarning'), - 'shadowExpire': ('user', 'shadowExpire'), - 'shadowFlag': ('user', 'shadowFlag'), - 'groups': ('group', 'memberUid')}) - list_user_attr = [k for k, v in convert_dict.items() if v[0] == "user"] - list_group_attr = [k for k, v in convert_dict.items() - if v[0] == "group"] - uid = "" - gid = "" - dict_out = {} - for dict_attr in list_user_attr: - ldap_attr = convert_dict[dict_attr][1] - if ldap_attr in search_user[0][0][1]: - dict_out[dict_attr] = search_user[0][0][1][ldap_attr][0] - else: - dict_out[dict_attr] = "" - if dict_attr == 'uid': - uid = dict_out[dict_attr] - if dict_attr == 'gid': - gid = dict_out[dict_attr] - if gid: - for dict_attr in list_group_attr: - search_group = [] - ldap_attr = convert_dict[dict_attr][1] - if dict_attr == "group": - for groupDN in groups_dn: - search_group = self.ldapObj.ldapSearch( - groupDN, ldap.SCOPE_ONELEVEL, "gidNumber=%s" % gid, - None) - if search_group: - break - if search_group: - data = search_group[0][0][1] - if ldap_attr in data: - dict_out[dict_attr] = data[ldap_attr][0] - else: - dict_out[dict_attr] = "" - else: - dict_out[dict_attr] = "" - elif dict_attr == "groups": - user_groups_data = [] - for groupDN in groups_dn: - search_group = self.ldapObj.ldapSearch( - groupDN, ldap.SCOPE_ONELEVEL, - "%s=%s" % (ldap_attr, user_name), - ["cn", "gidNumber"]) - if search_group: - user_groups_data.extend( - [(x[0][1]["cn"][0], x[0][1]["gidNumber"][0]) - for x in search_group]) - dict_out[dict_attr] = user_groups_data - if uid and gid: - return dict_out - else: - return {} - - def 
ldapConnect(self, bind_dn, bind_pw, host): - """Подключение к LDAP""" - if not self.ldapObj: - ldap_obj = ldapFun(bind_dn, bind_pw, host) - if ldap_obj.getError(): - ldap_obj.clearErrors() - return False - # Устанавливаем у объекта соединение и объект LDAP функций - self.ldapObj = ldap_obj - self.conLdap = ldap_obj.conLdap - return True - - -class LDIFError(Exception): - pass - - -class LDIFAdd(LDIFParser): - """ - Добавление LDIF в базу - """ - - def __init__(self, ldif_data, ldap_connect): - self.ldap_connect = ldap_connect - LDIFParser.__init__(self, cStringIO.StringIO(ldif_data)) - - def handle(self, dn, entry, controls=None): - # (self, dn, entry, *args): - try: - self.ldap_connect.add_s(dn, entry.items()) - except ldap.LDAPError as e: - raise LDIFError(e[0]['desc']) - except Exception as e: - print(str(e)) - raise LDIFError(_("Error in LDIF file")) diff --git a/libs_crutch/lib/cl_log.py b/libs_crutch/lib/cl_log.py deleted file mode 100644 index 5438b0f..0000000 --- a/libs_crutch/lib/cl_log.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import logging -import logging.handlers -from . 
import cl_overriding - - -class log: - def __init__(self, program_name, level=logging.DEBUG, - formatter="%(asctime)s - %(name)s - " - "%(levelname)s - %(message)s", - filename='/var/log/calculate/calculate2.log', - maxBytes=1048576, backupCount=5): - self.programName = program_name - self.logger = logging.getLogger(self.programName) - self.level = level - self.logger.setLevel(self.level) - self.formatter = logging.Formatter(formatter) - self.filename = filename - self.maxBytes = maxBytes - self.backupCount = backupCount - - def addHandler(self): - """Добавление обработчика""" - if not self.logger.handlers: - try: - handler = logging.handlers.RotatingFileHandler( - self.filename, maxBytes=self.maxBytes, - backupCount=self.backupCount) - except Exception as e: - cl_overriding.printERROR("logging - %s - %s" % - (self.programName, str(e))) - return False - handler.setLevel(self.level) - handler.setFormatter(self.formatter) - self.logger.addHandler(handler) - return True - - def _addLogMessage(self, typeMessage, message, **kwargs): - """Добавить сообщение данного типа""" - if self.addHandler(): - getattr(self.logger, typeMessage)(message, **kwargs) - - def debug(self, message, **kwargs): - """Отладочное сообщение""" - self._addLogMessage("debug", message, **kwargs) - - def info(self, message, **kwargs): - """Информационное сообщение""" - self._addLogMessage("info", message, **kwargs) - - def warn(self, message, **kwargs): - """Предупреждающее сообщение""" - self._addLogMessage("warn", message, **kwargs) - - def error(self, message, **kwargs): - """Сообщение о ошибке""" - self._addLogMessage("error", message, **kwargs) - - def critical(self, message, **kwargs): - """Критическое сообщение""" - self._addLogMessage("critical", message, **kwargs) diff --git a/libs_crutch/lib/cl_overriding.py b/libs_crutch/lib/cl_overriding.py deleted file mode 100644 index 69d9d3e..0000000 --- a/libs_crutch/lib/cl_overriding.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- - -# 
Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import sys -import gettext - - -def __findFileMO(domain, localedir=None, languages=None, all=0): - """Модифицированный метод, ищет файл перевода - - замена gettext.find""" - if localedir is None: - localedir = gettext._default_localedir - if languages is None: - languages = [] - for envar in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'): - val = os.environ.get(envar) - if val: - languages = val.split(':') - break - if 'C' not in languages: - languages.append('C') - # now normalize and expand the languages - nelangs = [] - for lang in languages: - for nelang in gettext._expand_lang(lang): - if nelang not in nelangs: - nelangs.append(nelang) - # select a language - if all: - result = [] - else: - result = None - for lang in nelangs: - if lang == 'C': - break - mofile = os.path.join(localedir, '%s_%s.mo' % (domain, lang)) - if os.path.exists(mofile): - if all: - result.append(mofile) - else: - return mofile - return result - - -def exit(codeExit): - """Метод выхода из программы""" - sys.exit(codeExit) - - -def printERROR(err_message): - """Вывод ошибки""" - if err_message or err_message == "": - if type(err_message) != str: - err_message = str(err_message) - err_message += "\n" - try: - sys.stderr.write(err_message) - sys.stderr.flush() - except IOError: - exit(1) - - -def printSUCCESS(message, printBR=True): - """Вывод сообщения о успехе""" - if 
message or message == "": - if type(message) != str: - message = str(message) - if printBR: - message += "\n" - try: - sys.stdout.write(message) - sys.stdout.flush() - except IOError: - exit(1) diff --git a/libs_crutch/lib/cl_print.py b/libs_crutch/lib/cl_print.py deleted file mode 100644 index 6401ca9..0000000 --- a/libs_crutch/lib/cl_print.py +++ /dev/null @@ -1,233 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys -from .utils.text import _u, get_term_size -from . 
import cl_overriding - - -class color_print(object): - _printSysOut = sys.stdout - - def getconsolewidth(self): - """Получить ширину текущей консоли""" - fd_stdout = self._printSysOut.fileno() - h, w = get_term_size(fd_stdout) - if w is None: - return 80 - return w - - def printRight(self, offsetLeft, offsetRight): - """Добавляет необходимое количество пробелов: - - количество пробелов = (ширина консоли - offsetLeft - offsetRight) - """ - cols = self.getconsolewidth() - for i in range(cols - offsetLeft - offsetRight): - self._printSysOut.write(" ") - - def colorPrint(self, attr, fg, bg, string): - """Раскрашивает выводимое сообщение - - Параметры: - attr - это атрибут - fg - цвет символа - bg - цвет фона - - в случае если параметр равен "" то он не изменяется - - attr может принимать следующие значения: - 0 сбросить все атрибуты (вернуться в нормальный режим) - 1 яркий (обычно включает толстый шрифт) - 2 тусклый - 3 подчёркнутый - 5 мигающий - 7 реверсный - 8 невидимый - - fg может принимать следующие значения: - 30 чёрный - 31 красный - 32 зелёный - 33 жёлтый - 34 синий - 35 фиолетовый - 36 голубой - 37 белый - - bg может принимать следующие значения: - 40 чёрный - 41 красный - 42 зелёный - 43 жёлтый - 44 синий - 45 фиолетовый - 46 голубой - 47 белый - """ - lst = [] - if attr: - lst.append(attr) - if fg: - lst.append(fg) - if bg: - lst.append(bg) - self._printSysOut.write("\033[%sm%s\033[0m" % (";".join(lst), string)) - - def redBrightPrint(self, string): - """Печатает яркое красное сообщение""" - self.colorPrint("1", "31", "", string) - - def greenBrightPrint(self, string): - """Печатает яркое зеленое сообщение""" - self.colorPrint("1", "32", "", string) - - def yellowBrightPrint(self, string): - """Печатает яркое желтое сообщение""" - self.colorPrint("1", "33", "", string) - - def blueBrightPrint(self, string): - """Печатает яркое cинее сообщение""" - self.colorPrint("1", "34", "", string) - - def lenString(self, string): - """Получаем длинну строки""" - 
stringUnicode = _u(string) - lenString = len(stringUnicode) - return lenString - - def defaultPrint(self, string): - try: - self._printSysOut.write(string) - except UnicodeError: - self._printSysOut.write(string.encode('utf-8')) - try: - self._printSysOut.flush() - except IOError: - cl_overriding.exit(1) - - def printLine(self, argL, argR, offsetL=0, printBR=True): - """Печатает справа и слева консоли цветные сообщения""" - # Допустимые цвета - color_dict = { - # цвет по умолчанию - '': self.defaultPrint, - # ярко зеленый - 'greenBr': self.greenBrightPrint, - # ярко голубой - 'blueBr': self.blueBrightPrint, - # ярко красный - 'redBr': self.redBrightPrint, - # ярко желтый - 'yellowBr': self.yellowBrightPrint - } - # cмещение от левого края консоли - # offsetL = 0 - for color, leftString in argL: - offsetL += self.lenString(leftString) - if color in color_dict: - # печатаем и считаем смещение - color_dict[color](leftString) - else: - color_dict[''](leftString) - # cмещение от правого края консоли - offset_r = 0 - for color, rightString in argR: - offset_r += self.lenString(rightString) - # Добавляем пробелы - if offset_r: - self.printRight(offsetL, offset_r) - for color, rightString in argR: - if color in color_dict: - # печатаем и считаем смещение - color_dict[color](rightString) - else: - color_dict[''](rightString) - if printBR: - self._printSysOut.write("\n") - try: - self._printSysOut.flush() - except IOError: - cl_overriding.exit(1) - - def printNotOK(self, string, offsetL=0, printBR=True): - """Вывод на печать в случае сбоя""" - self._printSysOut = sys.stderr - self.printLine((('greenBr', ' * '), - ('', string), - ), - (('blueBr', '['), - ('redBr', ' !! '), - ('blueBr', ']'), - ), offsetL, printBR) - - def printOnlyNotOK(self, string, offsetL=0, printBR=True): - """Вывод на печать в случае сбоя""" - self._printSysOut = sys.stderr - self.printLine((('', string),), - (('blueBr', '['), - ('redBr', ' !! 
'), - ('blueBr', ']'), - ), offsetL, printBR) - - def printOK(self, string, offsetL=0, printBR=True): - """Вывод на печать в случае успеха""" - self._printSysOut = sys.stdout - self.printLine((('greenBr', ' * '), - ('', string), - ), - (('blueBr', '['), - ('greenBr', ' ok '), - ('blueBr', ']'), - ), offsetL, printBR) - - def printOnlyOK(self, string, offsetL=0, printBR=True): - """Вывод на печать в случае успеха""" - self._printSysOut = sys.stdout - self.printLine((('', string),), - (('blueBr', '['), - ('greenBr', ' ok '), - ('blueBr', ']'), - ), offsetL, printBR) - - def printWARNING(self, string, offsetL=0, printBR=True): - """Вывод на печать предупреждения""" - self._printSysOut = sys.stdout - self.printLine((('yellowBr', ' * '), - ('', string), - ), - (('', ''), - ), offsetL, printBR) - - def printERROR(self, string, offsetL=0, printBR=True): - """Вывод на печать ошибки""" - self._printSysOut = sys.stderr - self.printLine((('redBr', ' * '), - ('', string), - ), - (('', ''), - ), offsetL, printBR) - - def printSUCCESS(self, string, offsetL=0, printBR=True): - """Вывод на печать в случае успеха без [ok] справа""" - self._printSysOut = sys.stdout - self.printLine((('greenBr', ' * '), - ('', string), - ), - (('', ''), - ), offsetL, printBR) diff --git a/libs_crutch/lib/cl_progressbar.py b/libs_crutch/lib/cl_progressbar.py deleted file mode 100644 index 9abe0d3..0000000 --- a/libs_crutch/lib/cl_progressbar.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2012-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import sys -import calculate.contrib.progressbar as progressbar - -class DoubleMarkerBar(progressbar.Bar): - """ - Прогресс с двойным маркером - """ - - def update(self, pbar, width): - left, marker, right = (progressbar.format_updatable(i, pbar) for i in - (self.left, self.marker, self.right)) - - width -= len(left) + len(right) - # Marker must *always* have length of 2 - count = int(float(pbar.currval) / pbar.maxval * width) - marker = str(marker[:1] * (count - 1) + - (marker[1:] if count > 0 else "")) - - if self.fill_left: - return '%s%s%s' % (left, marker.ljust(width, self.fill), right) - else: - return '%s%s%s' % (left, marker.rjust(width, self.fill), right) - - -class StubProgressBar(object): - def update(self, percents): - pass - - def finish(self): - pass - - -class StubMessageBox(object): - def critical(self, message): - pass - - def warning(self, message): - pass - - -def get_progress_bar(bartype="text", title=""): - """ - Получить объект прогресс бар - """ - if bartype == "text": - return progressbar.ProgressBar( - maxval=100, - widgets=[DoubleMarkerBar(left="[", right="]", marker="=>"), " ", - progressbar.Percentage(), " ", - progressbar.Timer( - format="Time:%s")]).start() - elif bartype == "gui": - from .cl_progressbar_gui import ClProgressDialog - if ClProgressDialog: - pbar = ClProgressDialog() - pbar.setCancelButton(None) - pbar.adjustSize() - pbar.setWindowTitle(title.decode('utf-8')) - pbar.setAutoClose(False) - pbar.setAutoReset(False) - pbar.setMaximum(0) - 
pbar.setLabelText(title.decode('utf-8')) - pbar.setTextVisible(False) - pbar.setStyleSheet("QProgressBar {border:none; text-align: center;}") - return pbar - return StubProgressBar() - - -def get_message_box(): - """ - Получить message box - """ - from .cl_progressbar_gui import ClMessageBox - if ClMessageBox: - return ClMessageBox() - else: - return StubMessageBox() diff --git a/libs_crutch/lib/cl_progressbar_gui.py b/libs_crutch/lib/cl_progressbar_gui.py deleted file mode 100644 index d2c8605..0000000 --- a/libs_crutch/lib/cl_progressbar_gui.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2018 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import print_function -import sys -import signal -from os import environ -from multiprocessing import Process, Queue - -print("Import QtGUI") - -try: - from PyQt5 import QtCore - from PyQt5 import QtGui - from PyQt5 import QtWidgets - - class ControlledProgressDialog(QtWidgets.QProgressDialog): - """ - QProgressDialog controlled by pipe - """ - - def __init__(self, inQueue, outQueue, *args, **kwargs): - QtWidgets.QProgressDialog.__init__(self, *args, **kwargs) - self.progress = QtWidgets.QProgressBar(self) - self.progress.setFormat("%p%") - self.setBar(self.progress) - self.inQueue = inQueue - self.outQueue = outQueue - self.timer = QtCore.QTimer(self) - if not hasattr(self.timer.timeout, "connect"): - self.timer.timeout.connect = lambda x: x - self.timer.timeout.connect(self.dispatcher) - self.timer.start(50) - self.center() - - def setTextVisible(self, visible): - self.progress.setTextVisible(visible) - - def center(self): - screen = QtWidgets.QDesktopWidget().screenGeometry() - size = self.geometry() - self.move((screen.width() - size.width()) / 2, - (screen.height() - size.height()) / 2) - - @QtCore.pyqtSlot() - def dispatcher(self): - """ - Dispatcher called by 50ms - """ - while not self.inQueue.empty(): - cmd, args, ret = self.inQueue.get() - if cmd == "quit": - self.timer.stop() - self.close() - else: - res = getattr(self, cmd)(*args) - if ret: - self.outQueue.put(res) - self.center() - - def sigint_handler(*args): - pass - - class ClMessageBox(object): - """ - ProgressDialog in other process - """ - proc = None - - def runProgress(self, message, typemes="warning"): - signal.signal(signal.SIGINT, sigint_handler) - app = QtWidgets.QApplication(sys.argv) - getattr(QtWidgets.QMessageBox, typemes)( - None, "", message, - QtWidgets.QMessageBox.Close, - QtWidgets.QMessageBox.Close) - app.quit() - - def critical(self, message): - self.proc = Process(target=self.runProgress, - args=(message, "critical")) - self.proc.start() - self.proc.join() - - 
def warning(self, message): - self.proc = Process(target=self.runProgress, - args=(message, "warning")) - self.proc.start() - self.proc.join() - - class ClProgressDialog: - """ - ProgressDialog in other process - """ - homeDir = '/root' - - def runProgress(self, inQueue, outQueue): - signal.signal(signal.SIGINT, sigint_handler) - environ['HOME'] = self.homeDir - app = QtWidgets.QApplication(sys.argv) - progressDialog = ControlledProgressDialog(inQueue, outQueue) - progressDialog.exec_() - app.quit() - - setMaximum = None - setTextVisible = None - setValue = None - - methods = ["autoClose", "autoReset", "colorCount", "depth", - "maximum", "minimum", "minimumDuration", - "setLabelText", "setMaximum", "setMinimum", - "setMinimumDuration", "setRange", "setValue", - "setAutoClose", "setAutoReset", "setWindowTitle", - "setCancelButton", "value", "setTextVisible", - "adjustSize", "setStyleSheet"] - - def __init__(self): - self.outQueue = Queue() - self.finished = False - self.inQueue = Queue() - Process(target=self.runProgress, - args=(self.outQueue, self.inQueue)).start() - for method in self.methods: - setattr(self, method, self.proxyCall(method)) - - def proxyCall(self, method): - def wrapper(*args, **kwargs): - needRet = kwargs.get('needRet', False) - self.outQueue.put((method, args, needRet)) - if needRet: - return self.inQueue.get() - return None - - return wrapper - - def quit(self): - self.outQueue.put(("quit", (), False)) - self.finished = True - self.outQueue.close() - self.inQueue.close() - - def finish(self): - self.quit() - - def update(self, value): - self.setMaximum(100) - self.setTextVisible(True) - self.setValue(min(value, 99)) - -except Exception: - ControlledProgressDialog = None - ClProgressDialog = None - ClMessageBox = None diff --git a/libs_crutch/lib/cl_template.py b/libs_crutch/lib/cl_template.py deleted file mode 100644 index 4dc9f32..0000000 --- a/libs_crutch/lib/cl_template.py +++ /dev/null @@ -1,6749 +0,0 @@ -# -*- coding: utf-8 -*- - -# 
Copyright 2008-2016 Mir Calculate. http://www.calculate-linux.org -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -import sys -import os -import stat -import re -import importlib -from calculate.lib.utils.portage import getInstalledAtom, RepositoryPath, \ - searchProfile, RepositorySubstituting -from calculate.lib.cl_xml import xmlShare -from calculate.lib.utils.system import SystemPath, FUser, emerge_running -from functools import wraps -import types -import random -import string -import time -import glob -import hashlib -import codecs -import uuid -from fnmatch import fnmatch -from math import sqrt -from itertools import * -from collections import OrderedDict -from operator import lt, le, eq, ne, ge, gt -import shutil - -from .utils.common import (_error, _warning, getTupleVersion, getPortageUidGid, - isBootstrapDataOnly) -from .utils.text import _u -from .utils.portage import (isPkgInstalled, reVerSplitToPV, EmergeLog, - getInstalledAtom, - EmergeLogPackageTask, getPkgUses, RepositoryPath) -from .utils.content import PkgContents, checkContents, getCfgFiles, fillContents -from .utils.files import (getModeFile, listDirectory, removeDir, typeFile, - scanDirectory, FilesError, dir_sync, find, getProgPath, - pathJoin, readFile, readLinesFile, process) -from .utils.mount import Mounts -from .utils.tools import iterate_list, has_any, Locker -from .datavars import DataVarsError, VariableError, CriticalError, SimpleDataVars -from 
calculate.lib.configparser import (ConfigParser, NoSectionError, - ParsingError) - -from calculate.lib.cl_lang import setLocalTranslate, RegexpLocalization - -_ = lambda x: x -setLocalTranslate('cl_lib3', sys.modules[__name__]) - -PORTAGEUID, PORTAGEGID = getPortageUidGid() - - -class TemplatesError(Exception): - """ - Error on templates appling - """ - - -def catch_no_space_left(f): - def wrapper(*args, **kw): - try: - return f(*args, **kw) - except IOError as e: - if e.errno == 28: - raise TemplatesError(_("No space left on device")) - raise - - return wrapper - -def post_unlock_packages(f): - def wrapper(self, *args, **kw): - if not kw.get("rerun", True): - return f(self, *args, **kw) - else: - try: - return f(self, *args, **kw) - except BaseException as e: - raise - finally: - self.unlock_packages() - return wrapper - - -class DataVarsConfig(SimpleDataVars): - """ - Получить профиль и emerge config из chroot системы - """ - def __init__(self, chroot_path='/'): - from calculate.lib.variables import env - SimpleDataVars.__init__( - self, - env.VariableClMakeProfile(systemRoot=chroot_path), - env.VariableClEmergeConfig(systemRoot=chroot_path)) - -class LayeredIni(object): - _baseDir = None - objVar = None - - class IniPath(object): - IniName = "ini.env" - Grp = os.path.join('/var/lib/calculate/calculate-update', IniName) - System = os.path.join('/var/lib/calculate/', IniName) - Etc = os.path.join('/etc/calculate', IniName) - Local = os.path.join('/var/calculate', IniName) - Remote = os.path.join('/var/calculate/remote', IniName) - Hardcode = os.path.join(RepositoryPath.CalculateProfiles, IniName) - Work = System - - def __init__(self): - # комплексное содержимое ini.env с приоритетом меньше, чем у - # изменяемого - self.lowerIni = None - # комплексное содержимое ini.env с приоритетом выше, чем у - # изменяемого - self.upperIni = None - - def is_user(self): - return self.objVar and self.objVar.Get('cl_action') == "desktop" - - def get_profile_path(self, dv): - """ 
- Получить путь до системного профиля - :param dv: - :return: - """ - if not dv: - return "" - try: - make_profile = dv.Get('main.cl_make_profile') - if os.path.exists(make_profile): - profiledir = os.path.dirname(make_profile) - return os.path.join(profiledir, os.readlink(make_profile)) - return "" - except VariableError: - return "" - - def get_profiles_inienv(self, dv): - """ - Получить список ini.env находящихся в профиле с учётом их расположения - в parent файлах - :param dv: - :return: - """ - if dv: - profile_path = self.get_profile_path(dv) - if profile_path: - repos = RepositorySubstituting(dv, self._baseDir) - return list(searchProfile( - profile_path, self.IniPath.IniName, repository_sub=repos)) - return [] - - def read_other_ini(self): - """ - Прочитать все необходимые файлы env - :return: - """ - if not self.lowerIni: - inifiles = self.get_profiles_inienv(self.objVar) - inifiles.append(pathJoin(self._baseDir, self.IniPath.Grp)) - - try: - if (self.objVar and - (self.objVar.Get('core.ac_backup_restore') == 'on' or - self.objVar.Get( - 'core.ac_backup_service') == 'on')): - backup_path = self.objVar.Get('cl_backup_ini_env') - inifiles.append(backup_path) - except DataVarsError as e: - pass - #print "lower:", inifiles - self.lowerIni = ConfigParser(strict=False) - for inifn in inifiles: - try: - self.lowerIni.read(inifn, encoding="utf-8") - except ParsingError as e: - sys.stderr.write("%s\n" % str(e)) - sys.stderr.flush() - if not self.upperIni: - inifiles = [self.IniPath.Etc, - self.IniPath.Local, - self.IniPath.Remote] - if self.is_user(): - inifiles = [self.IniPath.Work] + inifiles - inifiles = [pathJoin(self._baseDir, x) for x in inifiles] - #print "upper:", inifiles - self.upperIni = ConfigParser(strict=False) - for inifn in inifiles: - try: - self.upperIni.read(inifn, encoding="utf-8") - except ParsingError as e: - sys.stderr.write("%s\n" % str(e)) - sys.stderr.flush() - - -class SystemIni(LayeredIni): - _inifile = LayeredIni.IniPath.Work - - 
@property - def inifile(self): - if self.objVar: - return pathJoin(self.objVar.Get('cl_chroot_path'), self._inifile) - else: - return self._inifile - - def is_user(self): - return False - - def __init__(self, dv=None): - self.objVar = dv - if dv: - self._baseDir = self.objVar.Get('cl_chroot_path') - else: - self._baseDir = '/' - super(SystemIni, self).__init__() - self.config = ConfigParser(strict=False) - try: - self.config.read(self.inifile, encoding="utf-8") - except ParsingError as e: - sys.stderr.write("%s\n" % str(e)) - sys.stderr.flush() - self.read_other_ini() - - def getVar(self, section, varname): - value = self.upperIni.get(section, varname, raw=True, fallback=None) - if value is None: - value = self.config.get(section, varname, raw=True, fallback=None) - if value is None: - value = self.lowerIni.get(section, varname, raw=True, fallback="") - return value - - def getKeys(self, section): - skeys = [] - for iniobj in (self.upperIni, self.config, self.lowerIni): - if iniobj.has_section(section): - skeys.extend(list(iniobj[section].keys())) - return list(sorted(list(set(skeys)))) - - def delVar(self, section, varname): - try: - self.config.remove_option(section, varname) - for section in filter(lambda x: not self.config[x], - self.config.sections()): - self.config.remove_section(section) - self.__write() - except NoSectionError: - pass - - def __write(self): - comment_block = "\n".join(takewhile(lambda x: x.startswith("#"), - readLinesFile(self.inifile))) - with open(self.inifile, 'wb') as f: - if comment_block: - f.write(comment_block) - f.write('\n\n') - self.config.write(f) - - def setVar(self, section, var_dict): - if not self.config.has_section(section): - self.config.add_section(section) - for k, v in var_dict.items(): - self.config.set(section, k, v) - self.__write() - - -class _shareTemplate(object): - """Общие аттрибуты для классов шаблонов""" - # Метка начала переменной - varStart = "#-" - # Метка конца переменной - varEnd = "-#" - _deltVarStart = 
len(varStart) - _deltVarEnd = len(varEnd) - objVar = None - _reVar = re.compile( - "%s(?:[a-z0-9_]+\.)?[a-zA-Z0-9_-]+%s" % (varStart, varEnd), re.M) - - def applyVarsTemplate(self, textTemplate, nameTemplate): - """ Заменяет переменные на их значения - """ - resS = self._reVar.search(textTemplate) - textTemplateTmp = textTemplate - while resS: - mark = textTemplateTmp[resS.start():resS.end()] - varName = mark[self._deltVarStart:-self._deltVarEnd] - try: - t = self.objVar.getInfo(varName).type - if "list" in t: - varValue = self.objVar.serialize(t, - self.objVar.Get(varName)) - else: - varValue = self.objVar.Get(varName) - if not varValue: - varValue = "" - else: - varValue = str(varValue) - except DataVarsError as e: - raise TemplatesError(_("error in template %s") % nameTemplate - + "\n" + str(e)) - textTemplateTmp = textTemplateTmp.replace(mark, varValue) - resS = self._reVar.search(textTemplateTmp) - return textTemplateTmp - - def getDataUser(self, groupsInfo=False): - """Получить информацию о пользователе""" - userName = self.objVar.Get("ur_login") - if not userName: - userName = "root" - import pwd - - try: - pwdObj = pwd.getpwnam(userName) - uid = pwdObj.pw_uid - gid = pwdObj.pw_gid - homeDir = self.objVar.Get('ur_home_path') - except Exception: - raise TemplatesError(_("User %s not found") % str(userName)) - if groupsInfo: - import grp - - try: - groupName = grp.getgrgid(gid).gr_name - except Exception: - raise TemplatesError(_("Group ID %s not found") % str(gid)) - groupsNames = map(lambda x: x.gr_name, - filter(lambda x: userName in x.gr_mem, - grp.getgrall())) - groupsNames = [groupName] + groupsNames - return uid, gid, homeDir, groupsNames - return uid, gid, homeDir - - -class _shareTermsFunction(object): - """Общие аттрибуты для классов _terms и templateFunctions""" - # Символы допустимые в скобках функции шаблона - _reFunctionArgvInSquareBrackets = ( - "a-zA-Z0-9_:;%@<>=\!\|\{\}\^\$\?\(\)\[\]\-" - "\n\+\,\*\/\.\'\"~\\\\ ") - _reFunctionArgvText = 
"[%s]" % _reFunctionArgvInSquareBrackets - # регулярное выражение для поиска функции в шаблоне - _reFunctionText = ("([a-zA-Z0-9\_-]+)\(((?:#-|-#|%s)+|)\)" % - _reFunctionArgvText) - - -class _terms(_error, _shareTermsFunction, _shareTemplate): - """Вычисление условий применяемых в шаблонах - - """ - # регулярное выражение для поиска функции в шаблоне - _reFunction = re.compile(_shareTermsFunction._reFunctionText) - # регулярное выражение для не версии - _re_not_Version = re.compile("[^0-9\.]") - # регулярное выражение не номер - _re_not_Number = re.compile("[^0-9]") - _suffixDict = {"pre": -2, "p": 0, "alpha": -4, "beta": -3, "rc": -1} - _lenSuffixDict = len(_suffixDict) - # Регулярное выражение для названия переменной - _reRightName = re.compile("^(?:[a-z_\-]+\.)?(?:[a-zA-Z0-9_\-]+)$") - # Регулярное выражение для сравниваемого значения - _reDenyValue = re.compile("[^0-9a-zA-Z_/\.,-]") - # латинские буквы в нижнем регистре - _letters = list(string.ascii_lowercase) - - # использует из других объектов - objVar = None - - def _splitVersion(self, strVersion): - """ - Split version. Version, addition letter, list suffixes with version, - revision. 
- Examples: - 3.0.0_beta2 - ("3.0.0_beta2","",[],"") - 3.0.0_beta2-r1 - ("3.0.0_beta2","",[],"r1") - 3.0.0_beta2a-r1 - ("3.0.0_beta2","a",[],"r1") - 3.0.0_beta2a_rc1-r1 - ("3.0.0_beta2","a",[("rc","1")],"r1") - 3.0.0_beta2a_rc1_p20111212-r1 - ("3.0.0_beta2","a",[("rc1","1"),("p","20111212")],"r1") - """ - # get revision from version - strWorkVersion, spl, rVersion = strVersion.rpartition("-") - if rVersion == strVersion: - strWorkVersion = rVersion - rVersion = "" - - suffixes = [] - # get suffixes from version - while "_" in strWorkVersion: - # 2.3_p45 ('2.3','_','p43') - # 2.3_rc4_p45 ('2.3_rc4','_','p43') - strWorkVersion, spl, suffix = strWorkVersion.rpartition("_") - suffSplList = filter(lambda x: suffix.startswith(x), - self._suffixDict.keys()) - if suffSplList: - suffSpl = suffSplList[0] - lenSuffSpl = len(suffSpl) - suffixVersion = suffix[lenSuffSpl:] - suffixes.append((suffSpl, suffixVersion)) - letters = "" - numberVersion = strWorkVersion - if numberVersion and numberVersion[-1:] in self._letters: - letters = numberVersion[-1:] - numberVersion = numberVersion[:-1] - return numberVersion, letters, suffixes, rVersion - - def _isVersion(self, strVersion): - """strVersion is not version - True""" - numberVersion, letters, suffixes, rVersion = \ - self._splitVersion(strVersion) - if not numberVersion.strip(): - return False - if self._re_not_Version.search(numberVersion): - return False - if letters and letters not in self._letters: - return False - for suffix, suffixVersion in suffixes: - if suffixVersion and self._re_not_Number.search(suffixVersion): - return False - if rVersion: - if rVersion[0] != "r" or len(rVersion) == 1: - return False - if self._re_not_Number.search(rVersion[1:]): - return False - return True - - def _isIntervalVersion(self, strVersion): - if "," in strVersion and strVersion.count(',') == 1: - version1, op, version2 = strVersion.partition(",") - return self._isVersion(version1) and self._isVersion(version2) - return False - - def 
_convertVers(self, verA, verB): - """Конвертирование номеров версий для корректного сравнения - """ - - def fillZero(elemA, elemB): - # elemA, elemB = elemA[], elemB[] - if len(elemA) > len(elemB): - maxElemB = len(elemB) - 1 - for i in range(len(elemA)): - if i > maxElemB: - elemB.append("0") - else: - maxElemA = len(elemA) - 1 - for i in range(len(elemB)): - if i > maxElemA: - elemA.append("0") - for i in range(len(elemB)): - lenA = len(elemA[i]) - lenB = len(elemB[i]) - if lenA == lenB: - pass - elif lenA > lenB: - res = lenA - lenB - for z in range(res): - elemB[i] = "0" + elemB[i] - elif lenB > lenA: - res = lenB - lenA - for z in range(res): - elemA[i] = "0" + elemA[i] - - def fillSuffix(elemA, elemB, sA, svA, sB, svB): - if str(sA) or str(sB): - svA, svB = map(lambda x: [x] if x else ['0'], (svA, svB)) - fillZero(svA, svB) - sA, sB = map(lambda x: x if x else 0, (sA, sB)) - elemA.append(str(self._lenSuffixDict + sA)) - elemA.extend(svA) - elemB.append(str(self._lenSuffixDict + sB)) - elemB.extend(svB) - - # Version, letters, suffix, suffixVersion, rVersion - vA, lA, ssA, rvA = self._splitVersion(verA) - vB, lB, ssB, rvB = self._splitVersion(verB) - elemA = vA.split(".") - elemB = vB.split(".") - fillZero(elemA, elemB) - if lA or lB: - lA, lB = map(lambda x: x if x else '0', (lA, lB)) - elemA.append(lA) - elemB.append(lB) - - # dereferencing suffix in suffixes list - ssA = map(lambda x: (self._suffixDict.get(x[0], 0), x[1]), ssA) - ssB = map(lambda x: (self._suffixDict.get(x[0], 0), x[1]), ssB) - for suffix, sufVer in reversed(ssA): - if ssB: - sB, svB = ssB.pop() - else: - sB, svB = "", "" - fillSuffix(elemA, elemB, suffix, sufVer, sB, svB) - while ssB: - sB, svB = ssB.pop() - fillSuffix(elemA, elemB, "", "", sB, svB) - - if rvA or rvB: - rvA, rvB = map(lambda x: [x[1:]], (rvA, rvB)) - fillZero(rvA, rvB) - elemA += rvA - elemB += rvB - return ".".join(elemA), ".".join(elemB) - - def _checkInterval(self, val, op, interval): - ver1, ver2 = interval.split(',') 
- - val1, ver1 = self._convertVers(val, ver1) - val2, ver2 = self._convertVers(val, ver2) - - comparator = { - '==': lambda a,b,c,d: a>=b and c <= d, - '!=': lambda a,b,c,d: a d, - '<=': lambda a,b,c,d: a>b and c <= d, - '<>': lambda a,b,c,d: a>b and c < d, - '=>': lambda a,b,c,d: a>=b and c < d - } - if op not in comparator: - raise TemplatesError(_("Wrong interval operator")) - - return comparator[op](val1, ver1, val2, ver2) - - def _equalTerm(self, term, textError, function=None): - """Вычисление логических выражений для условий - - Для корректной работы в классе который наследует этот класс - должен быть объявлен аттрибут self.objVar - (объект для работы с переменными) - function - функция для для обработки функций в заголовке блока - """ - rpl = lambda x: x.replace("@@", " ") - trm = {"&&": "@@and@@", "||": "@@or@@"} - dictRuleFunc = OrderedDict((("==", eq), ("!=", ne), - (">=", ge), - ("<=", le), - ("<>", ne), ("=>", ge), - (">", gt), ("<", lt), - )) - rule = dictRuleFunc.keys() - listEqual = [] - for k in trm.keys(): - if k in term: - term = term.replace(k, trm[k]) - trs = term.split("@@") - listSplitOr = [] - if "or" in trs: - lst = [] - for t in trs: - if t != "or": - lst.append(t) - else: - listSplitOr.append(lst) - lst = [] - if lst: - listSplitOr.append(lst) - else: - listSplitOr = [trs] - for trsAnd in listSplitOr: - listEqual = [] - for t in trsAnd: - def search_rule(t, rule, prefix=""): - for sepF in rule: - if sepF in t: - vals = list(t.partition(sepF)[::2]) - if vals[0].endswith("\\"): - return search_rule(vals[1], rule, - prefix="%s%s%s" % ( - prefix, vals[0], sepF)) - return True, sepF, ["%s%s" % (prefix, vals[0]), - vals[1]] - return False, None, [] - - flagRule, sepF, vals = search_rule(t, rule) - if flagRule: - # проверка на допустимость названия переменной - flagFunction = False - if not self._reRightName.search(vals[0]): - # проверка на допустимость функции - flagError = True - if callable(function): - searchFunct = 
self._reFunction.search(vals[0]) - if searchFunct: - flagError = False - flagFunction = True - if flagError: - self.setError( - "'%s'" % rpl(term) + " " + _("incorrect")) - self.setError(textError) - return False - # проверка на допустимость значения - try: - if "#-" in vals[1]: - vals[1] = self.applyVarsTemplate(vals[1], "") - vals[1] = function(vals[1]) - except TemplatesError: - pass - if self._reDenyValue.search(vals[1]): - self.setError("'%s'" % rpl(term) + " " + _("incorrect")) - self.setError(textError) - return False - flagIntTypeVar = None - if flagFunction and callable(function): - valVars = function("#-%s-#" % vals[0]) - if valVars is False: - self.setError( - "'%s'" % rpl(term) + " " + _("incorrect")) - self.setError(textError) - return False - if "load" == searchFunct.group(1) and \ - re.search("\(\s*num\s*,", vals[0]): - if valVars: - try: - valVars = int(valVars) - except ValueError: - self.setError("'%s'" % rpl(term) + " " + - _("incorrect")) - self.setError(textError) - return False - flagIntTypeVar = True - else: - flagIntTypeVar = False - else: - if valVars == "" and \ - (self._isVersion(vals[1]) or - self._isIntervalVersion(vals[1])): - valVars = "0" - elif vals[1] == "" and self._isVersion(valVars): - vals[1] = "0" - else: - try: - - valVars = self.objVar.Get(vals[0]) - varTable = self.objVar.Get('cl_used_action') - varTable.append((vals[0], vals[1])) - if not valVars: - valVars = "" - except DataVarsError as e: - raise TemplatesError("{header}\n{body}".format( - header=textError, body=str(e))) - # Номера версий для ini - flagNotIniFunct = True - # Два значения не пусты - flagNotEmptyVals = not (valVars == "" and vals[1] == "") - if flagFunction and flagNotEmptyVals and \ - searchFunct.group(1) == "ini": - # Проверка значения на версию - if self._isVersion(valVars) and \ - self._isVersion(vals[1]): - verFile, verVar = self._convertVers(vals[1], - valVars) - res = dictRuleFunc[sepF](verVar, verFile) - if res: - listEqual.append(True) - else: - 
listEqual.append(False) - break - flagNotIniFunct = False - if self._isVersion(valVars) and \ - self._isIntervalVersion(vals[1]): - res = False - try: - res = self._checkInterval( - valVars, sepF, vals[1]) - except TemplatesError: - self.setError("'%s'" % rpl(term) + " " + \ - _("incorrect")) - self.setError( - _("Wrong interval operator")) - if res: - listEqual.append(True) - else: - listEqual.append(False) - break - flagNotIniFunct = False - # Cравниваем номера версий - if flagNotIniFunct: - if flagNotEmptyVals and \ - ("_ver" in vals[0] or - (flagFunction and searchFunct.group( - 1) in - ("pkg", "merge", "mergepkg")) or - (flagFunction and searchFunct.group( - 1) == "load" and - re.search("\(\s*ver\s*,", - vals[0]))): - # Проверка значения на версию (или интервал) - if (not self._isVersion(vals[1]) and - not self._isIntervalVersion(vals[1])): - self.setError("'%s'" % rpl(term) + " " + \ - _("incorrect")) - self.setError( - _("This value is not a version")) - return False - # Проверка значения функции на версию - if not self._isVersion(valVars): - self.setError("'%s'" % rpl(term) + " " + \ - _("incorrect")) - self.setError( - _("The function value is not a version")) - return False - if self._isIntervalVersion(vals[1]): - res = False - try: - res = self._checkInterval( - valVars, sepF, vals[1]) - except TemplatesError: - self.setError("'%s'" % rpl(term) + " " + \ - _("incorrect")) - self.setError( - _("Wrong interval operator")) - else: - verFile, verVar = self._convertVers(vals[1], - valVars) - res = dictRuleFunc[sepF](verVar, verFile) - if res: - listEqual.append(True) - else: - listEqual.append(False) - break - else: - if flagIntTypeVar is None: - flagIntTypeVar = True - try: - valVars = int(valVars) - except (TypeError, ValueError): - flagIntTypeVar = False - if flagIntTypeVar: - if not vals[1].strip(): - vals[1] = 0 - try: - valFile = int(vals[1]) - valVar = valVars - res = dictRuleFunc[sepF](valVar, valFile) - if res: - listEqual.append(True) - else: - 
listEqual.append(False) - break - except ValueError: - flagIntTypeVar = False - if not flagIntTypeVar: - if sepF == "!=" or sepF == "==": - if not vals[1].strip(): - vals[1] = "" - valFile = vals[1] - valVar = valVars - res = dictRuleFunc[sepF](valVar, valFile) - if res: - listEqual.append(True) - else: - listEqual.append(False) - break - else: - if not flagNotEmptyVals: - listEqual.append(False) - break - else: - self.setError("'%s'" % rpl(term) + " " \ - + _("incorrect")) - self.setError(textError) - return False - else: - if t == "and": - if listEqual == [] or False in listEqual: - listEqual = [False] - break - else: - listEqual = [True] - else: - self.setError("'%s'" % rpl(term) + " " + _("incorrect")) - self.setError(textError) - return False - if not (listEqual == [] or False in listEqual): - break - if listEqual == [] or False in listEqual: - return False - return True - - def splitParLine(self, linePar): - """ - Split params line - """ - - def splitQuote(listPar, quoteSymbol): - listTerm = map(lambda x: x + quoteSymbol, ("=", ">", "<")) - flagQ = False - mass = [] - v = "" - for i in listPar: - if i.count(quoteSymbol) == 1: - if flagQ and i.endswith(quoteSymbol): - v = v + " " + i - mass.append(v) - v = "" - flagQ = False - elif filter(lambda x: x in i, listTerm): - flagQ = True - v = i - else: - mass.append(i) - elif flagQ: - v = v + " " + i - else: - mass.append(i) - foundPar = list(set(mass) - set(listPar)) - return not flagQ, filter(lambda x: not x in foundPar, - mass), foundPar - - listPar = re.split("\s+", linePar) - flagFoundQ = "'" in linePar - flagFoundQQ = '"' in linePar - if flagFoundQ and flagFoundQQ: - flagQ, listSplQPar, listFoundQPar = splitQuote(listPar, "'") - if flagQ: - flagQQ, listSplQQPar, listFoundQQPar = splitQuote(listSplQPar, - '"') - if flagQQ: - listPar = listSplQQPar + listFoundQPar + listFoundQQPar - elif flagFoundQQ: - flagQQ, listSplQQPar, listFoundQQPar = splitQuote(listPar, '"') - if flagQQ: - listPar = listSplQQPar + 
listFoundQQPar - elif flagFoundQ: - flagQ, listSplQPar, listFoundQPar = splitQuote(listPar, "'") - if flagQ: - listPar = listSplQPar + listFoundQPar - if flagFoundQ: - listQPar = [] - for par in listPar: - if par.endswith("'") and par.count("'") > 1: - listQPar.append(par[:-1].replace("='", "=")) - else: - listQPar.append(par) - listPar = listQPar - if flagFoundQQ: - listQQPar = [] - for par in listPar: - if par.endswith('"') and par.count('"') > 1: - listQQPar.append(par[:-1].replace('="', '=')) - else: - listQQPar.append(par) - listPar = listQQPar - return listPar - - -class HParams(object): - Format = "format" - DotAll = "dotall" - Multiline = "multiline" - Comment = "comment" - Append = "append" - Force = "force" - DConf = "dconf" - Convert = "convert" - Link = "link" - DirectoryLink = Link - Mirror = "mirror" - Symbolic = "symbolic" - ChangeMode = "chmod" - ChangeOwner = "chown" - Name = "name" - Path = "path" - Autoupdate = "autoupdate" - Protected = "protected" - RunNow = "run" - RunPost = "exec" - Merge = "merge" - PostMerge = "postmerge" - Module = "module" - Environ = "env" - RestartService = "restart" - StartService = "start" - StopService = "stop" - Rebuild = "rebuild" - Stretch = "stretch" - - ServiceControl = (StopService, StartService, RestartService) - - _Single = (DotAll, Multiline, Force, Mirror, Symbolic, Autoupdate, - Protected, Stretch) - - class AppendParams(object): - Join = "join" - Before = "before" - After = "after" - Replace = "replace" - Remove = "remove" - Skip = "skip" - Patch = "patch" - Clear = "clear" - LinkDirCompatible = (Replace, Join) - - class ActionType(object): - Merge = "merge" - Patch = "patch" - Profile = "profile" - - class ExecuteType(object): - Now = "run" - Post = "exec" - - class Formats(object): - Executable = ("diff", "dconf", "ldif", "contents", "sqlite", - "backgrounds") - Meta = ("backgrounds",) - Modificator = ("sqlite",) - - class OptDir(object): - Path = "path" - Skip = "skip" - Autoupdate = "autoupdate" - - 
@classmethod - def single(cls, it): - return [x for x in it if x in cls._Single] - - -class fileHeader(HParams, _terms): - """Обработка заголовков шаблонов и конфигурационных файлов - - """ - # Допустимые параметры заголовка - allowParam = ( - HParams.Format, HParams.DotAll, HParams.Multiline, HParams.Comment, - HParams.Append, HParams.Force, HParams.DConf, HParams.Convert, - HParams.Link, HParams.Mirror, HParams.Symbolic, HParams.Stretch, - HParams.ChangeMode, HParams.ChangeOwner, - HParams.Name, HParams.Path, HParams.Autoupdate, - HParams.Protected, HParams.RunNow, HParams.RunPost, - HParams.Merge, HParams.PostMerge, HParams.Module, HParams.Environ, - HParams.RestartService, HParams.StartService, HParams.StopService, - HParams.Rebuild - ) - - # Тип шаблона - fileType = "" - # Тип вставки шаблона - typeAppend = "" - # Возможные типы вставки шаблонов - _fileAppend = ( - HParams.AppendParams.Join, - HParams.AppendParams.Before, - HParams.AppendParams.After, - HParams.AppendParams.Replace, - HParams.AppendParams.Remove, - HParams.AppendParams.Skip, - HParams.AppendParams.Patch, - HParams.AppendParams.Clear) - - # Интерпретатор (#!/bin/bash) (#!/usr/bin/python) - execStr = "" - # Символ комментария - comment = False - # Выражение для поиска строки интерпретатора - reExecStr = re.compile("^(#!/.+[^#]\s)", re.M) - # условные операторы - terms = ('>', '<', '==', '!=', '>=', '<=', '<>', '=>') - # параметры без значения - listParNotVal = HParams.single(allowParam) - # Результат вычисления условия в заголовке - headerTerm = True - - def __init__(self, templateName, text, comment=None, fileType=False, - objVar=False, function=None, templateObj=None): - self.body = text - # Объект с переменными - self.objVar = objVar - # Параметры описанные в заголовке файла шаблона - self.params = {} - # некорректные параметры - incorrectParams = [] - used_params = [] - # Поиск строки запустка (#!/bin/bash и.т. 
д) - if comment or fileType != "bin": - reExecRes = self.reExecStr.search(self.body) - if reExecRes: - self.execStr = self.body[reExecRes.start():reExecRes.end()] - self.body = self.body[:reExecRes.start()] + \ - self.body[reExecRes.end():] - # Удаление Заголовка Calculate - if comment: - titleFirst = "Modified" - # В случае текста XML - if isinstance(comment, tuple) and len(comment) == 2: - reCalcHeader = \ - re.compile("\s*%s\s+%s.+\s+(.+\n)+%s\s?" \ - % (comment[0], titleFirst, comment[1]), - re.M | re.I) - reS = reCalcHeader.search(self.body) - if reS: - self.body = self.body[:reS.start()] + self.body[reS.end():] - else: - reCalcHeader = re.compile( - "\s*%s\-+\s+%s\s+%s.+\s+(%s.+\s+)+%s\-+\s?" \ - % (comment, comment, titleFirst, comment, comment), - re.M | re.I) - reS = reCalcHeader.search(self.body) - if reS: - self.body = self.body[reS.end():] - if fileType is not False: - if fileType == "bin": - self.params[HParams.Format] = fileType - self.fileType = self._getType() - self.typeAppend = self._getAppend() - else: - textLines = self.body.splitlines() - if textLines: - textLine = textLines[0] - rePar = re.compile( - "\s*#\s*calculate\s+\\\\?|\s*#\s*calculate\\\\?$", re.I) - reP = rePar.search(textLine) - if reP: - reLns = re.compile(r"\A([^\\\n]*\\\n)+[^\n]*\n*", re.M) - reLs = reLns.search(self.body) - if reLs: - reL = reLs - paramLine = self.body[reP.end():reLs.end()] - paramLine = paramLine.replace("\\", " ") - else: - reLn = re.compile("\n") - reL = reLn.search(self.body) - paramLine = textLine[reP.end():] - if reL: - self.body = self.body[reL.end():] - else: - self.body = "" - paramList = self.splitParLine(paramLine) - if paramList: - for i in paramList: - for term in self.terms: - if term in i: - if self.headerTerm: - errorMsg = ( - _("Incorrect template") + - _(": ") + templateName + "\n" + - _("template header not valid") + - _(": ") + i) - if function: - rezTerm = self._equalTerm( - i, errorMsg, function) - else: - rezTerm = self._equalTerm( - i, 
errorMsg) - if not rezTerm: - self.headerTerm = False - break - else: - par = i.split("=") - if len(par) == 1: - if i in self.listParNotVal: - self.params[i] = "True" - used_params.append(i) - else: - if i.strip(): - incorrectParams = {i} - elif len(par) == 2: - par[1] = self.applyVarsTemplate( - par[1], "") - par[1] = templateObj.applyFuncTemplate( - par[1], templateName) - self.params[par[0]] = par[1] - used_params.append(par[0]) - if par[0] == HParams.Environ: - try: - importlib.import_module( - "calculate.%s.variables" - % par[1]) - except (ImportError, - AttributeError): - self.headerTerm = False - self.comment = self._getComment() - self.fileType = self._getType() - typeAppend = self._getAppend() - if typeAppend: - self.typeAppend = typeAppend - else: - self.headerTerm = False - self.setError( - _("incorrect header parameter: '%s'") - % "%s=%s" % (HParams.Append, - self.params[HParams.Append])) - if any(x in self.params for x in (HParams.RunPost, - HParams.RunNow)): - if HParams.RunPost in self.params: - self.execStr = "#!%s\n" % self.params[HParams.RunPost] - if HParams.RunNow in self.params: - self.execStr = "#!%s\n" % self.params[HParams.RunNow] - if "python" in self.execStr: - self.execStr += "# -*- coding: utf-8 -*-\n" - - double_params = list(set([x for x in used_params - if used_params.count(x) > 1])) - if double_params: - self.headerTerm = False - self.setError(_("redefine header parameter: '%s'") - % " ".join(double_params)) - - if not incorrectParams and self.params: - incorrectParams = set(self.params.keys()) - set(self.allowParam) - if incorrectParams: - self.headerTerm = False - self.setError(_("incorrect header parameter: '%s'") \ - % " ".join(list(incorrectParams))) - - def _getType(self): - """Выдать тип файла""" - return self.params.get(HParams.Format, "raw") - - def _getAppend(self): - """Выдать тип добавления файла""" - if HParams.Append in self.params: - if self.params[HParams.Append] in self._fileAppend: - return 
self.params[HParams.Append] - else: - return False - else: - if self.fileType != "raw" and self.fileType != "bin" and \ - self.fileType != "": - if (HParams.Format in self.params and - self.params[HParams.Format] in - chain(("patch",), HParams.Formats.Executable)): - self.params[HParams.Append] = HParams.AppendParams.Patch - else: - self.params[HParams.Append] = HParams.AppendParams.Join - else: - self.params[HParams.Append] = HParams.AppendParams.Replace - return self.params[HParams.Append] - - def _getComment(self): - """Выдать символ комментария файла""" - if HParams.Comment in self.params: - if self.params[HParams.Comment] in ("xml", "XML"): - return "" - else: - return self.params[HParams.Comment] - else: - return False - - -class dirHeader(HParams, _terms): - """Обработка заголовков шаблонов директорий - - """ - # Допустимые параметры заголовка - allowParam = ( - HParams.Append, HParams.ChangeMode, - HParams.ChangeOwner, HParams.Name, - HParams.Path, HParams.Autoupdate, - HParams.Module, HParams.Environ, - HParams.Merge, HParams.PostMerge, - HParams.Rebuild, - HParams.RestartService, HParams.StartService, HParams.StopService, - HParams.DirectoryLink - ) - - # Тип вставки шаблона - typeAppend = "" - - # Возможные типы вставки шаблонов - _fileAppend = ( - HParams.AppendParams.Join, - HParams.AppendParams.Remove, - HParams.AppendParams.Skip, - HParams.AppendParams.Clear, - HParams.AppendParams.Replace - ) - - # условные операторы - terms = ('>', '<', '==', '!=', '>=', '<=', '<>', '=>') - - # параметры без значения - listParNotVal = (HParams.Symbolic, HParams.Force, - HParams.Autoupdate) - - # Результат вычисления условия в заголовке - headerTerm = True - - def __init__(self, templateName, text, objVar=False, function=None, - templateObj=None): - self.body = text - # Объект с переменными - self.objVar = objVar - # Параметры описанные в заголовке файла шаблона - self.params = {} - # некорректные параметры - incorrectParams = set([]) - used_params = [] - - 
textLines = text.splitlines() - flagErrorBody = False - if textLines: - textLine = textLines[0] - rePar = re.compile( - "\s*#\s*calculate\s+\\\\?|\s*#\s*calculate\\\\?$", re.I) - reP = rePar.search(textLine) - if reP: - reLns = re.compile(r"\A([^\\\n]*\\\n)+[^\n]*\n*", re.M) - reLs = reLns.search(text) - if reLs: - reL = reLs - paramLine = text[reP.end():reLs.end()] - paramLine = paramLine.replace("\\", " ") - else: - reLn = re.compile("\n") - reL = reLn.search(text) - paramLine = textLine[reP.end():] - if reL: - self.body = text[reL.end():] - else: - self.body = "" - if self.body.strip(): - self.headerTerm = False - self.setError(_("incorrect text in the template: '%s'") - % self.body) - flagErrorBody = True - if not flagErrorBody: - paramList = self.splitParLine(paramLine) - if paramList: - for i in paramList: - for term in self.terms: - if term in i: - if self.headerTerm: - errorMsg = ( - _("Incorrect template") + - _(": ") + templateName + "\n" + - _("template header not valid") - + _(": ") + i) - if function: - rezTerm = self._equalTerm( - i, errorMsg, function) - else: - rezTerm = self._equalTerm( - i, errorMsg) - if not rezTerm: - self.headerTerm = False - break - else: - par = i.split("=") - if len(par) == 1: - if i in self.listParNotVal: - self.params[i] = "True" - used_params.append(i) - else: - if i.strip(): - incorrectParams = {i} - elif len(par) == 2: - # self.params[par[0]] = par[1] - par[1] = self.applyVarsTemplate( - par[1], "") - par[1] = templateObj.applyFuncTemplate( - par[1], templateName) - used_params.append(par[0]) - self.params[par[0]] = par[1] - if par[0] == HParams.Environ: - try: - importlib.import_module( - "calculate.%s.variables" % - par[1]) - except (ImportError, AttributeError): - self.headerTerm = False - self.objVar.defaultModule = \ - self.params[HParams.Environ] - typeAppend = self._getAppend() - if typeAppend: - self.typeAppend = typeAppend - else: - self.headerTerm = False - self.setError(_("incorrect header parameter: '%s'") \ 
- % "%s=%s" % ( - HParams.Append, - self.params[HParams.Append])) - - double_params = list(set([x for x in used_params - if used_params.count(x) > 1])) - if double_params: - self.headerTerm = False - self.setError(_("redefine header parameter: '%s'") - % " ".join(double_params)) - - if not flagErrorBody: - if not incorrectParams: - incorrectParams = set(self.params.keys()) - set(self.allowParam) - if incorrectParams: - self.headerTerm = False - self.setError(_("incorrect header parameter: '%s'") \ - % " ".join(list(incorrectParams))) - - def _getAppend(self): - """Выдать тип добавления директории""" - if HParams.Append in self.params: - if (self.params[HParams.Append] == HParams.AppendParams.Replace and - HParams.DirectoryLink not in self.params): - return False - if self.params[HParams.Append] in self._fileAppend: - return self.params[HParams.Append] - else: - return False - else: - return HParams.AppendParams.Join - - -class blocText(object): - """Разбиваем текст на блоки""" - - def splitTxtToBloc(self, text, openTxtBloc, closeTxtBloc, - commentTxtBloc, sepField): - """Делит текст на блоки (без заголовков) - - openTxtBloc - регулярное выражение для начала блока - closeTxtBloc - регулярное выражение для конца блока - commentTxtBloc - регулярное выражение - комментарий - возвращает блоки текста - """ - blocs = [] - level = 0 - # Нахождение нескольких блоков в строке - # разделители линий, разделителями могут быть ("","\n") - sepsLines = [] - # линии - txtLines = [] - # Исходные строки - txtLinesSrc = text.splitlines() - for line in txtLinesSrc: - lineTmpA = line - closeBl = False - txtLinesTmp = [] - commentSpl = commentTxtBloc.split(line) - textLine = None - commentLine = None - if commentSpl[0].strip(): - closeBl = True - if len(commentSpl) > 1: - commentBl = commentTxtBloc.search(line) - textLine = commentSpl[0] - commentLine = line[commentBl.start(0):] - lineTmpA = textLine - - while closeBl: - closeBl = sepField.search(lineTmpA) - if closeBl: - lineTmpB = 
lineTmpA[closeBl.end(0):] - txtLinesTmp.append(lineTmpA[:closeBl.end(0)]) - lineTmpA = lineTmpB - if lineTmpA.strip(): - txtLinesTmp.append(lineTmpA) - # Если есть значение и комментарий в строке - if textLine is not None: - for l in txtLinesTmp: - txtLines.append(l) - sepsLines.append("") - if not txtLinesTmp: - txtLines.append(textLine) - sepsLines.append("") - txtLines.append(commentLine) - sepsLines.append("\n") - # Если есть несколько блоков в строке - elif len(txtLinesTmp) > 1 and txtLinesTmp[1].strip(): - lenTmpLines = len(txtLinesTmp) - for l in range(lenTmpLines): - txtLines.append(txtLinesTmp[l]) - if l == lenTmpLines - 1: - sepsLines.append("\n") - else: - sepsLines.append("") - # Cтрока не преобразована - else: - txtLines.append(line) - sepsLines.append("\n") - - # разбивание на блоки - z = 0 - bl = "" - for i in txtLines: - if commentTxtBloc.split(i)[0].strip() and openTxtBloc.search(i): - level += len(openTxtBloc.split(i)) - 1 - if commentTxtBloc.split(i)[0].strip() and closeTxtBloc.search(i): - level -= len(closeTxtBloc.split(i)) - 1 - bl += i + sepsLines[z] - if level == 0: - if bl: - blocs.append(bl) - bl = "" - z += 1 - # cоздание блоков с элементами не входящими в блоки - realBlocs = [] - z = 0 - bl = "" - for i in blocs: - txtLines = i.splitlines() - if len(txtLines) > 0: - line = txtLines[0] - else: - line = i - if commentTxtBloc.split(i)[0].strip() and openTxtBloc.search(line): - if bl: - realBlocs.append(bl) - bl = "" - realBlocs.append(i) - else: - bl += i - z += 1 - if bl: - realBlocs.append(bl) - if level == 0: - if text and text[-1] != "\n": - tmpBlocs = realBlocs.pop() - tmpBlocs = tmpBlocs[:-1] - realBlocs.append(tmpBlocs) - return realBlocs - else: - return [] - - def findArea(self, text, reTextHeader, reTextArea, numGroupArea=0): - """ Делит текст на области (с заголовками) - - reTextHeader - регулярное выражение для заголовка области - reTextArea - регулярное выражение для всей области - numGroupArea - номер групы результата поиска 
по регулярному выражению - по всей области - возвращает два списка: первый - заголовки, второй - тела областей без - заголоков - """ - # Заголовки областей - headersArea = [] - # Тексты областей без заголовков - textBodyArea = [] - r = reTextArea.search(text) - if not r: - headersArea.append("") - textBodyArea.append(text) - return headersArea, textBodyArea - - txtWr = text - while r: - textArea = r.group(numGroupArea) - txtSpl = txtWr.split(textArea) - area = txtSpl[0] - txtWr = txtSpl[1] - if area: - headersArea.append("") - textBodyArea.append(area) - res = reTextHeader.search(textArea) - header = textArea[:res.end()] - body = textArea[res.end():] - - headersArea.append(header) - textBodyArea.append(body) - - if txtWr: - r = reTextArea.search(txtWr) - else: - r = False - if txtWr: - headersArea.append("") - textBodyArea.append(txtWr) - return headersArea, textBodyArea - - def findBloc(self, text, captionTxtBloc, bodyTxtBloc): - """ Делит текст на блоки (с заголовками) - - captionTxtBloc - регулярное выражение для заголовка блока - bodyTxtBloc - регулярное выражение для тела блока - возвращает два списка: первый - заголовки, второй - тела блоков - """ - # Заголовки блоков - headersTxt = [] - # Тексты блоков - blocsTxt = [] - r = captionTxtBloc.search(text) - if r: - headersTxt.append(r.group(0)) - txtSpl = text.partition(r.group(0)) - blocTxt = txtSpl[0] - txtWr = txtSpl[2] - rb = bodyTxtBloc.search(blocTxt) - if not blocTxt: - blocsTxt.append(blocTxt) - if rb: - blocsTxt.append(rb.group(0)) - while r: - r = captionTxtBloc.search(txtWr) - if r: - headersTxt.append(r.group(0)) - txtSpl = txtWr.partition(r.group(0)) - blocTxt = txtSpl[0] - txtWr = txtSpl[2] - rb = bodyTxtBloc.search(blocTxt) - if rb: - blocsTxt.append(rb.group(0)) - else: - blocsTxt.append(txtWr) - if headersTxt and blocsTxt: - if len(headersTxt) > len(blocsTxt): - blocsTxt.insert(0, "") - elif len(headersTxt) < len(blocsTxt): - headersTxt.insert(0, "") - if len(headersTxt) != len(blocsTxt): - 
return False - return headersTxt, blocsTxt - else: - return False - - -class _file(_error): - """ - Класс для работы с файлами - """ - configMode = None - - def printWARNING(self, s): - raise NotImplemented() - - def __init__(self): - # Имя файла конфигурационного файла - self.nameFileConfig = "" - self.nameFileConfigOrig = "" - # Содержимое конфигурационного файла - self.textConfig = "" - # Имя файла шаблона - self.nameFileTemplate = "" - # Содержимое шаблона - self.textTemplate = "" - # Дескриптор файла шаблона - self.F_TEMPL = None - # Дескриптор файла конфигурационного файла - self.F_CONF = None - # тип запускаемого шаблона - self.executeType = None - # список скриптов на запуск - self.queueExecute = [] - - def saveConfFile(self): - """Записать конфигурацию""" - if not self.textConfig: - self.textConfig = self.textTemplate - if self.F_CONF: - try: - self.F_CONF.truncate(0) - self.F_CONF.seek(0) - self.F_CONF.write(self.textConfig) - except IOError: - self.setError(_("unable to open the file:") - + self.nameFileConfig) - return False - self.F_CONF.flush() - return True - elif self.executeType == HParams.ExecuteType.Post: - processor = self.textConfig.partition("\n")[0] - if processor.startswith("#!"): - self.queueExecute.append((processor[2:], self.textConfig, - self.nameFileTemplate)) - else: - self.setError(_("unable to execute '%s'") - + self.textConfig) - return False - - def openTemplFile(self, nameFileTemplate): - """Открыть файл шаблона""" - try: - F_TEMPL = open(nameFileTemplate, "r") - except IOError: - self.setError(_("unable to open the file:") - + nameFileTemplate) - return False - return F_TEMPL - - def closeTemplFile(self): - if self.F_TEMPL: - self.F_TEMPL.close() - self.F_TEMPL = None - - def __closeOldFile(self): - if self.F_CONF: - self.F_CONF.close() - self.F_CONF = None - - def __openConfFile(self, nameFileConfig): - """Отктрыть конфигурационный файл""" - try: - if os.path.islink(nameFileConfig): - # если ссылка то удаляем её - 
os.unlink(nameFileConfig) - F_CONF = open(nameFileConfig, "r+") - except (IOError, OSError): - try: - if os.path.isdir(nameFileConfig): - self.printWARNING(_("unable to open the directory as file:") - + nameFileConfig) - return False - F_CONF = open(nameFileConfig, "w+") - except (IOError, OSError): - self.setError(_("unable to open the file:") - + nameFileConfig) - return False - return F_CONF - - def openFiles(self, nameFileTemplate, nameFileConfig, typeFormat=None, - newBuffer=None): - """Открывает шаблон и конфигурационный файл""" - self.textConfig = "" - self.textTemplate = "" - self.closeFiles() - self.F_TEMPL = None - self.F_CONF = None - self.nameFileConfig = os.path.abspath(nameFileConfig) - self.nameFileTemplate = os.path.abspath(nameFileTemplate) - self.F_TEMPL = self.openTemplFile(self.nameFileTemplate) - copy_stat = not os.path.exists(self.nameFileConfig) - if (not self.executeType and - typeFormat not in HParams.Formats.Executable): - self.F_CONF = self.__openConfFile(self.nameFileConfig) - if self.F_TEMPL and self.F_CONF: - self.textTemplate = self.F_TEMPL.read() - self.closeTemplFile() - if self.configMode == T_NEWCFG: - origConfigName = re.sub(r'/._cfg\d{4}_([^/]+)$', '/\\1', - self.nameFileConfig) - if newBuffer is None: - self.textConfig = readFile(origConfigName) - if copy_stat: - self.copy_mod_own(origConfigName, self.nameFileConfig) - else: - self.textConfig = newBuffer - else: - self.textConfig = self.F_CONF.read() - - def copy_mod_own(self, source, target): - try: - statdata = os.stat(source) - statdata_old = os.stat(target) - if statdata.st_mode != statdata_old.st_mode: - os.chmod(target, statdata.st_mode) - if (statdata.st_uid != statdata_old.st_uid or - statdata.st_gid != statdata_old.st_gid): - os.chown(target, statdata.st_uid, statdata.st_gid) - except OSError: - pass - - def __del__(self): - self.closeFiles() - - def closeFiles(self): - """Закрытие файлов""" - self.closeTemplFile() - self.__closeOldFile() - - -class utfBin(object): - 
"""Класс для преобразования в utf-8 - - преобразование бинарного или смеси бинарного и utf-8 кода в utf-8 и - обратное преобразование - методы класса encode и decode - """ - - def _retUTF(self, char): - byte = ord(char) - if byte <= 127: - return '_ch_', 1 - elif byte <= 191: - return '_nb_', 1 - elif byte <= 223: - return '_fb_', 2 - elif byte <= 239: - return '_fb_', 3 - elif byte <= 247: - return '_fb_', 4 - else: - return '_er_', 1 - - def _sumbUtf(self, symbols, lenTail): - if not symbols: - return False, 0 - lenSymb = len(symbols) - if lenSymb >= 4: - l = 4 - elif lenSymb >= 3: - l = 3 - elif lenSymb >= 2: - l = 2 - else: - if symbols[0] == '_ch_': - return True, 1 - else: - return False, 1 - result = False - i_ = 0 - for i in range(l): - i_ = i - if i == 0 and symbols[i] != '_fb_': - break - elif i > 0 and symbols[i] != '_nb_': - break - if lenTail > 1 and lenTail != i_: - return False, 1 - if i_ > 0: - result = True - return result, i_ - - def _intToChar(self, x): - he = hex(x)[2:] - ret = None - exec ("ret = '\\x%s'" % he) - return ret - - def _hexToChar(self, he): - ret = None - exec ("ret = '\\x%s'" % he) - return ret - - def encode(self, text): - """Кодирует смешанный формат в UTF-8""" - ind = 0 - utf = [] - lenUtf = [] - indErr = [] - i = 0 - for ch in text: - r, l = self._retUTF(ch) - utf.append(r) - lenUtf.append(l) - i += 1 - while 1: - if utf[ind] == '_fb_': - res, l = self._sumbUtf(utf[ind:], lenUtf[ind]) - if res is False: - indErr.append(ind) - if l > 0: - ind += l - if ind >= len(utf): - break - else: - if utf[ind] != '_ch_': - indErr.append(ind) - ind += 1 - if ind >= len(utf): - break - if indErr: - lenIndErr = len(indErr) - block = [] - blocks = [] - if lenIndErr > 1: - i = 1 - while 1: - if i == 1: - block.append(indErr[i - 1]) - if indErr[i] - indErr[i - 1] == 1: - block.append(indErr[i]) - else: - if block: - blocks.append(block) - block = [indErr[i]] - i += 1 - if i >= lenIndErr: - break - else: - block.append(indErr[0]) - if block: - 
blocks.append(block) - listErr = [] - for block in blocks: - string = "" - last_elem = None - for elem in block: - string += hex(ord(text[elem]))[-2:] - last_elem = elem - if last_elem is not None: - listErr.append((block[0], "__hex__?%s?__hex__" % string, - last_elem)) - textOut = text - deltaInd = 0 - for erEl in listErr: - startInd = erEl[0] + deltaInd - endInd = erEl[2] + 1 + deltaInd - textOut = textOut[:startInd] + erEl[1] + textOut[endInd:] - deltaInd += len(erEl[1]) - (erEl[2] - erEl[0] + 1) - # if i == 1: - # break - # i += 1 - return textOut - - def decode(self, text): - """Декодирует UTF-8 в смешанный формат""" - varStart = "__hex__\?" - varEnd = "\?__hex__" - # -1 Это экранирование '?' которое тоже считается - deltVarStart = len(varStart) - 1 - deltVarEnd = len(varEnd) - 1 - reVar = re.compile("%s[a-f0-9]+%s" % (varStart, varEnd), re.M) - resS = reVar.search(text) - textTemplateTmp = text - while resS: - mark = textTemplateTmp[resS.start():resS.end()] - hexString = mark[deltVarStart:-deltVarEnd] - i = 0 - stringInsert = "" - hexCode = "" - for ch in hexString: - if i >= 1: - hexCode += ch - stringInsert += self._hexToChar(hexCode) - hexCode = "" - i = 0 - else: - hexCode += ch - i += 1 - textTemplateTmp = textTemplateTmp.replace(mark, stringInsert) - resS = reVar.search(textTemplateTmp) - return textTemplateTmp - - -class TemplateFormat(_error): - """ - Формат шаблон - """ - def __init__(self, text, parent=None): - self.text = text - self.changed_files = [] - self.set_parent(parent) - self.prepare() - - def prepare(self): - pass - - def setError(self, error): - super(TemplateFormat, self).setError(error) - if hasattr(self.parent, "bHasError"): - self.parent.bHasError = True - - def set_parent(self, parent): - self.parent = parent - - @property - def template_name(self): - return self.parent.nameFileTemplate - - def getIni(self, key, nameFile=""): - return self.parent.functObj.getIni(key, nameFile) - - def setIni(self, key, value, nameFile=""): - return 
self.parent.functObj.setIni(key, value, nameFile) - - @property - def objVar(self): - return self.parent.objVar - - -class FormatFactory(object): - """ - Фабрика классов форматов шаблонов - """ - # Импортированные классы поддерживаемых форматов шаблонов - importFormats = {} - - newObjProt = {} - - def __init__(self, parent): - self.parent = parent - - def createNewClass(self, name, bases, attrs=None): - raise NotImplemented() - - def getClassObj(self, nameClassTemplate): - """Создает класс шаблона по имени""" - if nameClassTemplate in self.importFormats: - classFormat = self.importFormats[nameClassTemplate] - else: - try: - classFormat = getattr(__import__("calculate.lib.format.%s" % - nameClassTemplate, - globals(), locals(), - [nameClassTemplate]), - nameClassTemplate) - except (ImportError, AttributeError): - # Создаем объект из self.newObjProt с помощью - # метаклассов - if nameClassTemplate in self.newObjProt: - # Прототип класса - nameProt = self.newObjProt[nameClassTemplate] - if nameProt in self.importFormats: - classProt = self.importFormats[nameProt] - else: - try: - classProt = getattr( - __import__("calculate.lib.format.%s" % nameProt, - globals(), locals(), - [nameProt]), - nameProt) - except (ImportError, AttributeError): - return False - self.importFormats[nameProt] = classProt - classFormat = self.createNewClass(nameClassTemplate, - (classProt,)) - else: - return False - self.importFormats[nameClassTemplate] = classFormat - return classFormat - - def createObject(self, formatTemplate, textTemplate): - """Создание объекта формата шаблона. 
- - Объект создается на основании формата шаблона и текста шаблона""" - classFormat = self.getClassObj(formatTemplate) - if callable(classFormat): - obj = classFormat(textTemplate, self.parent) - return obj - else: - return False - -class TemplateFunctionError(Exception): - pass - -def template_function(lastall=False): - """ - Подготовить метод для использования в качестве функции - - lastall: поволяет поделить строку аргументов на указанное число, - при этом последний аргумент получит все данные, которые - могут содержать разделитель параметров - """ - def decor(f): - @wraps(f) - def wrapper(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - def pretty_num(num): - if num > 1: - return _("%d argumens") % num - else: - return _("1 argument") - funArgv = funArgv.strip() - # поиск всех служебных переменных - spec_vars = [x for x in f.__code__.co_varnames[:f.__code__.co_argcount] - if x in ("self", "nameTemp", "localVars")] - varnum = f.__code__.co_argcount - len(spec_vars) - defnum = len(f.__defaults__) if f.__defaults__ else 0 - # число обязательных параметров - reqnum = varnum - defnum - if funArgv: - terms = map(lambda x: x.strip(), funArgv.split(",")) - else: - terms = [] - - if not varnum and len(terms) != varnum: - raise self.raiseErrTemplate(_("Function takes no arguments")) - if len(terms) < reqnum: - if defnum: - raise self.raiseErrTemplate( - _("Function takes at least {num}").format( - num=pretty_num(reqnum))) - else: - raise self.raiseErrTemplate( - _("Function takes exactly {num}").format( - num=pretty_num(reqnum))) - if not lastall: - if len(terms) > varnum: - if defnum: - raise self.raiseErrTemplate( - _("Function takes at most {num}").format( - num=pretty_num(varnum))) - else: - raise self.raiseErrTemplate( - _("Function takes exactly {num}").format( - num=pretty_num(varnum))) - else: - terms = terms[:varnum-1] + [",".join(terms[varnum-1:])] - args = [self] - if "nameTemp" in spec_vars: - args.append(nameTemp) - if "localVars" in spec_vars: 
- args.append(localVars) - args.extend(terms) - try: - replace = f(*args) - except TemplateFunctionError as e: - raise self.raiseErrTemplate(str(e)) - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - return wrapper - return decor - -class templateFunction(_error, _warning, _shareTemplate, _shareTermsFunction, - LayeredIni): - """Класс для функций шаблонов""" - # Словарь установленных программ {"имя программы":[версии]} - installProg = {} - - # Cписок просканированных категорий установленных программ - installCategory = [] - - # Флаг сканирования всех установленных программ - flagAllPkgScan = False - - # Список названий функций шаблона - namesTemplateFunction = [] - - # Словарь {название функции шаблона: функция шаблона, ...} - templateFunction = {} - - # Регулярное выражение для сложения - sNum = re.compile("\-[^\-\+]+|[^\-\+]+") - - # Регулярное выражение для умножениея и деления - sMD = re.compile("[^\-\+\*/]+") - - # директория установленных программ - _basePkgDir = "/var/db/pkg" - basePkgDir = _basePkgDir - - # кэш для проверки наличия пакета в портежах - cachePortdir = {} - - # стек глобальных переменных - stackGlobalVars = [] - - # регулярное выражение для поиска версии - reFindVer = re.compile("(?<=-)(?:\d+)(?:(?:\.\d+)*)" - "(?:[a-z]?)(?:(?:_(?:pre|p|beta|alpha|rc)\d*)*)" - "(?:-r\d+)?$") - - reEmptyLoad = re.compile("^\s*$|^\s*;|^\s*#") - - # Имя обрабатываемого шаблона - nameTemplate = "" - - # Текст функции шаблона - functText = "" - - # regular for discard sort number and version - reData = re.compile(r"^(?:\d+-)?(.+?)(?:-(?:|always|\d+|\d(?:\d|\.|pre|_" - "|-always|alpha|beta|pre|rc|[a-z][^a-z])*[a-z]?)(?:" - "-r\d+)?)?$", re.S) - - currentAction = HParams.ActionType.Merge - - def printSUCCESS(self, s): - raise NotImplemented() - - def printWARNING(self, s): - raise NotImplemented() - - def printERROR(self, s): - raise NotImplemented() - - @classmethod - def 
get_pkgname_by_filename(cls, fn): - fileName = os.path.split(fn)[1] - if fileName == '.calculate_directory': - parentDir = os.path.dirname(fn) - parentDir, pkgName = os.path.split(parentDir) - else: - parentDir, pkgName = os.path.split(fn) - category = os.path.split(parentDir)[1] - # reg for discard version and sort number - pkgName = cls.reData.search(pkgName).group(1) - category = cls.reData.search(category).group(1) - return "%s/%s" % (category, pkgName) - - currentBelong = "" - currentBelongSlot = "" - alreadyInformed = [] - - def __init__(self, objVar): - # Если не определен словарь функций шаблона - # import services api - LayeredIni.__init__(self) - if not self.templateFunction: - # префикс функций шаблона - pref = "func" - # cписок [(название функции, функция), ...] - dictFunc = filter(lambda x: x[0].startswith(pref) and \ - hasattr(x[1], "__call__"), - self.__class__.__dict__.items()) - # удаляем у названия функции префикс и переводим остаток названия - # в нижний регистр - dictFunc = map(lambda x: (x[0][len(pref):].lower(), x[1]), dictFunc) - # Формируем словарь функций шаблона - self.templateFunction.update(dictFunc) - # Формируем список функций шаблона - for nameFunction in self.templateFunction.keys(): - self.namesTemplateFunction.append(nameFunction) - # Объект хранения переменных - self.objVar = objVar - self._reFunc = re.compile("%s%s%s" - % (self.varStart, self._reFunctionText, - self.varEnd), re.M) - self._rePrePattern = "%s.{%%d,}?%s" % (self.varStart, self.varEnd) - self._rePreFuncPattern = "%s.{%%d,}?\)%s" % (self.varStart, self.varEnd) - # Аттрибуты для функции шаблона ini() - # Первоначальный словарь переменных для ini() - self.prevDictIni = {} - # Текущий словарь переменных для ini() - self.currDictIni = {} - # Время модификации конфигурационного файла для ini() - self.timeIni = -1 - self.recalculateBaseDir() - - # Словарь времен модификации env файлов - self.timeConfigsIni = {} - # Словарь хранения переменых полученных функцией env() из env 
файлов - self.valuesVarEnv = {} - # Словарь хранения опций для функции info() - self.optionsInfo = {} - # файл параметров сервисов - envFile = self.objVar.Get("cl_env_server_path") - # объект конвертирования из старого remote env файла - self.convObj = False - if os.access(envFile, os.R_OK): - self.convObj = False - elif os.access("/var/calculate/remote/calculate.env", os.R_OK): - from .convertenv import convertEnv - - self.convObj = convertEnv() - - - def recalculateBaseDir(self): - """Recalculate basedir and homedir""" - # Директория другой системы - self._chrootDir = self.objVar.Get("cl_chroot_path") - # Изменение директории к базе пакетов - self.basePkgDir = pathJoin(self._chrootDir, self._basePkgDir) - self.basePkgDir = os.path.normpath(self.basePkgDir) - # Базовая директория переноса шаблонов "/mnt/calculate" или "/" и.т.д - self._baseDir = pathJoin(self._chrootDir, - self.objVar.Get("cl_root_path")) - self._baseDir = os.path.normpath(self._baseDir) - self.uid, self.gid, self.homeDir, self.groups = \ - self.getDataUser(groupsInfo=True) - # Домашняя директория, плюс базовая директория - self.homeDir = pathJoin(self._baseDir, self.homeDir) - # path to configuration file for ini() function - # if action is desktop configuration, then path in user directory - # else config file place in /etc/calculate - if self.objVar.Get('cl_action') == "desktop": - self.pathConfigIni = os.path.join(self.homeDir, ".calculate") - self.fileConfigIni = os.path.join(self.pathConfigIni, - LayeredIni.IniPath.IniName) - self.modeConfigIni = 0o640 - else: - self.fileConfigIni = pathJoin(self._chrootDir, - LayeredIni.IniPath.Work) - self.pathConfigIni = os.path.dirname(self.fileConfigIni) - self.modeConfigIni = 0o644 - - def equalTerm(self, term, localVars): - """Метод для вычисления выражения""" - terms = self.sNum.findall(term) - if terms: - strNumers = [] - for n in terms: - strNum = n.strip() - if "*" in strNum or "/" in strNum: - strNum = self.multAndDiv(strNum, localVars) - num = 0 
- try: - num = int(strNum) - except ValueError: - minus = False - if strNum[:1] == "-": - minus = True - strNum = strNum[1:] - if strNum in localVars: - try: - num = int(localVars[strNum]) - except ValueError: - raise self.raiseErrTemplate( - _("error: variable %s is not integer") % - str(strNum)) - elif self.objVar.exists(strNum): - try: - num = int(self.objVar.Get(strNum)) - except ValueError: - raise self.raiseErrTemplate( - _("error: variable %s is not integer") % - str(strNum)) - else: - raise self.raiseErrTemplate( - _("error: local variable %s not defined") % - str(strNum)) - if minus: - num = -num - strNumers.append(num) - return sum(strNumers) - raise self.raiseErrTemplate(_("error: template term %s, incorrect data") \ - % str(term)) - - def multAndDiv(self, term, localVars): - """Метод для умножения и деления""" - termTmp = term - varsLocal = self.sMD.findall(term) - for var in varsLocal: - flagVarTxt = True - try: - int(var) - except ValueError: - flagVarTxt = False - if flagVarTxt: - continue - varReplace = str(self.equalTerm(var, localVars)) - termTmp = termTmp.replace(var, varReplace) - ret = eval(termTmp) - return ret - - def getIni(self, key, nameTemp=""): - class FakeMatch(object): - def start(self): - return 0 - - def end(self): - return 0 - - return self.funcIni(key, FakeMatch(), None, "", nameTemp) - - def setIni(self, key, value, nameTemp=""): - class FakeMatch(object): - def start(self): - return 0 - - def end(self): - return 0 - - self.funcIni("%s,%s" % (key, value), FakeMatch(), None, "", nameTemp) - - def funcProfile(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Функция проверят состояние пользовательского профиля: - configured - профиль настраивался утилитами calculate - empty - профиль пустой, либо содержит skel, либо сертификат утилит - custom - профиль настроен и он настраивался не утилитами - """ - ini_value = self.funcIni("main.update", resS, localVars, "", nameTemp) - if ini_value: - replace = "configured" - 
else: - user_dir = self.objVar.Get('ur_home_path') - if isBootstrapDataOnly(user_dir): - replace = "empty" - else: - replace = "custom" - - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def check_command(self, command, prefix="/"): - cmd = getProgPath(command, prefix=prefix) - if not cmd: - raise self.raiseErrTemplate( - _("Command not found '%s'")%command) - return cmd - - def warning_message(self, message): - if callable(self.printWARNING): - self.printWARNING(message) - - @template_function() - def funcWorld(self, category): - """ - Функция выполняет eix и возвращает список пакетов - """ - prefix = self.objVar.Get('cl_chroot_path') - nfenv = dict(os.environ) - nfenv["NOFOUND_STATUS"]="0" - kwargs = {'lang':'C', 'envdict': nfenv} - if prefix == "/": - args = [self.check_command("/usr/bin/eix", prefix=prefix)] - else: - args = ["/bin/chroot", prefix, self.check_command("/usr/bin/eix", prefix=prefix)] - args.extend(["-*", "--format", ""]) - if "/" in category: - args.extend(["-e", category]) - else: - args.extend(["--category", "-e", category]) - p = process(*args, **kwargs) - - if p.success(): - atoms = [x for x in p.read().split() if x.strip()] - if not atoms: - self.warning_message(_("No packages in %s category")%category) - return "\n".join(atoms) - else: - raise TemplateFunctionError(_("Failed to execute") + _(": ") + - "eix") - - def funcLs(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Функция получения списка файлов из директории - """ - globpath, o, pattern = funArgv.partition(',') - if not pattern: - pattern = r" " - - pattern = pattern.replace(r"\1", "{0}") - pattern = re.sub(r"(^|[^\\])\\n", "\\1\n", pattern) - pattern = pattern.replace(r"\n", "\n") - pattern = pattern.replace(r"\t", "\t") - - chroot_path = os.path.normpath(self.objVar.Get('cl_chroot_path')) - globpath = pathJoin(chroot_path, globpath) - - if "*" in globpath: - files = glob.glob(globpath) - 
else: - files = listDirectory(globpath, fullPath=True) - if files: - files = (x for x in files if not os.path.isdir(x)) - if chroot_path != '/': - l = len(chroot_path) - files = (x[l:] for x in files) - if r"{0}" not in pattern: - replace = pattern.join(files) - else: - replace = "".join(pattern.format(x) for x in files) - else: - replace = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcForeach(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Функция получения списка файлов из директории - """ - varname, o, pattern = funArgv.partition(',') - - values = "" - try: - values = self.objVar.Get(varname) - except DataVarsError as e: - raise TemplatesError(_("error: variable %s does not exist") - % varname) - - if not pattern: - pattern = r" " - - if values: - pattern = pattern.replace(r"\1", "{0}") - pattern = re.sub(r"(^|[^\\])\\n", "\\1\n", pattern) - pattern = pattern.replace(r"\n", "\n") - pattern = pattern.replace(r"\t", "\t") - - if r"{0}" not in pattern: - replace = pattern.join(values) - else: - replace = "".join(pattern.format(x) for x in values) - else: - replace = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcSum(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона, вычисляет функцию sum()""" - terms = funArgv.replace(" ", "").split(",") - # Название локальной переменной - nameLocVar = terms[0] - if nameLocVar not in localVars: - localVars[nameLocVar] = 0 - if len(terms) == 2: - if terms[1].strip(): - localVars[nameLocVar] = self.equalTerm(terms[1], localVars) - replace = str(localVars[nameLocVar]) - else: - replace = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - elif len(terms) == 3: - if terms[1].strip(): - replaceInt = self.equalTerm(terms[1], localVars) - replace = str(replaceInt) 
- else: - replace = "" - localVars[nameLocVar] = self.equalTerm(terms[2], localVars) - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - else: - raise self.raiseErrTemplate() - return textTemplateTmp - - def funcExists(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона exists(), - проверяет существование файла, если существует выдает '1' - если второй параметр root, то проверка осуществляется от корня. - """ - if funArgv.strip(): - terms = map(lambda x: x.strip(), funArgv.split(",")) - if len(terms) > 2: - raise self.raiseErrTemplate() - fileName = terms[0] - flagNotRootFS = True - if len(terms) == 2: - if terms[1] == "root": - flagNotRootFS = False - else: - raise self.raiseErrTemplate( - _("The second argument of the function is not 'root'")) - if fileName[0] == "~": - # Получаем директорию пользователя - fileName = os.path.join(self.homeDir, - fileName.partition("/")[2], "")[:-1] - elif fileName[0] != "/": - raise self.raiseErrTemplate(_("wrong path '%s'") % fileName) - else: - if flagNotRootFS: - fileName = pathJoin(self._baseDir, fileName) - replace = "" - if os.path.exists(fileName): - check_map = ( - ('f', stat.S_ISREG), - ('d', stat.S_ISDIR), - ('l', stat.S_ISLNK), - ('b', stat.S_ISBLK), - ('c', stat.S_ISCHR), - ('p', stat.S_ISFIFO), - ('s', stat.S_ISSOCK)) - fmode = os.lstat(fileName) - for t, func in check_map: - if func(fmode.st_mode): - replace = t - break - else: - replace = "1" - else: - replace = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcLoad(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона load(), - - если файл существует читает из файла локальную переменную - если один параметр - выводит значение локальной переменной - """ - terms = funArgv.split(",") - if terms: - lenTerms = len(terms) - if not terms[0].strip() or \ - (lenTerms == 2 and not 
terms[1].strip()) or \ - (lenTerms == 3 and not terms[2].strip()) or \ - lenTerms > 3: - raise self.raiseErrTemplate() - else: - raise self.raiseErrTemplate() - flagNotRootFS = True - if lenTerms == 3: - if terms[2] == "root": - flagNotRootFS = False - else: - raise self.raiseErrTemplate( - _("The third argument of the function is not 'root'")) - if lenTerms >= 2: - if not terms[0] in ["ver", "num", "char", "key", "empty"]: - raise self.raiseErrTemplate( - _("the first argument of the function is neither 'ver'" - " or 'num' or 'char' or 'empty'")) - if lenTerms == 1: - fileName = terms[0].strip() - else: - fileName = terms[1].strip() - # Если домашняя директория - if fileName[0] == "~": - # Получаем директорию пользователя - fileName = os.path.join(self.homeDir, - fileName.partition("/")[2], "")[:-1] - elif fileName[0] != "/": - raise self.raiseErrTemplate(_("wrong path '%s'") % fileName) - else: - if flagNotRootFS: - fileName = pathJoin(self._baseDir, fileName) - replace = "" - if os.path.exists(fileName): - replace = readFile(fileName).strip() - if replace and lenTerms >= 2 and terms[0] == "empty": - replace = "\n".join(filter(lambda x: not self.reEmptyLoad.search(x), - replace.split("\n"))) - if not replace and lenTerms >= 2 and terms[0] in ["ver", "num"]: - replace = "0" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def sharePkg(self, pkgs, force=False): - """ - Update packages from pkgs list - """ - for pkgname, category, ver, slot in pkgs: - fullname = "%s/%s" % (category, pkgname) - if not fullname in self.installProg or \ - type(self.installProg[fullname]) != dict: - self.installProg[fullname] = self.installProg[pkgname] = {} - if force or not slot in self.installProg[fullname]: - self.installProg[fullname][slot] = ver - return self.installProg - - def getInstallPkgGentoo(self, category=""): - pkgs = [] - filterFunc = lambda x: "SLOT" == x - - def getFilesDir(pkgs, dirname, 
names): - for nameFile in filter(filterFunc, names): - absNameFile = os.path.join(dirname, nameFile) - category, spl, pkgname = dirname.rpartition('/') - dbpkg, spl, category = category.rpartition('/') - slot = readFile(absNameFile).strip().partition('/')[0] - pkgname, spl, rev = pkgname.rpartition("-") - if rev.startswith('r'): - pkgname, spl, ver = pkgname.rpartition("-") - ver = "%s-%s" % (ver, rev) - else: - ver = rev - pkgs.append((pkgname, category, ver, slot)) - return True - - os.path.walk(os.path.join(self.basePkgDir, category), - getFilesDir, pkgs) - return self.sharePkg(pkgs) - - def pkg(self, nameProg, slot=None): - if len(self.installProg) > 0: - if type(self.installProg.values()[0]) != dict: - self.installProg.clear() - self.getInstallPkgGentoo() - if nameProg in self.installProg: - versions = self.installProg[nameProg] - if slot: - return versions.get(slot, "") - if len(versions) == 1: - return versions.values()[0] - else: - return versions[max(versions.keys(), key=getTupleVersion)] - else: - return "" - - def funcPkg(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона pkg(), выдает номер версии программы""" - # Название программы - nameProg = funArgv.replace(" ", "") - if not nameProg: - nameProg = self.get_pkgname_by_filename(self.nameTemplate) - # Замена функции в тексте шаблона - if "/" in nameProg: - category, spl, nameProg = nameProg.partition("/") - nameProg, spl, uses = nameProg.partition('[') - nameProg, spl, slot = nameProg.partition(":") - if uses: - uses = uses.rstrip("]") - if not category in self.installCategory: - self.getInstallPkgGentoo(category=category) - self.installCategory.append(category) - replace = self.pkg(nameProg, slot=slot or None) - if replace and uses: - pkg_use, pkg_iuse = getPkgUses("%s/%s" % (category, nameProg), - replace, prefix=self.objVar.Get( - 'cl_chroot_path')) - for use in filter(None, uses.split(',')): - if (use[0] == "-" and use[1:] in pkg_use or - use[0] != "-" and use not in 
pkg_use): - replace = "" - break - else: - if not self.flagAllPkgScan: - self.getInstallPkgGentoo() - templateFunction.flagAllPkgScan = True - nameProg, spl, slot = nameProg.partition(":") - replace = self.pkg(nameProg, - slot=slot) - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcKernel(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Функция kernel(...), выдает значение опции конфига ядра (y,m,) - """ - terms = funArgv.replace(" ", "").split(",") - if not terms[0].strip() or len(terms) != 1: - raise self.raiseErrTemplate() - kernel_opt = terms[0].upper() - if kernel_opt.startswith("CONFIG_"): - raise self.raiseErrTemplate( - _("the option name should not starts with CONFIG_")) - kernel_config = self.objVar.Get('install.os_install_kernel_config') - find_str = "CONFIG_%s" % kernel_opt - replace = "" - for line in kernel_config: - if find_str in line: - if "%s=" % find_str in line: - key, op, value = line.partition("=") - replace = value.strip("'\"") - break - elif "%s is not set" % find_str in line: - break - textTemplateTmp = (textTemplateTmp[:resS.start()] + replace + - textTemplateTmp[resS.end():]) - return textTemplateTmp - - def funcGrep(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Функция grep (...), выдает значение из файла по регулярному выражению - """ - fname, op, regpattern = funArgv.replace(" ", "").partition(",") - regpattern = regpattern.replace("(?\<", "(?<") - regpattern = self._replace_hex(regpattern) - if not fname or not regpattern: - raise self.raiseErrTemplate() - try: - reg = re.compile(regpattern) - except re.error: - raise self.raiseErrTemplate(_("Wrong regular expression")) - if fname[0] == "~": - # Получаем директорию пользователя - fname = os.path.join(self.homeDir, - fname.partition("/")[2], "")[:-1] - fname = pathJoin(self.objVar.Get('cl_chroot_path'), fname) - fileContent = readFile(fname) - match_data = 
reg.search(fileContent) - if match_data: - md_groups = match_data.groups() - if md_groups: - replace = md_groups[0] or "" - else: - replace = match_data.group() - else: - replace = "" - return ( - textTemplateTmp[:resS.start()] + replace + textTemplateTmp[ - resS.end():]) - - def funcCut(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Функция разбивающая третий аргумент на строки по указанному разеделителю - и возвращающая указанный блок - #-cut(2,-,1-2-3-4)-# -> 3 - #-cut(2,,1,2,3,4)-# -> 3 - :param funArgv: - :param resS: - :param localVars: - :param textTemplateTmp: - :param nameTemp: - :return: - """ - if funArgv: - terms = funArgv.split(",") - else: - terms = [] - if len(terms) > 3: - terms = terms[:2] + [",".join(terms[2:])] - if len(terms) < 1: - terms = ["0"] - if len(terms) < 2: - terms.append("-") - if len(terms) < 3: - terms.append(self.objVar.Get('cl_pass_file')) - num, delimeter, data = terms - if not num.isdigit(): - raise self.raiseErrTemplate( - _("first parameter must be number")) - delimeter = delimeter or "," - num = int(num) - data = data.split(delimeter) - if num < len(data): - replace = data[num] - else: - replace = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcRnd(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона rnd(), выдает строку случайных символов - - первый аргумент: - 'num' - числа, - 'pas' - цифры и буквы - 'uuid' - цифры и строчные буквы a-f - второй аргумент: - количество символов - """ - terms = funArgv.replace(" ", "").split(",") - gentype = terms[0].strip() - genlen = None - uuidmax = 32 - if len(terms) not in (1,2): - raise self.raiseErrTemplate( - _("function rnd support one or two arguments only")) - if len(terms) == 2: - genlen = terms[1] - if not genlen.isdigit(): - raise self.raiseErrTemplate( - _("the second argument of the function is not a number")) - genlen = int(terms[1]) - if 
gentype == 'uuid': - if not genlen: - genlen = uuidmax - if genlen > uuidmax: - raise self.raiseErrTemplate( - _("length of UUID must not be above {maxlen}").format( - maxlen=uuidmax)) - - if not gentype or not genlen or len(terms) not in (1, 2): - raise self.raiseErrTemplate() - fArgvNames = {'num': string.digits, - 'pas': string.ascii_letters + string.digits, - 'hex': string.ascii_lowercase[:6] + string.digits, - 'uuid': string.ascii_lowercase[:6] + string.digits} - if not gentype in fArgvNames: - raise self.raiseErrTemplate( - _("the first argument of the function must " - "be 'num', 'pas' or 'uuid'")) - if gentype == 'uuid': - offset = [y for x, y in ( - (0, 0), (9, 1), (13, 2), (17, 3), (21, 4)) if genlen >= x][-1] - replace = str(uuid.uuid4()) - replace = replace[:genlen+offset] - else: - choiceStr = fArgvNames[gentype] - replace = ''.join([random.choice(choiceStr) for i in xrange(genlen)]) - - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcCase(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона case(), выдает переменную в определенном регистре - - первый аргумент: - 'upper' - верхний регистр, - 'lower' - нижний регистр, - 'capitalize' - первая буква в верхнем регистре - второй аргумент: - название переменной - """ - terms = funArgv.replace(" ", "").split(",") - if not terms[0].strip() or \ - (len(terms) == 2 and not terms[1].strip()) or len(terms) != 2: - raise self.raiseErrTemplate() - fArgvNames = ['upper', 'lower', 'capitalize'] - if not terms[0] in fArgvNames: - raise self.raiseErrTemplate(_("the first argument of the function" - " is neither 'upper' or 'lower' or" - " 'capitalize'")) - try: - strValue = self.objVar.Get(terms[1]) - if not strValue: - strValue = "" - else: - strValue = str(strValue) - except Exception: - raise TemplatesError( - _("error in template %s") % self.nameTemplate + "\n" + - _("error: variable %s not found") % 
str(terms[1])) - replace = "" - strValue = _u(strValue) - if terms[0] == 'upper': - replace = strValue.upper() - elif terms[0] == 'lower': - replace = strValue.lower() - elif terms[0] == 'capitalize': - replace = strValue.capitalize() - if replace: - replace = replace.encode("UTF-8") - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcIn(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Function in for check value in variable - """ - terms = funArgv.replace(" ", "").split(",") - # Название локальной переменной - nameLocVar = terms[0] - try: - value = self.objVar.Get(nameLocVar) - terms = terms[1:] - if any(x in terms for x in iterate_list(value)): - replace = "1" - else: - replace = "" - except Exception: - raise self.raiseErrTemplate(_("error: variable %s does not exist") \ - % str(nameLocVar)) - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcPush(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """локальная функция записывает значение переменной - - в стек глобальных переменных - """ - terms = funArgv.replace(" ", "").split(",") - # Название локальной переменной - nameLocVar = terms[0] - value = "" - if nameLocVar in localVars.keys(): - flagFoundVar = True - value = localVars[nameLocVar] - else: - try: - value = self.objVar.Get(nameLocVar) - flagFoundVar = True - except Exception: - flagFoundVar = False - if flagFoundVar: - # Если переменная существует - if len(terms) == 1: - self.stackGlobalVars.append(str(value)) - else: - raise self.raiseErrTemplate(_("error: variable %s exists") \ - % str(nameLocVar)) - else: - # Если переменная не существует - if len(terms) == 1: - raise self.raiseErrTemplate( - _("error: variable %s does not exist") \ - % str(nameLocVar)) - elif len(terms) == 2: - value = terms[1].strip() - self.stackGlobalVars.append(str(value)) - 
localVars[nameLocVar] = value - else: - raise self.raiseErrTemplate() - replace = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcPop(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """локальная функция получает значение - - из стека глобальных переменных и присваивает локальной переменной - - """ - terms = funArgv.replace(" ", "").split(",") - # Название локальной переменной - nameLocVar = terms[0] - if len(terms) == 1: - if self.stackGlobalVars: - localVars[nameLocVar] = self.stackGlobalVars.pop() - else: - raise self.raiseErrTemplate( - _("error: global variables stack empty")) - else: - raise self.raiseErrTemplate() - replace = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcPrint(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Вывод успешного сообщения - """ - if funArgv: - funArgv = _(funArgv) - self.printSUCCESS(funArgv) - textTemplateTmp = textTemplateTmp[:resS.start()] + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcWarning(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Вывод сообщения с предупреждением - """ - if funArgv: - funArgv = _(funArgv) - self.printWARNING(funArgv) - textTemplateTmp = textTemplateTmp[:resS.start()] + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcError(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Вывод сообщения с ошибкой - """ - if funArgv: - funArgv = _(funArgv) - self.printERROR(funArgv) - textTemplateTmp = textTemplateTmp[:resS.start()] + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def getElogTimestamp(self): - # Получаем время модификации конфигурационного файла - curTime = self.getTimeFile(self.fileConfigIni) - nameLocVar = "update.timestamp" - if self.timeIni != curTime: - # читаем переменные из файла - 
self.prevDictIni = self.loadVarsIni(self.fileConfigIni) - self.currDictIni = {} - self.currDictIni.update(self.prevDictIni) - self.timeIni = self.getTimeFile(self.fileConfigIni) - if nameLocVar in self.currDictIni.keys(): - if self.currDictIni[nameLocVar] is None: - return 0 - else: - val = self.currDictIni[nameLocVar].encode("UTF-8") - if val.isdigit(): - return int(val) - return 0 - - elogFile = '/var/log/emerge.log' - - @classmethod - def getLastElog(cls): - # get last timestamp (of ::completed emerge) - entry = EmergeLog(EmergeLogPackageTask()).get_last_time() - if entry: - return entry.partition(":")[0] - else: - return "0" - - def funcElog(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Function for work with emerge.log""" - funArgv = funArgv.strip() - rePkg = re.compile(r'\) Merging (?:Binary )?\((\S+)::', re.S) - replace = "" - if funArgv: - lastTimestamp = self.getElogTimestamp() - for line in reversed(list(readLinesFile(self.elogFile))): - timestamp, op, info = line.partition(':') - if timestamp.isdigit() and lastTimestamp and \ - int(timestamp) < lastTimestamp: - break - match = rePkg.search(info) - if match and match.group(1).startswith(funArgv): - pkgInfo = reVerSplitToPV(match.group(1)) - if "{CATEGORY}/{PN}".format(**pkgInfo) == funArgv: - replace = pkgInfo['PVR'] - break - else: - replace = self.getLastElog() - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - @classmethod - def splash_cmd(cls, splash_type): - cmd_map = { - 'splashutils': "splash=silent,theme:calculate console=tty1", - 'plymouth': "splash", - } - return cmd_map.get(splash_type, "verbose") - - def funcLivemenu(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - def generateSubmenu(data): - base_dn = self.objVar.Get('builder.cl_builder_flash_path') - for id, label, iso, vmlinuz_orig, vmlinuz, initrd_orig, initrd, \ - xorg, drivers, splash in data: - splash = self.splash_cmd(splash) - 
yield ("{id};\n{label};\n/boot/{kernel};\n" - "root=live iso-scan/filename={iso};\n" - "/boot/{initrd};\n" - "init=/linuxrc rd.live.squashimg=livecd.squashfs " - "{splash} " - "nodevfs quiet noresume;\n".format( - id=id, label=label, kernel=vmlinuz, initrd=initrd, - splash=splash, iso=iso[len(base_dn):] - )) - - def generateXorg(data): - for id, label, iso, vmlinuz_orig, vmlinuz, initrd_orig, initrd, \ - xorg, drivers, splash in data: - if xorg == "on": - yield id - - def generateVideo(data): - for id, label, iso, vmlinuz_orig, vmlinuz, initrd_orig, initrd, \ - xorg, drivers, splash in data: - if drivers == "on": - yield id - - data = filter(None, self.objVar.Get('builder.cl_builder_image_data')) - if funArgv == 'submenu': - res = "\n".join(generateSubmenu(data)) - elif funArgv == 'xorg': - res = " ".join(generateXorg(data)) - elif funArgv == 'video': - res = " ".join(generateVideo(data)) - else: - res = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + res + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def loadVarsIni(self, iniFileName): - """ Читает файл fileName - создает и заполняет переменные на основе этого файла - Используеться совместно c funcIni - """ - localVarsIni = {} - # получить объект ini файла - config = ConfigParser(strict=False) - config.read(iniFileName, encoding="utf-8") - # получаем все секции из конфигурационного файла - allsect = config.sections() - if not allsect: - return localVarsIni - # Заполняем переменные для funcIni - for sect in allsect: - for name, valueVar in config.items(sect, raw=True): - nameVar = "%s.%s" % (sect, name) - localVarsIni[nameVar] = valueVar - return localVarsIni - - def getTimeFile(self, fileName): - # Получаем время модификации файла - nameEnvFile = os.path.split(fileName)[1] - if nameEnvFile in self.timeConfigsIni: - return self.timeConfigsIni[nameEnvFile] - return 0 - - def funcWallpaper(self, funArgv, resS, localVars, textTemplateTmp, - nameTemp): - """ - Получить наиболее близкое к 
заданному разрешение из списка обоев - """ - terms = funArgv.replace(" ", "").split(",") - onlyfile = "" - if len(terms) == 3: - resol, wpath, onlyfile = terms - if onlyfile != "file": - raise self.raiseErrTemplate( - _("third parameter may be 'file' only")) - elif len(terms) == 2: - resol, wpath = terms - else: - raise self.raiseErrTemplate( - _("function support two or three parameters")) - if not resol: - resol = "1024x768" - _wpath = wpath - - wpath = pathJoin(self._baseDir, wpath) - if os.path.isdir(wpath): - re_resol = re.compile("^(\d+)x(\d+)(-\d+(@\d+)?)?$") - resol = re_resol.match(resol) - if not resol: - raise self.raiseErrTemplate( - _("the first parameter must be the resolution")) - - re_resol = re.compile(".*?(\d+)x(\d+).*") - res = map(lambda x: (int(x.group(1)), int(x.group(2)), x.group()), - filter(None, - map(re_resol.search, - listDirectory(wpath)))) - width = int(resol.group(1)) - height = int(resol.group(2)) - gep = sqrt(height ** 2 + width ** 2) - k = float(width) / float(height) - if res: - # наиболее подходящее разрешение: - # минимальная разность между пропорциями (отношение ширины к высоте) - # минимальная разность между размерами (гепотенуза) - near_res = min(res, - key=lambda x: (abs(x[0] / float(x[1]) - k), - abs(gep - sqrt( - x[0] ** 2 + x[1] ** 2)))) - replace = near_res[2] - if not onlyfile: - replace = pathJoin(_wpath, replace) - else: - replace = "" - else: - if os.path.exists(wpath): - if onlyfile: - replace = os.path.basename(_wpath) - else: - replace = _wpath - else: - replace = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcIni(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """локальная функция записывает и считывает значение переменной - - из ini файла ~./calculate/ini.env - """ - # Создаем директорию - if not os.path.exists(self.pathConfigIni): - os.makedirs(self.pathConfigIni) - os.chown(self.pathConfigIni, self.uid, 
self.gid) - termsRaw = funArgv.split(",") - flagFirst = True - terms = [] - for term in termsRaw: - if flagFirst: - terms.append(term.replace(" ", "")) - flagFirst = False - else: - val = term.strip() - # Флаг (не найдены кавычки) - flagNotFoundQuote = True - for el in ('"', "'"): - if val.startswith(el) and val.endswith(el): - terms.append(val[1:-1]) - flagNotFoundQuote = False - break - if flagNotFoundQuote: - if not val: - terms.append(None) - else: - terms.append(val) - # Название локальной переменной - if self.objVar.Get('cl_action') in ('image', 'system'): - oldbasedir = self._baseDir - old_profile = self.objVar.Get('cl_make_profile') - old_config = self.objVar.Get('cl_emerge_config') - try: - if self.objVar.Get('cl_action') == 'image': - self._baseDir = self.objVar.Get('builder.cl_builder_path') - else: - self._baseDir = self.objVar.Get('cl_chroot_path') - dvc = DataVarsConfig(self._baseDir) - self.objVar.Set('cl_make_profile', - dvc.Get('cl_make_profile'), force=True) - self.objVar.Set('cl_emerge_config', - dvc.Get('cl_emerge_config'), force=True) - self.read_other_ini() - finally: - self._baseDir = oldbasedir - self.objVar.Set('cl_make_profile', old_profile, force=True) - self.objVar.Set('cl_emerge_config', old_config, force=True) - else: - self.read_other_ini() - nameLocVar = terms[0] - namesVar = nameLocVar.split(".") - if len(namesVar) == 1: - nameLocVar = "main.%s" % nameLocVar - elif len(namesVar) > 2: - raise self.raiseErrTemplate() - replace = "" - # Получаем время модификации конфигурационного файла - curTime = self.getTimeFile(self.fileConfigIni) - if 1 <= len(terms) <= 3: - if self.timeIni != curTime: - # читаем переменные из файла - self.prevDictIni = self.loadVarsIni(self.fileConfigIni) - self.currDictIni = {} - self.currDictIni.update(self.prevDictIni) - self.timeIni = self.getTimeFile(self.fileConfigIni) - section, op, varname = nameLocVar.partition(".") - value = self.upperIni.get(section, varname, raw=True, fallback=None) - if value is 
None: - if nameLocVar in self.currDictIni.keys(): - if self.currDictIni[nameLocVar] is not None: - value = self.currDictIni[nameLocVar] - if value is None: - value = self.lowerIni.get(section, varname, raw=True, - fallback="") - else: - raise self.raiseErrTemplate() - - if len(terms) == 1: - replace = value.encode("UTF-8") - elif len(terms) == 2: - # Значение локальной переменной - valueLocVar = terms[1] - self.currDictIni[nameLocVar] = valueLocVar - elif len(terms) == 3: - if not terms[2] in ['url', 'purl', 'unicode', 'hexcode']: - raise self.raiseErrTemplate( - _("the third argument of the function is neither " - "'url' or 'purl' or 'unicode'")) - if terms[1]: - raise self.raiseErrTemplate() - if value: - if terms[2] in ('url', 'purl'): - replace = (value.encode("UTF-8").__repr__()[1:-1].replace( - '\\x', '%').replace(' ', '%20')) - if terms[2] == 'purl': - replace = replace.replace('/', '%2f') - elif terms[2] == 'unicode': - replace = value.__repr__()[2:-1] - elif terms[2] == 'hexcode': - replace = value.__repr__()[2:-1].replace('\\u0','\\x') - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def _replace_hex(self, text): - """ - Заменить в строке комбинацию \\x00 на символ - """ - return re.sub(r'\\x([0-9a-fA-F]{2})', - lambda x: chr(int(x.group(1), 16)), text) - - def funcReplace(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """локальная функция заменяет в значении переменной old на new - - replace(old, new, name_var_template) - - одинарные и двойные кавычки должны быть обязательно использованы - в первых двух аргументах old и new - "test\ntest" - преобразование строки (строка с переводом) - 'test\ntest' - без преобразования (одна строка) - """ - - def getStrArgv(terms): - """Определяет в двойных или одинарных кавычках параметры - - Результат [(тип, аргумент),...] [("double", arg1).] 
- """ - listArgv = [] - for term in terms: - if term.startswith('"') and term.endswith('"'): - replTerms = [(r"\'", "'"), (r'\"', '"'), (r'\n', '\n'), - (r'\r', '\r'), - (r'\t', '\t'), (r"\\", "\\")] - textArgv = term[1:-1] - for replTerm in replTerms: - textArgv = textArgv.replace(*replTerm) - textArgv = self._replace_hex(textArgv) - listArgv.append(textArgv) - elif term.startswith("'") and term.endswith("'"): - listArgv.append(term[1:-1]) - else: - raise self.raiseErrTemplate() - return listArgv - - terms = map(lambda x: x.strip(), funArgv.split(",")) - if len(terms) != 3: - raise self.raiseErrTemplate() - listArgv = getStrArgv(terms[:2]) - old = listArgv[0] - new = listArgv[1] - nameVar = terms[2] - # Получаем значение переменной - if nameVar in localVars: - value = str(localVars[nameVar]) - else: - try: - value = str(self.objVar.Get(nameVar)) - except Exception: - raise self.raiseErrTemplate( - _("template variable '%s' not found") \ - % str(nameVar)) - replace = value.replace(old, new) - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcEnv(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона env(), выдает значение переменной из env файлов - """ - terms = funArgv.replace(" ", "").split(",") - if len(terms) != 1: - raise self.raiseErrTemplate() - nameVar = terms[0] - if nameVar in self.valuesVarEnv: - replace = self.valuesVarEnv[nameVar] - else: - # Получаем значение из env файлов - value = self.objVar.getIniVar(nameVar) - if value is False: - raise self.raiseErrTemplate(self.getError()) - self.valuesVarEnv[nameVar] = value - replace = value - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcServer(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона info(), выдает значение опций сервиса - - из /var/calculate/remote/calculate.env - """ - terms = 
funArgv.replace(" ", "").split(",") - if len(terms) == 0 or len(terms) > 2: - raise self.raiseErrTemplate() - nameLocalVar = "" - if len(terms) == 2: - if not terms[1]: - raise self.raiseErrTemplate() - nameLocalVar = terms[1] - textLine = terms[0] - vals = textLine.split(".") - if len(vals) != 2: - raise self.raiseErrTemplate() - if filter(lambda x: not x.strip(), vals): - raise self.raiseErrTemplate() - service, option = vals - if not service or not option: - raise self.raiseErrTemplate() - if not self.optionsInfo: - # файл /var/calculate/remote/server.env - envFile = self.objVar.Get("cl_env_server_path") - # получаем словарь всех информационных переменных - if self.convObj: - optInfo = self.convObj.convert() - else: - optInfo = self.objVar.getRemoteInfo(envFile) - if optInfo is False: - raise self.raiseErrTemplate() - if optInfo: - self.optionsInfo = optInfo - replace = '' - if service in self.optionsInfo and option in self.optionsInfo[service]: - value = self.optionsInfo[service][option] - if nameLocalVar: - localVars[nameLocalVar] = value - else: - replace = value - elif nameLocalVar: - localVars[nameLocalVar] = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcGroups(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона groups(), - проверяет нахождение пользователя в группах, если находится выдает '1' - """ - terms = map(lambda x: x.strip(), funArgv.split(",")) - groupNames = set(terms) - userGroups = set(self.groups) - replace = "" - if groupNames & userGroups: - replace = "1" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcBelong(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - self.printWARNING(_("Function '{funcname}' used by {template} " - "is deprecated and will be removed in the future" - ).format(funcname="belong", template=nameTemp)) - replace 
= "" - return textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - - def funcMergepkg(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Функция объединяющая выполнение merge и pkg - :param funArgv: - :param resS: - :param localVars: - :param textTemplateTmp: - :param nameTemp: - :return: - """ - term = funArgv.replace(" ", "") - funcPkg = term - replace = self.funcMerge(funcPkg, resS, localVars, "", nameTemp) - if replace == "1": - replace = self.funcPkg(funcPkg, resS, localVars, "", nameTemp) - return textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - - def funcMerge(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """ - Belong function use value in first arg and compare it - for all values in cl_merge_pkg. - If cl_merge_pkg empty or first arg <=cl_belogn_pkg - then "1" else "" - """ - - def uniq_warning(message): - hashMessage = hashlib.md5(message).digest() - if not hashMessage in self.alreadyInformed: - self.printWARNING(message) - self.alreadyInformed.append(hashMessage) - - def check_skip(pkgName): - varname_map = { - HParams.ActionType.Merge: "cl_setup_skip_merge", - HParams.ActionType.Patch: "cl_setup_skip_patch", - HParams.ActionType.Profile: "cl_setup_skip_profile", - } - skip_data_varname = varname_map.get(self.currentAction) - skip_data = self.objVar.Get(skip_data_varname) - pass_location = self.objVar.Get('cl_pass_location') - if any(skip_data): - for data in skip_data: - if len(data) != 2: - uniq_warning( - _("Wrong entry '{data}' for {var_name}").format( - data=",".join(data), - var_name=skip_data_varname)) - else: - pkg, location = data - if fnmatch(pkgName, pkg) and (location == "*" - or location == pass_location): - return True - return False - - term = funArgv.replace(" ", "") - funcPkg = term - funcPkg, spl, uses = funcPkg.partition('[') - funcPkg, spl, slot = funcPkg.partition(":") - if uses: - uses = uses.rstrip("]") - if not funcPkg: - funcPkg = 
self.get_pkgname_by_filename(self.nameTemplate) - self.currentBelong = funcPkg - self.currentBelongSlot = slot - if self.objVar.Get('cl_action') == 'patch': - if funcPkg == "%s/%s" % ( - self.objVar.Get('core.cl_core_pkg_category'), - self.objVar.Get('core.cl_core_pkg_name')): - spec_slot = self.objVar.Get('core.cl_core_pkg_slot') - spec_slot = spec_slot.partition('/')[0] - if not slot or spec_slot == slot: - replace = self.objVar.Get('core.cl_core_pkg_version') - if uses: - from os import environ - pkg_use = environ.get("USE", "").split(" ") - for use in filter(None, uses.split(',')): - if (use[0] == "-" and use[1:] in pkg_use or - use[0] != "-" and use not in pkg_use): - replace = "" - if check_skip(funcPkg): - replace = "" - else: - replace = "" - else: - replace = "" - else: - replace = "" - pkgs = self.objVar.Get("cl_merge_pkg") - if pkgs: - pkgs = [x for x in pkgs if x] - if slot: - for pkg in pkgs: - if ":" in pkg: - pkg, _, pkgslot = pkg.partition(":") - if pkg == funcPkg and pkgslot == slot: - replace = "1" - break - elif pkg == funcPkg: - replace = "1" - break - else: - if funcPkg in [x.partition(":")[0] for x in pkgs]: - replace = "1" - else: - replace = "1" - if replace == "1" and check_skip(funcPkg): - replace = "" - textTemplateTmp = textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - return textTemplateTmp - - def funcList(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона list(). - Если первый аргумент является именем локальной или глобальной - переменной и значение переменной является списком, выдает - элемент списка по второму аргументу индексу. 
- Первый элемент имеет индекс 0 - """ - terms = funArgv.replace(" ", "").split(",") - # У функции должно быть два аргумента - if len(terms) != 2: - raise self.raiseErrTemplate() - # Название локальной или глобальной переменной - nameLocVar = terms[0] - strIndex = terms[1] - try: - try: - intIndex = int(strIndex) - except ValueError: - raise TemplatesError(_("'%s' is not a number") % strIndex) - if nameLocVar in localVars.keys(): - value = localVars[nameLocVar] - else: - try: - value = self.objVar.Get(nameLocVar) - except Exception: - raise TemplatesError(_("error: variable %s does not exist") - % str(nameLocVar)) - if not type(value) in (list, tuple): - # Значение переменной не список или кортеж - raise TemplatesError(_("value of %s is not a list or a tuple") - % str(nameLocVar)) - if len(value) > intIndex: - try: - replace = str(value[intIndex]) - except IndexError: - replace = "" - else: - replace = "" - return textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - except TemplatesError as e: - raise self.raiseErrTemplate(str(e)) - - def funcDisk(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона disk(). 
- Первый аргумент ищется в значении переменной os_disk_install - (значение os_install_disk_mount - - список точек монтирования при установке) - второй аргумент используется для поиска в переменной - os_disk_второй_аргумент (значение os_disk_второй_аргумент - список) - В os_install_disk_mount ищется первый аргумент, находим его индекс - результат - элемент cписка из os_disk_второй_аргумент с этим индексом - """ - terms = funArgv.replace(" ", "").split(",") - # У функции должно быть два аргумента - if len(terms) != 2: - raise self.raiseErrTemplate() - # Название глобальной переменной - mountPoint = terms[0] - lastElementVar = terms[1] - if not mountPoint or mountPoint[:1] != "/": - raise self.raiseErrTemplate(_("wrong %s") % lastElementVar) - nameVar = "install.os_install_disk_mount" - try: - try: - valueVar = self.objVar.Get(nameVar) - except Exception: - raise TemplatesError( - _("error: variable %s does not exist") % nameVar) - nameElementVar = "install.os_install_disk_%s" % lastElementVar - try: - valueElementVar = self.objVar.Get(nameElementVar) - except Exception: - # Если переменная не существует - nameElementVar = "install.os_disk_%s" % lastElementVar - try: - valueElementVar = self.objVar.Get(nameElementVar) - except Exception: - raise TemplatesError(_("wrong %s") % lastElementVar + "\n" + - _("error: variable %s does not exist") - % nameElementVar) - - for k, v in ((nameVar, valueVar), - (nameElementVar, valueElementVar)): - if not type(v) in (list, tuple): - # Значение переменной не список или кортеж - raise TemplatesError( - _("value of %s is not a list or a tuple") % k) - if len(valueVar) != len(valueElementVar): - raise TemplatesError( - _("%(name)s does not match %(nameElement)s in size") - % {'name': nameVar, 'nameElement': nameElementVar}) - index = None - for num, mPoint in enumerate(valueVar): - if mountPoint == mPoint: - index = num - break - if index is None: - for num, mPoint in enumerate(valueVar): - if "/" == mPoint: - index = num - break - if 
index is None: - raise TemplatesError( - _("mount point '/' or '/%s' not found " - " in the value of variable os_disk_install") % mountPoint) - replace = valueElementVar[index] - return textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - except TemplatesError as e: - raise self.raiseErrTemplate(str(e)) - - def funcModule(self, funArgv, resS, localVars, textTemplateTmp, nameTemp): - """Функция шаблона module(), выдает значение аттрибута api. - - аргумент: - путь_к_атрибуту - путь к аттрибуту - - возможные пути: - имя_пакета.var.имя_переменной - получаем значение переменной - имя_пакета.имя_метода_api - выполнение метода, получение результата - all.имя_метода_api - выполнение метода для всех пакетов с api - """ - current_version = self.objVar.Get('cl_ver') - try: - if funArgv not in self.objVar.importedModules: - importlib.import_module( - "calculate.%s.variables" % funArgv) - if self.objVar.Get('cl_chroot_path') == "/": - replace = current_version - else: - usrlib = SystemPath(self.objVar.Get('cl_chroot_path')).usrlib - module_path = os.path.join( - usrlib, "python3.9/site-packages/calculate/%s/variables" - % funArgv) - if os.path.exists(module_path): - pkg = "sys-apps/calculate-utils:3" - chroot_version = self.funcPkg( - pkg, re.search(".*", pkg), localVars, pkg, nameTemp) - t_chroot_version = getTupleVersion(chroot_version) - t_current_version = getTupleVersion(current_version) - if t_chroot_version < t_current_version: - replace = chroot_version - else: - replace = current_version - else: - replace = "" - except (ImportError, AttributeError): - replace = "" - - return textTemplateTmp[:resS.start()] + replace + \ - textTemplateTmp[resS.end():] - - def raiseErrTemplate(self, message=""): - """Возвращает ошибки при обработке функций шаблона""" - if message: - message = "%s\n" % message - else: - message = "" - return TemplatesError( - _("error in template %s") % self.nameTemplate + "\n" + \ - _("error, template term '%s'") % 
str(self.functText) + \ - " " + message) - - def applyFuncTemplate(self, textTemplate, nameTemplate): - """Применяет функции к тексту шаблона""" - # Локальные переменные - localVars = {} - # Имя обрабатываемого шаблона - self.nameTemplate = nameTemplate - # Регулярное выражение для поиска функции в шаблоне - reFunc = self._reFunc - - textTemplateTmp = textTemplate - flagIniFunc = False - writeIniFunc = False - - def funcSearch(s): - minlen = 1 - resS = re.search(self._rePreFuncPattern % minlen, s, re.M) - while resS and resS.group().count("#-") != (resS.group().count("-#") - - resS.group().count("-#-")): - minlen = len(resS.group()) - 2 - resS = re.search(self._rePreFuncPattern % minlen, s, re.M) - if resS: - funcblock = resS.group() - resS = reFunc.search(s[:resS.end()]) - if not resS: - raise self.raiseErrTemplate( - _("wrong function syntax %s") % funcblock) - return resS - - resS = funcSearch(textTemplateTmp) - - while resS: - mark = textTemplateTmp[resS.start():resS.end()] - self.functText = mark[self._deltVarStart:-self._deltVarEnd] - funcName, spl, funcEnd = self.functText.partition("(") - if funcName in self.namesTemplateFunction: - # аргументы функции - '(' аргументы ')' - funArgv = funcEnd.rpartition(")")[0] - # вызов функции шаблона - if "#-" in funArgv and "-#" in funArgv: - funArgv = self.applyVarsTemplate(funArgv, nameTemplate) - funArgv = self.applyFuncTemplate(funArgv, nameTemplate) - textTemplateTmp = self.templateFunction[funcName](self, funArgv, - resS, - localVars, - textTemplateTmp, - nameTemplate) - resS = funcSearch(textTemplateTmp) - if funcName == "ini": - if "," in funArgv: - writeIniFunc = True - flagIniFunc = True - else: - raise self.raiseErrTemplate( - _("function of templates '%s' not found") \ - % str(self.functText)) - if flagIniFunc: - # Очистка файла в случае его ошибочного чтения - if not self.prevDictIni and os.path.exists(self.fileConfigIni): - with open(self.fileConfigIni, "r+") as FD: - FD.truncate(0) - FD.seek(0) - # Если 
конф. файл модифицирован шаблоном - curTime = self.getTimeFile(self.fileConfigIni) - if curTime != self.timeIni: - # Считаем переменные из конф. файла - self.prevDictIni = self.loadVarsIni(self.fileConfigIni) - self.currDictIni.update(self.prevDictIni) - self.timeIni = curTime - # Если словари переменных не совпадают - if self.prevDictIni != self.currDictIni: - # Запишем переменные в конфигурационный файл - # Создание объекта парсера - config = ConfigParser(strict=False) - config.read(self.fileConfigIni, encoding="utf-8") - comment_block = "\n".join(takewhile(lambda x: x.startswith("#"), - readLinesFile( - self.fileConfigIni))) - for k, v in self.currDictIni.items(): - if "." in k: - sect, op, k = k.rpartition('.') - if v is None: - if config.has_section(sect): - config.remove_option(sect, k) - else: - if not config.has_section(sect): - config.add_section(sect) - config[sect][k] = v - for section in filter(lambda x: not config[x], - config.sections()): - config.remove_section(section) - with codecs.open(self.fileConfigIni, 'wb', - 'utf-8', 'ignore') as f: - if comment_block: - f.write(comment_block) - f.write('\n\n') - config.write(f) - try: - oMode = getModeFile(self.fileConfigIni, mode="mode") - if oMode != self.modeConfigIni: - os.chmod(self.fileConfigIni, self.modeConfigIni) - except OSError: - pass - - # читаем переменные из файла - self.prevDictIni = self.loadVarsIni(self.fileConfigIni) - self.currDictIni.update(self.prevDictIni) - self.timeConfigsIni[self.fileConfigIni] = float(time.time()) - self.timeIni = self.getTimeFile(self.fileConfigIni) - # Меняем владельца в случае необходимости - if writeIniFunc and os.path.exists(self.fileConfigIni): - uid, gid = getModeFile(self.fileConfigIni, "owner") - if self.uid not in (uid, PORTAGEUID) or \ - self.gid not in (gid, PORTAGEGID): - try: - os.chown(self.fileConfigIni, self.uid, self.gid) - except OSError: - self.setError(_("error") + " " + - "'chown %d:%d %s'" % (self.uid, self.gid, - self.fileConfigIni)) - 
return textTemplateTmp - - -class ChangedFiles(object): - """ - Object which contains modified files and package - """ - FILE_MODIFIED, FILE_REMOVED, DIR_CREATED, DIR_REMOVED, DIR_EXISTS = 0, 1, 2, 3, 4 - - def __init__(self): - self.data = {} - self.pkgs = set() - - def _createEntry(self, fn): - if not fn in self.data: - self.data[fn] = [] - - def addObj(self, filename, action, pkg, slot=""): - if slot: - pkgslot = "{}:{}".format(pkg,slot) - else: - pkgslot = pkg - self._createEntry(filename) - self.data[filename].append((pkgslot, action)) - self.pkgs.add(pkgslot) - - def getPkgs(self): - return self.pkgs - - def getPkgFiles(self, pkg): - return map(lambda x: (x[0], x[1][0][1]), - filter(lambda x: x[1], - map(lambda x: ( - x[0], filter(lambda x: x[0] == pkg, x[1])), - self.data.items()))) - -# modes work with configuration file -# T_ORIGIN - work with original config file -# T_CFG - work with last ._cfg file -# T_NEWCFG - new content has difference with (create new ._cfg file) -T_ORIGIN, T_CFG, T_NEWCFG = 0, 1, 2 - - -class Template(_file, _terms, _warning, xmlShare, _shareTemplate): - """Класс для работы с шаблонами - - На вход 2 параметра: объект хранения переменных, имя сервиса - не - обязательный параметр - - """ - # Название файла шаблона директории - templDirNameFile = ".calculate_directory" - _titleList = ("Modified", "Processed template files" + ":") - titleEnd = "For modify this file, create %(conf_path)s.clt template." 
- protectPaths = [] - allContents = {} - if "CONFIG_PROTECT" in os.environ: - protectPaths = ["/etc"] + list(filter(lambda x: x.strip(), - os.environ["CONFIG_PROTECT"].split( - " "))) - protectPaths = map(lambda x: os.path.normpath(x), protectPaths) - - @classmethod - def removeComment(cls, text): - re_comment = re.compile('(?:|[{symb}]-*)\n'.format( - modified=cls._titleList[0], - processed=cls._titleList[1], - endtitle=cls.titleEnd % {'conf_path': '.*'}, - symb='"#' - )) - return re_comment.sub('', text) - - def hasError(self): - return self.getError() or self.bHasError or ( - self.cltObj and self.cltObj.bHasError) - - def __init__(self, objVar, servDir=False, dirsFilter=(), filesFilter=(), - cltObj=True, cltFilter=True, printWarning=True, - printSUCCESS=lambda x: x, printWARNING=lambda x: x, - printERROR=lambda x: x, askConfirm=lambda x: x, - userProfile=False, dispatchConf=None, - critical=False): - # совместимость с вызовами из модулей предыдущих версий - self.translator = RegexpLocalization("cl_templates3") - if userProfile and objVar: - objVar.Set('cl_protect_use_set', 'off', force=True) - - self.protectPaths = objVar.Get('cl_config_protect') - self.dispatchConf = dispatchConf - self.changedFiles = ChangedFiles() - self.printSUCCESS = printSUCCESS - self.formatFactory = FormatFactory(self) - self.printERROR = printERROR - self.critical = critical - self.postmergePkgs = [] - self._locked_packages = {} - if objVar and objVar.Get("cl_merge_pkg"): - self._locked_packages = { - x.partition(":")[0]: None - for x in objVar.Get('cl_merge_pkg') - if x - } - - self.postmergeFile = "/var/lib/calculate/-postmerge" - self.bHasError = False - if printERROR: - def newSetError(s): - self.printERROR(s) - self.bHasError = True - - self.setError = newSetError - self.printWARNING = printWARNING - self.askConfirm = askConfirm - self.cltObj = None - self.functObj = None - self.mounts = None - - # Бесконченое ожидание завершения emerge если выполняется настройка пакетов - #if 
objVar: - # if (not objVar.Get('cl_ebuild_phase') - # and emerge_running() - # and objVar.GetBool('install.ac_install_merge')): - # self.printWARNING(_("Waiting for emerge to be complete")) - # while emerge_running(): - # time.sleep(1) - # Предупреждения - # self.warning = [] - # Печатать ли предупреждения о корневых шаблонах без cl_name==pkg - self.printWarning = printWarning - # Необрабатываемые директории - self.dirsFilter = dirsFilter - # Необрабатываемые файлы - self.filesFilter = filesFilter - _file.__init__(self) - # Словарь для создания объектов новых классов по образцу - self.newObjProt = {'proftpd': 'apache'} - # Заголовок title - self.__titleHead = "--------------------------------------\ -----------------------------------------" - self._titleBody = "" - # Условия - self._reTermBloc = re.compile( - "#\?(?P(?:[a-z0-9_]+\.)?[a-zA-Z0-9\-_]+)" - "(?P\(((?:#-|-#|%s)+|)\))?" - "(?P[><=!&\|]+" - "(?:#-|-#|[><=!\|&\(\)%s])*)#" - "\n?(?P.+?\n*?)\n?#(?P=rTerm)#(?P[ ,\t]*\n?)" - % (self._reFunctionArgvText, self._reFunctionArgvInSquareBrackets), - re.M | re.S) - # Объект с переменными - self.objVar = objVar - self.recalculateBaseDir() - # Последняя часть директории шаблона (имя сервиса: samba, mail) - self._servDir = servDir - if self._servDir: - if self._servDir[0] != "/": - self._servDir = "/" + self._servDir - if self._servDir[-1] != "/": - self._servDir += "/" - self._servDir = os.path.split(self._servDir)[0] - # Созданные директории - self.createdDirs = [] - # Примененные файлы - self.filesApply = [] - # номер обрабатываемого файла - self.numberProcessTempl = 0 - # имя текущей программы - _nameProgram = "Calculate Utilities" - # версия текущей программы - _versionProgram = self.objVar.Get("cl_ver") - # имя и версия текущей программы - self.programVersion = "%s %s" % (_nameProgram, _versionProgram) - # Словарь директорий с количеством файлов шаблонов - self.dictTemplates = {} - # Общее количество шаблонов - self.allTemplates = 0 - # Объект функций шаблона - 
self.functObj = templateFunction(self.objVar) - self.functObj.printSUCCESS = self.printSUCCESS - self.functObj.printWARNING = self.printWARNING - self.functObj.printERROR = self.printERROR - if self.printERROR: - self.functObj.setError = self.printERROR - self.functObj.askConfirm = self.askConfirm - # Метод применения функций к шаблонам - self.applyFuncTemplate = self.functObj.applyFuncTemplate - # Объект для определения типа файла шаблона - self.typeFileObj = typeFile() - # Глобальный словарь обработанных шаблонов файлов - # {путь к конф. файлу:[имена шаблонов] ...} - self.dictProcessedTemplates = {} - if cltObj is True: - # Объект templateClt - self.cltObj = templateClt(self.objVar, self.postmergePkgs, - printSUCCESS=self.printSUCCESS, - printERROR=self.printERROR, - printWARNING=self.printWARNING, - askConfirm=self.askConfirm, - critical=self.critical) - elif cltObj: - # Объект templateClt - self.cltObj = cltObj - else: - # Объект templateClt - self.cltObj = None - # Фильтровать ли шаблоны clt по конфигурационным файлам обычных шаблонов - self.cltFilter = cltFilter - # autoupdate файлы - self.autoUpdateFiles = [] - self.autoUpdateDirs = [] - - self.protectedFiles = [ - pathJoin(self._baseDir, x) - for x in self.objVar.Get('main.cl_env_path') - ] + [self.functObj.fileConfigIni] - # список выполненных файлов - self.executedFiles = [] - - def recalculateBaseDir(self): - """Recalculate basedir and homedir""" - # Базовая директория переноса шаблонов "/mnt/calculate" или "/" и.т.д - self._baseDir = pathJoin(self.objVar.Get("cl_chroot_path"), - self.objVar.Get("cl_root_path")) - self._baseDir = os.path.normpath(self._baseDir) - self.uid, self.gid, self.homeDir = self.getDataUser() - # Домашняя директория, плюс базовая директория - self.homeDir = pathJoin(self._baseDir, self.homeDir) - if self.cltObj: - self.cltObj.recalculateBaseDir() - if self.functObj: - self.functObj.recalculateBaseDir() - - def _addFile(self, filesApl, pkg=None, slot=None): - """ - Add files to 
ChangedFiles - """ - for fn in filesApl: - if os.path.exists(fn): - self.changedFiles.addObj( - fn, ChangedFiles.FILE_MODIFIED, - pkg or self.functObj.currentBelong, - slot or self.functObj.currentBelongSlot) - else: - self.changedFiles.addObj( - fn, ChangedFiles.FILE_REMOVED, - pkg or self.functObj.currentBelong, - slot or self.functObj.currentBelongSlot) - - def execute_command(self, cmd, lang): - env = dict(os.environ) - env['TERM'] = "linux" - env['EINFO_QUIET'] = "yes" - return process(cmd, lang=lang, envdict=dict(os.environ)) - - def executeTemplate(self, code, execPath): - """Execute template""" - p = self.execute_command(execPath, self.objVar.Get('os_locale_locale')) - if "/bin/bash" in code.partition('\n')[0]: - p.write("""function translate() { -gettext -d cl_template "$*" -} -""") - p.write(code) - p.pipe.stdin.close() - for line in p.readByLine(): - if line: - line = self.translator.translate(line) - self.printSUCCESS(line.strip()) - p.pipe.wait() - if p.success(): - self.executedFiles.append((code, execPath)) - errdata = p.readerr().rstrip() - if errdata: - for line in errdata.split('\n'): - if line: - line = self.translator.translate(line) - self.printWARNING(line.strip()) - return True - else: - errdata = p.readerr().rstrip() - if errdata: - for line in errdata.split('\n'): - if line: - line = self.translator.translate(line) - self.printERROR(line.strip()) - return False - - def __octToInt(self, strOct): - """Преобразование восьмеричного в целое (ввод строка, вывод число)""" - if strOct: - try: - res = string.atoi(strOct, 8) - except ValueError: - self.setError(_("Invalid oct value: ") + str(strOct)) - return False - return res - else: - self.setError(_("Empty oct value")) - return False - - def getTemplateType(self): - """выдать тип шаблона (text, bin)""" - return self.getFileType(self.nameFileTemplate) - - def getFileType(self, fileName): - """выдать тип файла (text, bin)""" - isBin = self.typeFileObj.isBinary(fileName) - if isBin is True: - 
typeTemplate = "bin" - elif isBin is False: - typeTemplate = "text" - else: - self.setError(_("ERROR") + ": getFileType()") - self.setError(isBin) - return False - return typeTemplate - - def createDir(self, dirName, mode=False, uid=False, gid=False): - """Создает директорию""" - if os.access(dirName, os.F_OK): - return True - else: - dMode = False - prevDir, tmpSubdir = os.path.split(dirName) - createDirs = [] - while not os.access(prevDir, os.F_OK) and prevDir: - createDirs.append(prevDir) - prevDir = os.path.split(prevDir)[0] - try: - dUid, dGid = getModeFile(prevDir, "owner") - except OSError: - self.setError(_("No access to the directory: ") + prevDir) - return False - if not mode is False: - dMode = mode - if not uid is False: - dUid = uid - if not gid is False: - dGid = gid - createDirs.reverse() - for nameDir in createDirs: - try: - if dMode: - os.mkdir(nameDir, dMode) - os.chmod(nameDir, dMode) - else: - os.mkdir(nameDir) - self.chownConfDir(nameDir, dUid, dGid, None) - except OSError: - self.setError(_("Failed to create the directory: ") - + nameDir) - return False - try: - if dMode: - os.mkdir(dirName, dMode) - os.chmod(dirName, dMode) - else: - os.mkdir(dirName) - self.chownConfDir(dirName, dUid, dGid, None) - createDirs.append(dirName) - except OSError: - self.setError(_("Failed to create the directory: ") - + dirName) - return False - return createDirs - - reBlock = re.compile( - "#\?(?P(?:[a-z0-9_]+\.)?[a-zA-Z0-9\-_]+).*?#(?P=rTerm)#" - "(?:[ ,\t]*\n?)", - re.S | re.M) - - def applyTermsTemplate(self, textTemplate, nameTemplate): - """ Применяет условия, к условным блокам текста - """ - - def function(text): - """Функция обработки функций в заголовке""" - return self.applyFuncTemplate(text, nameTemplate) - - def searchBlock(s): - resS = self.reBlock.search(s) - if resS: - funcblock = resS.group() - resS = self._reTermBloc.search(textTemplateTmp[:resS.end()]) - if not resS: - raise TemplatesError( - "Wrong conditional block: %s" % funcblock) - return 
resS - - textTemplateTmp = textTemplate - resS = searchBlock(textTemplateTmp) - while resS: - mark = resS.group(0) - body = resS.group("body") - end = resS.group("end") - notbody = "" - elseblock = "#!%s#" % resS.group("rTerm") - if elseblock in body: - data = re.split("\n?%s\n?" % elseblock, body) - body = data[0] - notbody = "".join(data[1:]) + end - body = body + end - parent = resS.group("func") - if not parent: - parent = "" - term = resS.group("rTerm") + parent + \ - resS.group("lTerm") - if self._equalTerm(term, _("invalid template content: ") + \ - nameTemplate, function): - textTemplateTmp = textTemplateTmp.replace(mark, body) - else: - textTemplateTmp = textTemplateTmp.replace(mark, notbody) - resS = searchBlock(textTemplateTmp) - return textTemplateTmp - - def getNeedTemplate(self, fileTemplate): - """Применяем правила к названию файла""" - dirP, fileP = os.path.split(fileTemplate) - if fileP: - spFile = fileP.split("?") - if len(spFile) > 1: - flagTrue = False - for term in spFile[1:]: - if self._equalTerm(term, _("invalid template name: ") + \ - fileTemplate): - flagTrue = True - break - if flagTrue: - return True - else: - return False - else: - return True - else: - self.setError(_("invalid template name: ") + str(fileTemplate)) - return False - - def getTitle(self, comment, commentList, configPath=""): - """Выдает заголовок шаблона ( версия и.т.д)""" - origConfigPath = PkgContents.reCfg.sub("/", configPath) - if self._baseDir != "/": - lenBaseDir = len(self._baseDir) - commentList = [x[lenBaseDir:] - if x.startswith(self._baseDir) - else x for x in commentList] - if configPath and self.protectPaths: - for protectPath in self.protectPaths: - if self._baseDir != "/": - lenBaseDir = len(self._baseDir) - if len(configPath) > lenBaseDir and \ - configPath[:lenBaseDir] == self._baseDir: - configPath = configPath[lenBaseDir:] - if configPath.startswith(protectPath + "/"): - if not any(origConfigPath.endswith(x) for x in - ("/calculate.env", "/ini.env", 
"/custom")): - commentList = commentList + \ - [self.titleEnd % { - 'conf_path': origConfigPath}] - break - if comment: - commentFirst = comment - commentInsert = comment - commentLast = comment - flagList = False - # В случае открывающего и закрывающего комментария - if type(comment) == tuple and len(comment) == 2: - commentFirst = comment[0] - commentInsert = "" - commentLast = comment[1] - flagList = True - if flagList: - self._titleBody = commentFirst + "\n" - else: - self._titleBody = commentFirst + self.__titleHead + "\n" - z = 0 - flagFirst = True - for com in list(self._titleList) + [""] * (len(commentList)): - if com: - if flagFirst: - self._titleBody += commentInsert + " " + com + " " + \ - self.programVersion + "\n" - flagFirst = False - else: - self._titleBody += commentInsert + " " + com + "\n" - else: - self._titleBody += commentInsert + " " + \ - commentList[z] + "\n" - z += 1 - if flagList: - self._titleBody += commentLast + "\n" - else: - self._titleBody += commentLast + self.__titleHead + "\n" - return self._titleBody - else: - return "" - - def changeMergePackage(self, package): - return True - - def numberAllTemplates(self, number): - """Количество шаблонов - - Вызов происходит перед наложением шаблонов - в момент вызова в number находится количество обрабатываемых файлов - Наследуемая функция - Используется для отображения прогресса при наложениии шаблонов - """ - return True - - def numberProcessTemplates(self, number): - """Номер текущего обрабатываемого шаблона - - Вызов происходит при наложении шаблона - в момент вызова в number находится номер обрабатываемого шаблона - Наследуемая функция - Используется для отображения прогресса при наложениии шаблонов - """ - return True - - def templateModify(self): - """ - Files which created by apping templates - """ - return True - - def fixNameFileConfig(self, origfilename): - """Support ._cfg0000 files for postinst""" - # if self.objVar.Get('cl_ebuild_phase') != 'postinst': - # return origfilename - 
directory, filename = os.path.split(origfilename) - for i in range(0, 9999): - if not os.path.exists(os.path.join(directory, - "._cfg%04d_%s" % (i, filename))): - if i: - filename = os.path.join(directory, - "._cfg%04d_%s" % (i - 1, filename)) - #if not os.path.exists(origfilename): - # return origfilename - # origstat = os.stat(origfilename)[stat.ST_CTIME] - # newstat = os.stat(filename)[stat.ST_CTIME] - self.configMode = T_CFG - return filename - return origfilename - return origfilename - - def getHeaderText(self, text): - textLines = text.splitlines() - paramLine = "" - if textLines: - textLine = textLines[0] - rePar = re.compile( - "\s*#\s*calculate\s+\\\\?|\s*#\s*calculate\\\\?$", re.I) - reP = rePar.search(textLine) - if reP: - reLns = re.compile(r"\A([^\\\n]*\\\n)+[^\n]*\n*", re.M) - reLs = reLns.search(text) - if reLs: - paramLine = text[reP.end():reLs.end()] - paramLine = paramLine.replace("\\", " ") - else: - paramLine = textLine[reP.end():] - return paramLine - - def getTemplateDirs(self, dirsTemplates): - """Check template variable cl_name in first directories and files""" - skipDirs = [] - skipTemplates = [] - debug = False - for dirsTemplate in dirsTemplates: - filesAndDirs = map(lambda x: os.path.join(dirsTemplate, x), - listDirectory(dirsTemplate)) - for dirFile in filesAndDirs: - if os.path.isdir(dirFile): - flagDir = True - templatePath = os.path.join(dirFile, self.templDirNameFile) - if not os.path.isfile(templatePath): - skipDirs.append(dirFile) - continue - else: - flagDir = False - templatePath = dirFile - if os.path.isfile(templatePath): - if self.getFileType(templatePath) == "bin": - skipTemplates.append(dirFile) - else: - with open(templatePath) as f: - textTemplate = f.read() - if textTemplate: - headerLine = self.getHeaderText(textTemplate) - if headerLine: - if not debug and \ - not "cl_name==" in headerLine and \ - not "ac_" in headerLine: - if flagDir: - skipDirs.append(dirFile) - else: - skipTemplates.append(dirFile) - else: - if 
flagDir: - skipDirs.append(dirFile) - else: - skipTemplates.append(dirFile) - else: - skipTemplates.append(dirFile) - if skipDirs or skipTemplates: - # print warning - self.printWARNING(_("No conditions for checking the value of " - "an action variable")) - skipDirTemplates = [] - for skipDir in skipDirs: - skipTempl = os.path.join(skipDir, self.templDirNameFile) - if os.path.isfile(skipTempl): - skipDirTemplates.append(skipTempl) - if skipTemplates or skipDirTemplates: - self.printWARNING(_("Skipped templates:")) - for skipTemplate in skipTemplates + skipDirTemplates: - self.printWARNING(" " * 6 + skipTemplate) - if skipDirs: - self.printWARNING(_("Skipped directories:")) - for skipDir in skipDirs: - self.printWARNING(" " * 6 + skipDir) - self.printWARNING("") - self.printWARNING(_("Headers of directory templates and headers " - "of files on the first level should include " - "an action variable")) - self.printWARNING(_("Example:")) - self.printWARNING("# Calculate ac_install_merge==on") - return skipDirs + skipTemplates - - def lock_package(self, pkg, fuser=None): - pkg = pkg.partition(":")[0] - if pkg not in self._locked_packages: - category, _, pkg = pkg.partition("/") - pkglockfile = ("/var/calculate/tmp/portage/" - "{}/.{}.calculate_lockfile".format(category, pkg)) - ipcfile = ("/var/db/pkg/" - "{}/.{}*.portage_lockfile".format(category, pkg)) - if os.path.exists(pkglockfile): - fuser = fuser or FUser() - if any(fuser.search(ipcfile)): - return False - l = Locker(fn=pkglockfile, timeout=0) - if l.acquire(): - self._locked_packages[pkg] = l - return True - return False - return True - - def unlock_packages(self): - for pkg, l in self._locked_packages.items(): - if l: - l.remove() - self._locked_packages = {} - - @catch_no_space_left - @post_unlock_packages - def applyTemplates(self, progress=True, rerun=True): - """Применяет шаблоны к конфигурационным файлам""" - - def createDictTemplates(path, prefix, dictTemplates): - """Создает словарь {"директория":"кол-во 
шаблонов" ...} - - и считает общее количество шаблонов - """ - # Количество шаблонов - self.allTemplates += 1 - dirTemplate = os.path.split(path)[0] - while True: - if dirTemplate in dictTemplates.keys(): - dictTemplates[dirTemplate] += 1 - else: - dictTemplates[dirTemplate] = 1 - if dirTemplate == prefix: - break - dirTemplate = os.path.split(dirTemplate)[0] - return dictTemplates - - self.clearErrors() - self.clearWarnings() - if not self.objVar.defined("cl_template_path_use"): - self.setError(_("undefined variable: ") + "cl_template_path_use") - return False - dirsTemplates = [ - os.path.realpath(x) for x in self.objVar.Get("cl_template_path_use")] - # Созданные директории - self.createdDirs = [] - # Примененные файлы - self.filesApply = [] - # Номер применяемого шаблона - self.numberProcessTempl = 0 - # Словарь директорий с количеством файлов шаблонов - self.dictTemplates = {} - # Количество шаблонов - self.allTemplates = 0 - # Установка по умолчанию аттрибутов для функциии шаблонов ini() - # Время доступа к конфигурационному файлу функции шаблона ini() - self.functObj.timeIni = -1 - # Первоначальный словарь переменных для ini() - self.functObj.prevDictIni = {} - # Текущий словарь переменных для ini() - self.functObj.currDictIni = {} - self.functObj.currentBelong = "" - self.functObj.currentBelongSlot = "" - self.functObj.currentAction = HParams.ActionType.Merge - # Словарь времен модификации env файлов для ini() - self.functObj.timeConfigsIni = {} - if self._servDir: - tmpDirsTemplates = [] - for dirP in dirsTemplates: - dirTempl = dirP + self._servDir - if os.access(dirTempl, os.F_OK): - # Если директория существует - tmpDirsTemplates.append(dirTempl) - dirsTemplates = tmpDirsTemplates - scanObj = scanDirectory() - scanObj.processingFile = lambda x, y: createDictTemplates(x, y, - self.dictTemplates) - # Считаем количество шаблонов - dirsTemplatesExists = filter(lambda x: os.path.exists(x), dirsTemplates) - if not dirsTemplatesExists and not self.cltObj: - 
return self.createdDirs, self.filesApply - # check cl_name in first template dirs and files - skipTemplates = self.getTemplateDirs(dirsTemplatesExists) - # if not os.environ.get("EBUILD_PHASE","") and progress: - # for dirTemplate in dirsTemplatesExists: - # scanObj.scanningDirectory(dirTemplate) - # Считаем количество шаблонов clt - if self.cltObj: - # Считаем количество шаблонов clt - self.cltObj.countsNumberTemplates() - # не считать количество файлов в объекте self.cltObj - self.cltObj.checkNumberTemplate = False - # начальный номер clt шаблона - self.cltObj.numberProcessTempl = self.allTemplates - # метод показывающий номер clt шаблона - self.cltObj.numberProcessTemplates = self.numberProcessTemplates - # метод показывающий номер clt шаблона - self.cltObj.templateModify = self.templateModify - # общее количество шаблонов - self.allTemplates += self.cltObj.allTemplates - self.cltObj.allTemplates = self.allTemplates - self.numberAllTemplates(self.allTemplates) - # Обрабатываем шаблоны - locationPath = dict(self.objVar.ZipVars('main.cl_template_path', - 'main.cl_template_location')) - for dirTemplate in dirsTemplatesExists: - self.objVar.Set('cl_pass_location', - locationPath.get(dirTemplate, dirTemplate), - force=True) - if self.scanningTemplates(dirTemplate, - skipTemplates=skipTemplates) is False: - break - if self.cltObj: - self.objVar.Set('cl_pass_location', 'clt', True) - # Созданные директории - self.cltObj.createdDirs = self.createdDirs - # Примененные файлы - self.cltObj.filesApply = self.filesApply - # Словарь директорий с количеством файлов шаблонов - self.cltObj.dictTemplates = self.dictTemplates - # Количество шаблонов - self.cltObj.allTemplates = self.allTemplates - # Установка по умолчанию аттрибутов для функциии шаблонов ini() - # Время доступа к конфигурационному файлу функции шаблона ini() - self.cltObj.functObj = self.functObj - self.cltObj.protectedFiles = self.protectedFiles - # Метод применения функций к шаблонам - 
self.cltObj.applyFuncTemplate = self.functObj.applyFuncTemplate - # Словарь примененных файлов шаблонов - self.cltObj.dictProcessedTemplates = self.dictProcessedTemplates - self.cltObj.changedFiles = self.changedFiles - self.cltObj.autoUpdateFiles = self.autoUpdateFiles - self.cltObj.autoUpdateDirs = self.autoUpdateDirs - - if self.cltFilter: - # Шаблоны + .clt которые будут применены - self.cltObj.filterApplyTemplates = {} - if self.objVar.Get('cl_merge_set') == "on": - for pkg in self.objVar.Get('cl_merge_pkg'): - if not pkg: - continue - atom = list(sorted(getInstalledAtom(pkg))) - if atom: - pkgContents = PkgContents("{CATEGORY}/{PF}".format( - **atom[-1])) - for filename in pkgContents.content.keys(): - if not filename in self.cltObj.filterApplyTemplates: - self.cltObj.filterApplyTemplates[ - filename] = [] - self.cltObj.filterApplyTemplates[ - filename].append(pkg) - for filename, pkgs in self.changedFiles.data.items(): - filename = PkgContents.reCfg.sub("/", filename) - if not filename in self.cltObj.filterApplyTemplates: - self.cltObj.filterApplyTemplates[filename] = [] - pkgs = filter( - lambda x: x not in - self.cltObj.filterApplyTemplates[filename], - map(lambda x: x[0], pkgs)) - self.cltObj.filterApplyTemplates[filename].extend(pkgs) - old_mod = self.objVar.defaultModule - try: - self.objVar.defaultModule = "install" - self.cltObj.applyTemplatesClt() - finally: - self.objVar.defaultModule = old_mod - - if ((self.objVar.Get('cl_merge_pkg') or - self.objVar.Get('cl_action') in ( - "sync", "domain", "server_setup", - "undomain")) and - self.objVar.Get('cl_merge_pkg_new')): - - skip_pkglist = [] - if self.objVar.Get('cl_ebuild_phase'): - new_pkglist = [] - for pkgn in self.objVar.Get('cl_merge_pkg_new'): - if not pkgn: - continue - if self.lock_package(pkgn): - new_pkglist.append(pkgn) - else: - skip_pkglist.append(pkgn) - - self.objVar.Set('cl_merge_pkg_new', new_pkglist, force=True) - else: - new_pkglist = self.objVar.Get('cl_merge_pkg_new') - - if 
new_pkglist: - self.objVar.Set('cl_root_path', - self.objVar.Get('cl_root_path_next'), force=True) - self.recalculateBaseDir() - - self.objVar.Set('cl_merge_pkg_pass', list( - set(self.objVar.Get('cl_merge_pkg_pass')) | - set(self.objVar.Get('cl_merge_pkg')) | - set(skip_pkglist) | - set(self.objVar.Get('cl_merge_pkg_new'))), force=True) - self.objVar.Set('cl_merge_pkg', - self.objVar.Get('cl_merge_pkg_new'), force=True) - self.objVar.Set('cl_merge_pkg_new', [], force=True) - createdDirs = self.createdDirs - filesApply = self.filesApply - self.changeMergePackage(self.objVar.Get('cl_merge_pkg')) - self.applyTemplates(rerun=False) - createdDirs.extend(self.createdDirs) - filesApply.extend(self.filesApply) - self.filesApply = filesApply - self.createdDirs = createdDirs - if rerun: - if self.cltObj: - self.queueExecute.extend(self.cltObj.queueExecute) - for processor, text, nameTemplate in self.queueExecute: - if not self.executeTemplate(text, processor): - self.setError(_("Failed to execute") + _(": ") + - nameTemplate) - return False - self.queueExecute = [] - self.filesApply = list(set(self.filesApply)) - if (self.objVar.Get('cl_root_path') == '/' and - self.objVar.Get('cl_chroot_path') == '/'): - post_script = [] - if any("/etc/env.d" in x for x in self.filesApply): - post_script.append( - "LANG=C /usr/sbin/env-update --no-ldconfig") - post_script.append("source /etc/profile") - root_type = self.objVar.Get('install.os_install_root_type') - if (root_type != "livecd" and - any("/etc/locale.gen" in x for x in self.filesApply)): - post_script.append("/usr/sbin/locale-gen &>/dev/null") - self.executeTemplate("\n".join(post_script), "/bin/bash") - if self.objVar.Get('cl_protect_use_set') == 'on': - self.updateProtectedFiles() - if self.objVar.Get('cl_verbose_set') == 'on' and \ - self.filesApply: - self.verboseOutput( - filter(lambda x: not x.endswith('/ini.env'), - self.filesApply)) - self.objVar.Set('cl_merge_pkg_pass', [], force=True) - 
self.objVar.Set('cl_merge_pkg_new', [], force=True) - self.objVar.Set('cl_merge_pkg', [], force=True) - return self.createdDirs, self.filesApply - - def verboseOutput(self, filesApply): - """ - Verbose output applied templates - """ - if not filesApply: - return - self.printWARNING(_("Calculate Utilities have changed files") + _(":")) - reGrey = re.compile(r"\._cfg\d{4}_") - rootPath = self.objVar.Get('cl_root_path') - for fn in sorted(list(set(filesApply))): - if rootPath != '/' and self.objVar.Get('cl_action') == 'patch' and \ - fn.startswith(rootPath): - fn = fn[len(rootPath) + 1:] - if reGrey.search(fn): - self.printSUCCESS(" " * 5 + \ - "%s" % fn) - else: - self.printSUCCESS(" " * 5 + fn) - - def updateProtectedFiles(self): - """ - Update ._cfg0000 files - """ - if self.objVar.Get('cl_ebuild_phase') == 'compile': - return - chrootPath = self.objVar.Get('cl_chroot_path') - cfgs = getCfgFiles(self.objVar.Get('cl_config_protect'), - prefix=chrootPath) - reverse_cfgs = {y[0][1]: x for x, y in cfgs.items()} - autoUpdateDict = {} - get_digest = lambda x: hashlib.md5(readFile(x)).hexdigest() - for pkg in list(set(filter(None, list(self.changedFiles.getPkgs()) + - self.objVar.Get('cl_merge_pkg')))): - atom = list(sorted(getInstalledAtom(pkg, prefix=chrootPath))) - if atom: - pkgContents = PkgContents("{CATEGORY}/{PF}".format( - **atom[-1]), prefix=chrootPath) - protected = [] - checked_map = {} - for filename, action in self.changedFiles.getPkgFiles(pkg): - origFn = pkgContents.origFileName(filename) - if origFn in self.protectedFiles: - pkgContents.removeObject(filename) - protected.append(origFn) - continue - if action in (ChangedFiles.FILE_MODIFIED, - ChangedFiles.DIR_CREATED, - ChangedFiles.DIR_EXISTS): - orig_filename = reverse_cfgs.get(filename, None) - if orig_filename: - checked_map[orig_filename] = ( - get_digest(filename) == get_digest( - orig_filename)) - # не давать править CONTENTS - if (orig_filename is None or - not checked_map[orig_filename]): - 
pkgContents.addObject(filename) - elif action in (ChangedFiles.FILE_REMOVED, - ChangedFiles.DIR_REMOVED): - pkgContents.removeObject(filename) - files = set(map(lambda x: pathJoin(chrootPath, x), - pkgContents.content.keys()) + protected) - if (self.objVar.Get('cl_dispatch_conf') != 'usenew' and - self.objVar.Get('cl_autoupdate_set') != "on"): - notUpdate = files - set(self.autoUpdateFiles) - files &= set(self.autoUpdateFiles) - for filename in list(notUpdate & set(cfgs.keys())): - equal = checked_map.get( - filename, - get_digest(filename) == get_digest( - cfgs[filename][0][1])) - if equal: - files.add(filename) - - for filename in list(files & set(cfgs.keys())): - # get ctime from orig filename - # if os.path.exists(filename): - # ctime = os.stat(filename).st_ctime - # else: - # ctime = 0 - # if orig filename older that .cfg - cfgs[filename].sort(reverse=True) - # if ctime < cfgs[filename][0][0]: - try: - with open(filename, 'w') as f: - f.write(readFile(cfgs[filename][0][1])) - self.copy_mod_own(cfgs[filename][0][1], filename) - except Exception as e: - self.printERROR(str(e)) - self.printWARNING( - _("Failed to copy {ffrom} to {fto}").format( - ffrom=cfgs[filename][0][1], fto=filename)) - continue - autoUpdateDict[cfgs[filename][0][1]] = filename - for mtime, fn in cfgs[filename]: - try: - if os.path.exists(fn): - os.unlink(fn) - except OSError: - self.printWARNING(_("Failed to remove %s") % fn) - try: - pkgContents.writeContents() - except IOError: - self.printWARNING(_("Failed to modify %s contents") % pkg) - self.filesApply = map(lambda x: autoUpdateDict.get(x, x), - self.filesApply) - if filter(lambda x: "._cfg" in x, self.filesApply): - if self.objVar.Get('cl_ebuild_phase') != '': - self.printWARNING(_("Some config files need updating. 
" - "Perform run dispatch-conf.")) - if self.dispatchConf and \ - self.objVar.Get( - 'cl_dispatch_conf') == 'dispatch' and \ - self.objVar.Get('cl_ebuild_phase') == '': - self.dispatchConf(self.filesApply) - - def scanningTemplates(self, scanDir, prefix=None, flagDir=False, - optDir=None, skipTemplates=()): - """Сканирование и обработка шаблонов в директории scanDir""" - if optDir is None: - # ключи: HParams.OptDir.{Path,Skip,Autoupdate} - optDir = {} - ret = True - if not prefix: - prefix = os.path.realpath(scanDir) - if not flagDir: - # проверка корневой директории - retDir = self.processingDirectory(scanDir, scanDir, optDir) - if retDir is None: - return None - elif retDir is False: - return False - pathDir, objHead = retDir - optDir[HParams.OptDir.Path] = pathDir - if not objHead is True: - if objHead.typeAppend == HParams.AppendParams.Skip: - # Установка опции пропуска директории - optDir[HParams.OptDir.Skip] = True - if (HParams.Autoupdate in objHead.params or - self.objVar.Get('cl_autoupdate_set') == 'on'): - optDir[HParams.OptDir.Autoupdate] = True - if flagDir or stat.S_ISDIR(os.lstat(str(scanDir))[stat.ST_MODE]): - if not os.access(scanDir, os.R_OK | os.X_OK): - self.printWARNING(_("Failed to read templates directory %s") % - scanDir) - return False - for fileOrDir in sorted(listDirectory(scanDir)): - absPath = os.path.join(scanDir, fileOrDir) - if skipTemplates and absPath in skipTemplates: - continue - stInfo = os.lstat(str(absPath)) - statInfo = stInfo[stat.ST_MODE] - prevModule = self.objVar.defaultModule - prevBelong = self.functObj.currentBelong - prevBelongSlot = self.functObj.currentBelongSlot - prevAction = self.functObj.currentAction - try: - if stat.S_ISREG(statInfo): - if not self.processingFile(absPath, prefix, optDir): - ret = False - continue - elif stat.S_ISDIR(statInfo): - # Обработка директории - retDir = self.processingDirectory(absPath, prefix, - optDir) - if retDir is None: - continue - elif retDir is False: - ret = False - break - # 
Опции следующей директории - optNextDir = {} - pathDir, objHead = retDir - optNextDir[HParams.OptDir.Path] = pathDir - if objHead is not True: - if objHead.typeAppend == HParams.AppendParams.Skip: - # Установка опции пропуска директории - optNextDir[HParams.OptDir.Skip] = True - if (HParams.Autoupdate in objHead.params or - self.objVar.Get( - 'cl_autoupdate_set') == 'on'): - optNextDir[HParams.OptDir.Autoupdate] = True - ret = self.scanningTemplates(absPath, prefix, True, - optNextDir) - if objHead is not True: - if has_any(objHead.params, HParams.ServiceControl): - self.doServiceControl(objHead.params) - if ret is False: - break - except TemplatesError as e: - self.clearErrors() - if self.critical: - raise - else: - self.printWARNING(str(e)) - finally: - self.objVar.defaultModule = prevModule - self.functObj.currentBelong = prevBelong - self.functObj.currentBelongSlot = prevBelongSlot - self.functObj.currentAction = prevAction - return ret - - def processingFile(self, path, prefix, optFile): - """Обработка в случае шаблона файла""" - self.numberProcessTempl += 1 - self.numberProcessTemplates(self.numberProcessTempl) - # Пропуск шаблонов директорий - if self.templDirNameFile == os.path.split(path)[1]: - return True - # Проверка на переменные в названии файла - if not self.getNeedTemplate(path): - if self.getError(): - return False - return True - if self.getError(): - return False - nameFileConfig = path.partition(prefix)[2] - # файл в системе без условий - nameFileConfig = "/".join(map(lambda x: x.split("?")[0], - nameFileConfig.split("/"))) - # Записываем в переменную обрабатываемый файл - self.objVar.Set("cl_pass_file", os.path.basename(nameFileConfig)) - self.headerParams = None - filesApl = self.joinTemplate(path, nameFileConfig, optFile) - if self.headerParams: - if has_any(self.headerParams, HParams.ServiceControl): - self.doServiceControl(self.headerParams) - if self.getError(): - return False - if filesApl: - # Настоящее имя конфигурационного файла - 
nameFileConfig = filesApl[0] - # Пишем время модификации *.env файлов - if nameFileConfig.endswith(".env"): - nameEnvFile = os.path.basename(nameFileConfig) - self.functObj.timeConfigsIni[nameEnvFile] = float(time.time()) - self.filesApply += filesApl - if filesApl: - self._addFile(filesApl) - return True - - def processingDirectory(self, path, prefix, opt): - """Обработка в случае директории если возвращаем None то пропуск дир.""" - if path.endswith("/.git"): - return None - # Файл шаблона директории - if not os.access(path, os.R_OK | os.X_OK): - self.printWARNING(_("Failed to read templates directory %s") % path) - return None - dirInfoFile = os.path.join(path, self.templDirNameFile) - newDir = pathJoin(self._baseDir, path.partition(prefix)[2]) - newDir = "/".join(map(lambda x: x.split("?")[0], newDir.split("/"))) - # Применяем шаблон - pathDir, objHeadDir, createdDirs = \ - self.getApplyHeadDir(newDir, dirInfoFile, opt) - if createdDirs: - self.createdDirs += createdDirs - if os.path.isfile(pathDir): - self.printWARNING(_("{dirpath} is a file").format(dirpath=pathDir)) - self.printWARNING( - _("templates in {tempath} are skipped").format(tempath=path)) - return None - if objHeadDir: - return pathDir, objHeadDir - else: - if self.getError(): - return False - # Добавление количества файлов в пропущенной директории - if path in self.dictTemplates.keys(): - self.numberProcessTempl += self.dictTemplates[path] - return None - - def setRebuildVersion(self, prev_ver): - if "_rc73" in prev_ver: - return prev_ver - ver_nor = prev_ver.partition("-")[0] - # rc даже после уже существующего гарантирует, что версия будет - # считаться ниже - #reRc = re.compile("(.*rc)(\d+)(.*)") - #rc_match = reRc.search(ver_nor) - #if rc_match: - # rc_num = max(0, int(rc_match.group(2)) - 1) - # return "%s%d%s" % (rc_match.group(1), rc_num, rc_match.group(3)) - return "%s_rc73" % ver_nor - - def _processRebuild(self, params, templateDirFile): - """ - Обработка параметра rebuild= - :param 
params: - :param templateDirFile: - :return: - """ - if HParams.Rebuild in params: - rebuild_packages = params[HParams.Rebuild].split(',') - chroot_path = self.objVar.Get('cl_chroot_path') - for atom in rebuild_packages: - for pkg in getInstalledAtom( - atom, prefix=chroot_path): - ver = pkg["PVR"] - new_ver = self.setRebuildVersion(ver) - if ver != new_ver: - try: - fn = pathJoin(chroot_path, "var/db/pkg", - "%s-%s" % (pkg["CATEGORY/PN"], - pkg["PVR"])) - new_fn = pathJoin(chroot_path, "var/db/pkg", - "%s-%s" % (pkg["CATEGORY/PN"], - new_ver)) - shutil.copytree(fn, new_fn, symlinks=True) - shutil.rmtree(fn) - except (OSError, IOError) as e: - self.printWARNING( - _("Failed to change version of %s") % str(pkg)) - - def _processMergePostmerge(self, params, templateDirFile): - """Обработка параметров merge= , postmerge=""" - if HParams.Merge in params: - mergePkgs = params[HParams.Merge].split(',') - if self.objVar.Get('cl_action') == "config": - self.printWARNING( - _("Config action is not support '%s' parameter") - % HParams.Merge ) - return - else: - mergePkgs = [] - if HParams.PostMerge in params: - postmergePkgs = params[HParams.PostMerge].split(',') - if self.objVar.Get('cl_action') == "config": - self.printWARNING( - _("Config action is not support '%s' parameter") - % HParams.PostMerge ) - return - else: - postmergePkgs = [] - - if mergePkgs or postmergePkgs: - if self.objVar.Get('cl_ebuild_phase') == 'postinst': - for pkg in postmergePkgs: - if pkg not in self.objVar.Get('cl_merge_pkg_pass'): - self.objVar.Get('cl_merge_pkg_pass').append(pkg) - if pkg not in self.postmergePkgs: - try: - with open(self.postmergeFile, "a") as f: - f.write("%s\n" % pkg) - self.postmergePkgs.append(pkg) - except OSError: - self.printWARNING( - _("Failed to reconfigure package %s") % pkg) - else: - mergePkgs = mergePkgs + postmergePkgs - - for pkg in mergePkgs: - curlistset = set(self.objVar.Get('cl_merge_pkg_new') + - self.objVar.Get('cl_merge_pkg_pass') + - 
self.objVar.Get('cl_merge_pkg')) - if ":" not in pkg: - curlistset = {x.partition(":")[0] for x in curlistset - if x} - if pkg not in curlistset: - self.objVar.Get('cl_merge_pkg_new').append(pkg) - - def checkVfat(self, fn): - if self.mounts is None: - self.mounts = Mounts() - if self.mounts.getBy(self.mounts.TYPE, where=self.mounts.DIR, - _in=fn) in ("vfat", "ntfs-3g", "ntfs"): - return True - return False - - def checkOsError(self, e, fn): - if hasattr(e, 'errno') and e.errno == errno.EPERM: - if self.checkVfat(fn): - return True - if hasattr(e, 'errno') and e.errno == errno.EACCES and \ - "var/calculate/remote" in fn: - return True - return False - - def setUidGidError(self, fn, uid, gid, tfn=None): - """ - Установить ошибку связанную со сменой UID и GID файла - """ - import pwd, grp - - try: - userName = pwd.getpwuid(uid).pw_name - except (TypeError, KeyError): - userName = str(uid) - try: - groupName = grp.getgrgid(gid).gr_name - except (TypeError, KeyError): - groupName = str(gid) - owner = userName + ":" + groupName - if tfn: - self.setError(_("Failed to apply template file %s") % tfn) - self.setError(_("error") + " " + "'%s %s %s'" % ( - HParams.ChangeOwner, owner, fn)) - - def setModeError(self, fn, mode, tfn): - """ - Установить ошибку связанную со сменой доступа к файлу - """ - self.setError(_("Failed to apply template file %s") % tfn) - self.setError(_("error") + " " + - "'%s %s %s'" % ( - HParams.ChangeMode, str(oct(mode)), fn)) - - def chownConfDir(self, nameDirConfig, uid, gid, nameFileTemplate): - """Изменение владельца конфигурационной директории""" - try: - os.chown(nameDirConfig, uid, gid) - except (OSError, Exception) as e: - if self.checkOsError(e, nameDirConfig): - return True - self.setUidGidError( - nameDirConfig, uid, gid, nameFileTemplate) - return False - return True - - def chmodConfDir(self, nameDirConfig, mode, nameFileTemplate): - """Изменения режима доступа конфигурационного файла""" - try: - os.chmod(nameDirConfig, mode) - except 
(OSError, Exception) as e: - if self.checkOsError(e, nameDirConfig): - return True - self.setModeError(nameDirConfig, mode, nameFileTemplate) - return False - return True - - def getApplyHeadDir(self, newDir, templateDirFile, optDir): - """Применяет шаблон к директории (права, владелец, и.т. д)""" - - def function(text): - """Функция обработки функций в заголовке""" - return self.applyFuncTemplate(text, templateDirFile) - - applyDir = newDir - - # Родительская директория - if optDir.get(HParams.OptDir.Path): - path = optDir[HParams.OptDir.Path] - else: - if applyDir == self._baseDir: - path = os.path.dirname(self._baseDir) - else: - path = os.path.split(applyDir)[1] - path = pathJoin(self._baseDir, path) - if not os.path.exists(templateDirFile): - if applyDir != self._baseDir: - applyDir = os.path.join(path, os.path.split(applyDir)[1]) - # Фильтрация шаблонов по названию директории - realPath = os.path.join("/", applyDir.partition(self._baseDir)[2]) - if realPath in self.dirsFilter: - return "", False, [] - # Создаем директорию если необходимо - crDirs = self.createDir(applyDir, False, self.uid, self.gid) - if not crDirs: - return "", False, [] - if HParams.OptDir.Autoupdate in optDir: - self.autoUpdateDirs.append(applyDir) - if crDirs is True: - return applyDir, True, [] - else: - return applyDir, True, crDirs - try: - self.objVar.Set("cl_pass_file", - os.path.basename(os.path.dirname(templateDirFile))) - with open(templateDirFile) as FD: - textTemplate = FD.readline().rstrip() - buf = textTemplate - while buf and textTemplate.endswith('\\'): - buf = FD.readline() - textTemplate = "%s %s" % (textTemplate[:-1], buf.rstrip()) - except IOError: - self.setError(_("Failed to open the template") + _(": ") + - templateDirFile) - return "", False, [] - - headerLine = self.getHeaderText(textTemplate) - if headerLine: - envparam = "%s=" % HParams.Environ - moduleParam = filter(lambda x: x.startswith(envparam), - headerLine.split()) - if moduleParam: - 
self.objVar.defaultModule = moduleParam[0].partition('=')[2] - try: - importlib.import_module( - "calculate.%s.variables" % self.objVar.defaultModule) - except (ImportError, AttributeError): - return "", False, [] - # Заменяем переменные на их значения - # textTemplate = self.applyVarsTemplate(textTemplate, templateDirFile) - - # Заменяем функции на их значения - # textTemplate = self.applyFuncTemplate(textTemplate, templateDirFile) - # Обработка заголовка - objHead = dirHeader(templateDirFile, textTemplate, self.objVar, - function, templateObj=self) - signs = {'ac_install_patch==on': HParams.ActionType.Patch, - 'ac_desktop_profile==on': HParams.ActionType.Profile} - for sign in signs: - if sign in textTemplate: - self.functObj.currentAction = signs[sign] - break - # Директория с профилями не будет применена - if not objHead.headerTerm: - if objHead.getError(): - self.setError(_("Incorrect template") + _(": ") + - templateDirFile) - return "", False, [] - - # add packeges for reconfigure - self._processMergePostmerge(objHead.params, templateDirFile) - self._processRebuild(objHead.params, templateDirFile) - # Пропускаем директорию - if objHead.typeAppend == HParams.AppendParams.Skip: - applyDir = path - return applyDir, objHead, [] - - # Изменяем название родительской директории - if HParams.Path in objHead.params: - path = objHead.params[HParams.Path] - if path and path[0] == "~": - # Получаем путь с заменой ~ на директорию пользователя - path = os.path.join(self.homeDir, - path.partition("/")[2], "")[:-1] - elif not path or path and path[0] != "/": - self.setError( - (_("Wrong value '%s' in the template") % HParams.Path) + - _(": ") + templateDirFile) - return "", False, [] - else: - path = pathJoin(self._baseDir, path) - - # Изменяем название директории - if HParams.Name in objHead.params: - nameDir = objHead.params[HParams.Name] - if "/" in nameDir or nameDir == ".." 
or nameDir == ".": - self.setError( - (_("Wrong value '%s' in the template") % HParams.Name) + - _(": ") + templateDirFile) - return "", False, [] - # Новый путь к директории - applyDir = pathJoin(path, nameDir) - else: - applyDir = pathJoin(path, os.path.split(applyDir)[1]) - - # Фильтрация шаблонов по названию директории - realPath = os.path.join("/", applyDir.partition(self._baseDir)[2]) - if realPath in self.dirsFilter: - return "", False, [] - if HParams.DirectoryLink in objHead.params: - if objHead.typeAppend not in HParams.AppendParams.LinkDirCompatible: - self.setError( - _("Option '%(opt)s' should be used with %(appends)s only") % - {'opt': HParams.DirectoryLink, - 'appends': ",".join( - "%s=%s" % (HParams.Append, x) - for x in HParams.AppendParams.LinkDirCompatible) - }) - return "", False, [] - # Удаляем директорию - if objHead.typeAppend == HParams.AppendParams.Remove: - if os.path.isdir(applyDir): - self.changedFiles.addObj(applyDir, ChangedFiles.DIR_REMOVED, - self.functObj.currentBelong, - self.functObj.currentBelongSlot) - # удаляем директорию - try: - removeDir(applyDir) - except OSError: - self.setError(_("Failed to delete the directory: ") + \ - applyDir) - return "", False, [] - - # Очищаем директорию - if objHead.typeAppend == HParams.AppendParams.Clear: - if os.path.isdir(applyDir): - for rmPath in os.listdir(applyDir): - removePath = pathJoin(applyDir, rmPath) - if os.path.isdir(removePath): - # удаляем директорию - try: - removeDir(removePath) - except OSError: - self.setError( - _("Failed to delete the directory: ") + \ - removePath) - else: - try: - os.unlink(removePath) - except OSError: - self.setError( - _("Failed to delete: ") + removePath) - return "", False, [] - - # Созданные директории - createdDirs = [] - # chmod - изменяем права - mode = None - if HParams.ChangeMode in objHead.params: - mode = self.__octToInt(objHead.params[HParams.ChangeMode]) - if mode: - if not os.path.exists(applyDir): - crDirs = self.createDir(applyDir, 
mode, self.uid, self.gid) - if not crDirs: - return "", False, [] - if not crDirs is True: - createdDirs += crDirs - else: - self.chmodConfDir(applyDir, mode, templateDirFile) - else: - self.setError( - (_("Wrong value '%s' in the template") - % HParams.ChangeMode) + _(": ") + templateDirFile) - return "", False, [] - # chown - изменяем владельца и группу - owner_uid, owner_gid = None, None - if HParams.ChangeOwner in objHead.params: - owner = objHead.params[HParams.ChangeOwner] - if owner: - if ":" in owner: - strUid, strGid = owner.split(":") - if strUid.isdigit(): - owner_uid = int(strUid) - else: - owner_uid = self.getUidFromPasswd(strUid) - import pwd - - try: - if owner_uid is None: - owner_uid = pwd.getpwnam(strUid).pw_uid - except (KeyError, TypeError): - self.setError(_("No such user on the system: ") - + strUid) - self.setError( - (_("Wrong value '%s' in the template") - % HParams.ChangeOwner) + _(": ") + templateDirFile) - return "", False, [] - if strGid.isdigit(): - owner_gid = int(strGid) - else: - owner_gid = self.getGidFromGroup(strGid) - import grp - try: - if owner_gid is None: - owner_gid = grp.getgrnam(strGid).gr_gid - except (KeyError, TypeError): - self.setError(_("Group not found on the system: ") - + strGid) - self.setError( - (_("Wrong value '%s' in the template") - % HParams.ChangeOwner) + _(": ") + templateDirFile) - return "", False, [] - - if not os.path.exists(applyDir): - crDirs = self.createDir(applyDir, False, owner_uid, - owner_gid) - if not crDirs: - return "", False, [] - if not crDirs is True: - createdDirs += crDirs - else: - if not self.chownConfDir(applyDir, owner_uid, owner_gid, - templateDirFile): - return "", False, [] - else: - self.setError( - (_("Wrong value '%s' in the template") - % HParams.ChangeOwner) + _(": ") + templateDirFile) - return "", False, [] - else: - self.setError( - (_("Wrong value '%s' in the template") - % HParams.ChangeOwner) + _(": ") + templateDirFile) - return "", False, [] - else: - # 
Устанавливаем владельцем директории, пользователя по умолчанию - # (переменная шаблона ur_login) - if os.path.exists(applyDir): - self.changedFiles.addObj(applyDir, ChangedFiles.DIR_EXISTS, - self.functObj.currentBelong, - self.functObj.currentBelongSlot) - tUid, tGid = getModeFile(applyDir, mode="owner") - if (self.uid, self.gid) != (tUid, tGid): - if not self.chownConfDir(applyDir, self.uid, self.gid, - templateDirFile): - return "", False, [] - else: - self.changedFiles.addObj(applyDir, ChangedFiles.DIR_CREATED, - self.functObj.currentBelong, - self.functObj.currentBelongSlot) - crDirs = self.createDir(applyDir, False, self.uid, self.gid) - if not crDirs: - return "", False, [] - if crDirs is not True: - createdDirs += crDirs - if HParams.DirectoryLink in objHead.params: - templateFile = objHead.params[HParams.DirectoryLink] - templateFile = pathJoin(self._baseDir, templateFile) - if not os.path.isdir(templateFile): - self.setError(_("Source path %s is not a directory") - % templateFile) - return "", False, [] - try: - if objHead.typeAppend == HParams.AppendParams.Replace: - for fn in listDirectory(applyDir, fullPath=True): - if os.path.isdir(fn): - shutil.rmtree(fn) - elif os.path.isfile(fn) or os.path.islink(fn): - os.unlink(fn) - for fn in listDirectory(templateFile, fullPath=True): - applyFn = os.path.join(applyDir, os.path.basename(fn)) - if os.path.isfile(fn): - shutil.copy2(fn, applyFn) - elif os.path.islink(fn): - os.symlink(os.readlink(fn), applyFn) - elif os.path.isdir(fn): - dir_sync(fn, applyFn) - except (IOError, OSError) as e: - self.setError(_("Failed to synchronize directory " - "{dn}: {error}").format(dn=templateFile, - error=str(e))) - return "", False, [] - if not objHead: - applyDir = "" - if applyDir: - if ((HParams.OptDir.Autoupdate in optDir or - HParams.Autoupdate in objHead.params) and - not self.objVar.Get('cl_merge_pkg_pass')): - self.autoUpdateDirs.append(applyDir) - return applyDir, objHead, createdDirs - - def getUidFromPasswd(self, 
strUid): - """Get uid by username from chroot passwd file""" - passwdFile = os.path.join(self._baseDir, 'etc/passwd') - if os.path.exists(passwdFile): - with open(passwdFile, 'r') as f: - mapUid = dict( - filter(lambda x: x and len(x) > 1 and x[0] and x[1], - map(lambda x: x.split(':')[0:3:2], - filter(lambda x: not x.startswith('#'), - f)))) - if strUid in mapUid: - return int(mapUid[strUid]) - return None - - def getGidFromGroup(self, strGid): - """Get gid by groupname from chroot group file""" - groupFile = os.path.join(self._baseDir, 'etc/group') - if os.path.exists(groupFile): - with open(groupFile, 'r') as f: - mapGid = dict( - filter(lambda x: x and len(x) > 1 and x[0] and x[1], - map(lambda x: x.split(':')[0:3:2], - filter(lambda x: not x.startswith('#'), - f)))) - if strGid in mapGid: - return int(mapGid[strGid]) - return None - - def checkOnNewConfigName(self, pathFile): - """ - Check on need update and return pathFile - """ - # if file in PROTECT_MASK or not in PROTECT - chrootPath = self.objVar.Get('cl_chroot_path') - if not filter(pathFile.startswith, - map(lambda x: pathJoin(chrootPath, x), - self.objVar.Get('cl_config_protect'))) or \ - filter(pathFile.startswith, - map(lambda x: pathJoin(chrootPath, x), - self.objVar.Get('cl_config_protect_mask'))): - return pathFile - # if file was already modified by templates - if pathFile in self.changedFiles.data.keys(): - return pathFile - # if using already created ._cfg file - if self.configMode != T_ORIGIN: - return pathFile - # not current package file - pkg = self.functObj.currentBelong - slot = self.functObj.currentBelongSlot - if not pkg: - if not self.allContents: - fillContents(self.allContents, - self.objVar.Get('cl_config_protect'), - prefix=self.objVar.Get('cl_chroot_path')) - origName = pathFile if chrootPath == '/' \ - else pathFile[len(chrootPath):] - if origName in self.allContents: - pkg = self.allContents[origName] - else: - return pathFile - if slot: - pkgslot = "{}:{}".format(pkg,slot) - 
else: - pkgslot = pkg - atom = list(sorted(getInstalledAtom(pkgslot, prefix=chrootPath))) - if not atom: - return pathFile - if checkContents("{CATEGORY}/{PF}".format(**atom[-1]), - pathFile, - prefix=chrootPath, - reservedFile='/var/lib/calculate/-CONTENTS-{PN}'.format( - **atom[-1]) \ - if self.objVar.Get('cl_ebuild_phase') == 'postinst' \ - else None): - return pathFile - real_filename = os.path.basename(pathFile) - real_dirname = os.path.dirname(pathFile) - self.configMode = T_NEWCFG - return os.path.join(real_dirname, "._cfg0000_%s" % real_filename) - - def chownConfFile(self, nameFileConfig, uid, gid, nameFileTemplate, - checkExists=True): - """Изменение владельца конфигурационного файла""" - try: - if checkExists and not os.path.exists(nameFileConfig): - # Создание файла - open(nameFileConfig, "w").close() - os.lchown(nameFileConfig, uid, gid) - except (OSError, Exception) as e: - if self.checkOsError(e, nameFileConfig): - return True - self.setUidGidError( - nameFileConfig, uid, gid, nameFileTemplate) - return False - return True - - def chmodConfFile(self, nameFileConfig, mode, nameFileTemplate, - checkExists=True): - """Изменения режима доступа конфигурационного файла""" - try: - if checkExists and not os.path.exists(nameFileConfig): - # Создание файла - open(nameFileConfig, "w").close() - os.chmod(nameFileConfig, mode) - except (OSError, Exception) as e: - if self.checkOsError(e, nameFileConfig): - return True - self.setModeError(nameFileConfig, mode, nameFileTemplate) - return False - return True - - def getApplyHeadTemplate(self, nameFileTemplate, nameFileConfig, - templateFileType, optFile): - """Применяет заголовок к шаблону (права, владелец, и.т. 
д)""" - - def function(text): - """Функция обработки функций в заголовке""" - return self.applyFuncTemplate(text, nameFileTemplate) - - def preReturn(pathProg): - """Действия перед выходом из метода""" - if pathProg: - os.chdir(pathProg) - - self.closeFiles() - pathProg = "" - self.executeType = None - # Файлы в системе к которым были применены шаблоны - # В случае бинарного типа файла читаем шаблон - if templateFileType == "bin": - self.nameFileTemplate = os.path.abspath(nameFileTemplate) - self.F_TEMPL = self.openTemplFile(self.nameFileTemplate) - if not self.F_TEMPL: - self.setError(_("Failed to open the template") + _(": ") + - self.nameFileTemplate) - return [], False - self.textTemplate = self.F_TEMPL.read() - self.closeTemplFile() - objHeadNew = fileHeader(nameFileTemplate, self.textTemplate, False, - templateFileType, objVar=self.objVar, - function=function, templateObj=self) - # файл шаблона не будет применен - if not objHeadNew.headerTerm: - if objHeadNew.getError(): - self.setError(_("Incorrect template") + _(": ") + - nameFileTemplate) - return [], False - else: - self.headerParams = objHeadNew.params - - # add packeges for reconfigure - self._processMergePostmerge(objHeadNew.params, nameFileTemplate) - self._processRebuild(objHeadNew.params, nameFileTemplate) - - # Родительская директория - path = optFile[HParams.OptDir.Path] - # Изменяем название родительской директории - if HParams.Path in objHeadNew.params: - path = objHeadNew.params[HParams.Path] - if path and path[0] == "~": - # Получаем путь с заменой ~ на директорию пользователя - path = os.path.join( - self.homeDir, path.partition("/")[2], "")[:-1] - elif not path or path and path[0] != "/": - self.setError( - (_("Wrong value '%s' in the template") % HParams.Path) + - _(": ") + nameFileTemplate) - return [], False - else: - path = pathJoin(self._baseDir, path) - - # Путь к оригинальному файлу - pathOldFile - # Изменяем путь к оригинальному файлу - if HParams.Name in objHeadNew.params: - 
nameFile = objHeadNew.params[HParams.Name] - if "/" in nameFile or nameFile == ".." or nameFile == ".": - self.setError( - (_("Wrong value '%s' in the template") % HParams.Name) + - _(": ") + nameFileTemplate) - return [], False - # Новый путь к оригинальному файлу - pathOldFile = pathJoin(path, nameFile) - else: - pathOldFile = pathJoin(path, os.path.split(nameFileConfig)[1]) - pathOrigFile = pathOldFile - self.nameFileConfigOrig = pathOrigFile - if self.objVar.Get('cl_protect_use_set') == 'on': - pathOldFile = self.fixNameFileConfig(pathOldFile) - pathOldFile = self.checkOnNewConfigName(pathOldFile) - # буффер для использование в link= - newBuffer = None - applyFiles = [pathOldFile] - # Фильтрация шаблонов по названию файла - realPath = os.path.join("/", pathOldFile.partition(self._baseDir)[2]) - if realPath in self.filesFilter: - return [], False - typeAppendTemplate = objHeadNew.typeAppend - - # Параметр exec - if (HParams.RunPost in objHeadNew.params or - HParams.RunNow in objHeadNew.params): - if HParams.RunPost in objHeadNew.params: - paramName = HParams.RunPost - self.executeType = HParams.ExecuteType.Post - else: - paramName = HParams.RunNow - self.executeType = HParams.ExecuteType.Now - execPath = objHeadNew.params[paramName] - if not os.access(execPath, os.X_OK): - self.setError( - _("Wrong value '%s' in the template") % paramName + - _(": ") + nameFileTemplate) - self.setError(_("Failed to execute %s") % execPath) - return [], False - if typeAppendTemplate == HParams.AppendParams.Join: - self.setError( - (_("Wrong value '{var}={val}' in template").format - (var=HParams.Append, val=HParams.AppendParams.Join)) + - _(": ") + nameFileTemplate) - return [], False - - # Очищаем оригинальный файл - if typeAppendTemplate == HParams.AppendParams.Clear: - try: - with open(pathOldFile, "w") as f: - f.truncate(0) - except IOError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError(_("Failed to clear the file") + _(": ") + - 
pathOldFile) - return applyFiles, False - # Удаляем оригинальный файл - if typeAppendTemplate == HParams.AppendParams.Remove: - if HParams.Force in objHeadNew.params: - pathOldFile = pathOrigFile - self.configMode = T_ORIGIN - try: - if os.path.islink(pathOldFile): - # удаляем ссылку - try: - os.unlink(pathOldFile) - return applyFiles, False - except OSError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError(_("Failed to delete the link") + _(": ") + - pathOldFile) - return [], False - if os.path.isfile(pathOldFile) and self.configMode == T_ORIGIN: - # удаляем файл - try: - os.remove(pathOldFile) - return applyFiles, False - except OSError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError(_("Failed to delete the file") + _(": ") + - pathOldFile) - return [], False - finally: - pattern = "%s/._cfg????_%s" % (os.path.dirname(pathOrigFile), - os.path.basename(pathOrigFile)) - for fn in glob.glob(pattern): - try: - os.unlink(fn) - except OSError: - pass - if self.functObj.currentBelong: - self.changedFiles.addObj(pathOrigFile, - ChangedFiles.FILE_REMOVED, - self.functObj.currentBelong, - self.functObj.currentBelongSlot) - return [], False - # Пропускаем обработку шаблона - elif typeAppendTemplate == HParams.AppendParams.Skip: - return [], False - - # Создаем директорию для файла если ее нет - if not os.path.exists(path): - if not self.createDir(path): - return [], False - - # создаём пустой файл если его нет для sqlite - if objHeadNew.fileType in HParams.Formats.Modificator: - try: - if not os.path.exists(pathOrigFile): - with open(pathOrigFile, "w") as f: - f.truncate(0) - except IOError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError(_("Failed to create the file") + _(": ") + - pathOrigFile) - # В случае force - if (HParams.Force in objHeadNew.params and - objHeadNew.fileType not in HParams.Formats.Executable): - if os.path.islink(pathOldFile): - # удаляем ссылку 
- newBuffer = "" - try: - os.unlink(pathOldFile) - except OSError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError(_("Failed to delete the link") + _(": ") + - pathOldFile) - return [], False - if os.path.isfile(pathOldFile): - # удаляем файл - newBuffer = "" - try: - os.remove(pathOldFile) - except OSError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError(_("Failed to delete the file") + _(": ") + - pathOldFile) - return [], False - - flagSymlink = False - # Если есть параметр mirror - if (HParams.Mirror in objHeadNew.params and - objHeadNew.fileType not in HParams.Formats.Executable): - if HParams.Link in objHeadNew.params: - templateFile = objHeadNew.params[HParams.Link] - if templateFile and templateFile[0] == "~": - # Получаем директорию пользователя - templateFile = os.path.join( - self.homeDir, templateFile.partition("/")[2], "")[:-1] - templateFile = pathJoin(self._baseDir, templateFile) - if (not os.path.exists(templateFile) or - not objHeadNew.params[HParams.Link]): - if os.path.exists(pathOldFile): - try: - os.remove(pathOldFile) - except OSError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError( - _("Failed to delete the file") + _(": ") + - pathOldFile) - return [], False - elif not os.path.exists(pathOldFile): - return [], False - # Если есть указатель на файл шаблона (link) - if (HParams.Link in objHeadNew.params and - objHeadNew.fileType not in HParams.Formats.Executable and - HParams.Symbolic not in objHeadNew.params): - templateFile = objHeadNew.params[HParams.Link] - if templateFile and templateFile[0] == "~": - # Получаем директорию пользователя - templateFile = os.path.join( - self.homeDir, templateFile.partition("/")[2], "")[:-1] - templateFile = pathJoin(self._baseDir, templateFile) - foundTemplateFile = os.path.exists(templateFile) - buff = None - fMode, fUid, fGid = None, None, None - if foundTemplateFile and 
objHeadNew.params[HParams.Link]: - try: - F_CONF = self.openTemplFile(templateFile) - if not F_CONF: - raise IOError - buff = F_CONF.read() - F_CONF.close() - fMode, fUid, fGid = getModeFile(templateFile) - except (OSError, IOError): - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError(_("Failed to open the file") + _(": ") + - templateFile) - return [], False - if os.path.exists(pathOldFile): - newBuffer = "" - try: - os.remove(pathOldFile) - except OSError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError(_("Failed to delete the file") + _(": ") + - pathOldFile) - return [], False - if buff is not None: - try: - with open(pathOldFile, "w+") as FD: - newBuffer = buff - FD.write(buff) - except IOError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError(_("Failed to create the file") + " '%s'" \ - % pathOldFile) - return [], False - oMode = getModeFile(pathOldFile, mode="mode") - # Если права не совпадают, меняем права - if fMode != oMode: - if not self.chmodConfFile( - pathOldFile, fMode, nameFileTemplate, - checkExists=False): - return [], False - - # Если символическая ссылка - prevOldFile = None - if HParams.Symbolic in objHeadNew.params: - prevOldFile = pathOldFile - pathOldFile = objHeadNew.params[HParams.Link] - flagSymlink = True - if not pathOldFile: - raise TemplatesError( - _("Missed source link in template '%s'") - % str(nameFileTemplate)) - if not "/" == pathOldFile[0]: - pathLink = os.path.split(os.path.abspath(prevOldFile))[0] - pathProg = os.getcwd() - try: - os.chdir(pathLink) - except OSError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError( - _("Failed to change the current directory to") + \ - " " + pathLink) - return [], False - - # chmod - изменяем права - if HParams.ChangeMode in objHeadNew.params: - mode = self.__octToInt(objHeadNew.params[HParams.ChangeMode]) - if mode: - if not 
self.chmodConfFile(pathOldFile, mode, nameFileTemplate): - preReturn(pathProg) - return [], False - else: - self.setError( - (_("Wrong value '%s' in the template") - % HParams.ChangeMode) + _(": ") + nameFileTemplate) - preReturn(pathProg) - return [], False - # chown - изменяем владельца и группу - if HParams.ChangeOwner in objHeadNew.params: - owner = objHeadNew.params[HParams.ChangeOwner] - if owner: - if ":" in owner: - strUid, strGid = owner.split(":") - if strUid.isdigit(): - uid = int(strUid) - else: - uid = self.getUidFromPasswd(strUid) - import pwd - - try: - if uid is None: - uid = pwd.getpwnam(strUid).pw_uid - except (KeyError, TypeError): - self.setError(_("No such user on the system: ") + - strUid) - self.setError((_("Wrong value '%s' in the template") - % HParams.ChangeOwner) + _(": ") - + nameFileTemplate) - preReturn(pathProg) - return [], False - if strGid.isdigit(): - gid = int(strGid) - else: - gid = self.getGidFromGroup(strGid) - try: - if gid is None: - import grp - - gid = grp.getgrnam(strGid).gr_gid - except (KeyError, TypeError): - self.setError(_("Group not found on the system: ") + - strGid) - self.setError((_("Wrong value '%s' in the template") - % HParams.ChangeOwner) + _(": ") - + nameFileTemplate) - preReturn(pathProg) - return [], False - # Изменяем владельца файла - if not self.chownConfFile(pathOldFile, uid, gid, - nameFileTemplate): - preReturn(pathProg) - return [], False - else: - self.setError((_("Wrong value '%s' in the template") - % HParams.ChangeOwner) + _(": ") - + nameFileTemplate) - preReturn(pathProg) - return [], False - else: - self.setError((_("Wrong value '%s' in the template") - % HParams.ChangeOwner) + _(": ") - + nameFileTemplate) - preReturn(pathProg) - return [], False - if not flagSymlink: - self.openFiles(nameFileTemplate, pathOldFile, objHeadNew.fileType, - newBuffer) - if self.getError(): - return [], False - if HParams.ChangeOwner not in objHeadNew.params: - # Устанавливаем владельцем конфигурационного 
файла, - # пользователя по умолчанию (переменная шаблона ur_login) - if os.path.exists(pathOldFile): - tUid, tGid = getModeFile(pathOldFile, mode="owner") - if (self.uid, self.gid) != (tUid, tGid): - # Изменяем владельца файла - if not self.chownConfFile( - pathOldFile, self.uid, self.gid, nameFileTemplate, - checkExists=False): - preReturn(pathProg) - return [], False - if flagSymlink: - if os.path.exists(prevOldFile) or os.path.islink(prevOldFile): - try: - if os.path.islink(prevOldFile): - # если ссылка то удаляем её - os.unlink(prevOldFile) - else: - # иначе удаляем файл - os.remove(prevOldFile) - except OSError: - self.setError(_("Template error") + _(": ") + - nameFileTemplate) - self.setError(_("Failed to delete the file") + _(": ") + - prevOldFile) - preReturn(pathProg) - return [], False - if not "/" == pathOldFile[0]: - applyFiles = [ - prevOldFile] # ,os.path.join(pathLink,pathOldFile)] - else: - applyFiles = [prevOldFile] # ,pathOldFile] - try: - os.symlink(pathOldFile, prevOldFile) - except OSError: - self.setError(_("Template error") + _(": ") + nameFileTemplate) - self.setError(_("Failed to create a symbolic link") + _(": ") + - "%s -> %s" % (prevOldFile, pathOldFile)) - preReturn(pathProg) - return [], False - if not objHeadNew.body.strip(): - preReturn(pathProg) - if HParams.Protected in objHeadNew.params: - self.protectedFiles += applyFiles - return applyFiles, False - else: - applyFiles = [pathOldFile] - preReturn(pathProg) - if HParams.Protected in objHeadNew.params: - self.protectedFiles += applyFiles - if ((HParams.OptDir.Autoupdate in optFile - or HParams.Autoupdate in objHeadNew.params) and - not self.objVar.Get('cl_merge_pkg_pass')): - reCfg = re.compile(r"/._cfg\d{4}_", re.S) - self.autoUpdateFiles += map(lambda x: reCfg.sub('/', x), applyFiles) - if pathOldFile not in self.dictProcessedTemplates: - self.dictProcessedTemplates[pathOldFile] = [] - self.dictProcessedTemplates[pathOldFile].append(nameFileTemplate) - # Если файлы заменяются не 
нужно их обрабатывать дальше - if (HParams.AppendParams.Replace == typeAppendTemplate and - HParams.Symbolic not in objHeadNew.params and - HParams.Link in objHeadNew.params): - return applyFiles, False - return applyFiles, objHeadNew - - def doServiceControl(self, params): - """ - Выполнить действие над сервисом - :param params: параметры заголовка шаблонов - :return: - """ - command_action_map = { - HParams.RestartService: "restart", - HParams.StopService: "stop", - HParams.StartService: "start" - } - command_action_messages = { - HParams.RestartService: _("Service %s has been restarted"), - HParams.StopService: _("Service %s has been stopped"), - HParams.StartService: _("Service %s has been started") - } - command_action_error = { - HParams.RestartService: _("Failed to restart %s service"), - HParams.StopService: _("Failed to stop %s service"), - HParams.StartService: _("Failed to start %s service") - } - for param in HParams.ServiceControl: - if param in params: - service_list = filter(None, params[param].split(',')) - command_action = command_action_map[param] - for service in service_list: - try: - p = process("/etc/init.d/%s" % service, command_action) - if p.success(): - self.printSUCCESS( - command_action_messages[param] % service) - else: - self.printERROR( - command_action_error[param] % service) - for line in p.readerr().strip().split('\n'): - self.printERROR(line) - except FilesError as e: - self.printERROR( - command_action_error[param] % service) - self.printERROR(str(e)) - - def createNewClass(self, name, bases, attrs=None): - """Создает объект нового класса - - createNewClass(self, name, bases, attrs) - name - имя класса - str, - bases - cписок наследуемых классов - (tuple), - attrs - аттрибуты класса - {dict} - """ - if attrs is None: - attrs = {} - - class newMethod(object): - # Объединяем конфигурации - def join(self, newObj): - if newObj.__class__.__name__ == self.__class__.__name__: - if hasattr(self, "docObj"): - 
self.docObj.joinDoc(newObj.doc) - # Пост обработка - if hasattr(self, "postXML"): - self.postXML() - - attrsNew = {"configName": name} - if attrs: - attrsNew.update(attrs) - newCl = type(name, bases + (newMethod, object,), attrsNew) - return newCl - - def fileIsUtf(self, fileName, data=None): - """Проверяет файл на кодировку UTF-8""" - if os.path.isfile(fileName): - if data is None: - with open(os.path.abspath(fileName), 'r') as FD: - data = FD.read(1) + FD.read() - try: - data.decode("UTF-8") - except UnicodeDecodeError: - return False - return True - return False - - def joinTemplate(self, nameFileTemplate, nameFileConfig, optFile=None): - """Объединения шаблона и конф. файла - - join(nameFileTemplate, nameFileConfig, ListOptTitle) - Объединение шаблона nameFileTemplate и конф. файла nameFileConfig, - ListOptTitle - список строк которые добавятся в заголовок - optFile = опции для шаблона - """ - if optFile is None: - optFile = {} - # Выполняем условия для блока текста а так-же заменяем переменные - self.nameFileTemplate = os.path.abspath(nameFileTemplate) - self.F_TEMPL = self.openTemplFile(self.nameFileTemplate) - origTextTemplate = self.F_TEMPL.read() - self.textTemplate = origTextTemplate - self.configMode = T_ORIGIN - self.closeTemplFile() - # Флаг копирования шаблона в конфигурационный файл - flagCopyTemplate = True - # Тип шаблона бинарный или текстовый - if self.textTemplate[:11] == "# Calculate": - templateFileType = "text" - else: - templateFileType = self.getTemplateType() - headerLine = self.getHeaderText(self.textTemplate) - if headerLine: - envparam = "%s=" % HParams.Environ - moduleParam = filter(lambda x: x.startswith(envparam), - headerLine.split()) - if moduleParam: - self.objVar.defaultModule = moduleParam[0].partition('=')[2] - try: - importlib.import_module( - "calculate.%s.variables" % self.objVar.defaultModule) - except (ImportError, AttributeError): - return [] - - if not optFile: - optFile = {"path": os.path.split(nameFileConfig)[0]} - - 
filesApply, objHeadNew = self.getApplyHeadTemplate(nameFileTemplate, - nameFileConfig, - templateFileType, - optFile) - if not objHeadNew: - return filesApply - if filesApply and not filter(lambda x: "calculate/ini.env" in x, - filesApply): - self.templateModify() - - if templateFileType != "bin": - # Вычисляем условные блоки - - objHeadNew.body = self.applyTermsTemplate(objHeadNew.body, - nameFileTemplate) - # Заменяем переменные на их значения - objHeadNew.body = self.applyVarsTemplate(objHeadNew.body, - nameFileTemplate) - flagCopyTemplate = False - # Вычисляем функции - objHeadNew.body = self.applyFuncTemplate(objHeadNew.body, - nameFileTemplate) - # Настоящее имя конфигурационного файла - nameFileConfig = filesApply[0] - # Флаг - кодировка с бинарными примесями у файла шаблона включаем при - # условии текстового файла и кодировки отличной от UTF-8 - flagNotUtf8New = False - # Флаг - кодировка с бинарными примесями у оригинального файла - flagNotUtf8Old = False - if not flagCopyTemplate: - # проверяем кодировку шаблона - if not self.fileIsUtf(nameFileTemplate, data=origTextTemplate): - flagNotUtf8New = True - if not (HParams.Link in objHeadNew.params and - HParams.Symbolic in objHeadNew.params): - # проверяем кодировку оригинального файла - if not self.fileIsUtf(nameFileConfig): - flagNotUtf8Old = True - self.textTemplate = objHeadNew.body - # Список примененных шаблонов - ListOptTitle = [] - if nameFileConfig in self.dictProcessedTemplates: - ListOptTitle = self.dictProcessedTemplates[nameFileConfig] - # Титл конфигурационного файла - title = "" - if ListOptTitle: - title = self.getTitle(objHeadNew.comment, ListOptTitle, - configPath=nameFileConfig) - title = title.encode("UTF-8") - - objHeadOld = False - if objHeadNew.comment: - objHeadOld = fileHeader(nameFileConfig, self.textConfig, - objHeadNew.comment) - elif (objHeadNew.fileType and - objHeadNew.typeAppend in (HParams.AppendParams.Before, - HParams.AppendParams.After)): - configFileType = 
self.getFileType(nameFileConfig) - objHeadOld = fileHeader(nameFileConfig, self.textConfig, - fileType=configFileType) - # Строка вызова скрипта (#!/bin/bash ...) - execStr = "" - if objHeadNew.execStr: - execStr = objHeadNew.execStr - elif objHeadOld and objHeadOld.execStr: - execStr = objHeadOld.execStr - - if objHeadNew.fileType != 'patch': - wrongOpt = [x for x in (HParams.Multiline, HParams.DotAll) - if x in objHeadNew.params] - if wrongOpt: - self.setError( - _("Option %s should be used for format=patch only") - % wrongOpt[0]) - return None - if objHeadNew.fileType != 'dconf': - wrongOpt = [x for x in (HParams.DConf,) - if x in objHeadNew.params] - if wrongOpt: - self.setError( - _("Option %s should be used for format=dconf only") - % wrongOpt[0]) - return None - if objHeadNew.fileType != 'backgrounds': - wrongOpt = [x for x in (HParams.Convert, HParams.Stretch) - if x in objHeadNew.params] - if wrongOpt: - self.setError( - _("Option %s should be used for format=backgrounds only") - % wrongOpt[0]) - return None - if objHeadNew.fileType: - formatTemplate = objHeadNew.fileType - typeAppendTemplate = objHeadNew.typeAppend - if formatTemplate in chain(("patch",), HParams.Formats.Executable): - if typeAppendTemplate != HParams.AppendParams.Patch: - self.setError( - _("Wrong option '%(param)s=%(type)s' " - "in template %(file)s") - % {"param": HParams.Append, - "type": typeAppendTemplate, - "file": nameFileTemplate}) - return None - # создаем объект формата шаблона - objTempl = self.formatFactory.createObject( - formatTemplate, self.textTemplate) - if formatTemplate == 'patch': - if HParams.Multiline in objHeadNew.params: - objTempl.setMultiline() - if HParams.DotAll in objHeadNew.params: - objTempl.setDotall() - if formatTemplate == 'dconf': - if HParams.DConf in objHeadNew.params: - objTempl.setPath(objHeadNew.params[HParams.DConf]) - objTempl.setUser(self.objVar.Get('ur_login')) - if formatTemplate == 'backgrounds': - root_path = 
self.objVar.Get('cl_chroot_path') - if root_path != '/': - objTempl.setRootPath(root_path) - if HParams.Convert in objHeadNew.params: - objTempl.setConvert(objHeadNew.params[HParams.Convert]) - if HParams.Link in objHeadNew.params: - objTempl.setSource(objHeadNew.params[HParams.Link]) - if HParams.Mirror in objHeadNew.params: - objTempl.setMirror() - if HParams.Stretch in objHeadNew.params: - objTempl.setStretch(True) - if (HParams.Name in objHeadNew.params and - not objHeadNew.params[HParams.Name]): - objTempl.setPrefix("") - if not objTempl: - self.setError( - _("Wrong header parameter 'format=%s' " - "in template") - % formatTemplate + " " + nameFileTemplate) - return None - if objHeadOld and objHeadOld.body: - self.textConfig = objHeadOld.body - # обработка конфигурационного файла - objTempl.printWARNING = self.printWARNING - self.textTemplate = objTempl.processingFile( - self.textConfig, pathJoin(self.objVar.Get('cl_chroot_path'), - self.objVar.Get('cl_root_path')), - self.nameFileConfigOrig - ) - error = objTempl.getError() - if error: - self.printERROR(error.strip()) - if (formatTemplate in HParams.Formats.Executable and - formatTemplate != "diff"): - raise TemplatesError( - (_("Failed to use %s ") % formatTemplate) + - nameFileTemplate) - if (self.objVar.Get('cl_ebuild_phase') == 'compile' and - self.objVar.Get('cl_template_wrong_patch') == 'break'): - raise CriticalError(_("Failed to use patch ") + - nameFileTemplate) - raise TemplatesError(_("Failed to use patch ") + - nameFileTemplate) - elif (formatTemplate == 'diff' and - self.objVar.Get('cl_verbose_set') == "on"): - self.printSUCCESS(_("Appling patch") + " " + - os.path.basename(nameFileTemplate)) - self.textConfig = self.add_comment( - execStr, title, self.textTemplate) - if formatTemplate in HParams.Formats.Executable: - return objTempl.changed_files - else: - self.saveConfFile() - if HParams.RunNow in objHeadNew.params: - if not self.executeTemplate( - self.textConfig, 
objHeadNew.params[HParams.RunNow]): - self.setError(_("Failed to execute") + _(": ") + - nameFileTemplate) - return None - return None - if HParams.RunPost not in objHeadNew.params: - return filesApply - else: - return None - # Создаем объект в случае параметра format в заголовке - if ((typeAppendTemplate == HParams.AppendParams.Replace or - typeAppendTemplate == HParams.AppendParams.Before or - typeAppendTemplate == HParams.AppendParams.After) and - not (formatTemplate == "bin" or - formatTemplate == "raw")): - # Преобразовываем бинарные файлы - objTxtCoder = None - if flagNotUtf8New: - objTxtCoder = utfBin() - self.textTemplate = objTxtCoder.encode(self.textTemplate) - # создаем объект формата шаблона - objTemplNew = self.formatFactory.createObject( - formatTemplate, self.textTemplate) - if not objTemplNew: - self.setError( - _("Wrong header parameter '{var}={val}' " - "in template").format( - var=HParams.Format, val=formatTemplate) - + " " + nameFileTemplate) - return None - if "xml_" in formatTemplate: - if objTemplNew.getError(): - self.setError(_("Wrong template") + _(": ") + - nameFileTemplate) - return None - # Имя файла внутри xml xfce конфигурационных файлов - nameRootNode = \ - nameFileConfig.rpartition("/")[2].split(".")[0] - objTemplNew.setNameBodyNode(nameRootNode) - # Объект Документ - docObj = objTemplNew.docObj - # Удаление комментариев из документа - docObj.removeComment(docObj.getNodeBody()) - # Добавление необходимых переводов строк - docObj.insertBRtoBody(docObj.getNodeBody()) - # Добавление необходимых разделителей между областями - docObj.insertBeforeSepAreas(docObj.getNodeBody()) - # Пост обработка - if 'postXML' in dir(objTemplNew): - objTemplNew.postXML() - # Получение текстового файла из XML документа - self.textTemplate = objTemplNew.getConfig().encode("UTF-8") - # Если не UTF-8 производим преобразование - if objTxtCoder: - self.textTemplate = objTxtCoder.decode(self.textTemplate) - # Титл для объединения - if ListOptTitle: - title = 
self.getTitle(objTemplNew._comment, - ListOptTitle, - configPath=nameFileConfig) - title = title.encode("UTF-8") - # Замена - if typeAppendTemplate == HParams.AppendParams.Replace: - if "xml_" in formatTemplate: - data = self.textTemplate.split("\n") - data.insert(1, title) - self.textConfig = "\n".join(data) - else: - self.textConfig = self.add_comment( - objHeadNew.execStr, title, self.textTemplate) - - self.saveConfFile() - if HParams.RunNow in objHeadNew.params: - if not self.executeTemplate( - self.textConfig, - objHeadNew.params[HParams.RunNow]): - self.setError(_("Failed to execute") + _(": ") + \ - nameFileTemplate) - return None - return None - if HParams.RunPost not in objHeadNew.params: - return filesApply - else: - return None - # Вверху - elif typeAppendTemplate == HParams.AppendParams.Before: - if "xml_" in formatTemplate: - self.setError( - _("Wrong option '{var}={val}' in template {fn}").format( - var=HParams.Append, val=HParams.AppendParams.Before, - fn=nameFileTemplate)) - return None - if objHeadOld and objHeadOld.body: - self.textConfig = objHeadOld.body - if self.textTemplate and self.textTemplate[-1] == "\n": - tmpTemplate = self.textTemplate + self.textConfig - else: - tmpTemplate = self.textTemplate + "\n" + self.textConfig - self.textConfig = self.add_comment(execStr, title, tmpTemplate) - - self.saveConfFile() - if HParams.RunNow in objHeadNew.params: - if not self.executeTemplate( - self.textConfig, objHeadNew.params[HParams.RunNow]): - self.setError(_("Failed to execute") + _(": ") + - nameFileTemplate) - return None - return None - if HParams.RunPost not in objHeadNew.params: - return filesApply - else: - return None - # Внизу - elif typeAppendTemplate == HParams.AppendParams.After: - if "xml_" in formatTemplate: - self.setError( - _("Wrong option '{var}={val}' in template {fn}").format( - var=HParams.Append, val=HParams.AppendParams.After, - fn=nameFileTemplate)) - return None - if objHeadOld and objHeadOld.body: - self.textConfig = 
objHeadOld.body - if not self.textTemplate or self.textTemplate[-1] == "\n": - tmpTemplate = self.textConfig + self.textTemplate - else: - tmpTemplate = self.textConfig + "\n" + self.textTemplate - self.textConfig = self.add_comment(execStr, title, tmpTemplate) - self.saveConfFile() - if HParams.RunNow in objHeadNew.params: - if not self.executeTemplate( - self.textConfig, objHeadNew.params[HParams.RunNow]): - self.setError(_("Failed to execute") + _(": ") + - nameFileTemplate) - return None - return None - if HParams.RunPost not in objHeadNew.params: - return filesApply - else: - return None - # Объединение - elif typeAppendTemplate == HParams.AppendParams.Join: - objTxtCoder = None - if flagNotUtf8New: - objTxtCoder = utfBin() - self.textTemplate = objTxtCoder.encode(self.textTemplate) - if formatTemplate == "raw": - self.setError( - _("Wrong header parameter '{var}={val}' " - "in template").format(var=HParams.Append, - val=typeAppendTemplate) + - " " + nameFileTemplate) - return None - # создаем объект формата шаблона - objTemplNew = self.formatFactory.createObject( - formatTemplate, self.textTemplate) - if not objTemplNew: - self.setError( - _("Wrong header parameter '{var}={val}' in " - "template").format(var=HParams.Format, - val=formatTemplate) + " " - + nameFileTemplate) - return None - if "xml_" in formatTemplate: - if objTemplNew.getError(): - self.setError(_("Wrong template") + _(": ") + \ - nameFileTemplate) - return None - nameRootNode = nameFileConfig.rpartition("/")[2].split(".")[ - 0] - objTemplNew.setNameBodyNode(nameRootNode) - # Титл для объединения - if ListOptTitle: - title = self.getTitle(objTemplNew._comment, - ListOptTitle, - configPath=nameFileConfig) - title = title.encode("UTF-8") - - # В случае пустого конфигурационного файла - reNoClean = re.compile("[^\s]", re.M) - if not self.textConfig or \ - not reNoClean.search(self.textConfig): - self.textConfig = "" - - objHeadOld = fileHeader(nameFileConfig, self.textConfig, - 
objTemplNew._comment) - if objHeadOld.body: - self.textConfig = objHeadOld.body - else: - self.textConfig = "" - if flagNotUtf8Old: - objTxtCoder = utfBin() - self.textConfig = objTxtCoder.encode(self.textConfig) - # создаем объект формата шаблона для конфигурационного файла - objTemplOld = self.formatFactory.createObject( - formatTemplate, self.textConfig) - if not objTemplOld: - self.setError(_("Error in template %s") % nameFileConfig) - return None - if "xml_" in formatTemplate: - if objTemplOld.getError(): - self.setError(_("Wrong template") + _(": ") + - nameFileConfig) - return None - nameRootNode = nameFileConfig.rpartition("/")[2].split(".")[ - 0] - objTemplOld.setNameBodyNode(nameRootNode) - - objTemplOld.join(objTemplNew) - if "xml_" in formatTemplate: - if objTemplOld.getError(): - self.setError(_("Wrong template") + _(": ") + \ - nameFileTemplate) - return None - data = objTemplOld.getConfig().encode("UTF-8").split("\n") - data.insert(1, title) - self.textConfig = "\n".join(data) - else: - self.textConfig = self.add_comment( - execStr, title, objTemplOld.getConfig().encode("UTF-8")) - # Декодируем если кодировка не UTF-8 - if objTxtCoder: - self.textTemplate = objTxtCoder.decode(self.textTemplate) - self.textConfig = objTxtCoder.decode(self.textConfig) - self.saveConfFile() - if HParams.RunNow in objHeadNew.params: - if not self.executeTemplate( - self.textConfig, objHeadNew.params[HParams.RunNow]): - self.setError(_("Failed to execute") + _(": ") + \ - nameFileTemplate) - return None - return None - if HParams.RunPost not in objHeadNew.params: - return filesApply - else: - return None - else: - self.setError(_("Wrong template option (type append)") - + _(": ") + typeAppendTemplate) - return None - else: - self.setError(_("Template type not found: ") + nameFileTemplate) - return None - - def add_comment(self, execStr, comment, body): - """ - Сформировать выходной файл с учётом строки выполнения, комментария и - содержимого конфига - :param execStr: - 
:param comment: - :param body: - :return: - """ - if comment.startswith("