from functools import wraps

from flask import session, flash, redirect, url_for

from app.models.SQL_DB import User


# Login required decorator
def login_required(f):
    @wraps(f)
    def wrap(*args, **kwargs):
        if 'logged_in' in session:
            return f(*args, **kwargs)
        else:
            flash('You need to log in first.')
            return redirect(url_for('login'))
    return wrap


# Is supplier decorator
def is_supplier(f):
    @wraps(f)
    def wrap(*args, **kwargs):
        username = session['username']
        user = User.query.filter_by(username=username).first()
        if user.supplier:
            return f(*args, **kwargs)
        else:
            flash('You are not a supplier.')
            return redirect(url_for('index'))
    return wrap


# Is customer decorator
def is_customer(f):
    @wraps(f)
    def wrap(*args, **kwargs):
        username = session['username']
        user = User.query.filter_by(username=username).first()
        if not user.supplier:
            flash('You are not a customer.')
            return redirect(url_for('index'))
        return f(*args, **kwargs)
    return wrap
{ "repo_name": "Pytlicek/VOBS", "path": "app/models/Checks.py", "copies": "1", "size": "1167", "license": "mit", "hash": 3532951732295303700, "line_mean": 23.829787234, "line_max": 62, "alpha_frac": 0.5852613539, "autogenerated": false, "ratio": 4.052083333333333, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 47 }
from functools import wraps

from flask import session, redirect, url_for
from flask import abort
from passlib.hash import argon2
from orator.exceptions.query import QueryException

from onepage.models import User
from onepage.models import Novel

LOGGED_IN_IDENTIFIER = "logged_in"


def can_login(email, password):
    """Validate the login parameters (email, password) against the rules.

    Return the validation result: True/False.
    """
    login_user = User.find_by_email(email)
    return login_user is not None and argon2.verify(password, login_user.password_hash)


def activate_session(email):
    session[LOGGED_IN_IDENTIFIER] = email


def inactivate_session():
    session.pop(LOGGED_IN_IDENTIFIER, None)


def required_login(func):
    """Decorator that checks the login state.

    If the user is not logged in, redirect to the login page.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if check_session():
            return func(*args, **kwargs)
        else:
            return redirect(url_for('login.get_login'))
    return wrapper


def check_session():
    return LOGGED_IN_IDENTIFIER in session


def get_session():
    return session.get(LOGGED_IN_IDENTIFIER)


def only_author(func):
    """Decorator that checks authorship.

    If the current user is not the author, redirect to the not-found page.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if check_author(kwargs):
            return func(*args, **kwargs)
        else:
            abort(404)
    return wrapper


def check_author(kwargs):
    novel = Novel.find(kwargs.get('novel_id'))
    return novel.user.email == session.get(LOGGED_IN_IDENTIFIER) if novel is not None else False


def create_user(email, password, pen_name):
    """Create a unique user.

    Return the newly created user. If creating the user failed, return None.
    """
    signup_user = User()
    signup_user.email = email
    signup_user.password_hash = argon2.hash(password)
    signup_user.pen_name = pen_name
    try:
        signup_user.save()
        return signup_user
    except QueryException:
        return None


def update_password(old_password, new_password):
    """Update the logged-in user's password.

    Return True if the update succeeded, False otherwise.
    """
    user = User.find_by_email(session.get(LOGGED_IN_IDENTIFIER))
    if user is not None and argon2.verify(old_password, user.password_hash):
        user.password_hash = argon2.hash(new_password)
        user.save()
        return True
    return False
{ "repo_name": "Subarunari/onepage", "path": "onepage/utils/auth.py", "copies": "1", "size": "2477", "license": "mit", "hash": 5342001130475283000, "line_mean": 23.77, "line_max": 96, "alpha_frac": 0.6685506661, "autogenerated": false, "ratio": 3.764437689969605, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.49329883560696053, "avg_score": null, "num_lines": null }
from functools import wraps

from flask import url_for, request, render_template
from flask_login import current_user
from werkzeug.utils import redirect


def login_required(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if current_user is None or current_user.is_authenticated() is False:
            return redirect(url_for('Login.login', next=request.url))
        return f(*args, **kwargs)
    return decorated_function


def admin_only(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if current_user.is_admin():
            return f(*args, **kwargs)
        return redirect(url_for('Common.unauthorized'))
    return decorated_function


def templated(template=None):
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            template_name = template
            if template_name is None:
                template_name = request.endpoint.replace('.', '/') + '.html'
            ctx = f(*args, **kwargs)
            if ctx is None:
                ctx = {}
            elif not isinstance(ctx, dict):
                return ctx
            return render_template(template_name, **ctx)
        return decorated_function
    return decorator
{ "repo_name": "mandrive/FlaskTest", "path": "utils/utils.py", "copies": "1", "size": "1240", "license": "mit", "hash": 1129672658238032900, "line_mean": 26.5555555556, "line_max": 76, "alpha_frac": 0.6056451613, "autogenerated": false, "ratio": 4.275862068965517, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5381507230265516, "avg_score": null, "num_lines": null }
from functools import wraps

from flask_sqlalchemy import SQLAlchemy
from werkzeug.exceptions import BadRequest
import flask
import requests

from . import extractor

api = flask.Blueprint("api", __name__)
web = flask.Blueprint("web", __name__)


def create_app() -> flask.Flask:
    app = flask.Flask(__name__)

    app.register_blueprint(web)
    app.register_blueprint(api, url_prefix="/api/")

    return app


def validate_json(fn):
    @wraps(fn)
    def wrapper(*args, **kwargs):
        try:
            if not flask.request.get_json(force=True):
                raise BadRequest("asdf")
            return fn(*args, **kwargs)
        except BadRequest:
            return flask.jsonify({"error": "must be valid json"}), 400
    return wrapper


class OptimisiticDict(dict):
    def __getitem__(self, k):
        if k not in self:
            flask.abort(400)
        return super().__getitem__(k)


@web.route("/", methods=["GET"])
def index() -> str:
    return "Hello, world."


@api.route("/recipe/scrape", methods=["POST"])
@validate_json
def trigger_scrape():
    obj = OptimisiticDict(flask.request.get_json(force=True))
    req = requests.get(obj["url"])
    recipes = extractor.extract_recipes(req.text)
    return flask.jsonify({"recipes": recipes})
{ "repo_name": "erik/sketches", "path": "projects/recipebin-py/recipebin/web.py", "copies": "1", "size": "1271", "license": "mit", "hash": -5356633158840891000, "line_mean": 21.6964285714, "line_max": 70, "alpha_frac": 0.6325727773, "autogenerated": false, "ratio": 3.684057971014493, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9816630748314492, "avg_score": 0, "num_lines": 56 }
from functools import wraps

from funcy import decorator


@decorator
def rwlocked(call, read=None, write=None):
    import sys

    from dvc.rwlock import rwlock
    from dvc.dependency.repo import RepoDependency

    if read is None:
        read = []

    if write is None:
        write = []

    stage = call._args[0]

    assert stage.repo.lock.is_locked

    def _chain(names):
        return [
            item.path_info
            for attr in names
            for item in getattr(stage, attr)
            # There is no need to lock RepoDependency deps, as there is no
            # corresponding OutputREPO, so we can't even write it.
            if not isinstance(item, RepoDependency)
        ]

    cmd = " ".join(sys.argv)

    with rwlock(stage.repo.tmp_dir, cmd, _chain(read), _chain(write)):
        return call()


def unlocked_repo(f):
    @wraps(f)
    def wrapper(stage, *args, **kwargs):
        stage.repo.state.dump()
        stage.repo.lock.unlock()
        stage.repo._reset()
        try:
            ret = f(stage, *args, **kwargs)
        finally:
            stage.repo.lock.lock()
            stage.repo.state.load()
        return ret

    return wrapper
{ "repo_name": "dmpetrov/dataversioncontrol", "path": "dvc/stage/decorators.py", "copies": "1", "size": "1187", "license": "apache-2.0", "hash": 1920991245414375700, "line_mean": 22.2745098039, "line_max": 74, "alpha_frac": 0.5787700084, "autogenerated": false, "ratio": 3.9046052631578947, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4983375271557895, "avg_score": null, "num_lines": null }
from functools import wraps

from funcy import decorator


@decorator
def rwlocked(call, read=None, write=None):
    import sys

    from dvc.dependency.repo import RepoDependency
    from dvc.rwlock import rwlock

    if read is None:
        read = []

    if write is None:
        write = []

    stage = call._args[0]  # pylint: disable=protected-access

    assert stage.repo.lock.is_locked

    def _chain(names):
        return [
            item.path_info
            for attr in names
            for item in getattr(stage, attr)
            # There is no need to lock RepoDependency deps, as there is no
            # corresponding OutputREPO, so we can't even write it.
            if not isinstance(item, RepoDependency)
        ]

    cmd = " ".join(sys.argv)

    with rwlock(stage.repo.tmp_dir, cmd, _chain(read), _chain(write)):
        return call()


def unlocked_repo(f):
    @wraps(f)
    def wrapper(stage, *args, **kwargs):
        stage.repo.lock.unlock()
        stage.repo._reset()  # pylint: disable=protected-access
        try:
            ret = f(stage, *args, **kwargs)
        finally:
            stage.repo.lock.lock()
        return ret

    return wrapper


def relock_repo(f):
    @wraps(f)
    def wrapper(stage, *args, **kwargs):
        stage.repo.lock.lock()
        try:
            ret = f(stage, *args, **kwargs)
        finally:
            stage.repo.lock.unlock()
            stage.repo._reset()  # pylint: disable=protected-access
        return ret

    return wrapper
{ "repo_name": "efiop/dvc", "path": "dvc/stage/decorators.py", "copies": "1", "size": "1518", "license": "apache-2.0", "hash": -795806231315422200, "line_mean": 22.71875, "line_max": 74, "alpha_frac": 0.5803689065, "autogenerated": false, "ratio": 3.8823529411764706, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.49627218476764706, "avg_score": null, "num_lines": null }
from functools import wraps

from funk.error import FunkyError
from funk.call import Call
from funk.call import IntegerCallCount
from funk.call import InfiniteCallCount
from funk.sequence import Sequence
from funk.util import function_call_str
from . import pycompat
from .tools import data

__all__ = ['with_mocks', 'Mocks', 'expects', 'allows', 'expects_call', 'allows_call', 'data']


class UnexpectedInvocationError(AssertionError):
    def __init__(self, mock_name, args, kwargs, expectations):
        args_str = map(repr, args)
        kwargs_str = {}
        for key, value in pycompat.iteritems(kwargs):
            kwargs_str[key] = repr(value)
        call_str = function_call_str(mock_name, args_str, kwargs_str)
        exception_str = ["Unexpected invocation: %s" % call_str]
        exception_str.append("\nThe following expectations on %s did not match:\n " % mock_name)
        if len(expectations) > 0:
            exception_str.append("\n ".join(e.replace("\n", "\n ") for e in expectations))
        else:
            exception_str.append("No expectations set.")
        super(UnexpectedInvocationError, self).__init__(''.join(exception_str))


class Mock(object):
    def __init__(self, base, name):
        self._mocked_calls = MockedCalls(base, name)
        self._base = base

    def __getattribute__(self, name):
        my = lambda name: object.__getattribute__(self, name)
        mocked_calls = my('_mocked_calls')
        base = my('_base')
        if name in mocked_calls or (base is not None and hasattr(base, name)):
            return mocked_calls.for_method(name)
        return my(name)

    def __call__(self, *args, **kwargs):
        return object.__getattribute__(self, "_mocked_calls").for_self()(*args, **kwargs)

    def _verify(self):
        object.__getattribute__(self, "_mocked_calls").verify()


class MockedCalls(object):
    def __init__(self, base, mock_name):
        self._base = base
        self._method_calls = {}
        self._function_calls = []
        self._mock_name = mock_name

    def add_method_call(self, method_name, call_count):
        if self._base is not None:
            if not hasattr(self._base, method_name):
                raise AssertionError("Method '%s' is not defined on type object '%s'"
                                     % (method_name, self._base.__name__))
            if not callable(getattr(self._base, method_name)):
                raise AssertionError("Attribute '%s' is not callable on type object '%s'"
                                     % (method_name, self._base.__name__))
        call = Call("%s.%s" % (self._mock_name, method_name), call_count)
        if method_name not in self._method_calls:
            self._method_calls[method_name] = []
        self._method_calls[method_name].append(call)
        return call

    def add_function_call(self, call_count):
        call = Call(self._mock_name, call_count)
        self._function_calls.append(call)
        return call

    def for_method(self, name):
        return MockedCallsForFunction("%s.%s" % (self._mock_name, name),
                                      self._method_calls.get(name, []))

    def for_self(self):
        return MockedCallsForFunction(self._mock_name, self._function_calls)

    def __contains__(self, name):
        return name in self._method_calls

    def verify(self):
        for method_name in self._method_calls:
            for call in self._method_calls[method_name]:
                self._verify_call(call)
        for call in self._function_calls:
            self._verify_call(call)

    def _verify_call(self, call):
        if not call.is_satisfied():
            raise AssertionError("Not all expectations were satisfied.\nExpected call: %s" % call)


class MockedCallsForFunction(object):
    def __init__(self, name, calls):
        self._name = name
        self._calls = calls

    def __call__(self, *args, **kwargs):
        desc = []
        for call in self._calls:
            if call.accepts(args, kwargs, desc):
                return call(*args, **kwargs)
        raise UnexpectedInvocationError(self._name, args, kwargs, desc)


def with_mocks(test_function, mock_factory=None):
    @wraps(test_function)
    def test_function_with_mocks(*args, **kwargs):
        if 'mocks' in kwargs:
            raise FunkyError("mocks has already been set")
        mocks = Mocks(mock_factory)
        kwargs['mocks'] = mocks
        test_function(*args, **kwargs)
        mocks.verify()
    return test_function_with_mocks


class MethodArgumentsSetter(object):
    def __init__(self, call):
        self._call = call

    def __call__(self, *args, **kwargs):
        return self._call.with_args(*args, **kwargs)

    def __getattr__(self, name):
        return getattr(self._call, name)


class ExpectationCreator(object):
    def __init__(self, expectation_setter):
        self._expectation_setter = expectation_setter

    def __getattribute__(self, name):
        my = lambda name: object.__getattribute__(self, name)
        return MethodArgumentsSetter(my('_expectation_setter')(name))


def expects(mock, method_name=None):
    if method_name is None:
        return ExpectationCreator(lambda method_name: expects(mock, method_name))
    return object.__getattribute__(mock, "_mocked_calls").add_method_call(method_name, IntegerCallCount(1))


def allows(mock, method_name=None):
    if method_name is None:
        return ExpectationCreator(lambda method_name: allows(mock, method_name))
    return object.__getattribute__(mock, "_mocked_calls").add_method_call(method_name, InfiniteCallCount())


def expects_call(mock):
    return MethodArgumentsSetter(object.__getattribute__(mock, "_mocked_calls").add_function_call(IntegerCallCount(1)))


def allows_call(mock):
    return MethodArgumentsSetter(object.__getattribute__(mock, "_mocked_calls").add_function_call(InfiniteCallCount()))


class Mocks(object):
    def __init__(self, mock_factory=None):
        if mock_factory is None:
            mock_factory = Mock
        self._mocks = []
        self._mock_factory = mock_factory
        for attr in ["allows", "expects", "data"]:
            setattr(self, attr, globals()[attr])

    def mock(self, base=None, name=None):
        mock = self._mock_factory(base, self._generate_name(name, base))
        self._mocks.append(mock)
        return mock

    def verify(self):
        for mock in self._mocks:
            mock._verify()

    def sequence(self):
        return Sequence()

    def _generate_name(self, name, base):
        if name is not None:
            return name
        if base is None:
            return "unnamed"
        name = []
        name.append(base.__name__[0].lower())
        for character in base.__name__[1:]:
            if character.isupper():
                name.append("_%s" % character.lower())
            else:
                name.append(character)
        return ''.join(name)
{ "repo_name": "mwilliamson/funk", "path": "funk/__init__.py", "copies": "1", "size": "6979", "license": "bsd-2-clause", "hash": -7579493514223519000, "line_mean": 36.1223404255, "line_max": 127, "alpha_frac": 0.6033815733, "autogenerated": false, "ratio": 3.9675952245594086, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5070976797859409, "avg_score": null, "num_lines": null }
from functools import wraps

from fuzzysearch.common import FuzzySearchBase, Match
from fuzzysearch.compat import text_type, xrange


__all__ = [
    'search_exact',
    'ExactSearch',
]


CLASSES_WITH_INDEX = (list, tuple)
CLASSES_WITH_FIND = (bytes, bytearray, text_type)

try:
    from Bio.Seq import Seq
except ImportError:
    pass
else:
    CLASSES_WITH_FIND += (Seq,)


def search_exact(subsequence, sequence, start_index=0, end_index=None):
    if not subsequence:
        raise ValueError('subsequence must not be empty')

    if end_index is None:
        end_index = len(sequence)

    if isinstance(sequence, CLASSES_WITH_FIND):
        def find_in_index_range(start_index):
            return sequence.find(subsequence, start_index, end_index)
    elif isinstance(sequence, CLASSES_WITH_INDEX):
        first_item = subsequence[0]
        first_item_last_index = end_index - (len(subsequence) - 1)

        def find_in_index_range(start_index):
            while True:
                try:
                    first_index = sequence.index(first_item, start_index,
                                                 first_item_last_index)
                    start_index = first_index + 1
                except ValueError:
                    return -1
                for subseq_index in xrange(1, len(subsequence)):
                    if sequence[first_index + subseq_index] != subsequence[subseq_index]:
                        break
                else:
                    return first_index
    else:
        raise TypeError('unsupported sequence type: %s' % type(sequence))

    index = find_in_index_range(start_index)
    while index >= 0:
        yield index
        index = find_in_index_range(index + 1)


try:
    from fuzzysearch._common import search_exact_byteslike
except ImportError:
    pass
else:
    _search_exact = search_exact

    @wraps(_search_exact)
    def search_exact(subsequence, sequence, start_index=0, end_index=None):
        if end_index is None:
            end_index = len(sequence)
        try:
            return search_exact_byteslike(subsequence, sequence,
                                          start_index, end_index)
        except (TypeError, UnicodeEncodeError):
            return _search_exact(subsequence, sequence, start_index, end_index)


class ExactSearch(FuzzySearchBase):
    @classmethod
    def search(cls, subsequence, sequence, search_params):
        for index in search_exact(subsequence, sequence):
            yield Match(index, index + len(subsequence), 0,
                        sequence[index:index + len(subsequence)])

    @classmethod
    def extra_items_for_chunked_search(cls, subsequence, search_params):
        return 0
{ "repo_name": "taleinat/fuzzysearch", "path": "src/fuzzysearch/search_exact.py", "copies": "1", "size": "2662", "license": "mit", "hash": 8351028903281778000, "line_mean": 30.3176470588, "line_max": 96, "alpha_frac": 0.6149511645, "autogenerated": false, "ratio": 4.021148036253776, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5136099200753776, "avg_score": null, "num_lines": null }
from functools import wraps

from fuzzysearch.compat import int_types

from attr import attrs, attrib


__all__ = [
    'Match', 'LevenshteinSearchParams',
    'count_differences_with_maximum',
    'group_matches', 'get_best_match_in_group',
    'consolidate_overlapping_matches',
]


@attrs(frozen=True, slots=True)
class Match(object):
    start = attrib(type=int, eq=True, hash=True)
    end = attrib(type=int, eq=True, hash=True)
    dist = attrib(type=int, eq=True, hash=True)
    matched = attrib(eq=False, hash=False)

    if __debug__:
        def __attrs_post_init__(self):
            if not (isinstance(self.start, int_types) and self.start >= 0):
                raise ValueError('start must be a non-negative integer')
            if not (isinstance(self.end, int_types) and self.end >= self.start):
                raise ValueError('end must be an integer no smaller than start')
            if not (isinstance(self.dist, int_types) and self.dist >= 0):
                print(self.dist)
                raise ValueError('dist must be a non-negative integer')
            if self.matched is None:
                raise ValueError('matched must be supplied')


@attrs(frozen=True, slots=True)
class LevenshteinSearchParams(object):
    """Parameter data-class for Levenshtein-distance fuzzy searches."""
    max_substitutions = attrib(default=None)
    max_insertions = attrib(default=None)
    max_deletions = attrib(default=None)
    max_l_dist = attrib(default=None)

    def __attrs_post_init__(self):
        self._check_params_valid()
        max_subs, max_ins, max_dels, max_l_dist = \
            self._normalize_params(*self.unpacked)
        object.__setattr__(self, 'max_substitutions', max_subs)
        object.__setattr__(self, 'max_insertions', max_ins)
        object.__setattr__(self, 'max_deletions', max_dels)
        object.__setattr__(self, 'max_l_dist', max_l_dist)

    @property
    def unpacked(self):
        return (
            self.max_substitutions,
            self.max_insertions,
            self.max_deletions,
            self.max_l_dist,
        )

    def _check_params_valid(self):
        if not all(x is None or (isinstance(x, int) and x >= 0)
                   for x in [
                       self.max_substitutions, self.max_insertions,
                       self.max_deletions, self.max_l_dist
                   ]):
            raise TypeError("All limits must be positive integers or None.")

        if self.max_l_dist is None:
            n_limits = (
                (1 if self.max_substitutions is not None else 0) +
                (1 if self.max_insertions is not None else 0) +
                (1 if self.max_deletions is not None else 0)
            )
            if n_limits < 3:
                if n_limits == 0:
                    raise ValueError('No limitations given!')
                elif self.max_substitutions is None:
                    raise ValueError('# substitutions must be limited!')
                elif self.max_insertions is None:
                    raise ValueError('# insertions must be limited!')
                elif self.max_deletions is None:
                    raise ValueError('# deletions must be limited!')

    @classmethod
    def _normalize_params(cls, max_substitutions, max_insertions,
                          max_deletions, max_l_dist):
        maxes_sum = sum(
            x if x is not None else 1 << 29
            for x in [
                max_substitutions,
                max_insertions,
                max_deletions,
            ]
        )

        if max_l_dist is None:
            # replace max_l_dist with the sum of the other limits
            return (
                max_substitutions,
                max_insertions,
                max_deletions,
                maxes_sum,
            )
        else:
            def _normalize(param):
                return min(param, max_l_dist) if param is not None else max_l_dist

            return (
                _normalize(max_substitutions),
                _normalize(max_insertions),
                _normalize(max_deletions),
                min(max_l_dist, maxes_sum),
            )


def count_differences_with_maximum(sequence1, sequence2, max_differences):
    n_different = 0
    for item1, item2 in zip(sequence1, sequence2):
        if item1 != item2:
            n_different += 1
            if n_different == max_differences:
                return n_different
    return n_different


try:
    from fuzzysearch._common import count_differences_with_maximum_byteslike
except ImportError:
    pass
else:
    _count_differences_with_maximum = count_differences_with_maximum

    @wraps(_count_differences_with_maximum)
    def count_differences_with_maximum(sequence1, sequence2, max_differences):
        try:
            return count_differences_with_maximum_byteslike(
                sequence1, sequence2, max_differences)
        except TypeError:
            return _count_differences_with_maximum(
                sequence1, sequence2, max_differences)


class GroupOfMatches(object):
    def __init__(self, match):
        assert match.start <= match.end
        self.start = match.start
        self.end = match.end
        self.matches = set([match])

    def is_match_in_group(self, match):
        return not (match.end <= self.start or match.start >= self.end)

    def add_match(self, match):
        self.matches.add(match)
        self.start = min(self.start, match.start)
        self.end = max(self.end, match.end)


def group_matches(matches):
    groups = []
    for match in matches:
        overlapping_groups = [g for g in groups if g.is_match_in_group(match)]
        if not overlapping_groups:
            groups.append(GroupOfMatches(match))
        elif len(overlapping_groups) == 1:
            overlapping_groups[0].add_match(match)
        else:
            new_group = GroupOfMatches(match)
            for group in overlapping_groups:
                for match in group.matches:
                    new_group.add_match(match)
            groups = [g for g in groups if g not in overlapping_groups]
            groups.append(new_group)
    return [group.matches for group in groups]


def get_best_match_in_group(group):
    """Get the longest match of those with the smallest distance."""
    return min(group, key=lambda match: (match.dist, -(match.end - match.start)))


def consolidate_overlapping_matches(matches):
    """Replace overlapping matches with a single, "best" match."""
    groups = group_matches(matches)
    best_matches = [get_best_match_in_group(group) for group in groups]
    return sorted(best_matches)


class FuzzySearchBase(object):
    """Abstract base class for fuzzy search classes"""
    @classmethod
    def search(cls, subsequence, sequence, search_params):
        raise NotImplementedError

    @classmethod
    def consolidate_matches(cls, matches):
        try:
            len(matches)
        except TypeError:
            return list(matches)
        else:
            return matches

    @classmethod
    def extra_items_for_chunked_search(cls, subsequence, search_params):
        raise NotImplementedError
{ "repo_name": "taleinat/fuzzysearch", "path": "src/fuzzysearch/common.py", "copies": "1", "size": "7375", "license": "mit", "hash": 7443151393031251000, "line_mean": 34.119047619, "line_max": 82, "alpha_frac": 0.5728813559, "autogenerated": false, "ratio": 4.159616469261139, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5232497825161139, "avg_score": null, "num_lines": null }
from functools import wraps

from gi.repository import Gtk

from ..application import input_alg, builder
from ..model.input import Condition, JumpTo, JumpFrom, Control, End, ControlBlock
from .util import get_handler_constructor

id_chooser = builder.get_object('id_chooser')
id_input = builder.get_object('id_input')

control_dialog = builder.get_object('control_dialog')
control_input = builder.get_object('control_input')

toolbar_handlers = {}
handler = get_handler_constructor(toolbar_handlers)


def with_indexchooser(handler_func):
    @wraps(handler_func)
    def wrapper(*args, **kwargs):
        response = id_chooser.run()
        if response:
            index = id_input.get_value_as_int()
            kwargs.update(index=index)
            handler_func(*args, **kwargs)
        id_input.set_value(0)
        id_chooser.hide()
    return wrapper


@handler('add_cond')
@with_indexchooser
def add_cond(widget, index):
    input_alg.insert(Condition(index))
    input_alg.draw()


@handler('add_control')
def add_control(widget):
    response = control_dialog.run()
    try:
        if response:
            ids = control_input.get_text().split(' ')
            ids = list(map(int, ids))
            if len(ids) == 1:
                input_alg.insert(Control(ids[0]))
            else:
                input_alg.insert(ControlBlock(ids))
    except ValueError as e:
        print(e)
    finally:
        control_input.set_text('')
        control_dialog.hide()
    input_alg.draw()


@handler('add_jump_from')
@with_indexchooser
def add_jump_from(widget, index):
    input_alg.insert(JumpFrom(index))
    input_alg.draw()


@handler('add_jump_to')
@with_indexchooser
def add_jump_to(widget, index):
    input_alg.insert(JumpTo(index))
    input_alg.draw()


@handler('add_end')
def add_end(widget):
    input_alg.insert(End())
    input_alg.draw()


@handler('delete')
def delete(widget):
    input_alg.delete()
    input_alg.draw()


@handler('move_left')
def move_left(widget):
    input_alg.move_left()
    input_alg.draw()


@handler('move_right')
def move_right(widget):
    input_alg.move_right()
    input_alg.draw()
{ "repo_name": "uvNikita/fsm_builder", "path": "fsm_builder/handlers/toolbar.py", "copies": "1", "size": "2141", "license": "mit", "hash": 8718089638182733000, "line_mean": 21.5368421053, "line_max": 81, "alpha_frac": 0.6412891172, "autogenerated": false, "ratio": 3.3716535433070867, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.45129426605070866, "avg_score": null, "num_lines": null }
from functools import wraps

from git_orm import transaction, GitError
from git_orm.sync import fetch, push, merge

from tiget.cmds import Cmd

__all__ = ['Begin', 'Commit', 'Rollback', 'Fetch', 'Push', 'Merge']


def catch_git_error(fn):
    @wraps(fn)
    def _inner(self, args):
        try:
            return fn(self, args)
        except GitError as e:
            raise self.error(e)
    return _inner


class Begin(Cmd):
    description = 'begin transaction'

    @catch_git_error
    def do(self, args):
        transaction.begin()


class Commit(Cmd):
    description = 'commit transaction'

    def setup(self):
        self.parser.add_argument('message', nargs='?')

    @catch_git_error
    def do(self, args):
        transaction.commit(args.message)


class Rollback(Cmd):
    description = 'roll back transaction'

    @catch_git_error
    def do(self, args):
        transaction.rollback()


class Fetch(Cmd):
    description = 'fetch changes from remote repository'

    @catch_git_error
    def do(self, args):
        fetch()


class Push(Cmd):
    description = 'push changes to remote repository'

    @catch_git_error
    def do(self, args):
        push()


class Merge(Cmd):
    description = 'merge remote and local changes'

    @catch_git_error
    def do(self, args):
        merge()
{ "repo_name": "natano/tiget", "path": "tiget/core/cmds/git.py", "copies": "1", "size": "1314", "license": "isc", "hash": -1089442359798031700, "line_mean": 17.7714285714, "line_max": 67, "alpha_frac": 0.6225266362, "autogenerated": false, "ratio": 3.7223796033994336, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.48449062395994336, "avg_score": null, "num_lines": null }
from functools import wraps

from git_orm import transaction

from tiget.cmds import Cmd
from tiget.table import Table
from tiget.scrum.models import Ticket, User
from tiget.utils import open_in_editor


def require_user(fn):
    @wraps(fn)
    def _inner(self, args):
        try:
            user = User.current()
        except User.DoesNotExist as e:
            raise self.error(e)
        return fn(self, args, user)
    return _inner


class Accept(Cmd):
    description = 'accept ticket'

    def setup(self):
        self.parser.add_argument('ticket_id')

    @transaction.wrap()
    @require_user
    def do(self, args, user):
        try:
            ticket = Ticket.objects.get(id__startswith=args.ticket_id)
            ticket.owner = user
        except (Ticket.DoesNotExist, User.DoesNotExist) as e:
            raise self.error(e)
        ticket.save()


class Mine(Cmd):
    description = 'list tickets owned by the current user'

    def setup(self):
        self.parser.add_argument('-a', '--all', action='store_true')

    @transaction.wrap()
    @require_user
    def do(self, args, user):
        tickets = Ticket.objects.filter(owner=user)
        if not args.all:
            tickets = tickets.filter(status__in=('new', 'wtf'))
        table = Table.from_queryset(tickets, fields=(
            'id', 'summary', 'sprint', 'status', 'type'))
        self.print(table.render())


class New(Cmd):
    description = 'create new ticket'

    def setup(self):
        self.parser.add_argument(
            'type', nargs='?', default=Ticket._meta.get_field('type').default)

    def do(self, args):
        try:
            ticket = Ticket(type=args.type)
            s = open_in_editor(ticket.dumps())
            ticket.loads(s)
            ticket.save()
        except Ticket.InvalidObject as e:
            raise self.error(e)


class SetTicketStatus(Cmd):
    names = [status for status in Ticket.STATUS_CHOICES if not status == 'new']

    @property
    def description(self):
        return 'set ticket status to {}'.format(self.name)

    def setup(self):
        self.parser.add_argument('ticket_id')

    @transaction.wrap()
    def do(self, args):
        ticket = Ticket.objects.get(id__startswith=args.ticket_id)
        ticket.status = self.name
        ticket.save()
{ "repo_name": "natano/tiget", "path": "tiget/scrum/cmds.py", "copies": "1", "size": "2298", "license": "isc", "hash": -4731468286903679000, "line_mean": 25.4137931034, "line_max": 79, "alpha_frac": 0.6040034813, "autogenerated": false, "ratio": 3.785831960461285, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4889835441761285, "avg_score": null, "num_lines": null }
from functools import wraps

from google.appengine.ext import deferred

from mutex import Mutex


def merge(dict1, dict2):
    data = {}
    data.update(dict1)
    data.update(dict2)
    return data


class task(object):
    """
    This decorator helps to create a delayed task easily.

    Parameters could be: _countdown, _eta, _headers, _name, _target,
    _transactional, _url, _retry_options, _queue

    See the App Engine task queue documentation for details:
    https://cloud.google.com/appengine/docs/python/taskqueue/tasks

    This decorator can be applied to both functions and classes. A class
    that wants to use it must define an `execute` method; refer to the
    `class_execute` function to see how it is invoked.
    """
    def __init__(self, **kwargs):
        self.kwargs = kwargs

    def class_execute(self, klass, *args, **kwargs):
        instance = klass(*args, **kwargs)
        return instance.execute()

    def func_execute(self, func, *args, **kwargs):
        return func(*args, **kwargs)

    def execute_delay(self, func, obj, func_args, func_kwargs):
        params = merge(self.kwargs, func_kwargs)
        deferred.defer(func, obj, *func_args, **params)

    def setup_delay_class(self, klass):
        @classmethod
        def delay(cls, *class_args, **class_kwargs):
            self.execute_delay(
                self.class_execute, klass, class_args, class_kwargs)
        klass.delay = delay

    def setup_delay_func(self, func):
        @wraps(func)
        def delay(*func_args, **func_kwargs):
            self.execute_delay(
                self.func_execute, func, func_args, func_kwargs)
        func.delay = delay

    def __call__(self, obj):
        if isinstance(obj, type):
            self.setup_delay_class(obj)
        else:
            self.setup_delay_func(obj)
        return obj


class single_task(task):
    def _execute(self, func_name, obj, *func_args, **func_kwargs):
        func = getattr(super(single_task, self), func_name)
        defer_func = getattr(self, func_name)
        mutex = Mutex(func_kwargs['key'])
        if mutex.acquired():
            func_kwargs.pop('key')
            func(obj, *func_args, **func_kwargs)
            mutex.release()
        else:
            func_kwargs['_countdown'] = self.kwargs.get('_countdown', 1)
            mutex.release()
            deferred.defer(defer_func, obj, *func_args, **func_kwargs)

    def class_execute(self, klass, *args, **kwargs):
        return self._execute('class_execute', klass, *args, **kwargs)

    def func_execute(self, func, *args, **kwargs):
        return self._execute('func_execute', func, *args, **kwargs)

    def execute_delay(self, func, obj, func_args, func_kwargs):
        mutex = Mutex(func_kwargs['key'])
        mutex.initial()
        super(single_task, self).execute_delay(
            func, obj, func_args, func_kwargs)
{ "repo_name": "trustcircleglobal/tcg-gae", "path": "tcg_gae/task.py", "copies": "1", "size": "2902", "license": "isc", "hash": -8318074710403391000, "line_mean": 29.8723404255, "line_max": 76, "alpha_frac": 0.61130255, "autogenerated": false, "ratio": 3.749354005167959, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9860656555167959, "avg_score": 0, "num_lines": 94 }
from functools import wraps

from huey import RedisHuey


def _task_wrapper(task_fn, pre_task=None, post_task=None):
    @wraps(task_fn)
    def inner(*args, **kwargs):
        if pre_task is not None:
            pre_task()
        result = task_fn(*args, **kwargs)
        if post_task is not None:
            post_task()
        return result
    return inner


class RedisHueyExt(RedisHuey):
    def task(self, pre_task=None, post_task=None, *args, **kwargs):
        def decorator(fn):
            return (super(RedisHueyExt, self)
                    .task(*args, **kwargs)(_task_wrapper(
                        fn,
                        pre_task=pre_task,
                        post_task=post_task)))
        return decorator

    def periodic_task(self, pre_task=None, post_task=None, *args, **kwargs):
        def decorator(fn):
            return (super(RedisHueyExt, self)
                    .periodic_task(*args, **kwargs)(_task_wrapper(
                        fn,
                        pre_task=pre_task,
                        post_task=post_task)))
        return decorator
{ "repo_name": "pombredanne/huey", "path": "huey/wrapper.py", "copies": "2", "size": "1100", "license": "mit", "hash": 3708291903903587000, "line_mean": 30.4285714286, "line_max": 76, "alpha_frac": 0.52, "autogenerated": false, "ratio": 3.900709219858156, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5420709219858156, "avg_score": null, "num_lines": null }
from functools import wraps

from . import compat
from .base import make_key_func, get_serializer, get_expire
from .wrappers import load_wrappers


class make_cache(object):
    def __init__(self, cache, ttl=600, fmt='msgpack', fuzzy_ttl=True):
        self.cache = cache
        self.ttl = ttl
        self.fmt = fmt
        self.fuzzy_ttl = fuzzy_ttl

    def _wrapper(self, tpl, ttl, fmt, fuzzy_ttl, multi=False):
        def decorator(func):
            m = load_wrappers(
                compat.iscoroutinefunction(func),
                getattr(self.cache, 'is_async', False))
            cls = m['ObjectsCacheWrapper'] if multi else m['CacheWrapper']
            fttl = self.fuzzy_ttl if fuzzy_ttl is None else fuzzy_ttl
            return wraps(func)(cls(
                func, self.cache,
                make_key_func(tpl, func, multi),
                get_serializer(fmt or self.fmt),
                get_expire(ttl or self.ttl, fttl)))
        return decorator

    def __call__(self, tpl, ttl=None, fmt=None, fuzzy_ttl=None):
        return self._wrapper(tpl, ttl, fmt, fuzzy_ttl)

    def objects(self, tpl, ttl=None, fmt=None, fuzzy_ttl=None):
        return self._wrapper(tpl, ttl, fmt, fuzzy_ttl, multi=True)
{ "repo_name": "baverman/cachel", "path": "cachel/simple.py", "copies": "1", "size": "1226", "license": "mit", "hash": 7638278697207634000, "line_mean": 36.1515151515, "line_max": 74, "alpha_frac": 0.5970636215, "autogenerated": false, "ratio": 3.7378048780487805, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.48348684995487806, "avg_score": null, "num_lines": null }
from functools import wraps

from . import filters
from .asyncsupport import auto_aiter
from .asyncsupport import auto_await


async def auto_to_seq(value):
    seq = []

    if hasattr(value, "__aiter__"):
        async for item in value:
            seq.append(item)
    else:
        for item in value:
            seq.append(item)

    return seq


async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
    seq, func = filters.prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)

    if seq:
        async for item in auto_aiter(seq):
            if func(item):
                yield item


def dualfilter(normal_filter, async_filter):
    wrap_evalctx = False

    if getattr(normal_filter, "environmentfilter", False) is True:

        def is_async(args):
            return args[0].is_async

        wrap_evalctx = False
    else:
        has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True
        has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True
        wrap_evalctx = not has_evalctxfilter and not has_ctxfilter

        def is_async(args):
            return args[0].environment.is_async

    @wraps(normal_filter)
    def wrapper(*args, **kwargs):
        b = is_async(args)

        if wrap_evalctx:
            args = args[1:]

        if b:
            return async_filter(*args, **kwargs)

        return normal_filter(*args, **kwargs)

    if wrap_evalctx:
        wrapper.evalcontextfilter = True

    wrapper.asyncfiltervariant = True
    return wrapper


def asyncfiltervariant(original):
    def decorator(f):
        return dualfilter(original, f)

    return decorator


@asyncfiltervariant(filters.do_first)
async def do_first(environment, seq):
    try:
        return await auto_aiter(seq).__anext__()
    except StopAsyncIteration:
        return environment.undefined("No first item, sequence was empty.")


@asyncfiltervariant(filters.do_groupby)
async def do_groupby(environment, value, attribute):
    expr = filters.make_attrgetter(environment, attribute)
    return [
        filters._GroupTuple(key, await auto_to_seq(values))
        for key, values in filters.groupby(
            sorted(await auto_to_seq(value), key=expr), expr
        )
    ]


@asyncfiltervariant(filters.do_join)
async def do_join(eval_ctx, value, d="", attribute=None):
    return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute)


@asyncfiltervariant(filters.do_list)
async def do_list(value):
    return await auto_to_seq(value)


@asyncfiltervariant(filters.do_reject)
async def do_reject(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: not x, False)


@asyncfiltervariant(filters.do_rejectattr)
async def do_rejectattr(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: not x, True)


@asyncfiltervariant(filters.do_select)
async def do_select(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: x, False)


@asyncfiltervariant(filters.do_selectattr)
async def do_selectattr(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: x, True)


@asyncfiltervariant(filters.do_map)
async def do_map(*args, **kwargs):
    seq, func = filters.prepare_map(args, kwargs)

    if seq:
        async for item in auto_aiter(seq):
            yield await auto_await(func(item))


@asyncfiltervariant(filters.do_sum)
async def do_sum(environment, iterable, attribute=None, start=0):
    rv = start

    if attribute is not None:
        func = filters.make_attrgetter(environment, attribute)
    else:

        def func(x):
            return x

    async for item in auto_aiter(iterable):
        rv += func(item)

    return rv


@asyncfiltervariant(filters.do_slice)
async def do_slice(value, slices, fill_with=None):
    return filters.do_slice(await auto_to_seq(value), slices, fill_with)


ASYNC_FILTERS = {
    "first": do_first,
    "groupby": do_groupby,
    "join": do_join,
    "list": do_list,
    # we intentionally do not support do_last because it may not be safe in async
    "reject": do_reject,
    "rejectattr": do_rejectattr,
    "map": do_map,
    "select": do_select,
    "selectattr": do_selectattr,
    "sum": do_sum,
    "slice": do_slice,
}
{ "repo_name": "mitsuhiko/jinja2", "path": "src/jinja2/asyncfilters.py", "copies": "3", "size": "4246", "license": "bsd-3-clause", "hash": -1670468177936462800, "line_mean": 26.0445859873, "line_max": 86, "alpha_frac": 0.6594441828, "autogenerated": false, "ratio": 3.5442404006677797, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0002258221701091444, "num_lines": 157 }
from functools import wraps

from .inspect import get_full_arg_spec


class typed(object):
    """
    Decorator that does type checks according to annotations.

        @typed(strict=True)
        def do_something(a: bool, b: int = 3) -> bool:
            return False
    """

    def __init__(self, annotations=None, strict=False):
        self.strict = strict
        self.annotations = annotations

    def is_instance(self, instance, cls):
        """
        Check that instance is of type cls.
        """
        if self.strict:
            return type(instance) == cls
        else:
            return isinstance(instance, cls)

    def __call__(self, func):
        """
        Decorate function.
        """
        argnames, defaults, annotations = get_full_arg_spec(func)
        annotations = self.annotations or annotations
        return_cls = annotations.get('return')

        @wraps(func)
        def wrapper(*args, **kwargs):
            values = defaults.copy()
            values.update(dict(zip(argnames, args)))
            values.update(kwargs)

            for argname, argcls in annotations.items():
                if argname == 'return':
                    continue
                argvalue = values[argname]
                if not self.is_instance(argvalue, argcls):
                    raise TypeError(
                        'Value {} of type {} is not a {} instance'.format(
                            argvalue, type(argvalue), argcls))

            value = func(*args, **kwargs)
            if (return_cls is not None and
                    not self.is_instance(value, return_cls)):
                raise TypeError(
                    'Returned {} of type {} instead of {}'.format(
                        value, type(value), return_cls))

            return value

        if not hasattr(wrapper, '__wrapped__'):
            wrapper.__wrapped__ = func

        return wrapper
{ "repo_name": "snogaraleal/adjax", "path": "adjax/utils/types.py", "copies": "1", "size": "1888", "license": "mit", "hash": -7832632849146310000, "line_mean": 28.9682539683, "line_max": 74, "alpha_frac": 0.530720339, "autogenerated": false, "ratio": 4.684863523573201, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5715583862573201, "avg_score": null, "num_lines": null }
from functools import wraps

from jinja2.asyncsupport import auto_aiter
from jinja2 import filters


async def auto_to_seq(value):
    seq = []
    if hasattr(value, '__aiter__'):
        async for item in value:
            seq.append(item)
    else:
        for item in value:
            seq.append(item)
    return seq


async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
    seq, func = filters.prepare_select_or_reject(
        args, kwargs, modfunc, lookup_attr)
    if seq:
        async for item in auto_aiter(seq):
            if func(item):
                yield item


def dualfilter(normal_filter, async_filter):
    wrap_evalctx = False
    if getattr(normal_filter, 'environmentfilter', False):
        is_async = lambda args: args[0].is_async
        wrap_evalctx = False
    else:
        if not getattr(normal_filter, 'evalcontextfilter', False) and \
           not getattr(normal_filter, 'contextfilter', False):
            wrap_evalctx = True
        is_async = lambda args: args[0].environment.is_async

    @wraps(normal_filter)
    def wrapper(*args, **kwargs):
        b = is_async(args)
        if wrap_evalctx:
            args = args[1:]
        if b:
            return async_filter(*args, **kwargs)
        return normal_filter(*args, **kwargs)

    if wrap_evalctx:
        wrapper.evalcontextfilter = True

    wrapper.asyncfiltervariant = True
    return wrapper


def asyncfiltervariant(original):
    def decorator(f):
        return dualfilter(original, f)
    return decorator


@asyncfiltervariant(filters.do_first)
async def do_first(environment, seq):
    try:
        return await auto_aiter(seq).__anext__()
    except StopAsyncIteration:
        return environment.undefined('No first item, sequence was empty.')


@asyncfiltervariant(filters.do_groupby)
async def do_groupby(environment, value, attribute):
    expr = filters.make_attrgetter(environment, attribute)
    return [filters._GroupTuple(key, await auto_to_seq(values))
            for key, values in filters.groupby(sorted(
                await auto_to_seq(value), key=expr), expr)]


@asyncfiltervariant(filters.do_join)
async def do_join(eval_ctx, value, d=u'', attribute=None):
    return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute)


@asyncfiltervariant(filters.do_list)
async def do_list(value):
    return await auto_to_seq(value)


@asyncfiltervariant(filters.do_reject)
async def do_reject(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: not x, False)


@asyncfiltervariant(filters.do_rejectattr)
async def do_rejectattr(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: not x, True)


@asyncfiltervariant(filters.do_select)
async def do_select(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: x, False)


@asyncfiltervariant(filters.do_selectattr)
async def do_selectattr(*args, **kwargs):
    return async_select_or_reject(args, kwargs, lambda x: x, True)


@asyncfiltervariant(filters.do_map)
async def do_map(*args, **kwargs):
    seq, func = filters.prepare_map(args, kwargs)
    if seq:
        async for item in auto_aiter(seq):
            yield func(item)


@asyncfiltervariant(filters.do_sum)
async def do_sum(environment, iterable, attribute=None, start=0):
    rv = start
    if attribute is not None:
        func = filters.make_attrgetter(environment, attribute)
    else:
        func = lambda x: x
    async for item in auto_aiter(iterable):
        rv += func(item)
    return rv


@asyncfiltervariant(filters.do_slice)
async def do_slice(value, slices, fill_with=None):
    return filters.do_slice(await auto_to_seq(value), slices, fill_with)


ASYNC_FILTERS = {
    'first': do_first,
    'groupby': do_groupby,
    'join': do_join,
    'list': do_list,
    # we intentionally do not support do_last because that would be
    # ridiculous
    'reject': do_reject,
    'rejectattr': do_rejectattr,
    'map': do_map,
    'select': do_select,
    'selectattr': do_selectattr,
    'sum': do_sum,
    'slice': do_slice,
}
{ "repo_name": "cdgallahue/atomic-turbine", "path": "web/lib/python2.7/site-packages/jinja2/asyncfilters.py", "copies": "217", "size": "4144", "license": "mit", "hash": 6862721363820964000, "line_mean": 27.3835616438, "line_max": 76, "alpha_frac": 0.6493725869, "autogenerated": false, "ratio": 3.557081545064378, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0005057441296253012, "num_lines": 146 }
from functools import wraps

from markupsafe import Markup
from jinja2 import Environment
from jinja2.compiler import CodeGenerator


class LocalOverridingCodeGenerator(CodeGenerator):
    def visit_Template(self, *args, **kwargs):
        super(LocalOverridingCodeGenerator, self).visit_Template(*args, **kwargs)
        overrides = getattr(self.environment, '_codegen_overrides', {})

        if overrides:
            self.writeline('')
            for name, override in overrides.items():
                self.writeline('{} = {}'.format(name, override))


class DynAutoEscapeEnvironment(Environment):
    code_generator_class = LocalOverridingCodeGenerator

    def __init__(self, *args, **kwargs):
        escape_func = kwargs.pop('escape_func', None)
        markup_class = kwargs.pop('markup_class', None)

        super(DynAutoEscapeEnvironment, self).__init__(*args, **kwargs)

        # we need to disable constant-evaluation at compile time, because it
        # calls jinja's own escape function.
        #
        # this is done by jinja itself if a finalize function is set and it
        # is marked as a contextfunction. this is accomplished by either
        # supplying a no-op contextfunction itself or wrapping an existing
        # finalize in a contextfunction
        if self.finalize:
            if not getattr(self.finalize, 'contextfunction', False):
                _finalize = getattr(self, 'finalize')
                self.finalize = lambda _, v: _finalize(v)
        else:
            self.finalize = lambda _, v: v
        self.finalize.contextfunction = True

        self._codegen_overrides = {}

        if escape_func:
            self._codegen_overrides['escape'] = 'environment.escape_func'
            self.escape_func = escape_func
            self.filters['e'] = escape_func
            self.filters['escape'] = escape_func

        if markup_class:
            self._codegen_overrides['markup'] = 'environment.markup_class'
            self.markup_class = markup_class


def markup_escape_func(f):
    @wraps(f)
    def _(v):
        if isinstance(v, Markup):
            return v
        return Markup(f(v))
    return _
{ "repo_name": "mbr/jinja-vanish", "path": "jinja_vanish/__init__.py", "copies": "1", "size": "2222", "license": "mit", "hash": -2521852490328145400, "line_mean": 33.1846153846, "line_max": 76, "alpha_frac": 0.6098109811, "autogenerated": false, "ratio": 4.452905811623246, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5562716792723246, "avg_score": null, "num_lines": null }
from functools import wraps

from ming.orm import ThreadLocalORMSession

from allura.model.project import Project, Neighborhood, AppConfig
from allura.model.auth import User
from allura.model.discuss import Discussion, Thread, Post


def flush_on_return(fn):
    @wraps(fn)
    def new_fn(*args, **kwargs):
        result = fn(*args, **kwargs)
        ThreadLocalORMSession.flush_all()
        return result
    return new_fn


@flush_on_return
def create_project(shortname):
    neighborhood = create_neighborhood()
    return Project(shortname=shortname,
                   database_uri='mim://test/myproject_db',
                   neighborhood_id=neighborhood._id,
                   is_root=True)


@flush_on_return
def create_neighborhood():
    neighborhood = Neighborhood(url_prefix='http://example.com/myproject')
    return neighborhood


@flush_on_return
def create_app_config(project, mount_point):
    return AppConfig(
        project_id=project._id,
        tool_name='myapp',
        options={'mount_point': 'my_mounted_app'},
        acl=[])


@flush_on_return
def create_post(slug):
    discussion = create_discussion()
    thread = create_thread(discussion=discussion)
    author = create_user()

    return Post(slug=slug,
                thread_id=thread._id,
                full_slug='%s:%s' % (thread._id, slug),
                discussion_id=discussion._id,
                author_id=author._id)


@flush_on_return
def create_thread(discussion):
    return Thread(discussion_id=discussion._id)


@flush_on_return
def create_discussion():
    return Discussion()


@flush_on_return
def create_user():
    return User()
{ "repo_name": "leotrubach/sourceforge-allura", "path": "Allura/allura/tests/unit/factories.py", "copies": "1", "size": "1642", "license": "apache-2.0", "hash": -7667391126739619000, "line_mean": 23.1470588235, "line_max": 74, "alpha_frac": 0.652862363, "autogenerated": false, "ratio": 3.6570155902004453, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.48098779532004454, "avg_score": null, "num_lines": null }
from functools import wraps

from modbus_tk.modbus import ModbusError
from pyjsonrpc.rpcerror import jsonrpcerrors, JsonRpcError

modbus_mapping = {}


def json_rpc_error(func):
    """ Wraps a function and raises a JSON-RPC error when a ModbusError has
    been caught::

        @json_rpc_error
        def modbus_request(modbus_master, slave_id, function_code,
                           starting_address, quantity):
            modbus_master.execute(int(slave_id), function_code,
                                  int(starting_address), int(quantity))

    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except ModbusError as e:
            raise modbus_mapping[e.get_exception_code()]

    return wrapper


class IllegalFunction(JsonRpcError):
    code = -32001
    message = 'Function code is not valid.'

jsonrpcerrors[IllegalFunction.code] = IllegalFunction
modbus_mapping[1] = IllegalFunction


class IllegalDataAddress(JsonRpcError):
    code = -32002
    message = 'Data address is not valid.'

jsonrpcerrors[IllegalDataAddress.code] = IllegalDataAddress
modbus_mapping[2] = IllegalDataAddress


class IllegalDataValue(JsonRpcError):
    code = -32003
    message = 'Data value is not valid.'

jsonrpcerrors[IllegalDataValue.code] = IllegalDataValue
modbus_mapping[3] = IllegalDataValue


class SlaveDeviceFailure(JsonRpcError):
    code = -32004
    message = 'Slave device could not perform requested action.'

jsonrpcerrors[SlaveDeviceFailure.code] = SlaveDeviceFailure
modbus_mapping[4] = SlaveDeviceFailure


class CommandAcknowledge(JsonRpcError):
    code = -32005
    message = 'Slave device has accepted the request and is ' \
              'processing it, but a long duration of time will be required ' \
              'to do so. This response is returned to prevent a timeout ' \
              'error from occurring in the master.'

jsonrpcerrors[CommandAcknowledge.code] = CommandAcknowledge
modbus_mapping[5] = CommandAcknowledge


class SlaveDeviceBusy(JsonRpcError):
    code = -32006
    message = 'Slave device is busy.'

jsonrpcerrors[SlaveDeviceBusy.code] = SlaveDeviceBusy
modbus_mapping[6] = SlaveDeviceBusy


class NegativeAcknowlegde(JsonRpcError):
    code = -32007
    message = 'Slave device cannot perform the program function received ' \
              'in the query.'

jsonrpcerrors[NegativeAcknowlegde.code] = NegativeAcknowlegde
modbus_mapping[7] = NegativeAcknowlegde


class MemoryParityError(JsonRpcError):
    code = -32008
    message = 'Slave device failed to read extended memory or record file.'

jsonrpcerrors[MemoryParityError.code] = MemoryParityError
modbus_mapping[8] = MemoryParityError


class GateWayPathUnavailable(JsonRpcError):
    code = -32010
    message = 'Gateway is unable to allocate an internal communication ' \
              'path from the input port to the output port.'

jsonrpcerrors[GateWayPathUnavailable.code] = GateWayPathUnavailable
modbus_mapping[10] = GateWayPathUnavailable


class GatewayTargetDeviceFailedToRespond(JsonRpcError):
    code = -32011
    message = 'No response obtained from the target device.'

jsonrpcerrors[GatewayTargetDeviceFailedToRespond.code] = GatewayTargetDeviceFailedToRespond
modbus_mapping[11] = GatewayTargetDeviceFailedToRespond
{ "repo_name": "AdvancedClimateSystems/Tolk", "path": "tolk/exceptions.py", "copies": "1", "size": "3348", "license": "mpl-2.0", "hash": 3001984135148082000, "line_mean": 26.2195121951, "line_max": 91, "alpha_frac": 0.7168458781, "autogenerated": false, "ratio": 3.8794901506373116, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0003799524793148428, "num_lines": 123 }
from functools import wraps

from neupy.network import errors
from neupy.utils import format_data


__all__ = ('mse', 'rmse', 'mae', 'msle', 'rmsle', 'binary_crossentropy',
           'categorical_crossentropy')


def override_theano_function(function):
    """
    Override a Theano function and help evaluate its output.

    Parameters
    ----------
    function : function
        Function that returns a Theano variable.

    Returns
    -------
    function
    """
    @wraps(function)
    def wrapper(actual, expected, *args, **kwargs):
        actual = format_data(actual)
        expected = format_data(expected)

        output = function(actual, expected, *args, **kwargs)
        # use .item(0) to get the first array element and automatically
        # convert a vector that contains one element to a scalar
        return output.eval().item(0)
    return wrapper


mae = override_theano_function(errors.mae)
mse = override_theano_function(errors.mse)
rmse = override_theano_function(errors.rmse)
msle = override_theano_function(errors.msle)
rmsle = override_theano_function(errors.rmsle)
binary_crossentropy = override_theano_function(errors.binary_crossentropy)
categorical_crossentropy = override_theano_function(
    errors.categorical_crossentropy
)
{ "repo_name": "stczhc/neupy", "path": "neupy/estimators.py", "copies": "1", "size": "1260", "license": "mit", "hash": 6673941216718866000, "line_mean": 28.3023255814, "line_max": 74, "alpha_frac": 0.6936507937, "autogenerated": false, "ratio": 3.925233644859813, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5118884438559813, "avg_score": null, "num_lines": null }
from functools import wraps

from nose import SkipTest


class Capabilities(dict):
    def __missing__(self, capability):
        probe = getattr(self, '_' + capability)
        self[capability] = have = probe()
        return have

    def _genshi(self):
        try:
            from genshi.template import MarkupTemplate
            # present only in >= 0.6
            return hasattr(MarkupTemplate, 'add_directives')
        except ImportError:
            return False

    def _jinja2(self):
        try:
            import jinja2
        except ImportError:
            return False
        else:
            return True

have = Capabilities()


def need(capability):
    def decorator(fn):
        @wraps(fn)
        def decorated(*args, **kw):
            if not have[capability]:
                raise SkipTest
            return fn(*args, **kw)
        return decorated
    return decorator


def alternate_expectation(backend, string):
    def decorator(fn):
        try:
            alternates = fn.alternates
        except AttributeError:
            alternates = fn.alternates = {}
        alternates[backend] = string.strip()
        return fn
    return decorator


class desired_output(object):
    def __init__(self, language, schema, **kw):
        self.language = language
        self.schema = schema
        self.expected = None
        self.alternate_expectations = {}
        self.render_context = kw

    def __call__(self, fn):
        self.expected = fn.__doc__.strip()
        self.alternate_expectations = getattr(fn, 'alternates', {})
        return self

    def expectation_for(self, backend):
        try:
            return self.alternate_expectations[backend]
        except KeyError:
            return self.expected

    @property
    def genshi(self):
        def decorator(fn):
            markup = _wrap_with_xmlns(fn.__doc__, self.language)
            fn.__doc__ = None

            @wraps(fn)
            def runner():
                if not have['genshi']:
                    raise SkipTest
                got = _render_genshi(markup, self.language, self.schema,
                                     **self.render_context)
                expected = self.expectation_for('genshi')
                if expected != got:
                    print "\n" + fn.__name__
                    print "Expected:\n" + expected
                    print "Got:\n" + got
                assert expected == got
            return runner
        return decorator

    @property
    def markup(self):
        def decorator(fn):
            @wraps(fn)
            def runner():
                got = _render_markup_fn(fn, self.language, self.schema,
                                        **self.render_context)
                expected = self.expectation_for('markup')
                if expected != got:
                    print "\n" + fn.__name__
                    print "Expected:\n" + expected
                    print "Got:\n" + got
                assert expected == got
            return runner
        return decorator


def markup_test(markup='xml', schema=None):
    """Turn a function into a Generator markup test.

    Desired output is read from the docstring.  The function is passed a
    generator and an Element and is expected to return output matching
    the docstring.

    """
    def decorator(fn):
        expected = fn.__doc__.decode('utf8').strip()

        @wraps(fn)
        def test():
            from flatland.out.markup import Generator
            generator = Generator(markup=markup)
            if schema is not None:
                el = schema()
            else:
                el = None
            got = fn(generator, el)
            assert hasattr(got, '__html__')
            got = got.strip()
            if expected != got:
                print "\n" + fn.__name__
                print "Expected:\n" + expected
                print "Got:\n" + got
            assert expected == got
        return test
    return decorator


def render_genshi(markup, language, schema, wrap=True, **context):
    if wrap:
        markup = _wrap_with_xmlns(markup, language)
    return _render_genshi(markup, language, schema, **context)


def _render_markup_fn(fn, language, schema, **kw):
    from flatland.out.markup import Generator
    generator = Generator(markup=language)
    if schema is not None:
        form = schema()
    else:
        form = None
    output = fn(generator, form, **kw)
    return output.strip()


def _render_genshi(markup, language, schema, **kw):
    from genshi.template import MarkupTemplate
    from flatland.out.genshi import setup

    template = MarkupTemplate(markup)
    setup(template)

    if schema is not None:
        kw['form'] = schema()
    else:
        kw['form'] = None

    output = template.generate(**kw).render(language)

    # strip div wrapper off
    got = output[output.index('\n') + 1:output.rindex('\n')]
    got = got.strip()
    return got


def _wrap_with_xmlns(template, language):
    wrapped = '<div '
    if language == 'xhtml':
        wrapped += 'xmlns="http://www.w3.org/1999/xhtml" '
    wrapped += (
        'xmlns:form="http://ns.discorporate.us/flatland/genshi" '
        'xmlns:py="http://genshi.edgewall.org/">\n' +
        template +
        '\n</div>')
    return wrapped
{ "repo_name": "mmerickel/flatland", "path": "tests/markup/_util.py", "copies": "2", "size": "5322", "license": "mit", "hash": 2685856432010759000, "line_mean": 26.8638743455, "line_max": 74, "alpha_frac": 0.5415257422, "autogenerated": false, "ratio": 4.464765100671141, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.00046561678703722865, "num_lines": 191 }
from functools import wraps from oauthlib.oauth2 import Server from django.http import HttpResponseForbidden from django.core.exceptions import ImproperlyConfigured from .oauth2_validators import OAuth2Validator from .oauth2_backends import OAuthLibCore from .scopes import get_scopes_backend from .settings import oauth2_settings def protected_resource(scopes=None, validator_cls=OAuth2Validator, server_cls=Server): """ Decorator to protect views by providing OAuth2 authentication out of the box, optionally with scope handling. @protected_resource() def my_view(request): # An access token is required to get here... # ... pass """ _scopes = scopes or [] def decorator(view_func): @wraps(view_func) def _validate(request, *args, **kwargs): validator = validator_cls() core = OAuthLibCore(server_cls(validator)) valid, oauthlib_req = core.verify_request(request, scopes=_scopes) if valid: request.resource_owner = oauthlib_req.user return view_func(request, *args, **kwargs) return HttpResponseForbidden() return _validate return decorator def rw_protected_resource(scopes=None, validator_cls=OAuth2Validator, server_cls=Server): """ Decorator to protect views by providing OAuth2 authentication and read/write scopes out of the box. GET, HEAD, OPTIONS http methods require "read" scope. Otherwise "write" scope is required. @rw_protected_resource() def my_view(request): # If this is a POST, you have to provide 'write' scope to get here... # ... pass """ _scopes = scopes or [] def decorator(view_func): @wraps(view_func) def _validate(request, *args, **kwargs): # Check if provided scopes are acceptable provided_scopes = get_scopes_backend().get_all_scopes() read_write_scopes = [oauth2_settings.READ_SCOPE, oauth2_settings.WRITE_SCOPE] if not set(read_write_scopes).issubset(set(provided_scopes)): raise ImproperlyConfigured( "rw_protected_resource decorator requires following scopes {0}" " to be in OAUTH2_PROVIDER['SCOPES'] list in settings".format( read_write_scopes) ) # Check if method is safe if request.method.upper() in ['GET', 'HEAD', 'OPTIONS']: _scopes.append(oauth2_settings.READ_SCOPE) else: _scopes.append(oauth2_settings.WRITE_SCOPE) # proceed with validation validator = validator_cls() core = OAuthLibCore(server_cls(validator)) valid, oauthlib_req = core.verify_request(request, scopes=_scopes) if valid: request.resource_owner = oauthlib_req.user return view_func(request, *args, **kwargs) return HttpResponseForbidden() return _validate return decorator
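# --- Usage sketch (added for illustration; not part of the original module) ---
# Beyond the docstring examples, both decorators accept an explicit scope
# list; the presented token must carry every scope named here. The view
# below is hypothetical.
from django.http import JsonResponse

@protected_resource(scopes=['read', 'profile'])
def profile_view(request):
    # request.resource_owner was set by the decorator after token validation
    return JsonResponse({'username': request.resource_owner.username})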
{ "repo_name": "DeskConnect/django-oauth-toolkit", "path": "oauth2_provider/decorators.py", "copies": "2", "size": "3117", "license": "bsd-2-clause", "hash": -8441618289534767000, "line_mean": 36.1071428571, "line_max": 94, "alpha_frac": 0.6191851139, "autogenerated": false, "ratio": 4.5239477503628445, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6143132864262845, "avg_score": null, "num_lines": null }
from functools import wraps from osgeo import ogr import shapely.wkb from geoalchemy.base import WKBSpatialElement, WKTSpatialElement def reproject_from_native_spatial_reference(f): @wraps(f) def within(self, boundary, spatial_reference=None, **kwargs): # Find the native spatial reference native_spatial_reference = self.native_spatial_reference # If no spatial reference was specified, or if it matches the native one, just render directly if native_spatial_reference is None or spatial_reference is None or spatial_reference.IsSame(native_spatial_reference): return f(self, boundary, spatial_reference=native_spatial_reference, **kwargs) def reproj(g): geom = ogr.CreateGeometryFromWkb(g.wkb) geom.AssignSpatialReference(native_spatial_reference) geom.TransformTo(spatial_reference) return shapely.wkb.loads(geom.ExportToWkb()) geoms = f(self, boundary.transform_to(native_spatial_reference), spatial_reference=native_spatial_reference, **kwargs) return (reproj(x) for x in geoms) return within class IterableGeometry(object): """An object suitable for rendering with Geometry which simply stores an iterable of shapely geometry objects. .. py:attribute:: geom The iterable of shapely geometry objects. """ def __init__(self, geom=None): self.geom = geom self.native_spatial_reference = None @reproject_from_native_spatial_reference def within(self, boundary, spatial_reference=None): """Returns *all* of :py:obj:`self.geom` or an empty list if it is `None`.""" return self.geom or [] class GeoAlchemyGeometry(object): def __init__(self, query_cb=None, geom_cls=None, geom_attr=None, spatial_reference=None, db_srid=None): self.query_cb = query_cb self.geom_cls = geom_cls self.geom_attr = geom_attr or 'geom' self.native_spatial_reference = spatial_reference self.db_srid = db_srid or 4326 @reproject_from_native_spatial_reference def within(self, boundary, spatial_reference=None): if self.query_cb is None: return [] if self.geom_attr is None: return [] q = self.query_cb() if self.geom_cls is not None: bound = WKTSpatialElement(boundary.wkt, srid=self.db_srid) q = q.filter(getattr(self.geom_cls, self.geom_attr).intersects(bound)) return (shapely.wkb.loads(bytes(getattr(x, self.geom_attr).geom_wkb)) for x in q)
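# --- Usage sketch (added for illustration; not part of the original module) ---
# Wrapping plain shapely geometries so a renderer can query them. With no
# native spatial reference configured, ``within`` short-circuits and yields
# every geometry without reprojecting. Assumes shapely is installed.
if __name__ == '__main__':
    from shapely.geometry import Point

    geom = IterableGeometry([Point(0, 0), Point(1, 1)])
    print(list(geom.within(None)))  # both points, untouched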
{ "repo_name": "rjw57/foldbeam", "path": "foldbeam/rendering/geometry.py", "copies": "1", "size": "2623", "license": "apache-2.0", "hash": -5073183530063551000, "line_mean": 35.9436619718, "line_max": 127, "alpha_frac": 0.6599313763, "autogenerated": false, "ratio": 3.9325337331334334, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.00329768167173357, "num_lines": 71 }
from functools import wraps from os import getenv from sqlalchemy.orm import sessionmaker from cafe.abc.compat import abstractclassmethod from cafe.patterns.context import SessionManager class SQLAlchemySessionManager(SessionManager): ENGINE = None @classmethod def default(cls): return cls.instance() @classmethod def instance(cls, engine=None, **kwargs): """ :type engine: sqlalchemy.engine.Engine or None :rtype: cafe.database.sqlalchemy.session.SQLAlchemySessionManager """ return cls(cls.factory(engine=engine, **kwargs)) @staticmethod def _determine_echo(): """ SQLAlchemy echo level, using DATABASE_ECHO environment variable Possible values: True, False, 'debug' :return: True | False | basestring """ echo = getenv('DATABASE_ECHO', 'false') if echo.lower() == 'true': return True if echo.lower() == 'debug': return 'debug' return False @classmethod def factory(cls, engine=None, **kwargs): if engine is None: engine = cls.engine() engine.echo = cls._determine_echo() return sessionmaker(bind=engine, **kwargs) @classmethod def engine(cls, *args, **kwargs): if cls.ENGINE is None: cls.ENGINE = cls.get_engine(*args, **kwargs) return cls.ENGINE @abstractclassmethod def get_engine(cls, *args, **kwargs): """ Default engine for this session manager. :rtype: sqlalchemy.engine.Engine """ raise NotImplementedError def __enter__(self): """ :rtype: sqlalchemy.orm.session.Session """ return super(SQLAlchemySessionManager, self).__enter__() def __exit__(self, exc_type, exc_val, exc_tb): rvalue = True if exc_type is not None: self.session.rollback() rvalue = False else: self.session.commit() super(SQLAlchemySessionManager, self).__exit__(exc_type, exc_val, exc_tb) return rvalue def session_query_wrapper_generator(session_manager=None, engine=None, context=False): """ Decorator which wraps a function in a SQLAlchemy session :param context: execute the wrapped function inside a session context :type context: bool :param session_manager: SessionManager to use :type session_manager: cafe.database.sqlalchemy.session.SQLAlchemySessionManager :param engine: Engine to use to connect to Mimir :type engine: sqlalchemy.engine.Engine """ session_keyword_arg = 'session' def session_decorator(function): @wraps(function) def wrapper(*args, **kwargs): execute_in_context = context if session_keyword_arg not in kwargs or kwargs[session_keyword_arg] is None: if session_manager is None: raise TypeError( 'sessioned query functions should be called with an SQLAlchemySessionManager ' 'or Session instance when a default session manager is not configured.') kwargs[session_keyword_arg] = session_manager.instance(engine=engine) execute_in_context = True instance = kwargs.pop(session_keyword_arg) if isinstance(instance, SQLAlchemySessionManager): instance = instance.instance(engine=engine) execute_in_context = True if execute_in_context: with instance as session: return function(*args, session=session, **kwargs) else: return function(*args, session=instance, **kwargs) return wrapper return session_decorator session_query = session_query_wrapper_generator()
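# --- Usage sketch (added for illustration; not part of the original module) ---
# A concrete manager only has to supply ``get_engine``; everything below
# (the in-memory URL, the query body) is hypothetical.
from sqlalchemy import create_engine, text

class SQLiteSessionManager(SQLAlchemySessionManager):
    @classmethod
    def get_engine(cls, *args, **kwargs):
        return create_engine('sqlite://')

@session_query_wrapper_generator(session_manager=SQLiteSessionManager)
def ping(session=None):
    # the wrapper injects ``session`` and commits/rolls back around the call
    return session.execute(text('SELECT 1')).scalar()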
{ "repo_name": "betsybookwyrm/python-cafe-sqlalchemy", "path": "cafe/database/sqlalchemy/session.py", "copies": "1", "size": "3849", "license": "apache-2.0", "hash": 5195884849008478000, "line_mean": 30.2926829268, "line_max": 102, "alpha_frac": 0.6180826189, "autogenerated": false, "ratio": 4.609580838323353, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5727663457223353, "avg_score": null, "num_lines": null }
from functools import wraps

from pipetools.debug import repr_args, set_name, get_name
from pipetools.ds_builder import DSBuilder, NoBuilder
from pipetools.main import pipe, XObject, StringFormatter, xcurry


def pipe_util(func):
    """
    Decorator that handles X objects and currying for pipe-utils.
    """
    @wraps(func)
    def pipe_util_wrapper(function, *args, **kwargs):
        if isinstance(function, XObject):
            function = ~function

        function_name = get_name(function)

        if args or kwargs:
            function = xcurry(function, *args, **kwargs)

        name = '%s(%s)' % (func.__name__, ', '.join(
            filter(None, (function_name, repr_args(*args, **kwargs)))))

        return pipe | set_name(name, func(function))

    return pipe_util_wrapper


def auto_string_formatter(func):
    """
    Decorator that handles automatic string formatting by converting a
    string argument into a function that applies that format string.
    """
    @wraps(func)
    def auto_string_formatter_wrapper(function, *args, **kwargs):
        if isinstance(function, basestring):
            function = StringFormatter(function)

        return func(function, *args, **kwargs)

    return auto_string_formatter_wrapper


def data_structure_builder(func):
    """
    Decorator to handle automatic data structure creation for pipe-utils.
    """
    @wraps(func)
    def ds_builder_wrapper(function, *args, **kwargs):
        try:
            function = DSBuilder(function)
        except NoBuilder:
            pass
        return func(function, *args, **kwargs)

    return ds_builder_wrapper
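# --- Usage sketch (added for illustration; not part of the original module) ---
# Defining a new pipe-util in the same style pipetools uses internally:
# ``pipe_util`` makes the helper accept X objects and curried extra arguments.
# ``doubled`` is hypothetical.
@pipe_util
def doubled(function):
    return lambda iterable: [function(item) * 2 for item in iterable]

# with ``X`` imported from pipetools:
#   [1, 2, 3] > doubled(X + 1)   ->   [4, 6, 8]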
{ "repo_name": "starenka/pipetools", "path": "pipetools/decorators.py", "copies": "1", "size": "1638", "license": "mit", "hash": 1170350596101668900, "line_mean": 26.7627118644, "line_max": 78, "alpha_frac": 0.6440781441, "autogenerated": false, "ratio": 4.084788029925187, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5228866174025187, "avg_score": null, "num_lines": null }
from functools import wraps from plash import utils from plash.eval import hint, register_macro def cache_container_hint(cache_key_templ): def decorator(func): @wraps(func) def wrapper(*args): cache_key = cache_key_templ.format(":".join(args)).replace("/", "%") container_id = utils.plash_call("map", cache_key) if not container_id: container_id = func(*args) utils.plash_call("map", cache_key, container_id) return hint("image", container_id) return wrapper return decorator @register_macro() @cache_container_hint("docker:{}") def from_docker(image): "use image from local docker" return utils.plash_call("import-docker", image) @register_macro() @cache_container_hint("lxc:{}") def from_lxc(image): "use images from images.linuxcontainers.org" return utils.plash_call("import-lxc", image) @register_macro() @cache_container_hint("url:{}") def from_url(url): "import image from an url" return utils.plash_call("import-url", url) @register_macro() def from_id(image): "specify the image from an image id" return hint("image", image) class MapDoesNotExist(Exception): pass @register_macro() def from_map(map_key): "use resolved map as image" image_id = utils.plash_call("map", map_key) if not image_id: raise MapDoesNotExist("map {} not found".format(repr(map_key))) return hint("image", image_id) @register_macro("from") def from_(image): "guess from where to take the image" if image.isdigit(): return from_id(image) else: return from_lxc(image) @register_macro() @cache_container_hint("github:{}") def from_github(user_repo_pair, file="plashfile"): "build and use a file (default 'plashfile') from a github repo" return utils.plash_call("build", "--eval-github", user_repo_pair, file)
{ "repo_name": "ihucos/plash", "path": "lib/python/plash/macros/froms.py", "copies": "1", "size": "1917", "license": "mit", "hash": -5020219556562920000, "line_mean": 24.2236842105, "line_max": 80, "alpha_frac": 0.6478873239, "autogenerated": false, "ratio": 3.4854545454545454, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9632529653630049, "avg_score": 0.00016244314489928524, "num_lines": 76 }
from functools import wraps

from plumeria.command import CommandError


async def get_voice_client(member, move_to=False, any_channel=False):
    user_voice_channel = member.voice.voice_channel
    if user_voice_channel is None:
        raise CommandError("You are not currently in a voice channel.")
    voice_client = member.transport.voice_client_in(member.server)
    if voice_client is None:
        return await member.transport.join_voice_channel(user_voice_channel)
    elif voice_client.channel != user_voice_channel:
        if move_to:
            await voice_client.move_to(user_voice_channel)
            # return the client rather than the channel so every branch of
            # this helper yields a voice client
            return voice_client
        elif any_channel:
            return voice_client
        else:
            raise CommandError("The bot is busy in another voice channel.")
    else:
        return voice_client


def voice_with_bot_only(f):
    """Make sure that the command is being run by a user in the same voice channel as the bot."""

    @wraps(f)
    async def wrapper(message, *args, **kwargs):
        await get_voice_client(message.author)
        return await f(message, *args, **kwargs)

    wrapper.voice_with_bot_only = True
    return wrapper
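# --- Usage sketch (added for illustration; not part of the original module) ---
# A hypothetical command handler guarded by the decorator: the body only runs
# once get_voice_client() has confirmed the caller shares a voice channel with
# the bot (otherwise a CommandError propagates to the command dispatcher).
@voice_with_bot_only
async def now_playing(message):
    voice_client = await get_voice_client(message.author)
    return "Connected to {}".format(voice_client.channel.name)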
{ "repo_name": "sk89q/Plumeria", "path": "plumeria/util/voice.py", "copies": "1", "size": "1178", "license": "mit", "hash": -4633650814688203000, "line_mean": 33.6470588235, "line_max": 97, "alpha_frac": 0.6748726655, "autogenerated": false, "ratio": 3.993220338983051, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5168093004483051, "avg_score": null, "num_lines": null }
from functools import wraps

from pubsub import pub

__author__ = 'e.kolpakov'


def get_observers(target):
    """
    Gets the observers attached to a callable, if any
    :param target: callable
    :return: list of BaseObserver
    """
    raw = getattr(target, BaseObserver.OBSERVER_ATTRIBUTE) if hasattr(target, BaseObserver.OBSERVER_ATTRIBUTE) else []
    try:
        return [observer for observer in raw if isinstance(observer, BaseObserver)]
    except TypeError:
        return []


def get_agent_for_class_method(args):
    from model.agents.base_agents import BaseAgent
    agent = args[0]
    if not isinstance(agent, BaseAgent):
        raise ValueError("Base agent expected, got {0}".format(agent))
    return agent


def observer_trigger(target):
    @wraps(target)
    def wrapper(*args, **kwargs):
        agent = get_agent_for_class_method(args)
        result = target(*args, **kwargs)
        agent.observe()
        return result
    return wrapper


class BaseObserver:
    OBSERVER_ATTRIBUTE = "observer"

    def __init__(self, topic, target):
        self._topic = topic
        self._target = target

    def inspect(self, agent):
        raise NotImplementedError()

    @staticmethod
    def _append_observer(target, observer):
        """
        :param target: callable
        :param observer: BaseObserver
        """
        if not hasattr(target, BaseObserver.OBSERVER_ATTRIBUTE):
            setattr(target, BaseObserver.OBSERVER_ATTRIBUTE, [])
        observers = getattr(target, BaseObserver.OBSERVER_ATTRIBUTE)
        observers.append(observer)


class Observer(BaseObserver):
    def __init__(self, topic, target, converter=None):
        super(Observer, self).__init__(topic, target)
        self._converter = converter if converter else lambda x: x

    def inspect(self, agent):
        pub.sendMessage(self._topic, agent=agent, value=self._get_value(agent))

    def _get_value(self, agent):
        return self._converter(self._target(agent))

    @property
    def topic(self):
        """
        :return: str
        """
        return self._topic

    @classmethod
    def observe(cls, topic, converter=None):
        if not topic:
            raise ValueError("Non-empty topic expected, got {0}".format(topic))

        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                return func(*args, **kwargs)
            cls._append_observer(wrapper, cls(topic, func, converter))
            return wrapper
        return decorator


class DeltaObserver(Observer):
    def __init__(self, topic, target, delta_calculator, converter=None):
        super(DeltaObserver, self).__init__(topic, target, converter)
        self._delta_calculator = delta_calculator
        self._previous = dict()

    def inspect(self, agent):
        value = self._get_value(agent)
        previous = self._get_previous_value(agent)
        delta = self._delta_calculator(value, previous) if previous is not None else value
        self._set_previous_value(agent, value)
        pub.sendMessage(self._topic, agent=agent, delta=delta)

    def _get_previous_value(self, agent):
        return self._previous[agent] if agent in self._previous else None

    def _set_previous_value(self, agent, value):
        self._previous[agent] = value

    @classmethod
    def observe(cls, topic, converter=None, delta=None):
        if not topic:
            raise ValueError("Non-empty topic expected, got {0}".format(topic))
        if not delta or not callable(delta):
            raise ValueError("Callable delta expected, got {0}".format(delta))

        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                return func(*args, **kwargs)
            cls._append_observer(wrapper, cls(topic, func, delta, converter))
            return wrapper
        return decorator


class CallObserver(BaseObserver):
    @classmethod
    def observe(cls, topic):
        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                value = func(*args, **kwargs)
                pub.sendMessage(topic, args=args, kwargs=kwargs)
                return value
            return wrapper
        return decorator


class AgentCallObserver(BaseObserver):
    @classmethod
    def observe(cls, topic):
        def decorator(func):
@wraps(func) def wrapper(*args, **kwargs): value = func(*args, **kwargs) agent = get_agent_for_class_method(args) pub.sendMessage(topic, agent=agent, args=args[1:], kwargs=kwargs) return value return wrapper return decorator
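# --- Usage sketch (added for illustration; not part of the original module) ---
# Attaching an Observer to an agent method and inspecting it: pubsub then
# delivers the converted value to every subscriber of the topic. The class
# and topic below are hypothetical (a real target would subclass BaseAgent).
def _print_knowledge(agent, value):
    print(agent, value)


class StudentAgent:
    @Observer.observe('agent.knowledge', converter=len)
    def knowledge(self):
        return {'fact-1', 'fact-2'}


if __name__ == '__main__':
    pub.subscribe(_print_knowledge, 'agent.knowledge')
    student = StudentAgent()
    for observer in get_observers(StudentAgent.knowledge):
        observer.inspect(student)  # publishes value=2 on 'agent.knowledge'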
{ "repo_name": "e-kolpakov/study-model", "path": "model/infrastructure/observers.py", "copies": "1", "size": "4693", "license": "mit", "hash": 8154139428601111000, "line_mean": 27.9691358025, "line_max": 118, "alpha_frac": 0.6119752823, "autogenerated": false, "ratio": 4.301558203483043, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5413533485783043, "avg_score": null, "num_lines": null }
from functools import wraps from pulsar.api import ImproperlyConfigured from pulsar.apps import wsgi from lux.models import Schema from lux.openapi import OperationInfo from lux.utils.data import compact_dict class route(wsgi.route): """Extend pulsar wsgi route decorator for openapi information It adds the openapi namedtuple to the route parameters dictionary """ def __init__(self, rule=None, body_schema=None, path_schema=None, query_schema=None, header_schema=None, default_response=200, default_response_schema=None, responses=None, **kwargs): if isinstance(rule, type(Schema)): rule = rule() if isinstance(rule, Schema): if path_schema: raise ImproperlyConfigured( 'both rule and path_schema are provided as schema' ) path_schema = rule rule = path_schema.rule() kwargs['openapi'] = OperationInfo( path=path_schema, body=body_schema, query=query_schema, header=header_schema, responses=responses, default_response=default_response, default_response_schema=default_response_schema ) super().__init__(rule, **kwargs) def __call__(self, method): api = self.parameters['openapi'] if api.body or api.responses[api.default_response]: # the callable must accept the schema as second parameter @wraps(method) def _(router, request): return method(router, request, **compact_dict( body_schema=api.body, query_schema=api.query, schema=api.schema )) return super().__call__(_) return super().__call__(method)
{ "repo_name": "quantmind/lux", "path": "lux/ext/rest/route.py", "copies": "1", "size": "1874", "license": "bsd-3-clause", "hash": 6896493764760453000, "line_mean": 32.4642857143, "line_max": 77, "alpha_frac": 0.5789754536, "autogenerated": false, "ratio": 4.696741854636591, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5775717308236592, "avg_score": null, "num_lines": null }
from functools import wraps

from pyaib.components import component_class


@component_class("acl")
class ACL(object):
    def __init__(self, ctx, config):
        self.permissions = config.get("permissions", {})

    def allowed(self, trigger, chan, nick):
        channel = self.permissions.get(chan, {})
        cmd = channel.get(trigger, {})

        # deny takes priority: bail out if the nick is in the `deny` list
        denied_nicks = set(cmd.get("deny", []))
        if "*" in denied_nicks or nick in denied_nicks:
            return False

        # allow the nick if it is in the `allow` list
        allowed_nicks = set(cmd.get("allow", []))
        if "*" in allowed_nicks or nick in allowed_nicks:
            return True

        # deny unmatched nicks when the trigger has an ACL entry;
        # allow everyone when it has none
        return False if cmd else True


def icanhaz(func):
    @wraps(func)
    def wrapped(ctx, msg, trigger, args, kwargs):
        nick = msg.nick
        if not ctx.acl.allowed(trigger, msg.channel, nick):
            msg.reply("sorry {0}, you're not allowed".format(nick))
            return
        return func(ctx, msg, trigger, args, kwargs)
    return wrapped
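# --- Usage sketch (added for illustration; not part of the original module) ---
# The permissions shape this plugin reads, evaluated deny-first, then allow,
# then deny-by-default for any trigger that has an entry. The names are
# hypothetical, and this assumes pyaib's component_class returns the class
# unchanged so it can be instantiated directly.
if __name__ == '__main__':
    acl = ACL(None, {"permissions": {
        "#ops": {"restart": {"allow": ["alice"], "deny": ["mallory"]}},
    }})
    assert acl.allowed("restart", "#ops", "alice")        # explicitly allowed
    assert not acl.allowed("restart", "#ops", "mallory")  # deny wins
    assert not acl.allowed("restart", "#ops", "bob")      # entry exists -> deny
    assert acl.allowed("say", "#ops", "bob")              # no entry -> allow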
{ "repo_name": "gchandrasa/pinbot", "path": "pinbot/components/acl.py", "copies": "1", "size": "1171", "license": "mit", "hash": -8604532056675650000, "line_mean": 29.8157894737, "line_max": 67, "alpha_frac": 0.6037574722, "autogenerated": false, "ratio": 3.9163879598662206, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 38 }
from functools import wraps from pycloudia.utils.defer import inline_callbacks, return_value, maybe_deferred from pycloudia.cluster.consts import HEADER, STATUS from pycloudia.cluster.interfaces import IRequestPackage __all__ = [ 'ResolverMeta', 'resolve_errors', 'resolve_method', ] class ResolverMeta(type): def __new__(mcs, name, bases, namespace): cls = super(ResolverMeta, mcs).__new__(mcs, name, bases, namespace) cls.exception_map = {} for base in bases: if hasattr(base, 'exception_map'): cls.exception_map.update(base.exception_map) cls.exception_map.update(dict( (method.__exception_type__, method) for method in namespace.values() if hasattr(method, '__exception_type__') )) return cls def resolve(cls, exception, logger=None): try: method = cls.exception_map[type(exception)] except KeyError: if logger: logger.exception(exception) else: raise exception else: return method.__exception_verbose__, method(exception) class ResolverMethodDecorator(object): def __init__(self, exception_type, verbose=None): self.exception_type = exception_type self.verbose = verbose or exception_type.__name__ def __call__(self, method): method.__exception_type__ = self.exception_type method.__exception_verbose__ = self.verbose return method class ResolverDecorator(object): def __init__(self, resolver, logging=True): self.resolver = resolver self.logging = logging def __call__(self, func): @wraps(func) @inline_callbacks def decorator(subject, package, *args, **kwargs): assert isinstance(package, IRequestPackage) try: response = yield maybe_deferred(func(subject, package, *args, **kwargs)) except Exception as e: if self.logging: verbose, content = self.resolver.resolve(e, subject.logger) else: verbose, content = self.resolver.resolve(e) response = package.create_response(content, { HEADER.STATUS: STATUS.FAILURE, HEADER.REASON: verbose, }) return_value(response) return decorator resolve_errors = ResolverDecorator resolve_method = ResolverMethodDecorator
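# --- Usage sketch (added for illustration; not part of the original module) ---
# A resolver built on the metaclass: each @resolve_method maps an exception
# type to a handler, and resolve() looks the handler up by the exception's
# type. Note the handler receives the exception itself as its only argument.
# The names below are hypothetical (Python 3 syntax; Python 2 would set
# __metaclass__ instead).
class ApiErrorResolver(metaclass=ResolverMeta):
    @resolve_method(KeyError, verbose='not-found')
    def handle_missing(exc):
        return {'missing_key': str(exc)}

# ApiErrorResolver.resolve(KeyError('user_id'))
#   -> ('not-found', {'missing_key': "'user_id'"})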
{ "repo_name": "cordis/pycloudia", "path": "pycloudia/cluster/resolver.py", "copies": "1", "size": "2525", "license": "mit", "hash": 1927005801108601000, "line_mean": 30.5625, "line_max": 88, "alpha_frac": 0.5952475248, "autogenerated": false, "ratio": 4.484902309058614, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0002947704258565682, "num_lines": 80 }
from functools import wraps from pyramid.httpexceptions import HTTPNotFound, HTTPForbidden from social.utils import setting_name, module_member from social.strategies.utils import get_strategy from social.backends.utils import user_backends_data DEFAULTS = { 'STORAGE': 'social.apps.pyramid_app.models.PyramidStorage', 'STRATEGY': 'social.strategies.pyramid_strategy.PyramidStrategy' } def get_helper(request, name): return request.registry.settings.get(setting_name(name), DEFAULTS.get(name, None)) def load_strategy(request, *args, **kwargs): backends = get_helper(request, 'AUTHENTICATION_BACKENDS') strategy = get_helper(request, 'STRATEGY') storage = get_helper(request, 'STORAGE') return get_strategy(backends, strategy, storage, request=request, *args, **kwargs) def strategy(redirect_uri=None): def decorator(func): @wraps(func) def wrapper(request, *args, **kwargs): backend = request.matchdict.get('backend') if not backend: return HTTPNotFound('Missing backend') uri = redirect_uri if uri and not uri.startswith('/'): uri = request.route_url(uri, backend=backend) request.strategy = load_strategy(request, backend=backend, redirect_uri=uri, *args, **kwargs) return func(request, *args, **kwargs) return wrapper return decorator def login_required(func): @wraps(func) def wrapper(request, *args, **kwargs): is_logged_in = module_member( request.strategy.setting('LOGGEDIN_FUNCTION') ) if not is_logged_in(request): raise HTTPForbidden('Not authorized user') return func(request, *args, **kwargs) return wrapper def backends(request, user): """Load Social Auth current user data to context under the key 'backends'. Will return the output of social.backends.utils.user_backends_data.""" storage = module_member(get_helper(request, 'STORAGE')) return { 'backends': user_backends_data( user, get_helper(request, 'AUTHENTICATION_BACKENDS'), storage ) }
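# --- Usage sketch (added for illustration; not part of the original module) ---
# A Pyramid view guarded by both decorators. @strategy must be outermost so
# request.strategy exists before login_required consults the LOGGEDIN_FUNCTION
# setting. The route name and backend attribute access are illustrative only.
@strategy('social.complete')
@login_required
def associate(request):
    return {'backend': request.strategy.backend.name}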
{ "repo_name": "imsparsh/python-social-auth", "path": "social/apps/pyramid_app/utils.py", "copies": "3", "size": "2264", "license": "bsd-3-clause", "hash": -1389448066563892200, "line_mean": 32.7910447761, "line_max": 79, "alpha_frac": 0.6347173145, "autogenerated": false, "ratio": 4.200371057513915, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 67 }
from functools import wraps

from qtpy import QtWidgets

from .histogram import HistogramWidget, HistogramModel, HistogramController
from .pdsspect_image_set import PDSSpectImageSetViewBase


class BasicHistogramModel(HistogramModel):
    """Model for the histograms in the Basic Widgets

    Attributes
    ----------
    connected_models : :obj:`list`
        Other :class:`BasicHistogramModel` for other histograms
    """

    def __init__(self, *args, **kwargs):
        super(BasicHistogramModel, self).__init__(*args, **kwargs)
        self.connected_models = []

    def check_model_type(func):
        @wraps(func)
        def wrapper(self, model):
            if not isinstance(model, BasicHistogramModel):
                raise ValueError("Model must be a BasicHistogramModel object")
            return func(self, model)
        return wrapper

    @check_model_type
    def connect_model(self, model):
        """Connect another model to this model

        Parameters
        ----------
        model : :class:`BasicHistogramModel`
            Connect the model to current model

        Raises
        ------
        ValueError
            When :attr:`model` is not :class:`BasicHistogramModel`
        """
        if model not in self.connected_models:
            self.connected_models.append(model)
            model.cuts = self.cuts

    @check_model_type
    def disconnect_model(self, model):
        """Disconnect another model from this model

        Parameters
        ----------
        model : :class:`BasicHistogramModel`
            Disconnect the model from current model

        Raises
        ------
        ValueError
            When :attr:`model` is not :class:`BasicHistogramModel`
        """
        if model in self.connected_models:
            self.connected_models.remove(model)

    def disconnect_from_all_models(self):
        """Disconnect all models from this model"""
        self.connected_models = []


class BasicHistogramController(HistogramController):
    """Controller for histogram views

    Parameters
    ----------
    model : :class:`BasicHistogramModel`
        histogram model
    view : :class:`object`
        View with :class:`BasicHistogramModel` as its model

    Attributes
    ----------
    model : :class:`BasicHistogramModel`
        histogram model
    view : :class:`object`
        View with :class:`BasicHistogramModel` as its model
    """

    def set_cut_low(self, cut_low):
        """Set the low cut level to a new value

        Parameters
        ----------
        cut_low : :obj:`float`
            New low cut value
        """
        super(BasicHistogramController, self).set_cut_low(cut_low)
        for model in self.model.connected_models:
            model.cut_low = cut_low

    def set_cut_high(self, cut_high):
        """Set the high cut level to a new value

        Parameters
        ----------
        cut_high : :obj:`float`
            New high cut value
        """
        super(BasicHistogramController, self).set_cut_high(cut_high)
        for model in self.model.connected_models:
            model.cut_high = cut_high

    def set_cuts(self, cut_low, cut_high):
        """Set both the low and high cut levels

        Parameters
        ----------
        cut_low : :obj:`float`
            New low cut value
        cut_high : :obj:`float`
            New high cut value
        """
        super(BasicHistogramController, self).set_cuts(cut_low, cut_high)
        for model in self.model.connected_models:
            model.cuts = cut_low, cut_high

    def restore(self):
        """Restore the histogram"""
        super(BasicHistogramController, self).restore()
        for model in self.model.connected_models:
            model.restore()


class BasicHistogramWidget(HistogramWidget):
    """:class:`~.pdsspect.histogram.HistogramWidget` in a different layout"""

    def __init__(self, *args, **kwargs):
        super(BasicHistogramWidget, self).__init__(*args, **kwargs)
        self.controller = BasicHistogramController(self.model, self)
        self.histogram.controller = BasicHistogramController(
            self.model, self.histogram
        )

    def _create_layout(self):
        layout = QtWidgets.QGridLayout()
        layout.addWidget(self._cut_low_label, 0, 1)
        layout.addWidget(self._cut_low_box, 0, 2)
        layout.addWidget(self._cut_high_label, 1, 1)
        layout.addWidget(self._cut_high_box,
1, 2) layout.addWidget(self._bins_label, 2, 1) layout.addWidget(self._bins_box, 2, 2) layout.addWidget(self.histogram, 0, 0, 3, 1) return layout class BasicController(object): """Controller for :class:`Basic` window Parameters ---------- image_set : :class:`~.pdsspect_image_set.PDSSpectImageSet` pdsspect model view : :class:`Basic` View to control Attributes ---------- image_set : :class:`~.pdsspect_image_set.PDSSpectImageSet` pdsspect model view : :class:`Basic` View to control """ def __init__(self, image_set, view): self.image_set = image_set self.view = view def change_current_image_index(self, new_index): """Change the current image index to a new index Parameters ---------- new_index : :obj:`int` The new index for :class:`~.pdsspect_image_set.PDSSpectImageSetViewBase.images` to determine the current image """ self.image_set.current_image_index = new_index class BasicWidget(QtWidgets.QWidget): """Widget to hold each basic window Parameters ---------- image_set : :class:`~.pdsspect_image_set.PDSSpectImageSet` pdsspect model view_canvas : :class:`~.pds_image_view_canvas.PDSImageViewCanvas` view canvas Attributes ---------- image_set : :class:`~.pdsspect_image_set.PDSSpectImageSet` pdsspect model basics : :obj:`list` of :class:`Basic` :class:`Basic` in the widget """ def __init__(self, image_set, view_canvas): super(BasicWidget, self).__init__() self.image_set = image_set self.basics = [] self.main_layout = QtWidgets.QHBoxLayout() self.setLayout(self.main_layout) self.setWindowTitle('Basic') self.add_basic(image_set, view_canvas) def add_basic(self, image_set, view_canvas): """Add a :class:`Basic` to the widget Parameters ---------- image_set : :class:`~.pdsspect_image_set.PDSSpectImageSet` pdsspect model view_canvas : :class:`~.pds_image_view_canvas.PDSImageViewCanvas` view canvas """ basic = Basic(image_set, view_canvas, self) self.basics.append(basic) self.main_layout.addWidget(basic) self.connect_model(basic) def connect_model(self, basic): """Connect the models of other basic windows to the given window The models are connected when they have the same current image Parameters ---------- basic : :class:`Basic` Basic window connect/disconnect its histogram model to others """ other_basics = list(self.basics) other_basics.remove(basic) for other_basic in other_basics: image = other_basic.image_set.current_image if image == basic.image_set.current_image: other_basic.histogram.connect_model(basic.histogram) basic.histogram.connect_model(other_basic.histogram) else: other_basic.histogram.disconnect_model(basic.histogram) basic.histogram.disconnect_model(other_basic.histogram) class Basic(QtWidgets.QWidget, PDSSpectImageSetViewBase): """Window to apply cut levels and choose the current image Parameters ---------- image_set : :class:`~.pdsspect_image_set.PDSSpectImageSet` pdsspect model view_canvas : :class:`~.pds_image_view_canvas.PDSImageViewCanvas` Canvas to view the image Attributes ---------- image_set : :class:`~.pdsspect_image_set.PDSSpectImageSet` pdsspect model view_canvas : :class:`~.pds_image_view_canvas.PDSImageViewCanvas` Canvas to view the image controller : :class:`BasicController` Controller for view image_menu : :class:`QtWidgets.QComboBox <PySide.QtGui.QComboBox>` Drop down menu to pick the current image histogram : :class:`~.histogram.HistogramModel` Model for the :attr:`histogram_widget` histogram_widget : :class:`BasicHistogramWidget` The histogram widget to adjust the cut levels layout : :class:`QtWidgets.QVBoxLayout <PySide.QtGui.QVBoxLayout>` The main layout """ def 
__init__(self, image_set, view_canvas, basic_widget): super(Basic, self).__init__(basic_widget) self.image_set = image_set self.image_set.register(self) self.basic_widget = basic_widget self.controller = BasicController(image_set, self) self.view_canvas = view_canvas self.image_menu = QtWidgets.QComboBox() for image in self.image_set.images: self.image_menu.addItem(image.image_name) self.image_menu.setCurrentIndex(image_set.current_image_index) self.image_menu.currentIndexChanged.connect(self.change_image) self.histogram = BasicHistogramModel(self.view_canvas, bins=100) self.histogram_widget = BasicHistogramWidget(self.histogram, self) self.layout = QtWidgets.QVBoxLayout() self.layout.addWidget(self.image_menu) self.layout.addWidget(self.histogram_widget) self.setLayout(self.layout) self.histogram.set_data() def change_image(self, new_index): """Change the image when new image selected in :attr:`image_menu` Parameters ---------- new_index : :obj:`int` The new index to determine the current image """ self.image_set.current_image.cuts = self.histogram.cuts self.controller.change_current_image_index(new_index) self.basic_widget.connect_model(self) def set_image(self): """When the image is set, adjust the histogram""" self.histogram.set_data() self.histogram.restore()
{ "repo_name": "planetarypy/pdsspect", "path": "pdsspect/basic.py", "copies": "1", "size": "10353", "license": "bsd-3-clause", "hash": -2988340413348178400, "line_mean": 30.2779456193, "line_max": 78, "alpha_frac": 0.6127692456, "autogenerated": false, "ratio": 4.176280758370311, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.528905000397031, "avg_score": null, "num_lines": null }
from functools import wraps from redis import Redis, StrictRedis from rq import Queue import ujson from search.whoosh_redis_storage import RedisStore redis_conn = StrictRedis() # no args implies the default queue q = Queue(connection=redis_conn) ########## decorator stuff ############ from django.http import HttpResponse from api.exceptions import APIException, NotAuthenticated from django.http import Http404 def to_json(f): @wraps(f) def _decorator_func(*args, **kwargs): try: result = f(*args, **kwargs) except APIException as api_exception: result = api_exception.to_dict() result["ok"] = False except Http404 as not_found_exception: result = { "status_code": 404, "detail": "resource not found" } result["ok"] = False if "ok" not in result: result["ok"] = True result["status_code"] = 200 return HttpResponse(ujson.dumps(result), content_type="application/json" ) return _decorator_func def required_login(f): @wraps(f) def _decorator_func(request, *args, **kwargs): # print "request.user.is_authenticated():%s"%request.user.is_authenticated() if not request.user.is_authenticated(): raise NotAuthenticated("require login") return f(request, *args, **kwargs) return _decorator_func def _get_whoosh_ix(): # refer to flask-whooshalchemy # use FileStorage ix = {} def _(schemaName, schema): storage = RedisStore(redis_conn, schemaName) # print "ix.get(schemaName):%s"%ix.get(schemaName) if ix.get(schemaName) is None: if storage.folder_exists(schemaName): ## problem here ix[schemaName] = storage.open_index() else: # print "create Index" ix[schemaName] = storage.create_index(schema) return ix.get(schemaName) return _ get_whoosh_ix = _get_whoosh_ix()
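# --- Usage sketch (added for illustration; not part of the original module) ---
# Stacking the decorators on a Django view: @to_json must be outermost so it
# can turn the NotAuthenticated raised by @required_login into a JSON error
# payload. The view below is hypothetical.
@to_json
@required_login
def my_articles(request):
    # serialized as {"articles": [], "ok": true, "status_code": 200}
    return {"articles": []}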
{ "repo_name": "zhy0216/random-read", "path": "utils/__init__.py", "copies": "1", "size": "2075", "license": "mit", "hash": 741807323389888000, "line_mean": 25.2658227848, "line_max": 84, "alpha_frac": 0.593253012, "autogenerated": false, "ratio": 4.029126213592233, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5122379225592233, "avg_score": null, "num_lines": null }
from functools import wraps from rest_framework.generics import GenericAPIView from rest_framework.renderers import JSONRenderer from c3nav.mapdata.utils.json import json_encoder_reindent default_app_config = 'c3nav.api.apps.APIConfig' orig_render = JSONRenderer.render @wraps(JSONRenderer.render) def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None): if self.get_indent(accepted_media_type, renderer_context) is None: return orig_render(self, data, accepted_media_type, renderer_context) shorten_limit = 50 if isinstance(data, (list, tuple)): shorten_limit = 5 if any(('geometry' in item) for item in data[:50]) else 50 shorten = isinstance(data, (list, tuple)) and len(data) > shorten_limit if shorten: remaining_len = len(data)-shorten_limit data = data[:shorten_limit] result = json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data) if shorten: result = (result[:-2] + ('\n ...%d more elements (truncated for HTML preview)...' % remaining_len).encode() + result[-2:]) return result # Monkey patch for nicer indentation in the django rest framework JSONRenderer.render = nicer_renderer # Fuck serializers! del GenericAPIView.get_serializer
{ "repo_name": "c3nav/c3nav", "path": "src/c3nav/api/__init__.py", "copies": "1", "size": "1335", "license": "apache-2.0", "hash": -1247203535589538300, "line_mean": 35.0810810811, "line_max": 111, "alpha_frac": 0.7018726592, "autogenerated": false, "ratio": 3.75, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9946144990184352, "avg_score": 0.0011455338031295516, "num_lines": 37 }
from functools import wraps from rest_framework import status as status_code from rest_framework.response import Response from app.models import Passphrase def guard(func): """ This decorator enforces passphrase authentication. """ @wraps(func) def decorated_func(viewset, request, pk=None, *args, **kwargs): passphrase = request.POST.get("passphrase", None) if not passphrase: passphrase = request.data.get("passphrase", None) exists = Passphrase.exists(passphrase) if exists.status: request.passphrase = exists.matched_list[0] if pk: return func(viewset, request, pk, *args, **kwargs) return func(viewset, request, *args, **kwargs) else: content = { "status": "Invalid passphrase. Contact the admin to provide authorization" } status = status_code.HTTP_401_UNAUTHORIZED return Response(content, status=status) return decorated_func
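# --- Usage sketch (added for illustration; not part of the original module) ---
# Guarding a DRF viewset action: the decorator either attaches the matched
# Passphrase to request.passphrase or answers 401 by itself. The viewset is
# hypothetical.
from rest_framework import viewsets

class LunchViewSet(viewsets.ViewSet):
    @guard
    def create(self, request):
        return Response({"passphrase": str(request.passphrase)})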
{ "repo_name": "waitress-andela/waitress", "path": "waitress/app/decorators.py", "copies": "2", "size": "1032", "license": "mit", "hash": -3265002032095217700, "line_mean": 31.25, "line_max": 90, "alpha_frac": 0.628875969, "autogenerated": false, "ratio": 4.467532467532467, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6096408436532468, "avg_score": null, "num_lines": null }
from functools import wraps

from rest_framework import status
from rest_framework.response import Response


def params(**kwargs):
    """ Request fn decorator that builds up a list of params and automatically returns a 400 if they are invalid.
    The validated params are passed to the wrapped function as kwargs.
    """
    # Types that we'll allow for as 'tuple' params
    TUPLE_TYPES = tuple, set, frozenset, list
    VALID_TYPES = int, float, str

    class ParamValidator(object):
        # name
        param_name = None  # the name of the param in the request, e.g. 'user_id' (even if we pass 'user' to the Fn)
        # type
        param_type = None
        # method - explicitly allow a certain method. If both are false we'll use defaults
        allow_GET = False
        allow_POST = False
        # value validators
        gt = None
        gte = None
        lt = None
        lte = None
        eq = None
        # optional
        optional = False
        default = None
        # multiple vals
        many = False
        # django models only
        deferred = True
        field = 'id'

        def __init__(self, arg_name, **kwargs):
            self.param_name = arg_name
            for k, v in kwargs.items():
                setattr(self, k, v)

        def check_type(self, param):
            """ Check that the type of param is valid, or raise an Exception.
            This doesn't take self.many into account.
            """
            valid_type = True
            if isinstance(self.param_type, TUPLE_TYPES):
                if param not in self.param_type:
                    raise Exception('invalid option "%s": Must be one of: %s' % (param, self.param_type))
            else:
                if self.param_type == int:
                    param = int(param)
                elif self.param_type == float:
                    param = float(param)
                elif self.param_type == str:
                    assert(isinstance(param, (str, unicode)))
                    param = unicode(param)
                elif self.param_type == bool:
                    param = bool(param)
                elif hasattr(self.param_type, '_default_manager'):
                    # isinstance(django.models.Model) doesn't seem to work, but this is a good tell
                    query_set = self.param_type.objects
                    if self.deferred:
                        query_set = query_set.only('id')
                    param = query_set.get(**{self.field: param})
                else:
                    valid_type = False
            if not valid_type:
                raise Exception("Invalid param type: %s" % self.param_type.__name__)
            return param

        def check_value(self, param):
            """ Check that a single value is lt/gt/etc. Doesn't take self.many into account.
            """
            val = None
            if self.param_type == int or self.param_type == float:
                val = param
            elif self.param_type == str:
                val = len(param)
            if val:
                try:
                    if self.eq and val != self.eq:
                        raise Exception("must be equal to %s!" % self.eq)
                    else:
                        if self.lt and val >= self.lt:
                            raise Exception("must be less than %s!" % self.lt)
                        if self.lte and val > self.lte:
                            raise Exception("must be less than or equal to %s!" % self.lte)
                        if self.gt and val <= self.gt:
                            raise Exception("must be greater than %s!" % self.gt)
                        if self.gte and val < self.gte:
                            raise Exception("must be greater than or equal to %s!"
                                            % self.gte)
                except Exception as e:
                    msg = str(e)
                    msg = ("Length " if self.param_type == str else 'Value ') + msg
                    raise Exception(msg)

    validators = {}
    for k, v in kwargs.items():
        parts = k.split('__')
        param_key = parts[0]
        if param_key not in validators:
            validators[param_key] = ParamValidator(param_key)
        obj = validators[param_key]
        if (len(parts) == 1):
            # set type
            if not hasattr(v, '_default_manager'):  # django model
                if not isinstance(v, TUPLE_TYPES) and not v in VALID_TYPES:
                    raise Exception("Invalid type for %s: %s is not a valid type" % (k, v))
            obj.param_type = v
        else:
            # we are only interested in the last part, since the only thing that can be multipart is __length__eq (etc) and 'length' is not important
            last_part = parts[-1]

            if last_part == 'method':
                if isinstance(v, TUPLE_TYPES):
                    for method in v:
                        if method == 'GET':
                            obj.allow_GET = True
                        elif method == 'POST':
                            obj.allow_POST = True
                        else:
                            raise Exception('Invalid value for __method: "%s"' % method)
                else:
                    if v == 'GET':
                        obj.allow_GET = True
                    elif v == 'POST':
                        obj.allow_POST = True
                    else:
                        raise Exception('Invalid value for __method: "%s"' % v)
                continue

            if last_part == 'name':
                obj.param_name = v
                continue

            BOOL_PARTS = 'deferred', 'optional', 'many'
            if last_part in BOOL_PARTS:
                assert(isinstance(v, bool))
                setattr(obj, last_part, v)
                continue

            NUM_PARTS = 'gt', 'gte', 'lt', 'lte', 'eq'
            if last_part in NUM_PARTS:
                assert(isinstance(v, int) or isinstance(v, float))
                setattr(obj, last_part, v)
                continue

            if last_part == 'default':
                obj.optional = True
                obj.default = v
                continue

            if last_part == 'field':
                assert(isinstance(v, str))
                obj.field = v
                continue

            raise Exception("Invalid option: '__%s' in param '%s'" % (last_part, k))

    def _params(fn):
        @wraps(fn)
        def wrapped_request_fn(first_arg, *args, **kwargs):
            if len(args) == 0:
                request = first_arg  # request function is a top-level function
            else:
                request = args[0]  # request fn is a method, first_arg is 'self'

            request_method = request.META['REQUEST_METHOD']
            default_param_method = 'POST' if request_method == 'POST' or request_method == 'PUT' else 'GET'

            # Validate the params
            for arg_name, validator in validators.items():
                param_name = validator.param_name

                # what methods are allowed?
                use_default_methods = not validator.allow_GET and not validator.allow_POST
                allow_GET = (default_param_method == 'GET') if use_default_methods else validator.allow_GET
                allow_POST = (default_param_method == 'POST') if use_default_methods else validator.allow_POST

                # find the param
                param = None
                if allow_POST:
                    param = request.DATA.get(param_name, None)
                    param_type = 'POST'
                if not param and allow_GET:
                    param = request.GET.get(param_name, None)
                    param_type = 'GET'

                try:
                    # optional/default
                    if not param:
                        if not validator.optional:
                            raise Exception('Param is missing')
                        else:
                            kwargs[arg_name] = validator.default
                            continue

                    # check type, value
                    if validator.many:
                        if param_type == 'GET':
                            params = str(param).split(',')
                        else:
                            params = param if isinstance(param, list) else (param,)
                        params = [validator.check_type(p) for p in params]
                        [validator.check_value(p) for p in params]
                    else:
                        param = validator.check_type(param)
                        validator.check_value(param)
                except Exception as e:
                    return Response({'error': 'Invalid param "%s": %s' % (param_name, str(e))},
                                    status=status.HTTP_400_BAD_REQUEST)

                kwargs[arg_name] = params if validator.many else param

            return fn(first_arg, *args, **kwargs)
        return wrapped_request_fn
    return _params
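# --- Usage sketch (added for illustration; not part of the original module) ---
# Declaring validated params: a plain key sets the type, double-underscore
# suffixes configure it. The view below is hypothetical.
@params(page=int, page__gte=1, page__default=1,
        tags=str, tags__many=True, tags__optional=True)
def search(request, page, tags):
    return Response({'page': page, 'tags': tags})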
{ "repo_name": "pombredanne/django-rest-params", "path": "django_rest_params/__init__.py", "copies": "3", "size": "8989", "license": "bsd-3-clause", "hash": 7914815665428855000, "line_mean": 38.7743362832, "line_max": 149, "alpha_frac": 0.4815886083, "autogenerated": false, "ratio": 4.667185877466252, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0012801246390778255, "num_lines": 226 }
from functools import wraps

from reversion.compat import is_authenticated
from reversion.revisions import create_revision as create_revision_base, set_user, get_user


class _RollBackRevisionView(Exception):

    def __init__(self, response):
        self.response = response


def _request_creates_revision(request):
    return request.method not in ("OPTIONS", "GET", "HEAD")


def _set_user_from_request(request):
    if getattr(request, "user", None) and is_authenticated(request.user) and get_user() is None:
        set_user(request.user)


def create_revision(manage_manually=False, using=None, atomic=True):
    """
    View decorator that wraps the request in a revision.

    The revision will have its user set from the request automatically.
    """
    def decorator(func):
        @wraps(func)
        def do_revision_view(request, *args, **kwargs):
            if _request_creates_revision(request):
                try:
                    with create_revision_base(manage_manually=manage_manually, using=using, atomic=atomic):
                        response = func(request, *args, **kwargs)
                        # Check for an error response.
                        if response.status_code >= 400:
                            raise _RollBackRevisionView(response)
                        # Otherwise, we're good.
                        _set_user_from_request(request)
                        return response
                except _RollBackRevisionView as ex:
                    return ex.response
            return func(request, *args, **kwargs)
        return do_revision_view
    return decorator


class RevisionMixin(object):
    """
    A class-based view mixin that wraps the request in a revision.

    The revision will have its user set from the request automatically.
    """

    revision_manage_manually = False
    revision_using = None
    revision_atomic = True

    def __init__(self, *args, **kwargs):
        super(RevisionMixin, self).__init__(*args, **kwargs)
        self.dispatch = create_revision(
            manage_manually=self.revision_manage_manually,
            using=self.revision_using,
            atomic=self.revision_atomic
        )(self.dispatch)
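# --- Usage sketch (added for illustration; not part of the original module) ---
# Function-based and class-based usage; the view and model are hypothetical.
# Any response with status_code >= 400 makes the revision roll back.
from django.http import HttpResponse
from django.views.generic import UpdateView

@create_revision()
def edit_profile(request):
    return HttpResponse("saved")

class ArticleUpdateView(RevisionMixin, UpdateView):
    revision_atomic = False  # opt out of wrapping the revision in a transaction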
{ "repo_name": "htwenhe/DJOA", "path": "env/Lib/site-packages/reversion/views.py", "copies": "1", "size": "2217", "license": "mit", "hash": -3771546092535379000, "line_mean": 31.6029411765, "line_max": 107, "alpha_frac": 0.6143437077, "autogenerated": false, "ratio": 4.407554671968191, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5521898379668191, "avg_score": null, "num_lines": null }
from functools import wraps from rumble import rumble fib_timer = rumble.Rumble() for x in [3, 9, 17]: fib_timer.arguments(x) def memoize(f): '''memoizer for single-argument functions''' _cache = {} @wraps(f) def wrapper(x): try: return _cache[x] except KeyError: _cache[x] = f(x) return _cache[x] return wrapper @fib_timer.contender def recursive(n): if n == 0: return 0 if n in (1, 2): return 1 return recursive(n - 1) + recursive(n - 2) @fib_timer.contender @memoize def memoized(n): if n == 0: return 0 if n in (1, 2): return 1 return memoized(n - 1) + memoized(n - 2) prime_timer = rumble.Rumble() prime_timer.arguments(100) prime_timer.arguments(500) @prime_timer.contender def sieve(n): flags = [True for _ in range(n + 1)] flags[0] = flags[1] = False for i in range(len(flags)): if flags[i]: for j in range(i + 1, len(flags)): if flags[j] and j % i == 0: flags[j] = False return [i for i, f in enumerate(flags) if f] @prime_timer.contender @memoize def memoized(n, _primes={}): result = [] for i in range(2, n + 1): if i not in _primes: _primes[i] = not any(i % x == 0 for x in range(2, i)) if _primes[i]: result.append(i) return result if __name__ == '__main__': print('fibonacci!') fib_timer.run() print('ready for prime time!') prime_timer.run()
{ "repo_name": "mambocab/rumble", "path": "examples/classexample.py", "copies": "1", "size": "1541", "license": "mit", "hash": 4564701663443459000, "line_mean": 20.7042253521, "line_max": 65, "alpha_frac": 0.545100584, "autogenerated": false, "ratio": 3.0215686274509803, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.406666921145098, "avg_score": null, "num_lines": null }
from functools import wraps

from six import string_types

from .queue import Queue
from .worker import PQ_DEFAULT_RESULT_TTL


class job(object):

    def __init__(self, queue, connection='default',
                 timeout=None, result_ttl=PQ_DEFAULT_RESULT_TTL):
        """A decorator that adds a ``delay`` method to the decorated
        function, which in turn creates an RQ job when called. Accepts a
        required ``queue`` argument that can be either a ``Queue`` instance
        or a string denoting the queue name. For example:

            @job(queue='default')
            def simple_add(x, y):
                return x + y

            simple_add.delay(1, 2)  # puts the simple_add function into the queue
        """
        self.queue = queue
        self.connection = connection
        self.timeout = timeout
        self.result_ttl = result_ttl

    def __call__(self, f):
        @wraps(f)
        def delay(*args, **kwargs):
            if isinstance(self.queue, string_types):
                queue = Queue.create(name=self.queue,
                                     connection=self.connection)
            else:
                queue = self.queue
            return queue.enqueue_call(f, args=args, kwargs=kwargs,
                                      timeout=self.timeout,
                                      result_ttl=self.result_ttl)
        f.delay = delay
        return f
{ "repo_name": "bretth/django-pq", "path": "pq/decorators.py", "copies": "1", "size": "1299", "license": "bsd-2-clause", "hash": -1185458996288759300, "line_mean": 33.2105263158, "line_max": 81, "alpha_frac": 0.5935334873, "autogenerated": false, "ratio": 4.163461538461538, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5256995025761538, "avg_score": null, "num_lines": null }
from functools import wraps from slackminion.plugin import cmd from slackminion.plugin.base import BasePlugin from . import version try: from . import commit except ImportError: commit = 'HEAD' def user_mgt_command(f): @wraps(f) def wrapper(self, msg, args): if len(args) < 2: return "Usage: !%s *acl_name* *username*" % f.__name__.replace('_', ' ') name, user = args return f(self, name, user) return wrapper def acl_mgt_command(f): @wraps(f) def wrapper(self, msg, args): if len(args) < 1: return "Usage: !%s *acl_name*" % f.__name__.replace('_', ' ') name = args[0] return f(self, name) return wrapper class AuthManager(BasePlugin): """Basic Authorization Plugin""" def on_load(self): # Hook into the dispatcher to receive acl checks setattr(self._bot.dispatcher, 'auth_manager', self) # Setup default ACL # ACL Rule Ordering (first match) # Allow Rule --> Deny Rule --> Allow All self._acl = { '*': { 'allow': [], 'deny': [] }, } return super(AuthManager, self).on_load() @cmd(admin_only=True) def acl(self, msg, args): """ACL Management. Usage: !acl _action_ [args] Actions: new _acl_ - Create a new ACL delete _acl_ - Delete an ACL allow _acl_ _user_ - Add user to the acl allow block deny _acl_ _user_ - Add user to the acl deny block remove _acl_ _user_ - Remove user from acl allow and deny blocks show - Show all defined ACLs show _acl_ - Show allow and deny blocks of specified ACL """ if len(args) == 0: return "Usage: !acl show OR !acl _action_ _args_" valid_actions = ['allow', 'deny', 'remove', 'show', 'new', 'delete'] return "Valid actions: %s" % ', '.join(valid_actions) @cmd(admin_only=True) @acl_mgt_command def acl_new(self, name): if self.create_acl(name): return "Created new acl '%s'" % name return "ACL '%s' already exists" % name @cmd(admin_only=True) @acl_mgt_command def acl_delete(self, name): if self.delete_acl(name): return "Deleted acl '%s'" % name return "ACL '%s' does not exist" % name @cmd(admin_only=True) @user_mgt_command def acl_allow(self, name, user): if self.add_user_to_allow(name, user): return "Added %s to %s (allow)" % (user, name) return "Failed to add %s to %s (allow)" % (user, name) @cmd(admin_only=True) @user_mgt_command def acl_deny(self, name, user): if self.add_user_to_deny(name, user): return "Added %s to %s (deny)" % (user, name) return "Failed to add %s to %s (deny)" % (user, name) @cmd(admin_only=True) @user_mgt_command def acl_remove(self, name, user): if self.remove_user_from_acl(name, user): return "Removed %s from %s (allow and deny)" % (user, name) return "Failed to remove %s from %s (allow and deny)" % (user, name) @cmd(admin_only=True) def acl_show(self, msg, args): """Show current allow and deny blocks for the given acl.""" name = args[0] if len(args) > 0 else None if name is None: return "%s: The following ACLs are defined: %s" % (msg.user, ', '.join(self._acl.keys())) if name not in self._acl: return "Sorry, couldn't find an acl named '%s'" % name return '\n'.join([ "%s: ACL '%s' is defined as follows:" % (msg.user, name), "allow: %s" % ', '.join(self._acl[name]['allow']), "deny: %s" % ', '.join(self._acl[name]['deny']) ]) def add_user_to_allow(self, name, user): """Add a user to the given acl allow block.""" # Clear user from both allow and deny before adding if not self.remove_user_from_acl(name, user): return False if name not in self._acl: return False self._acl[name]['allow'].append(user) return True def add_user_to_deny(self, name, user): """Add a user to the given acl deny block.""" if not self.remove_user_from_acl(name, user): return False if name 
not in self._acl:             return False         self._acl[name]['deny'].append(user)         return True      def remove_user_from_acl(self, name, user):         """Remove a user from the given acl (both allow and deny)."""         if name not in self._acl:             return False         if user in self._acl[name]['allow']:             self._acl[name]['allow'].remove(user)         if user in self._acl[name]['deny']:             self._acl[name]['deny'].remove(user)         return True      def create_acl(self, name):         """Create a new acl."""         if name in self._acl:             return False          self._acl[name] = {             'allow': [],             'deny': []         }         return True      def delete_acl(self, name):         """Delete an acl."""         if name not in self._acl:             return False          del self._acl[name]         return True      @staticmethod     def admin_check(cmd, user):         # Commands that don't require admin-level access always pass this check         if not cmd.admin_only:             return True          if hasattr(user, 'is_admin'):             return user.is_admin         return False      def acl_check(self, cmd, user):         effective_acl = cmd.acl         if effective_acl not in self._acl:             self.log.warning("Unable to locate ACL %s for %s, defaulting to *", effective_acl, cmd.method.__name__)             effective_acl = '*'          if self._check_allow(self._acl[effective_acl], user):             return True         if self._check_deny(self._acl[effective_acl], user):             return False         return True      @staticmethod     def _check_allow(acl, user):         return '*' in acl['allow'] or user.username in acl['allow']      @staticmethod     def _check_deny(acl, user):         return '*' in acl['deny'] or user.username in acl['deny']
{ "repo_name": "arcticfoxnv/slackminion", "path": "slackminion/plugins/core/acl.py", "copies": "1", "size": "6225", "license": "mit", "hash": -4281970976525875000, "line_mean": 28.9278846154, "line_max": 115, "alpha_frac": 0.546184739, "autogenerated": false, "ratio": 3.718637992831541, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4764822731831541, "avg_score": null, "num_lines": null }
from functools import wraps from statsmodels.tools.data import _is_using_pandas from statsmodels.tsa.base import datetools from statsmodels.tsa.tsatools import freq_to_period def _get_pandas_wrapper(X, trim_head=None, trim_tail=None, names=None): index = X.index #TODO: allow use index labels if trim_head is None and trim_tail is None: index = index elif trim_tail is None: index = index[trim_head:] elif trim_head is None: index = index[:-trim_tail] else: index = index[trim_head:-trim_tail] if hasattr(X, "columns"): if names is None: names = X.columns return lambda x : X.__class__(x, index=index, columns=names) else: if names is None: names = X.name return lambda x : X.__class__(x, index=index, name=names) def _maybe_get_pandas_wrapper(X, trim_head=None, trim_tail=None): """ If using pandas returns a function to wrap the results, e.g., wrapper(X) trim is an integer for the symmetric truncation of the series in some filters. otherwise returns None """ if _is_using_pandas(X, None): return _get_pandas_wrapper(X, trim_head, trim_tail) else: return def _maybe_get_pandas_wrapper_freq(X, trim=None): if _is_using_pandas(X, None): index = X.index func = _get_pandas_wrapper(X, trim) freq = index.inferred_freq return func, freq else: return lambda x : x, None def pandas_wrapper(func, trim_head=None, trim_tail=None, names=None, *args, **kwargs): @wraps(func) def new_func(X, *args, **kwargs): # quick pass-through for do nothing case if not _is_using_pandas(X, None): return func(X, *args, **kwargs) wrapper_func = _get_pandas_wrapper(X, trim_head, trim_tail, names) ret = func(X, *args, **kwargs) ret = wrapper_func(ret) return ret return new_func def pandas_wrapper_bunch(func, trim_head=None, trim_tail=None, names=None, *args, **kwargs): @wraps(func) def new_func(X, *args, **kwargs): # quick pass-through for do nothing case if not _is_using_pandas(X, None): return func(X, *args, **kwargs) wrapper_func = _get_pandas_wrapper(X, trim_head, trim_tail, names) ret = func(X, *args, **kwargs) ret = wrapper_func(ret) return ret return new_func def pandas_wrapper_predict(func, trim_head=None, trim_tail=None, columns=None, *args, **kwargs): pass def pandas_wrapper_freq(func, trim_head=None, trim_tail=None, freq_kw='freq', columns=None, *args, **kwargs): """ Return a new function that catches the incoming X, checks if it's pandas, calls the functions as is. Then wraps the results in the incoming index. Deals with frequencies. Expects that the function returns a tuple, a Bunch object, or a pandas-object. 
""" @wraps(func) def new_func(X, *args, **kwargs): # quick pass-through for do nothing case if not _is_using_pandas(X, None): return func(X, *args, **kwargs) wrapper_func = _get_pandas_wrapper(X, trim_head, trim_tail, columns) index = X.index freq = index.inferred_freq kwargs.update({freq_kw : freq_to_period(freq)}) ret = func(X, *args, **kwargs) ret = wrapper_func(ret) return ret return new_func def dummy_func(X): return X def dummy_func_array(X): return X.values def dummy_func_pandas_columns(X): return X.values def dummy_func_pandas_series(X): return X['A'] import pandas as pd import numpy as np def test_pandas_freq_decorator(): X = pd.util.testing.makeDataFrame() # in X, get a function back that returns an X with the same columns func = pandas_wrapper(dummy_func) np.testing.assert_equal(func(X.values), X) func = pandas_wrapper(dummy_func_array) pd.util.testing.assert_frame_equal(func(X), X) expected = X.rename(columns=dict(zip('ABCD', 'EFGH'))) func = pandas_wrapper(dummy_func_array, names=list('EFGH')) pd.util.testing.assert_frame_equal(func(X), expected)
{ "repo_name": "wzbozon/statsmodels", "path": "statsmodels/tsa/filters/_utils.py", "copies": "29", "size": "4391", "license": "bsd-3-clause", "hash": -3401104899099645400, "line_mean": 28.4697986577, "line_max": 77, "alpha_frac": 0.5991801412, "autogenerated": false, "ratio": 3.5757328990228014, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.008915223387227262, "num_lines": 149 }
from functools import wraps from statsmodels.tools.data import _is_using_pandas from statsmodels.tsa.tsatools import freq_to_period def _get_pandas_wrapper(X, trim_head=None, trim_tail=None, names=None): index = X.index #TODO: allow use index labels if trim_head is None and trim_tail is None: index = index elif trim_tail is None: index = index[trim_head:] elif trim_head is None: index = index[:-trim_tail] else: index = index[trim_head:-trim_tail] if hasattr(X, "columns"): if names is None: names = X.columns return lambda x : X.__class__(x, index=index, columns=names) else: if names is None: names = X.name return lambda x : X.__class__(x, index=index, name=names) def _maybe_get_pandas_wrapper(X, trim_head=None, trim_tail=None): """ If using pandas returns a function to wrap the results, e.g., wrapper(X) trim is an integer for the symmetric truncation of the series in some filters. otherwise returns None """ if _is_using_pandas(X, None): return _get_pandas_wrapper(X, trim_head, trim_tail) else: return def _maybe_get_pandas_wrapper_freq(X, trim=None): if _is_using_pandas(X, None): index = X.index func = _get_pandas_wrapper(X, trim) freq = index.inferred_freq return func, freq else: return lambda x : x, None def pandas_wrapper(func, trim_head=None, trim_tail=None, names=None, *args, **kwargs): @wraps(func) def new_func(X, *args, **kwargs): # quick pass-through for do nothing case if not _is_using_pandas(X, None): return func(X, *args, **kwargs) wrapper_func = _get_pandas_wrapper(X, trim_head, trim_tail, names) ret = func(X, *args, **kwargs) ret = wrapper_func(ret) return ret return new_func def pandas_wrapper_bunch(func, trim_head=None, trim_tail=None, names=None, *args, **kwargs): @wraps(func) def new_func(X, *args, **kwargs): # quick pass-through for do nothing case if not _is_using_pandas(X, None): return func(X, *args, **kwargs) wrapper_func = _get_pandas_wrapper(X, trim_head, trim_tail, names) ret = func(X, *args, **kwargs) ret = wrapper_func(ret) return ret return new_func def pandas_wrapper_predict(func, trim_head=None, trim_tail=None, columns=None, *args, **kwargs): pass def pandas_wrapper_freq(func, trim_head=None, trim_tail=None, freq_kw='freq', columns=None, *args, **kwargs): """ Return a new function that catches the incoming X, checks if it's pandas, calls the functions as is. Then wraps the results in the incoming index. Deals with frequencies. Expects that the function returns a tuple, a Bunch object, or a pandas-object. """ @wraps(func) def new_func(X, *args, **kwargs): # quick pass-through for do nothing case if not _is_using_pandas(X, None): return func(X, *args, **kwargs) wrapper_func = _get_pandas_wrapper(X, trim_head, trim_tail, columns) index = X.index freq = index.inferred_freq kwargs.update({freq_kw : freq_to_period(freq)}) ret = func(X, *args, **kwargs) ret = wrapper_func(ret) return ret return new_func def dummy_func(X): return X def dummy_func_array(X): return X.values def dummy_func_pandas_columns(X): return X.values def dummy_func_pandas_series(X): return X['A'] import pandas as pd import numpy as np def test_pandas_freq_decorator(): X = pd.util.testing.makeDataFrame() # in X, get a function back that returns an X with the same columns func = pandas_wrapper(dummy_func) np.testing.assert_equal(func(X.values), X) func = pandas_wrapper(dummy_func_array) pd.util.testing.assert_frame_equal(func(X), X) expected = X.rename(columns=dict(zip('ABCD', 'EFGH'))) func = pandas_wrapper(dummy_func_array, names=list('EFGH')) pd.util.testing.assert_frame_equal(func(X), expected)
{ "repo_name": "bert9bert/statsmodels", "path": "statsmodels/tsa/filters/_utils.py", "copies": "2", "size": "4348", "license": "bsd-3-clause", "hash": 8369541900056123000, "line_mean": 28.3783783784, "line_max": 77, "alpha_frac": 0.5965961362, "autogenerated": false, "ratio": 3.575657894736842, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.008975461383086906, "num_lines": 148 }
from functools import wraps from sympy.core import S, Symbol, sympify, Tuple, Integer, Basic, Expr from sympy.core.decorators import call_highest_priority from sympy.core.sympify import SympifyError from sympy.functions import transpose, conjugate, adjoint from sympy.matrices import ShapeError from sympy.simplify import simplify def _sympifyit(arg, retval=None): # This version of _sympifyit sympifies MutableMatrix objects def deco(func): @wraps(func) def __sympifyit_wrapper(a, b): try: b = sympify(b, strict=True) return func(a, b) except SympifyError: return retval return __sympifyit_wrapper return deco class MatrixExpr(Basic): """ Matrix Expression Class Matrix Expressions subclass SymPy Expr's so that MatAdd inherits from Add MatMul inherits from Mul MatPow inherits from Pow They use _op_priority to gain control with binary operations (+, *, -, **) are used They implement operations specific to Matrix Algebra. """ _op_priority = 11.0 is_Matrix = True is_MatrixExpr = True is_Identity = None is_Inverse = False is_Transpose = False is_ZeroMatrix = False is_MatAdd = False is_MatMul = False is_commutative = False # The following is adapted from the core Expr object def __neg__(self): return MatMul(S.NegativeOne, self).doit() def __abs__(self): raise NotImplementedError @_sympifyit('other', NotImplemented) @call_highest_priority('__radd__') def __add__(self, other): return MatAdd(self, other).doit() @_sympifyit('other', NotImplemented) @call_highest_priority('__add__') def __radd__(self, other): return MatAdd(other, self).doit() @_sympifyit('other', NotImplemented) @call_highest_priority('__rsub__') def __sub__(self, other): return MatAdd(self, -other).doit() @_sympifyit('other', NotImplemented) @call_highest_priority('__sub__') def __rsub__(self, other): return MatAdd(other, -self).doit() @_sympifyit('other', NotImplemented) @call_highest_priority('__rmul__') def __mul__(self, other): return MatMul(self, other).doit() @_sympifyit('other', NotImplemented) @call_highest_priority('__mul__') def __rmul__(self, other): return MatMul(other, self).doit() @_sympifyit('other', NotImplemented) @call_highest_priority('__rpow__') def __pow__(self, other): if not self.is_square: raise ShapeError("Power of non-square matrix %s" % self) if other is S.NegativeOne: return Inverse(self) elif other is S.Zero: return Identity(self.rows) elif other is S.One: return self return MatPow(self, other) @_sympifyit('other', NotImplemented) @call_highest_priority('__pow__') def __rpow__(self, other): raise NotImplementedError("Matrix Power not defined") @_sympifyit('other', NotImplemented) @call_highest_priority('__rdiv__') def __div__(self, other): return self * other**S.NegativeOne @_sympifyit('other', NotImplemented) @call_highest_priority('__div__') def __rdiv__(self, other): raise NotImplementedError() #return MatMul(other, Pow(self, S.NegativeOne)) __truediv__ = __div__ __rtruediv__ = __rdiv__ @property def rows(self): return self.shape[0] @property def cols(self): return self.shape[1] @property def is_square(self): return self.rows == self.cols def _eval_transpose(self): from sympy.matrices.expressions.transpose import Transpose return Transpose(self) def _eval_conjugate(self): from sympy.matrices.expressions.adjoint import Adjoint from sympy.matrices.expressions.transpose import Transpose return Adjoint(Transpose(self)) def _eval_inverse(self): from sympy.matrices.expressions.inverse import Inverse return Inverse(self) def _eval_power(self, exp): return MatPow(self, exp) def _eval_simplify(self, **kwargs): if 
self.is_Atom: return self else: return self.__class__(*[simplify(x, **kwargs) for x in self.args]) def _eval_adjoint(self): from sympy.matrices.expressions.adjoint import Adjoint return Adjoint(self) def _entry(self, i, j): raise NotImplementedError( "Indexing not implemented for %s" % self.__class__.__name__) def adjoint(self): return adjoint(self) def conjugate(self): return conjugate(self) def transpose(self): return transpose(self) T = property(transpose, None, None, 'Matrix transposition.') def inverse(self): return self._eval_inverse() @property def I(self): return self.inverse() def valid_index(self, i, j): def is_valid(idx): return isinstance(idx, (int, Integer, Symbol, Expr)) return (is_valid(i) and is_valid(j) and 0 <= i < self.rows and 0 <= j < self.cols) def __getitem__(self, key): if not isinstance(key, tuple) and isinstance(key, slice): from sympy.matrices.expressions.slice import MatrixSlice return MatrixSlice(self, key, (0, None, 1)) if isinstance(key, tuple) and len(key) == 2: i, j = key if isinstance(i, slice) or isinstance(j, slice): from sympy.matrices.expressions.slice import MatrixSlice return MatrixSlice(self, i, j) i, j = sympify(i), sympify(j) if self.valid_index(i, j) is not False: return self._entry(i, j) else: raise IndexError("Invalid indices (%s, %s)" % (i, j)) raise IndexError("Invalid index, wanted %s[i,j]" % self) def as_explicit(self): """ Returns a dense Matrix with elements represented explicitly Returns an object of type ImmutableMatrix. Examples ======== >>> from sympy import Identity >>> I = Identity(3) >>> I I >>> I.as_explicit() [1, 0, 0] [0, 1, 0] [0, 0, 1] See Also ======== as_mutable: returns mutable Matrix type """ from sympy.matrices.immutable import ImmutableMatrix return ImmutableMatrix([[ self[i, j] for j in range(self.cols)] for i in range(self.rows)]) def as_mutable(self): """ Returns a dense, mutable matrix with elements represented explicitly Examples ======== >>> from sympy import Identity >>> I = Identity(3) >>> I I >>> I.shape (3, 3) >>> I.as_mutable() [1, 0, 0] [0, 1, 0] [0, 0, 1] See Also ======== as_explicit: returns ImmutableMatrix """ return self.as_explicit().as_mutable() def __array__(self): from numpy import empty a = empty(self.shape, dtype=object) for i in range(self.rows): for j in range(self.cols): a[i, j] = self[i, j] return a def equals(self, other): """ Test elementwise equality between matrices, potentially of different types >>> from sympy import Identity, eye >>> Identity(3).equals(eye(3)) True """ return self.as_explicit().equals(other) def canonicalize(self): return self def as_coeff_mmul(self): return 1, MatMul(self) class MatrixSymbol(MatrixExpr): """Symbolic representation of a Matrix object Creates a SymPy Symbol to represent a Matrix. 
This matrix has a shape and can be included in Matrix Expressions >>> from sympy import MatrixSymbol, Identity >>> A = MatrixSymbol('A', 3, 4) # A 3 by 4 Matrix >>> B = MatrixSymbol('B', 4, 3) # A 4 by 3 Matrix >>> A.shape (3, 4) >>> 2*A*B + Identity(3) 2*A*B + I """ is_commutative = False def __new__(cls, name, n, m): n, m = sympify(n), sympify(m) obj = Basic.__new__(cls, name, n, m) return obj def _hashable_content(self): return(self.name, self.shape) @property def shape(self): return self.args[1:3] @property def name(self): return self.args[0] def _eval_subs(self, old, new): # only do substitutions in shape shape = Tuple(*self.shape)._subs(old, new) return MatrixSymbol(self.name, *shape) def __call__(self, *args): raise TypeError( "%s object is not callable" % self.__class__ ) def _entry(self, i, j): # MatMul _entry will pass us a Dummy and ask that we remember it # so that it can be summed over later. We'll use the function syntax if i.is_Dummy or j.is_Dummy: return Symbol(self.name)(i, j) # If that isn't the case we'd really rather just make a symbol # They are simpler and look much nicer else: return Symbol('%s_%s%s' % (self.name, str(i), str(j))) @property def free_symbols(self): return set((self,)) def doit(self, **hints): if hints.get('deep', True): return type(self)(self.name, self.args[1].doit(**hints), self.args[2].doit(**hints)) else: return self def _eval_simplify(self, **kwargs): return self class Identity(MatrixExpr): """The Matrix Identity I - multiplicative identity >>> from sympy.matrices import Identity, MatrixSymbol >>> A = MatrixSymbol('A', 3, 5) >>> I = Identity(3) >>> I*A A """ is_Identity = True def __new__(cls, n): return super(Identity, cls).__new__(cls, n) @property def rows(self): return self.args[0] @property def cols(self): return self.args[0] @property def shape(self): return (self.args[0], self.args[0]) def _eval_transpose(self): return self def _eval_trace(self): return self.rows def _eval_inverse(self): return self def conjugate(self): return self def _entry(self, i, j): if i == j: return S.One else: return S.Zero class ZeroMatrix(MatrixExpr): """The Matrix Zero 0 - additive identity >>> from sympy import MatrixSymbol, ZeroMatrix >>> A = MatrixSymbol('A', 3, 5) >>> Z = ZeroMatrix(3, 5) >>> A+Z A >>> Z*A.T 0 """ is_ZeroMatrix = True def __new__(cls, m, n): return super(ZeroMatrix, cls).__new__(cls, m, n) @property def rows(self): return self.args[0] @property def cols(self): return self.args[1] @property def shape(self): return (self.args[0], self.args[1]) @_sympifyit('other', NotImplemented) @call_highest_priority('__rpow__') def __pow__(self, other): if other != 1 and not self.is_square: raise ShapeError("Power of non-square matrix %s" % self) if other == 0: return Identity(self.rows) return self def _eval_transpose(self): return ZeroMatrix(self.cols, self.rows) def _eval_trace(self): return S.Zero def conjugate(self): return self def _entry(self, i, j): return S.Zero def matrix_symbols(expr): return [sym for sym in expr.free_symbols if sym.is_Matrix] from matmul import MatMul from matadd import MatAdd from matpow import MatPow from transpose import Transpose from inverse import Inverse
{ "repo_name": "amitjamadagni/sympy", "path": "sympy/matrices/expressions/matexpr.py", "copies": "2", "size": "11863", "license": "bsd-3-clause", "hash": -1735233517140174000, "line_mean": 25.3037694013, "line_max": 78, "alpha_frac": 0.5755711034, "autogenerated": false, "ratio": 3.9596128170894525, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5535183920489453, "avg_score": null, "num_lines": null }
from functools import wraps from sympy import Mul, symbols, lambdify, Add, LC, fraction from sympy.printing.lambdarepr import NumPyPrinter from sympy.printing.precedence import precedence import numpy as np import scipy.sparse.linalg from .cram import CRAM_exp, get_CRAM_from_cache from .partialfrac import (thetas_alphas, thetas_alphas_to_expr_complex, thetas_alphas_to_expr_real, t, multiply_vector, allroots) from .util import memoize class MatrixNumPyPrinter(NumPyPrinter): """ Print an expression for numpy assuming the variables are matrices Prints inversions as solve() and multiplication of nonconstants as @. """ _default_settings = { **NumPyPrinter._default_settings, # TODO: Make this automatic 'use_autoeye': True, 'py_solve': False, 'float128': False, } def __init__(self, settings=None): if settings is not None and 'py_solve' in settings: if settings.get('use_autoeye', False): raise ValueError("use_autoeye cannot be used with py_solve") settings['use_autoeye'] = False super().__init__(settings) def _print_Add(self, expr): if not (self._settings['use_autoeye'] or self._settings['py_solve']): return super()._print_Add(expr) prec = precedence(expr) num_terms = [i for i in expr.args if i.is_number] rest_terms = [i for i in expr.args if i not in num_terms] if len(rest_terms) > 1: rest = super()._print_Add(Add(*rest_terms)) elif len(rest_terms) == 1: rest = self._print(rest_terms[0]) else: if self._settings['py_solve']: return super()._print_Add(expr) rest = '' if len(num_terms) > 1: num = self.__class__({**self._settings, 'use_autoeye': False})._print_Add(Add(*num_terms)) elif len(num_terms) == 1: num = self.__class__({**self._settings, 'use_autoeye': False})._print(num_terms[0]) else: num = '' if rest and num: if self._settings['py_solve']: return "diag_add(%s, %s)" % (self._print(rest_terms[0]), self._print(Add(*num_terms))) return self.parenthesize(rest + ' + autoeye(%s)' % num, prec) elif rest: return self.parenthesize(rest, prec) else: if self._settings['use_autoeye']: # No need to parenthesize return 'autoeye(%s)' % num else: return self.parenthesize(num, prec) def _print_Mul(self, expr): prec = precedence(expr) pows = [i for i in expr.args if i.is_Pow and i.exp < 0] if len(pows) > 1: raise NotImplementedError("Need exactly one inverted Pow, not %s" % len(pows)) if not pows: no_autoeye = self.__class__({**self._settings, 'use_autoeye': False}) num_terms = [no_autoeye._print(no_autoeye.parenthesize(i, prec)) for i in expr.args if i.is_number] mat_terms = [self._print(self.parenthesize(i, prec)) for i in expr.args if not i.is_number] if len(mat_terms) >= 2 and self._settings['py_solve']: raise NotImplementedError("matrix multiplication is not yet supported with py_solve") if num_terms and mat_terms: return '*'.join(num_terms) + '*' + '@'.join(mat_terms) else: if self._settings['use_autoeye']: if num_terms: return ('autoeye(%s)' % '*'.join(num_terms)) + '@'.join(mat_terms) return '@'.join(mat_terms) return '*'.join(num_terms) + '@'.join(mat_terms) [pow] = pows rest = Mul(*[i for i in expr.args if i != pow]) return 'solve(%s, %s)' % (self._print(1/pow), self._print(rest)) def _print_Integer(self, expr): if self._settings['use_autoeye']: return 'autoeye(%s)' % super()._print_Integer(expr) return super()._print_Integer(expr) def _print_Float(self, expr): super_float = super()._print_Float(expr) if self._settings['float128']: super_float = 'float128(%r)' % super_float if self._settings['use_autoeye']: return 'autoeye(%s)' % super_float return super_float def _print_Pow(self, expr): if 
self._settings['py_solve']: raise NotImplementedError("Matrix powers are not yet supported with py_solve") if expr.exp.is_Integer and expr.exp > 1: return 'matrix_power(%s, %s)' % (self._print(expr.base), expr.exp) return super()._print_Pow(expr) def _print_ImaginaryUnit(self, expr): if self._settings['use_autoeye']: return 'autoeye(1j)' return '1j' def _print_customre(self, expr): return 'real(%s)' % self._print(expr.args[0]) class autoeye: __array_priority__ = 11 def __init__(self, coeff=1): self.coeff = coeff def eval(self, shape, eye_type, dtype=None): return self.coeff*eye_type(shape, dtype=dtype) def __eq__(self, other): if isinstance(other, autoeye): return self.coeff == other.coeff return False def __add__(self, other): if isinstance(other, autoeye): return autoeye(self.coeff + other.coeff) if isinstance(other, (int, float, complex)): return autoeye(self.coeff + other) if isinstance(other, np.ndarray): eye_type = np.eye elif isinstance(other, scipy.sparse.spmatrix): eye_type = scipy.sparse.eye else: return NotImplemented if len(other.shape) != 2: raise ValueError("autoeye can only be added to 2-dim numpy arrays") if other.shape[0] != other.shape[1]: raise ValueError("autoeye can only be added to square numpy arrays, other.shape is %s" % (other.shape,)) return self.eval(other.shape[0], eye_type, dtype=other.dtype) + other __radd__ = __add__ def __mul__(self, other): if isinstance(other, autoeye): return autoeye(self.coeff * other.coeff) if isinstance(other, (int, float, complex)): return autoeye(self.coeff * other) return NotImplemented __rmul__ = __mul__ def __matmul__(self, other): if isinstance(other, autoeye): return autoeye(self.coeff * other.coeff) if isinstance(other, np.ndarray): eye_type = np.eye elif isinstance(other, scipy.sparse.spmatrix): eye_type = scipy.sparse.eye else: return NotImplemented if len(other.shape) != 2: raise ValueError("autoeye can only be matmuled by 2-dim numpy arrays") return self.eval(other.shape[0], eye_type=eye_type, dtype=other.dtype) @ other def __rmatmul__(self, other): if isinstance(other, autoeye): return autoeye(self.coeff * other.coeff) if isinstance(other, np.ndarray): eye_type = np.eye elif isinstance(other, scipy.sparse.spmatrix): eye_type = scipy.sparse.eye else: return NotImplemented if len(other.shape) != 2: # Matmul works weird on 1d arrays. It treats them as column # vectors from the left and row vectors from the right. 
raise ValueError("autoeye can only be matmuled by 2-dim numpy arrays") ret_shape = other.shape[1] return other @ self.eval(ret_shape, eye_type=eye_type, dtype=other.dtype) def __str__(self): return 'autoeye(%s)' % self.coeff __repr__ = __str__ def numpy_solve_with_autoeye(a, b, **kwargs): if isinstance(a, autoeye): a = a.eval(b.shape[0], np.eye) if isinstance(b, autoeye): b = b.eval(a.shape[0], np.eye) return np.linalg.solve(a, b, **kwargs) def scipy_sparse_solve_with_autoeye(a, b, **kwargs): if isinstance(a, autoeye): a = a.eval(b.shape[0], scipy.sparse.eye) if isinstance(b, autoeye): b = b.eval(a.shape[0], scipy.sparse.eye) ret = scipy.sparse.linalg.spsolve(a, b, **kwargs) if isinstance(ret, np.ndarray): ret = ret[:,np.newaxis] return ret scipy_translations = { 'solve': scipy.sparse.linalg.spsolve, 'autoeye': autoeye, 'matrix_power': lambda a, b: a**b, 'real': lambda m: np.real(m) if isinstance(m, np.ndarray) else scipy.sparse.csr_matrix((np.real(m.data), m.indices, m.indptr), shape=m.shape), 'float128': np.float128, } scipy_translations_autoeye = { **scipy_translations, 'solve': scipy_sparse_solve_with_autoeye, } numpy_translations = { 'solve': np.linalg.solve, 'autoeye': autoeye, 'matrix_power': lambda a, b: a**b, 'float128': np.float128, } numpy_translations_autoeye = { **scipy_translations, 'solve': numpy_solve_with_autoeye, } @memoize def CRAM_matrix_exp_lambdify(degree=14, prec=200, *, use_cache=True, form='complex partial fraction', py_solve=False): """ Return a lambdified function for the CRAM approximation to exp(-x) form can be one of 'complex partial fraction' (the default) 'real partial fraction' 'rational function' 'rational function horner' 'factored' When py_solve = True, the py_solve module will be used (scipy is used otherwise). In this case, it is much faster to pre-flatten the input matrix: >>> mat, time, b = ... >>> mat = py_solve.asflat(mat) >>> f = CRAM_matrix_exp_lambdify(py_solve=True) >>> f(-mat*time, b) """ # TODO: This function should give exp(x), not exp(-x) if use_cache: rat_func = get_CRAM_from_cache(degree, prec) else: rat_func = CRAM_exp(degree, prec, plot=False) thetas, alphas, alpha0 = thetas_alphas(rat_func, prec) if form == 'complex partial fraction': expr = thetas_alphas_to_expr_complex(thetas, alphas, alpha0) elif form == 'real partial fraction': expr = thetas_alphas_to_expr_real(thetas, alphas, alpha0) elif form in ['rational function', 'rational function horner']: expr = rat_func elif form == 'factored': num, den = fraction(rat_func) # XXX: complex conjugate roots have the same absolute value numroots = sorted(allroots(num, degree, prec), key=lambda i: abs(i)) denroots = sorted(allroots(den, degree, prec), key=lambda i: abs(i)) p1q1 = LC(num)/LC(den) else: raise ValueError("Invalid argument for 'form': %s" % (form,)) n0 = symbols("n0", commutative=False) if py_solve: from . 
import py_solve module = [py_solve, 'numpy'] printer = MatrixNumPyPrinter({'py_solve': True}) def wrapper(f): @wraps(f) def _f(t, n0): t = py_solve.asflat(t) return f(t, n0) return _f else: module = scipy_translations_autoeye printer = MatrixNumPyPrinter({'use_autoeye': True}) wrapper = lambda f: f if form != 'factored': return wrapper(lambdify((t, n0), multiply_vector(expr, n0, horner=(form == 'rational function horner')), module, printer=printer, dummify=False)) else: if py_solve: raise NotImplementedError("py_solve is not supported with factor yet") # TODO: Code generate this as a single expression def e_factored(mat, b, reverse=False): if reverse: r = reversed else: r = lambda i: i for num_root, den_root in zip(r(numroots), r(denroots)): f = lambdify((t, n0), multiply_vector((t - num_root)/(t - den_root), n0), scipy_translations_autoeye, printer=MatrixNumPyPrinter()) b = f(mat, b) return float(p1q1)*b return e_factored
{ "repo_name": "ergs/transmutagen", "path": "transmutagen/codegen.py", "copies": "1", "size": "12068", "license": "bsd-3-clause", "hash": -5349966175652853000, "line_mean": 32.9943661972, "line_max": 119, "alpha_frac": 0.5786377196, "autogenerated": false, "ratio": 3.654754694124773, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9722705228916602, "avg_score": 0.002137436961634171, "num_lines": 355 }
from functools import wraps from twilio.twiml.voice_response import VoiceResponse from twilio.twiml.messaging_response import MessagingResponse from twilio.request_validator import RequestValidator from flask import ( Flask, abort, current_app, request, ) import os app = Flask(__name__) def validate_twilio_request(f): """Validates that incoming requests genuinely originated from Twilio""" @wraps(f) def decorated_function(*args, **kwargs): # Create an instance of the RequestValidator class validator = RequestValidator(os.environ.get('TWILIO_AUTH_TOKEN')) # Validate the request using its URL, POST data, # and X-TWILIO-SIGNATURE header request_valid = validator.validate( request.url, request.form, request.headers.get('X-TWILIO-SIGNATURE', '')) # Continue processing the request if it's valid, return a 403 error if # it's not if request_valid or current_app.debug: return f(*args, **kwargs) else: return abort(403) return decorated_function @app.route('/voice', methods=['POST']) @validate_twilio_request def incoming_call(): """Twilio Voice URL - receives incoming calls from Twilio""" # Create a new TwiML response resp = VoiceResponse() # <Say> a message to the caller from_number = request.form['From'] body = """ Thanks for calling! Your phone number is {0}. I got your call because of Twilio's webhook. Goodbye!""".format(' '.join(from_number)) resp.say(body) # Return the TwiML return str(resp) @app.route('/message', methods=['POST']) @validate_twilio_request def incoming_message(): """Twilio Messaging URL - receives incoming messages from Twilio""" # Create a new TwiML response resp = MessagingResponse() # <Message> a text back to the person who texted us body = "Your text to me was {0} characters long. Webhooks are neat :)" \ .format(len(request.values['Body'])) resp.message(body) # Return the TwiML return str(resp) if __name__ == '__main__': app.run(debug=True)
{ "repo_name": "TwilioDevEd/webhooks-example-flask", "path": "app.py", "copies": "1", "size": "2164", "license": "mit", "hash": 7742135546815484000, "line_mean": 26.05, "line_max": 78, "alpha_frac": 0.6538817006, "autogenerated": false, "ratio": 3.7898423817863396, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9941409267571524, "avg_score": 0.0004629629629629629, "num_lines": 80 }
from functools import wraps from twisted.internet.defer import gatherResults from twisted.web.client import getPage from rhumba import RhumbaPlugin, cron def unpack_args(fn): return wraps(fn)(lambda self, args: fn(self, **args)) class Plugin(RhumbaPlugin): """ A plugin to periodically push an application group definition to Marathon. """ def __init__(self, *args, **kw): super(Plugin, self).__init__(*args, **kw) self.marathon_host = self.config.get("marathon_host", "localhost") self.marathon_port = self.config.get("marathon_port", "8080") self.group_json_files = self.config["group_json_files"] @cron(min="*/1") @unpack_args def call_update_groups(self): """ Send app group definitions to Marathon. """ ds = [] for filepath in self.group_json_files: ds.append(self.call_update_group({'group_json_file': filepath})) return gatherResults(ds) @unpack_args def call_update_group(self, group_json_file): self.log("Updating %r" % (group_json_file,)) body = self.readfile(group_json_file) d = self._call_marathon("PUT", "v2/groups", body) d.addBoth(self._logcb, "API response for %s: %%r" % (group_json_file,)) return d def _logcb(self, r, msgfmt): self.log(msgfmt % (r,)) return r def _call_marathon(self, method, path, body=None): uri = b"http://%s:%s/%s" % ( self.marathon_host, self.marathon_port, path) return self.getPage(uri, method=method, postdata=body) def readfile(self, filepath): """ Read a file and return its content. """ with open(filepath, "r") as f: return f.read() def getPage(self, *args, **kw): """ Proxy twisted.web.client.getPage so we can stub it out in tests. """ return getPage(*args, **kw)
{ "repo_name": "praekeltfoundation/seed-xylem", "path": "seed/xylem/marathon_sync.py", "copies": "1", "size": "1937", "license": "mit", "hash": -642943967402458500, "line_mean": 29.265625, "line_max": 79, "alpha_frac": 0.5952503872, "autogenerated": false, "ratio": 3.515426497277677, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9610676884477677, "avg_score": 0, "num_lines": 64 }
from functools import wraps from twisted.internet.defer import ( inlineCallbacks, returnValue, gatherResults, maybeDeferred) from twisted.trial.unittest import SkipTest from vumi.tests.helpers import VumiTestCase, PersistenceHelper from zope.interface.verify import verifyObject from go_store_service.collections import ( InMemoryCollectionBackend, RiakCollectionBackend) from go_store_service.interfaces import ICollection, IStoreBackend def skip_for_backend(*backends): """ Skip tests for a particular backend or set of backends. This exists to allow incremental addition of new backends. Any backend-specific tests should go in the appropriate test classes rather than CommonStoreTests. """ def deco(func): @wraps(func) def wrapper(self): if backends: self.skip_for_backends = backends return func(self) return wrapper return deco class CommonStoreTests(object): """ Tests to run for all store implementations. """ def make_store_backend(self): """ This must be overridden in subclasses to build a store backend object. """ raise NotImplementedError() def get_store_backend(self): """ This calls .make_store_backend() and skips the test if necessary. """ backend = self.make_store_backend() if type(backend) in getattr(self, 'skip_for_backends', ()): raise SkipTest("Skipped for %s" % (type(backend),)) return backend def filtered_all_keys(self, collection): """ Get all keys in a collection. Some backends may have some index deletion lag, so we might need to filter the results. This implementation doesn't do any filtering, but subclasses can override. """ return collection.all_keys() def filtered_all(self, collection): """ Get all objects in a collection. Some backends may have some index deletion lag, so we might need to filter the results. This implementation doesn't do any filtering, but subclasses can override. This waits for all deferreds to fire before returning. """ d = collection.all() d.addCallback(lambda objs: [maybeDeferred(lambda: o) for o in objs]) d.addCallback(gatherResults) d.addCallback(lambda objs: [o for o in objs if o is not None]) return d def ensure_equal(self, foo, bar, msg=None): """ Similar to .assertEqual(), but raises an exception instead of failing. This should be used to differentiate state setup confirmation (which is not part of the behaviour being tested) from assertions about the code under test. """ if msg is None: msg = "%r != %r" % (foo, bar) if foo != bar: raise Exception(msg) ############################################## # Tests for backend functionality. def test_store_backend_provides_IStoreBackend(self): """ The store backend provides IStoreBackend. """ backend = self.get_store_backend() verifyObject(IStoreBackend, backend) @inlineCallbacks def test_store_collection_provides_ICollection(self): """ The return value of .get_store_collection() is an object that provides ICollection. """ backend = self.get_store_backend() stores = yield backend.get_store_collection("me") verifyObject(ICollection, stores) @inlineCallbacks def test_row_collection_provides_ICollection(self): """ The return value of .get_row_collection() is an object that provides ICollection. """ backend = self.get_store_backend() rows = yield backend.get_row_collection("me", "my_store") verifyObject(ICollection, rows) ############################################## # Tests for store collection functionality. @inlineCallbacks def get_empty_store_collection(self, owner="me"): """ Return a store collection after ensuring that it is empty. 
This raises an exception rather than a failure because it's not part of the intended test assertions. """ backend = self.get_store_backend() stores = yield backend.get_store_collection(owner) keys = yield self.filtered_all_keys(stores) self.ensure_equal( keys, [], "Expected empty store collection for %r, got keys: %r" % ( owner, keys)) returnValue(stores) @inlineCallbacks def test_store_collection_all_keys_empty(self): """ Listing all stores returns an empty list when no stores exist. """ backend = self.get_store_backend() stores = yield backend.get_store_collection("me") store_keys = yield self.filtered_all_keys(stores) self.assertEqual(store_keys, []) @inlineCallbacks def test_store_collection_all_empty(self): """ Listing all stores returns an empty list when no stores exist. """ backend = self.get_store_backend() stores = yield backend.get_store_collection("me") all_store_data = yield self.filtered_all(stores) self.assertEqual(all_store_data, []) @inlineCallbacks def test_store_collection_all_not_empty(self): """ Listing all stores returns a non-empty list when stores exist. """ backend = self.get_store_backend() stores = yield backend.get_store_collection("me") store_data = yield stores.create(None, {}) all_store_data = yield self.filtered_all(stores) self.assertEqual(all_store_data, [store_data]) @inlineCallbacks def test_store_collection_get_missing_object(self): """ Asking for an object that does not exist returns None. """ stores = yield self.get_empty_store_collection() store_data = yield stores.get('missing') self.assertEqual(store_data, None) @inlineCallbacks def test_store_collection_create_and_get_null_data(self): """ An object with data=None may be created and retrieved. """ stores = yield self.get_empty_store_collection() store_data = yield stores.create(None, None) store_key = store_data["id"] self.assertEqual(store_data, {'id': store_key, 'data': None}) got_data = yield stores.get(store_key) self.assertEqual(store_data, got_data) @inlineCallbacks def test_store_collection_create_and_get_string_data(self): """ An object with string data may be created and retrieved. """ stores = yield self.get_empty_store_collection() store_data = yield stores.create(None, 'foo') store_key = store_data["id"] self.assertEqual(store_data, {'id': store_key, 'data': 'foo'}) got_data = yield stores.get(store_key) self.assertEqual(store_data, got_data) @inlineCallbacks def test_store_collection_create_and_get_dict_data(self): """ An object with dict data may be created and retrieved. """ stores = yield self.get_empty_store_collection() store_data = yield stores.create(None, {'foo': 42}) store_key = store_data["id"] self.assertEqual(store_data, {'id': store_key, 'data': {'foo': 42}}) got_data = yield stores.get(store_key) self.assertEqual(store_data, got_data) @inlineCallbacks def test_store_collection_create_no_id(self): """ Creating an object with no object_id should generate one. """ stores = yield self.get_empty_store_collection() store_data = yield stores.create(None, {}) store_key = store_data["id"] self.assertEqual(store_data, {'id': store_key, 'data': {}}) got_data = yield stores.get(store_key) self.assertEqual(store_data, got_data) @inlineCallbacks def test_store_collection_create_with_id(self): """ Creating an object with an object_id should not generate a new one. 
""" stores = yield self.get_empty_store_collection() store_data = yield stores.create('key', {}) self.assertEqual(store_data, {'id': 'key', 'data': {}}) got_data = yield stores.get('key') self.assertEqual(store_data, got_data) @inlineCallbacks def test_store_collection_delete_missing_store(self): stores = yield self.get_empty_store_collection() store_data = yield stores.delete('foo') self.assertEqual(store_data, None) store_keys = yield self.filtered_all_keys(stores) self.assertEqual(store_keys, []) @inlineCallbacks def test_store_collection_delete_existing_store(self): stores = yield self.get_empty_store_collection() store_key = (yield stores.create(None, {}))["id"] store_keys = yield self.filtered_all_keys(stores) self.ensure_equal(store_keys, [store_key]) store_data = yield stores.delete(store_key) self.assertEqual(store_data, {'id': store_key, 'data': {}}) store_data = yield stores.get(store_key) self.assertEqual(store_data, None) store_keys = yield self.filtered_all_keys(stores) self.assertEqual(store_keys, []) @inlineCallbacks def test_store_collection_update(self): stores = yield self.get_empty_store_collection() store_data = yield stores.create(None, {}) store_key = store_data["id"] self.ensure_equal(store_data, {'id': store_key, 'data': {}}) store_data = yield stores.update(store_key, {'foo': 'bar'}) self.assertEqual(store_data, {'id': store_key, 'data': {'foo': 'bar'}}) store_data = yield stores.get(store_key) self.assertEqual(store_data, {'id': store_key, 'data': {'foo': 'bar'}}) ############################################## # Tests for row collection functionality. @inlineCallbacks def get_empty_row_collection(self, owner_id="me", store_id="store"): """ Return a row collection after ensuring that it is empty. This raises an exception rather than a failure because it's not part of the intended test assertions. """ backend = self.get_store_backend() rows = yield backend.get_row_collection(owner_id, store_id) keys = yield self.filtered_all_keys(rows) self.ensure_equal( keys, [], "Expected empty row collection for %r:%r, got keys: %r" % ( owner_id, store_id, keys)) returnValue(rows) @inlineCallbacks def test_row_collection_all_keys_empty(self): """ Listing all rows returns an empty list when no rows exist in the store. """ backend = self.get_store_backend() rows = yield backend.get_row_collection("me", "store") row_keys = yield self.filtered_all_keys(rows) self.assertEqual(row_keys, []) @inlineCallbacks def test_row_collection_all_keys_empty_rows_in_other_store(self): """ Listing all rows returns an empty list when no rows exist in the store, even when rows exist in other stores. """ backend = self.get_store_backend() rows = yield backend.get_row_collection("me", "store") other_rows = yield backend.get_row_collection("me", "other_store") yield other_rows.create(None, {}) row_keys = yield self.filtered_all_keys(rows) self.assertEqual(row_keys, []) @inlineCallbacks def test_row_collection_all_empty(self): """ Listing all rows returns an empty list when no rows exist in the store. """ backend = self.get_store_backend() rows = yield backend.get_row_collection("me", "store") all_row_data = yield self.filtered_all(rows) self.assertEqual(all_row_data, []) @inlineCallbacks def test_row_collection_all_not_empty(self): """ Listing all rows returns a non-empty list when rows exist in the store. 
""" backend = self.get_store_backend() rows = yield backend.get_row_collection("me", "store") row_data = yield rows.create(None, {}) all_row_data = yield self.filtered_all(rows) self.assertEqual(all_row_data, [row_data]) @inlineCallbacks def test_row_collection_all_empty_rows_in_other_store(self): """ Listing all rows returns an empty list when no rows exist in the store, even when rows exist in other stores. """ backend = self.get_store_backend() rows = yield backend.get_row_collection("me", "store") other_rows = yield backend.get_row_collection("me", "other_store") yield other_rows.create(None, {}) all_row_data = yield self.filtered_all(rows) self.assertEqual(all_row_data, []) @inlineCallbacks def test_row_collection_get_missing_object(self): """ Asking for an object that does not exist returns None. """ rows = yield self.get_empty_row_collection() row_data = yield rows.get('missing') self.assertEqual(row_data, None) @inlineCallbacks def test_row_collection_create_and_get_null_data(self): """ An object with data=None may be created and retrieved. """ rows = yield self.get_empty_row_collection() row_data = yield rows.create(None, None) row_key = row_data["id"] self.assertEqual(row_data, {'id': row_key, 'data': None}) got_data = yield rows.get(row_key) self.assertEqual(row_data, got_data) @inlineCallbacks def test_row_collection_create_and_get_string_data(self): """ An object with string data may be created and retrieved. """ rows = yield self.get_empty_row_collection() row_data = yield rows.create(None, 'foo') row_key = row_data["id"] self.assertEqual(row_data, {'id': row_key, 'data': 'foo'}) got_data = yield rows.get(row_key) self.assertEqual(row_data, got_data) @inlineCallbacks def test_row_collection_create_and_get_dict_data(self): """ An object with dict data may be created and retrieved. """ rows = yield self.get_empty_row_collection() row_data = yield rows.create(None, {'foo': 42}) row_key = row_data["id"] self.assertEqual(row_data, {'id': row_key, 'data': {'foo': 42}}) got_data = yield rows.get(row_key) self.assertEqual(row_data, got_data) @inlineCallbacks def test_row_collection_create_no_id(self): """ Creating an object with no object_id should generate one. """ rows = yield self.get_empty_row_collection() row_data = yield rows.create(None, {}) row_key = row_data["id"] self.assertEqual(row_data, {'id': row_key, 'data': {}}) got_data = yield rows.get(row_key) self.assertEqual(row_data, got_data) @inlineCallbacks def test_row_collection_create_with_id(self): """ Creating an object with an object_id should not generate a new one. 
""" rows = yield self.get_empty_row_collection() row_data = yield rows.create('key', {}) self.assertEqual(row_data, {'id': 'key', 'data': {}}) got_data = yield rows.get('key') self.assertEqual(row_data, got_data) @inlineCallbacks def test_row_collection_delete_missing_row(self): rows = yield self.get_empty_row_collection() row_data = yield rows.delete('foo') self.assertEqual(row_data, None) row_keys = yield self.filtered_all_keys(rows) self.assertEqual(row_keys, []) @inlineCallbacks def test_row_collection_delete_existing_row(self): rows = yield self.get_empty_row_collection() row_key = (yield rows.create(None, {}))["id"] row_keys = yield self.filtered_all_keys(rows) self.ensure_equal(row_keys, [row_key]) row_data = yield rows.delete(row_key) self.assertEqual(row_data, {'id': row_key, 'data': {}}) row_keys = yield self.filtered_all_keys(rows) self.assertEqual(row_keys, []) @inlineCallbacks def test_row_collection_update(self): rows = yield self.get_empty_row_collection() row_key = (yield rows.create(None, {}))["id"] row_data = yield rows.get(row_key) self.ensure_equal(row_data, {'id': row_key, 'data': {}}) row_data = yield rows.update( row_key, {'foo': 'bar'}) self.assertEqual(row_data, {'id': row_key, 'data': {'foo': 'bar'}}) row_data = yield rows.get(row_key) self.assertEqual(row_data, {'id': row_key, 'data': {'foo': 'bar'}}) class TestInMemoryStore(VumiTestCase, CommonStoreTests): def make_store_backend(self): return InMemoryCollectionBackend({}) class TestRiakStore(VumiTestCase, CommonStoreTests): def setUp(self): self.persistence_helper = self.add_helper( PersistenceHelper(use_riak=True)) self.manager = self.persistence_helper.get_riak_manager() def make_store_backend(self): return RiakCollectionBackend(self.manager) @inlineCallbacks def filtered_all_keys(self, collection): """ There's a delay (3s by default) between object deletion and tombstone cleanup in Riak. Index entries only get removed after this, so we check for the existence of each key and filter out any keys that have no objects associated with them. This means we're never actually checking that deleted objects get removed from the return value of .all_keys() but we can probably assume that Riak indexes work properly. """ keys = yield collection.all_keys() def check_key(key): d = collection.get(key) d.addCallback(lambda obj: None if obj is None else key) return d checked_keys = yield gatherResults([check_key(key) for key in keys]) returnValue([key for key in checked_keys if key is not None])
{ "repo_name": "praekelt/go-store-service", "path": "go_store_service/collections/tests/test_collections.py", "copies": "1", "size": "18337", "license": "bsd-3-clause", "hash": 659917996073299000, "line_mean": 34.6058252427, "line_max": 79, "alpha_frac": 0.6152587664, "autogenerated": false, "ratio": 4.020390265292699, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 515 }
from functools import wraps from typing import Callable from drf_openapi.entities import VersionedSerializers from rest_framework.response import Response def view_config(request_serializer=None, response_serializer=None, validate_response=False): def decorator(view_method): view_method.request_serializer = request_serializer view_method.response_serializer = response_serializer @wraps(view_method) def wrapper(instance, request, version=None, *args, **kwargs): if request_serializer and issubclass(request_serializer, VersionedSerializers): instance.request_serializer = request_serializer.get(version) else: instance.request_serializer = request_serializer if response_serializer and issubclass(response_serializer, VersionedSerializers): instance.response_serializer = response_serializer.get(version) else: instance.response_serializer = response_serializer response = view_method(instance, request, version=version, *args, **kwargs) if validate_response: response_validator = instance.response_serializer(data=response.data) response_validator.is_valid(raise_exception=True) return Response(response_validator.validated_data) return response return wrapper decorator.__annotations__ = {'view_method': Callable, 'return': Callable} return decorator view_config.__annotations__ = {'return': Callable}
{ "repo_name": "limdauto/drf_openapi", "path": "drf_openapi/utils.py", "copies": "1", "size": "1565", "license": "mit", "hash": -3837737249476905500, "line_mean": 40.1842105263, "line_max": 93, "alpha_frac": 0.6862619808, "autogenerated": false, "ratio": 4.905956112852665, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6092218093652665, "avg_score": null, "num_lines": null }
from functools import wraps from .utils import partial_prepare def partial(func): """Wraps func to behave like a partial pipeline step, any output that's not None or {} will be considered a response object and will be returned to user. The pipeline function will receive a current_partial object, it contains the partial pipeline data and a token that is used to identify it when it's continued, this is useful to build links with the token. The default value for this parameter is partial_token, but can be overridden by SOCIAL_AUTH_PARTIAL_PIPELINE_TOKEN_NAME setting. The token is also stored in the session under the partial_pipeline_token key. """ @wraps(func) def wrapper(strategy, backend, pipeline_index, *args, **kwargs): current_partial = partial_prepare(strategy, backend, pipeline_index, *args, **kwargs) out = func(strategy=strategy, backend=backend, pipeline_index=pipeline_index, current_partial=current_partial, *args, **kwargs) or {} if not isinstance(out, dict): strategy.storage.partial.store(current_partial) strategy.session_set('partial_pipeline_token', current_partial.token) return out return wrapper
{ "repo_name": "LennonChin/Django-Practices", "path": "MxShop/extra_apps/social_core/pipeline/partial.py", "copies": "4", "size": "1400", "license": "apache-2.0", "hash": -7580702082072917000, "line_mean": 35.8421052632, "line_max": 76, "alpha_frac": 0.6342857143, "autogenerated": false, "ratio": 4.761904761904762, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.7396190476204763, "avg_score": null, "num_lines": null }
from functools import wraps from .utils import smart_decorator, combine_alternatives from .tree import Tree from .exceptions import VisitError, GrammarError from .lexer import Token ###{standalone from inspect import getmembers, getmro class Discard(Exception): """When raising the Discard exception in a transformer callback, that node is discarded and won't appear in the parent. """ pass # Transformers class _Decoratable: "Provides support for decorating methods with @v_args" @classmethod def _apply_decorator(cls, decorator, **kwargs): mro = getmro(cls) assert mro[0] is cls libmembers = {name for _cls in mro[1:] for name, _ in getmembers(_cls)} for name, value in getmembers(cls): # Make sure the function isn't inherited (unless it's overwritten) if name.startswith('_') or (name in libmembers and name not in cls.__dict__): continue if not callable(value): continue # Skip if v_args already applied (at the function level) if hasattr(cls.__dict__[name], 'vargs_applied') or hasattr(value, 'vargs_applied'): continue static = isinstance(cls.__dict__[name], (staticmethod, classmethod)) setattr(cls, name, decorator(value, static=static, **kwargs)) return cls def __class_getitem__(cls, _): return cls class Transformer(_Decoratable): """Transformers visit each node of the tree, and run the appropriate method on it according to the node's data. Calls its methods (provided by the user via inheritance) according to ``tree.data``. The returned value replaces the old one in the structure. They work bottom-up (or depth-first), starting with the leaves and ending at the root of the tree. Transformers can be used to implement map & reduce patterns. Because nodes are reduced from leaf to root, at any point the callbacks may assume the children have already been transformed (if applicable). ``Transformer`` can do anything ``Visitor`` can do, but because it reconstructs the tree, it is slightly less efficient. It can be used to implement map or reduce patterns. All these classes implement the transformer interface: - ``Transformer`` - Recursively transforms the tree. This is the one you probably want. - ``Transformer_InPlace`` - Non-recursive. Changes the tree in-place instead of returning new instances - ``Transformer_InPlaceRecursive`` - Recursive. Changes the tree in-place instead of returning new instances Parameters: visit_tokens (bool, optional): Should the transformer visit tokens in addition to rules. Setting this to ``False`` is slightly faster. Defaults to ``True``. (For processing ignored tokens, use the ``lexer_callbacks`` options) NOTE: A transformer without methods essentially performs a non-memoized deepcopy. 
""" __visit_tokens__ = True # For backwards compatibility def __init__(self, visit_tokens=True): self.__visit_tokens__ = visit_tokens def _call_userfunc(self, tree, new_children=None): # Assumes tree is already transformed children = new_children if new_children is not None else tree.children try: f = getattr(self, tree.data) except AttributeError: return self.__default__(tree.data, children, tree.meta) else: try: wrapper = getattr(f, 'visit_wrapper', None) if wrapper is not None: return f.visit_wrapper(f, tree.data, children, tree.meta) else: return f(children) except (GrammarError, Discard): raise except Exception as e: raise VisitError(tree.data, tree, e) def _call_userfunc_token(self, token): try: f = getattr(self, token.type) except AttributeError: return self.__default_token__(token) else: try: return f(token) except (GrammarError, Discard): raise except Exception as e: raise VisitError(token.type, token, e) def _transform_children(self, children): for c in children: try: if isinstance(c, Tree): yield self._transform_tree(c) elif self.__visit_tokens__ and isinstance(c, Token): yield self._call_userfunc_token(c) else: yield c except Discard: pass def _transform_tree(self, tree): children = list(self._transform_children(tree.children)) return self._call_userfunc(tree, children) def transform(self, tree): "Transform the given tree, and return the final result" return self._transform_tree(tree) def __mul__(self, other): """Chain two transformers together, returning a new transformer. """ return TransformerChain(self, other) def __default__(self, data, children, meta): """Default function that is called if there is no attribute matching ``data`` Can be overridden. Defaults to creating a new copy of the tree node (i.e. ``return Tree(data, children, meta)``) """ return Tree(data, children, meta) def __default_token__(self, token): """Default function that is called if there is no attribute matching ``token.type`` Can be overridden. Defaults to returning the token as-is. """ return token class InlineTransformer(Transformer): # XXX Deprecated def _call_userfunc(self, tree, new_children=None): # Assumes tree is already transformed children = new_children if new_children is not None else tree.children try: f = getattr(self, tree.data) except AttributeError: return self.__default__(tree.data, children, tree.meta) else: return f(*children) class TransformerChain(object): def __init__(self, *transformers): self.transformers = transformers def transform(self, tree): for t in self.transformers: tree = t.transform(tree) return tree def __mul__(self, other): return TransformerChain(*self.transformers + (other,)) class Transformer_InPlace(Transformer): """Same as Transformer, but non-recursive, and changes the tree in-place instead of returning new instances Useful for huge trees. Conservative in memory. """ def _transform_tree(self, tree): # Cancel recursion return self._call_userfunc(tree) def transform(self, tree): for subtree in tree.iter_subtrees(): subtree.children = list(self._transform_children(subtree.children)) return self._transform_tree(tree) class Transformer_NonRecursive(Transformer): """Same as Transformer but non-recursive. Like Transformer, it doesn't change the original tree. Useful for huge trees. 
""" def transform(self, tree): # Tree to postfix rev_postfix = [] q = [tree] while q: t = q.pop() rev_postfix.append(t) if isinstance(t, Tree): q += t.children # Postfix to tree stack = [] for x in reversed(rev_postfix): if isinstance(x, Tree): size = len(x.children) if size: args = stack[-size:] del stack[-size:] else: args = [] stack.append(self._call_userfunc(x, args)) else: stack.append(x) t ,= stack # We should have only one tree remaining return t class Transformer_InPlaceRecursive(Transformer): "Same as Transformer, recursive, but changes the tree in-place instead of returning new instances" def _transform_tree(self, tree): tree.children = list(self._transform_children(tree.children)) return self._call_userfunc(tree) # Visitors class VisitorBase: def _call_userfunc(self, tree): return getattr(self, tree.data, self.__default__)(tree) def __default__(self, tree): """Default function that is called if there is no attribute matching ``tree.data`` Can be overridden. Defaults to doing nothing. """ return tree def __class_getitem__(cls, _): return cls class Visitor(VisitorBase): """Tree visitor, non-recursive (can handle huge trees). Visiting a node calls its methods (provided by the user via inheritance) according to ``tree.data`` """ def visit(self, tree): "Visits the tree, starting with the leaves and finally the root (bottom-up)" for subtree in tree.iter_subtrees(): self._call_userfunc(subtree) return tree def visit_topdown(self,tree): "Visit the tree, starting at the root, and ending at the leaves (top-down)" for subtree in tree.iter_subtrees_topdown(): self._call_userfunc(subtree) return tree class Visitor_Recursive(VisitorBase): """Bottom-up visitor, recursive. Visiting a node calls its methods (provided by the user via inheritance) according to ``tree.data`` Slightly faster than the non-recursive version. """ def visit(self, tree): "Visits the tree, starting with the leaves and finally the root (bottom-up)" for child in tree.children: if isinstance(child, Tree): self.visit(child) self._call_userfunc(tree) return tree def visit_topdown(self,tree): "Visit the tree, starting at the root, and ending at the leaves (top-down)" self._call_userfunc(tree) for child in tree.children: if isinstance(child, Tree): self.visit_topdown(child) return tree def visit_children_decor(func): "See Interpreter" @wraps(func) def inner(cls, tree): values = cls.visit_children(tree) return func(cls, values) return inner class Interpreter(_Decoratable): """Interpreter walks the tree starting at the root. Visits the tree, starting with the root and finally the leaves (top-down) For each tree node, it calls its methods (provided by user via inheritance) according to ``tree.data``. Unlike ``Transformer`` and ``Visitor``, the Interpreter doesn't automatically visit its sub-branches. The user has to explicitly call ``visit``, ``visit_children``, or use the ``@visit_children_decor``. This allows the user to implement branching and loops. 
""" def visit(self, tree): f = getattr(self, tree.data) wrapper = getattr(f, 'visit_wrapper', None) if wrapper is not None: return f.visit_wrapper(f, tree.data, tree.children, tree.meta) else: return f(tree) def visit_children(self, tree): return [self.visit(child) if isinstance(child, Tree) else child for child in tree.children] def __getattr__(self, name): return self.__default__ def __default__(self, tree): return self.visit_children(tree) # Decorators def _apply_decorator(obj, decorator, **kwargs): try: _apply = obj._apply_decorator except AttributeError: return decorator(obj, **kwargs) else: return _apply(decorator, **kwargs) def _inline_args__func(func): @wraps(func) def create_decorator(_f, with_self): if with_self: def f(self, children): return _f(self, *children) else: def f(self, children): return _f(*children) return f return smart_decorator(func, create_decorator) def inline_args(obj): # XXX Deprecated return _apply_decorator(obj, _inline_args__func) def _visitor_args_func_dec(func, visit_wrapper=None, static=False): def create_decorator(_f, with_self): if with_self: def f(self, *args, **kwargs): return _f(self, *args, **kwargs) else: def f(self, *args, **kwargs): return _f(*args, **kwargs) return f if static: f = wraps(func)(create_decorator(func, False)) else: f = smart_decorator(func, create_decorator) f.vargs_applied = True f.visit_wrapper = visit_wrapper return f def _vargs_inline(f, _data, children, _meta): return f(*children) def _vargs_meta_inline(f, _data, children, meta): return f(meta, *children) def _vargs_meta(f, _data, children, meta): return f(children, meta) # TODO swap these for consistency? Backwards incompatible! def _vargs_tree(f, data, children, meta): return f(Tree(data, children, meta)) def v_args(inline=False, meta=False, tree=False, wrapper=None): """A convenience decorator factory for modifying the behavior of user-supplied visitor methods. By default, callback methods of transformers/visitors accept one argument - a list of the node's children. ``v_args`` can modify this behavior. When used on a transformer/visitor class definition, it applies to all the callback methods inside it. ``v_args`` can be applied to a single method, or to an entire class. When applied to both, the options given to the method take precedence. Parameters: inline (bool, optional): Children are provided as ``*args`` instead of a list argument (not recommended for very long lists). meta (bool, optional): Provides two arguments: ``children`` and ``meta`` (instead of just the first) tree (bool, optional): Provides the entire tree as the argument, instead of the children. wrapper (function, optional): Provide a function to decorate all methods. 
Example: :: @v_args(inline=True) class SolveArith(Transformer): def add(self, left, right): return left + right class ReverseNotation(Transformer_InPlace): @v_args(tree=True) def tree_node(self, tree): tree.children = tree.children[::-1] """ if tree and (meta or inline): raise ValueError("Visitor functions cannot combine 'tree' with 'meta' or 'inline'.") func = None if meta: if inline: func = _vargs_meta_inline else: func = _vargs_meta elif inline: func = _vargs_inline elif tree: func = _vargs_tree if wrapper is not None: if func is not None: raise ValueError("Cannot use 'wrapper' along with 'tree', 'meta' or 'inline'.") func = wrapper def _visitor_args_dec(obj): return _apply_decorator(obj, _visitor_args_func_dec, visit_wrapper=func) return _visitor_args_dec ###} # --- Visitor Utilities --- class CollapseAmbiguities(Transformer): """ Transforms a tree that contains any number of _ambig nodes into a list of trees, each one containing an unambiguous tree. The length of the resulting list is the product of the length of all _ambig nodes. Warning: This may quickly explode for highly ambiguous trees. """ def _ambig(self, options): return sum(options, []) def __default__(self, data, children_lists, meta): return [Tree(data, children, meta) for children in combine_alternatives(children_lists)] def __default_token__(self, t): return [t]
{ "repo_name": "erezsh/lark", "path": "lark/visitors.py", "copies": "1", "size": "15756", "license": "mit", "hash": 3204314142606212600, "line_mean": 32.2405063291, "line_max": 133, "alpha_frac": 0.6188753491, "autogenerated": false, "ratio": 4.3214481623697205, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0019248129901727321, "num_lines": 474 }
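A minimal usage sketch for the transformer interface above, assuming the standard `lark` package exports; the `Tree`/`Token` values are built by hand here instead of coming from a parse, purely for illustration:

from lark import Token, Transformer, Tree, v_args

@v_args(inline=True)  # children arrive as *args instead of a single list
class CalcTransformer(Transformer):
    def add(self, left, right):
        return left + right

    def INT(self, token):  # token callback; runs because visit_tokens defaults to True
        return int(token)

# Equivalent to the tree a rule like `add: INT "+" INT` would produce.
tree = Tree('add', [Token('INT', '1'), Token('INT', '2')])
assert CalcTransformer().transform(tree) == 3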
from functools import wraps from .utils import thread_get_logger, get_func_path from .exceptions import LoggerNotRegistered def logexpose(logger_alias, func_before=None, func_after=None, level='debug'): def wrapper(func): @wraps(func) def faked(*args, **kwargs): logger = thread_get_logger(logger_alias) if logger is None: raise LoggerNotRegistered( 'Function `%s` tried to use an unregistered `%s` logger.' % (get_func_path(func), logger_alias) ) func_before_real = func_before or getattr(logger, 'default_func_before', None) func_after_real = func_after or getattr(logger, 'default_func_after', None) grp_id = None msg_id = logger.generate_msg_id() parent_msg_id = logger.get_parent_msg_id() logger.stack.append(msg_id) try: if func_before_real: grp_id = func_before_real( func=func, fargs=args, fkwargs=kwargs, level=level, ids_tuple=(None, msg_id, parent_msg_id) )[0] result = func(*args, **kwargs) if func_after_real: func_after_real( func=func, fargs=args, fkwargs=kwargs, level=level, ids_tuple=(grp_id, None, parent_msg_id) ) finally: logger.stack.pop() return result return faked return wrapper
{ "repo_name": "idlesign/django-logexpose", "path": "logexpose/decorators.py", "copies": "1", "size": "1573", "license": "bsd-3-clause", "hash": 3221982536553603000, "line_mean": 31.7708333333, "line_max": 115, "alpha_frac": 0.5238397966, "autogenerated": false, "ratio": 4.172413793103448, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5196253589703448, "avg_score": null, "num_lines": null }
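A hedged sketch of applying the decorator above; 'db' is a hypothetical logger alias, and registering that logger with the thread-local registry (the part that prevents LoggerNotRegistered) is outside this snippet:

from logexpose.decorators import logexpose

@logexpose('db', level='info')
def fetch_user(user_id):
    # Raises LoggerNotRegistered unless a logger aliased 'db' was registered first.
    return {'id': user_id}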
from functools import wraps from vial import ref, refs def test_ref(): refs.clear() def boo(foo): return foo r = ref(boo) assert r(1) == 1 assert str(r) == 'vial.refs[\'tests.test_ref.boo:10\']' def test_ref_with_same_name(): refs.clear() def boo(): return 1 r1 = ref(boo) def boo(): return 2 r2 = ref(boo) assert r1() == 1 assert r2() == 2 def test_lambda(): refs.clear() r = ref(lambda: 'boo') assert r() == 'boo' assert str(r) == 'vial.refs[\'tests.test_ref.<lambda>:35\']' def bar(foo): return foo def test_lazy_func(): refs.clear() r = ref('.test_ref.bar') assert r(10) == 10 assert str(r) == 'vial.refs[\'tests.test_ref.bar:lazy\']' def test_ref_to_decorated_func(): def d(func): @wraps(func) def inner(): return func() inner.func = func return inner @d def boo(): return 1 r1 = ref(boo) @d def boo(): return 2 r2 = ref(boo) assert str(r1) == "vial.refs['tests.test_ref.boo:60']" assert r1() == 1 assert str(r2) == "vial.refs['tests.test_ref.boo:65']" assert r2() == 2
{ "repo_name": "guilhermedallanol/dotfiles", "path": "vim/plugged/vial/tests/test_ref.py", "copies": "2", "size": "1210", "license": "mit", "hash": 2947145706250485000, "line_mean": 15.5753424658, "line_max": 64, "alpha_frac": 0.5198347107, "autogenerated": false, "ratio": 3.025, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.95448347107, "avg_score": 0, "num_lines": 73 }
from functools import wraps

from vumi.blinkenlights.metrics import Metric

from go.dashboard import client
from go.vumitools.metrics import (
    get_account_metric_prefix, get_conversation_metric_prefix)


def is_collection(obj):
    return (
        isinstance(obj, dict)
        or isinstance(obj, list)
        or isinstance(obj, set)
        or isinstance(obj, tuple))


def is_mutable_collection(obj):
    return (
        isinstance(obj, dict)
        or isinstance(obj, list)
        or isinstance(obj, set))


def collection_items(collection):
    if isinstance(collection, dict):
        items = collection.iteritems()
    else:
        items = enumerate(collection)
    return items


def visit_dicts(collection, fn):
    is_mutable = is_mutable_collection(collection)

    for key, value in collection_items(collection):
        if is_collection(value):
            visit_dicts(value, fn)

            if is_mutable and isinstance(value, dict):
                collection[key] = fn(value)


def ensure_handler_fields(*fields):
    def decorator(fn):
        @wraps(fn)
        def wrapper(self, target):
            missing_fields = [f for f in fields if f not in target]
            if missing_fields:
                raise DashboardParseError(
                    "Dashboard layout handler '%s' is missing fields: %s"
                    % (fn.__name__, missing_fields))
            return fn(self, target)
        return wrapper
    return decorator


class DashboardError(Exception):
    """
    Raised when an error is encountered while building or using a dashboard.
    """


class DashboardSyncError(DashboardError):
    """
    Raised when we fail to sync the dashboard with diamondash.
    """


class DashboardParseError(DashboardError):
    """
    Raised when dashboard data cannot be parsed into something that can be
    given to diamondash.
    """


class Dashboard(object):
    def __init__(self, name, layout):
        self.diamondash_api = client.get_diamondash_api()
        self.name = name
        self.layout = layout
        self.config = None

    def _get_raw_config(self):
        return {
            'name': self.name,
            'widgets': self.layout.get_config()
        }

    def sync(self):
        """
        Ensures the dashboard exists on diamondash's side
        """
        try:
            raw_config = self._get_raw_config()
            self.config = self.diamondash_api.replace_dashboard(raw_config)
        except Exception as e:
            raise DashboardSyncError("Dashboard sync failed: %s" % e)

    def get_config(self):
        if self.config is None:
            raise DashboardError(
                "Could not retrieve dashboard config, "
                "dashboard has not yet been synced")
        return self.config


class DashboardLayout(object):
    def __init__(self, entities=None):
        self.entities = []

        for entity in (entities or []):
            self.add_entity(entity)

    def handle_metric(self, target):
        handler_name = "handle_%s_metric" % target['metric_type']
        handler = getattr(self, handler_name, None)

        if handler is None:
            raise DashboardParseError(
                "No dashboard metric handler found for metric_type '%s'"
                % target['metric_type'])

        return handler(target)

    def parse_widget_metrics(self, widget):
        def traverse(collection):
            if 'metric_type' in collection:
                collection = self.handle_metric(collection)
            return collection

        visit_dicts(widget, traverse)

    def add_widget(self, widget):
        self.parse_widget_metrics(widget)
        self.entities.append(widget)

    def new_row(self):
        self.entities.append('new_row')

    def add_entity(self, entity):
        if entity == 'new_row':
            self.new_row()
        else:
            self.add_widget(entity)

    def get_config(self):
        return self.entities


def get_metric_diamondash_target(prefix, metric_name, aggregator_name):
    return "%s%s.%s" % (prefix, metric_name, aggregator_name)


class ConversationReportsLayout(DashboardLayout):
    def __init__(self, conv, entities=None):
        self.conv = conv
        super(ConversationReportsLayout, self).__init__(entities)

    def aggregator_from_target(self, target):
        aggregator = target.get('aggregator')

        # FIXME: We don't always get the aggregator in the target. In order to
        # handle this, we get the name of the (first) default aggregator
        # from Metric if the aggregator is not specified.
        if aggregator is None:
            aggregator = Metric.DEFAULT_AGGREGATORS[0].name

        return aggregator

    @ensure_handler_fields('name')
    def handle_conversation_metric(self, target):
        prefix = get_conversation_metric_prefix(self.conv)
        return get_metric_diamondash_target(
            prefix, target['name'], self.aggregator_from_target(target))

    @ensure_handler_fields('store', 'name')
    def handle_account_metric(self, target):
        prefix = get_account_metric_prefix(
            self.conv.user_account.key, target['store'])
        return get_metric_diamondash_target(
            prefix, target['name'], self.aggregator_from_target(target))
{ "repo_name": "praekelt/vumi-go", "path": "go/dashboard/dashboard.py", "copies": "1", "size": "5297", "license": "bsd-3-clause", "hash": 6421130393598828000, "line_mean": 28.2651933702, "line_max": 79, "alpha_frac": 0.61657542, "autogenerated": false, "ratio": 4.093508500772797, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 181 }
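A small sketch of the `visit_dicts` helper above (Python 2, matching the module's use of `iteritems`): nested dicts inside mutable collections are replaced by the callback's return value, while the outermost dict is left alone:

data = {'rows': [{'value': 1}, {'value': 2}]}
visit_dicts(data, lambda d: dict(d, tagged=True))
assert data == {'rows': [{'value': 1, 'tagged': True},
                         {'value': 2, 'tagged': True}]}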
from functools import wraps

from waffle.compat import CLASS_TYPES
from waffle.models import Flag, Switch, Sample


__all__ = ['override_flag', 'override_sample', 'override_switch']


class _overrider(object):
    def __init__(self, name, active):
        self.name = name
        self.active = active

    def __call__(self, func):
        if isinstance(func, CLASS_TYPES):
            return self.for_class(func)
        else:
            return self.for_callable(func)

    def for_class(self, obj):
        """Wraps a class's test methods in the decorator"""
        for attr in dir(obj):
            if not attr.startswith('test_'):
                # Ignore non-test functions
                continue

            attr_value = getattr(obj, attr)
            if not callable(attr_value):
                # Ignore non-functions
                continue

            setattr(obj, attr, self.for_callable(attr_value))

        return obj

    def for_callable(self, func):
        """Wraps a method in the decorator"""
        @wraps(func)
        def _wrapped(*args, **kwargs):
            with self:
                return func(*args, **kwargs)

        return _wrapped

    def get(self):
        self.obj, self.created = self.cls.objects.get_or_create(name=self.name)

    def update(self, active):
        raise NotImplementedError

    def get_value(self):
        raise NotImplementedError

    def __enter__(self):
        self.get()
        self.old_value = self.get_value()
        if self.old_value != self.active:
            self.update(self.active)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.created:
            self.obj.delete()
        else:
            self.update(self.old_value)


class override_switch(_overrider):
    """
    override_switch is a context manager for easier testing of switches.

    It accepts two parameters: the name of the switch and its state.

    Example usage::

        with override_switch('happy_mode', active=True):
            ...

    If `Switch` already existed, its value would be changed inside the context
    block, then restored to the original value. If `Switch` did not exist
    before entering the context, it is created, then removed at the end of
    the block.

    It can also act as a decorator::

        @override_switch('happy_mode', active=True)
        def test_happy_mode_enabled():
            ...

    """
    cls = Switch

    def update(self, active):
        self.cls.objects.filter(pk=self.obj.pk).update(active=active)

    def get_value(self):
        return self.obj.active


class override_flag(_overrider):
    cls = Flag

    def update(self, active):
        self.cls.objects.filter(pk=self.obj.pk).update(everyone=active)

    def get_value(self):
        return self.obj.everyone


class override_sample(_overrider):
    cls = Sample

    def get(self):
        try:
            self.obj = self.cls.objects.get(name=self.name)
            self.created = False
        except self.cls.DoesNotExist:
            self.obj = self.cls.objects.create(name=self.name, percent='0.0')
            self.created = True

    def update(self, active):
        if active is True:
            p = 100.0
        elif active is False:
            p = 0.0
        else:
            p = active
        self.cls.objects.filter(pk=self.obj.pk).update(percent='{0}'.format(p))

    def get_value(self):
        p = self.obj.percent
        if p == 100.0:
            return True
        if p == 0.0:
            return False
        return p
{ "repo_name": "styleseat/django-waffle", "path": "waffle/testutils.py", "copies": "4", "size": "3532", "license": "bsd-3-clause", "hash": -3124368710993069000, "line_mean": 24.9705882353, "line_max": 79, "alpha_frac": 0.5784258211, "autogenerated": false, "ratio": 3.9819616685456594, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 136 }
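A sketch of the overriders above in a test module (assumes waffle's public helpers and a configured test database; the names are illustrative):

import waffle

@override_switch('happy_mode', active=True)
def test_switch_on():
    assert waffle.switch_is_active('happy_mode')

def test_sample_pinned():
    # active=True forces the sample's percent to 100.0 for the block.
    with override_sample('new_algo', active=True):
        assert waffle.sample_is_active('new_algo')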
from functools import wraps

from waffle.models import Flag, Switch, Sample


__all__ = ['override_flag', 'override_sample', 'override_switch']


class _overrider(object):
    def __init__(self, name, active):
        self.name = name
        self.active = active

    def __call__(self, func):
        @wraps(func)
        def _wrapped(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return _wrapped

    def get(self):
        self.obj, self.created = self.cls.objects.get_or_create(name=self.name)

    def update(self, active):
        raise NotImplementedError

    def get_value(self):
        raise NotImplementedError

    def __enter__(self):
        self.get()
        self.old_value = self.get_value()
        if self.old_value != self.active:
            self.update(self.active)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.created:
            self.obj.delete()
        else:
            self.update(self.old_value)


class override_switch(_overrider):
    """
    override_switch is a context manager for easier testing of switches.

    It accepts two parameters: the name of the switch and its state.

    Example usage::

        with override_switch('happy_mode', active=True):
            ...

    If `Switch` already existed, its value would be changed inside the context
    block, then restored to the original value. If `Switch` did not exist
    before entering the context, it is created, then removed at the end of
    the block.

    It can also act as a decorator::

        @override_switch('happy_mode', active=True)
        def test_happy_mode_enabled():
            ...

    """
    cls = Switch

    def update(self, active):
        self.cls.objects.filter(pk=self.obj.pk).update(active=active)

    def get_value(self):
        return self.obj.active


class override_flag(_overrider):
    cls = Flag

    def update(self, active):
        self.cls.objects.filter(pk=self.obj.pk).update(everyone=active)

    def get_value(self):
        return self.obj.everyone


class override_sample(_overrider):
    cls = Sample

    def get(self):
        try:
            self.obj = self.cls.objects.get(name=self.name)
            self.created = False
        except self.cls.DoesNotExist:
            self.obj = self.cls.objects.create(name=self.name, percent='0.0')
            self.created = True

    def update(self, active):
        if active is True:
            p = 100.0
        elif active is False:
            p = 0.0
        else:
            p = active
        self.cls.objects.filter(pk=self.obj.pk).update(percent='{0}'.format(p))

    def get_value(self):
        p = self.obj.percent
        if p == 100.0:
            return True
        if p == 0.0:
            return False
        return p
{ "repo_name": "safarijv/django-waffle", "path": "waffle/testutils.py", "copies": "5", "size": "2804", "license": "bsd-3-clause", "hash": -3958064031515929600, "line_mean": 24.4909090909, "line_max": 79, "alpha_frac": 0.5866619116, "autogenerated": false, "ratio": 3.8622589531680442, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 110 }
from functools import wraps

from waffle.models import Flag, Switch, Sample, uncache_flag


__all__ = ['override_flag', 'override_sample', 'override_switch']


class _overrider(object):
    def __init__(self, name, active):
        self.name = name
        self.active = active

    def __call__(self, func):
        @wraps(func)
        def _wrapped(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return _wrapped

    def get(self):
        self.obj, self.created = self.cls.objects.get_or_create(name=self.name)

    def update(self, active):
        raise NotImplementedError

    def get_value(self):
        raise NotImplementedError

    def __enter__(self):
        self.get()
        self.old_value = self.get_value()
        if self.old_value != self.active:
            self.update(self.active)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.created:
            self.obj.delete()
        else:
            self.update(self.old_value)


class override_switch(_overrider):
    """
    override_switch is a context manager for easier testing of switches.

    It accepts two parameters: the name of the switch and its state.

    Example usage::

        with override_switch('happy_mode', active=True):
            ...

    If `Switch` already existed, its value would be changed inside the context
    block, then restored to the original value. If `Switch` did not exist
    before entering the context, it is created, then removed at the end of
    the block.

    It can also act as a decorator::

        @override_switch('happy_mode', active=True)
        def test_happy_mode_enabled():
            ...

    """
    cls = Switch

    def update(self, active):
        self.obj.active = active
        self.obj.save()

    def get_value(self):
        return self.obj.active


class override_flag(_overrider):
    cls = Flag

    def update(self, active):
        self.obj.everyone = active
        self.obj.save()

    def get_value(self):
        return self.obj.everyone


class override_sample(_overrider):
    cls = Sample

    def get(self):
        try:
            self.obj = self.cls.objects.get(name=self.name)
            self.created = False
        except self.cls.DoesNotExist:
            self.obj = self.cls.objects.create(name=self.name, percent='0.0')
            self.created = True

    def update(self, active):
        if active is True:
            p = 100.0
        elif active is False:
            p = 0.0
        else:
            p = active
        self.obj.percent = '{0}'.format(p)
        self.obj.save()

    def get_value(self):
        p = self.obj.percent
        if p == 100.0:
            return True
        if p == 0.0:
            return False
        return p
{ "repo_name": "festicket/django-waffle", "path": "waffle/testutils.py", "copies": "1", "size": "2780", "license": "bsd-3-clause", "hash": 1779226623963759900, "line_mean": 23.3859649123, "line_max": 79, "alpha_frac": 0.5751798561, "autogenerated": false, "ratio": 3.921015514809591, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4996195370909591, "avg_score": null, "num_lines": null }
from functools import wraps
from wedo.distance import interpolate_distance_data
from wedo.motor import processMotorValues
from wedo.tilt import process_tilt
from wedo.tilt import FLAT, TILT_BACK, TILT_FORWARD, TILT_LEFT, TILT_RIGHT

import os
import usb.core
import logging

logger = logging.getLogger('wedo')

ID_VENDOR = 0x0694
ID_PRODUCT = 0x0003

WEDO_INTERFACE = 0
WEDO_CONFIGURATION = 1

UNAVAILABLE = None

TILTSENSOR = (38, 39, 40)
DISTANCESENSOR = (176, 177, 178, 179, 180)
MOTOR = (238, 239)

# limit the visibility to simplify the usage
__all__ = ["scan_for_devices", "WeDo", "FLAT", "TILT_BACK", "TILT_FORWARD", "TILT_LEFT", "TILT_RIGHT"]


def scan_for_devices():
    """ Find all available devices """
    devices = []
    try:
        for dev in usb.core.find(find_all=True, idVendor=ID_VENDOR, idProduct=ID_PRODUCT):
            devices.append(dev)
    except usb.core.USBError as e:
        logger.error("Could not find a connected WeDo device: %s" % str(e))
    return devices


class WeDo(object):
    """
    Each instance of this class represents a physical WeDo device.

    Usage:
    >>> from wedo import WeDo
    >>> wd = WeDo()

    Activating the first motor full forward:
    >>> wd.motor_a = 100

    Activating the second motor half speed/force backward:
    >>> wd.motor_b = -50

    Current value of the tilt sensor:
    >>> wd.tilt

    Current distance value in meters of the distance sensor:
    >>> wd.distance
    """

    def __init__(self, device=None):
        """
        If a device is not given, it will attach this instance to the
        first one found. Otherwise you can pass a specific one from the
        list returned by scan_for_devices.
        """
        self.number = 0
        self.dev = device
        if self.dev is None:
            devices = scan_for_devices()
            if not devices:
                raise OSError("Could not find a connected WeDo device")
            self.dev = devices[0]
        self.valMotorA = 0
        self.valMotorB = 0
        self.init_device()

    def init_device(self):
        """
        Reinit device associated with the WeDo instance
        """
        try:
            if os.name != 'nt' and self.dev.is_kernel_driver_active(WEDO_INTERFACE):
                try:
                    self.dev.detach_kernel_driver(WEDO_INTERFACE)
                except usb.core.USBError as e:
                    logger.error("Could not detach kernel driver: %s" % str(e))
            self.dev.set_configuration(WEDO_CONFIGURATION)
            self.endpoint = self.dev[0][(0, 0)][0]
        except usb.core.USBError as e:
            logger.error("Could not init device: %s" % str(e))

    def getRawData(self):
        """Read 64 bytes from the WeDo's endpoint, but only return the last eight."""
        try:
            return self.endpoint.read(64)[-8:]
        except usb.core.USBError as e:
            logger.exception("Could not read from WeDo device")
        return None

    def setMotors(self):
        """
        Arguments should be in the form of a number between 0 and 100,
        positive or negative.
        Magic numbers used for the ctrl_transfer derived from sniffing
        USB coms.
        """
        data = [64, processMotorValues(self.valMotorA) & 0xFF,
                processMotorValues(self.valMotorB) & 0xFF,
                0x00, 0x00, 0x00, 0x00, 0x00]
        try:
            self.dev.ctrl_transfer(bmRequestType=0x21, bRequest=0x09,
                                   wValue=0x0200, wIndex=0,
                                   data_or_wLength=data)
        except usb.core.USBError as e:
            logger.exception("Could not write to driver")

    def getData(self):
        """
        Sensor data is contained in the 2nd and 4th byte, with sensor IDs
        being contained in the 3rd and 5th byte respectively.
        """
        rawData = self.getRawData()
        if rawData is not None:
            sensorData = {rawData[3]: rawData[2], rawData[5]: rawData[4]}
        else:
            sensorData = {}
        return sensorData

    @property
    def raw_tilt(self):
        """ Returns the raw tilt direction (arbitrary units) """
        data = self.getData()
        for num in data:
            if num in TILTSENSOR:
                return data[num]
        return UNAVAILABLE

    @property
    def tilt(self):
        """ Returns the tilt direction (one of the FLAT, TILT_FORWARD,
        TILT_LEFT, TILT_RIGHT, TILT_BACK constants) """
        raw_data = self.raw_tilt
        if raw_data is UNAVAILABLE:
            return UNAVAILABLE
        return process_tilt(raw_data)

    @property
    def raw_distance(self):
        """ Return the raw evaluated distance from the distance meter (arbitrary units) """
        data = self.getData()
        for num in data:
            if num in DISTANCESENSOR:
                return data[num]
        return UNAVAILABLE

    @property
    def distance(self):
        """ Return the evaluated distance in meters from the distance meter.
        (Note: this is the ideal distance without any objects on the side,
        you might have to adapt it depending on your construction) """
        raw_data = self.raw_distance
        if raw_data is UNAVAILABLE:
            return UNAVAILABLE
        return interpolate_distance_data(raw_data)

    @property
    def motor_a(self):
        """ Get back the last speed/force set for motor A """
        return self.valMotorA

    @property
    def motor_b(self):
        """ Get back the last speed/force set for motor B """
        return self.valMotorB

    @motor_a.setter
    def motor_a(self, value):
        """ Sets the speed/force of the motor A, expects a value between -100 and 100 """
        if value > 100 or value < -100:
            raise ValueError("A motor can only be between -100 and 100")
        self.valMotorA = value
        self.setMotors()

    @motor_b.setter
    def motor_b(self, value):
        """ Sets the speed/force of the motor B, expects a value between -100 and 100 """
        if value > 100 or value < -100:
            raise ValueError("A motor can only be between -100 and 100")
        self.valMotorB = value
        self.setMotors()
{ "repo_name": "nvazquez/Turtlebots", "path": "plugins/wedo_plugin/wedo/__init__.py", "copies": "1", "size": "6290", "license": "mit", "hash": -3639753260016241700, "line_mean": 30.608040201, "line_max": 132, "alpha_frac": 0.5910969793, "autogenerated": false, "ratio": 3.8660110633066993, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.49571080426066993, "avg_score": null, "num_lines": null }
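A short polling sketch against the class above; it assumes a powered brick is plugged in, and the 40% duty value is arbitrary:

import time
from wedo import WeDo

wd = WeDo()       # attaches to the first detected device
wd.motor_a = 40   # run motor A forward at 40% power
for _ in range(10):
    print(wd.tilt, wd.distance)
    time.sleep(0.5)
wd.motor_a = 0    # stop the motor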
from functools import wraps

from werkzeug.exceptions import HTTPException
from werkzeug.routing import Map, Rule

from flask import current_app, request
from flask_socketio import join_room, leave_room

from .exc import InvalidRequestError, InvalidURIError, SocketAPIError


class SocketAPI(object):

    def __init__(self, socketio=None, namespace=None):
        self.namespace = namespace
        self.routes = Map()
        self.urls = self.routes.bind('/', '/')
        self.patch_handlers = {}

        if socketio is not None:
            self.init_socketio(socketio)

    def init_socketio(self, socketio):
        self.socketio = socketio

        @socketio.on('create', namespace=self.namespace)
        def handle_create(payload):
            # Retrieve request arguments.
            if 'uri' not in payload:
                raise InvalidRequestError('missing URI')
            uri = payload['uri']
            attributes = payload.get('attributes', {})

            # Search for a matching route.
            try:
                creator, kwargs = self.urls.match(uri, method='POST')
            except HTTPException:
                # No registered resource creator for this uri.
                raise InvalidRequestError("no registered resource creator for '%s'" % uri)

            # Create the new resource instance.
            kwargs.update(attributes)
            resource = creator(**kwargs)

            # Send the creation event to all subscribers of the uri.
            self.socketio.emit('create', {
                'uri': uri,
                'resource': resource
            }, room=uri)

        @socketio.on('patch', namespace=self.namespace)
        def handle_patch(payload):
            # Retrieve request arguments.
            if 'uri' not in payload:
                raise InvalidRequestError('missing URI')
            uri = payload['uri']
            patch = payload.get('patch', {})

            # Search for a matching route.
            try:
                rule, kwargs = self.urls.match(uri, return_rule=True, method='PATCH')
                kwargs['patch'] = patch
            except HTTPException:
                # No registered resource patcher for this uri.
                raise InvalidRequestError("no registered resource patcher for '%s'" % uri)

            # Call all the resource patchers for the given uri.
            for patch_handler in self.patch_handlers[rule.rule]:
                patch_handler(**kwargs)

            # Send the patch event to all subscribers of the resource, and of
            # the resource list.
            for room_name in (uri, uri[0:len(uri) - len(uri.split('/')[-1])]):
                self.socketio.emit('patch', {
                    'uri': uri,
                    'patch': patch
                }, room=room_name)

        @socketio.on('delete', namespace=self.namespace)
        def handle_delete(payload):
            # Retrieve request arguments.
            if 'uri' not in payload:
                raise InvalidRequestError('missing URI')
            uri = payload['uri']

            # Search for a matching route.
            try:
                deleter, kwargs = self.urls.match(uri, method='DELETE')
            except HTTPException:
                # No registered resource deleter for this uri.
                raise InvalidRequestError("no registered resource deleter for '%s'" % uri)

            # Delete the resource.
            resource = deleter(**kwargs)

            # Send the deletion event to all subscribers of the resource, and
            # of the resource list.
            for room_name in (uri, uri[0:len(uri) - len(uri.split('/')[-1])]):
                self.socketio.emit('delete', {
                    'uri': uri
                }, room=room_name)

        @socketio.on('subscribe', namespace=self.namespace)
        def handle_subscribe(uri):
            # Try to retrieve the subscribed resource, so that we can send its
            # current state to the subscriber.
            try:
                getter, kwargs = self.urls.match(uri, method='GET')
                resource = getter(**kwargs)
            except HTTPException:
                resource = None

            if resource is not None:
                self.socketio.emit('state', {
                    'uri': uri,
                    'resource': resource
                }, room=request.sid)

            join_room(uri)

        @socketio.on('unsubscribe', namespace=self.namespace)
        def handle_unsubscribe(uri):
            leave_room(uri)

        @socketio.on_error(self.namespace)
        def handle_error(e):
            if isinstance(e, SocketAPIError):
                # Instances of SocketAPIError are forwarded to the client.
self.socketio.emit('api_error', { 'error': e.__class__.__name__, 'message': str(e) }, room=request.sid) else: # Other errors are considered server errors and should not be # forwarded to the client, except in debug mode. self.socketio.emit('server_error', { 'error': e.__class__.__name__, 'message': str(e) if current_app.debug else None }, room=request.sid) # Log the error. current_app.logger.exception(e) def resource_creator(self, rule): # Make sure the given rule corresponds to a list uri. if not rule.endswith('/'): raise InvalidURIError('resource creators should be registered on list uri') def decorate(fn): @wraps(fn) def decorated(*args, **kwargs): return fn(*args, **kwargs) # Register a new POST route for the given rule. self.routes.add(Rule(rule, endpoint=decorated, methods=['POST'])) return decorated return decorate def resource_getter(self, rule): def decorate(fn): @wraps(fn) def decorated(*args, **kwargs): return fn(*args, **kwargs) # Register a new GET route for the given rule. self.routes.add(Rule(rule, endpoint=decorated, methods=['GET'])) return decorated return decorate def resource_patcher(self, rule): # Make sure the rule doesn't correspond to a list. if rule.endswith('/'): raise InvalidURIError('cannot register resource patchers on a list uri') def decorate(fn): @wraps(fn) def decorated(*args, **kwargs): return fn(*args, **kwargs) # Check if there already is a route to catch patch requests on the # given rule. for route in self.routes.iter_rules(): if (route.rule == rule) and ('PATCH' in route.methods): break else: # Register a new PATCH route for the given rule. self.routes.add(Rule(rule, methods=['PATCH'])) # Register the given patch handler. if rule not in self.patch_handlers: self.patch_handlers[rule] = [] self.patch_handlers[rule].append(decorated) return decorated return decorate def resource_deleter(self, rule): # Make sure the rule doesn't correspond to a list. if rule.endswith('/'): raise InvalidURIError('cannot register resource deleters on a list uri') def decorate(fn): @wraps(fn) def decorated(*args, **kwargs): return fn(*args, **kwargs) # Register a new DELETE route for the given rule. self.routes.add(Rule(rule, endpoint=decorated, methods=['DELETE'])) return decorated return decorate
{ "repo_name": "kyouko-taiga/Flask-SocketAPI", "path": "flask_socketapi/socketapi.py", "copies": "1", "size": "7830", "license": "mit", "hash": 918672835432936700, "line_mean": 35.4186046512, "line_max": 89, "alpha_frac": 0.5481481481, "autogenerated": false, "ratio": 4.7111913357400725, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5759339483840074, "avg_score": null, "num_lines": null }
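A hedged wiring sketch for the class above; the in-memory `items` store and the route names are made up for illustration:

from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)
api = SocketAPI(socketio)

items = {}

@api.resource_creator('/items/')   # creators must be registered on a list URI
def create_item(**attributes):
    item_id = len(items) + 1
    items[item_id] = dict(attributes, id=item_id)
    return items[item_id]

@api.resource_getter('/items/<int:item_id>')
def get_item(item_id):
    return items.get(item_id)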
from functools import wraps import anyjson from tornado.web import RequestHandler, HTTPError from celery import states from celery.task.control import revoke from celery.events.state import state def JSON(fun): @wraps(fun) def _write_json(self, *args, **kwargs): content = fun(self, *args, **kwargs) self.write(anyjson.serialize(content)) return _write_json class APIHandler(RequestHandler): def __init__(self, *args, **kwargs): super(APIHandler, self).__init__(*args, **kwargs) self.set_header("Content-Type", "application/javascript") def api_handler(fun): @JSON def get(self, *args, **kwargs): return fun(self, *args, **kwargs) return type(fun.__name__, (APIHandler, ), {"get": get}) @api_handler def task_state(request, task_id): task = state.tasks[task_id] if task.state in states.EXCEPTION_STATES: return task.info(extra=["traceback"]) return task.info() @api_handler def list_tasks(request): limit = request.get_argument("limit", None) limit = limit and int(limit) or None return state.tasks_by_timestamp(limit=limit) @api_handler def list_tasks_by_name(request, name): limit = request.get_argument("limit", None) limit = limit and int(limit) or None return state.tasks_by_type(name, limit=limit) @api_handler def list_task_types(request): return state.task_types() @api_handler def list_workers(request): return state.alive_workers() @api_handler def show_worker(request, node_name): try: return state.workers[node_name] except KeyError: raise HTTPError(404, "Unknown worker node: %s" % (node_name, )) @api_handler def list_worker_tasks(request, hostname): limit = request.get_argument("limit", None) limit = limit and int(limit) or None return state.tasks_by_worker(hostname, limit=limit) class RevokeTaskHandler(APIHandler): SUPPORTED_METHODS = ["POST"] @JSON def post(self): task_id = self.get_argument("task_id") revoke(task_id) return {"ok": True} API = [ (r"/task/name/$", list_task_types), (r"/task/name/(.+?)/?", list_tasks_by_name), (r"/task/$", list_tasks), (r"/revoke/task/", RevokeTaskHandler), (r"/task/(.+)/?", task_state), (r"/worker/", list_workers), (r"/worker/(.+?)/tasks/?", list_worker_tasks), (r"/worker/(.+?)/?", show_worker), ]
{ "repo_name": "frac/celerymon", "path": "celerymonitor/handlers/api.py", "copies": "1", "size": "2441", "license": "bsd-3-clause", "hash": 2991873580055679500, "line_mean": 22.2476190476, "line_max": 71, "alpha_frac": 0.6374436706, "autogenerated": false, "ratio": 3.418767507002801, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9556211177602801, "avg_score": 0, "num_lines": 105 }
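A sketch of serving the route table above with Tornado (the port number is arbitrary, and celery must already be configured for the event state to populate):

from tornado.ioloop import IOLoop
from tornado.web import Application

application = Application(API)
application.listen(8989)
IOLoop.instance().start()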
from functools import wraps
import asyncio

try:
    from aiomeasures import StatsD
except ImportError:
    StatsD = None

from alamo_common.conf import AlamoSettings
from alamo_common import stats

settings = AlamoSettings()

_aiostatsd = None


def setup():
    global _aiostatsd
    host = 'udp://{}:{}'.format(settings.STATSD_HOST, settings.STATSD_PORT)
    try:
        _aiostatsd = StatsD(
            addr=host,
            prefix=settings.STATSD_PREFIX
        )
    except TypeError:
        raise RuntimeError('Could not use StatsD from aiomeasures.')


def get_aiostatsd():
    if _aiostatsd is None:
        setup()
    return _aiostatsd


class increment(stats.StatMixin):
    """Statsd decorator and context manager to `incr` a stat.

    >>> from alamo_common.aiostats import increment
    ...
    ... @increment('stat_name')
    ... def incremented():
    ...     pass
    ...
    ... with increment(metric_name='stat'):
    ...     pass
    ...
    ... increment.incr('stat', value=2)
    """

    def __call__(self, f):
        if asyncio.iscoroutinefunction(f):
            @wraps(f)
            async def wrapped(*args, **kwargs):
                get_aiostatsd().incr(self.get_metric(f.__name__), 1)
                return await f(*args, **kwargs)
        else:
            @wraps(f)
            def wrapped(*args, **kwargs):
                get_aiostatsd().incr(self.get_metric(f.__name__), 1)
                return f(*args, **kwargs)

        return wrapped

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        get_aiostatsd().incr(self.metric_name or 'unknown', 1)

    @classmethod
    def incr(cls, stat, value=1, hostname=None):
        """Increment a stat by `value`.

        :param stat: stat name
        :param value: value
        :param hostname: if set, the stat is sent with a hostname prefix
        """
        klass = cls(metric_name=stat, hostname=hostname)
        get_aiostatsd().incr(klass.get_metric(''), value=value)

    @classmethod
    def decr(cls, stat, value=1, hostname=None):
        """Decrement a stat by `value`.

        :param stat: stat name
        :param value: value
        :param hostname: if set, the stat is sent with a hostname prefix
        """
        klass = cls(metric_name=stat, hostname=hostname)
        get_aiostatsd().decr(klass.get_metric(''), value=value)


class timer(stats.StatMixin):
    """Statsd decorator and context manager to time method execution.

    >>> from alamo_common.aiostats import timer
    ...
    ... @timer(hostname=True)
    ... def timed_method():
    ...     # do something
    ...
    ... with timer(metric_name='timed_stat'):
    ...     # do something
    ...
    ... timing = timer.start('timed_stat')
    ... # do something
    ... timing.stop()
    """
    timer = None

    def __call__(self, f):
        if asyncio.iscoroutinefunction(f):
            @wraps(f)
            async def wrapped(*args, **kwargs):
                with get_aiostatsd().timer(self.get_metric(f.__name__)):
                    return await f(*args, **kwargs)
        else:
            @wraps(f)
            def wrapped(*args, **kwargs):
                with get_aiostatsd().timer(self.get_metric(f.__name__)):
                    return f(*args, **kwargs)

        return wrapped

    def __enter__(self):
        self.timer = get_aiostatsd().timer(self.metric_name)
        self.timer.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.timer.stop()

    @classmethod
    def start(cls, metric_name, hostname=None):
        """Manually start timer."""
        klass = cls(metric_name=metric_name, hostname=hostname)
        klass.timer = get_aiostatsd().timer(klass.get_metric(''))
        return klass

    def stop(self):
        """Stop and send stats to statsd."""
        if self.timer is None:
            raise RuntimeError('Timer has not been created.')
        self.timer.stop()
{ "repo_name": "RulersOfAsgard/alamo-common", "path": "alamo_common/aiostats.py", "copies": "1", "size": "3978", "license": "apache-2.0", "hash": 2276079599042615600, "line_mean": 26.625, "line_max": 72, "alpha_frac": 0.5643539467, "autogenerated": false, "ratio": 3.8734177215189876, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.49377716682189876, "avg_score": null, "num_lines": null }
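A usage sketch of the decorator and context-manager forms above; the stat names are illustrative, and the StatMixin keyword arguments are assumed from the docstrings:

@increment('checks.fetched')
async def fetch_check():
    ...

@timer(hostname=True)
async def evaluate_check():
    ...

def flush_cache():
    with timer(metric_name='cache.flush'):
        ...

increment.incr('checks.errors', value=2)  # ad-hoc counter, as in the docstring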
from functools import wraps import asyncpg from .transactionmanager import ConnectionTransactionContextManager from .connection import SAConnection as _SAConnection @wraps(asyncpg.create_pool) def create_pool(*args, dialect=None, connection_class=_SAConnection, **connect_kwargs): class SAConnection(connection_class): def __init__(self, *args, dialect=dialect, **kwargs): super().__init__(*args, dialect=dialect, **kwargs) connection_class = SAConnection # dict is fine on the pool object as there is usually only one of them # asyncpg.pool.Pool.__slots__ += ('__dict__',) # monkey patch pool to have some extra methods def transaction(self, **kwargs): return ConnectionTransactionContextManager(self, **kwargs) asyncpg.pool.Pool.transaction = transaction asyncpg.pool.Pool.begin = transaction pool = asyncpg.create_pool(*args, connection_class=connection_class, **connect_kwargs) return pool
{ "repo_name": "CanopyTax/asyncpgsa", "path": "asyncpgsa/pool.py", "copies": "1", "size": "1049", "license": "apache-2.0", "hash": 6315420214096158000, "line_mean": 32.8387096774, "line_max": 74, "alpha_frac": 0.6663489037, "autogenerated": false, "ratio": 4.281632653061225, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5447981556761224, "avg_score": null, "num_lines": null }
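A hedged sketch of the patched pool in use; the DSN is a placeholder, and `pool.transaction()` is the context manager monkey-patched onto asyncpg's Pool above:

import asyncio
from asyncpgsa.pool import create_pool

async def main():
    pool = await create_pool(dsn='postgresql://user:pass@localhost/db')
    async with pool.transaction() as conn:  # BEGIN/COMMIT around the block
        rows = await conn.fetch('SELECT 1')

asyncio.get_event_loop().run_until_complete(main())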
from functools import wraps import brewer2mpl import numpy as np import matplotlib as mpl from matplotlib import cm # Get Set2 from ColorBrewer, a set of colors deemed colorblind-safe and # pleasant to look at by Drs. Cynthia Brewer and Mark Harrower of Pennsylvania # State University. These colors look lovely together, and are less # saturated than those colors in Set1. For more on ColorBrewer, see: # - Flash-based interactive map: # http://colorbrewer2.org/ # - A quick visual reference to every ColorBrewer scale: # http://bl.ocks.org/mbostock/5577023 #class Common(object): # def __init__(self): set2 = brewer2mpl.get_map('Set2', 'qualitative', 8).mpl_colors # Another ColorBrewer scale. This one has nice "traditional" colors like # reds and blues set1 = brewer2mpl.get_map('Set1', 'qualitative', 9).mpl_colors # A colormapcycle for stacked barplots stackmaps = [brewer2mpl.get_map('YlGn', 'sequential', 8).mpl_colormap, brewer2mpl.get_map('YlOrRd', 'sequential', 8).mpl_colormap] # This context-decorator makes it possible to change the color cycle inside # prettyplotlib without affecting pyplot class _pretty: rcParams = {'axes.color_cycle': set2, 'lines.linewidth': .75} mpl_contexts = [] def __call__(self, func): @wraps(func) def wrapper(*args, **kwargs): with self: return func(*args, **kwargs) return wrapper def __enter__(self): context = mpl.rc_context(rc=self.rcParams) self.mpl_contexts.append(context) return context.__enter__() def __exit__(self, *args): return self.mpl_contexts.pop().__exit__(*args) pretty = _pretty() # This function returns a colorlist for barplots def getcolors(cmap, yvals, n): if isinstance(cmap, bool): cmap = stackmaps[n%len(stackmaps)] return [cmap( abs(int((float(yval)/np.max(yvals))*cmap.N) )) for yval in yvals] # Set some commonly used colors almost_black = '#262626' light_grey = np.array([float(248) / float(255)] * 3) reds = cm.Reds reds.set_bad('white') reds.set_under('white') blues_r = cm.Blues_r blues_r.set_bad('white') blues_r.set_under('white') # Need to 'reverse' red to blue so that blue=cold=small numbers, # and red=hot=large numbers with '_r' suffix blue_red = brewer2mpl.get_map('RdBu', 'Diverging', 11, reverse=True).mpl_colormap
{ "repo_name": "olgabot/prettyplotlib", "path": "prettyplotlib/colors.py", "copies": "1", "size": "2394", "license": "mit", "hash": -6659177050495764000, "line_mean": 30.5, "line_max": 83, "alpha_frac": 0.6766917293, "autogenerated": false, "ratio": 3.1541501976284585, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9313217350641854, "avg_score": 0.003524915257320951, "num_lines": 76 }
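A sketch using the helpers above: `pretty` as a context manager for the Set2 color cycle, and `getcolors` with a truthy cmap to shade bars by height from a stackmap:

import matplotlib.pyplot as plt
from prettyplotlib.colors import getcolors, pretty

heights = [3, 7, 12, 5]
with pretty:
    colors = getcolors(True, heights, n=0)  # True picks stackmaps[n % len(stackmaps)]
    plt.bar(range(len(heights)), heights, color=colors)
    plt.savefig('bars.png')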
from functools import wraps

import bugsnag
import sendgrid

from . import app, meetup_oauth, sendgrid_api
from flask import render_template, redirect, url_for, request, session, flash
from flask.ext.login import current_user, login_required, login_user, logout_user

from .forms import (VenueEditForm, VenueClaimForm, RequestForSpaceForm,
    UserProfileForm, RequestForSpaceInitial, VenueSearchForm)
from .logic import sync_user, get_unclaimed_venues, get_users_venues, get_groups, get_events, get_venues, event_cmp
from .models import User, Group, Venue, Event, login_manager


def skip_if_logged_in(func):
    """Decorator for functions in the login flow that skips to the
    destination if the user is already logged in.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if current_user.is_authenticated():
            return redirect(url_for('user_profile'))
        return func(*args, **kwargs)
    return wrapper


@app.route('/clear/')
def clear():
    session.clear()
    return redirect('/')


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/have/', methods=('GET', 'POST'))
def have():
    form = VenueSearchForm(request.form)
    if request.method == 'POST' and form.validate():
        name = form.name.data or None
        location = None
        if form.longitude.data and form.latitude.data:
            location = [float(form.longitude.data), float(form.latitude.data)]
        venues = get_unclaimed_venues(name=name, location=location)
    else:
        venues = ()
    return render_template('have.html', form=form, venues=venues)


@app.route('/login/')
@app.route('/login/<string:service>/', methods=('GET', 'POST'))
@skip_if_logged_in
def login(service=''):
    if service:
        return meetup_oauth.authorize(callback=url_for('login_meetup_return'))
    else:
        return render_template('redirect_to_meetup.html')


@app.route('/login/meetup/return/', methods=('GET',))
@meetup_oauth.authorized_handler
@skip_if_logged_in
def login_meetup_return(oauth_response):
    session['meetup_token'] = (
        oauth_response['oauth_token'],
        oauth_response['oauth_token_secret']
    )
    session['member_id'] = oauth_response['member_id']
    return render_template('login.html')


@app.route('/login/sync/', methods=('GET',))
@skip_if_logged_in
def login_sync():
    user = User.with_id(session["member_id"])
    sync_user(user)
    login_user(user)
    redirect_to = session.pop('login_redirect', url_for('user_profile'))
    return redirect(redirect_to)


@app.route('/logout/')
def logout():
    session.pop('meetup_token', None)
    session.pop('member_id', None)
    session.pop('login_redirect', None)
    logout_user()
    return redirect(url_for('.index'))


@app.route('/need/')
@login_required
def need():
    user = User(_id=int(session['member_id'])).load()
    groups = get_groups({'_id': {'$in': user.organizer_of}})
    return render_template('need.html',
        user=user,
        groups=groups,
    )


@app.route('/need/group/<int:group_id>/')
@login_required
def need_event(group_id):
    user = User(_id=int(session['member_id'])).load()
    group = Group(_id=group_id).load()
    events = list(get_events({'group_id': group._id}))
    events.sort(event_cmp)
    return render_template('need.html',
        user=user,
        group=group,
        events=events,
    )


@app.route('/need/group/<int:group_id>/event/<event_id>/')
@login_required
def need_venue(group_id, event_id):
    user = User(_id=int(session['member_id'])).load()
    group = Group(_id=group_id).load()
    event = Event(_id=event_id).load()
    all_venues = get_venues({
        'loc': {'$near': user.loc},
        'claimed': True,
        'deleted': False,
    })
    return render_template('need.html',
        user=user,
        group=group,
        event=event,
        all_venues=all_venues,
    )


@app.route('/need/group/<int:group_id>/event/<event_id>/request/',
           methods=('POST',))
@login_required
def need_request(group_id, event_id, form=None):
    venue_ids = request.form.getlist('venue_id', type=int)
    if not venue_ids:
        flash(u'You need to pick at least one venue!', 'warning')
        return redirect(url_for('need_venue', group_id=group_id,
                                event_id=event_id))

    user = User(_id=int(session['member_id'])).load()
    group = Group(_id=group_id).load()
    event = Event(_id=event_id).load()
    all_venues = get_venues({
        'loc': {'$near': user.loc},
        'claimed': True,
        'deleted': False,
    })

    picked_venues = []
    venue_ids = set(venue_ids)
    for venue in all_venues:
        if venue._id in venue_ids:
            picked_venues.append(venue)

    initial = RequestForSpaceInitial(user, event, group)
    request_form = form or RequestForSpaceForm(obj=initial)

    return render_template('need.html',
        user=user,
        group=group,
        event=event,
        picked_venues=picked_venues,
        request_form=request_form,
        event_size_known=hasattr(event, 'rsvp_limit'),
        event_time_known=hasattr(event, 'time'),
    )


@app.route('/need/group/<int:group_id>/event/<event_id>/request/submit/',
           methods=('POST',))
@login_required
def need_request_submit(group_id, event_id):
    user = User(_id=int(session['member_id'])).load()
    group = Group(_id=group_id).load()
    event = Event(_id=event_id).load()

    initial = RequestForSpaceInitial(user, event, group)
    form = RequestForSpaceForm(request.form, obj=initial)
    if not form.validate():
        flash(u'There were errors with the form', 'error')
        return need_request(group_id, event_id, form=form)

    venues = get_venues({
        '_id': {'$in': map(int, request.form.getlist('venue_id'))}})

    def evaluate_body(venue):
        body = form.body.data
        body = body.replace('{{host}}', venue.contact['name'])
        body = body.replace('{{venue_name}}', venue.name)
        return body

    num_venues = 0
    for venue in venues:
        num_venues += 1
        recipient = venue.contact['email']
        body = evaluate_body(venue)

        message = sendgrid.Message(
            addr_from=form.email.data,
            subject="WhereToMeetup Request for Use of Your Space",
            text=body)
        message.add_to(recipient)
        sendgrid_api.smtp.send(message)

    if num_venues > 1:
        flash(u'The hosts have been notified of your request', 'info')
    else:
        flash(u'The host has been notified of your request', 'info')

    return redirect(url_for('index'))


@app.route('/account/', methods=('GET', 'POST'))
@login_required
def user_profile():
    user = User(_id=int(session['member_id'])).load()
    form = UserProfileForm(request.form, obj=user)
    if request.method == 'POST' and form.validate():
        user.update_profile(email=form.email.data, phone=form.phone.data)
        flash('Your profile has been updated', 'success')
        return redirect(url_for('user_profile'))
    return render_template('account/profile.html', user=user, form=form)


@app.route('/space/<int:_id>/claim/', methods=('GET', 'POST'))
@login_required
def venue_claim(_id):
    def get_contact_field(attr):
        """Return an attribute for a venue's contact.

        If the venue already has contact information associated with it, the
        value stored in the document will be used. If not, the contact
        information from the current user will be used instead.
        """
        value = getattr(user, attr, None)
        if hasattr(venue, 'contact'):
            value = venue.contact.get(attr, value)
        return value

    venue = Venue(_id=_id).load()
    user = User(_id=int(session['member_id'])).load()

    # If the user has no email or phone number and the venue does, place
    # them on the user for the purpose of prepopulating the form.
    if not getattr(user, 'email', None) and getattr(venue, 'email', None):
        user.email = venue.email
    if not getattr(user, 'phone', None) and getattr(venue, 'phone', None):
        user.phone = venue.phone

    # There are different forms for editing and claiming a venue. Use the
    # right one.
    if venue.claimed:
        form_class = VenueEditForm
    else:
        form_class = VenueClaimForm

    # Check for current contact information linked to the venue. For any fields
    # that don't have a value, use the values associated with the user doing
    # the claiming.
    venue.contact_name = get_contact_field('name')
    venue.contact_email = get_contact_field('email')
    venue.contact_phone = get_contact_field('phone')

    form = form_class(request.form, obj=venue)
    if request.method == 'POST' and form.validate():
        venue.claim(contact_name=form.contact_name.data,
                    contact_email=form.contact_email.data,
                    contact_phone=form.contact_phone.data,
                    user_id=user._id, capacity=form.capacity.data,
                    need_names=form.need_names.data, food=form.food.data,
                    av=form.av.data, chairs=form.chairs.data,
                    instructions=form.instructions.data)
        flash('Thank you for %s %s' % (
            'updating' if venue.claimed else 'claiming', venue.name),
            'success')
        return redirect(url_for('venues_for_user'))
    return render_template('venue/claim.html', venue=venue, form=form)


@app.route('/account/spaces/')
@login_required
def venues_for_user():
    user = User(_id=int(session['member_id'])).load()
    venues = get_users_venues(user_id=user._id)
    return render_template('account/venues.html', venues=venues)


@meetup_oauth.tokengetter
def get_meetup_token():
    return session.get('meetup_token')


@login_manager.unauthorized_handler
def login_prompt():
    session['login_redirect'] = request.path
    return redirect(url_for('login'))


@app.errorhandler(500)
def internal_server_error(error):
    bugsnag.notify(
        error,
        context=request.path,
        user=session.get('member_id', '<anon>'),
    )
    return render_template('errors/500.html')


@app.errorhandler(404)
def not_found(error):
    return render_template('errors/404.html')
{ "repo_name": "NYCPython/wheretomeetup", "path": "meetups/views.py", "copies": "1", "size": "10073", "license": "bsd-3-clause", "hash": 4135505931576620000, "line_mean": 30.478125, "line_max": 115, "alpha_frac": 0.6436017075, "autogenerated": false, "ratio": 3.405341446923597, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4548943154423597, "avg_score": null, "num_lines": null }
from functools import wraps import cantal from cantal_tools.metrics import appflow requests = cantal.RequestTracker('requests') post = cantal.RequestTracker('post') appflow.ensure_branches('redis') appflow.ensure_branches('mongo') def request_tracking_middleware(wsgi_app): def middleware(environ, start_response): method = environ['REQUEST_METHOD'] if method == 'POST': with requests.request(), post.request(): return wsgi_app(environ, start_response) else: with requests.request(): return wsgi_app(environ, start_response) return middleware def patch_redis(redis, pipeline): """Wraps `execute_command` method.""" redis_real_execute = redis.execute_command pipeline_real_execute = pipeline.execute @wraps(redis_real_execute) def redis_execute_with_metrics(*args, **options): with appflow.redis.context(): return redis_real_execute(*args, **options) @wraps(pipeline_real_execute) def pipeline_execute_with_metrics(*args, **options): with appflow.redis.context(): return pipeline_real_execute(*args, **options) redis.execute_command = redis_execute_with_metrics pipeline.execute = pipeline_execute_with_metrics return redis def patch_mongo(mongo): """Wraps `_send_message_with_response` method.""" real_func = mongo._send_message_with_response @wraps(real_func) def execute_with_metrics(*args, **options): with appflow.mongo.context(): return real_func(*args, **options) mongo._send_message_with_response = execute_with_metrics return mongo
{ "repo_name": "uvNikita/appstats", "path": "appstats/metrics.py", "copies": "1", "size": "1666", "license": "mit", "hash": -1003079101688768300, "line_mean": 29.2909090909, "line_max": 60, "alpha_frac": 0.6704681873, "autogenerated": false, "ratio": 3.976133651551313, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 55 }
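A minimal sketch of wiring the middleware above into a WSGI stack; any WSGI callable works, the Flask pattern is just one example:

from flask import Flask

app = Flask(__name__)
app.wsgi_app = request_tracking_middleware(app.wsgi_app)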
from functools import wraps

import django

from django.core.management import get_commands, load_command_class

try:
    from importlib import import_module
except ImportError:
    from django.utils.importlib import import_module

from kronos.settings import PROJECT_MODULE, KRONOS_PYTHON, KRONOS_MANAGE, \
    KRONOS_PYTHONPATH, KRONOS_POSTFIX, KRONOS_PREFIX
from django.conf import settings
from kronos.utils import read_crontab, write_crontab, delete_crontab
from kronos.version import __version__

import six

try:
    from django.utils.module_loading import autodiscover_modules

    def load():
        """
        Load ``cron`` modules for applications listed in ``INSTALLED_APPS``.
        """
        autodiscover_modules('cron')

        if '.' in PROJECT_MODULE.__name__:
            try:
                import_module('%s.cron' % '.'.join(
                    PROJECT_MODULE.__name__.split('.')[0:-1]))
            except ImportError as e:
                if 'No module named' not in str(e):
                    print(e)

        # load django tasks
        for cmd, app in get_commands().items():
            try:
                load_command_class(app, cmd)
            except django.core.exceptions.ImproperlyConfigured:
                pass

except ImportError:
    def load():
        """
        Load ``cron`` modules for applications listed in ``INSTALLED_APPS``.
        """
        paths = ['%s.cron' % PROJECT_MODULE.__name__]

        if '.' in PROJECT_MODULE.__name__:
            paths.append('%s.cron' % '.'.join(
                PROJECT_MODULE.__name__.split('.')[0:-1]))

        for application in settings.INSTALLED_APPS:
            paths.append('%s.cron' % application)

        # load kronos tasks
        for p in paths:
            try:
                import_module(p)
            except ImportError as e:
                if 'No module named' not in str(e):
                    print(e)

        # load django tasks
        for cmd, app in get_commands().items():
            load_command_class(app, cmd)


tasks = []


def register(schedule, *args, **kwargs):
    def decorator(function):
        global tasks

        passed_args = []
        if "args" in kwargs:
            for key, value in six.iteritems(kwargs["args"]):
                if isinstance(value, dict):
                    raise TypeError('Parse for dict arguments not yet implemented.')
                if isinstance(value, list):
                    temp_args = ",".join(map(str, value))
                    passed_args.append("{}={}".format(key, temp_args))
                else:
                    if value is None:
                        arg_text = "{}"
                    elif isinstance(value, str):
                        arg_text = '{} "{}"'
                    else:
                        arg_text = '{} {}'
                    passed_args.append(arg_text.format(key, value))

        if hasattr(function, 'handle'):
            # django command
            function.cron_expression = '%(schedule)s %(prefix)s %(python)s %(manage)s ' \
                '%(task)s %(passed_args)s --settings=%(settings_module)s %(postfix)s' \
                '$KRONOS_BREAD_CRUMB' % {
                    'schedule': schedule,
                    'prefix': KRONOS_PREFIX,
                    'python': KRONOS_PYTHON,
                    'manage': KRONOS_MANAGE,
                    'task': function.__module__.split('.')[-1],
                    'passed_args': " ".join(passed_args),
                    'settings_module': settings.SETTINGS_MODULE,
                    'postfix': KRONOS_POSTFIX
                }
            task = dict(name=function.__module__.split('.')[-1],
                        django_command=True, fn=function)
        else:
            function.cron_expression = '%(schedule)s %(prefix)s %(python)s %(manage)s ' \
                'runtask %(task)s %(passed_args)s --settings=%(settings_module)s ' \
                '%(postfix)s $KRONOS_BREAD_CRUMB' % {
                    'schedule': schedule,
                    'prefix': KRONOS_PREFIX,
                    'python': KRONOS_PYTHON,
                    'manage': KRONOS_MANAGE,
                    'task': function.__name__,
                    'passed_args': " ".join(passed_args),
                    'settings_module': settings.SETTINGS_MODULE,
                    'postfix': KRONOS_POSTFIX
                }
            task = dict(name=function.__name__, django_command=False,
                        fn=function)

        if KRONOS_PYTHONPATH is not None:
            function.cron_expression += ' --pythonpath=%s' % KRONOS_PYTHONPATH

        tasks.append(task)

        @wraps(function)
        def wrapper(*args, **kwargs):
            return function(*args, **kwargs)

        return wrapper
    return decorator


def install():
    """
    Register tasks with cron.
""" load() current_crontab = six.u(read_crontab()) new_crontab = '' for task in tasks: new_crontab += '%s\n' % task['fn'].cron_expression write_crontab(current_crontab + new_crontab) def printtasks(): """ Print the tasks that would be installed in the crontab, for debugging purposes. """ load() for task in tasks: print(task['fn'].cron_expression) def find_existing_jobs(current_crontab): new_crontab = '' for line in six.u(current_crontab).split('\n')[:-1]: exp = '%(python)s %(manage)s runtask' % { 'python': KRONOS_PYTHON, 'manage': KRONOS_MANAGE, } if not ('$KRONOS_BREAD_CRUMB' in line and exp in line): new_crontab += '%s\n' % line return new_crontab def uninstall(): """ Uninstall tasks from cron. """ current_crontab = read_crontab() new_crontab = find_existing_jobs(current_crontab) if new_crontab: write_crontab(new_crontab) else: delete_crontab() def reinstall(): uninstall() install()
{ "repo_name": "jeanbaptistelab/django-kronos", "path": "kronos/__init__.py", "copies": "1", "size": "5982", "license": "mit", "hash": 8002084115886691000, "line_mean": 30.4842105263, "line_max": 89, "alpha_frac": 0.5222333668, "autogenerated": false, "ratio": 4.221594918842626, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5243828285642625, "avg_score": null, "num_lines": null }
from functools import wraps

import django

try:
    from django.db.models.expressions import BaseExpression, F
except ImportError:
    # Django < 1.8
    from django.db.models.expressions import ExpressionNode as BaseExpression, F

from django.db.models.sql.query import Query
from djmoney.models.fields import MoneyField
from django.db.models.query_utils import Q
from moneyed import Money

try:
    from django.utils.encoding import smart_unicode
except ImportError:
    # Python 3
    from django.utils.encoding import smart_text as smart_unicode

from djmoney.utils import get_currency_field_name

try:
    from django.db.models.constants import LOOKUP_SEP
except ImportError:
    # Django < 1.5
    LOOKUP_SEP = '__'

from django.db.models.sql.constants import QUERY_TERMS


def _get_clean_name(name):
    # Get rid of __lt, __gt etc for the currency lookup
    path = name.split(LOOKUP_SEP)
    if path[-1] in QUERY_TERMS:
        return LOOKUP_SEP.join(path[:-1])
    else:
        return name


def _get_field(model, name):
    # Compare version tuples so Django 2.0+ is also caught here.
    if django.VERSION >= (1, 8):
        # Django 1.8+ - can use something like
        # expression.output_field.get_internal_field() == 'Money..'
        raise NotImplementedError("Django 1.8+ support is not implemented.")

    from django.db.models.fields import FieldDoesNotExist

    # Create a fake query object so we can easily work out what field
    # type we are dealing with
    qs = Query(model)
    opts = qs.get_meta()
    alias = qs.get_initial_alias()

    parts = name.split(LOOKUP_SEP)

    # The following is borrowed from the innards of Query.add_filter -
    # it strips out __gt, __exact et al.
    num_parts = len(parts)
    if num_parts > 1 and parts[-1] in qs.query_terms:
        # Traverse the lookup query to distinguish related fields from
        # lookup types.
        lookup_model = model
        for counter, field_name in enumerate(parts):
            try:
                lookup_field = lookup_model._meta.get_field(field_name)
            except FieldDoesNotExist:
                # Not a field. Bail out.
                parts.pop()
                break
            # Unless we're at the end of the list of lookups, let's attempt
            # to continue traversing relations.
            if (counter + 1) < num_parts:
                try:
                    lookup_model = lookup_field.rel.to
                except AttributeError:
                    # Not a related field. Bail out.
                    parts.pop()
                    break

    if (1, 6) <= django.VERSION < (1, 8):
        # Django 1.6-1.7
        field = qs.setup_joins(parts, opts, alias)[0]
    else:
        # Django 1.4-1.5
        field = qs.setup_joins(parts, opts, alias, False)[0]

    return field


def _expand_money_args(model, args):
    """
    Augments args so that they contain _currency lookups - i.e. Q() | Q()
    """
    for arg in args:
        if isinstance(arg, Q):
            for i, child in enumerate(arg.children):
                if isinstance(child, Q):
                    _expand_money_args(model, [child])
                elif isinstance(child, (list, tuple)):
                    name, value = child
                    if isinstance(value, Money):
                        clean_name = _get_clean_name(name)
                        arg.children[i] = Q(*[
                            child,
                            (get_currency_field_name(clean_name), smart_unicode(value.currency))
                        ])
                    if isinstance(value, BaseExpression):
                        field = _get_field(model, name)
                        if isinstance(field, MoneyField):
                            clean_name = _get_clean_name(name)
                            arg.children[i] = Q(*[
                                child,
                                ('_'.join([clean_name, 'currency']), F(get_currency_field_name(value.name)))
                            ])
    return args


def _expand_money_kwargs(model, kwargs):
    """
    Augments kwargs so that they contain _currency lookups.
""" to_append = {} for name, value in kwargs.items(): if isinstance(value, Money): clean_name = _get_clean_name(name) to_append[name] = value.amount to_append[get_currency_field_name(clean_name)] = smart_unicode( value.currency) if isinstance(value, BaseExpression): field = _get_field(model, name) if isinstance(field, MoneyField): clean_name = _get_clean_name(name) to_append['_'.join([clean_name, 'currency'])] = F(get_currency_field_name(value.name)) kwargs.update(to_append) return kwargs def understands_money(model, func): """ Used to wrap a queryset method with logic to expand a query from something like: mymodel.objects.filter(money=Money(100,"USD")) To something equivalent to: mymodel.objects.filter(money=Decimal("100.0), money_currency="USD") """ @wraps(func) def wrapper(*args, **kwargs): args = _expand_money_args(model, args) kwargs = kwargs.copy() kwargs = _expand_money_kwargs(model, kwargs) return func(*args, **kwargs) return wrapper RELEVANT_QUERYSET_METHODS = ['distinct', 'get', 'get_or_create', 'filter', 'exclude'] def add_money_comprehension_to_queryset(model, qs): # Decorate each relevant method with understand_money in the queryset given for attr in RELEVANT_QUERYSET_METHODS: setattr(qs, attr, understands_money(model, getattr(qs, attr))) return qs def money_manager(manager): """ Patches a model manager's get_queryset method so that each QuerySet it returns is able to work on money fields. This allow users of django-money to use other managers while still doing money queries. """ # Need to dynamically subclass to add our behaviour, and then change # the class of 'manager' to our subclass. # Rejected alternatives: # # * A monkey patch that adds things to the manager instance dictionary. # This fails due to complications with Manager._copy_to_model behaviour. # # * Returning a new MoneyManager instance (rather than modifying # the passed in manager instance). This fails for reasons that # are tricky to get to the bottom of - Manager does funny things. class MoneyManager(manager.__class__): def get_queryset(self, *args, **kwargs): # If we are calling code that is pre-Django 1.6, need to # spell it 'get_query_set' s = super(MoneyManager, self) method = getattr(s, 'get_queryset', getattr(s, 'get_query_set', None)) return add_money_comprehension_to_queryset(self.model, method(*args, **kwargs)) # If we are being called by code pre Django 1.6, need # 'get_query_set'. if django.VERSION < (1, 6): get_query_set = get_queryset manager.__class__ = MoneyManager return manager
{ "repo_name": "tsouvarev/django-money", "path": "djmoney/models/managers.py", "copies": "3", "size": "7147", "license": "bsd-3-clause", "hash": -7054097099821306000, "line_mean": 33.1961722488, "line_max": 108, "alpha_frac": 0.5929760739, "autogenerated": false, "ratio": 4.0355731225296445, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0005728376111938001, "num_lines": 209 }
from functools import wraps

import facebook
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.http import (HttpResponse, HttpResponseRedirect,
                         HttpResponseBadRequest)
from django.utils.decorators import available_attrs
from django.utils.http import urlquote
from django.conf import settings

from .exceptions import (MissingSignedRequestException,
                         InvalidSignedRequestException)


def canvas_only(function=None):
    """
    Decorator ensures that a page is only accessed from within a facebook
    application.
    """
    def _dec(view_func):
        def _view(request, *args, **kwargs):
            # Make sure we're receiving a signed_request from facebook
            if not request.POST.get('signed_request'):
                if hasattr(settings, 'FACEBOOK_RAISE_SR_EXCEPTIONS') \
                        and settings.FACEBOOK_RAISE_SR_EXCEPTIONS:
                    raise MissingSignedRequestException(
                        'Signed request token missing'
                    )
                else:
                    return HttpResponseBadRequest(
                        '<h1>400 Bad Request</h1>'
                        '<p>Missing <em>signed_request</em>.</p>'
                    )

            # Parse the request and ensure it's valid
            signed_request = request.POST["signed_request"]
            data = facebook.parse_signed_request(signed_request,
                                                 settings.FACEBOOK_SECRET_KEY)
            if data is False:
                if hasattr(settings, 'FACEBOOK_RAISE_SR_EXCEPTIONS') \
                        and settings.FACEBOOK_RAISE_SR_EXCEPTIONS:
                    raise InvalidSignedRequestException(
                        'Signed request token does not parse'
                    )
                else:
                    return HttpResponseBadRequest(
                        '<h1>400 Bad Request</h1>'
                        '<p>Malformed <em>signed_request</em>.</p>'
                    )

            # If the user has not authorised redirect them
            if not data.get('user_id'):
                scope = getattr(settings, 'FACEBOOK_SCOPE', None)
                auth_url = facebook.auth_url(settings.FACEBOOK_APP_ID,
                                             settings.FACEBOOK_CANVAS_PAGE,
                                             scope)
                markup = ('<script type="text/javascript">'
                          'top.location.href="%s"</script>' % auth_url)
                return HttpResponse(markup)

            # Success so return the view
            return view_func(request, *args, **kwargs)
        return _view
    return _dec(function)


def facebook_required(function=None,
                      redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Decorator for views that checks that the user is logged in, redirecting
    to the log-in page if necessary.
    """
    def _passes_test(test_func, login_url=None,
                     redirect_field_name=REDIRECT_FIELD_NAME):
        if not login_url:
            login_url = settings.LOGIN_URL

        def decorator(view_func):
            def _wrapped_view(request, *args, **kwargs):
                if test_func(request):
                    return view_func(request, *args, **kwargs)
                path = urlquote(request.get_full_path())
                tup = login_url, redirect_field_name, path
                return HttpResponseRedirect('%s?%s=%s' % tup)
            return wraps(view_func,
                         assigned=available_attrs(view_func))(_wrapped_view)
        return decorator

    actual_decorator = _passes_test(
        lambda r: r.facebook,
        redirect_field_name=redirect_field_name
    )

    if function:
        return actual_decorator(function)
    return actual_decorator
{ "repo_name": "pythonforfacebook/django-facebook", "path": "django_facebook/decorators.py", "copies": "1", "size": "3854", "license": "mit", "hash": -8149058800410113000, "line_mean": 36.4174757282, "line_max": 78, "alpha_frac": 0.5492994292, "autogenerated": false, "ratio": 4.841708542713568, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5891007971913568, "avg_score": null, "num_lines": null }
from functools import wraps import flask_marshmallow from flask_restplus import Namespace as OriginalNamespace from flask_restplus.utils import merge from webargs.flaskparser import use_args as use_webargs from werkzeug import exceptions as http_exceptions from .model import Model, DefaultHTTPErrorSchema class Namespace(OriginalNamespace): def _handle_api_doc(self, cls, doc): if doc is False: cls.__apidoc__ = False return ##unshortcut_params_description(doc) ##handle_deprecations(doc) ##for key in 'get', 'post', 'put', 'delete', 'options', 'head', 'patch': ## if key in doc: ## if doc[key] is False: ## continue ## unshortcut_params_description(doc[key]) ## handle_deprecations(doc[key]) ## if 'expect' in doc[key] and not isinstance(doc[key]['expect'], (list, tuple)): ## doc[key]['expect'] = [doc[key]['expect']] cls.__apidoc__ = merge(getattr(cls, '__apidoc__', {}), doc) def resolve_object(self, object_arg_name, resolver): """ A helper decorator to resolve object instance from arguments (e.g. identity). Example: >>> @namespace.route('/<int:user_id>') ... class MyResource(Resource): ... @namespace.resolve_object( ... object_arg_name='user', ... resolver=lambda kwargs: User.query.get_or_404(kwargs.pop('user_id')) ... ) ... def get(self, user): ... # user is a User instance here """ def decorator(func): @wraps(func) def wrapper(*args, **kwargs): kwargs[object_arg_name] = resolver(kwargs) return func(*args, **kwargs) return wrapper return decorator def model(self, name=None, model=None, mask=None, **kwargs): """ Model registration decorator. """ if isinstance(model, flask_marshmallow.Schema): if not name: name = model.__class__.__name__ api_model = Model(name, model, mask=mask) api_model.__apidoc__ = kwargs return self.add_model(name, api_model) return super(Namespace, self).model(name, model, **kwargs) def parameters(self, parameters, locations=None): """ Endpoint parameters registration decorator. """ def decorator(func): if locations is None and parameters.many: _locations = ('json', ) else: _locations = locations if _locations is not None: parameters.context['in'] = _locations return self.doc(params=parameters)( self.response(code=http_exceptions.UnprocessableEntity.code)( use_webargs(parameters, locations=_locations)( func ) ) ) return decorator def response(self, model=None, code=200, description=None, **kwargs): """ Endpoint response OpenAPI documentation decorator. It automatically documents HTTPError%(code)d responses with relevant schemas. Arguments: model (flask_marshmallow.Schema) - it can be a class or an instance of the class, which will be used for OpenAPI documentation purposes. It can be omitted if ``code`` argument is set to an error HTTP status code. code (int) - HTTP status code which is documented. description (str) Example: >>> @namespace.response(BaseTeamSchema(many=True)) ... @namespace.response(code=403) ... def get_teams(): ... if not user.is_admin: ... abort(403) ... return Team.query.all() """ if model is None: if code not in http_exceptions.default_exceptions: raise ValueError("`model` parameter is required for code %d" % code) model = self.model( name='HTTPError%d' % code, model=DefaultHTTPErrorSchema(http_code=code) ) if description is None: if code in http_exceptions.default_exceptions: description = http_exceptions.default_exceptions[code].description def decorator(func_or_class): if code in http_exceptions.default_exceptions: # If the code is handled by raising an exception, it will # produce a response later, so we don't need to apply a dump # wrapper. 
decorated_func_or_class = func_or_class else: def dump_decorator(func): def dump_wrapper(*args, **kwargs): return model.dump(func(*args, **kwargs)).data return dump_wrapper if isinstance(func_or_class, type): # Make a copy of `method_decorators` as otherwise we will # modify the behaviour of all flask-restful.Resource-based # classes func_or_class.method_decorators = ( [dump_decorator] + func_or_class.method_decorators ) decorated_func_or_class = func_or_class else: decorated_func_or_class = wraps(func_or_class)(dump_decorator(func_or_class)) if isinstance(model, Model): api_model = model else: api_model = self.model(model=model) return self.doc( responses={ code: ( description, [api_model] if getattr(model, 'many', False) else api_model ), } )(decorated_func_or_class) return decorator
{ "repo_name": "ssls/beetle-agent", "path": "flask_restplus_patched/namespace.py", "copies": "1", "size": "6025", "license": "mit", "hash": -8273283527665469000, "line_mean": 37.3757961783, "line_max": 97, "alpha_frac": 0.5346058091, "autogenerated": false, "ratio": 4.663312693498452, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5697918502598451, "avg_score": null, "num_lines": null }
from functools import wraps

import flask
from flask import request, render_template

from . import ex_blueprint

"""
The default_session is only used for testing purposes.
Alternatives: 11010001, 34563456
"""
DEFAULT_SESSION = '11010001'
ZEEGUU_LOGIN = 'https://www.zeeguu.unibe.ch/login?next='
ZEEGUU_SESSION = 'sessionID'


def with_session(f):
    """
    Decorator for checking sessionID
        - query string
        - cookie parameter
        - default_session for tests
    Example: http://127.0.0.1:5000/?sessionID=11010001
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        print(request.args.get(ZEEGUU_SESSION))
        request.sessionID = None
        if ZEEGUU_SESSION in request.cookies:
            print("Session is retrieved from cookies")
            request.sessionID = request.cookies.get(ZEEGUU_SESSION)
        else:
            print("Redirecting to Zeeguu login")
            return flask.redirect(ZEEGUU_LOGIN + request.url)
        return f(*args, **kwargs)
    return decorated_function


@ex_blueprint.route('/debug', methods=['GET'])
def set_cookie():
    """
    Test route for setting the cookie, only for local testing
    """
    return render_template('exercises/set_cookie.html')


@ex_blueprint.route('/', defaults={'path': ''})
@ex_blueprint.route('/<path:path>')
@with_session
def index(path):
    """
    Main entry point
    """
    return render_template('exercises/index.html')
{ "repo_name": "martinavagyan/zeeguu-exercises", "path": "src/zeeguu_exercises/endpoints.py", "copies": "1", "size": "1414", "license": "mit", "hash": 4641035168417418000, "line_mean": 23.3793103448, "line_max": 67, "alpha_frac": 0.6605374823, "autogenerated": false, "ratio": 3.672727272727273, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9747057858475549, "avg_score": 0.017241379310344827, "num_lines": 58 }
from functools import wraps import flask import flask_marshmallow from flask_restplus import Namespace as OriginalNamespace from flask_restplus.utils import merge from webargs.flaskparser import parser as webargs_parser from werkzeug import cached_property, exceptions as http_exceptions from ._http import HTTPStatus from .model import Model, DefaultHTTPErrorSchema class Namespace(OriginalNamespace): WEBARGS_PARSER = webargs_parser def _handle_api_doc(self, cls, doc): if doc is False: cls.__apidoc__ = False return ##unshortcut_params_description(doc) ##handle_deprecations(doc) ##for key in 'get', 'post', 'put', 'delete', 'options', 'head', 'patch': ## if key in doc: ## if doc[key] is False: ## continue ## unshortcut_params_description(doc[key]) ## handle_deprecations(doc[key]) ## if 'expect' in doc[key] and not isinstance(doc[key]['expect'], (list, tuple)): ## doc[key]['expect'] = [doc[key]['expect']] cls.__apidoc__ = merge(getattr(cls, '__apidoc__', {}), doc) def resolve_object(self, object_arg_name, resolver): """ A helper decorator to resolve object instance from arguments (e.g. identity). Example: >>> @namespace.route('/<int:user_id>') ... class MyResource(Resource): ... @namespace.resolve_object( ... object_arg_name='user', ... resolver=lambda kwargs: User.query.get_or_404(kwargs.pop('user_id')) ... ) ... def get(self, user): ... # user is a User instance here """ def decorator(func_or_class): if isinstance(func_or_class, type): # Handle Resource classes decoration # pylint: disable=protected-access func_or_class._apply_decorator_to_methods(decorator) return func_or_class @wraps(func_or_class) def wrapper(*args, **kwargs): kwargs[object_arg_name] = resolver(kwargs) return func_or_class(*args, **kwargs) return wrapper return decorator def model(self, name=None, model=None, mask=None, **kwargs): """ Model registration decorator. """ if isinstance(model, flask_marshmallow.Schema): if not name: name = model.__class__.__name__ api_model = Model(name, model, mask=mask) api_model.__apidoc__ = kwargs return self.add_model(name, api_model) return super(Namespace, self).model(name, model, **kwargs) def parameters(self, parameters, locations=None): """ Endpoint parameters registration decorator. """ def decorator(func): if locations is None and parameters.many: _locations = ('json', ) else: _locations = locations if _locations is not None: parameters.context['in'] = _locations return self.doc(params=parameters)( self.response(code=http_exceptions.UnprocessableEntity.code)( self.WEBARGS_PARSER.use_args(parameters, locations=_locations)( func ) ) ) return decorator def response(self, model=None, code=200, description=None, **kwargs): """ Endpoint response OpenAPI documentation decorator. It automatically documents HTTPError%(code)d responses with relevant schemas. Arguments: model (flask_marshmallow.Schema) - it can be a class or an instance of the class, which will be used for OpenAPI documentation purposes. It can be omitted if ``code`` argument is set to an error HTTP status code. code (int) - HTTP status code which is documented. description (str) Example: >>> @namespace.response(BaseTeamSchema(many=True)) ... @namespace.response(code=403) ... def get_teams(): ... if not user.is_admin: ... abort(403) ... 
return Team.query.all()
        """
        ALLOWED_EMPTY_BODY_STATUSES = (HTTPStatus.NO_CONTENT, HTTPStatus.ACCEPTED)

        if model is None and code not in ALLOWED_EMPTY_BODY_STATUSES:
            if code not in http_exceptions.default_exceptions:
                raise ValueError("`model` parameter is required for code %d" % code)
            model = self.model(
                name='HTTPError%d' % code,
                model=DefaultHTTPErrorSchema(http_code=code)
            )
        if description is None:
            if code in http_exceptions.default_exceptions:
                description = http_exceptions.default_exceptions[code].description
            elif code in ALLOWED_EMPTY_BODY_STATUSES:
                description = 'Request fulfilled, nothing follows'

        def response_serializer_decorator(func):
            """
            This decorator handles responses to serialize the returned value
            with a given model.
            """
            def dump_wrapper(*args, **kwargs):
                # pylint: disable=missing-docstring
                response = func(*args, **kwargs)

                if response is None:
                    if code in ALLOWED_EMPTY_BODY_STATUSES:
                        return flask.Response(
                            status=code,
                            content_type='application/json'
                        )
                    raise ValueError("Response must not be empty with code 200")
                elif isinstance(response, flask.Response) or model is None:
                    return response
                elif isinstance(response, tuple):
                    response, _code = response
                else:
                    _code = code

                return model.dump(response).data, _code

            return dump_wrapper

        def decorator(func_or_class):
            if code in http_exceptions.default_exceptions:
                # If the code is handled by raising an exception, it will
                # produce a response later, so we don't need to apply a useless
                # wrapper.
                decorated_func_or_class = func_or_class
            elif isinstance(func_or_class, type):
                # Handle Resource classes decoration
                # pylint: disable=protected-access
                func_or_class._apply_decorator_to_methods(response_serializer_decorator)
                decorated_func_or_class = func_or_class
            else:
                decorated_func_or_class = wraps(func_or_class)(
                    response_serializer_decorator(func_or_class)
                )

            if code in ALLOWED_EMPTY_BODY_STATUSES:
                api_model = None
            else:
                if isinstance(model, Model):
                    api_model = model
                else:
                    api_model = self.model(model=model)
                if getattr(model, 'many', False):
                    api_model = [api_model]

            return self.doc(responses={code: (description, api_model)})(decorated_func_or_class)

        return decorator

    def route(self, *args, **kwargs):
        base_wrapper = super(Namespace, self).route(*args, **kwargs)

        def wrapper(cls):
            if 'OPTIONS' in cls.methods:
                cls.options = self.response(code=204)(cls.options)
            return base_wrapper(cls)

        return wrapper
{ "repo_name": "millen1m/flask-restplus-server-example", "path": "flask_restplus_patched/namespace.py", "copies": "1", "size": "7715", "license": "mit", "hash": -2895260732488225300, "line_mean": 38.1624365482, "line_max": 96, "alpha_frac": 0.5507453014, "autogenerated": false, "ratio": 4.661631419939577, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5712376721339577, "avg_score": null, "num_lines": null }
from functools import wraps import flask import flask_marshmallow from flask_restplus import Namespace as OriginalNamespace from flask_restplus.utils import merge, unpack from flask_restplus._http import HTTPStatus from webargs.flaskparser import parser as webargs_parser from werkzeug import cached_property, exceptions as http_exceptions from .model import Model, DefaultHTTPErrorSchema class Namespace(OriginalNamespace): WEBARGS_PARSER = webargs_parser def _handle_api_doc(self, cls, doc): if doc is False: cls.__apidoc__ = False return ##unshortcut_params_description(doc) ##handle_deprecations(doc) ##for key in 'get', 'post', 'put', 'delete', 'options', 'head', 'patch': ## if key in doc: ## if doc[key] is False: ## continue ## unshortcut_params_description(doc[key]) ## handle_deprecations(doc[key]) ## if 'expect' in doc[key] and not isinstance(doc[key]['expect'], (list, tuple)): ## doc[key]['expect'] = [doc[key]['expect']] cls.__apidoc__ = merge(getattr(cls, '__apidoc__', {}), doc) def resolve_object(self, object_arg_name, resolver): """ A helper decorator to resolve object instance from arguments (e.g. identity). Example: >>> @namespace.route('/<int:user_id>') ... class MyResource(Resource): ... @namespace.resolve_object( ... object_arg_name='user', ... resolver=lambda kwargs: User.query.get_or_404(kwargs.pop('user_id')) ... ) ... def get(self, user): ... # user is a User instance here """ def decorator(func_or_class): if isinstance(func_or_class, type): # Handle Resource classes decoration # pylint: disable=protected-access func_or_class._apply_decorator_to_methods(decorator) return func_or_class @wraps(func_or_class) def wrapper(*args, **kwargs): kwargs[object_arg_name] = resolver(kwargs) return func_or_class(*args, **kwargs) return wrapper return decorator def model(self, name=None, model=None, mask=None, **kwargs): """ Model registration decorator. """ if isinstance(model, (flask_marshmallow.Schema, flask_marshmallow.base_fields.FieldABC)): if not name: name = model.__class__.__name__ api_model = Model(name, model, mask=mask) api_model.__apidoc__ = kwargs return self.add_model(name, api_model) return super(Namespace, self).model(name=name, model=model, **kwargs) def parameters(self, parameters, locations=None): """ Endpoint parameters registration decorator. """ def decorator(func): if locations is None and parameters.many: _locations = ('json', ) else: _locations = locations if _locations is not None: parameters.context['in'] = _locations return self.doc(params=parameters)( self.response(code=HTTPStatus.UNPROCESSABLE_ENTITY)( self.WEBARGS_PARSER.use_args(parameters, locations=_locations)( func ) ) ) return decorator def response(self, model=None, code=HTTPStatus.OK, description=None, **kwargs): """ Endpoint response OpenAPI documentation decorator. It automatically documents HTTPError%(code)d responses with relevant schemas. Arguments: model (flask_marshmallow.Schema) - it can be a class or an instance of the class, which will be used for OpenAPI documentation purposes. It can be omitted if ``code`` argument is set to an error HTTP status code. code (int) - HTTP status code which is documented. description (str) Example: >>> @namespace.response(BaseTeamSchema(many=True)) ... @namespace.response(code=HTTPStatus.FORBIDDEN) ... def get_teams(): ... if not user.is_admin: ... abort(HTTPStatus.FORBIDDEN) ... 
return Team.query.all()
        """
        code = HTTPStatus(code)
        if code is HTTPStatus.NO_CONTENT:
            assert model is None

        if model is None and code not in {HTTPStatus.ACCEPTED, HTTPStatus.NO_CONTENT}:
            if code.value not in http_exceptions.default_exceptions:
                raise ValueError("`model` parameter is required for code %d" % code)
            model = self.model(
                name='HTTPError%d' % code,
                model=DefaultHTTPErrorSchema(http_code=code)
            )
        if description is None:
            description = code.description

        def response_serializer_decorator(func):
            """
            This decorator handles responses to serialize the returned value
            with a given model.
            """
            def dump_wrapper(*args, **kwargs):
                # pylint: disable=missing-docstring
                response = func(*args, **kwargs)
                extra_headers = None

                if response is None:
                    if model is not None:
                        raise ValueError("Response must not be None with HTTP status %d" % code)
                    return flask.Response(status=code)
                elif isinstance(response, flask.Response) or model is None:
                    return response
                elif isinstance(response, tuple):
                    response, _code, extra_headers = unpack(response)
                else:
                    _code = code

                if HTTPStatus(_code) is code:
                    response = model.dump(response).data
                return response, _code, extra_headers

            return dump_wrapper

        def decorator(func_or_class):
            if code.value in http_exceptions.default_exceptions:
                # If the code is handled by raising an exception, it will
                # produce a response later, so we don't need to apply a useless
                # wrapper.
                decorated_func_or_class = func_or_class
            elif isinstance(func_or_class, type):
                # Handle Resource classes decoration
                # pylint: disable=protected-access
                func_or_class._apply_decorator_to_methods(response_serializer_decorator)
                decorated_func_or_class = func_or_class
            else:
                decorated_func_or_class = wraps(func_or_class)(
                    response_serializer_decorator(func_or_class)
                )

            if model is None:
                api_model = None
            else:
                if isinstance(model, Model):
                    api_model = model
                else:
                    api_model = self.model(model=model)
                if getattr(model, 'many', False):
                    api_model = [api_model]

            doc_decorator = self.doc(
                responses={
                    code.value: (description, api_model)
                }
            )
            return doc_decorator(decorated_func_or_class)

        return decorator

    def preflight_options_handler(self, func):

        @wraps(func)
        def wrapper(self, *args, **kwargs):
            if 'Access-Control-Request-Method' in flask.request.headers:
                response = flask.Response(status=HTTPStatus.OK)
                response.headers['Access-Control-Allow-Methods'] = ", ".join(self.methods)
                return response
            return func(self, *args, **kwargs)

        return wrapper

    def route(self, *args, **kwargs):
        base_wrapper = super(Namespace, self).route(*args, **kwargs)

        def wrapper(cls):
            if 'OPTIONS' in cls.methods:
                cls.options = self.preflight_options_handler(
                    self.response(code=HTTPStatus.NO_CONTENT)(cls.options)
                )
            return base_wrapper(cls)

        return wrapper
{ "repo_name": "frol/flask-restplus-server-example", "path": "flask_restplus_patched/namespace.py", "copies": "1", "size": "8353", "license": "mit", "hash": -3171248025667173400, "line_mean": 37.6712962963, "line_max": 98, "alpha_frac": 0.549383455, "autogenerated": false, "ratio": 4.653481894150418, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5702865349150418, "avg_score": null, "num_lines": null }
from functools import wraps import flask import flask_marshmallow from flask_restplus import Namespace as OriginalNamespace from flask_restplus.utils import merge, unpack from webargs.flaskparser import parser as webargs_parser from werkzeug import cached_property, exceptions as http_exceptions from ._http import HTTPStatus from .model import Model, DefaultHTTPErrorSchema class Namespace(OriginalNamespace): WEBARGS_PARSER = webargs_parser def _handle_api_doc(self, cls, doc): if doc is False: cls.__apidoc__ = False return ##unshortcut_params_description(doc) ##handle_deprecations(doc) ##for key in 'get', 'post', 'put', 'delete', 'options', 'head', 'patch': ## if key in doc: ## if doc[key] is False: ## continue ## unshortcut_params_description(doc[key]) ## handle_deprecations(doc[key]) ## if 'expect' in doc[key] and not isinstance(doc[key]['expect'], (list, tuple)): ## doc[key]['expect'] = [doc[key]['expect']] cls.__apidoc__ = merge(getattr(cls, '__apidoc__', {}), doc) def resolve_object(self, object_arg_name, resolver): """ A helper decorator to resolve object instance from arguments (e.g. identity). Example: >>> @namespace.route('/<int:user_id>') ... class MyResource(Resource): ... @namespace.resolve_object( ... object_arg_name='user', ... resolver=lambda kwargs: User.query.get_or_404(kwargs.pop('user_id')) ... ) ... def get(self, user): ... # user is a User instance here """ def decorator(func_or_class): if isinstance(func_or_class, type): # Handle Resource classes decoration # pylint: disable=protected-access func_or_class._apply_decorator_to_methods(decorator) return func_or_class @wraps(func_or_class) def wrapper(*args, **kwargs): kwargs[object_arg_name] = resolver(kwargs) return func_or_class(*args, **kwargs) return wrapper return decorator def model(self, name=None, model=None, mask=None, **kwargs): """ Model registration decorator. """ if isinstance(model, (flask_marshmallow.Schema, flask_marshmallow.base_fields.FieldABC)): if not name: name = model.__class__.__name__ api_model = Model(name, model, mask=mask) api_model.__apidoc__ = kwargs return self.add_model(name, api_model) return super(Namespace, self).model(name=name, model=model, **kwargs) def parameters(self, parameters, locations=None): """ Endpoint parameters registration decorator. """ def decorator(func): if locations is None and parameters.many: _locations = ('json', ) else: _locations = locations if _locations is not None: parameters.context['in'] = _locations return self.doc(params=parameters)( self.response(code=HTTPStatus.UNPROCESSABLE_ENTITY)( self.WEBARGS_PARSER.use_args(parameters, locations=_locations)( func ) ) ) return decorator def response(self, model=None, code=HTTPStatus.OK, description=None, **kwargs): """ Endpoint response OpenAPI documentation decorator. It automatically documents HTTPError%(code)d responses with relevant schemas. Arguments: model (flask_marshmallow.Schema) - it can be a class or an instance of the class, which will be used for OpenAPI documentation purposes. It can be omitted if ``code`` argument is set to an error HTTP status code. code (int) - HTTP status code which is documented. description (str) Example: >>> @namespace.response(BaseTeamSchema(many=True)) ... @namespace.response(code=HTTPStatus.FORBIDDEN) ... def get_teams(): ... if not user.is_admin: ... abort(HTTPStatus.FORBIDDEN) ... 
return Team.query.all()
        """
        code = HTTPStatus(code)
        if code is HTTPStatus.NO_CONTENT:
            assert model is None

        if model is None and code not in {HTTPStatus.ACCEPTED, HTTPStatus.NO_CONTENT}:
            if code.value not in http_exceptions.default_exceptions:
                raise ValueError("`model` parameter is required for code %d" % code)
            model = self.model(
                name='HTTPError%d' % code,
                model=DefaultHTTPErrorSchema(http_code=code)
            )
        if description is None:
            description = code.description

        def response_serializer_decorator(func):
            """
            This decorator handles responses to serialize the returned value
            with a given model.
            """
            def dump_wrapper(*args, **kwargs):
                # pylint: disable=missing-docstring
                response = func(*args, **kwargs)
                extra_headers = None

                if response is None:
                    if model is not None:
                        raise ValueError("Response must not be None with HTTP status %d" % code)
                    return flask.Response(status=code)
                elif isinstance(response, flask.Response) or model is None:
                    return response
                elif isinstance(response, tuple):
                    response, _code, extra_headers = unpack(response)
                else:
                    _code = code

                if HTTPStatus(_code) is code:
                    response = model.dump(response).data
                return response, _code, extra_headers

            return dump_wrapper

        def decorator(func_or_class):
            if code.value in http_exceptions.default_exceptions:
                # If the code is handled by raising an exception, it will
                # produce a response later, so we don't need to apply a useless
                # wrapper.
                decorated_func_or_class = func_or_class
            elif isinstance(func_or_class, type):
                # Handle Resource classes decoration
                # pylint: disable=protected-access
                func_or_class._apply_decorator_to_methods(response_serializer_decorator)
                decorated_func_or_class = func_or_class
            else:
                decorated_func_or_class = wraps(func_or_class)(
                    response_serializer_decorator(func_or_class)
                )

            if model is None:
                api_model = None
            else:
                if isinstance(model, Model):
                    api_model = model
                else:
                    api_model = self.model(model=model)
                if getattr(model, 'many', False):
                    api_model = [api_model]

            doc_decorator = self.doc(
                responses={
                    code.value: (description, api_model)
                }
            )
            return doc_decorator(decorated_func_or_class)

        return decorator

    def preflight_options_handler(self, func):

        @wraps(func)
        def wrapper(self, *args, **kwargs):
            if 'Access-Control-Request-Method' in flask.request.headers:
                response = flask.Response(status=HTTPStatus.OK)
                response.headers['Access-Control-Allow-Methods'] = ", ".join(self.methods)
                return response
            return func(self, *args, **kwargs)

        return wrapper

    def route(self, *args, **kwargs):
        base_wrapper = super(Namespace, self).route(*args, **kwargs)

        def wrapper(cls):
            if 'OPTIONS' in cls.methods:
                cls.options = self.preflight_options_handler(
                    self.response(code=HTTPStatus.NO_CONTENT)(cls.options)
                )
            return base_wrapper(cls)

        return wrapper
{ "repo_name": "Jaza/flask-restplus-patched", "path": "flask_restplus_patched/namespace.py", "copies": "1", "size": "8339", "license": "mit", "hash": -644933325552082600, "line_mean": 37.6064814815, "line_max": 98, "alpha_frac": 0.5487468521, "autogenerated": false, "ratio": 4.656058068118369, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5704804920218369, "avg_score": null, "num_lines": null }
from functools import wraps

import flask
import flask_socketio
import plotly
from plotly.graph_objs import Scatter, Layout

from .. import controllers, exceptions, models, signals
from . import forms


app = flask.Flask(__name__)
socketio = flask_socketio.SocketIO(app)

try:
    app.config.from_pyfile('flask.cfg')
except FileNotFoundError:
    pass

try:
    app.config.from_envvar('WELLO_FLASK_CONFIG_FILE')
except RuntimeError:
    pass


def need_config(func):
    @wraps(func)  # For Flask
    def wrapper(*args, **kwargs):
        if not models.config.is_valid():
            flask.flash("The application needs to be configured.", 'error')
            return flask.redirect(flask.url_for('config'))

        return func(*args, **kwargs)

    return wrapper


@app.route('/')
@need_config
def home():
    volume = models.water_volume.last()
    flow_in = models.water_flow_in.last()
    flow_out = models.water_flow_out.last()
    pump_in_state = models.pump_in_state.last()
    urban_network_state = models.urban_network_state.last()

    return flask.render_template(
        'home.html',
        config=models.config.last(),
        tank=models.config.tank(),
        water_volume=volume.volume if volume is not None else None,
        water_flow_in=flow_in.flow if flow_in is not None else None,
        water_flow_out=flow_out.flow if flow_out is not None else None,
        pump_in_state=pump_in_state.running if pump_in_state is not None else None,
        urban_network_state=urban_network_state.running if urban_network_state is not None else None,
    )


@app.route('/pump_in/<int:running>', methods=['POST'])
@need_config
def pump_in(running):
    try:
        controllers.pump_in(running)
    except exceptions.TankMayOverflow:
        flask.flash('Turning on the pump may cause the tank to overflow.', 'error')

    return flask.redirect(flask.url_for('home'))


@app.route('/config', methods=['GET', 'POST'])
def config():
    if flask.request.method == 'POST':
        form = forms.Config(flask.request.form)
        if form.validate():
            config = models.Config()
            form.populate_obj(config)
            models.save(config)
            flask.flash("Configuration successful.", 'success')
            return flask.redirect(flask.url_for('home'))
    else:
        form = forms.Config(obj=models.config.last())

    return flask.render_template(
        'config.html',
        form=form,
        cuboid_tanks=models.cuboid_tank.all(),
        cylinder_tanks=models.cylinder_tank.all(),
    )


@app.route('/create-cylinder-tank', methods=['GET', 'POST'])
def create_cylinder_tank():
    if flask.request.method == 'POST':
        form = forms.CylinderTank(flask.request.form)
        if form.validate():
            model = models.CylinderTank()
            form.populate_obj(model)
            models.save(model)
            return flask.redirect(flask.url_for('config'))
    else:
        form = forms.CylinderTank()

    return flask.render_template('create_cylinder_tank.html', form=form)


@app.route('/create-cuboid-tank', methods=['GET', 'POST'])
def create_cuboid_tank():
    if flask.request.method == 'POST':
        form = forms.CuboidTank(flask.request.form)
        if form.validate():
            model = models.CuboidTank()
            form.populate_obj(model)
            models.save(model)
            return flask.redirect(flask.url_for('config'))
    else:
        form = forms.CuboidTank()

    return flask.render_template('create_cuboid_tank.html', form=form)


@app.route('/statistics')
def statistics():
    # Flow in
    data = models.water_flow_in.all()
    x = [line.datetime for line in data]
    y = [line.flow / 10**6 for line in data]
    flow_in_plot = plotly.offline.plot({
        "data": [Scatter(x=x, y=y)],
        "layout": Layout(
            title="Input flow",
            xaxis=dict(
                title='Date',
            ),
            yaxis=dict(
                title='Flow (L/s)',
            ),
        )
    }, include_plotlyjs=False, output_type='div', show_link=False,)

    # Tank volume
    data = models.water_volume.all()
    x = [line.datetime for line in data]
    y = [line.volume / 10**6 for line in data]
    tank_volume_plot = 
plotly.offline.plot({ "data": [Scatter(x=x, y=y)], "layout": Layout( title="Tank volume", xaxis=dict( title='Date', ), yaxis=dict( title='Volume (L)', ), ) }, include_plotlyjs=False, output_type='div', show_link=False,) # Flow out data = models.water_flow_out.all() x = [line.datetime for line in data] y = [line.flow / 10**6 for line in data] flow_out_plot = plotly.offline.plot({ "data": [Scatter(x=x, y=y)], "layout": Layout( title="Output flow", xaxis=dict( title='Date', ), yaxis=dict( title='Flow (L/s)', ), ) }, include_plotlyjs=False, output_type='div', show_link=False,) # Urban network data = models.urban_network_state.all() x = [line.datetime for line in data] y = [int(line.running) for line in data] urban_network_plot = plotly.offline.plot({ "data": [Scatter( x=x, y=y, mode='lines', line=dict( shape='hv', ), fill='tozeroy', )], "layout": Layout( title="Urban network", xaxis=dict( title='Date', ), yaxis=dict( title='Used', tickvals=[0, 1], ), ) }, include_plotlyjs=False, output_type='div', show_link=False,) return flask.render_template( 'statistics.html', flow_in=flow_in_plot, flow_out=flow_out_plot, tank_volume=tank_volume_plot, urban_network=urban_network_plot, ) signals.pump_in_state.connect( lambda running, **kwargs: socketio.emit('pump_in_state', {'running': running}) ) signals.urban_network_state.connect( lambda running, **kwargs: socketio.emit('urban_network_state', {'running': running}) ) signals.water_volume_updated.connect( lambda volume, **kwargs: socketio.emit('water_volume', {'volume': volume}) ) signals.water_flow_in_updated.connect( lambda value, **kwargs: socketio.emit('water_flow_in', {'value': value}) ) signals.water_flow_out_updated.connect( lambda value, **kwargs: socketio.emit('water_flow_out', {'value': value}) )
{ "repo_name": "Vayel/wello", "path": "wello/ui/__init__.py", "copies": "1", "size": "6589", "license": "mit", "hash": 8454873397944438000, "line_mean": 27.1367521368, "line_max": 101, "alpha_frac": 0.5855103281, "autogenerated": false, "ratio": 3.4965480616038236, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9576823500146117, "avg_score": 0.0010469779115412543, "num_lines": 234 }
from functools import wraps import flask def response(*, mimetype: str = None, template_file: str = None): def response_inner(f): # print("Wrapping in response {}".format(f.__name__), flush=True) @wraps(f) def view_method(*args, **kwargs): response_val = f(*args, **kwargs) if isinstance(response_val, flask.Response): return response_val if isinstance(response_val, dict): model = dict(response_val) else: model = dict() if template_file and not isinstance(response_val, dict): raise Exception( "Invalid return type {}, we expected a dict as the return value.".format(type(response_val))) if template_file: response_val = flask.render_template(template_file, **response_val) resp = flask.make_response(response_val) resp.model = model if mimetype: resp.mimetype = mimetype return resp return view_method return response_inner # # def template(template_file: str = None): # def template_inner(f): # @wraps(f) # def view_method(*args, **kwargs): # data_dict = f(*args, **kwargs) # if not isinstance(data_dict, dict): # raise Exception( # "Invalid return type {}, we expected a dict as the return value.".format(type(data_dict))) # # return flask.render_template(template_file, **data_dict) # # return view_method # # return template_inner
{ "repo_name": "Wintellect/WintellectWebinars", "path": "2019-06-06-ten-tips-python-web-devs-kennedy/code/top_10_web_explore/ex07_viewmodels/pypi_vm/infrastructure/view_modifiers.py", "copies": "4", "size": "1630", "license": "apache-2.0", "hash": 8286179227360220000, "line_mean": 29.7547169811, "line_max": 113, "alpha_frac": 0.5533742331, "autogenerated": false, "ratio": 4.2558746736292425, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6809248906729243, "avg_score": null, "num_lines": null }
from functools import wraps

import fluent_comments
from fluent_comments import appsettings
from fluent_comments.moderation import FluentCommentsModerator


def override_appsettings(**settings):
    """
    Temporarily override the appsettings.
    """
    def _dec(func):
        @wraps(func)
        def _inner(*args, **kwargs):
            # Apply new settings, backup old, clear caches
            old_values = {}
            for key, new_value in settings.items():
                old_values[key] = getattr(appsettings, key)
                setattr(appsettings, key, new_value)
            _reset_setting_caches()

            # Restore the old settings even if the test fails, so one
            # failing test can't leak overrides into the next one.
            try:
                func(*args, **kwargs)
            finally:
                for key, old_value in old_values.items():
                    setattr(appsettings, key, old_value)

                # reset caches
                _reset_setting_caches()

        return _inner

    return _dec


def _reset_setting_caches():
    fluent_comments.form_class = None
    fluent_comments.model_class = None
    FluentCommentsModerator.close_after = appsettings.FLUENT_COMMENTS_CLOSE_AFTER_DAYS
    FluentCommentsModerator.moderate_after = appsettings.FLUENT_COMMENTS_MODERATE_AFTER_DAYS
    FluentCommentsModerator.akismet_check = appsettings.FLUENT_CONTENTS_USE_AKISMET
    FluentCommentsModerator.akismet_check_action = appsettings.FLUENT_COMMENTS_AKISMET_ACTION
    FluentCommentsModerator.moderate_bad_words = set(appsettings.FLUENT_COMMENTS_MODERATE_BAD_WORDS)


class MockedResponse(object):
    def __init__(self, result, definitive=False):
        self.result = result
        self.headers = {}
        if definitive:
            self.headers['X-Akismet-Pro-Tip'] = 'discard'

    def json(self):
        return self.result
{ "repo_name": "django-fluent/django-fluent-comments", "path": "fluent_comments/tests/utils.py", "copies": "2", "size": "1694", "license": "apache-2.0", "hash": 4950031768383177000, "line_mean": 32.88, "line_max": 100, "alpha_frac": 0.6629279811, "autogenerated": false, "ratio": 3.6746203904555315, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5337548371555532, "avg_score": null, "num_lines": null }
from functools import wraps import l18n from django.contrib.auth import get_user_model from django.contrib.auth.views import redirect_to_login as auth_redirect_to_login from django.core.exceptions import PermissionDenied from django.db.models import Q from django.shortcuts import redirect from django.urls import reverse from django.utils.timezone import activate as activate_tz from django.utils.translation import ugettext as _ from django.utils.translation import override from wagtail.admin import messages from wagtail.core.models import GroupPagePermission def users_with_page_permission(page, permission_type, include_superusers=True): # Get user model User = get_user_model() # Find GroupPagePermission records of the given type that apply to this page or an ancestor ancestors_and_self = list(page.get_ancestors()) + [page] perm = GroupPagePermission.objects.filter(permission_type=permission_type, page__in=ancestors_and_self) q = Q(groups__page_permissions__in=perm) # Include superusers if include_superusers: q |= Q(is_superuser=True) return User.objects.filter(is_active=True).filter(q).distinct() def permission_denied(request): """Return a standard 'permission denied' response""" if request.is_ajax(): raise PermissionDenied from wagtail.admin import messages messages.error(request, _('Sorry, you do not have permission to access this area.')) return redirect('wagtailadmin_home') def user_passes_test(test): """ Given a test function that takes a user object and returns a boolean, return a view decorator that denies access to the user if the test returns false. """ def decorator(view_func): # decorator takes the view function, and returns the view wrapped in # a permission check @wraps(view_func) def wrapped_view_func(request, *args, **kwargs): if test(request.user): # permission check succeeds; run the view function as normal return view_func(request, *args, **kwargs) else: # permission check failed return permission_denied(request) return wrapped_view_func return decorator def permission_required(permission_name): """ Replacement for django.contrib.auth.decorators.permission_required which returns a more meaningful 'permission denied' response than just redirecting to the login page. (The latter doesn't work anyway because Wagtail doesn't define LOGIN_URL...) """ def test(user): return user.has_perm(permission_name) # user_passes_test constructs a decorator function specific to the above test function return user_passes_test(test) def any_permission_required(*perms): """ Decorator that accepts a list of permission names, and allows the user to pass if they have *any* of the permissions in the list """ def test(user): for perm in perms: if user.has_perm(perm): return True return False return user_passes_test(test) class PermissionPolicyChecker: """ Provides a view decorator that enforces the given permission policy, returning the wagtailadmin 'permission denied' response if permission not granted """ def __init__(self, policy): self.policy = policy def require(self, action): def test(user): return self.policy.user_has_permission(user, action) return user_passes_test(test) def require_any(self, *actions): def test(user): return self.policy.user_has_any_permission(user, actions) return user_passes_test(test) def user_has_any_page_permission(user): """ Check if a user has any permission to add, edit, or otherwise manage any page. """ # Can't do nothin if you're not active. if not user.is_active: return False # Superusers can do anything. 
if user.is_superuser: return True # At least one of the users groups has a GroupPagePermission. # The user can probably do something. if GroupPagePermission.objects.filter(group__in=user.groups.all()).exists(): return True # Specific permissions for a page type do not mean anything. # No luck! This user can not do anything with pages. return False def reject_request(request): if request.is_ajax(): raise PermissionDenied return auth_redirect_to_login( request.get_full_path(), login_url=reverse('wagtailadmin_login')) def require_admin_access(view_func): def decorated_view(request, *args, **kwargs): user = request.user if user.is_anonymous: return reject_request(request) if user.has_perms(['wagtailadmin.access_admin']): preferred_language = None if hasattr(user, 'wagtail_userprofile'): preferred_language = user.wagtail_userprofile.get_preferred_language() l18n.set_language(preferred_language) time_zone = user.wagtail_userprofile.get_current_time_zone() activate_tz(time_zone) if preferred_language: with override(preferred_language): return view_func(request, *args, **kwargs) else: return view_func(request, *args, **kwargs) if not request.is_ajax(): messages.error(request, _('You do not have permission to access the admin')) return reject_request(request) return decorated_view
{ "repo_name": "timorieber/wagtail", "path": "wagtail/admin/auth.py", "copies": "4", "size": "5599", "license": "bsd-3-clause", "hash": -4885849116090201000, "line_mean": 30.9942857143, "line_max": 107, "alpha_frac": 0.6720843008, "autogenerated": false, "ratio": 4.34031007751938, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.701239437831938, "avg_score": null, "num_lines": null }
from functools import wraps

import ldap3
from flask import request, g, Response

from .settings import SiteSettings
from urllib.parse import urlparse

config = SiteSettings()


def requires_auth(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        auth = request.authorization
        if not auth or not authenticate(auth.username, auth.password):
            return need_auth_response()
        return f(*args, **kwargs)
    return decorated


def authenticate(username, password):
    """
    Authenticate as this employee and set the ldap connection to a flask global
    :param username:
    :param password:
    :return: true if the login succeeded, false if not
    """
    # Reuse an already-bound connection for this request, if any.
    if g.get('ldap_connection', None):
        return True

    url = config.get_ldap_url()
    try:
        # first let's create an anonymous LDAP connection
        server = urlparse(url).hostname
        s = ldap3.Server(server, get_info=ldap3.ALL)
        unauthenticated_conn = ldap3.Connection(s)

        if not unauthenticated_conn.bind():
            raise OSError('Connection error.')

        # now find the employee
        unauthenticated_conn.search(
            search_base=config.get_ldap_base(),
            search_scope=ldap3.SUBTREE,
            search_filter=f'(uid={username})',
            attributes=ldap3.ALL_ATTRIBUTES
        )
        dn = unauthenticated_conn.response[0]['dn']

        # now let's bind with this employee
        auth_conn = ldap3.Connection(s, user=dn, password=password)
        if not auth_conn.bind():
            raise OSError('No such user')
        g.ldap_connection = auth_conn
        return True
    except Exception:
        return False


def need_auth_response():
    return Response(
        'Could not verify your access level for that URL.\n'
        'You have to log in with proper credentials', 401,
        {'WWW-Authenticate': 'Basic realm="Login Required"'})
{ "repo_name": "ishgroup/lightbook", "path": "ldap_api/ldap_auth.py", "copies": "1", "size": "1914", "license": "apache-2.0", "hash": -1602288027925998600, "line_mean": 28, "line_max": 79, "alpha_frac": 0.6347962382, "autogenerated": false, "ratio": 4.272321428571429, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 66 }
from functools import wraps import memcache import simplejson from canvas import util from configuration import Config from django.conf import settings cache = memcache.Client(settings.MEMCACHE_HOSTS) class DoesNotExist(object): pass class InProcessCache(object): def __init__(self): self.flush() def flush(self): self.cache = {} def get(self, key): return self.cache.get(key, DoesNotExist) def set(self, key, value): self.cache[key] = value def delete(self, key): try: del self.cache[key] except KeyError: pass def __contains__(self, key): return key in self.cache def _call_if_not_instance_of(key, cls, *args, **kwargs): """ If key is not an instance of cls, call it with *args and **kwargs, otherwise just return it. """ return key(*args, **kwargs) if not isinstance(key, cls) else key def memoize(key, time=24*60*60): """ A cache decorator that returns a CachedCall. Takes a key or a function that returns a key. """ def decorator(fxn): def wrapper(*args, **kwargs): cache_key = _call_if_not_instance_of(key, str, *args, **kwargs) return CachedCall(cache_key, lambda: fxn(*args, **kwargs), time) return wrapper return decorator def invalidates_cache(cc): """ Invalidates the cache after the function executes. Takes a CachedCall or a function that returns a CachedCall. """ def decorator(fxn): def wrapper(*args, **kwargs): # Invalidate the cache, given a CachedCall cache_call = _call_if_not_instance_of(cc, CachedCall, *args, **kwargs) # Execute and return the wrapped function try: return fxn(*args, **kwargs) finally: cache_call.invalidate() return wrapper return decorator class CachedCall(object): # Note that inprocess_cache is a CLASS variable. It is instantiated the first time # this class is parsed. It is then shared by all instances of CachedCall. inprocess_cache = InProcessCache() default_timeout = 24 * 60 * 60 def __init__(self, key, function, timeout=default_timeout, decorator=lambda x: x, promoter=lambda x: x): #TODO `promoter` is a temporary hack until we can instantiate Details objs based on the cached data. self.key = key.encode('ascii') self.timeout = timeout self.function = function self.cache_decorator = decorator self.promoter = promoter def __eq__(self, other): return hasattr(other, 'key') and other.key == self.key def __hash__(self): return hash(self.key) def __call__(self, *args, **kwargs): return self._get(*args, **kwargs) def _get(self, force=False, skip_decorator=False): raw_value = self._fetch(force) value = self.promoter(raw_value) if not skip_decorator: return self.cache_decorator(value) else: return value def get_local_only(self): return self.inprocess_cache.get(self.key) def value_from_cache_data(self, cache_data): try: value = util.loads(cache_data) except: return DoesNotExist return value def invalidate(self): """ Just unsets the cache without re-caching. """ # Remove from remote cache.delete(self.key) # Remove from class_variable/local cache. CachedCall.inprocess_cache.delete(self.key) def generate_cache_data(self): """ Sets the cache. """ value = self.function() cache_data = self.remote_set(value) self.local_set(value) return cache_data def remote_set(self, value): """ Sets the remote cache. """ cache_data = util.dumps(value) cache.set(self.key, cache_data, self.timeout) return cache_data def local_set(self, value): """ Sets the local cache. """ self.inprocess_cache.set(self.key, value) def _fetch(self, force): if not force: # Can we find it in the local cache? value = self.get_local_only() if not value == DoesNotExist: return value # Is it in the remote cache? 
value = self.value_from_cache_data(cache.get(self.key)) if not value == DoesNotExist: return value cache_data = self.generate_cache_data() return self.value_from_cache_data(cache_data) def force(self, *args, **kwargs): return self._get(force=True, *args, **kwargs) @classmethod def multicall(cls, calls, skip_decorator=False): if not calls: return [] results = {} fetch_calls = [] for call in set(calls): value = call.get_local_only() if value == DoesNotExist: fetch_calls.append(call) else: results[call] = value results[call] = call.promoter(results[call]) todo = [] multicall_results = cache.get_multi([call.key for call in fetch_calls]) for call in fetch_calls: cache_data = multicall_results.get(call.key, DoesNotExist) value = call.value_from_cache_data(cache_data) if cache_data != DoesNotExist else DoesNotExist if value == DoesNotExist: todo.append(call) else: results[call] = value results[call] = call.promoter(results[call]) for call in todo: results[call] = call.value_from_cache_data(call.generate_cache_data()) if results[call]: results[call] = call.promoter(results[call]) if not skip_decorator: for call, value in results.items(): results[call] = call.cache_decorator(value) return [results[call] for call in calls] @classmethod def many_multicall(cls, *call_lists, **kwargs): concat = sum(call_lists, []) concat_results = cls.multicall(concat, **kwargs) start = 0 results = [] for call_list in call_lists: length = len(call_list) results.append(concat_results[start:start + length]) start += length return results @classmethod def queryset_details(cls, queryset, **kwargs): return cls.multicall([obj.details for obj in queryset], **kwargs) def cacheable(key): def decorator(func): #TODO Category.get_top = CachedCall('category:top_v2', Category._get_top) @wraps(func) def wrapper(*args, **kwargs): return CachedCall(key, lambda: func(*args, **kwargs))() return wrapper return decorator
{ "repo_name": "canvasnetworks/canvas", "path": "website/canvas/cache_patterns.py", "copies": "1", "size": "6806", "license": "bsd-3-clause", "hash": 1738385425596303400, "line_mean": 31.1037735849, "line_max": 114, "alpha_frac": 0.5941816045, "autogenerated": false, "ratio": 3.970828471411902, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.007478738966948646, "num_lines": 212 }
from functools import wraps

import newrelic.agent
from django.http import Http404, HttpResponsePermanentRedirect
from waffle import flag_is_active, switch_is_active

from kuma.core.urlresolvers import reverse
from kuma.core.utils import urlparams

from .exceptions import ReadOnlyException
from .utils import locale_and_slug_from_path


def prevent_indexing(func):
    """Decorator to prevent a page from being indexable by robots"""

    @wraps(func)
    def _added_header(request, *args, **kwargs):
        response = func(request, *args, **kwargs)
        response["X-Robots-Tag"] = "noindex"
        return response

    return _added_header


def allow_CORS_GET(func):
    """Decorator to allow CORS for GET requests"""

    @wraps(func)
    def _added_header(request, *args, **kwargs):
        response = func(request, *args, **kwargs)
        # We are using this switch temporarily to research bug 1104260.
        # Disabling this code has no effect locally, but may have an effect on
        # production.
        if "GET" == request.method and switch_is_active("application_ACAO"):
            response["Access-Control-Allow-Origin"] = "*"
        return response

    return _added_header


def check_readonly(view):
    """Decorator to enforce read-only mode"""

    def _check_readonly(request, *args, **kwargs):
        if not flag_is_active(request, "kumaediting"):
            raise ReadOnlyException("kumaediting")

        return view(request, *args, **kwargs)

    return _check_readonly


@newrelic.agent.function_trace()
def process_document_path(func, reverse_name="wiki.document"):
    """
    Decorator to process document_path into locale and slug, with
    auto-redirect if necessary.

    This function takes generic args and kwargs so it presumes as little
    as possible about the view method's signature.
    """

    @wraps(func)
    def process(request, document_path=None, *args, **kwargs):

        if kwargs.get("bypass_process_document_path", False):
            # Support an option to bypass this decorator altogether, so one
            # view can directly call another view.
            del kwargs["bypass_process_document_path"]
            return func(request, document_path, *args, **kwargs)

        document_slug, document_locale = None, None
        if document_path:

            # Parse the document path into locale and slug.
            document_locale, document_slug, needs_redirect = locale_and_slug_from_path(
                document_path, request
            )

            # Check for a "local" URL and remove any trailing slash
            slug_length = len(document_slug)
            if slug_length and document_slug[slug_length - 1] == "/":
                needs_redirect = True
                document_slug = document_slug.rstrip("/")

            if not document_slug:
                # If there's no slug, then this is just a 404.
                raise Http404

            if request.GET.get("raw", False) is not False:
                # HACK: There are and will be a lot of kumascript templates
                # based on legacy DekiScript which will attempt to request
                # old-style URLs. Skip 301 redirects for raw content.
                # TODO: evaluate if this is still appropriate
                needs_redirect = False

            if needs_redirect:
                # This catches old MindTouch locales, missing locale, and a few
                # other cases to fire off a 301 Moved permanent redirect.
                url = reverse(
                    reverse_name, locale=document_locale, args=[document_slug]
                )
                url = urlparams(url, query_dict=request.GET)
                return HttpResponsePermanentRedirect(url)

        # Set the kwargs that decorated methods will expect.
        kwargs["document_slug"] = document_slug
        kwargs["document_locale"] = document_locale
        return func(request, *args, **kwargs)

    return process
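# --- Usage sketch (added for illustration; not part of the original module) ---
# Shows how the decorators above are typically stacked on a document view.
# `example_document_view` and its body are hypothetical; `document_slug` and
# `document_locale` are the kwargs that process_document_path injects after
# parsing (and possibly 301-redirecting) the requested path.
from django.http import HttpResponse


@prevent_indexing
@allow_CORS_GET
@process_document_path
def example_document_view(request, document_slug=None, document_locale=None):
    return HttpResponse("%s/%s" % (document_locale, document_slug))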
{ "repo_name": "a2sheppy/kuma", "path": "kuma/wiki/decorators.py", "copies": "2", "size": "3963", "license": "mpl-2.0", "hash": -2482687405412813000, "line_mean": 34.0707964602, "line_max": 87, "alpha_frac": 0.6298258895, "autogenerated": false, "ratio": 4.35973597359736, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5989561863097359, "avg_score": null, "num_lines": null }
from functools import wraps import numpy as np import networkx as nx import vigra import collections from contextlib import contextmanager from .box import Box class BlockflowArray(np.ndarray): def __new__(cls, shape, dtype=float, buffer=None, offset=0, strides=None, order=None, box=None): obj = np.ndarray.__new__(cls, shape, dtype, buffer, offset, strides, order) obj.box = box return obj def __array_finalize__(self, obj): if obj is None: return orig_box = getattr(obj, 'box', None) self.box = orig_box # We're creating a new array using an existing array as a template, but if the array was generated # via a broadcasting ufunc, then the box might not be copied from the correct array. # If it's wrong, just remove the box attribute. # # FIXME: We might be able to handle cases like this automatically # via __array_wrap__() or __array_prepare__() if orig_box is not None: if tuple(orig_box[1] - orig_box[0]) == self.shape: self.box = orig_box class DryArray(BlockflowArray): def __new__(cls, shape=(), dtype=float, buffer=None, offset=0, strides=None, order=None, box=None): assert shape == () or np.prod(shape) == 0, "DryArray must have empty shape" obj = BlockflowArray.__new__(cls, shape, dtype, buffer, offset, strides, order, box=box) return obj @contextmanager def readonly_array(a): a = np.asanyarray(a) writeable = a.flags['WRITEABLE'] a.flags['WRITEABLE'] = False yield a a.flags['WRITEABLE'] = writeable class Operator(object): def __init__(self, name=None): self.name = name or self.__class__.__name__ def __call__(self, *args, **kwargs): self.args = args self.kwargs = kwargs assert 'req_box' not in kwargs, \ "The req_box should not be passed to operators explicitly. Use foo.pull(box)" return self def dry_pull(self, box): with readonly_array(box) as box: assert box.ndim == 2 and box.shape[0] == 2 and box.shape[1] <= 5 kwargs = {'req_box': box} kwargs.update(self.kwargs) if global_graph.mode == 'registration_dry_run': global_graph.dag.add_node(self) if global_graph.op_callstack: caller = global_graph.op_callstack[-1] global_graph.dag.add_edge(self, caller) global_graph.op_callstack.append(self) try: return self.dry_execute(*self.args, **kwargs) finally: global_graph.op_callstack.pop() def pull(self, box): with readonly_array(box) as box: assert box.ndim == 2 and box.shape[0] == 2 and box.shape[1] <= 5 kwargs = {'req_box': box} kwargs.update(self.kwargs) result_data = self.execute(*self.args, **kwargs) assert isinstance(result_data, BlockflowArray) assert result_data.box is not None return result_data def dry_execute(self, *args, **kwargs): raise NotImplementedError() def execute(self, *args, **kwargs): raise NotImplementedError() def __str__(self): return self.name class ReadArray(Operator): def dry_execute(self, arr, req_box): return DryArray(box=self._clip_box(arr, req_box)) def execute(self, arr, req_box=None): clipped_box = self._clip_box(arr, req_box) result = arr[clipped_box.slicing()].view(BlockflowArray) result.box = clipped_box return result def _clip_box(self, arr, req_box): full_array_box = Box.from_shape(arr.shape) valid_box = full_array_box.intersection(req_box) return valid_box def wrap_filter_5d(filter_func): """ Decorator. Given a 5D array (tzyxc), and corresponding output box, compute the given filter over the spatial dimensions. (It doesn't suffice to simply drop the 't' axis and run the filter, because singleton spatial dimensions would cause trouble.) 
""" @wraps(filter_func) def wrapper(input_data, scale, box_5d): input_data = vigra.taggedView(input_data, 'tzyxc') assert box_5d.shape == (2,5) assert box_5d[1,0] - box_5d[0,0] == 1, \ "FIXME: Can't handle multiple time slices yet. (Add a loop to this function.)" # Find the non-singleton axes, so we can keep only them # but also keep channel, no matter what input_shape_nochannel = np.array(input_data.shape[:-1]) nonsingleton_axes = (input_shape_nochannel != 1).nonzero()[0] nonsingleton_axes = tuple(nonsingleton_axes) + (4,) # Keep channel box = box_5d[:, nonsingleton_axes] # Might be a 2D OR 3D box # Squeeze, but keep channel squeezed_input = input_data.squeeze() if 'c' not in squeezed_input.axistags.keys(): squeezed_input = squeezed_input.insertChannelAxis(-1) result = filter_func(squeezed_input, scale, box=box) result = result.withAxes(*'tzyxc') return result return wrapper class ConvolutionalFilter(Operator): WINDOW_SIZE = 2.0 # Subclasses may override this def __init__(self, name=None): super(ConvolutionalFilter, self).__init__(name) self.filter_func_5d = wrap_filter_5d(self.filter_func) def filter_func(self, input_data, scale, box): """ input_data: array data whose axes are one of the following: zyxc, yxc, zxc, zyc scale: filter scale (sigma) box: Not 5D. Either 4D or 3D, depending on the dimensionality of input_data """ raise NotImplementedError("Convolutional Filter '{}' must override filter_func()" .format(self.__class__.__name__)) def num_channels_for_input_box(self, box): # Default implementation: One output channel per input channel, # regardless of dimensions return box[1,'c'] - box[0,'c'] def num_channels_for_input_box_vector_valued(self, box): """ For vector-valued filters whose output channels is N*C """ shape_zyx = box[1,'zyx'] - box[0,'zyx'] ndim = (shape_zyx > 1).sum() channels = box.to_shape()[-1] return ndim*channels def dry_execute(self, input_op, scale, req_box): upstream_req_box = self._get_upstream_box(scale, req_box) empty_data = input_op.dry_pull(upstream_req_box) n_channels = self.num_channels_for_input_box(empty_data.box) box = empty_data.box.copy() box[:,-1] = (0, n_channels) box = box.intersection(req_box) return DryArray(box=box) def execute(self, input_op, scale, req_box=None): # Ask for the fully padded input upstream_req_box = self._get_upstream_box(scale, req_box) input_data = input_op.pull(upstream_req_box) # The result is tagged with a box. # If we asked for too much (wider than the actual image), # then this box won't match what we requested. 
upstream_actual_box = input_data.box result_box, req_box_within_upstream = upstream_actual_box.intersection(req_box, True) filtered = self.filter_func_5d(input_data, scale, req_box_within_upstream) filtered = filtered.view(BlockflowArray) filtered.box = result_box expected_channels = self.num_channels_for_input_box(upstream_actual_box) assert filtered.shape[-1] == expected_channels, \ "Filter '{}' returned an unexpected number of channels: got {}, expected {}"\ .format(self.name, filtered.shape[-1], expected_channels) return filtered def _get_upstream_box(self, sigma, req_box): padding = np.ceil(np.array(sigma)*self.WINDOW_SIZE).astype(np.int64) upstream_req_box = req_box.copy() upstream_req_box[0, 'zyx'] -= padding upstream_req_box[1, 'zyx'] += padding return upstream_req_box class GaussianSmoothing(ConvolutionalFilter): def filter_func(self, input_data, scale, box): return vigra.filters.gaussianSmoothing(input_data, sigma=scale, window_size=self.WINDOW_SIZE, roi=box[:,:-1].tolist()) class LaplacianOfGaussian(ConvolutionalFilter): def filter_func(self, input_data, scale, box): return vigra.filters.laplacianOfGaussian(input_data, scale=scale, window_size=self.WINDOW_SIZE, roi=box[:,:-1].tolist()) class GaussianGradientMagnitude(ConvolutionalFilter): def filter_func(self, input_data, scale, box): return vigra.filters.gaussianGradientMagnitude(input_data, sigma=scale, window_size=self.WINDOW_SIZE, roi=box[:,:-1].tolist()) class HessianOfGaussianEigenvalues(ConvolutionalFilter): num_channels_for_input_box = ConvolutionalFilter.num_channels_for_input_box_vector_valued def filter_func(self, input_data, scale, box): return vigra.filters.hessianOfGaussianEigenvalues(input_data, scale=scale, window_size=self.WINDOW_SIZE, roi=box[:,:-1].tolist()) class StructureTensorEigenvalues(ConvolutionalFilter): num_channels_for_input_box = ConvolutionalFilter.num_channels_for_input_box_vector_valued def filter_func(self, input_data, scale, box): inner_scale = scale outer_scale = scale / 2.0 return vigra.filters.structureTensorEigenvalues(input_data, innerScale=inner_scale, outerScale=outer_scale, window_size=self.WINDOW_SIZE, roi=box[:,:-1].tolist()) class DifferenceOfGaussians(ConvolutionalFilter): def filter_func(self, input_data, scale, box): sigma_1 = scale sigma_2 = 0.66*scale smoothed_1 = vigra.filters.gaussianSmoothing(input_data, sigma=sigma_1, window_size=self.WINDOW_SIZE, roi=box[:,:-1].tolist()) smoothed_2 = vigra.filters.gaussianSmoothing(input_data, sigma=sigma_2, window_size=self.WINDOW_SIZE, roi=box[:,:-1].tolist()) # In-place subtraction np.subtract( smoothed_1, smoothed_2, out=smoothed_1 ) return smoothed_1 class DifferenceOfGaussiansComposite(Operator): """ Alternative implementation of DifferenceOfGaussians, but using internal operators for the two smoothing operations. """ def __init__(self, name=None): super(DifferenceOfGaussiansComposite, self).__init__(name) self.gaussian_1 = GaussianSmoothing('Gaussian-1') self.gaussian_2 = GaussianSmoothing('Gaussian-2') def dry_execute(self, input_op, scale, req_box): empty_1 = self.gaussian_1(input_op, scale).dry_pull(req_box) empty_2 = self.gaussian_2(input_op, scale*0.66).dry_pull(req_box) assert (empty_1.box == empty_2.box).all() return empty_1 def execute(self, input_op, scale, req_box=None): a = self.gaussian_1(input_op, scale).pull(req_box) b = self.gaussian_2(input_op, scale*0.66).pull(req_box) # For pointwise numpy ufuncs, the result is already cast as # a BlockflowArray, with the box already initialized. # Nothing extra needed here. 
return a - b FilterSpec = collections.namedtuple( 'FilterSpec', 'name scale' ) FilterNames = { 'GaussianSmoothing': GaussianSmoothing, 'LaplacianOfGaussian': LaplacianOfGaussian, 'GaussianGradientMagnitude': GaussianGradientMagnitude, 'DifferenceOfGaussians': DifferenceOfGaussians, #'DifferenceOfGaussians': DifferenceOfGaussiansComposite, 'HessianOfGaussianEigenvalues': HessianOfGaussianEigenvalues, 'StructureTensorEigenvalues': StructureTensorEigenvalues } class PixelFeatures(Operator): def __init__(self, name=None): Operator.__init__(self, name) self.feature_ops = {} # (name, scale) : op def dry_execute(self, input_op, filter_specs, req_box): n_channels = 0 for spec in filter_specs: feature_op = self._get_filter_op(spec) empty = feature_op(input_op, spec.scale).dry_pull(req_box) n_channels += empty.box[1, 'c'] box = empty.box.copy() box[:,-1] = (0, n_channels) # Restrict to requested channels box = box.intersection(req_box) return DryArray(box=box) def execute(self, input_op, filter_specs, req_box=None): # FIXME: This requests all channels, no matter what. results = [] for spec in filter_specs: feature_op = self._get_filter_op(spec) feature_data = feature_op(input_op, spec.scale).pull(req_box) results.append(feature_data) stacked_data = np.concatenate(results, axis=-1) # Select only the requested channels stacked_data = stacked_data[..., slice(*req_box[:,-1])] stacked_data = stacked_data.view(BlockflowArray) stacked_data.box = feature_data.box stacked_data.box[:,-1] = req_box[:,-1] return stacked_data def _get_filter_op(self, spec): try: feature_op = self.feature_ops[spec] except KeyError: feature_op = self.feature_ops[spec] = FilterNames[spec.name]() return feature_op class PredictPixels(Operator): def dry_execute(self, features_op, classifier, req_box): upstream_box = req_box.copy() upstream_box[:,-1] = (Box.MIN,Box.MAX) # Request all features empty_feats = features_op.dry_pull(upstream_box) out_box = empty_feats.box.copy() out_box[:,-1] = (0, len(classifier.known_classes)) out_box = out_box.intersection(req_box) return DryArray(dtype=np.float32, box=out_box) def execute(self, features_op, classifier, req_box): upstream_box = req_box.copy() upstream_box[:,-1] = (Box.MIN,Box.MAX) # Request all features feature_vol = features_op.pull(upstream_box) prod = np.prod(feature_vol.shape[:-1]) feature_matrix = feature_vol.reshape((prod, feature_vol.shape[-1])) probabilities_matrix = classifier.predict_probabilities( feature_matrix ) # TODO: Somehow check for correct number of channels, in case the classifier returned fewer classes than we expected # (See lazyflow for example) probabilities_vol = probabilities_matrix.reshape(feature_vol.shape[:-1] + (-1,)) # Extract only the channel range that was originally requested ch_start, ch_stop = req_box[:,-1] probabilities_vol = probabilities_vol[..., ch_start:ch_stop] probabilities_vol = probabilities_vol.view(BlockflowArray) probabilities_vol.box = np.append(feature_vol.box[:,:-1], req_box[:,-1:], axis=1) return probabilities_vol class Graph(object): MODES = ['uninitialized', 'registration_dry_run', 'block_flow_dry_run', 'executable'] def __init__(self): self.op_callstack = [] self.dag = nx.DiGraph() self.mode = 'uninitialized' @contextmanager def register_calls(self): assert len(self.op_callstack) == 0 self.mode = 'registration_dry_run' yield assert len(self.op_callstack) == 0 self.mode = 'executable' global_graph = Graph()
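# --- Usage sketch (added for illustration; not part of the original module) ---
# A minimal end-to-end pull through the operator graph: wrap a raw 5D array
# (tzyxc), smooth it, and request a box. This is a sketch only; it assumes
# vigra is installed and that the sibling `box` module's Box behaves as it is
# used above (Box.from_shape, string-indexed axes, intersection, ...).
if __name__ == '__main__':
    raw = np.random.rand(1, 1, 64, 64, 1).astype(np.float32)
    read = ReadArray('ReadRaw')(raw)
    smoothed = GaussianSmoothing()(read, 1.0)
    req_box = Box.from_shape(raw.shape)  # request the full volume
    result = smoothed.pull(req_box)
    print(result.shape, result.box)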
{ "repo_name": "stuarteberg/blockflow", "path": "blockflow/blockflow.py", "copies": "1", "size": "15659", "license": "mit", "hash": 531534710664469900, "line_mean": 39.3582474227, "line_max": 137, "alpha_frac": 0.61562041, "autogenerated": false, "ratio": 3.732777115613826, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4848397525613826, "avg_score": null, "num_lines": null }
from functools import wraps

import numpy as np
import tensorflow as tf

from neupy.utils.misc import as_tuple
from neupy.utils.processing import asfloat


__all__ = (
    # Main tensorflow functions
    'tensorflow_session', 'tensorflow_eval', 'create_variable',
    'initialize_uninitialized_variables', 'function',

    # Functions that help to deal with tensorflow name scope
    'class_method_name_scope', 'function_name_scope',

    # Misc utils for tensorflow
    'flatten', 'outer', 'repeat', 'dimshuffle', 'shape_to_tuple',
    'dot', 'make_single_vector', 'setup_parameter_updates',
)


def function(inputs, outputs, updates=None, name=None):
    """
    Simulates the behaviour of Theano functions.

    Parameters
    ----------
    inputs : list
        List of input placeholders

    outputs : list, Tensor
        Output that has to be computed by the function

    updates : list or None
        List of the updates that have to be performed on the variables.
        The ``None`` value means that no updates will be applied at the
        end of the computation. Defaults to ``None``.

    name : str or None
        Defaults to ``None``.

    Returns
    -------
    function
    """
    if updates is None:
        updates = []

    session = tensorflow_session()
    tensorflow_updates = []

    # Ensure that all new values have been computed. Absence of these
    # checks might lead to non-deterministic update behaviour.
    new_values = [val[1] for val in updates if isinstance(val, (list, tuple))]

    # Make sure that all outputs have been computed
    with tf.control_dependencies(as_tuple(outputs, new_values)):
        for update in updates:
            if isinstance(update, (list, tuple)):
                old_value, new_value = update
                update = old_value.assign(new_value)
            tensorflow_updates.append(update)

        # Group the update ops so that they don't produce any output
        tensorflow_updates = tf.group(*tensorflow_updates)

    @wraps(function)
    def wrapper(*input_values):
        feed_dict = dict(zip(inputs, input_values))
        result, _ = session.run(
            [outputs, tensorflow_updates],
            feed_dict=feed_dict,
        )
        return result
    return wrapper


def tensorflow_session():
    if hasattr(tensorflow_session, 'cache'):
        session = tensorflow_session.cache

        if not session._closed:
            return session

    config = tf.ConfigProto(
        allow_soft_placement=True,
        inter_op_parallelism_threads=0,
        intra_op_parallelism_threads=0,
    )
    session = tf.Session(config=config)

    tensorflow_session.cache = session
    return session


def initialize_uninitialized_variables(variables=None):
    if variables is None:
        variables = tf.global_variables()

    if not variables:
        return

    session = tensorflow_session()
    is_not_initialized = session.run([
        tf.is_variable_initialized(var) for var in variables])

    not_initialized_vars = [
        v for (v, f) in zip(variables, is_not_initialized) if not f]

    if len(not_initialized_vars):
        session.run(tf.variables_initializer(not_initialized_vars))


def function_name_scope(function):
    """
    Decorator that wraps a function with a name scope
    that has the same name as the function.
    """
    @wraps(function)
    def wrapper(*args, **kwargs):
        with tf.name_scope(function.__name__):
            return function(*args, **kwargs)
    return wrapper


def class_method_name_scope(method):
    """
    Decorator that wraps a method with a name scope
    that has the same name as the method.
""" @wraps(method) def wrapper(self, *args, **kwargs): with tf.name_scope(self.__class__.__name__): if hasattr(method, '__self__'): # check if method bounded return method(*args, **kwargs) return method(self, *args, **kwargs) wrapper.original_method = method return wrapper def tensorflow_eval(value): session = tensorflow_session() initialize_uninitialized_variables() return session.run(value) @function_name_scope def flatten(value): return tf.reshape(value, [-1]) @function_name_scope def outer(a, b): a = tf.expand_dims(a, 1) # column vector b = tf.expand_dims(b, 0) # row vector return tf.matmul(a, b) @function_name_scope def dot(a, b): return tf.tensordot(a, b, 1) def repeat(tensor, repeats): """ Repeat elements of an tensor. The same as ``numpy.repeat``. Parameters ---------- input : tensor repeats: list, tuple Number of repeat for each dimension, length must be the same as the number of dimensions in input. Returns ------- tensor Has the same type as input. Has the shape of ``tensor.shape * repeats``. """ with tf.variable_scope("repeat"): expanded_tensor = tf.expand_dims(tensor, -1) multiples = as_tuple(1, repeats) tiled_tensor = tf.tile(expanded_tensor, multiples) repeats = tf.convert_to_tensor(repeats) return tf.reshape(tiled_tensor, tf.shape(tensor) * repeats) def make_single_vector(parameters): with tf.name_scope('parameters-vector'): return tf.concat([flatten(param) for param in parameters], axis=0) def setup_parameter_updates(parameters, parameter_update_vector): """ Creates update rules for list of parameters from one vector. Function is useful in Conjugate Gradient or Levenberg-Marquardt optimization algorithms Parameters ---------- parameters : list List of parameters. parameter_update_vector : Tensorfow varible Vector that contains updates for all parameters. Returns ------- list List of updates separeted for each parameter. """ updates = [] start_position = 0 for parameter in parameters: end_position = start_position + tf.size(parameter) new_parameter = tf.reshape( parameter_update_vector[start_position:end_position], parameter.shape ) updates.append((parameter, new_parameter)) start_position = end_position return updates def dimshuffle(value, ndim, axes): """ Shuffle dimension based on the specified number of dimensions and axes. Parameters ---------- value : Tensorfow variable ndim : int axes : tuple, list Returns ------- Tensorfow variable """ for dim in range(ndim): if dim not in axes: value = tf.expand_dims(value, dim) return value def shape_to_tuple(shape): if isinstance(shape, tf.TensorShape): if shape.ndims is not None: return tuple([dim.value for dim in shape.dims]) return None if isinstance(shape, tf.Dimension): return shape.value if isinstance(shape, list): return [shape_to_tuple(s) for s in shape] if isinstance(shape, tuple): return tuple([shape_to_tuple(s) for s in shape]) return shape def create_variable(value, name, shape, trainable=True): """ Creates NN parameter as Tensorfow variable. Parameters ---------- value : array-like, Tensorfow variable, scalar or Initializer Default value for the parameter. name : str Shared variable name. shape : tuple Parameter's shape. trainable : bool Whether parameter trainable by backpropagation. Returns ------- Tensorfow variable. """ from neupy import init if shape is not None: shape = shape_to_tuple(shape) if isinstance(value, (tf.Variable, tf.Tensor, np.ndarray, np.matrix)): variable_shape = shape_to_tuple(value.shape) if as_tuple(variable_shape) != as_tuple(shape): raise ValueError( "Cannot create variable with name `{}`. 
Provided variable " "with shape {} is incompatible with expected shape {}" "".format(name, variable_shape, shape)) if isinstance(value, (tf.Variable, tf.Tensor)): return value if isinstance(value, (int, float)): value = init.Constant(value) if isinstance(value, init.Initializer): value = value.sample(shape) return tf.Variable( asfloat(value), name=name, dtype=tf.float32, trainable=trainable, )
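# --- Usage sketch (added for illustration; not part of the original module) ---
# Mirrors the Theano-style workflow that `function` emulates: build the graph
# once, then call it like a plain Python function. The placeholder, variable
# and update below are illustrative only.
if __name__ == '__main__':
    x = tf.placeholder(tf.float32, shape=(None,), name='x')
    counter = tf.Variable(0.0, name='counter')

    step = function(
        inputs=[x],
        outputs=2.0 * x,
        updates=[(counter, counter + 1.0)],
    )
    initialize_uninitialized_variables()

    print(step(np.array([1.0, 2.0, 3.0])))  # -> [2. 4. 6.]
    print(tensorflow_eval(counter))         # number of times `step` ran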
{ "repo_name": "itdxer/neupy", "path": "neupy/utils/tf_utils.py", "copies": "1", "size": "8447", "license": "mit", "hash": 1218204837948436500, "line_mean": 25.2329192547, "line_max": 78, "alpha_frac": 0.6295726293, "autogenerated": false, "ratio": 4.1983101391650095, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 322 }
from functools import wraps

import numpy as np

from ..base import normalize_token
from .core import (
    concatenate_lookup,
    tensordot_lookup,
    map_blocks,
    asanyarray,
    blockwise,
)
from .routines import _average
from ..utils import derived_from


@normalize_token.register(np.ma.masked_array)
def normalize_masked_array(x):
    data = normalize_token(x.data)
    mask = normalize_token(x.mask)
    fill_value = normalize_token(x.fill_value)
    return (data, mask, fill_value)


@concatenate_lookup.register(np.ma.masked_array)
def _concatenate(arrays, axis=0):
    out = np.ma.concatenate(arrays, axis=axis)
    fill_values = [i.fill_value for i in arrays if hasattr(i, "fill_value")]
    if any(isinstance(f, np.ndarray) for f in fill_values):
        raise ValueError(
            "Dask doesn't support masked arrays with non-scalar `fill_value`s"
        )
    if fill_values:
        # If all the fill_values are the same, copy over the fill value
        fill_values = np.unique(fill_values)
        if len(fill_values) == 1:
            out.fill_value = fill_values[0]
    return out


@tensordot_lookup.register(np.ma.masked_array)
def _tensordot(a, b, axes=2):
    # Much of this is stolen from numpy/core/numeric.py::tensordot
    # Please see license at https://github.com/numpy/numpy/blob/master/LICENSE.txt
    try:
        iter(axes)
    except TypeError:
        axes_a = list(range(-axes, 0))
        axes_b = list(range(0, axes))
    else:
        axes_a, axes_b = axes
    try:
        na = len(axes_a)
        axes_a = list(axes_a)
    except TypeError:
        axes_a = [axes_a]
        na = 1
    try:
        nb = len(axes_b)
        axes_b = list(axes_b)
    except TypeError:
        axes_b = [axes_b]
        nb = 1

    # a, b = asarray(a), asarray(b)  # <--- modified
    as_ = a.shape
    nda = a.ndim
    bs = b.shape
    ndb = b.ndim
    equal = True
    if na != nb:
        equal = False
    else:
        for k in range(na):
            if as_[axes_a[k]] != bs[axes_b[k]]:
                equal = False
                break
            if axes_a[k] < 0:
                axes_a[k] += nda
            if axes_b[k] < 0:
                axes_b[k] += ndb
    if not equal:
        raise ValueError("shape-mismatch for sum")

    # Move the axes to sum over to the end of "a"
    # and to the front of "b"
    notin = [k for k in range(nda) if k not in axes_a]
    newaxes_a = notin + axes_a
    N2 = 1
    for axis in axes_a:
        N2 *= as_[axis]
    newshape_a = (-1, N2)
    olda = [as_[axis] for axis in notin]

    notin = [k for k in range(ndb) if k not in axes_b]
    newaxes_b = axes_b + notin
    N2 = 1
    for axis in axes_b:
        N2 *= bs[axis]
    newshape_b = (N2, -1)
    oldb = [bs[axis] for axis in notin]

    at = a.transpose(newaxes_a).reshape(newshape_a)
    bt = b.transpose(newaxes_b).reshape(newshape_b)
    res = np.ma.dot(at, bt)
    return res.reshape(olda + oldb)


@derived_from(np.ma)
def filled(a, fill_value=None):
    a = asanyarray(a)
    return a.map_blocks(np.ma.filled, fill_value=fill_value)


def _wrap_masked(f):
    @wraps(f)
    def _(a, value):
        a = asanyarray(a)
        value = asanyarray(value)
        ainds = tuple(range(a.ndim))[::-1]
        vinds = tuple(range(value.ndim))[::-1]
        oinds = max(ainds, vinds, key=len)
        return blockwise(f, oinds, a, ainds, value, vinds, dtype=a.dtype)

    return _


masked_greater = _wrap_masked(np.ma.masked_greater)
masked_greater_equal = _wrap_masked(np.ma.masked_greater_equal)
masked_less = _wrap_masked(np.ma.masked_less)
masked_less_equal = _wrap_masked(np.ma.masked_less_equal)
masked_not_equal = _wrap_masked(np.ma.masked_not_equal)


@derived_from(np.ma)
def masked_equal(a, value):
    a = asanyarray(a)
    if getattr(value, "shape", ()):
        raise ValueError("da.ma.masked_equal doesn't support array `value`s")
    inds = tuple(range(a.ndim))
    return blockwise(np.ma.masked_equal, inds, a, inds, value, (), dtype=a.dtype)


@derived_from(np.ma)
def masked_invalid(a):
    return asanyarray(a).map_blocks(np.ma.masked_invalid)


@derived_from(np.ma)
def masked_inside(x, v1, v2):
    x = asanyarray(x)
    return x.map_blocks(np.ma.masked_inside, v1, v2)


@derived_from(np.ma)
def masked_outside(x, v1, v2):
    x = asanyarray(x)
    return x.map_blocks(np.ma.masked_outside, v1, v2)


@derived_from(np.ma)
def masked_where(condition, a):
    cshape = getattr(condition, "shape", ())
    if cshape and cshape != a.shape:
        raise IndexError(
            "Inconsistent shape between the condition and the "
            "input (got %s and %s)" % (cshape, a.shape)
        )
    condition = asanyarray(condition)
    a = asanyarray(a)
    ainds = tuple(range(a.ndim))
    cinds = tuple(range(condition.ndim))
    return blockwise(
        np.ma.masked_where, ainds, condition, cinds, a, ainds, dtype=a.dtype
    )


@derived_from(np.ma)
def masked_values(x, value, rtol=1e-05, atol=1e-08, shrink=True):
    x = asanyarray(x)
    if getattr(value, "shape", ()):
        raise ValueError("da.ma.masked_values doesn't support array `value`s")
    return map_blocks(
        np.ma.masked_values, x, value, rtol=rtol, atol=atol, shrink=shrink
    )


@derived_from(np.ma)
def fix_invalid(a, fill_value=None):
    a = asanyarray(a)
    return a.map_blocks(np.ma.fix_invalid, fill_value=fill_value)


@derived_from(np.ma)
def getdata(a):
    a = asanyarray(a)
    return a.map_blocks(np.ma.getdata)


@derived_from(np.ma)
def getmaskarray(a):
    a = asanyarray(a)
    return a.map_blocks(np.ma.getmaskarray)


def _masked_array(data, mask=np.ma.nomask, **kwargs):
    dtype = kwargs.pop("masked_dtype", None)
    return np.ma.masked_array(data, mask=mask, dtype=dtype, **kwargs)


@derived_from(np.ma)
def masked_array(data, mask=np.ma.nomask, fill_value=None, **kwargs):
    data = asanyarray(data)
    inds = tuple(range(data.ndim))
    arginds = [inds, data, inds]

    if getattr(fill_value, "shape", ()):
        raise ValueError("non-scalar fill_value not supported")
    kwargs["fill_value"] = fill_value

    if mask is not np.ma.nomask:
        mask = asanyarray(mask)
        if mask.size == 1:
            mask = mask.reshape((1,) * data.ndim)
        elif data.shape != mask.shape:
            raise np.ma.MaskError(
                "Mask and data not compatible: data shape "
                "is %s, and mask shape is "
                "%s." % (repr(data.shape), repr(mask.shape))
            )
        arginds.extend([mask, inds])

    if "dtype" in kwargs:
        kwargs["masked_dtype"] = kwargs["dtype"]
    else:
        kwargs["dtype"] = data.dtype

    return blockwise(_masked_array, *arginds, **kwargs)


def _set_fill_value(x, fill_value):
    if isinstance(x, np.ma.masked_array):
        x = x.copy()
        np.ma.set_fill_value(x, fill_value=fill_value)
    return x


@derived_from(np.ma)
def set_fill_value(a, fill_value):
    a = asanyarray(a)
    if getattr(fill_value, "shape", ()):
        raise ValueError("da.ma.set_fill_value doesn't support array `value`s")
    fill_value = np.ma.core._check_fill_value(fill_value, a.dtype)
    res = a.map_blocks(_set_fill_value, fill_value)
    a.dask = res.dask
    a.name = res.name


@derived_from(np.ma)
def average(a, axis=None, weights=None, returned=False):
    return _average(a, axis, weights, returned, is_masked=True)
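# --- Usage sketch (added for illustration; not part of the original module) ---
# Exercises a few of the wrappers above on a small dask array. Everything is
# lazy until .compute(); chunk sizes and values are arbitrary.
if __name__ == "__main__":
    import dask.array as da

    x = da.arange(10, chunks=5)
    m = masked_greater(x, 6)           # mask values > 6, block by block
    m = masked_inside(m, 2, 3)         # additionally mask 2 <= x <= 3
    print(filled(m, -1).compute())     # -> [ 0  1 -1 -1  4  5  6 -1 -1 -1]
    print(getmaskarray(m).compute())   # boolean mask, same chunking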
{ "repo_name": "blaze/dask", "path": "dask/array/ma.py", "copies": "3", "size": "7376", "license": "bsd-3-clause", "hash": 3103686533373151000, "line_mean": 27.2605363985, "line_max": 82, "alpha_frac": 0.6095444685, "autogenerated": false, "ratio": 3.0229508196721313, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5132495288172131, "avg_score": null, "num_lines": null }
from functools import wraps

import numpy as np

from imagepipe.raw_functions import f_3d_stack_2d_filter, f_2d_stack_2d_filter
from imagepipe.tools.helpers import safe_dir_create, PipeArgError, list_not_string

safe_dir_create('verification')  # guarantees existence, although might not be the best location to do it


def doublewrap(f):
    """
    A decorator decorator, allowing the decorator to be used as:
    @decorator(with, arguments, and=kwargs)
    or
    @decorator
    credits: http://stackoverflow.com/questions/653368/how-to-create-a-python-decorator-that-can-be-used-either-with-or-without-paramet
    """
    @wraps(f)
    def new_dec(*args, **kwargs):
        if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
            # actual decorated function
            return f(args[0])
        else:
            # decorator arguments
            return lambda realf: f(realf, *args, **kwargs)

    return new_dec


@doublewrap
def generator_wrapper(my_function, in_dims=(3,), out_dims=None):

    if out_dims is None:
        out_dims = in_dims

    @wraps(my_function)
    def inner_wrapper(*args, **kwargs):
        """
        Converts a function to accept a generator of named dicts and adds channel selection logic
        """
        iterator = args[0]
        args = args[1:]

        print my_function.__name__

        if 'in_channel' in kwargs:
            # Start in/out channel logic
            in_chan = kwargs['in_channel']  # Multiple arguments
            del kwargs['in_channel']

            if not list_not_string(in_chan):  # convert string to list of strings
                in_chan = [in_chan]

            if 'out_channel' in kwargs:
                out_chan = kwargs['out_channel']  # output explicitly provided
                del kwargs['out_channel']

                if not list_not_string(out_chan):  # convert string to list of strings
                    out_chan = [out_chan]

            else:  # implicit output, bound to in_channel only if a single input is provided
                if len(in_chan) == 1:
                    print 'Input %s will be overwritten by function %s' % (in_chan[0], my_function.__name__)
                    out_chan = in_chan
                else:
                    print my_function.__name__
                    print in_chan, in_dims
                    raise PipeArgError('Please provide an out_channel argument')

            if len(in_chan) != len(in_dims):
                print my_function.__name__
                print in_chan, in_dims
                print len(in_chan), len(in_dims)
                raise PipeArgError('%s inbound channels are piped, function allows %s' % (len(in_chan), len(in_dims)))

            if len(out_chan) != len(out_dims):
                print my_function.__name__
                print out_chan, out_dims
                print len(out_chan), len(out_dims)
                raise PipeArgError('%s outbound channels are piped, function allows %s' % (len(out_chan), len(out_dims)))
            # end in/out channel logic

            for name_space in iterator:
                # start args prepare
                print "pulling function %s" % my_function.__name__
                print args, kwargs
                args_puck = []

                for i, chan in enumerate(in_chan):
                    if in_dims[i] and len(name_space[chan].shape) != in_dims[i]:
                        print my_function.__name__
                        print chan, len(name_space[chan].shape), in_dims[i]
                        raise PipeArgError('Mismatched inbound channel dimension: %s is of dim %s, expected %s' %
                                           (chan, len(name_space[chan].shape), in_dims[i]))
                    args_puck.append(name_space[chan])

                local_args = tuple(args_puck) + args
                # end args prepare
                print "local args ready"
                print my_function.__name__
                return_puck = my_function(*local_args, **kwargs)
                print "return puck ready"

                if return_puck is None and out_chan[0] == '_':
                    print my_function.__name__, "yields"
                    yield name_space  # unlike return, yield is probably non-blocking....

                else:
                    # start output prepare
                    if not isinstance(return_puck, tuple):
                        return_puck = (return_puck, )

                    for i, chan in enumerate(out_chan):
                        if out_dims[i] and len(return_puck[i].shape) != out_dims[i]:
                            print my_function.__name__
                            print chan
                            raise PipeArgError('Mismatched outgoing channel dimension: %s is of dim %s, expected %s' %
                                               (chan, len(return_puck[i].shape), out_dims[i]))
                        if chan != '_':
                            name_space[chan] = return_puck[i]
                    # end output prepare
                    print my_function.__name__, "yields"
                    yield name_space

        else:
            for name_space in iterator:
                local_args = (name_space, ) + args
                name_space = my_function(*local_args, **kwargs)
                print my_function.__name__, "yields"
                yield name_space

    return inner_wrapper


def pad_skipping_iterator(secondary_namespace):
    for key, value in secondary_namespace.iteritems():
        if key != '_pad':
            yield value


def splitter(outer_generator, to, sources, mask):
    """
    Creates a secondary namespace by using mask as a pad to keep only certain
    segments of the sources

    :param outer_generator:
    :param to:
    :param sources:
    :param mask:
    :return:
    """
    for primary_namespace in outer_generator:
        primary_namespace[to] = {}
        unique_vals = np.unique(primary_namespace[mask])
        unique_vals = unique_vals[unique_vals > 0]

        primary_namespace[to]['_pad'] = (unique_vals, primary_namespace[mask])  # used to rebuild padded images

        for val in unique_vals:
            secondary_namespace = {}
            primary_namespace[to][val] = secondary_namespace

            for chan in sources:
                local_mask = primary_namespace[mask] == val
                if len(primary_namespace[chan].shape) == 2:
                    base_chan = f_2d_stack_2d_filter(primary_namespace[chan], local_mask)
                elif len(primary_namespace[chan].shape) == 3:
                    base_chan = f_3d_stack_2d_filter(primary_namespace[chan], local_mask)
                else:
                    raise PipeArgError('masking impossible: dimensions do not match; base channel %s is of dim %s' %
                                       (chan, len(primary_namespace[chan].shape)))

                secondary_namespace[chan] = base_chan

        yield primary_namespace


def for_each(outer_generator, embedded_transformer, inside, **kwargs):
    for primary_namespace in outer_generator:
        secondary_generator = embedded_transformer(pad_skipping_iterator(primary_namespace[inside]), **kwargs)
        for i, _ in enumerate(secondary_generator):  # forces the secondary generator to evaluate
            pass
        yield primary_namespace


def paint_from_mask(outer_generator, based_on, in_anchor, out_channel=None):
    if out_channel is None:
        out_channel = in_anchor

    for primary_namespace in outer_generator:
        secondary_namespace = primary_namespace[based_on]
        mask = secondary_namespace['_pad'][1]
        mask_values = secondary_namespace['_pad'][0]
        accumulator = np.zeros_like(mask)

        for i, unique_value in enumerate(mask_values):
            if i == 0:
                accumulator = accumulator.astype(secondary_namespace[unique_value][in_anchor].dtype)
            accumulator[mask == unique_value] = secondary_namespace[unique_value][in_anchor]

        primary_namespace[out_channel] = accumulator
        yield primary_namespace


def tile_from_mask(outer_generator, based_on, in_anchor, out_channel=None):
    if out_channel is None:
        out_channel = in_anchor

    for primary_namespace in outer_generator:
        secondary_namespace = primary_namespace[based_on]
        mask = secondary_namespace['_pad'][1]
        mask_values = secondary_namespace['_pad'][0]
        accumulator = np.zeros_like(mask)

        for i, unique_value in enumerate(mask_values):
            accumulator = accumulator.astype(secondary_namespace[unique_value][in_anchor].dtype)
            accumulator[mask == unique_value] = secondary_namespace[unique_value][in_anchor][mask == unique_value]

        primary_namespace[out_channel] = accumulator
        yield primary_namespace
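# --- Usage sketch (added for illustration; not part of the original module) ---
# Shows the named-dict pipeline style the wrapper enforces: each stage pulls
# `in_channel` arrays out of the namespace dict, runs the raw function, and
# writes the result back under `out_channel`. `invert` is a hypothetical
# filter, not an imagepipe built-in.
if __name__ == '__main__':
    @generator_wrapper(in_dims=(2,), out_dims=(2,))
    def invert(channel_2d):
        return channel_2d.max() - channel_2d

    namespaces = [{'name': 'img-0', 'raw': np.random.rand(8, 8)}]
    pipeline = invert(iter(namespaces), in_channel='raw', out_channel='inverted')
    for namespace in pipeline:
        print namespace['name'], namespace['inverted'].shape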
{ "repo_name": "chiffa/Image_pipe", "path": "imagepipe/core_functions.py", "copies": "1", "size": "8930", "license": "bsd-3-clause", "hash": 7053279866917215000, "line_mean": 37.3261802575, "line_max": 135, "alpha_frac": 0.5617021277, "autogenerated": false, "ratio": 4.270683883309421, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.005962604954767588, "num_lines": 233 }
from functools import wraps

import numpy as np

from menpo.image import BooleanImage, Image, MaskedImage
from menpo.transform import NonUniformScale, Translation


def lm_centres_correction(centres):
    r"""
    Construct a transform that will correct landmarks for a window-iterating
    feature calculation

    Parameters
    ----------
    centres : `ndarray` (H, W, 2)
        The location of the window centres in the features

    Returns
    -------
    :map:`Affine`
        An affine transform that performs the correction.
        Should be applied to the landmarks on the target image.
    """
    t = Translation(-centres.min(axis=0).min(axis=0), skip_checks=True)
    step_v = centres[0, 0, 0]
    if centres.shape[0] > 1:
        step_v = centres[1, 0, 0] - centres[0, 0, 0]
    step_h = centres[0, 0, 1]
    if centres.shape[1] > 1:
        step_h = centres[0, 1, 1] - centres[0, 0, 1]
    s = NonUniformScale((1.0 / step_v, 1.0 / step_h), skip_checks=True)
    return t.compose_before(s)


def sample_mask_for_centres(mask, centres):
    r"""
    Sample a boolean mask at the given window centres

    Parameters
    ----------
    mask : `ndarray`
        The boolean mask to sample from.
    centres : `ndarray` (H, W, 2)
        The location of the window centres at which the mask is sampled.

    Returns
    -------
    :map:`BooleanImage`
        The sampled mask values, wrapped as a BooleanImage.
    """
    return BooleanImage(mask[centres[..., 0], centres[..., 1]], copy=False)


def rebuild_feature_image(image, f_pixels):
    shape_changed = f_pixels.shape[1:] != image.shape
    if hasattr(image, "mask"):
        # original image had a mask. Did the feature generate an image of the
        # same size?
        if shape_changed:
            # feature is of a different size - best we can do is rescale the
            # mask
            mask = image.mask.resize(f_pixels.shape[1:])
        else:
            # feature is same size as input
            mask = image.mask.copy()
        new_image = MaskedImage(f_pixels, mask=mask, copy=False)
    else:
        new_image = Image(f_pixels, copy=False)
    if image.has_landmarks:
        if shape_changed:  # need to adjust the landmarks
            sf = np.array(f_pixels.shape[1:]) / np.array(image.shape)
            new_image.landmarks = NonUniformScale(sf).apply(image.landmarks)
        else:
            new_image.landmarks = image.landmarks
    return new_image


def rebuild_feature_image_with_centres(image, f_pixels, centres):
    if hasattr(image, "mask"):
        mask = sample_mask_for_centres(image.mask.mask, centres)
        new_image = MaskedImage(f_pixels, mask=mask, copy=False)
    else:
        new_image = Image(f_pixels, copy=False)
    if image.has_landmarks:
        t = lm_centres_correction(centres)
        new_image.landmarks = t.apply(image.landmarks)
    return new_image


def imgfeature(wrapped):
    @wraps(wrapped)
    def wrapper(image, *args, **kwargs):
        if isinstance(image, np.ndarray):
            # ndarray supplied to Image feature - build a
            # temp image for it and just return the pixels
            image = Image(image, copy=False)
            return wrapped(image, *args, **kwargs).pixels
        else:
            return wrapped(image, *args, **kwargs)

    return wrapper


def ndfeature(wrapped):
    @wraps(wrapped)
    def wrapper(image, *args, **kwargs):
        if not isinstance(image, np.ndarray):
            # Image supplied to ndarray feature -
            # extract pixels and go
            feature = wrapped(image.pixels, *args, **kwargs)
            return rebuild_feature_image(image, feature)
        else:
            return wrapped(image, *args, **kwargs)

    return wrapper


def winitfeature(wrapped):
    @wraps(wrapped)
    def wrapper(image, *args, **kwargs):
        if not isinstance(image, np.ndarray):
            # Image supplied to ndarray feature -
            # extract pixels and go
            feature, centres = wrapped(image.pixels, *args, **kwargs)
            return rebuild_feature_image_with_centres(image, feature, centres)
        else:
            # user just supplied ndarray - give them ndarray back
            return wrapped(image, *args, **kwargs)[0]

    return wrapper
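# --- Usage sketch (added for illustration; not part of the original module) ---
# A toy feature written against raw ndarrays: the `ndfeature` decorator lets
# the same function accept a menpo Image, rebuilding the Image (mask and
# landmarks included) around the new pixels. `double_feature` is hypothetical.
if __name__ == "__main__":

    @ndfeature
    def double_feature(pixels):
        return pixels * 2.0

    arr = np.random.rand(1, 16, 16)  # channels-first, as menpo stores pixels
    print(double_feature(arr).shape)         # ndarray in -> ndarray out
    print(type(double_feature(Image(arr))))  # Image in -> Image out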
{ "repo_name": "menpo/menpo", "path": "menpo/feature/base.py", "copies": "2", "size": "4258", "license": "bsd-3-clause", "hash": -382021686838935550, "line_mean": 31.7538461538, "line_max": 78, "alpha_frac": 0.6183654298, "autogenerated": false, "ratio": 3.805183199285076, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5423548629085075, "avg_score": null, "num_lines": null }
from functools import wraps import numpy as np class LossFunction: """Defines a loss function that maps nonlinearity activations to error.""" def loss(self, activities, targets): """Computes the loss for each unit in the network. Note that most loss functions are only based on the output of the final layer, activities[-1]. However, we pass the activities of all layers here so that loss functions can include things like sparsity constraints. Targets, however, are only defined for the output layer. Targets can be defined as ``np.nan``, which will be translated into zero error. :param list activities: output activations of each layer :param targets: target activation values for last layer :type targets: :class:`~numpy:numpy.ndarray` """ raise NotImplementedError() def d_loss(self, activities, targets): """First derivative of loss function (with respect to activities).""" raise NotImplementedError() def d2_loss(self, activities, targets): """Second derivative of loss function (with respect to activities).""" raise NotImplementedError() def batch_loss(self, activities, targets): """Utility function to compute a single loss value for the network (taking the mean across batches and summing across and within layers). """ losses = self.loss(activities, targets) return np.sum([np.true_divide(np.sum(l), l.shape[0]) for l in losses if l is not None]) def output_loss(func): """Convenience decorator that takes a loss defined for the output layer and converts it into the more general form in terms of all layers.""" @wraps(func) def wrapped_loss(self, activities, targets): result = [None for _ in activities[:-1]] result += [func(self, activities[-1], targets)] return result return wrapped_loss class SquaredError(LossFunction): """Squared error :math:`\\frac{1}{2} \\sum(output - target)^2` """ @output_loss def loss(self, output, targets): return np.sum(np.nan_to_num(output - targets) ** 2, axis=tuple(range(1, output.ndim))) / 2 @output_loss def d_loss(self, output, targets): return np.nan_to_num(output - targets) @output_loss def d2_loss(self, output, _): return np.ones_like(output) class CrossEntropy(LossFunction): """Cross-entropy error :math:`-\\sum(target * log(output))` """ @output_loss def loss(self, output, targets): return -np.sum(np.nan_to_num(targets) * np.log(output), axis=tuple(range(1, output.ndim))) @output_loss def d_loss(self, output, targets): return -np.nan_to_num(targets) / output @output_loss def d2_loss(self, output, targets): return np.nan_to_num(targets) / output ** 2 class ClassificationError(LossFunction): """Classification error :math:`argmax(output) \\neq argmax(target)` Note: ``d_loss`` and ``d2_loss`` are not defined; classification error should only be used for validation, which doesn't require either. """ @output_loss def loss(self, output, targets): return np.logical_and(np.argmax(output, axis=-1) != np.argmax(targets, axis=-1), np.logical_not(np.isnan(np.sum(targets, axis=-1)))) class StructuralDamping(LossFunction): """Applies structural damping, which penalizes layers for having highly variable output activity. Note: this is not exactly the same as the structural damping in Martens (2010), because it is applied on the output side of the nonlinearity (meaning that this error will be filtered through ``d_activations`` during the backwards propagation). 
:param float weight: scale on structural damping relative to other losses :param list layers: indices specifying which layers will have the damping applied (defaults to all except first/last layers) :param optimizer: if provided, the weight on structural damping will be scaled relative to the ``damping`` attribute in the optimizer (so that any processes dynamically adjusting the damping during the optimization will also affect the structural damping) :type optimizer: :class:`~hessianfree.optimizers.Optimizer` """ def __init__(self, weight, layers=None, optimizer=None): self.weight = weight self.layers = (np.index_exp[1:-1] if layers is None else np.asarray(layers)) self.opt = optimizer def loss(self, activities, _): return [None for _ in activities] def d_loss(self, activities, _): return [None for _ in activities] def d2_loss(self, activities, _): opt_damp = 1 if self.opt is None else getattr(self.opt, "damping", 1) d2_loss = [None for _ in activities] for l in np.arange(len(activities))[self.layers]: d2_loss[l] = np.ones_like(activities[l]) * self.weight * opt_damp return d2_loss class SparseL1(LossFunction): """Imposes L1 sparsity constraint on nonlinearity activations. :param float weight: relative weight of sparsity constraint :param list layers: indices specifying which layers will have the sparsity constraint applied (defaults to all except first/last layers) :param float target: target activation level for nonlinearities """ def __init__(self, weight, layers=None, target=0.0): # TODO: is it valid to apply L1 sparsity to HF, given that CG is meant # to optimize quadratic loss functions? self.weight = weight self.layers = np.index_exp[1:-1] if layers is None else layers self.target = target def loss(self, activities, _): loss = [None for _ in activities] for l in np.arange(len(activities))[self.layers]: loss[l] = self.weight * np.abs(activities[l] - self.target) return loss def d_loss(self, activities, _): d_loss = [None for _ in activities] for l in np.arange(len(activities))[self.layers]: d_loss[l] = self.weight * ((activities[l] > self.target) * 2 - 1) return d_loss def d2_loss(self, activities, _): return [None for _ in activities] class SparseL2(LossFunction): """Imposes L2 sparsity constraint on nonlinearity activations. 
:param float weight: relative weight of sparsity constraint :param list layers: indices specifying which layers will have the sparsity constraint applied (defaults to all except first/last layers) :param float target: target activation level for nonlinearities """ # note: this is similar to structural damping, except we also include it # in the first derivative # TODO: test how well this works relative to standard structural damping def __init__(self, weight, layers=None, target=0.0): self.weight = weight self.layers = np.index_exp[1:-1] if layers is None else layers self.target = target def loss(self, activities, _): loss = [None for _ in activities] for l in np.arange(len(activities))[self.layers]: loss[l] = 0.5 * self.weight * (activities[l] - self.target) ** 2 return loss def d_loss(self, activities, _): d_loss = [None for _ in activities] for l in np.arange(len(activities))[self.layers]: d_loss[l] = self.weight * (activities[l] - self.target) return d_loss def d2_loss(self, activities, _): d2_loss = [None for _ in activities] for l in np.arange(len(activities))[self.layers]: d2_loss[l] = np.ones_like(activities[l]) * self.weight return d2_loss class LossSet(LossFunction): """Combines several loss functions into one (e.g., combining :class:`SquaredError` and :class:`SparseL2`). It doesn't need to be created directly; a list of loss functions can be passed to :class:`.FFNet`/:class:`.RNNet` and a LossSet will be created automatically. :param list set: list of :class:`LossFunction`""" def __init__(self, set): self.set = set def group_func(self, func_name, activities, targets): """Computes the given function for each :class:`LossFunction` in the set, and sums the result.""" # apply each of the loss functions result = [getattr(s, func_name)(activities, targets) for s in self.set] # sum the losses for each layer across the loss functions result = [np.sum([s[i] for s in result if s[i] is not None], axis=0) for i in range(len(activities))] # convert 0.0's (from np.sum([])) back to None result = [None if (isinstance(x, float) and x == 0.0) else x for x in result] return result def loss(self, activities, targets): return self.group_func("loss", activities, targets) def d_loss(self, activities, targets): return self.group_func("d_loss", activities, targets) def d2_loss(self, activities, targets): return self.group_func("d2_loss", activities, targets)
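# --- Usage sketch (added for illustration; not part of the original module) ---
# Combines a primary loss with a sparsity penalty, the way FFNet/RNNet wrap a
# list of losses in a LossSet, and evaluates it on fake activities for a
# three-layer network (batch of 5). All shapes and weights are arbitrary.
if __name__ == "__main__":
    loss = LossSet([SquaredError(), SparseL2(0.1, layers=[1])])
    activities = [np.random.rand(5, 4),   # input layer
                  np.random.rand(5, 8),   # hidden layer (sparsity applied)
                  np.random.rand(5, 2)]   # output layer
    targets = np.random.rand(5, 2)
    print(loss.batch_loss(activities, targets))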
{ "repo_name": "drasmuss/hessianfree", "path": "hessianfree/loss_funcs.py", "copies": "1", "size": "9402", "license": "bsd-2-clause", "hash": 5645156485957538000, "line_mean": 34.2134831461, "line_max": 78, "alpha_frac": 0.6356094448, "autogenerated": false, "ratio": 4.0736568457538995, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.52092662905539, "avg_score": null, "num_lines": null }
from functools import wraps

import numpy as np


def stvariogram(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        st = args[0]
        if st.ndim == 2:
            new_args = args[1:]
            mapping = map(lambda lags: func(lags, *new_args, **kwargs), st)
            return np.fromiter(mapping, dtype=float)
        else:
            return func(*args, **kwargs)
    return wrapper


@stvariogram
def sum(lags, Vx, Vt):
    r"""Sum space-time model

    Separable space-time variogram model. This is the most basic model, as
    the two marginal models of the space and time axes are simply summed up
    for each lag pair. Further, there are no fitting parameters.
    Please consider the notes before using this model.

    Parameters
    ----------
    lags : tuple
        Tuple of the space (x) and time (t) lag given as tuple: (x, t) which
        will be used to calculate the dependent semivariance.
    Vx : skgstat.Variogram.fitted_model
        instance of the space marginal variogram with a fitted theoretical
        model sufficiently describing the marginal. If this model does not fit
        the experimental variogram, the space-time model fit will be poor as
        well.
    Vt : skgstat.Variogram.fitted_model
        instance of the time marginal variogram with a fitted theoretical
        model sufficiently describing the marginal. If this model does not fit
        the experimental variogram, the space-time model fit will be poor as
        well.

    Returns
    -------
    gamma : float
        The semi-variance modeled for the given lags.

    Notes
    -----
    This model is implemented like:

    .. math::
        \gamma (h,t) = \gamma_x (h) + \gamma_t (t)

    Where :math:`\gamma_x(h)` is the spatial marginal variogram and
    :math:`\gamma_t(t)` is the temporal marginal variogram.

    It is almost never a good idea to use this model, as it assumes the
    covariance field to be isotropic in the space and time direction, which
    will hardly be true. Further, it might not be strictly definite as shown
    by [7]_, [8]_, [9]_.

    References
    ----------
    .. [7] Myers, D. E., Journel, A. (1990), Variograms with Zonal
       Anisotropies and Non-Invertible Kriging Systems. Mathematical Geology
       22, 779-785.

    .. [8] Dimitrakopoulos, R. and Lou, X. (1994), Spatiotemporal modeling:
       covariances and ordinary kriging systems, in R. Dimitrakopoulos, (ed.)
       Geostatistics for the next century, Kluwer Academic Publishers,
       Dordrecht 88-93.

    """
    h, t = lags
    return Vx(h) + Vt(t)


@stvariogram
def product(lags, Vx, Vt, Cx, Ct):
    r"""Product model

    Separable space-time variogram model. This model is based on the product
    of the marginal space and time models.

    Parameters
    ----------
    lags : tuple
        Tuple of the space (x) and time (t) lag given as tuple: (x, t) which
        will be used to calculate the dependent semivariance.
    Vx : skgstat.Variogram.fitted_model
        instance of the space marginal variogram with a fitted theoretical
        model sufficiently describing the marginal. If this model does not fit
        the experimental variogram, the space-time model fit will be poor as
        well.
    Vt : skgstat.Variogram.fitted_model
        instance of the time marginal variogram with a fitted theoretical
        model sufficiently describing the marginal. If this model does not fit
        the experimental variogram, the space-time model fit will be poor as
        well.
    Cx : float
        Marginal space sill.
    Ct : float
        Marginal time sill.

    Returns
    -------
    gamma : float
        The semi-variance modeled for the given lags.

    Notes
    -----
    The product model is implemented following [14]_:

    .. math::
        \gamma (h,t) = C_x * \gamma_t(t) + C_t * \gamma_x(h) - \gamma_x(h) * \gamma_t(t)

    Where :math:`\gamma_x(h)` is the spatial marginal variogram and
    :math:`\gamma_t(t)` is the temporal marginal variogram.

    References
    ----------
    .. [14] De Cesare, L., Myers, D., and Posa, D. (2001b), FORTRAN 77
       programs for space-time modeling, Computers & Geosciences 28,
       205-212.

    """
    h, t = lags
    return Cx * Vt(t) + Ct * Vx(h) - Vx(h) * Vt(t)


@stvariogram
def product_sum(lags, Vx, Vt, k1, k2, k3, Cx, Ct):
    r"""Product-Sum space-time model

    Separable space-time variogram model, based on a combination of the
    'sum' and 'product' models. Both base models are based on separated
    marginal variograms for the space and time axis.

    Parameters
    ----------
    lags : tuple
        Tuple of the space (x) and time (t) lag given as tuple: (x, t) which
        will be used to calculate the dependent semivariance.
    Vx : skgstat.Variogram.fitted_model
        instance of the space marginal variogram with a fitted theoretical
        model sufficiently describing the marginal. If this model does not fit
        the experimental variogram, the space-time model fit will be poor as
        well.
    Vt : skgstat.Variogram.fitted_model
        instance of the time marginal variogram with a fitted theoretical
        model sufficiently describing the marginal. If this model does not fit
        the experimental variogram, the space-time model fit will be poor as
        well.
    k1 : float
        Fitting parameter. k1 has to be positive or zero and may not be
        larger than all marginal sill values.
    k2 : float
        Fitting parameter. k2 has to be positive or zero and may not be
        larger than all marginal sill values.
    k3 : float
        Fitting parameter. k3 has to be positive and may not be larger than
        all marginal sill values.
    Cx : float
        Marginal space sill.
    Ct : float
        Marginal time sill.

    Returns
    -------
    gamma : float
        The semi-variance modeled for the given lags.

    Notes
    -----
    This model implements the product-sum model as suggested by
    De Cesare et al. [15]_, [16]_:

    .. math::
        \gamma_{ST}(h_s, h_t) = [k_1 C_T(0) + k_2] \gamma_S(h_s) +
        [k_1 C_S(0) + k_3] \gamma_T(h_t) - k_1 \gamma_S(h_s) \times \gamma_T(h_t)

    References
    ----------
    .. [15] De Cesare, L., Myers, D. and Posa, D. (2001a), Product-sum
       covariance for space-time modeling, Environmetrics 12, 11-23.

    .. [16] De Cesare, L., Myers, D., and Posa, D. (2001b), FORTRAN 77
       programs for space-time modeling, Computers & Geosciences 28,
       205-212.

    """
    h, t = lags
    return (k2 + k1*Ct)*Vx(h) + (k3 + k1*Cx) * Vt(t) - k1 * Vx(h) * Vt(t)
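# --- Usage sketch (added for illustration; not part of the original module) ---
# Evaluates the models above with two toy spherical marginals standing in for
# fitted models. In practice Vx and Vt come from skgstat's
# Variogram.fitted_model; the definitions below are illustrative only.
if __name__ == '__main__':
    def vx(h, r=10., c=1.):    # spherical space marginal: range 10, sill 1
        h = np.minimum(h, r)
        return c * (1.5 * h / r - 0.5 * (h / r) ** 3)

    def vt(t, r=5., c=0.5):    # spherical time marginal: range 5, sill 0.5
        t = np.minimum(t, r)
        return c * (1.5 * t / r - 0.5 * (t / r) ** 3)

    lags = np.array([[2., 1.], [8., 4.]])  # (space, time) lag pairs
    print(sum(lags, vx, vt))
    print(product(lags, vx, vt, Cx=1., Ct=0.5))
    print(product_sum(lags, vx, vt, k1=0.5, k2=0.25, k3=0.25, Cx=1., Ct=0.5))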
{ "repo_name": "mmaelicke/scikit-gstat", "path": "skgstat/stmodels.py", "copies": "1", "size": "6628", "license": "mit", "hash": -5588692340878507000, "line_mean": 33.7015706806, "line_max": 89, "alpha_frac": 0.6332226916, "autogenerated": false, "ratio": 3.479265091863517, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4612487783463517, "avg_score": null, "num_lines": null }
from functools import wraps

import numpy
import pyximport.pyximport


def cython_on_demand(unsafe):
    """
    Enables loading .pyx files from .py files (on-demand compilation).

    With `unsafe=True`, all Cython safety checks and compatibility options
    are deactivated (do not use without first testing that things work
    reliably).
    """
    if unsafe:
        _old_get_du_ext = pyximport.pyximport.get_distutils_extension

        @wraps(_old_get_du_ext)
        def _new_get_du_ext(*args, **kwargs):
            extension, setup_args = _old_get_du_ext(*args, **kwargs)
            directives = getattr(extension, 'cython_directives', {})
            directives.update({
                'language_level': 3,
                'boundscheck': False,
                'wraparound': False,
                'initializedcheck': False,
                'cdivision': True,
                'always_allow_keywords': False
            })
            extension.cython_directives = directives
            return extension, setup_args

        pyximport.pyximport.get_distutils_extension = _new_get_du_ext

    pyximport.install(setup_args={'include_dirs': numpy.get_include()})


# Needs to be executed before other imports.
cython_on_demand(unsafe=True)
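

# --- Usage sketch (the `fast_ops` module and its function are hypothetical;
# any sibling .pyx file works the same way) ----------------------------------
#
#   import cython_init   # runs cython_on_demand(unsafe=True) at import time
#   import fast_ops      # fast_ops.pyx is cythonized on first import
#   fast_ops.heavy_loop(10 ** 6)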
{ "repo_name": "wrwrwr/blackbox", "path": "cython_init.py", "copies": "1", "size": "1236", "license": "mit", "hash": -8291881390323234000, "line_mean": 32.4054054054, "line_max": 73, "alpha_frac": 0.6189320388, "autogenerated": false, "ratio": 4.106312292358804, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5225244331158804, "avg_score": null, "num_lines": null }
from functools import wraps

import numpy

from theano import scalar as scal, Constant
from theano.gof import local_optimizer
from theano.tensor import (DimShuffle, get_scalar_constant_value,
                           NotScalarConstantError)

from .basic_ops import GpuFromHost, HostFromGpu, GpuAllocEmpty

from .elemwise import GpuDimShuffle, GpuElemwise

_one = scal.constant(numpy.asarray(1.0, dtype='float64'))


def grab_cpu_scalar(v, nd):
    """
    Get a scalar variable value from the tree at `v`.

    This function will dig through transfers and dimshuffles to get
    the constant value. If no such constant is found, it returns None.

    Parameters
    ----------
    v
        Theano variable to extract the constant value from.
    nd : int
        Expected number of dimensions for the variable (for
        broadcasted constants).

    """
    if v.owner is not None:
        n = v.owner
        if (isinstance(n.op, (GpuDimShuffle, DimShuffle)) and
                n.op.new_order == ('x',) * nd):
            return grab_cpu_scalar(n.inputs[0], n.inputs[0].ndim)
        elif isinstance(n.op, (GpuFromHost, HostFromGpu)):
            return grab_cpu_scalar(n.inputs[0], nd)
        else:
            return None
    else:
        if (isinstance(v, Constant) and
                v.broadcastable == (True,) * nd):
            return v.dimshuffle(())


def find_node(v, cls, ignore_clients=False):
    """
    Find the node that has an op of type `cls` in `v`.

    This digs through possibly redundant transfers to find the node
    that has the type `cls`. If `ignore_clients` is False (the default)
    it will only dig through nodes that have a single client to avoid
    duplicating computations.

    Parameters
    ----------
    v
        The variable to dig through
    cls : Op class
        The type of the node we are looking for
    ignore_clients : bool, optional
        Whether to ignore multiple clients or not.

    """
    if v.owner is not None and (ignore_clients or len(v.clients) == 1):
        if isinstance(v.owner.op, cls):
            return v.owner
        elif (isinstance(v.owner.op, GpuFromHost) and
              v.owner.inputs[0].owner is not None and
              (ignore_clients or len(v.owner.inputs[0].clients) == 1) and
              isinstance(v.owner.inputs[0].owner.op, HostFromGpu)):
            return find_node(v.owner.inputs[0].owner.inputs[0], cls)
        else:
            return None


def is_equal(var, val):
    """
    Returns True if `var` is always equal to `val`.

    This will only return True if the variable will always be equal to
    the value. If it might not be true in some cases then it returns
    False.

    Parameters
    ----------
    var
        Variable to compare
    val
        Python value

    """
    try:
        v = get_scalar_constant_value(var)
        return v == val
    except NotScalarConstantError:
        return False


def alpha_merge(cls, alpha_in, beta_in):
    """
    Decorator to merge multiplication by a scalar on the output.

    This will find a pattern of `scal * <yourop>(some, params, alpha,
    beta)` and update it so that the scalar multiplication happens as
    part of your op.

    The op needs to accept an alpha and a beta scalar which act this way::

       out = Op() * alpha + out_like * beta

    Where out_like is a buffer that has the same size as the output and
    gets added to the "real" output of the operation. An example of an
    operation that respects this pattern is GEMM from blas.

    The decorated function must have this signature::

        maker(node, *inputs)

    The `node` argument you receive is the original apply node that
    contains your op. You should use it to grab relevant properties for
    your op so that the new version performs the same computation. The
    `*inputs` parameter contains the new inputs for your op. You MUST
    use those inputs instead of the ones on `node`.

    
Note that this function can be as simple as::

        def maker(node, *inputs):
            return node.op(*inputs)

    Parameters
    ----------
    cls : op class
        The class of the op you want to merge
    alpha_in : int
        The input index for the alpha scalar for your op (in node.inputs).
    beta_in : int
        The input index for the beta scalar for your op (in node.inputs).

    Returns
    -------
    local optimizer
        an unregistered local optimizer that has the same name as the
        decorated function.

    Notes
    -----
    This was factored out since the code to deal with intervening
    transfers and correctness in the presence of different values of
    alpha and beta scaling factors is not trivial.

    """
    def wrapper(maker):
        @local_optimizer([GpuElemwise])
        @wraps(maker)
        def opt(node):
            if (isinstance(node.op, GpuElemwise) and
                    node.op.scalar_op == scal.mul and
                    node.nin == 2):
                targ = find_node(node.inputs[0], cls)
                if targ is None:
                    targ = find_node(node.inputs[1], cls)
                    if targ is None:
                        return
                    lr = grab_cpu_scalar(node.inputs[0],
                                         nd=targ.outputs[0].ndim)
                else:
                    lr = grab_cpu_scalar(node.inputs[1],
                                         nd=targ.outputs[0].ndim)
                if lr is None or lr.dtype != targ.outputs[0].dtype:
                    return None
                inputs = list(targ.inputs)
                try:
                    c = get_scalar_constant_value(lr)
                    if c == 0:
                        inputs[alpha_in] = lr
                        inputs[beta_in] = lr
                    elif c == 1:
                        inputs[alpha_in] = targ.inputs[alpha_in]
                        inputs[beta_in] = targ.inputs[beta_in]
                    else:
                        inputs[alpha_in] = lr * targ.inputs[alpha_in]
                        inputs[beta_in] = lr * targ.inputs[beta_in]
                except NotScalarConstantError:
                    inputs[alpha_in] = lr * targ.inputs[alpha_in]
                    inputs[beta_in] = lr * targ.inputs[beta_in]
                return maker(targ, *inputs)
        return opt
    return wrapper


def output_merge(cls, alpha_in, beta_in, out_in):
    """
    Decorator to merge addition by a value on the output.

    This will find a pattern of `val + <yourop>(some, params, alpha,
    beta, out_like)` and update it so that the addition happens as part
    of your op.

    The op needs to accept an alpha and a beta scalar which act this way::

       out = Op() * alpha + out_like * beta

    Where out_like is a buffer that has the same size as the output and
    gets added to the "real" output of the operation. An example of an
    operation that respects this pattern is GEMM from blas.

    The decorated function must have this signature::

        maker(node, *inputs)

    The `node` argument you receive is the original apply node that
    contains your op. You should use it to grab relevant properties for
    your op so that the new version performs the same computation. The
    `*inputs` parameter contains the new inputs for your op. You MUST
    use those inputs instead of the ones on `node`.

    Note that this function can be as simple as::

        def maker(node, *inputs):
            return node.op(*inputs)

    Parameters
    ----------
    cls : op class
        The class of the op you want to merge
    alpha_in : int
        The input index for the alpha scalar for your op (in node.inputs).
    beta_in : int
        The input index for the beta scalar for your op (in node.inputs).
    out_in : int
        The input index for the out_like input for your op (in
        node.inputs).

    Returns
    -------
    local optimizer
        an unregistered local optimizer that has the same name as the
        decorated function.

    Notes
    -----
    This was factored out since the code to deal with intervening
    transfers and correctness in the presence of different values of
    alpha and beta scaling factors is not trivial.

    This also correctly handles the case where the added value is
    broadcasted (by not performing the replacement).

    
""" def wrapper(maker): @local_optimizer([GpuElemwise]) @wraps(maker) def opt(node): if (isinstance(node.op, GpuElemwise) and node.op.scalar_op == scal.add and node.nin == 2): targ = find_node(node.inputs[0], cls) W = node.inputs[1] if targ is None: targ = find_node(node.inputs[1], cls) W = node.inputs[0] if targ is None: return None if W.dtype != targ.outputs[0].dtype: return None if not is_equal(targ.inputs[beta_in], 0.0): # other cases are too complex for now return None if W.broadcastable != targ.inputs[out_in].broadcastable: # Would need to explicitly tile the output to fill # the full shape here. Disable for now. return None inputs = list(targ.inputs) inputs[out_in] = W inputs[beta_in] = _one.clone() return maker(targ, *inputs) return opt return wrapper def inplace_allocempty(op, idx): """ Wrapper to make an inplace optimization that deals with AllocEmpty This will duplicate the alloc input if it has more than one client to allow the op to work on it inplace. The decorated function must have this signature:: maker(node, inputs) The `node` argument you recieve is the original apply node that contains your op. You should use it to grab relevant properties for your op so that the new version performs the same computation. You should also switch the op to work inplace. The `*inputs` parameters contains the new inputs for your op. You MUST use those inputs instead of the ones on `node`. Note that this function can be as simple as:: def maker(node, inputs): return [node.op.__class__(inplace=True)(*inputs)] Parameters ---------- op : op class The op class to look for to make inplace idx : int The index of the (possibly) AllocEmpty input (in node.inputs). Returns ------- local optimizer an unregistered inplace local optimizer that has the same name as the decorated function. """ def wrapper(maker): @local_optimizer([op], inplace=True) @wraps(maker) def opt(node): if type(node.op) != op or node.op.inplace: return inputs = list(node.inputs) alloc = inputs[idx] if (alloc.owner and isinstance(alloc.owner.op, GpuAllocEmpty) and len(alloc.clients) > 1): alloc_op = GpuAllocEmpty(alloc.owner.op.dtype, alloc.owner.op.context_name) inputs[idx] = alloc_op(*alloc.owner.inputs) return maker(node, inputs) return opt return wrapper
{ "repo_name": "marcsans/cnn-physics-perception", "path": "phy/lib/python2.7/site-packages/theano/sandbox/gpuarray/opt_util.py", "copies": "4", "size": "11455", "license": "mit", "hash": 316731549541652000, "line_mean": 33.6072507553, "line_max": 76, "alpha_frac": 0.5874290703, "autogenerated": false, "ratio": 4.298311444652908, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6885740514952908, "avg_score": null, "num_lines": null }
from functools import wraps import pandas as pd def plot_dispatch(f): @wraps(f) def wrapper(self, **kwargs): return self(kind=f.__name__, **kwargs) wrapper.__doc__ = """Used to mimic the `pandas Series plotting API <http://pandas.pydata.org/pandas-docs/stable/api.html#plotting>`_ for {} plots. See `the documentation of pandas.Series.plot <http://pandas.pydata.org/pandas-docs/stable/generated/pandas.Series.plot.html#pandas.Series.plot>`_ for details on keyword arguments.""".format( f.__name__.capitalize() if f.__name__ != "hist" else "Histogram") return wrapper class Plotter(object): def __init__(self, column): self.column = column @plot_dispatch def area(self, **kwargs): pass @plot_dispatch def bar(self, **kwargs): pass @plot_dispatch def barh(self, **kwargs): pass @plot_dispatch def box(self, **kwargs): pass @plot_dispatch def density(self, **kwargs): pass @plot_dispatch def hist(self, **kwargs): pass @plot_dispatch def kde(self, **kwargs): pass @plot_dispatch def line(self, **kwargs): pass @plot_dispatch def pie(self, **kwargs): pass def __call__(self, kind="line", **kwargs): data = pd.Series(self.column.head("all")) return data.plot(kind=kind, **kwargs)
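

# --- Usage sketch (illustration only; assumes a pg_utils column object) ------
# Every generated method simply forwards to pandas' Series.plot with the
# matching `kind`, so pandas keyword arguments pass straight through:
#
#   plotter = Plotter(column)      # `column` provides the data via .head("all")
#   plotter.hist(bins=30)          # -> Series.plot(kind='hist', bins=30)
#   plotter.bar(title='counts')    # -> Series.plot(kind='bar', title='counts')
#   plotter(kind='line')           # calling the instance directly also works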
{ "repo_name": "jackmaney/pg-utils", "path": "pg_utils/column/plot/__init__.py", "copies": "1", "size": "1330", "license": "mit", "hash": -8542165187824364000, "line_mean": 24.5769230769, "line_max": 341, "alpha_frac": 0.6345864662, "autogenerated": false, "ratio": 3.614130434782609, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9748435749385668, "avg_score": 0.00005623031938821412, "num_lines": 52 }
from functools import wraps import pytest from overloading import * from overloading import OverloadingError from test_overloading import * @overload @decorated(2) @decorated(1) def f(*args): return ('default',) @overload @decorated(4) @decorated(3) def f(foo, bar:int): return ('any', 'int') @overload @decorated(5) def f(foo:int, bar): return ('int', 'any') for _ in range(rounds): assert f(a, b, c) == ('default', 1, 2) assert f(a, 2) == ('any', 'int', 3, 4) assert f(1, b) == ('int', 'any', 5) @decorated(2) @overload def g(*args): return ('default',) @overload @decorated(1) def g(foo, bar:int): return ('any', 'int') @decorated(3) @overload def g(foo:int, bar): return ('int', 'any') for _ in range(rounds): assert g(a, b, c) == ('default', 3) assert g(a, 2) == ('any', 'int', 1, 3) assert g(1, b) == ('int', 'any', 3) def bad_decorator(func): # no `wraps` def wrapper(*args): return func(*args) + (id,) return wrapper with pytest.raises(OverloadingError): @overload @bad_decorator def q(): pass
{ "repo_name": "bintoro/overloading.py", "path": "tests/_test_decorated.py", "copies": "1", "size": "1116", "license": "mit", "hash": 8121240559360376000, "line_mean": 16.7142857143, "line_max": 45, "alpha_frac": 0.585125448, "autogenerated": false, "ratio": 2.8469387755102042, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.39320642235102043, "avg_score": null, "num_lines": null }
from functools import wraps import redis from django.conf import settings from django.http import HttpRequest from redis_ratelimit.exceptions import RateLimited from redis_ratelimit.utils import parse_rate, build_redis_key def is_rate_limited(request, rate=None): if not rate: return False count, seconds = parse_rate(rate) redis_key = build_redis_key(request, count, seconds) db_url = getattr(settings, 'REDIS_RATELIMIT_DB_URL', "redis://localhost:6379/0") r = redis.from_url(db_url) current = r.get(redis_key) if current: current = int(current.decode('utf-8')) if current >= count: return True value = r.incr(redis_key) if value == 1: r.expire(redis_key, seconds) return False def ratelimit(rate=None): def decorator(f): @wraps(f) def decorated_function(*args, **kwargs): # CBV support if isinstance(args[0], HttpRequest): request = args[0] else: request = args[1] if is_rate_limited(request, rate=rate): raise RateLimited("Too Many Requests") return f(*args, **kwargs) return decorated_function return decorator
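

# --- Usage sketch (the '5/s' rate string is an assumption; the accepted
# formats are defined by parse_rate) ------------------------------------------
#
#   @ratelimit(rate='5/s')
#   def index(request):
#       return HttpResponse('ok')
#
# Exceeding the limit raises RateLimited, which middleware or a custom
# exception handler can turn into an HTTP 429 response.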
{ "repo_name": "r00m/django-redis-ratelimit", "path": "redis_ratelimit/decorators.py", "copies": "1", "size": "1250", "license": "mit", "hash": 6006605941059408000, "line_mean": 25.5957446809, "line_max": 84, "alpha_frac": 0.6168, "autogenerated": false, "ratio": 3.869969040247678, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9985517475792108, "avg_score": 0.00025031289111389235, "num_lines": 47 }
from functools import wraps import requests from flask import current_app def cronitor(task_name): # check if task_name is in config def decorator(func): def ping_cronitor(command): if not current_app.config['CRONITOR_ENABLED']: return task_slug = current_app.config['CRONITOR_KEYS'].get(task_name) if not task_slug: current_app.logger.error( 'Cronitor enabled but task_name {} not found in environment'.format(task_name) ) return if command not in {'run', 'complete', 'fail'}: raise ValueError('command {} not a valid cronitor command'.format(command)) try: resp = requests.get( 'https://cronitor.link/{}/{}'.format(task_slug, command), # cronitor limits msg to 1000 characters params={ 'host': current_app.config['API_HOST_NAME'], } ) resp.raise_for_status() except requests.RequestException as e: current_app.logger.warning('Cronitor API failed for task {} due to {}'.format( task_name, repr(e) )) @wraps(func) def inner_decorator(*args, **kwargs): ping_cronitor('run') status = 'fail' try: ret = func(*args, **kwargs) status = 'complete' return ret finally: ping_cronitor(status) return inner_decorator return decorator
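

# --- Usage sketch ('send-daily-report' is an illustrative task name; it must
# have a matching slug under CRONITOR_KEYS in the app config) -----------------
#
#   @cronitor('send-daily-report')
#   def send_daily_report():
#       ...  # cronitor is pinged with 'run' before this body executes and
#            # with 'complete' or 'fail' afterwards, depending on the outcome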
{ "repo_name": "alphagov/notifications-api", "path": "app/cronitor.py", "copies": "1", "size": "1684", "license": "mit", "hash": 8590191269943882000, "line_mean": 32.0196078431, "line_max": 98, "alpha_frac": 0.4958432304, "autogenerated": false, "ratio": 4.811428571428571, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0006175861001544136, "num_lines": 51 }
from functools import wraps

import spbu
from flask import g
from requests import ConnectTimeout, ReadTimeout

from app.constants import (
    access_denied_answer, read_timeout_answer, connect_timeout_answer,
    spbu_api_exception_answer
)
from tg_bot import bot
from tg_bot.keyboards import link_button


def access_denied_message(func):
    @wraps(func)
    def wrapper(message):
        bot.reply_to(message, access_denied_answer)
    return wrapper


def access_denied_callback(func):
    @wraps(func)
    def wrapper(call_back):
        bot.edit_message_text(
            text=access_denied_answer,
            chat_id=call_back.message.chat.id,
            message_id=call_back.message.message_id,
            parse_mode="HTML"
        )
    return wrapper


def access_denied_inline(func):
    @wraps(func)
    def wrapper(inline_query):
        bot.answer_inline_query(
            inline_query_id=inline_query.id,
            results=[],
            switch_pm_text=access_denied_answer,
            switch_pm_parameter="access_denied_inline",
            cache_time=1,
            is_personal=True
        )
    return wrapper


def expected_failure_spbu_message(func):
    @wraps(func)
    def wrapper(message):
        was_error, answer = False, None
        try:
            func(message)
        except ConnectTimeout:
            was_error, answer = True, connect_timeout_answer
        except ReadTimeout:
            was_error, answer = True, read_timeout_answer
        except spbu.ApiException:
            was_error, answer = True, spbu_api_exception_answer
        finally:
            if was_error:
                if g.current_tbot_user:
                    link = g.current_tbot_user.get_current_tt_link()
                else:
                    link = None
                bot.send_message(
                    chat_id=message.chat.id,
                    text=answer,
                    reply_markup=link_button(link=link),
                    parse_mode="HTML"
                )
    return wrapper


def expected_failure_spbu_callback(func):
    @wraps(func)
    def wrapper(call_back):
        was_error, answer = False, None
        try:
            func(call_back)
        except ConnectTimeout:
            was_error, answer = True, connect_timeout_answer
        except ReadTimeout:
            was_error, answer = True, read_timeout_answer
        except spbu.ApiException:
            was_error, answer = True, spbu_api_exception_answer
        finally:
            if was_error:
                if g.current_tbot_user:
                    link = g.current_tbot_user.get_current_tt_link()
                else:
                    link = None
                # report the answer matching the caught exception
                bot.edit_message_text(
                    text=answer,
                    chat_id=call_back.message.chat.id,
                    message_id=call_back.message.message_id,
                    parse_mode="HTML",
                    reply_markup=link_button(link=link)
                )
    return wrapper


def expected_failure_spbu_inline(func):
    @wraps(func)
    def wrapper(inline_query):
        try:
            func(inline_query)
        except (ConnectTimeout, ReadTimeout, spbu.ApiException):
            # inline queries fail silently: there is no chat to report into
            pass
    return wrapper
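

# --- Usage sketch (the handler below is illustrative; it assumes the usual
# pyTelegramBotAPI handler registration) ---------------------------------------
#
#   @bot.message_handler(commands=['schedule'])
#   @expected_failure_spbu_message
#   def schedule_handler(message):
#       ...  # any ConnectTimeout/ReadTimeout/ApiException raised here is
#            # reported to the user instead of crashing the handler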
{ "repo_name": "EeOneDown/spbu4u", "path": "telebot_login/help_decorators.py", "copies": "1", "size": "3318", "license": "apache-2.0", "hash": 545329940212179140, "line_mean": 28.1052631579, "line_max": 70, "alpha_frac": 0.5599758891, "autogenerated": false, "ratio": 4.1320049813200495, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5191980870420049, "avg_score": null, "num_lines": null }
from functools import wraps

import string
import cycl
import math

import numpy as np

import logging
log = logging.getLogger(__name__)

def memoize(fct):
    """
    This is a decorator which caches the result of a function based on the
    given parameters.
    """
    return_dict = {}
    @wraps(fct)
    def wrapper(*args):
        if args not in return_dict:
            return_dict[args] = fct(*args)
        return return_dict[args]
    return wrapper

type_defines = """
#define float32 float
#define int32 int
"""

__axpy_template__ = string.Template("""
__kernel void axpy(__global ${ftype} *a, ${ftype} x, __global ${ftype} *y, __const int size)
{
    int index = get_global_id(0);
    if (index < size)
    {
        y[index] += a[index] * x;
    }
}
""")

__dot_template__ = string.Template("""
__kernel void dot_product(__global float *result, __global ${ftype} *v1, __global ${ftype} *v2, __const int size)
{
    __local float buffer[1024];
    int local_index = get_local_id(0);
    buffer[local_index] = 0;
    for (int i = get_global_id(0); i < size; i += get_global_size(0))
    {
        buffer[local_index] += v1[i] * v2[i];
    }
    barrier(CLK_LOCAL_MEM_FENCE);
    for (int i = get_local_size(0) / 2; i > 0; i = i / 2)
    {
        if (local_index < i)
        {
            buffer[local_index] = buffer[local_index + i] + buffer[local_index];
        }
        barrier(CLK_LOCAL_MEM_FENCE);
    }
    if (local_index == 0)
    {
        result[get_group_id(0)] = buffer[local_index];
    }
}
""")

__spmv_template__ = string.Template("""
__kernel void csr_spmv(__global ${ftype} *vout, __global ${ftype} *data, __global int *indices, __global int *indptr, __global ${ftype} *vinp, int size)
{
    int pos = get_global_id(0);
    if (pos < size)
    {
        ${ftype} result = 0;
        /* standard CSR row traversal: each stored entry multiplies the
           input-vector element of its column */
        for (int i = indptr[pos]; i < indptr[pos + 1]; i++)
        {
            result += data[i] * vinp[indices[i]];
        }
        vout[pos] = result;
    }
}
""")

def __compile_program__(ctx, program_text):
    program = ctx.createProgramWithSource(type_defines + program_text)
    try:
        program.build()
    except cycl.CLError, e:
        if log.isEnabledFor(logging.WARNING):
            for d in ctx._devices:
                print program.getBuildLog(d)
        raise
    return program

@memoize
def __get_axpy_kernel__(ctx, ftype):
    program = __compile_program__(ctx, __axpy_template__.substitute(ftype = ftype))
    kernel = program.createKernel("axpy")
    kernel.parameters = (cycl.parameter_type.MEM_TYPE,
                         cycl.parameter_type.string_dict[ftype],
                         cycl.parameter_type.MEM_TYPE,
                         cycl.parameter_type.INT_TYPE)
    return kernel

@memoize
def __get_dot_kernel__(ctx, ftype):
    program = __compile_program__(ctx, __dot_template__.substitute(ftype = ftype))
    kernel = program.createKernel("dot_product")
    kernel.parameters = (cycl.parameter_type.MEM_TYPE,
                         cycl.parameter_type.MEM_TYPE,
                         cycl.parameter_type.MEM_TYPE,
                         cycl.parameter_type.INT_TYPE)
    return kernel

@memoize
def __get_spmv_kernel__(ctx, ftype):
    program = __compile_program__(ctx, __spmv_template__.substitute(ftype = ftype))
    kernel = program.createKernel("csr_spmv")
    kernel.parameters = (cycl.parameter_type.MEM_TYPE,
                         cycl.parameter_type.MEM_TYPE,
                         cycl.parameter_type.MEM_TYPE,
                         cycl.parameter_type.MEM_TYPE,
                         cycl.parameter_type.MEM_TYPE,
                         cycl.parameter_type.INT_TYPE)
    return kernel

class CLCSRMatrix(object):
    def __init__(self, context, spmatrix):
        matrix = spmatrix.tocsr()
        self.data = context.createBufferLike(matrix.data)
        self.indices = context.createBufferLike(matrix.indices)
        self.indptr = context.createBufferLike(matrix.indptr)
        self.shape = matrix.shape
        self.dtype = spmatrix.dtype
        self._context = context

    def send(self, spmatrix, **kw):
        send_data = cycl.CLWriteBufferNDArray(self.data, spmatrix.data, **kw)
        send_indices = cycl.CLWriteBufferNDArray(self.indices, spmatrix.indices, **kw)
        send_indptr = 
cycl.CLWriteBufferNDArray(self.indptr, spmatrix.indptr, **kw)
        return [send_data, send_indices, send_indptr]

def elementwise(kernel, size, device):
    lws = 2 ** 8
    if device is not None:
        lws = kernel.getWorkGroupSize(device)
    # work sizes must be integers; round the global size up to a multiple
    # of the local size
    gws = int(math.ceil(size / float(lws)) * lws)
    return [cycl.CLNDRangeKernel(kernel,
                                 global_work_size = (gws, 1, 1),
                                 local_work_size = (lws, 1, 1))]

def axpy(a, y, x = 1, device = None):
    size = a.size / a.dtype.itemsize
    kernel = __get_axpy_kernel__(a._context, str(a.dtype))
    kernel.setArgs(a, x, y, size)
    return elementwise(kernel, size, device)

def dot(r, v1, v2, device = None):
    size = v1.size / v1.dtype.itemsize
    kernel = __get_dot_kernel__(v1._context, str(v1.dtype))
    kernel.setArgs(r, v1, v2, size)
    return elementwise(kernel, size, device)

def spvm(csrmat, out, inp, device = None):
    size = out.size / out.dtype.itemsize
    if csrmat.shape[0] != size:
        raise ValueError("Matrix-Vector Alignment Mismatch : Incompatible Output")
    if csrmat.shape[1] * out.dtype.itemsize != inp.size:
        raise ValueError("Matrix-Vector Alignment Mismatch : Incompatible Input")
    kernel = __get_spmv_kernel__(out._context, str(out.dtype))
    kernel.setArgs(out, csrmat.data, csrmat.indices, csrmat.indptr, inp, size)
    return elementwise(kernel, size, device)

if __name__ == '__main__':
    import numpy as np
    import cycl
    import scipy as sc
    import scipy.sparse
    import eikonal.cllinear as linear

    p = cycl.getPlatforms()[0]
    d = p.getDevices()[0]
    c = p.createContext([d])
    q = c.createCommandQueue(d)

    size = 64 * 64 * 64
    cpu_sp = sc.sparse.eye(size, size, dtype = 'float32').tocsr() * 4
    cpu_sp = sc.sparse.dia_matrix((([1] * size, [1] * size, [1] * size), (-1, 0, 1)),
                                  shape = (size, size), dtype = 'float32').tocsr()
    gpu_sp = linear.CLCSRMatrix(c, cpu_sp)
    q.enqueue(gpu_sp.send(cpu_sp))

    cpu_a = np.ones(size, dtype = 'float32')
    cpu_b = np.zeros(size, dtype = 'float32')

    gpu_a = c.createTypedBuffer(size, 'float32')
    gpu_b = c.createTypedBuffer(size, 'float32')

    writecmd = [cycl.CLWriteBufferNDArray(gpu_a, cpu_a)]
    writespcmd = gpu_sp.send(cpu_sp)
    spmvcmd = spvm(gpu_sp, gpu_b, gpu_a)
    readbcmd = [cycl.CLReadBufferNDArray(cpu_b, gpu_b)]
    dotcmd = dot(gpu_b, gpu_b, gpu_a)
    readacmd = [cycl.CLReadBufferNDArray(cpu_a, gpu_a)]

    q.enqueue(writecmd + writespcmd + spmvcmd + readbcmd + dotcmd + readacmd)
    q.finish()
{ "repo_name": "jmercier/CyCL", "path": "cycl/clmath.py", "copies": "1", "size": "7521", "license": "mit", "hash": 4434104664914002400, "line_mean": 28.37890625, "line_max": 134, "alpha_frac": 0.5470017285, "autogenerated": false, "ratio": 3.496513249651325, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4543514978151325, "avg_score": null, "num_lines": null }
from functools import wraps import substitution as sub from util import forgiving_join # To do: # - Fix substitution: # - substituting a quantified variable # - see comments in substitution.py # - Think about whether I want to add Function functionality # - Add XOR, NOR class Variable(object): """ A representation for a variable. The name is merely for humans. Two Variable objects x and y are only considered equal when x is y. """ def __init__(self, name): self.name = name def __repr__(self): return "$" + str(self.name) def recursive(f): @wraps(f) def inner(self, *args, **kwargs): return self.copy( f(sentence, *args, **kwargs) for sentence in self.content ) return inner class RecursiveObject(object): name = None content = None CONNECTIVE = None def __eq__(self, other): return isinstance(other, type(self)) and \ self.content == other.content and \ self.name == other.name def __hash__(self): return hash(type(self)) + hash(self.name) + hash(self.content) def __contains__(self, something): if self == something: return True else: return any( something in cont if isinstance(cont, RecursiveObject) else something == cont for cont in self.content ) def __repr__(self): rep = "({})".format( forgiving_join(self.CONNECTIVE, self.content) ) if self.name is not None: rep = self.name + rep return rep def copy(self, content=None): content = self.content if content is None else content if self.name is None: return type(self)(*content) else: return type(self)(self.name, *content) class Function(RecursiveObject): CONNECTIVE = ', ' def __init__(self, name, *arguments): self.name = name self.content = arguments def free_variables(self): return {x for x in self.content if isinstance(x, Variable)} | \ {x for s in (f.free_variables() for f in self.content if isinstance(f, Function)) for x in s} def substituted(self, dic): """ Apply a substitution to this Function AND return whether anything was substituted. The substitution is handled as if it's a dict. """ new_content = [] substituted = False for cont in self.content: if isinstance(cont, Variable) and cont in dic: substituted = True cont = dic[cont] elif isinstance(cont, Function): cont, newsub = cont.substituted(dic) substituted |= newsub new_content.append(cont) return self.copy(new_content), substituted def unify(self, other): if isinstance(other, Function) and \ self.name == other.name and \ len(self.content) == len(other.content): substitution = sub.Substitution() for selfc, otherc in zip(self.content, other.content): if selfc != otherc: if isinstance(selfc, Variable): substitution[selfc] = otherc elif isinstance(otherc, Variable): substitution[otherc] = selfc else: return None return substitution class Sentence(RecursiveObject): def free_variables(self): """Get all free variables of this sentence""" return {x for c in self.content for x in c.free_variables()} @recursive def substitute(self, subst): """Apply a substitution to this Sentence""" return self.substitute(subst) @recursive def simplified(self): """ Get a logical equivalent copy of this sentence using only And, Or, Not, Quantifier and Predicate. """ return self.simplified() @recursive def skolemised(self, variables=tuple()): """ Replace existentially quantified variables by a Function of the universally quantified variables in this scope. Drop universal quantifiers. 
""" return self.skolemised(variables) @recursive def distributed(self): """ Distribute And over Or """ return self.distributed() @recursive def cleaned(self): """Remove any meaningless parts of this Sentence""" return self.cleaned() def cnf(self): """Convert sentence to conjunctive normal form""" return self.simplified().negated_inwards().skolemised().cleaned() \ .distributed() class Quantifier(Sentence): SYMBOL = None def __init__(self, variable, sentence): self.name = variable self.content = (sentence, ) def __repr__(self): return "{} {} [{}]".format(self.SYMBOL, self.name, self.content[0]) def free_variables(self): frees = super(Quantifier, self).free_variables() frees.discard(self.name) return frees def substitute(self, subst): """ Apply a substitution to this sentence """ if self.name in subst: raise ValueError( "Can't substitute a quantified variable. ({}, {})" .format(self, subst) ) # subst = subst.copy() # del subst[self.name] return super(Quantifier, self).substitute(subst) def negated_inwards(self, negate, negative, positive): """ Negate this sentence, pushing occurrences of Not inwards until they hit a Predicate. """ if negate: return negative( self.name, self.content[0].negated_inwards(True) ) else: return positive( self.name, self.content[0].negated_inwards(False) ) # It feels like the following function can be wrapped with recursive def cleaned(self): if self.name in super(Quantifier, self).free_variables(): return super(Quantifier, self).cleaned() else: return self.content[0].cleaned() class ForAll(Quantifier): SYMBOL = "∀" def negated_inwards(self, negate=False): """ Negate this sentence, pushing occurrences of Not inwards until they hit a Predicate. """ return super(ForAll, self).negated_inwards(negate, Exists, ForAll) def skolemised(self, variables=tuple()): return self.content[0].skolemised({self.name}.union(variables)) class Exists(Quantifier): SYMBOL = "∃" def negated_inwards(self, negate=False): """ Negate this sentence, pushing occurrences of Not inwards until they hit a Predicate. 
""" return super(Exists, self).negated_inwards(negate, ForAll, Exists) def skolemised(self, variables=tuple()): # Replace my variable with a function s = sub.Substitution({self.name: Function(str(id(self)), *variables)}) return self.content[0].substitute(s).skolemised(variables) class IFF(Sentence): CONNECTIVE = ' <=> ' def __init__(self, formula1, formula2): self.content = frozenset((formula1, formula2)) # The following happens when (formula1 is formula2) is True if len(self.content) == 1: self.content = (formula1, formula2) def simplified(self): cont = tuple(self.content) return And( Implies(*cont).simplified(), Implies(*reversed(cont)).simplified() ) class Implies(Sentence): CONNECTIVE = ' => ' def __init__(self, formula1, formula2): self.content = (formula1, formula2) def simplified(self): return Or( Not(self.content[0].simplified()), self.content[1].simplified() ) class AssociativeCommutativeBinaryOperator(Sentence): def __init__(self, formula1, *formulas): formulas = (formula1, ) + formulas self.content = frozenset(formulas) def simplified(self): return super( AssociativeCommutativeBinaryOperator, self ).simplified() def cleaned(self): new = super(AssociativeCommutativeBinaryOperator, self).cleaned() if len(new.content) == 1: return next(iter(new.content)).cleaned() newcont = set() for cont in new.content: if type(cont) == type(new): newcont.update(cont.content) else: newcont.add(cont) return type(self)(*newcont) def negated_inwards(self, negate, negative, positive): if negate: return negative( *[cont.negated_inwards(True) for cont in self.content] ) else: return positive( *[cont.negated_inwards(False) for cont in self.content] ) def distributed(self, otherType): ... class And(AssociativeCommutativeBinaryOperator): CONNECTIVE = " ∧ " def negated_inwards(self, negate=False): """ Negate this sentence, pushing occurrences of Not inwards until they hit a Predicate. """ return super(And, self).negated_inwards(negate, Or, And) class Or(AssociativeCommutativeBinaryOperator): CONNECTIVE = " ∨ " def negated_inwards(self, negate=False): """ Negate this sentence, pushing occurrences of Not inwards until they hit a Predicate. """ return super(Or, self).negated_inwards(negate, And, Or) class Not(Sentence): def __init__(self, sentence): self.content = (sentence, ) def __repr__(self): return "¬{}".format(self.content[0]) def unify(self, other): if isinstance(other, Not): return self.content[0].unify(other.content[0]) def negated_inwards(self, negate=False): return self.content[0].negated_inwards(not negate) # def cnf(self): # if isinstance(self.content, Not): # # ¬¬A = A # return self.content.content.cnf() # elif isinstance(self.content, And): # ... 
# else: # return self.content.cnf() class Predicate(Sentence): CONNECTIVE = ', ' def __init__(self, name, *arguments): self.name = name self.content = arguments def unify(self, other): if isinstance(other, Predicate) and \ self.name == other.name and \ len(self.content) == len(other.content): substitution = sub.Substitution() for selfc, otherc in zip(self.content, other.content): if selfc != otherc: if isinstance(selfc, Variable): substitution[selfc] = otherc elif isinstance(otherc, Variable): substitution[otherc] = selfc elif isinstance(selfc, Function) and \ isinstance(otherc, Function): substitution &= selfc.unify(otherc) else: return None return substitution free_variables = Function.free_variables def substitute(self, substitution): return self.copy( cont.substituted(substitution)[0] if isinstance(cont, Function) else substitution[cont] for cont in self.content ) def simplified(self): return self.copy() def cleaned(self): return self.copy() def skolemised(self, variables=tuple()): return self.copy() def negated_inwards(self, negate=False): if negate: return Not(self.copy()) else: return self.copy() # def cnf(self): # return self
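

# --- Usage sketch (illustration only). Note that `cnf()` is not usable yet,
# since `distributed` is unimplemented above; the pipeline up to
# `negated_inwards` works, as does unification.
if __name__ == '__main__':
    x, y = Variable('x'), Variable('y')
    s = ForAll(x, Implies(Predicate('Human', x), Predicate('Mortal', x)))
    print(s.simplified())                    # e.g. ∀ $x [(¬Human($x) ∨ Mortal($x))]
    print(s.simplified().negated_inwards())  # negations pushed onto the predicates
    # unification binds $x to the term f($y)
    print(Predicate('P', x).unify(Predicate('P', Function('f', y))))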
{ "repo_name": "mpvharmelen/resolution", "path": "sentence.py", "copies": "1", "size": "12080", "license": "mit", "hash": 6375510882264510000, "line_mean": 28.0819277108, "line_max": 79, "alpha_frac": 0.5680669484, "autogenerated": false, "ratio": 4.225840336134453, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5293907284534454, "avg_score": null, "num_lines": null }
from functools import wraps

import tensorflow as tf


class MetaModule(type):
    def __new__(cls, name, bases, local):
        cls = type.__new__(cls, name, bases, local)

        def store_init_args(fn):
            @wraps(fn)
            def store_init_args_wrapper(self, *args, **kwargs):
                fn(self, *args, **kwargs)
                self._args = args
                self._kwargs = kwargs

            return store_init_args_wrapper

        cls.__init__ = store_init_args(cls.__init__)
        return cls


class Module(metaclass=MetaModule):
    """ Base class for all NN layers"""

    def __init__(self, name=None):
        self._name = self.__class__.__name__ if name is None else name
        self._parent = None
        self._children = []

        with tf.variable_scope(None, default_name=self._name) as scope:
            self._scope = scope.name
            self._initialize()

        if len(self.variables.keys()) > 0:
            self.saver = tf.train.Saver(self.variables,
                                        save_relative_paths=True)
        else:
            self.saver = None

    def __call__(self, *args, **kwargs):
        with tf.variable_scope(self._scope, reuse=True):
            res = self._forward(*args, **kwargs)
        return res

    def _initialize(self):
        ''' Initialize all TF variables and sub-layers here '''
        pass

    def _forward(self, *args, **kwargs):
        ''' Implement forward pass here. No new variables allowed! '''
        raise NotImplementedError

    # def signal(self, *args, **kwargs):
    #     ''' Implement signal estimator here '''
    #     raise NotImplementedError

    def save(self, sess, save_path, global_step=None):
        if self.saver is None:
            print('No variables to save!')
        else:
            self.saver.save(sess, save_path, global_step)

    def restore(self, sess, save_path):
        if self.saver is None:
            print('No variables to restore!')
        else:
            self.saver.restore(sess, save_path)

    def create_child(self):
        child = type(self)(*self._args, **self._kwargs)
        self._children.append(child)
        child._parent = self
        child._pullops = child._create_pullops()
        return child

    @property
    def variables(self):
        scope_filter = self._scope + '/'
        varlist = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,
                                    scope=scope_filter)
        variables = {v.name[len(scope_filter):]: v for v in varlist}
        return variables

    def _create_pullops(self):
        child_vars = self.variables
        parent_vars = self._parent.variables

        pull_ops = []
        for vname in child_vars.keys():
            pull_op = tf.assign(child_vars[vname], parent_vars[vname])
            pull_ops.append(pull_op)
        return pull_ops

    def pull(self, session):
        assert self._parent, 'Pull can only be used for child layers!'
        session.run(self._pullops)
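

# --- Usage sketch (a minimal concrete layer; names and shapes are
# illustrative) ----------------------------------------------------------------
#
#   class Dense(Module):
#       def __init__(self, n_in, n_out, name=None):
#           self._n_in, self._n_out = n_in, n_out
#           super(Dense, self).__init__(name)
#
#       def _initialize(self):
#           self.W = tf.get_variable('W', shape=(self._n_in, self._n_out))
#
#       def _forward(self, x):
#           return tf.matmul(x, self.W)
#
#   layer = Dense(10, 5)
#   child = layer.create_child()   # rebuilt from the stored __init__ args
#   child.pull(session)            # copies the parent's weights via the
#                                  # assign ops built in _create_pullops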
{ "repo_name": "atomistic-machine-learning/SchNet", "path": "src/schnet/nn/layers/module.py", "copies": "1", "size": "3059", "license": "mit", "hash": -272365729057080930, "line_mean": 31.2, "line_max": 71, "alpha_frac": 0.5580254985, "autogenerated": false, "ratio": 4.06781914893617, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5125844647436171, "avg_score": null, "num_lines": null }
from functools import wraps

#
# LISP-style list (llist)
#
class _LNoneProto:
    def __repr__(self):
        return "LNone"
    def __str__(self):
        return "LNone"
LNone = _LNoneProto()

def cons(hd, tl):
    '''
    lisp-style pair cell
    '''
    return (hd, tl)

def list_to_llist(lst):
    result = LNone
    for idx in range(len(lst) - 1, -1, -1):
        result = cons(lst[idx], result)
    return result

def car(pair):
    if pair is LNone:
        raise TypeError("Cannot car LNone")
    else:
        return pair[0]

def cdr(pair):
    if pair is LNone:
        raise TypeError("Cannot cdr LNone")
    else:
        return pair[1]

def llist_to_list(llst):
    result = []
    pair = llst
    while pair is not LNone:
        result.append(car(pair))
        pair = cdr(pair)
    return result

def llist(*args):
    return list_to_llist(args)

def llmap(fun, llst):
    if llst is LNone:
        return LNone
    else:
        return cons(fun(car(llst)), llmap(fun, cdr(llst)))

def llist_to_stream(llst):
    if llst is LNone:
        return null_stream
    else:
        return make_stream(car(llst))(lambda: llist_to_stream(cdr(llst)))

#
# Lazy
#
class Promise:
    '''
    Lazy Class

    Not to be confused with Premise class!
    This class is for lazy evaluation, while Premise is used for translation
    '''
    def __init__(self, fun, args=(), kwds={}):
        self.fun = fun
        self.args = args
        self.kwds = kwds

    def __call__(self):
        return self.force()

    def force(self):
        return self.fun(*self.args, **self.kwds)

class MemoizedPromise:
    '''
    Memoized Version of Lazy Class
    '''
    def __init__(self, fun, args=(), kwds={}):
        self.fun = fun
        self.args = args
        self.kwds = kwds
        self.tried = False
        self.result = None

    def __call__(self):
        return self.force()

    def force(self):
        if not self.tried:
            self.tried = True
            self.result = self.fun(*self.args, **self.kwds)
        return self.result

def delay(*app_args, **app_kwds):
    @wraps(delay)
    def decorator(fun):
        return Promise(fun, app_args, app_kwds)
    return decorator

def memoized_delay(*app_args, **app_kwds):
    @wraps(memoized_delay)
    def decorator(fun):
        return MemoizedPromise(fun, app_args, app_kwds)
    return decorator

def is_delayed(obj):
    return isinstance(obj, Promise)

#
# Stream
#
def make_stream(first_elem, memo=False):
    @wraps(make_stream)
    def inner(thunk):
        promise = (memoized_delay if memo else delay)()(thunk)
        return cons(first_elem, promise)
    return inner

null_stream = None

def is_stream_null(stream):
    return stream is null_stream

def stream_car(stream):
    return car(stream)

def stream_cdr(stream):
    return cdr(stream).force()

def stream_filter(fun, stream):
    first_filtered = None
    success = False
    while not is_stream_null(stream):
        car_elem = stream_car(stream)
        stream = stream_cdr(stream)
        if fun(car_elem):
            first_filtered = car_elem
            success = True
            break
    if not success:
        return null_stream
    return make_stream(first_filtered)(lambda: stream_filter(fun, stream))

def stream_map(fun, *args):
    if is_stream_null(args[0]):
        return null_stream
    first_arg_list = map(stream_car, args)
    first_elem = fun(*first_arg_list)

    @make_stream(first_elem)
    def next_stream():
        return stream_map(fun, *map(stream_cdr, args))
    return next_stream

def _stream_concat_iter(stream_llist, cur_stream):
    # NOTE: the llist terminator is LNone, not None
    if is_stream_null(cur_stream) and stream_llist is LNone:
        return null_stream
    elif is_stream_null(cur_stream):
        return _stream_concat_iter(cdr(stream_llist), car(stream_llist))
    else:
        return make_stream(stream_car(cur_stream))(lambda: _stream_concat_iter(stream_llist, stream_cdr(cur_stream)))

def _stream_concat_llist(stream_llist):
    if stream_llist is LNone:
        return null_stream
    else:
        return _stream_concat_iter(cdr(stream_llist), car(stream_llist))

def stream_concat(streams):
    return _stream_concat_llist(list_to_llist(streams))

def _stream_map_append_iter(fun, 
stream, elem_stream): if is_stream_null(elem_stream): if is_stream_null(stream): return null_stream else: return stream_map_append(fun, stream_cdr(stream)) else: first_elem = stream_car(elem_stream) return make_stream(first_elem)(lambda: _stream_map_append_iter(fun, stream, stream_cdr(elem_stream))) def stream_map_append(fun, stream): ''' ('a -> 'b stream) x 'a stream -> 'b stream fun: 'a -> 'b stream stream: 'a stream ''' if is_stream_null(stream): return null_stream else: return _stream_map_append_iter(fun, stream, fun(stream_car(stream))) def sieve(stream): elem = stream_car(stream) return make_stream(elem)(lambda: sieve(stream_filter(lambda x: x % elem != 0, stream_cdr(stream))))
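

# --- Usage sketch (illustration only) -----------------------------------------
# An infinite stream of integers and the classic prime sieve built on it:
if __name__ == '__main__':
    def integers_from(n):
        return make_stream(n)(lambda: integers_from(n + 1))

    primes = sieve(integers_from(2))
    print(stream_car(primes))                           # 2
    print(stream_car(stream_cdr(stream_cdr(primes))))   # 5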
{ "repo_name": "Algy/tempy", "path": "lisn/functional.py", "copies": "1", "size": "5243", "license": "apache-2.0", "hash": 5416273232249867000, "line_mean": 22.40625, "line_max": 117, "alpha_frac": 0.5891665077, "autogenerated": false, "ratio": 3.426797385620915, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4515963893320915, "avg_score": null, "num_lines": null }