text
stringlengths 0
1.05M
| meta
dict |
---|---|
from functools import wraps
__all__ = ['classproperty', 'validatetype']
class classproperty(object):
    """Descriptor exposing a callable's result as a read-only class-level property."""
    def __init__(self, getter):
        # The callable receives the owner class (not an instance).
        self.getter = getter

    def __get__(self, instance, owner):
        # Instance is deliberately ignored; evaluation is always per-class.
        return self.getter(owner)
def validatetype(pos, typ, exc=TypeError, msg=None, format_args=None):
    """
    A type validation decorator.

    Args:
        pos (int): The position of the argument (to validate) in args
        typ (type or tuple of types): Type that the argument should be an
            instance of
        exc (exception, optional): Exception to raise for failed validation
        msg (str, optional): Message for exception
        format_args (list of ints, optional): A list of ints defining the
            position of arguments to be used from wrapped function's *args
            when formatting ``msg``

    Returns:
        decorated function's return value

    Raises:
        ``exc`` when the argument at ``pos`` fails the isinstance check.
    """
    def validate_arg_type(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            to_validate = args[pos]
            if not isinstance(to_validate, typ):
                # BUG FIX 1: the original compared `typ is basestring`, which
                # raises NameError on Python 3 whenever validation fails.
                # BUG FIX 2: the original built `fargs` *before* substituting
                # the friendly string-type name, so the substitution never
                # reached the message.  Compute the display name first.
                shown_type = typ
                if getattr(typ, '__name__', None) in ('str', 'unicode',
                                                      'basestring'):
                    shown_type = '<str or unicode>'
                fargs = ([args[i] for i in format_args] if format_args else
                         [to_validate, pos, shown_type])
                message = msg or ("argument '{0}', at position {1} "
                                  "should be of type {2}")
                raise exc(message.format(*fargs))
            return func(*args, **kwargs)
        return wrapper
    return validate_arg_type
| {
"repo_name": "alok1974/compage",
"path": "src/compage/decorator.py",
"copies": "1",
"size": "1848",
"license": "mit",
"hash": 7095520478013080000,
"line_mean": 35.2352941176,
"line_max": 77,
"alpha_frac": 0.5633116883,
"autogenerated": false,
"ratio": 4.666666666666667,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 51
} |
from functools import wraps
class AssemblaObject(object):
    """
    Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`.
    """
    def __init__(self, data=None):
        # BUG FIX: the original default was the mutable literal `{}`, which
        # is evaluated once and shared between every instance constructed
        # without arguments; use a fresh per-instance dict instead.
        self.data = {} if data is None else data

    def __getitem__(self, key):
        return self.data[key]

    def __setitem__(self, key, value):
        self.data[key] = value

    def keys(self):
        return self.data.keys()

    def values(self):
        return self.data.values()

    def get(self, *args, **kwargs):
        return self.data.get(*args, **kwargs)

    def __repr__(self):
        # Most objects expose one of these name-like fields.
        for field in ('menu_name', 'page_name', 'name',):
            if field in self.data:
                return '<%s: %s>' % (type(self).__name__, self.data[field])
        # Tickets
        if ('number' in self.data) and ('summary' in self.data):
            return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary'])
        # Ticket Comments (and other objects that only carry an id)
        if 'id' in self.data:
            return "<%s: #%s>" % (type(self).__name__, self.data['id'])
        return super(AssemblaObject, self).__repr__()
def assembla_filter(func):
    """
    Filters :data for the objects in it which possess attributes equal in
    name/value to a key/value in kwargs.

    Each key/value combination in kwargs is compared against the object, so
    multiple keyword arguments can be passed in to constrain the filtering.
    """
    @wraps(func)
    def wrapper(class_instance, **kwargs):
        # `extra_params` is forwarded to the wrapped function, never used as
        # a filter key.
        extra_params = kwargs.pop('extra_params', None)
        results = func(class_instance, extra_params)
        if kwargs:
            # Keep only the objects matching *every* kwarg.
            # BUG FIX: the original used `kwargs.iteritems()` (removed in
            # Python 3) and `len(filter(...))` (filter is a lazy iterator
            # without len() on Python 3).  A list comprehension with all()
            # is equivalent and works on both versions.
            results = [obj for obj in results
                       if all(obj.get(attr_name) == value
                              for attr_name, value in kwargs.items())]
        return results
    return wrapper
"repo_name": "markfinger/assembla",
"path": "assembla/lib.py",
"copies": "1",
"size": "2289",
"license": "mit",
"hash": -4750020619272711000,
"line_mean": 29.1315789474,
"line_max": 102,
"alpha_frac": 0.5351681957,
"autogenerated": false,
"ratio": 4.207720588235294,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5242888783935294,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
class AttributeDescription(object):
    """Base descriptor pairing a human-readable description with a stored value."""
    def __init__(self, text, value=None, *args, **kwargs):
        self.text = text
        self.value = value
        # Filled in later, e.g. by the `attributes` class decorator.
        self.name = None

    def __call__(self, attr, model):
        self.name = attr

    def __get__(self, obj, type=None):  # pragma: no cover
        return self.value

    def __set__(self, obj, val):  # pragma: no cover
        self.value = val
class Dimension(AttributeDescription):
    """Descriptor storing its value in the owner's `_dims` mapping, keyed by name."""
    def __get__(self, obj, type=None):
        # A dimension that was never set reads as None instead of raising.
        return obj._dims.get(self.name)

    def __set__(self, obj, value):
        obj._dims[self.name] = value
class Weights(AttributeDescription):
    """Descriptor for a parameter array allocated lazily in the model's memory."""
    def __init__(self, text, get_shape, init=None):
        self.name = None            # assigned later (see `attributes`)
        self.text = text
        self.get_shape = get_shape  # callable(obj) -> shape of the array
        self.init = init            # optional callable(data, ops) initializer

    def __get__(self, obj, type=None):
        key = (obj.id, self.name)
        if key not in obj._mem:
            # Allocate on first access, then optionally initialize in place.
            data = obj._mem.add(key, self.get_shape(obj))
            if self.init is not None:
                self.init(data, obj.ops)
            return data
        return obj._mem[key]

    def __set__(self, obj, val):
        # Copy into the existing buffer rather than rebinding it.
        obj._mem.get((obj.id, self.name))[:] = val
class Gradient(AttributeDescription):
    """Descriptor exposing the gradient buffer associated with a named parameter."""
    def __init__(self, param_name):
        self.name = None  # assigned later (see `attributes`)
        self.text = "Gradient of %s" % param_name
        self.param_name = param_name

    def __get__(self, obj, type=None):
        key = (obj.id, self.name)
        if key not in obj._mem:
            # Lazily allocate a gradient slot tied to the parameter's slot.
            return obj._mem.add_gradient(key, (obj.id, self.param_name))
        return obj._mem.get(key)

    def __set__(self, obj, val):
        # Copy into the existing buffer rather than rebinding it.
        obj._mem.get((obj.id, self.name))[:] = val
class Synapses(Weights):
    # Semantic alias of Weights for connection-weight parameters.
    pass
class Biases(Weights):
    # Semantic alias of Weights for bias parameters.
    pass
class Moment(Weights):
    # Semantic alias of Weights; presumably optimizer moment buffers --
    # behaves identically to Weights (TODO confirm with callers).
    pass
def attributes(**specs):
    """Class decorator attaching the given attribute descriptions to a class."""
    if not specs:  # pragma: no cover
        raise ValueError("Must describe at least one attribute")

    def wrapped(cls):
        # Copy first so a subclass never mutates its parent's dict in place.
        merged = dict(cls.descriptions)
        merged.update(specs)
        cls.descriptions = merged
        for attr, desc in cls.descriptions.items():
            setattr(cls, attr, desc)
            desc.name = attr
        return cls
    return wrapped
def on_init(*callbacks):
    """Class decorator appending `callbacks` to the class's `on_init_hooks`."""
    def wrapped(cls):
        # Rebuild the list so a base class's hook list is never mutated.
        cls.on_init_hooks = list(cls.on_init_hooks) + list(callbacks)
        return cls
    return wrapped
def on_data(*callbacks):
    """Class decorator appending `callbacks` to the class's `on_data_hooks`."""
    def wrapped(cls):
        # Rebuild the list so a base class's hook list is never mutated.
        cls.on_data_hooks = list(cls.on_data_hooks) + list(callbacks)
        return cls
    return wrapped
def input(getter):
    """Class decorator installing `getter` as the class's `describe_input` hook."""
    def wrapped(cls):
        cls.describe_input = getter
        return cls
    return wrapped
def output(getter):
    """Class decorator installing `getter` as the class's `describe_output` hook."""
    def wrapped(cls):
        cls.describe_output = getter
        return cls
    return wrapped
| {
"repo_name": "ryfeus/lambda-packs",
"path": "Spacy/source2.7/thinc/describe.py",
"copies": "1",
"size": "2948",
"license": "mit",
"hash": 8473119189768093000,
"line_mean": 22.9674796748,
"line_max": 64,
"alpha_frac": 0.5712347354,
"autogenerated": false,
"ratio": 3.73637515842839,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.480760989382839,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
class cached_property(object):
    """
    A property that is only computed once per instance and then replaces itself
    with an ordinary attribute. Deleting the attribute resets the property.
    Source: https://github.com/bottlepy/bottle/commit/fa7733e075da0d790d809aa3d2f53071897e6f76
    """  # noqa
    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        # Accessed on the class itself -> return the descriptor.
        if obj is None:
            return self
        # Compute once, then shadow the descriptor with a plain attribute
        # of the same name in the instance dict.
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result
def memoize(fun):
    """A simple memoize decorator for functions supporting positional args."""
    cache = {}

    @wraps(fun)
    def wrapper(*args, **kwargs):
        # kwargs are made hashable by freezing the sorted item pairs.
        key = (args, frozenset(sorted(kwargs.items())))
        if key not in cache:
            cache[key] = fun(*args, **kwargs)
        return cache[key]
    return wrapper
# From this response in Stackoverflow
# http://stackoverflow.com/a/19053800/1072990
def to_camel_case(snake_str):
    """Convert a snake_case string to camelCase (first component kept as-is)."""
    parts = snake_str.split('_')
    # Title-case every component after the first and glue them back together.
    return parts[0] + "".join(word.title() for word in parts[1:])
class LazyMap(object):
    """Lazily apply ``_map`` to items of ``origin``, memoizing mapped results.

    Iteration pulls from ``origin`` one item at a time, mapping and recording
    each result in ``_state``; once the origin iterator is exhausted,
    subsequent iterations replay the recorded state.
    """
    def __init__(self, origin, _map, state=None):
        self._origin = origin
        # The iterator is created eagerly, so `origin` must be iterable here.
        self._origin_iter = origin.__iter__()
        # NOTE(review): `state or []` also replaces an explicitly passed
        # empty list with a new one -- harmless, but worth knowing.
        self._state = state or []
        self._finished = False
        self._map = _map
    def __iter__(self):
        # While unfinished, the LazyMap itself is the iterator; a partially
        # consumed instance therefore resumes where it left off rather than
        # restarting.
        return self if not self._finished else iter(self._state)
    def iter(self):
        return self.__iter__()
    def __len__(self):
        # Length of the underlying collection, not of the mapped state.
        return self._origin.__len__()
    def __next__(self):
        try:
            n = next(self._origin_iter)
            n = self._map(n)
        except StopIteration as e:
            # Origin exhausted: future iterations replay self._state.
            self._finished = True
            raise e
        else:
            self._state.append(n)
            return n
    def next(self):
        # Python 2 iterator protocol alias.
        return self.__next__()
    def __getitem__(self, key):
        item = self._origin.__getitem__(key)
        if isinstance(key, slice):
            # Slices return a new LazyMap over the sliced origin.
            return LazyMap(item, self._map)
        return self._map(item)
    def __getattr__(self, name):
        # Any unknown attribute is proxied to the underlying object.
        return getattr(self._origin, name)
    def __repr__(self):
        return "<LazyMap %s>" % repr(self._origin)
| {
"repo_name": "jhgg/graphene",
"path": "graphene/utils.py",
"copies": "1",
"size": "2444",
"license": "mit",
"hash": 5405893544731956000,
"line_mean": 27.4186046512,
"line_max": 94,
"alpha_frac": 0.5793780687,
"autogenerated": false,
"ratio": 3.961102106969206,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5040480175669206,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
class ChainableBase(object):
    """Mixin providing a shallow-copy "fork" used by the `chain` decorator."""
    def _generate(self):
        # New instance of the same class without running __init__, carrying
        # a shallow copy of the current state (mutables stay shared).
        clone = self.__class__.__new__(self.__class__)
        clone.__dict__ = self.__dict__.copy()
        return clone
def chain(func):
    """Method decorator: run `func` against a fork of the instance, return the fork."""
    @wraps(func)
    def decorator(self, *args, **kw):
        # Work on a copy so the original instance is untouched; the wrapped
        # method's own return value is intentionally discarded.
        forked = self._generate()
        func(forked, *args, **kw)
        return forked
    return decorator
class InboundMailRelationMapper(ChainableBase):
    """
    Creates a model instance for each data item. Keyword arguments are
    appended to each item in the data.
    """
    def __init__(self, **kwargs):
        # Common attributes applicable to all models are passed as keyword arguments (e.g. model instance for a shared foreign key)
        self.common_attributes = kwargs
    @chain
    def data(self, data):
        # NOTE(review): this assignment shadows the `data` method with an
        # instance attribute on the forked copy -- deliberate, since `chain`
        # always works on a fork.
        self.data = data
        # Append arguments from class initiation to each item in the data.
        # NOTE(review): `append` is itself @chain-decorated, so it mutates a
        # *fork*; the mutation still reaches this instance because the fork's
        # shallow-copied __dict__ shares the same underlying list/dict
        # objects -- confirm this remains intended.
        self.append(self.common_attributes)
    @chain
    def append(self, dict_data):
        # If there are multiple relations, loop through and append `dict_data` to each child object. If the key already exists in the data, it will be overwritten
        if isinstance(self.data, list):
            for data in self.data:
                for key, value in dict_data.items():
                    data[key] = value
        # Single relation, no loop required
        elif isinstance(self.data, dict):
            for key, value in dict_data.items():
                self.data[key] = value
    @chain
    def create_for(self, target_model):
        # NOTE(review): `chain` returns the forked instance and discards the
        # wrapped method's return value, so the created model instances
        # returned below are not visible to the caller -- verify intended.
        # Multiple relation
        if isinstance(self.data, list):
            relations = []
            for data in self.data:
                relations.append(target_model.objects.create(**data))
            return relations
        # Single relation
        elif isinstance(self.data, dict):
            return target_model.objects.create(**self.data)
| {
"repo_name": "christippett/django-postmark-inbound",
"path": "postmark_inbound/utils.py",
"copies": "1",
"size": "1894",
"license": "mit",
"hash": 5015918507287738000,
"line_mean": 31.1016949153,
"line_max": 162,
"alpha_frac": 0.6040126716,
"autogenerated": false,
"ratio": 4.354022988505747,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00023336537476971044,
"num_lines": 59
} |
from functools import wraps
class Command(object):
    """A named callable bound to the argument names it extracts from an arg dict."""
    def __init__(self, name, func, arg_names):
        self.name = name
        self.func = func
        self.arg_names = arg_names

    def __call__(self, arg_dict):
        # '*' receives the whole dict; any other name is looked up and
        # defaults to None when absent.
        values = [arg_dict if n == '*' else arg_dict.get(n, None)
                  for n in self.arg_names]
        return self.func(*values)

    def __repr__(self):
        return "<Command {0}({1})>".format(self.name, ", ".join(self.arg_names))
class DuplicatedCommand(Exception):
    """Raised when registering a command under a name that is already taken."""
    pass
class CommandNotFound(Exception):
    """Raised when no registered command matches the given argument dict."""
    pass
class Climate(object):
    """Registry that dispatches an argument dict to the first matching command."""
    def __init__(self):
        self.commands = dict()

    def __call__(self, arg_dict):
        # Dispatch to the first registered command whose name maps to a
        # truthy value in the argument dict.
        for cmd_name, cmd in self.commands.items():
            if arg_dict.get(cmd_name, False):
                return cmd(arg_dict)
        raise CommandNotFound

    def add_command(self, name, func, arg_names):
        # Duplicate names are rejected rather than silently overwritten.
        if name in self.commands:
            raise DuplicatedCommand(name)
        self.commands[name] = Command(name, func, arg_names)

    def command(self, *args, **kwargs):
        """Decorator form of add_command; positional args are the arg names."""
        def wrapped_cmd(func):
            cmd_name = kwargs.get('name', None) or func.__name__
            self.add_command(cmd_name, func, args)

            @wraps(func)
            def wrapper(*a, **kw):
                return func(*a, **kw)
            return wrapper
        return wrapped_cmd

    def merge_commands(self, commands):
        for cmd in commands.values():
            self.add_command(cmd.name, cmd.func, cmd.arg_names)

    def merge(self, cli, namespace=None):
        if not namespace:
            self.merge_commands(cli.commands)
        elif namespace in self.commands:
            raise DuplicatedCommand
        else:
            self.commands[namespace] = cli
| {
"repo_name": "fespino/climate",
"path": "climate.py",
"copies": "1",
"size": "2007",
"license": "mit",
"hash": -8417562928656906000,
"line_mean": 26.4931506849,
"line_max": 80,
"alpha_frac": 0.5645241654,
"autogenerated": false,
"ratio": 4.014,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.50785241654,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
class MetaMachine(type):
    """Metaclass collecting before/after transition hooks declared on methods."""
    def __new__(cls, name, bases, d):
        # Resolve the initial state: from the class body if present,
        # otherwise from the first base class that defines one.
        state = d.get('initial_state')
        if state is None:
            for base in bases:
                try:
                    state = base.initial_state
                    break
                except AttributeError:
                    pass
        # Collect hooks from any attribute carrying `.before`/`.after`
        # transition lists (see `create_transition`).
        # BUG FIX: the original loop used `name` as its loop variable, which
        # clobbered the class-name parameter, so the generated class ended up
        # named after the last attribute in `d` instead of the requested
        # name.  (`== None` was also replaced with the `is None` idiom.)
        before, after = [], []
        for attr_name, func in d.items():
            try:
                after += [(start, end, func) for start, end in func.after]
            except AttributeError:
                pass
            try:
                before += [(start, end, func) for start, end in func.before]
            except AttributeError:
                pass
        d['_after_transitions'] = after
        d['_before_transitions'] = before
        d['_state'] = state
        return type.__new__(cls, name, bases, d)
# Python 2/3 Metaclass
# http://mikewatkins.ca/2008/11/29/python-2-and-3-metaclasses/
# Base state-machine class built by calling the metaclass directly so the
# same code runs on both Python 2 and 3.  `state` is a read-only view of the
# `_state` attribute managed by MetaMachine and the `event` wrapper.
Machine = MetaMachine('Machine', (object, ), {
    'state': property(lambda x: x._state),
})
def create_transition(attr, from_state, to_state):
    """Decorator factory: append (from_state, to_state) to the function's `attr` list."""
    def wrapper(f):
        # First use creates the list; subsequent uses append to it.
        pairs = getattr(f, attr, None)
        if pairs is None:
            setattr(f, attr, [(from_state, to_state)])
        else:
            pairs.append((from_state, to_state))
        return f
    return wrapper
def after_transition(from_state, to_state):
    """Run the decorated method after a `from_state` -> `to_state` transition."""
    return create_transition('after', from_state, to_state)
def before_transition(from_state, to_state):
    """Run the decorated method before a `from_state` -> `to_state` transition."""
    return create_transition('before', from_state, to_state)
def around_transition(f):
    # NOTE(review): currently a no-op placeholder -- it returns the function
    # unchanged and registers nothing.
    return f
def is_transition(start, end, current, future):
    """Return True when (start, end) matches (current, future); '*' is a wildcard."""
    start_matches = start == '*' or start in current
    end_matches = end == '*' or end in future
    return start_matches and end_matches
def transition_from(from_state, timing='before'):
    """Trigger the decorated function whenever transitioning
    `from` the specified state (to anything else). By default,
    fires before the state transition has taken place, so the
    :attr:`~Machine.state` will be `from_state`.

    :param timing: 'before' or 'after' -- which hook list to register on.
    """
    return create_transition(timing, from_state, '*')
def transition_to(to_state, timing='after'):
    """Trigger the decorated function whenever transitioning
    `to` the specified state (from anything else). By default,
    fires after the state transition has taken place, so the
    :attr:`~Machine.state` will be `to_state`.

    :param timing: 'before' or 'after' -- which hook list to register on.
    """
    return create_transition(timing, '*', to_state)
def event(f):
    """Decorator for state-machine event methods.

    The wrapped generator yields (current, next_state) pairs; the first pair
    whose `current` matches the machine's state (or contains '*') triggers a
    transition: before-hooks fire, the state is assigned, after-hooks fire,
    and the event returns.  Non-matching pairs are skipped.
    """
    @wraps(f)
    def wrapper(self, *args, **kwargs):
        for current, next_state in f(self, *args, **kwargs):
            # `current` must support `in` (e.g. a list/tuple of states).
            if self.state in current or '*' in current:
                for start, end, method in self._before_transitions:
                    if is_transition(start, end, current, next_state):
                        method(self, *args, **kwargs)
                # The actual transition happens between the two hook phases.
                self._state = next_state
                for start, end, method in self._after_transitions:
                    if is_transition(start, end, current, next_state):
                        method(self, *args, **kwargs)
                return
    return wrapper
| {
"repo_name": "kyleconroy/statemachine",
"path": "statemachine.py",
"copies": "1",
"size": "3061",
"license": "mit",
"hash": 474971479178246900,
"line_mean": 29.3069306931,
"line_max": 79,
"alpha_frac": 0.5775890232,
"autogenerated": false,
"ratio": 4.142083897158322,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00212926331211564,
"num_lines": 101
} |
from functools import wraps
class _PluginManager(object):
def __init__(self):
self._registered_plugins = []
self._cached_base_callbacks = {}
self._built_functions = {}
def register(self, *plugins):
"""
Makes it possible to register your plugin.
"""
self._registered_plugins.extend(plugins)
self._build_functions()
def decorate(self, name=None):
def decorator(callback):
@wraps(callback)
def wrapper(*args, **kwargs):
return built_functions[public_name](*args, **kwargs)
public_name = name or callback.__name__
assert public_name not in self._built_functions
built_functions = self._built_functions
built_functions[public_name] = callback
self._cached_base_callbacks[public_name] = callback
return wrapper
return decorator
def _build_functions(self):
for name, callback in self._cached_base_callbacks.items():
for plugin in reversed(self._registered_plugins):
# Need to reverse so the first plugin is run first.
try:
func = getattr(plugin, name)
except AttributeError:
pass
else:
callback = func(callback)
self._built_functions[name] = callback
plugin_manager = _PluginManager()
| {
"repo_name": "srusskih/SublimeJEDI",
"path": "dependencies/jedi/plugins/__init__.py",
"copies": "6",
"size": "1453",
"license": "mit",
"hash": 4002873228695675400,
"line_mean": 29.914893617,
"line_max": 68,
"alpha_frac": 0.5636613902,
"autogenerated": false,
"ratio": 4.875838926174497,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8439500316374496,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
class SafeDict(dict):
    """
    A dict whose `get` accepts a pipe-separated path into nested sub-dicts.

    For example, with {"key": {"subkey": {"subsubkey": "value"}}} you can use
    the path 'key|subkey|subsubkey' to get 'value'.  The default value is
    returned if ANY of the sub-elements along the path does not exist.

    Code based on https://stackoverflow.com/a/44859638/2693875
    """
    def get(self, path, default=None):
        current = None
        for part in path.split("|"):
            if current:
                if isinstance(current, list):
                    # Descend into each element of a list of sub-dicts.
                    current = [elem.get(part, default) if elem else None
                               for elem in current]
                else:
                    current = current.get(part, default)
            else:
                current = dict.get(self, part, default)
            # A falsy intermediate value stops the walk early (preserved
            # behaviour of the original implementation).
            if not current:
                break
        return current
def configuration_to_safe_dict(method):
    """
    This wrapper calls the method with the configuration converted from a
    regular dict into a SafeDict.
    """
    @wraps(method)
    def method_wrapper(self, project_and_group, configuration, *args):
        safe_configuration = SafeDict(configuration)
        return method(self, project_and_group, safe_configuration, *args)
    return method_wrapper
| {
"repo_name": "egnyte/gitlabform",
"path": "gitlabform/gitlabform/processors/util/decorators.py",
"copies": "1",
"size": "1277",
"license": "mit",
"hash": -7889891810961765000,
"line_mean": 28.0227272727,
"line_max": 111,
"alpha_frac": 0.5967110415,
"autogenerated": false,
"ratio": 4.119354838709677,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5216065880209677,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def bound(method):
    """Wrap `method` so every call is routed through the instance's `bind` hook."""
    @wraps(method)
    def bound_method(self, *args, **kwargs):
        # bind() may short-circuit (e.g. NaN propagation) or run the method.
        return self.bind(method, args, kwargs)
    return bound_method
class MathOp(object):
    """Maybe-style arithmetic wrapper: once NaN, every operation is a no-op."""
    def __init__(self, value=None, is_nan=False):
        self.value = value
        self.is_nan = is_nan

    def __repr__(self):
        if self.is_nan:
            return "<MathOp NaN>"
        return "<MathOp {}>".format(self.value)

    def bind(self, method, args, kwargs):
        # NaN short-circuits; otherwise run the wrapped operation.
        if self.is_nan:
            return self
        return method(self, *args, **kwargs)

    @bound
    def div(self, denum):
        # Division by zero poisons the chain instead of raising.
        if denum == 0:
            return MathOp(is_nan=True)
        return MathOp(self.value / denum)

    @bound
    def mul(self, multiplicand):
        return MathOp(self.value * multiplicand)

    @bound
    def add(self, addend):
        return MathOp(self.value + addend)

    @bound
    def sub(self, subtrahend):
        return MathOp(self.value - subtrahend)
| {
"repo_name": "jorgenschaefer/monads-for-normal-programmers",
"path": "monads/mathop/step2_2.py",
"copies": "1",
"size": "1043",
"license": "bsd-2-clause",
"hash": -5382719913850946000,
"line_mean": 22.1777777778,
"line_max": 51,
"alpha_frac": 0.5637583893,
"autogenerated": false,
"ratio": 3.6089965397923875,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46727549290923875,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def bound(method):
    """Route calls to `method` through the instance's monadic `bind` hook."""
    @wraps(method)
    def bound_method(self, *args, **kwargs):
        return self.bind(method, args, kwargs)
    return bound_method
class MaybeMonad(object):
    """Minimal Maybe monad: `bind` skips the operation when `is_nothing` is set."""
    is_nothing = False

    def bind(self, method, args, kwargs):
        # "Nothing" propagates unchanged; otherwise run the operation.
        if self.is_nothing:
            return self
        return method(self, *args, **kwargs)
class MathOp(MaybeMonad):
    """Arithmetic wrapper over MaybeMonad; MathOpNaN short-circuits the chain."""
    is_nothing = False

    def __init__(self, value):
        self.value = value

    def __repr__(self):
        return "<MathOp {}>".format(self.value)

    @bound
    def div(self, denum):
        # Division by zero yields NaN rather than raising.
        if denum == 0:
            return MathOpNaN()
        return MathOp(self.value / denum)

    @bound
    def mul(self, multiplicand):
        return MathOp(self.value * multiplicand)

    @bound
    def add(self, addend):
        return MathOp(self.value + addend)

    @bound
    def sub(self, subtrahend):
        return MathOp(self.value - subtrahend)
class MathOpNaN(MathOp):
    # Absorbing "Nothing" element of the MathOp chain: bind() (inherited from
    # MaybeMonad) returns self for every operation.
    is_nothing = True
    def __init__(self):
        super(MathOpNaN, self).__init__(None)
    def __repr__(self):
        return "<MathOp NaN>"
| {
"repo_name": "jorgenschaefer/monads-for-normal-programmers",
"path": "monads/mathop/step3.py",
"copies": "1",
"size": "1169",
"license": "bsd-2-clause",
"hash": 8788106227948160000,
"line_mean": 19.5087719298,
"line_max": 48,
"alpha_frac": 0.5791274594,
"autogenerated": false,
"ratio": 3.630434782608696,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9709562242008696,
"avg_score": 0,
"num_lines": 57
} |
from functools import wraps
def cached_class(klass):
    """Decorator to cache class instances by constructor arguments.

    We "tuple-ize" the keyword arguments dictionary since
    dicts are mutable; keywords themselves are strings and
    so are always hashable, but if any arguments (keyword
    or positional) are non-hashable, that set of arguments
    is not cached.
    """
    cache = {}

    @wraps(klass, assigned=('__name__', '__module__'), updated=())
    class _decorated(klass):
        # The wraps decorator can't do this because __doc__
        # isn't writable once the class is created
        __doc__ = klass.__doc__

        def __new__(cls, *args, **kwds):
            # BUG FIX: `kwds.iteritems()` was removed in Python 3; use
            # items().  The items are sorted so the key is deterministic and
            # keyword order doesn't create spurious cache misses.
            key = (cls,) + args + tuple(sorted(kwds.items()))
            try:
                inst = cache.get(key, None)
            except TypeError:
                # Can't cache this set of arguments (unhashable).
                inst = key = None
            if inst is None:
                # Call the class itself so __init__ runs exactly once here;
                # the no-op __init__ below keeps subsequent constructions
                # from re-initializing a cached instance.
                inst = klass(*args, **kwds)
                # This makes isinstance and issubclass work properly.
                inst.__class__ = cls
                if key is not None:
                    cache[key] = inst
            return inst

        def __init__(self, *args, **kwds):
            # Called every time __new__ is called; initialization already
            # happened (at most once) inside __new__, so do nothing here.
            pass

    return _decorated
| {
"repo_name": "ActiveState/code",
"path": "recipes/Python/577998_Cached_Class/recipe-577998.py",
"copies": "1",
"size": "2088",
"license": "mit",
"hash": -2413890014548571000,
"line_mean": 39.9411764706,
"line_max": 67,
"alpha_frac": 0.5421455939,
"autogenerated": false,
"ratio": 4.9952153110047846,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6037360904904785,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def cachedprop(fn):
    '''Decorator which creates a cached property.'''
    @wraps(fn)
    def get(self):
        # The computed value is stashed in the instance dict under a
        # mangled name; later reads hit the cache.
        cache_name = '__' + fn.__name__ + '__cache'
        if cache_name not in self.__dict__:
            self.__dict__[cache_name] = fn(self)
        return self.__dict__[cache_name]
    return property(get)
def rangecachedfn(fn):
    '''Decorator which creates a range memoized function.

    Speeds up functions whose response depends on a numeric parameter and is
    constant over ranges of that parameter.  The decorated function must take
    the numeric parameter as its second positional parameter and must return
    (response, low_boundary, high_boundary).  The response is then cached for
    every call whose second parameter falls in [low, high) with the same
    remaining arguments.  Keyword arguments are not supported.
    '''
    memo = {}

    @wraps(fn)
    def wrapper(*args):
        if args in memo:
            # Cached hit: the original range boundaries are no longer known.
            return memo[args], None, None
        rv, lo, hi = fn(*args)
        if hi:
            # Pre-populate the cache for every value of the numeric second
            # parameter inside [lo, hi).
            for value in range(lo, hi):
                key = list(args)
                key[1] = value
                memo[tuple(key)] = rv
        else:
            memo[args] = rv
        return rv, lo, hi
    return wrapper
def cachedfn(fn):
    '''Decorator which creates a memoized function.'''
    memo = {}

    @wraps(fn)
    def wrapper(*args):
        if args not in memo:
            memo[args] = fn(*args)
        return memo[args]
    return wrapper
def singleton(cls):
    '''Convert the given class into a singleton.

    Meant to be used as a decorator applied to classes, e.g.:
        @singleton
        class Foo:
            pass
    Afterwards calling Foo() always returns the same instance:
        a = Foo()
        b = Foo()
        assert a is b
    Implementation taken from PEP 318 examples.
    '''
    instances = {}

    def getinstance():
        try:
            return instances[cls]
        except KeyError:
            instances[cls] = cls()
            return instances[cls]
    return getinstance
| {
"repo_name": "Samsung/ADBI",
"path": "idk/common/deco.py",
"copies": "1",
"size": "2437",
"license": "apache-2.0",
"hash": -5570610554295112000,
"line_mean": 26.393258427,
"line_max": 111,
"alpha_frac": 0.5666803447,
"autogenerated": false,
"ratio": 4.479779411764706,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.010751477472982881,
"num_lines": 89
} |
from functools import wraps
def cache_forever(f):
    """Memoize `f` permanently, keyed by its positional arguments."""
    f.cache = {}

    @wraps(f)
    def inner(*args):
        try:
            return f.cache[args]
        except KeyError:
            result = f.cache[args] = f(*args)
            return result
    return inner
def property_cache_forever(f):
    """Property computed once and cached forever.

    NOTE: the cache lives on the function object itself, so it is shared by
    every instance of the owning class.
    """
    f.cached = None

    @wraps(f)
    def inner(self):
        # Only a `None` result triggers recomputation.
        if f.cached is None:
            f.cached = f(self)
        return f.cached
    return property(inner)
def property_cache_once_per_frame(f):
    """Cache the property's value for one game loop; recompute on a new loop.

    Only works on properties of the bot object because it reads
    `self.state.game_loop`.  The frame marker and cache live on the function
    object, so they are shared across instances.
    """
    f.frame = -1
    f.cache = None

    @wraps(f)
    def inner(self):
        current_frame = self.state.game_loop
        if f.frame != current_frame:
            # New game loop: invalidate whatever was cached.
            f.frame = current_frame
            f.cache = None
        if f.cache is None:
            f.cache = f(self)
        return f.cache
    return property(inner)
def property_immutable_cache(f):
    """Property cached in the instance's `self.cache` dict and returned as-is."""
    @wraps(f)
    def inner(self):
        try:
            return self.cache[f.__name__]
        except KeyError:
            value = self.cache[f.__name__] = f(self)
            return value
    return property(inner)
def property_mutable_cache(f):
    """Property cached in `self.cache`; a copy is returned so callers may mutate it."""
    @wraps(f)
    def inner(self):
        try:
            stored = self.cache[f.__name__]
        except KeyError:
            stored = self.cache[f.__name__] = f(self)
        # Hand out a copy so the cached value itself stays pristine.
        return stored.copy()
    return property(inner)
| {
"repo_name": "Dentosal/python-sc2",
"path": "sc2/cache.py",
"copies": "1",
"size": "1433",
"license": "mit",
"hash": 7077598009132946000,
"line_mean": 21.746031746,
"line_max": 123,
"alpha_frac": 0.5799023029,
"autogenerated": false,
"ratio": 3.5646766169154227,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9643160789839418,
"avg_score": 0.00028362599520094777,
"num_lines": 63
} |
from functools import wraps
def chainable_method(fn):
    """Make a mutating method return `self` so calls can be chained."""
    @wraps(fn)
    def inner(self, *args, **kwargs):
        # fn's own return value is deliberately discarded in favour of self.
        fn(self, *args, **kwargs)
        return self
    return inner
class Sortable(object):
    # Mixin: exposes the Redis SORT command for containers that provide
    # `self.database` and `self.key` (see `Container`).
    def sort(self, pattern=None, limit=None, offset=None, get_pattern=None,
             ordering=None, alpha=True, store=None):
        """Run Redis SORT against this container's key.

        :param pattern: BY pattern for external-key sorting.
        :param limit: maximum number of elements (LIMIT count).
        :param offset: starting index (LIMIT offset).
        :param get_pattern: GET pattern(s) for the returned values.
        :param ordering: 'DESC'/'desc' for descending; anything else ascending.
        :param alpha: sort lexicographically rather than numerically.
        :param store: destination key to store the result in, if given.
        """
        if limit or offset:
            # LIMIT needs an explicit start; default it to 0 when only
            # `limit` was supplied.
            offset = offset or 0
        return self.database.sort(
            self.key,
            start=offset,
            num=limit,
            by=pattern,
            get=get_pattern,
            desc=ordering in ('DESC', 'desc'),
            alpha=alpha,
            store=store)
class Container(object):
    """
    Base-class for rich Redis object wrappers.
    """
    def __init__(self, database, key):
        # `database` is the Redis client; `key` names the backing Redis key.
        self.database = database
        self.key = key
    def expire(self, ttl=None):
        """
        Expire the given key in the given number of seconds.
        If ``ttl`` is ``None``, then any expiry will be cleared
        and key will be persisted.
        """
        if ttl is not None:
            self.database.expire(self.key, ttl)
        else:
            self.database.persist(self.key)
    def pexpire(self, ttl=None):
        """
        Expire the given key in the given number of milliseconds.
        If ``ttl`` is ``None``, then any expiry will be cleared
        and key will be persisted.
        """
        if ttl is not None:
            self.database.pexpire(self.key, ttl)
        else:
            self.database.persist(self.key)
    def dump(self):
        """
        Dump the contents of the given key using Redis' native
        serialization format.
        """
        return self.database.dump(self.key)
    @chainable_method
    def clear(self):
        """
        Clear the contents of the container by deleting the key.

        Returns ``self`` (courtesy of ``chainable_method``) for chaining.
        """
        self.database.delete(self.key)
class Hash(Container):
    """
    Redis Hash object wrapper. Supports a dictionary-like interface
    with some modifications.
    See `Hash commands <http://redis.io/commands#hash>`_ for more info.
    """
    def __repr__(self):
        l = len(self)
        if l > 5:
            # Get a few keys.
            # NOTE(review): hscan returns a (cursor, data) pair in redis-py,
            # so the repr shows that tuple for large hashes -- confirm
            # intended.
            data = self.database.hscan(self.key, count=5)
        else:
            data = self.as_dict()
        return '<Hash "%s": %s>' % (self.key, data)
    def __getitem__(self, item):
        """
        Retrieve the value at the given key. To retrieve multiple
        values at once, you can specify multiple keys as a tuple or
        list:
        .. code-block:: python
            hsh = db.Hash('my-hash')
            first, last = hsh['first_name', 'last_name']
        """
        if isinstance(item, (list, tuple)):
            return self.database.hmget(self.key, item)
        else:
            return self.database.hget(self.key, item)
    def get(self, key, fallback=None):
        """Return the value at ``key``, or ``fallback`` when the field is unset."""
        val = self.database.hget(self.key, key)
        return val if val is not None else fallback
    def __setitem__(self, key, value):
        """Set the value of the given key."""
        return self.database.hset(self.key, key, value)
    def __delitem__(self, key):
        """Delete the key from the hash."""
        return self.database.hdel(self.key, key)
    def __contains__(self, key):
        """
        Return a boolean value indicating whether the given key
        exists.
        """
        return self.database.hexists(self.key, key)
    def __len__(self):
        """Return the number of keys in the hash."""
        return self.database.hlen(self.key)
    def _scan(self, *args, **kwargs):
        # Incremental, cursor-based iteration over (key, value) pairs.
        return self.database.hscan_iter(self.key, *args, **kwargs)
    def __iter__(self):
        """Iterate over the items in the hash."""
        return iter(self._scan())
    def search(self, pattern, count=None):
        """
        Search the keys of the given hash using the specified pattern.
        :param str pattern: Pattern used to match keys.
        :param int count: Limit number of results returned.
        :returns: An iterator yielding matching key/value pairs.
        """
        return self._scan(match=pattern, count=count)
    def keys(self):
        """Return the keys of the hash."""
        return self.database.hkeys(self.key)
    def values(self):
        """Return the values stored in the hash."""
        return self.database.hvals(self.key)
    def items(self, lazy=False):
        """
        Like Python's ``dict.items()`` but supports an optional
        parameter ``lazy`` which will return a generator rather than
        a list.
        """
        if lazy:
            return self._scan()
        else:
            # Materializes the scan via __iter__, yielding (key, value) pairs.
            return list(self)
    @chainable_method
    def update(self, *args, **kwargs):
        """
        Update the hash using the given dictionary or key/value pairs.
        """
        if args:
            self.database.hmset(self.key, *args)
        else:
            self.database.hmset(self.key, kwargs)
    def as_dict(self):
        """
        Return a dictionary containing all the key/value pairs in the
        hash.
        """
        return self.database.hgetall(self.key)
    def incr(self, key, incr_by=1):
        """Increment the key by the given amount."""
        return self.database.hincrby(self.key, key, incr_by)
    def incr_float(self, key, incr_by=1.):
        """Increment the key by the given amount."""
        return self.database.hincrbyfloat(self.key, key, incr_by)
class List(Sortable, Container):
    """
    Redis List object wrapper. Supports a list-like interface.

    See `List commands <http://redis.io/commands#list>`_ for more info.
    """
    def __repr__(self):
        # Preview at most the first 10 items; append an ellipsis when
        # the list holds more than that.
        l = len(self)
        n_items = min(l, 10)
        return '<List "%s": %s%s>' % (
            self.key,
            ', '.join(self[:n_items]),
            n_items < l and '...' or '')

    def __getitem__(self, item):
        """
        Retrieve an item from the list by index. In addition to
        integer indexes, you can also pass a ``slice``.
        """
        if isinstance(item, slice):
            start = item.start or 0
            stop = item.stop
            # LRANGE bounds are inclusive: an open-ended slice maps to
            # -1 (the last element); otherwise Python's exclusive
            # ``stop`` becomes Redis' inclusive stop by subtracting 1.
            if not stop:
                stop = -1
            else:
                stop -= 1
            return self.database.lrange(self.key, start, stop)
        return self.database.lindex(self.key, item)

    def __setitem__(self, idx, value):
        """Set the value of the given index (Redis ``LSET``)."""
        return self.database.lset(self.key, idx, value)

    def __delitem__(self, item):
        """
        By default Redis treats deletes as delete by value, as
        opposed to delete by index. If an integer is passed into the
        function, it will be treated as an index, otherwise it will
        be treated as a value.
        """
        if isinstance(item, slice):
            start = item.start or 0
            stop = item.stop or -1
            if stop > 0:
                stop -= 1
            # NOTE(review): LTRIM *retains* the start..stop range and
            # discards everything outside it, which is the opposite of
            # Python's ``del lst[start:stop]`` semantics — verify this
            # is intended.
            return self.database.ltrim(self.key, start, stop)
        elif isinstance(item, int):
            # Translate the index into its stored value, then fall
            # through to delete-by-value below.
            item = self[item]
            if item is None:
                return
        # NOTE(review): ``lrem`` argument order differs across
        # redis-py versions ((name, count, value) vs (name, value,
        # num)) — confirm against the pinned client version.
        return self.database.lrem(self.key, item)

    def __len__(self):
        """Return the length of the list (Redis ``LLEN``)."""
        return self.database.llen(self.key)

    def __iter__(self):
        """Iterate over the items in the list."""
        return iter(self.database.lrange(self.key, 0, -1))

    def append(self, value):
        """Add the given value to the end of the list (``RPUSH``)."""
        return self.database.rpush(self.key, value)

    def prepend(self, value):
        """Add the given value to the beginning of the list (``LPUSH``)."""
        return self.database.lpush(self.key, value)

    def extend(self, value):
        """Extend the list by the given iterable of values."""
        return self.database.rpush(self.key, *value)

    def insert(self, value, pivot, where):
        # ``where`` is 'before' or 'after' relative to the first
        # occurrence of ``pivot`` (Redis ``LINSERT``).
        return self.database.linsert(self.key, where, pivot, value)

    def insert_before(self, value, key):
        """
        Insert the given value into the list before the index
        containing ``key``.
        """
        self.insert(value, key, 'before')

    def insert_after(self, value, key):
        """
        Insert the given value into the list after the index
        containing ``key``.
        """
        self.insert(value, key, 'after')

    def popleft(self):
        """Remove and return the first item of the list (``LPOP``)."""
        return self.database.lpop(self.key)

    def popright(self):
        """Remove and return the last item of the list (``RPOP``)."""
        return self.database.rpop(self.key)
    # Alias: plain ``pop()`` removes from the right, like Python lists.
    pop = popright

    def move_tail(self, key):
        # Atomically pop this list's tail and push it onto the head of
        # the list stored at ``key`` (Redis ``RPOPLPUSH``).
        return self.database.rpoplpush(self.key, key)
class Set(Sortable, Container):
    """
    Redis Set object wrapper. Supports a set-like interface.

    See `Set commands <http://redis.io/commands#set>`_ for more info.
    """
    def __repr__(self):
        return '<Set "%s": %s items>' % (self.key, len(self))

    def add(self, *items):
        """Add the given items to the set (Redis ``SADD``)."""
        return self.database.sadd(self.key, *items)

    def __delitem__(self, item):
        """Remove the given item from the set."""
        return self.remove(item)

    def remove(self, *items):
        """Remove the given item(s) from the set (Redis ``SREM``)."""
        return self.database.srem(self.key, *items)

    def pop(self):
        """Remove and return an arbitrary element (Redis ``SPOP``)."""
        return self.database.spop(self.key)

    def _first_or_any(self):
        # Sets are unordered, so "first" is simply a random member.
        return self.random()

    def __contains__(self, item):
        """
        Return a boolean value indicating whether the given item is
        a member of the set.
        """
        return self.database.sismember(self.key, item)

    def __len__(self):
        """Return the number of items in the set (Redis ``SCARD``)."""
        return self.database.scard(self.key)

    def _scan(self, *args, **kwargs):
        # Thin wrapper over SSCAN; lazily yields members.
        return self.database.sscan_iter(self.key, *args, **kwargs)

    def __iter__(self):
        """Return an iterable that yields the items of the set."""
        return iter(self._scan())

    def search(self, pattern, count=None):
        """
        Search the values of the given set using the specified pattern.

        :param str pattern: Pattern used to match keys.
        :param int count: Limit number of results returned.
        :returns: An iterator yielding matching values.
        """
        return self._scan(match=pattern, count=count)

    def members(self):
        """Return a ``set()`` containing the members of the set."""
        return self.database.smembers(self.key)

    def random(self, n=None):
        """Return a random member (or ``n`` members) of the set."""
        return self.database.srandmember(self.key, n)

    # The binary operators below run server-side and return the raw
    # client result (the resulting members), NOT a Set container; use
    # the *store methods to get a container referencing a new key.
    def __sub__(self, other):
        """
        Return the set difference of the current set and the left-
        hand :py:class:`Set` object.
        """
        return self.database.sdiff(self.key, other.key)

    def __or__(self, other):
        """
        Return the set union of the current set and the left-hand
        :py:class:`Set` object.
        """
        return self.database.sunion(self.key, other.key)

    def __and__(self, other):
        """
        Return the set intersection of the current set and the left-
        hand :py:class:`Set` object.
        """
        return self.database.sinter(self.key, other.key)

    # In-place variants overwrite this set's key with the result;
    # @chainable_method (defined elsewhere) presumably returns self.
    @chainable_method
    def __isub__(self, other):
        self.diffstore(self.key, other)

    @chainable_method
    def __ior__(self, other):
        self.unionstore(self.key, other)

    @chainable_method
    def __iand__(self, other):
        self.interstore(self.key, other)

    def diffstore(self, dest, *others):
        """
        Store the set difference of the current set and one or more
        others in a new key.

        :param dest: the name of the key to store set difference
        :param others: One or more :py:class:`Set` instances
        :returns: A :py:class:`Set` referencing ``dest``.
        """
        keys = [self.key]
        keys.extend([other.key for other in others])
        self.database.sdiffstore(dest, keys)
        return self.database.Set(dest)

    def interstore(self, dest, *others):
        """
        Store the intersection of the current set and one or more
        others in a new key.

        :param dest: the name of the key to store intersection
        :param others: One or more :py:class:`Set` instances
        :returns: A :py:class:`Set` referencing ``dest``.
        """
        keys = [self.key]
        keys.extend([other.key for other in others])
        self.database.sinterstore(dest, keys)
        return self.database.Set(dest)

    def unionstore(self, dest, *others):
        """
        Store the union of the current set and one or more
        others in a new key.

        :param dest: the name of the key to store union
        :param others: One or more :py:class:`Set` instances
        :returns: A :py:class:`Set` referencing ``dest``.
        """
        keys = [self.key]
        keys.extend([other.key for other in others])
        self.database.sunionstore(dest, keys)
        return self.database.Set(dest)
class ZSet(Sortable, Container):
    """
    Redis ZSet object wrapper. Acts like a set and a dictionary.

    See `Sorted set commands <http://redis.io/commands#sorted_set>`_
    for more info.
    """
    def __repr__(self):
        # Preview the 5 lowest-ranked members (without scores).
        l = len(self)
        n_items = min(l, 5)
        return '<ZSet "%s": %s%s>' % (
            self.key,
            ', '.join(self[:n_items, False]),
            n_items < l and '...' or '')

    def add(self, *args, **kwargs):
        """
        Add the given item/score pairs to the ZSet. Arguments are
        specified as ``item1, score1, item2, score2...``.

        NOTE(review): redis-py >= 3.0 changed ``zadd`` to take a
        mapping rather than flat item/score args — confirm this call
        matches the pinned client version.
        """
        return self.database.zadd(self.key, *args, **kwargs)

    def _convert_slice(self, s):
        # Translate a Python slice whose endpoints may be either
        # integer ranks or member names into an inclusive (start, stop)
        # rank pair suitable for ZRANGE-style commands.
        def _slice_to_indexes(s):
            start = s.start
            stop = s.stop
            if isinstance(start, int) or isinstance(stop, int):
                return start, stop
            # Member-name endpoints are resolved to ranks via ZRANK; a
            # missing member is an error.
            if start:
                start = self.database.zrank(self.key, start)
                if start is None:
                    raise KeyError(s.start)
            if stop:
                stop = self.database.zrank(self.key, stop)
                if stop is None:
                    raise KeyError(s.stop)
            return start, stop
        start, stop = _slice_to_indexes(s)
        start = start or 0
        # Open-ended slices map to -1 (last rank); otherwise convert
        # Python's exclusive stop to Redis' inclusive stop.
        if not stop:
            stop = -1
        else:
            stop -= 1
        return start, stop

    def __getitem__(self, item):
        """
        Retrieve the given values from the sorted set. Accepts a
        variety of parameters for the input:

        .. code-block:: python

            zs = db.ZSet('my-zset')

            # Return the first 10 elements with their scores.
            zs[:10, True]

            # Return the first 10 elements without scores.
            zs[:10]
            zs[:10, False]

            # Return the range of values between 'k1' and 'k10' along
            # with their scores.
            zs['k1':'k10', True]

            # Return the range of items preceding and including 'k5'
            # without scores.
            zs[:'k5', False]
        """
        # A 2-tuple index is (range, withscores-flag).
        if isinstance(item, tuple) and len(item) == 2:
            item, withscores = item
        else:
            withscores = False
        if isinstance(item, slice):
            start, stop = self._convert_slice(item)
        else:
            start = stop = item
        return self.database.zrange(
            self.key,
            start,
            stop,
            withscores=withscores)

    def __setitem__(self, item, score):
        """Add item to the set with the given score.

        NOTE(review): argument order passed to ``zadd`` is
        version-dependent in redis-py — verify against the client.
        """
        return self.database.zadd(self.key, item, score)

    def __delitem__(self, item):
        """
        Delete the given item(s) from the set. Like
        :py:meth:`~ZSet.__getitem__`, this method supports a wide
        variety of indexing and slicing options.
        """
        if isinstance(item, slice):
            start, stop = self._convert_slice(item)
            return self.database.zremrangebyrank(self.key, start, stop)
        else:
            return self.remove(item)

    def remove(self, *items):
        """Remove the given items from the ZSet (Redis ``ZREM``)."""
        return self.database.zrem(self.key, *items)

    def __contains__(self, item):
        """
        Return a boolean indicating whether the given item is in the
        sorted set.
        """
        return not (self.rank(item) is None)

    def __len__(self):
        """Return the number of items in the sorted set (``ZCARD``)."""
        return self.database.zcard(self.key)

    def _scan(self, *args, **kwargs):
        # Thin wrapper over ZSCAN; lazily yields (member, score) pairs.
        return self.database.zscan_iter(self.key, *args, **kwargs)

    def __iter__(self):
        """
        Return an iterator that will yield (item, score) tuples.
        """
        return iter(self._scan())

    def iterator(self, with_scores=False, reverse=False):
        # NOTE(review): ``self.database.search(None)`` looks suspect —
        # other methods use ``self.search`` / ``self._scan`` for the
        # with-scores case; verify this isn't a bug.
        if with_scores and not reverse:
            return self.database.search(None)
        return self.range(
            0,
            -1,
            with_scores=with_scores,
            reverse=reverse)

    def search(self, pattern, count=None):
        """
        Search the set, returning items that match the given search
        pattern.

        :param str pattern: Search pattern using wildcards.
        :param int count: Limit result set size.
        :returns: Iterator that yields matching item/score tuples.
        """
        return self._scan(match=pattern, count=count)

    def score(self, item):
        """Return the score of the given item (Redis ``ZSCORE``)."""
        return self.database.zscore(self.key, item)

    def rank(self, item, reverse=False):
        """Return the rank of the given item, optionally from the
        high-score end (``ZREVRANK``)."""
        fn = reverse and self.database.zrevrank or self.database.zrank
        return fn(self.key, item)

    def count(self, low, high=None):
        """
        Return the number of items between the given score bounds.
        Omitting ``high`` counts items with score exactly ``low``.
        """
        if high is None:
            high = low
        return self.database.zcount(self.key, low, high)

    def lex_count(self, low, high):
        """
        Count the number of members in a sorted set between a given
        lexicographical range.
        """
        return self.database.zlexcount(self.key, low, high)

    def range(self, low, high, with_scores=False, desc=False, reverse=False):
        """
        Return a range of items between ``low`` and ``high``. By
        default scores will not be included, but this can be controlled
        via the ``with_scores`` parameter.

        :param low: Lower bound.
        :param high: Upper bound.
        :param bool with_scores: Whether the range should include the
            scores along with the items.
        :param bool desc: Whether to sort the results descendingly.
        :param bool reverse: Whether to select the range in reverse.
        """
        if reverse:
            return self.database.zrevrange(self.key, low, high, with_scores)
        else:
            return self.database.zrange(self.key, low, high, desc, with_scores)

    def range_by_score(self, low, high, start=None, num=None,
                       with_scores=False, reverse=False):
        # When reversed, ZREVRANGEBYSCORE expects max before min, so
        # the bounds are swapped.
        if reverse:
            fn = self.database.zrevrangebyscore
            low, high = high, low
        else:
            fn = self.database.zrangebyscore
        return fn(self.key, low, high, start, num, with_scores)

    def range_by_lex(self, low, high, start=None, num=None, reverse=False):
        """
        Return a range of members in a sorted set, by lexicographical range.
        """
        if reverse:
            fn = self.database.zrevrangebylex
            low, high = high, low
        else:
            fn = self.database.zrangebylex
        return fn(self.key, low, high, start, num)

    def remove_by_rank(self, low, high=None):
        """
        Remove elements from the ZSet by their rank (relative position).

        :param low: Lower bound.
        :param high: Upper bound (defaults to ``low``).
        """
        if high is None:
            high = low
        return self.database.zremrangebyrank(self.key, low, high)

    def remove_by_score(self, low, high=None):
        """
        Remove elements from the ZSet by their score.

        :param low: Lower bound.
        :param high: Upper bound (defaults to ``low``).
        """
        if high is None:
            high = low
        return self.database.zremrangebyscore(self.key, low, high)

    def remove_by_lex(self, low, high):
        # Remove members within the lexicographical range [low, high].
        return self.database.zremrangebylex(self.key, low, high)

    def incr(self, key, incr_by=1):
        """
        Increment the score of an item in the ZSet.

        :param key: Item to increment.
        :param incr_by: Amount to increment item's score.

        NOTE(review): redis-py >= 3.0 reordered ``zincrby`` arguments
        to (name, amount, value) — verify against the pinned client.
        """
        return self.database.zincrby(self.key, key, incr_by)

    def _first_or_any(self):
        # Lowest-ranked member, or None when empty.
        item = self[0]
        if item:
            return item[0]

    @chainable_method
    def __ior__(self, other):
        self.unionstore(self.key, other)
        return self

    @chainable_method
    def __iand__(self, other):
        self.interstore(self.key, other)
        return self

    def interstore(self, dest, *others, **kwargs):
        """
        Store the intersection of the current zset and one or more
        others in a new key.

        :param dest: the name of the key to store intersection
        :param others: One or more :py:class:`ZSet` instances
        :returns: A :py:class:`ZSet` referencing ``dest``.
        """
        keys = [self.key]
        keys.extend([other.key for other in others])
        self.database.zinterstore(dest, keys, **kwargs)
        return self.database.ZSet(dest)

    def unionstore(self, dest, *others, **kwargs):
        """
        Store the union of the current set and one or more
        others in a new key.

        :param dest: the name of the key to store union
        :param others: One or more :py:class:`ZSet` instances
        :returns: A :py:class:`ZSet` referencing ``dest``.
        """
        keys = [self.key]
        keys.extend([other.key for other in others])
        self.database.zunionstore(dest, keys, **kwargs)
        return self.database.ZSet(dest)
class HyperLogLog(Container):
    """
    Redis HyperLogLog object wrapper.

    See `HyperLogLog commands <http://redis.io/commands#hyperloglog>`_
    for more info.
    """
    def add(self, *items):
        """
        Add the given items to the HyperLogLog (Redis ``PFADD``).
        """
        return self.database.pfadd(self.key, *items)

    def __len__(self):
        """Return the approximate cardinality (Redis ``PFCOUNT``)."""
        return self.database.pfcount(self.key)

    def __ior__(self, other):
        """Merge one or more HyperLogLogs into this one in place."""
        if not isinstance(other, (list, tuple)):
            other = [other]
        return self.merge(self.key, *other)

    def merge(self, dest, *others):
        """
        Merge one or more :py:class:`HyperLogLog` instances.

        :param dest: Key to store merged result.
        :param others: One or more ``HyperLogLog`` instances.
        :returns: A :py:class:`HyperLogLog` referencing ``dest``.
        """
        items = [self.key]
        items.extend([other.key for other in others])
        self.database.pfmerge(dest, *items)
        # Consistency: obtain the result container through the database
        # factory, matching Set.diffstore / ZSet.unionstore, instead of
        # constructing HyperLogLog directly.
        return self.database.HyperLogLog(dest)
class Array(Container):
    """
    Custom container that emulates an array (as opposed to the
    linked-list implementation of :py:class:`List`). This gives:

    * O(1) append, get, len, pop last, set
    * O(n) remove from middle

    :py:class:`Array` is built on top of the hash data type and
    is implemented using lua scripts.

    The lua scripts ('array_get', 'array_set', ...) are registered
    elsewhere; comments below describe the intended behavior only.
    """
    def __getitem__(self, idx):
        """Get the value stored in the given index."""
        return self.database.run_script(
            'array_get',
            keys=[self.key],
            args=[idx])

    def __setitem__(self, idx, value):
        """Set the value at the given index."""
        return self.database.run_script(
            'array_set',
            keys=[self.key],
            args=[idx, value])

    def __delitem__(self, idx):
        """Delete the given index."""
        return self.pop(idx)

    def __len__(self):
        """Return the number of items in the array."""
        # Backed by a hash, so HLEN gives the element count directly.
        return self.database.hlen(self.key)

    def append(self, value):
        """Append a new value to the end of the array."""
        self.database.run_script(
            'array_append',
            keys=[self.key],
            args=[value])

    def extend(self, values):
        """Extend the array, appending the given values."""
        self.database.run_script(
            'array_extend',
            keys=[self.key],
            args=values)

    def pop(self, idx=None):
        """
        Remove an item from the array. By default this will be the
        last item by index, but any index can be specified.
        """
        if idx is not None:
            return self.database.run_script(
                'array_remove',
                keys=[self.key],
                args=[idx])
        else:
            return self.database.run_script(
                'array_pop',
                keys=[self.key],
                args=[])

    def __contains__(self, item):
        """
        Return a boolean indicating whether the given item is stored
        in the array. O(n).
        """
        # Linear scan; values are compared as returned by the client
        # (typically bytes/str depending on decode settings).
        for value in self:
            if value == item:
                return True
        return False

    def __iter__(self):
        """Return an iterable that yields array items."""
        # Hash fields are the indexes; sort by field to restore order,
        # then yield only the values.
        return iter(
            item[1] for item in sorted(self.database.hscan_iter(self.key)))
| {
"repo_name": "johndlong/walrus",
"path": "walrus/containers.py",
"copies": "1",
"size": "25627",
"license": "mit",
"hash": 9207946388598180000,
"line_mean": 29.9879081016,
"line_max": 79,
"alpha_frac": 0.5635462598,
"autogenerated": false,
"ratio": 4.046581399021001,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00016488952401890733,
"num_lines": 827
} |
from functools import wraps
def csp_exempt(f):
    """Mark every response returned by the view as exempt from CSP
    header processing (sets ``_csp_exempt`` on the response)."""
    @wraps(f)
    def inner(*args, **kwargs):
        response = f(*args, **kwargs)
        setattr(response, '_csp_exempt', True)
        return response
    return inner
def csp_update(**kwargs):
    """View decorator that *appends* the given values to the CSP
    directives; directive names are normalized (``IMG_SRC`` ->
    ``img-src``) and stored on the response as ``_csp_update``."""
    update = {key.lower().replace('_', '-'): value
              for key, value in kwargs.items()}

    def decorator(f):
        @wraps(f)
        def inner(*args, **kw):
            response = f(*args, **kw)
            response._csp_update = update
            return response
        return inner
    return decorator
def csp_replace(**kwargs):
    """View decorator that *replaces* the given CSP directives;
    directive names are normalized (``IMG_SRC`` -> ``img-src``) and
    stored on the response as ``_csp_replace``."""
    replace = {key.lower().replace('_', '-'): value
               for key, value in kwargs.items()}

    def decorator(f):
        @wraps(f)
        def inner(*args, **kw):
            response = f(*args, **kw)
            response._csp_replace = replace
            return response
        return inner
    return decorator
def csp(**kwargs):
    """View decorator that sets the complete CSP policy for the view;
    directive names are normalized (``IMG_SRC`` -> ``img-src``) and
    stored on the response as ``_csp_config``."""
    config = {key.lower().replace('_', '-'): value
              for key, value in kwargs.items()}

    def decorator(f):
        @wraps(f)
        def inner(*args, **kw):
            response = f(*args, **kw)
            response._csp_config = config
            return response
        return inner
    return decorator
| {
"repo_name": "graingert/django-csp",
"path": "csp/decorators.py",
"copies": "2",
"size": "1105",
"license": "bsd-3-clause",
"hash": -3387153826585380400,
"line_mean": 21.5510204082,
"line_max": 79,
"alpha_frac": 0.4859728507,
"autogenerated": false,
"ratio": 3.611111111111111,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5097083961811112,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def lazy_property(fct, name=None):
    """Build a property whose value is computed by *fct* on first
    access and cached on the instance under ``_<name>``.

    :param fct: single-argument callable invoked with the instance.
    :param name: property name; defaults to ``fct.__name__`` (a name
        is mandatory when *fct* is a lambda).
    """
    prop_name = name if name else fct.__name__
    cache_attr = '_%s' % prop_name
    if cache_attr == '_<lambda>':
        raise Exception("cannot assign <lambda> to lazy property")

    @property
    @wraps(fct)
    def getter(self):
        # Compute once, then serve the cached instance attribute.
        if not hasattr(self, cache_attr):
            setattr(self, cache_attr, fct(self))
        return getattr(self, cache_attr)

    return getter
class DeferredExecutionMixin(object):
    """Mixin which defers execution of public method calls by adding
    them to a queue until 'apply' is invoked or the object is
    invoked '()'.

    Don't want to use this if you modify object variables between
    method calls (Deferred calls methods later, against current state).
    """
    # Method names invoked immediately rather than deferred.
    NOT_DEFERRED = ['apply']

    def __init__(self):
        # FIFO queue of zero-argument callables.
        self._deferred = []

    def __getattribute__(self, name):
        attr = super(DeferredExecutionMixin, self).__getattribute__(name)
        # Defer only public callables; private names, NOT_DEFERRED
        # names, and nested deferred objects pass through untouched.
        should_defer = (callable(attr)
                        and not name.startswith('_')
                        and name not in self.NOT_DEFERRED
                        and not isinstance(attr, DeferredExecutionMixin))
        if not should_defer:
            return attr

        def wrapped(*args, **kwargs):
            # Capture the bound method with its arguments; return self
            # so calls can be chained fluently.
            self._deferred.append(lambda: attr(*args, **kwargs))
            return self
        return wrapped

    def __call__(self):
        # Execute queued calls in order. A plain loop (instead of a
        # throwaway list comprehension) makes the side effects explicit.
        for deferred_call in self._deferred:
            deferred_call()
"repo_name": "bpsmith/tia",
"path": "tia/util/decorator.py",
"copies": "1",
"size": "1339",
"license": "bsd-3-clause",
"hash": 9160870729775040000,
"line_mean": 30.1627906977,
"line_max": 114,
"alpha_frac": 0.6064227035,
"autogenerated": false,
"ratio": 4.264331210191083,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5370753913691082,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def memoized(f):
    """
    A simple memoization decorator.

    Results are cached in a dict exposed as ``f.cache`` (also visible
    on the wrapper), keyed by the positional arguments plus a frozenset
    of the keyword items — so all arguments must be hashable.
    """
    f.cache = {}

    @wraps(f)
    def wrapper(*args, **kwargs):
        key = (args, frozenset(kwargs.items()))
        try:
            return f.cache[key]
        except KeyError:
            f.cache[key] = value = f(*args, **kwargs)
            return value
    return wrapper
class Reserver(object):
    """
    Manage reservations of some objects, handing out preferred objects
    first and falling back to a designated overflow object once all
    regular objects are taken.
    """

    def __init__(self, objs, overflow_obj):
        """
        :param objs: Objects available for reservation, ordered from
            most to least preferred.
        :param overflow_obj: Object handed out once ``objs`` is
            depleted.
        """
        self._objs = objs
        self._overflow_obj = overflow_obj
        # One flag per object; True means currently reserved.
        self.assigned_objs = [False for _ in range(self.num_objs)]

    @property
    def num_objs(self):
        """Number of reservable (non-overflow) objects."""
        return len(self._objs)

    def allocate(self):
        """Reserve and return the most-preferred free object, or the
        overflow object when everything is taken."""
        for index, candidate in enumerate(self._objs):
            if not self.assigned_objs[index]:
                self.assigned_objs[index] = True
                return candidate
        # Everything taken: hand out the overflow object.
        return self._overflow_obj

    def free(self, obj_free):
        """Release a previously allocated object.

        Freeing the overflow object is a silent no-op; freeing an
        object that was never part of the pool raises ``ValueError``.
        """
        if self._overflow_obj == obj_free:
            return
        for index, candidate in enumerate(self._objs):
            if candidate == obj_free:
                # Unassigned objects are freed without complaint.
                self.assigned_objs[index] = False
                return
        raise ValueError('Not a valid object: {0}'.format(obj_free))
| {
"repo_name": "0/Boltzmannizer",
"path": "boltzmannizer/tools/misc.py",
"copies": "1",
"size": "1396",
"license": "mit",
"hash": -5229988607004490000,
"line_mean": 18.3888888889,
"line_max": 75,
"alpha_frac": 0.6676217765,
"autogenerated": false,
"ratio": 3.1799544419134396,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4347576218413439,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def _restore_languages_on_generator_exit(method):
    """Decorator for generator methods that mutate ``self.languages``:
    once the wrapped generator has been fully exhausted, the original
    contents of ``self.languages`` are restored in place.  (If the
    consumer abandons the generator early, no restore happens.)"""
    @wraps(method)
    def wrapped(self, *args, **kwargs):
        snapshot = list(self.languages)
        for language in method(self, *args, **kwargs):
            yield language
        # Reached only on full exhaustion: restore in place so other
        # references to the same list see the original contents.
        self.languages[:] = snapshot
    return wrapped
class BaseLanguageDetector:
    """Yields the languages applicable to a date string, in priority
    order, consuming (popping) non-matching candidates as it goes."""

    def __init__(self, languages):
        # Keep a private copy so the caller's list is not shared.
        self.languages = languages[:]

    @_restore_languages_on_generator_exit
    def iterate_applicable_languages(self, date_string, settings=None, modify=False):
        # When ``modify`` is true the instance's own list is mutated
        # while iterating; the decorator restores it on full
        # exhaustion of the generator.
        languages = self.languages if modify else self.languages[:]
        yield from self._filter_languages(date_string, languages, settings)

    @staticmethod
    def _filter_languages(date_string, languages, settings=None):
        # NOTE: mutates the ``languages`` list that is passed in,
        # popping each candidate after it has been considered.
        while languages:
            language = languages[0]
            if language.is_applicable(date_string, strip_timezone=False, settings=settings):
                yield language
            elif language.is_applicable(date_string, strip_timezone=True, settings=settings):
                yield language

            languages.pop(0)
class AutoDetectLanguage(BaseLanguageDetector):
    """Detector that can optionally retry ("redetect") with languages
    from the full pool that were not tried in the first pass."""

    def __init__(self, languages, allow_redetection=False):
        super().__init__(languages=languages[:])
        # Full set of candidates, kept separate from the working list.
        self.language_pool = languages[:]
        self.allow_redetection = allow_redetection

    @_restore_languages_on_generator_exit
    def iterate_applicable_languages(self, date_string, modify=False, settings=None):
        languages = self.languages if modify else self.languages[:]
        initial_languages = languages[:]
        yield from self._filter_languages(date_string, languages, settings=settings)

        if not self.allow_redetection:
            return

        # Try languages that was not tried before with this date_string
        languages = [language
                     for language in self.language_pool
                     if language not in initial_languages]
        # With ``modify`` set, the retry list becomes the new working
        # list (the decorator restores the original on exhaustion).
        if modify:
            self.languages = languages

        yield from self._filter_languages(date_string, languages, settings=settings)
class ExactLanguages(BaseLanguageDetector):
    """Detector restricted to an explicit, fixed list of languages
    supplied by the caller; the working list is never modified."""

    def __init__(self, languages):
        if languages is None:
            raise ValueError("language cannot be None for ExactLanguages")
        super().__init__(languages=languages)

    @_restore_languages_on_generator_exit
    def iterate_applicable_languages(self, date_string, modify=False, settings=None):
        # Always delegate with modify=False so the exact list is kept.
        parent_iterate = super().iterate_applicable_languages
        yield from parent_iterate(date_string, modify=False, settings=settings)
| {
"repo_name": "scrapinghub/dateparser",
"path": "dateparser/search/detection.py",
"copies": "1",
"size": "2609",
"license": "bsd-3-clause",
"hash": 215689389633639360,
"line_mean": 36.2714285714,
"line_max": 101,
"alpha_frac": 0.6646224607,
"autogenerated": false,
"ratio": 4.59330985915493,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5757932319854929,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def split_in_size_n(l, n):
    """Split sequence *l* into consecutive chunks of length *n*;
    the final chunk may be shorter."""
    chunks = []
    for start in range(0, len(l), n):
        chunks.append(l[start:start + n])
    return chunks
def reg_to_memfunc(f, in_size, out_size, per_reg=256):
    """ Makes a function that operates on registers use memory instead. """
    # To prevent circular imports
    from .instructions import vmovdqu
    from .data import Register

    @wraps(f)
    def memfunc(out_data, in_data):
        # Allocate fresh registers, load the inputs, run f, then spill
        # the results back to the output memory locations.
        in_regs = [Register(per_reg) for _ in range(in_size)]
        out_regs = [Register(per_reg) for _ in range(out_size)]
        for register, memory in zip(in_regs, in_data):
            vmovdqu(register, memory)
        f(out_regs, in_regs)
        for memory, register in zip(out_data, out_regs):
            vmovdqu(memory, register)
    return memfunc
def format_value(value, n=32):
    """Render *value* most-significant element first, each centered in
    a 4-char cell, with ``|`` after every 8 cells and a newline after
    every *n* cells (alignment padded to 256-element rows)."""
    pieces = []
    msb_first = list(reversed(value))
    # Shift the running index so group boundaries stay aligned even
    # when the value is shorter than a full 256-element row.
    offset = 256 - len(msb_first) % 256
    for index, element in enumerate(msb_first):
        index += offset
        pieces.append('{:^4}'.format(element))
        if index % 8 == 7:
            pieces.append('|')
        if index % n == n - 1:
            pieces.append('\n')
    return ''.join(pieces)
def sequence_to_values(dst, seq, padding=None):
    """Distribute *seq* across the registers in *dst*, assigning each
    register's ``value`` in order; a partially-filled final register is
    padded with *padding*.

    Raises ``Exception`` when *seq* holds more elements than *dst* can
    accommodate.
    """
    remaining = list(seq)
    for register in dst:
        size = register.size
        if len(remaining) >= size:
            register.value = remaining[:size]
            remaining = remaining[size:]
        else:
            # Last (partial) register: pad out to full width.
            register.value = remaining + [padding] * (size - len(remaining))
            remaining = []
            break
    if remaining:
        raise Exception("Sequence did not fit in registers; "
                        "{} elements remaining".format(len(remaining)))
| {
"repo_name": "joostrijneveld/bitpermutations",
"path": "bitpermutations/utils.py",
"copies": "1",
"size": "1482",
"license": "cc0-1.0",
"hash": -5111207290359574000,
"line_mean": 26.9622641509,
"line_max": 75,
"alpha_frac": 0.5371120108,
"autogenerated": false,
"ratio": 3.375854214123007,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9409472164825172,
"avg_score": 0.0006988120195667365,
"num_lines": 53
} |
from functools import wraps
def stub(fn):
    """
    Used for unimplemented parent class methods; calling the wrapped
    method raises ``NotImplementedError`` naming the method and its
    docstring, and the method is tagged with ``__stub__ = True``.

    :param fn: method
    """
    fn.__stub__ = True
    @wraps(fn)
    def wrapper(*args, **kwargs):
        # Bug fix: ``NotImplemented`` is a sentinel constant, not an
        # exception type — raising it produced a TypeError. The intent
        # is clearly NotImplementedError.
        raise NotImplementedError("Method {0} is not implemented. Method desc: {1}".format(fn.__name__, fn.__doc__))
    return wrapper
def create_module(api, name=None):
    """
    A decorator which dynamically creates and binds new module

    :param api: flask_restful api endpoint
    :param name: optional name override for module. If not defined, automatically picked from function name
    :return:
    """
    # Imports are local to avoid import-time cycles with wutu.util.
    from wutu.util import get_logger, camel_case_name, class_factory, setup_endpoint
    from wutu.module import Module

    def injector(fn):
        log = get_logger("Decorators")
        nonlocal name
        if not name:
            # Fall back to the decorated function's name as the module name.
            name = fn.__name__
        # Build a Module subclass from the dict returned by fn() and
        # register an instance of it on the api.
        ctr = class_factory(camel_case_name(name), Module, **fn())
        inst = ctr()
        setup_endpoint(api, inst, name)
        # Optionally emit the client-side service/controller JS.
        if inst.create_service:
            inst.create_service(api.jsstream)
        if inst.create_controller:
            inst.create_controller(api.jsstream)
        log.info("Module '{0}' created".format(name))
    return injector
| {
"repo_name": "zaibacu/wutu",
"path": "wutu/decorators.py",
"copies": "1",
"size": "1265",
"license": "mit",
"hash": -835436842193281900,
"line_mean": 29.119047619,
"line_max": 111,
"alpha_frac": 0.628458498,
"autogenerated": false,
"ratio": 3.940809968847352,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5069268466847352,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def type_name(x):
    """Return the class name of *x*, for use in assertion messages."""
    cls = x.__class__
    return cls.__name__
def check_equal(a, b):
    """Assert ``a == b``; the failure message shows both values with
    their type names (the message is built lazily, only on failure)."""
    assert a == b, '{}:{} != {}:{}'.format(a, type_name(a), b, type_name(b))
def checker_fn(fn_apply, fn_assert):
    """Build a checker that applies *fn_apply* to a tuple of inputs
    and validates the result against the expected value using
    *fn_assert*."""
    def check(inputs, expected):
        actual = fn_apply(*inputs)
        return fn_assert(actual, expected)
    return check
def gentests(fn_apply, fn_assert=check_equal):
    """Decorator for nose-style test generators.

    The decorated function yields cases as tuples whose last element
    is the expected value and whose preceding elements are the inputs
    (a single-element case means "no expected value").  Each case is
    re-yielded as a ``(check, inputs, expected)`` triple for the
    test runner.
    """
    def decorator(func):
        check = checker_fn(fn_apply, fn_assert)

        @wraps(func)
        def decorated():
            for case in func():
                has_expected = len(case) > 1
                inputs = case[:-1] if has_expected else case
                expected = case[-1] if has_expected else None
                yield check, inputs, expected
        return decorated
    return decorator
# other common checker functions
def check_member(a, b):
    """Assert that ``b`` is a member of ``a`` (note the reversed
    argument order relative to ``in``); message is built lazily."""
    assert b in a, '{}:{} not in {}:{}'.format(a, type_name(a),
                                               b, type_name(b))
def check_none(a, *_):
    """Assert that ``a`` is ``None``; extra arguments are ignored so
    this fits the ``fn_assert(actual, expected)`` checker signature."""
    assert a is None, '{}:{} is not None'.format(a, type_name(a))
def check_true(a, *_):
    """Assert that ``a`` is truthy; extra arguments are ignored so
    this fits the ``fn_assert(actual, expected)`` checker signature."""
    assert a, '{}:{} is not True'.format(a, type_name(a))
def check_false(a, *_):
    """Assert that ``a`` is falsy; extra arguments are ignored so
    this fits the ``fn_assert(actual, expected)`` checker signature."""
    assert not a, '{}:{} is not False'.format(a, type_name(a))
| {
"repo_name": "naiquevin/nozzle",
"path": "nozzle.py",
"copies": "1",
"size": "1237",
"license": "mit",
"hash": 2736438287001176600,
"line_mean": 24.2448979592,
"line_max": 76,
"alpha_frac": 0.510105093,
"autogenerated": false,
"ratio": 3.5042492917847023,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45143543847847023,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def use_select2(view_func):
    """Flag the request so the base template includes the select2 js
    library.

    Apply to the ``dispatch`` method of a ``TemplateView`` subclass::

        @use_select2
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def wrapper(class_based_view, request, *args, **kwargs):
        setattr(request, 'use_select2', True)
        return view_func(class_based_view, request, *args, **kwargs)
    return wrapper
def use_select2_v4(view_func):
    """Flag the request so the base template includes the 4.0 version
    of the select2 js library (4.0 is still in testing phase).

    Apply to the ``dispatch`` method of a ``TemplateView`` subclass::

        @use_select2_v4
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def wrapper(class_based_view, request, *args, **kwargs):
        setattr(request, 'use_select2_v4', True)
        return view_func(class_based_view, request, *args, **kwargs)
    return wrapper
def use_angular_js(view_func):
    """Flag the request so the base template includes the angularjs
    library.

    Apply to the ``dispatch`` method of a ``TemplateView`` subclass::

        @use_angular_js
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def wrapper(class_based_view, request, *args, **kwargs):
        setattr(request, 'use_angular_js', True)
        return view_func(class_based_view, request, *args, **kwargs)
    return wrapper
def use_daterangepicker(view_func):
    """Flag the request so the base template includes the
    daterangepicker library.

    Apply to the ``dispatch`` method of a ``TemplateView`` subclass::

        @use_daterangepicker
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def wrapper(class_based_view, request, *args, **kwargs):
        setattr(request, 'use_daterangepicker', True)
        return view_func(class_based_view, request, *args, **kwargs)
    return wrapper
def use_jquery_ui(view_func):
    """Flag the request so the base template includes the jquery-ui
    library.

    Apply to the ``dispatch`` method of a ``TemplateView`` subclass::

        @use_jquery_ui
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def wrapper(class_based_view, request, *args, **kwargs):
        setattr(request, 'use_jquery_ui', True)
        return view_func(class_based_view, request, *args, **kwargs)
    return wrapper
def use_multiselect(view_func):
    """Flag the request so the base template includes the multiselect
    library.

    Apply to the ``dispatch`` method of a ``TemplateView`` subclass::

        @use_multiselect
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def wrapper(class_based_view, request, *args, **kwargs):
        setattr(request, 'use_multiselect', True)
        return view_func(class_based_view, request, *args, **kwargs)
    return wrapper
def use_nvd3(view_func):
    """Flag the request so the base template includes the nvd3 library
    (a library of charts for d3).

    Apply to the ``dispatch`` method of a ``TemplateView`` subclass::

        @use_nvd3
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def wrapper(class_based_view, request, *args, **kwargs):
        setattr(request, 'use_nvd3', True)
        return view_func(class_based_view, request, *args, **kwargs)
    return wrapper
def use_nvd3_v3(view_func):
    """Flag the request so the base template includes version 3 of the
    nvd3 library (a library of charts for d3).

    Apply to the ``dispatch`` method of a ``TemplateView`` subclass::

        @use_nvd3_v3
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def wrapper(class_based_view, request, *args, **kwargs):
        setattr(request, 'use_nvd3_v3', True)
        return view_func(class_based_view, request, *args, **kwargs)
    return wrapper
def use_timeago(view_func):
    """Flag the request so the base template includes the timeago
    library.

    Apply to the ``dispatch`` method of a ``TemplateView`` subclass::

        @use_timeago
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def wrapper(class_based_view, request, *args, **kwargs):
        setattr(request, 'use_timeago', True)
        return view_func(class_based_view, request, *args, **kwargs)
    return wrapper
def use_datatables(view_func):
    """Decorator for the ``dispatch`` method of a TemplateView subclass.

    Flags the request so the base template pulls in the datatables
    library.

    Example:
        @use_datatables
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def tagged(class_based_view, request, *args, **kwargs):
        request.use_datatables = True
        return view_func(class_based_view, request, *args, **kwargs)
    return tagged
def use_typeahead(view_func):
    """Decorator for the ``dispatch`` method of a TemplateView subclass.

    Flags the request so the base template pulls in the typeahead
    library.

    Example:
        @use_typeahead
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def annotated(class_based_view, request, *args, **kwargs):
        request.use_typeahead = True
        return view_func(class_based_view, request, *args, **kwargs)
    return annotated
def use_timepicker(view_func):
    """Decorator for the ``dispatch`` method of a TemplateView subclass.

    Flags the request so the base template pulls in the timepicker
    library.

    Example:
        @use_timepicker
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def stamped(class_based_view, request, *args, **kwargs):
        request.use_timepicker = True
        return view_func(class_based_view, request, *args, **kwargs)
    return stamped
def use_maps(view_func):
    """Decorator for the ``dispatch`` method of a TemplateView subclass.

    Flags the request so the base template pulls in the maps library
    (with sync utils).

    Example:
        @use_maps
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def with_maps(class_based_view, request, *args, **kwargs):
        request.use_maps = True
        return view_func(class_based_view, request, *args, **kwargs)
    return with_maps
def use_maps_async(view_func):
    """Decorator for the ``dispatch`` method of a TemplateView subclass.

    Flags the request so the base template pulls in the maps library
    (with async utils).

    Example:
        @use_maps_async
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def with_async_maps(class_based_view, request, *args, **kwargs):
        request.use_maps_async = True
        return view_func(class_based_view, request, *args, **kwargs)
    return with_async_maps
def maps_prefer_canvas(view_func):
    """Decorator for the ``dispatch`` method of a TemplateView subclass.

    Flags the request so ``L_PREFER_CANVAS = true;`` is emitted before
    the maps library is included.

    Example:
        @maps_prefer_canvas
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def prefer_canvas(class_based_view, request, *args, **kwargs):
        request.maps_prefer_canvas = True
        return view_func(class_based_view, request, *args, **kwargs)
    return prefer_canvas
def use_blazy(view_func):
    """Decorator for the ``dispatch`` method of a TemplateView subclass.

    Flags the request so a blazy watch is created on the body element,
    loading images asynchronously for elements carrying the ``b-lazy``
    class with the image path in
    ``data-src="{% static 'path/to/image.jpg' %}"``.

    Example Tag Usage:
        @use_blazy
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def lazy_flagged(class_based_view, request, *args, **kwargs):
        request.use_blazy = True
        return view_func(class_based_view, request, *args, **kwargs)
    return lazy_flagged
def use_ko_validation(view_func):
    """Decorator for the ``dispatch`` method of a TemplateView subclass.

    Flags the request so knockout validation is available in knockout
    forms.

    Example Tag Usage:
        @use_ko_validation
        def dispatch(self, request, *args, **kwargs):
            return super(MyView, self).dispatch(request, *args, **kwargs)
    """
    @wraps(view_func)
    def ko_flagged(class_based_view, request, *args, **kwargs):
        request.use_ko_validation = True
        return view_func(class_based_view, request, *args, **kwargs)
    return ko_flagged
| {
"repo_name": "qedsoftware/commcare-hq",
"path": "corehq/apps/style/decorators.py",
"copies": "1",
"size": "9771",
"license": "bsd-3-clause",
"hash": -8744902030891091000,
"line_mean": 31.1414473684,
"line_max": 79,
"alpha_frac": 0.6639033876,
"autogenerated": false,
"ratio": 3.7194518462124098,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.488335523381241,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def xframe_options_deny(view_func):
    """
    Decorate a view so its response is served with the HTTP header
    ``X-Frame-Options: DENY``, unless the view already set that header.
    e.g.
    @xframe_options_deny
    def some_view(request):
        ...
    """
    @wraps(view_func)
    def wrapped_view(*args, **kwargs):
        response = view_func(*args, **kwargs)
        # Respect a header the view chose to set itself.
        if response.get('X-Frame-Options') is None:
            response['X-Frame-Options'] = 'DENY'
        return response
    return wrapped_view
def xframe_options_sameorigin(view_func):
    """
    Decorate a view so its response is served with the HTTP header
    ``X-Frame-Options: SAMEORIGIN``, unless the view already set that
    header.
    e.g.
    @xframe_options_sameorigin
    def some_view(request):
        ...
    """
    @wraps(view_func)
    def wrapped_view(*args, **kwargs):
        response = view_func(*args, **kwargs)
        # Respect a header the view chose to set itself.
        if response.get('X-Frame-Options') is None:
            response['X-Frame-Options'] = 'SAMEORIGIN'
        return response
    return wrapped_view
def xframe_options_exempt(view_func):
    """
    Mark a view's responses so XFrameOptionsMiddleware does NOT set the
    X-Frame-Options HTTP header.
    e.g.
    @xframe_options_exempt
    def some_view(request):
        ...
    """
    @wraps(view_func)
    def wrapped_view(*args, **kwargs):
        response = view_func(*args, **kwargs)
        # The middleware checks this attribute before adding the header.
        response.xframe_options_exempt = True
        return response
    return wrapped_view
| {
"repo_name": "mattseymour/django",
"path": "django/views/decorators/clickjacking.py",
"copies": "10",
"size": "1580",
"license": "bsd-3-clause",
"hash": 7710506650923923000,
"line_mean": 25.7796610169,
"line_max": 75,
"alpha_frac": 0.6259493671,
"autogenerated": false,
"ratio": 3.744075829383886,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9370025196483887,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def xframe_options_deny(view_func):
    """
    Modify a view function so its response carries the HTTP header
    ``X-Frame-Options: DENY`` whenever the view did not already set one.
    Usage:
    @xframe_options_deny
    def some_view(request):
        ...
    """
    def _add_header(*args, **kwargs):
        result = view_func(*args, **kwargs)
        if result.get('X-Frame-Options') is not None:
            # The view already decided; leave its header alone.
            return result
        result['X-Frame-Options'] = 'DENY'
        return result
    return wraps(view_func)(_add_header)
def xframe_options_sameorigin(view_func):
    """
    Modify a view function so its response carries the HTTP header
    ``X-Frame-Options: SAMEORIGIN`` whenever the view did not already
    set one. Usage:
    @xframe_options_sameorigin
    def some_view(request):
        ...
    """
    def _add_header(*args, **kwargs):
        result = view_func(*args, **kwargs)
        if result.get('X-Frame-Options') is not None:
            # The view already decided; leave its header alone.
            return result
        result['X-Frame-Options'] = 'SAMEORIGIN'
        return result
    return wraps(view_func)(_add_header)
def xframe_options_exempt(view_func):
    """
    Set a response attribute instructing XFrameOptionsMiddleware NOT to
    add the X-Frame-Options HTTP header. Usage:
    @xframe_options_exempt
    def some_view(request):
        ...
    """
    def _mark_exempt(*args, **kwargs):
        result = view_func(*args, **kwargs)
        result.xframe_options_exempt = True
        return result
    return wraps(view_func)(_mark_exempt)
| {
"repo_name": "arun6582/django",
"path": "django/views/decorators/clickjacking.py",
"copies": "125",
"size": "1565",
"license": "bsd-3-clause",
"hash": 256381051921515870,
"line_mean": 28.5283018868,
"line_max": 78,
"alpha_frac": 0.6338658147,
"autogenerated": false,
"ratio": 3.817073170731707,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
def xframe_sameorigin(view_fn):
    """Force ``X-Frame-Options: SAMEORIGIN`` on the view's response."""
    @wraps(view_fn)
    def _wrapped_view(request, *args, **kwargs):
        result = view_fn(request, *args, **kwargs)
        result['X-Frame-Options'] = 'SAMEORIGIN'
        return result
    return _wrapped_view
def xframe_allow(view_fn):
    """Mark the response so middleware skips the X-Frame-Options header."""
    @wraps(view_fn)
    def _wrapped_view(request, *args, **kwargs):
        result = view_fn(request, *args, **kwargs)
        result.no_frame_options = True
        return result
    return _wrapped_view
def xframe_deny(view_fn):
    """Force ``X-Frame-Options: DENY`` on the view's response."""
    @wraps(view_fn)
    def _wrapped_view(request, *args, **kwargs):
        result = view_fn(request, *args, **kwargs)
        result['X-Frame-Options'] = 'DENY'
        return result
    return _wrapped_view
def xrobots_exempt(view_fn):
    """Mark the response so middleware skips the X-Robots-Tag header."""
    @wraps(view_fn)
    def _wrapped_view(request, *args, **kwargs):
        result = view_fn(request, *args, **kwargs)
        result.no_robots_tag = True
        return result
    return _wrapped_view
def xrobots_tag(rule='noindex'):
    """Return a decorator stamping ``X-Robots-Tag: <rule>`` on the
    response (default ``'noindex'``)."""
    def decorator(view_fn):
        @wraps(view_fn)
        def _wrapped_view(request, *args, **kwargs):
            result = view_fn(request, *args, **kwargs)
            result['X-Robots-Tag'] = rule
            return result
        return _wrapped_view
    return decorator
| {
"repo_name": "jsocol/commonware",
"path": "commonware/response/decorators.py",
"copies": "1",
"size": "1328",
"license": "bsd-3-clause",
"hash": 7496949868046068000,
"line_mean": 26.6666666667,
"line_max": 56,
"alpha_frac": 0.6129518072,
"autogenerated": false,
"ratio": 3.550802139037433,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46637539462374333,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
_enable_pluggable_decorators = True
def pluggable(*decorators):
    """Apply *decorators* to a function while keeping them switchable.

    The returned wrapper dispatches to the decorated chain while the
    module-level switch ``_enable_pluggable_decorators`` is on, and to
    the untouched original otherwise (see
    ``purge_pluggable_decorators``).  The wrapper exposes
    ``original_func`` and ``decorators`` attributes.

    Example:

        @pluggable(
            require_POST,
            login_required,
        )
        def some_view(request):
            ''' Your view func here. '''
    """
    def dec(func):
        undecorated = func
        decorated = func
        # Apply bottom-up, matching normal stacked-decorator order.
        for deco in reversed(decorators):
            decorated = deco(decorated)
        @wraps(decorated)
        def wrapped(*args, **kwargs):
            global _enable_pluggable_decorators
            target = decorated if _enable_pluggable_decorators else undecorated
            return target(*args, **kwargs)
        wrapped.original_func = undecorated
        wrapped.decorators = decorators
        return wrapped
    return dec
class purge_pluggable_decorators:
    """Context manager / decorator that disables ``pluggable`` decorators.

    While active, every function wrapped with ``pluggable`` runs its
    original, undecorated implementation.  Useful when testing functions
    decorated via ``pluggable``.

    As a context manager::

        with purge_pluggable_decorators:
            goodbye()  # runs the undecorated function

    As a decorator::

        @purge_pluggable_decorators
        def test():
            goodbye()
    """
    def __init__(self, func=None):
        # ``func`` is only used for decorator-style invocation.
        self.func = func
    @classmethod
    def purge(cls):
        # Flip the module-level switch off.
        global _enable_pluggable_decorators
        _enable_pluggable_decorators = False
    @classmethod
    def wrap(cls):
        # Restore the module-level switch.
        global _enable_pluggable_decorators
        _enable_pluggable_decorators = True
    def __enter__(self):
        self.purge()
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.wrap()
    def __call__(self, *args, **kwargs):
        """ As decorator: run the stored function inside the context.
        """
        with self:
            return self.func(*args, **kwargs)
| {
"repo_name": "hirokiky/wraptools",
"path": "wraptools/pluggable.py",
"copies": "1",
"size": "2439",
"license": "mit",
"hash": 7874435099790849000,
"line_mean": 23.1485148515,
"line_max": 89,
"alpha_frac": 0.561295613,
"autogenerated": false,
"ratio": 4.234375,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5295670613,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from django.core.cache import cache
from django.shortcuts import get_object_or_404
from django.views.generic.simple import direct_to_template
from django.http import HttpResponseRedirect, Http404, HttpResponse
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from models import Password, LdapGroup
from forms import PasswordForm
from utils import check_authorization, get_ldap_groups
def check_ldap_access(view_func):
    """
    Decorator for views with signature `view(request, pw_pk, **kwargs)`.

    Denies access when the connected user has no rights on the password.
    If *pw_pk* is falsy, authorization is never denied.  If the password
    doesn't exist (the authorization check returns None), the view is
    called with `pw_pk=None`.  Otherwise PermissionDenied is raised.
    """
    def _decorated_view(request, pw_pk=None, **kwargs):
        if not pw_pk:
            return view_func(request, pw_pk, **kwargs)
        # Call check_authorization once instead of twice as before;
        # assumes the check is side-effect free — TODO confirm.
        authorized = check_authorization(pw_pk, request.user.username)
        if authorized:
            return view_func(request, pw_pk, **kwargs)
        if authorized is None:
            # Password does not exist: let the view handle the missing id.
            return view_func(request, None, **kwargs)
        raise PermissionDenied(str(get_ldap_groups(request.user.username))+request.user.username)
    return wraps(view_func)(_decorated_view)
def first_parameter_to_int(view_func):
    """
    Decorator for views with signature `view(request, pw_pk, **kwargs)`.

    Calls the view with *pw_pk* converted to an int; a ``None`` value is
    passed through unchanged.  ``int(pw_pk)`` may raise for malformed
    input - with a correct url regex ([0-9]+) that never happens.
    """
    def _decorated_view(request, pw_pk=None, **kwargs):
        # `is not None` instead of `== None`; the dead `pass` branch is gone.
        if pw_pk is not None:
            pw_pk = int(pw_pk)
        return view_func(request, pw_pk, **kwargs)
    return wraps(view_func)(_decorated_view)
def index(request):
    """Render the password index page with the subset of passwords the
    connected user is authorized to see."""
    username = request.user.username
    user_passwords = [pw for pw in Password.objects.all()
                      if check_authorization(pw.pk, username)]
    # Session flag controls whether only accessible passwords are shown.
    show_only_accessible = bool(request.session.get('showOnly', True))
    baseUrl = '/'
    context = {
        'passwords': Password.objects.all(),
        'user_passwords': user_passwords,
        'showOnly': show_only_accessible,
        'baseUrl': baseUrl,
    }
    return direct_to_template(request, 'passwords/index.html', context)
def new_password(request):
    """Create a new Password entry.

    GET renders an empty form; POST validates and saves, then redirects
    to the index.  The LDAP group choices offered are limited to the
    connected user's groups.

    NOTE(review): for HTTP methods other than GET/POST, ``form`` is
    never bound and the final render raises NameError — confirm whether
    other methods can reach this view.
    """
    new = True
    password = Password()
    # Offer only the groups the current user belongs to.
    ldap_groups = get_ldap_groups(request.user.username)
    ldap_groups_choices = [(lg, lg) for lg in ldap_groups]
    if request.method == 'POST':
        form = PasswordForm(request.POST, instance=password,
                            ldap_groups_choices=ldap_groups_choices)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(reverse("index"))
        # Invalid POST falls through and re-renders the bound form.
    elif request.method == 'GET':
        form = PasswordForm(instance=password,
                            ldap_groups_choices=ldap_groups_choices)
    return direct_to_template(request, 'passwords/edit_password.html', {'form': form, 'ldapGroups': LdapGroup.objects.all(), 'new': new})
@first_parameter_to_int
@check_ldap_access
def edit_password(request, pw_pk=None):
    """Edit the existing Password identified by ``pw_pk``.

    Access control and int conversion are handled by the decorators;
    404s when the password does not exist.  GET renders the bound form;
    POST validates, saves and redirects to the index.

    NOTE(review): renders 'edit_password.html' while new_password uses
    'passwords/edit_password.html' — verify which template path is
    intended.
    """
    new = False
    password = get_object_or_404(Password, pk=pw_pk)
    # Offer only the groups the current user belongs to.
    ldap_groups = get_ldap_groups(request.user.username)
    ldap_groups_choices = [(lg, lg) for lg in ldap_groups]
    if request.method == 'POST':
        form = PasswordForm(request.POST, instance=password,
                            ldap_groups_choices=ldap_groups_choices)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(reverse("index"))
        # Invalid POST falls through and re-renders the bound form.
    elif request.method == 'GET':
        form = PasswordForm(instance=password,
                            ldap_groups_choices=ldap_groups_choices)
    return direct_to_template(request, 'edit_password.html', {'form': form, 'ldapGroups': LdapGroup.objects.all(), 'new': new})
@first_parameter_to_int
@check_ldap_access
def delete_password(request, pw_pk=None):
    """Delete the password identified by ``pw_pk`` and redirect to the
    index.  A missing password is silently ignored; access control is
    enforced by the decorators."""
    try:
        doomed = Password.objects.get(pk=pw_pk)
    except Password.DoesNotExist:
        # Nothing to delete; fall through to the redirect.
        pass
    else:
        doomed.delete()
    return HttpResponseRedirect(reverse("index"))
@first_parameter_to_int
@check_ldap_access
def get_password(request, pw_pk=None):
    """Return the stored password for ``pw_pk`` as plain text, or 404
    when it does not exist."""
    try:
        record = Password.objects.get(pk=pw_pk)
    except Password.DoesNotExist:
        raise Http404
    return HttpResponse(record.password, mimetype="text/plain")
| {
"repo_name": "ojarva/password-safe-django",
"path": "passwordsafe/passwords/views.py",
"copies": "1",
"size": "4370",
"license": "mit",
"hash": 6172048626245807000,
"line_mean": 37.3333333333,
"line_max": 189,
"alpha_frac": 0.6750572082,
"autogenerated": false,
"ratio": 3.754295532646048,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9883069517441552,
"avg_score": 0.009256644680899342,
"num_lines": 114
} |
from functools import wraps
# generic container used to denote zset
class zset(object):
    """Marker type: a field declared as ``zset(T)`` is backed by a redis
    sorted set whose members are of primitive type ``T``."""
    def __init__(self, primitive):
        # The member type (e.g. str, int) of the sorted set.
        self.primitive = primitive
def check_field(func):
    """Decorator for methods shaped ``(self_or_cls, field, ...)``:
    rejects field names not declared in the owner's ``fields`` dict."""
    @wraps(func)
    def guarded(self_cls, field, *args, **kwargs):
        if field not in self_cls.fields:
            raise TypeError('invalid field: '+field)
        return func(self_cls, field, *args, **kwargs)
    return guarded
def _set_relation(entity1, field1, entity2):
    # Record that `entity1`'s new field `field1` is related to `entity2`.
    # `entity1` may be a bare Entity subclass or a one-element container
    # ({Entity}, [Entity], (Entity,)) denoting a redis collection type.
    if type(entity1) is set:
        # Extract the single member of the set wrapper.
        for element in entity1:
            entity = element
    elif type(entity1) in (list, tuple):
        entity = entity1[0]
    elif issubclass(entity1, Entity):
        entity = entity1
    else:
        raise TypeError('Unknown entity type')
    if (field1 in entity.fields):
        # Re-declaring an existing field is only tolerated for self
        # relations (the two containers compare equal).
        if (entity1 != entity2):
            raise KeyError('Cannot add relation to existing field')
    entity.fields[field1] = entity2
    return entity
def relate(entityA, fieldA, entityB, fieldB=None):
    """ Relate entityA's fieldA with that of entityB's fieldB. fieldA and
    fieldB are new fields to be defined.
    Container semantics can be used to denote many to many relationships*.
    Example:
    # 1 to N relationship between a person and cats
    relate(Person,'cats',{Cat},'owner'}
    # N to 1 relationship (equivalent to above)
    relate({Cat},'owner',Person,'cats')
    # N to N relationship
    relate({Person},'cats_to_feed',{Cat},'people_who_feed_me')
    # this is equivalent to the imaginary function definition
    forward_map(Person,'cats_to_feed',{Cat})
    inverse_map(Cat,'people_who_feed_me',{Person})
    # N to N relationship between self fields
    relate({Person},'friends',{Person},'friends')
    *Note that not all n-to-n relationships are sensible.
    """
    # Declare the forward field on A; when fieldB is given, declare the
    # inverse field on B and cross-link both sides in `relations`.
    entity1 = _set_relation(entityA, fieldA, entityB)
    if fieldB:
        entity2 = _set_relation(entityB, fieldB, entityA)
        entity1.relations[fieldA] = (entity2, fieldB)
        entity2.relations[fieldB] = (entity1, fieldA)
class _entity_metaclass(type):
def __new__(cls, clsname, bases, attrs):
if len(bases) > 0:
mandatory_fields = ('fields', 'relations', 'lookups')
for field in mandatory_fields:
if not field in attrs:
attrs[field] = dict()
return super(_entity_metaclass, cls).__new__(
cls, clsname, bases, attrs)
class Entity(metaclass=_entity_metaclass):
    """ An Entity is an entity represented and stored using redis. This class
    is meant to be subclassed using the example template given below. Entities
    are indexed using an id, similar to the primary key in SQL. These ids are
    are contained in a redis SET for book-keeping purposes. There are three
    major components to an Entity:
    1. fields - which describes basic features of the Entity using primitives
    such as str,int,float,bool. They can be bracketed in {},[],() to denote
    a redis SET, a redis LIST, and a redis SORTED SET.
    2. lookups - which are similar to indices in SQL tables, allowing fast
    retrieval of the entity id given a field and the field value. Lookups
    are added via the Entity.add_lookup() method, and is injective by
    default.
    3. relations - which describe relations between different subclasses of
    Entity. Relations add additional implicit fields to the Entity that can
    be queried.
    So when should you use a lookup, and when should you create another entity
    and define a relation?
    Use lookups when you don't care about being able to list the entire set and
    test existence of a value in constant time. The 'age' field should be a
    lookup because we almost never need to see if a given age in a set of all
    existing ages, in constant time, though we could certainly iterate over all
    the person's ages in O(N) time. The lifetime of a lookup field is tied
    directly to the lifetime of the underlying object. The life time of lookups
    are bound to the lifetime of the entity.
    Relations on the otherhand, are used to describe relationships between two
    entities. It is similar to how SQL relates between two tables. Even if a
    related field is deleted, the entity itself still exists in the set of
    managed entities.
    N-to-N Relations between different sets are a tricky business. For example,
    mappings from sets to sets can make intuitive sense, so does sets to sorted
    sets and possibly sets to lists. However, sorted sets to sorted sets are
    seemingly nonsensical, as are sorted sets to lists, and lists to lists.
    For this reason, sorted sets and lists can only map to either single
    objects or sets, but not to other sorted sets or lists.
    Example:
    class Person(apollo.Entity):
        prefix = person
        fields = {'age' : int,
                  'income' : int,
                  'ssn' : str,
                  'emails' : {str},
                  'nicknames' : {str}
                  }
    Person.add_lookup('ssn')
    Person.add_lookup('emails')
    Person.add_lookup('nicknames',injective=False)
    class Cat(apollo.Entity):
        prefix = Cat
        fields = {'age' : int,
                  'eye_color' : str,
                  'favorite_foods' : {str},
                  'biochip' : int
                  }
    Cat.add_lookup('biochip')
    # A Person's set of cats map to a Cat's owner
    apollo.relate(Person,'cats',{Cat},'owner')
    apollo.relate({Person},'cats_to_feed',{Cat},'caretakers')
    # 1 to 1 with no lookup
    apollo.relate(Person,'favorite_cat',Cat)
    # 1 to N with no lookup
    apollo.relate(Person,'favorite_cats',{Cat})
    # N to N with no lookup (makes no sense)
    # 1 to 1 with lookup
    apollo.relate(Person,'cat_buddy',Cat,'person_buddy')
    # 1 to N with lookup
    apollo.relate(Person,'cats_owned',{Cat},'owner')
    # N to N with lookup
    apollo.relate({Person},'cats_to_feed',{Cat},'persons_feeding_me')
    """
    @classmethod
    def members(cls, db):
        """ List all entities """
        # All ids live in the bookkeeping set '<prefix>s'.
        return db.smembers(cls.prefix+'s')
    @classmethod
    def exists(cls, id, db):
        """ Returns true if an entity with id id exists on the db """
        return db.sismember(cls.prefix+'s', id)
    @classmethod
    def create(cls, id, db):
        """ Create an object with identifier id on the redis client db """
        if isinstance(id, bytes):
            raise TypeError('id must be a string')
        if cls.exists(id, db):
            raise KeyError(id, 'already exists')
        # Register the id in the bookkeeping set, then hand back a proxy.
        db.sadd(cls.prefix + 's', id)
        return cls(id, db)
    @classmethod
    def add_lookup(cls, field, injective=True):
        """ Call this method only after all the relevant Entities have been
        created since it ensures there are no conflicts. TODO: Entity
        metaclass can be modified to make the check so add_lookup can be
        placed wherever.
        """
        # ensure lookup field is not a prefix for any existing derived Entity
        for subclass in Entity.__subclasses__():
            if subclass.prefix == field:
                raise AttributeError('lookup field cannot be a prefix for \
                    any existing entity')
        cls.lookups[field] = injective
    @classmethod
    def instance(cls, id, db):
        # Alias for the constructor; validates existence via __init__.
        return cls(id, db)
    @classmethod
    @check_field
    def lookup(self, field, value, db):
        # Reverse lookup: map a field value back to the owning id(s).
        assert field in self.lookups
        # if its injective
        if self.lookups[field]:
            # Injective lookup: a single hash maps value -> id.
            return db.hget(field+':'+value, self.prefix)
        else:
            # Non-injective lookup: a set of ids per value.
            return db.smembers(field+':'+value+':'+self.prefix)
    def delete(self):
        """ Remove this entity from the db, all associated fields and related
        fields will also be cleaned up
        """
        for field_name, field_type in self.fields.items():
            if type(field_type) is set:
                # srem per-member first so inverse relations/lookups are
                # cleaned up, then drop the whole redis set.
                if field_name in self.relations or field_name in self.lookups:
                    for member in self._db.smembers(self.prefix+':'+self.id
                                                    +':'+field_name):
                        self.srem(field_name, member)
                self._db.delete(self.prefix+':'+self.id+':'+field_name)
            elif type(field_type) is zset:
                self._db.delete(self.prefix+':'+self.id+':'+field_name)
            elif issubclass(field_type, Entity):
                self.hdel(field_name)
            elif field_type in (str, int, bool, float):
                self.hdel(field_name)
        # Finally remove the entity hash and its bookkeeping entry.
        self._db.delete(self.prefix+':'+self.id)
        self._db.srem(self.prefix+'s', self.id)
    @property
    def id(self):
        # The entity's primary identifier (string or int).
        return self._id
    @check_field
    def hincrby(self, field, count=1):
        """ Increment the field by count, field must be declared int """
        if self.fields[field] != int:
            raise TypeError('cannot call hincrby on a non-int field')
        return self._db.hincrby(self.prefix + ':' + self._id, field, count)
    @check_field
    def hset(self, field, value):
        """ Set a hash field equal to value """
        # set local value
        assert (self.fields[field] in (str, int, bool, float) or
                issubclass(self.fields[field], Entity))
        # clean up this field first since it can only be bound to a single
        # object hash field (implicitly).
        if field in self.relations:
            self.hdel(field)
            assert isinstance(value, Entity)
            other_entity = self.relations[field][0]
            other_field_name = self.relations[field][1]
            other_field_type = other_entity.fields[other_field_name]
            if type(other_field_type) is set:
                # Inverse side is a set: register our id there.
                self._db.sadd(other_entity.prefix+':'+value.id+':'+
                              other_field_name, self.id)
            elif issubclass(other_field_type, Entity):
                # raise?
                # Inverse side is 1-to-1: unbind its previous link first.
                value.hdel(other_field_name)
                self._db.hset(other_entity.prefix+':'+value.id,
                              other_field_name, self.id)
                self.hdel(field)
        elif field in self.lookups:
            self.hdel(field)
            if self.lookups[field]:
                # see if this field is mapped to something already
                reference = self.__class__.lookup(field, value, self._db)
                if reference:
                    self._db.hdel(self.prefix+':'+reference+':'+field, value)
                self._db.hset(field+':'+value, self.prefix, self.id)
            else:
                self._db.sadd(field+':'+value+':'+self.prefix, self.id)
        if isinstance(value, Entity):
            # Store the related entity by id, not by object.
            value = value.id
        self._db.hset(self.prefix + ':' + self._id, field, value)
    @check_field
    def hdel(self, field):
        """ Delete a hash field and its related fields and lookups """
        assert (self.fields[field] in (str, int, bool, float) or
                issubclass(self.fields[field], Entity))
        if field in self.relations:
            #if issubclass(self.fields[field], Entity):
            other_entity = self.relations[field][0]
            other_field_name = self.relations[field][1]
            other_field_type = other_entity.fields[other_field_name]
            other_entity_id = self._db.hget(self.prefix+':'+self.id, field)
            if other_entity_id:
                # Detach the inverse side before clearing our own field.
                if type(other_field_type) is set:
                    self._db.srem(other_entity.prefix+':'+other_entity_id+':'+
                                  other_field_name, self.id)
                elif issubclass(other_field_type, Entity):
                    self._db.hdel(other_entity.prefix+':'+other_entity_id,
                                  other_field_name)
        elif field in self.lookups:
            lookup_value = self._db.hget(self.prefix+':'+self.id, field)
            if lookup_value:
                # if it is injective, implies mapping to a single hash
                if self.lookups[field]:
                    self._db.hdel(field+':'+lookup_value, self.prefix)
                # lookup maps to many different values
                else:
                    self._db.srem(field+':'+lookup_value+':'+self.prefix,
                                  self.id)
        self._db.hdel(self.prefix+':'+self.id, field)
    @check_field
    def hget(self, field):
        """ Get a hash field """
        field_type = self.fields[field]
        if (field_type in (str, int, bool, float)):
            val = self._db.hget(self.prefix + ':' + self._id, field)
            if val:
                # Redis returns strings/bytes; cast back to declared type.
                return field_type(val)
            else:
                return val
        elif issubclass(field_type, Entity):
            # Related entities are stored (and returned) as raw ids.
            return self._db.hget(self.prefix+':'+self._id, field)
        else:
            raise TypeError('Unknown type')
    @check_field
    def smembers(self, field):
        """ Return members of a set """
        if type(self.fields[field]) != set:
            raise KeyError('called smembers on non-set field')
        set_values = set()
        for member in self._db.smembers(self.prefix + ':' + self._id + ':' +
                                        field):
            # The declared set has exactly one element type; cast each
            # member to it (entity members stay as raw ids).
            for primitive_type in self.fields[field]:
                if issubclass(primitive_type, Entity):
                    set_values.add(member)
                elif primitive_type in (str, int, bool, float):
                    set_values.add(primitive_type(member))
                else:
                    raise TypeError('Unknown field type')
        return set_values
    @check_field
    def sismember(self, field, value):
        # Entities are compared by id.
        if isinstance(value, Entity):
            value = value.id
        return self._db.sismember(self.prefix+':'+self.id+':'+field, value)
    @check_field
    def scard(self, field):
        # Cardinality of the redis set backing `field`.
        return self._db.scard(self.prefix+':'+self.id+':'+field)
    @check_field
    def srandmember(self, field):
        # Random member of the redis set backing `field`.
        return self._db.srandmember(self.prefix+':'+self.id+':'+field)
    @check_field
    def sremall(self, field):
        """ Empty the set """
        assert type(self.fields[field]) == set
        if field in self.relations or field in self.lookups:
            # Remove one by one so inverse links/lookups are cleaned up.
            values = list(self.smembers(field))
            self.srem(field, *values)
        else:
            self._db.delete(self.prefix+':'+self._id+':'+field)
    @check_field
    def srem(self, field, *values):
        """ Remove values from the set field """
        assert type(self.fields[field]) == set
        carbon_copy_values = []
        for value in values:
            # Normalize Entity arguments down to their string ids.
            if isinstance(value, Entity):
                carbon_copy_values.append(value.id)
            else:
                carbon_copy_values.append(value)
        for value in carbon_copy_values:
            if field in self.relations:
                if not self.sismember(field, value):
                    raise ValueError(value+' is not in '+self.id+'\'s '+field)
                other_entity = self.relations[field][0]
                other_field_name = self.relations[field][1]
                other_field_type = other_entity.fields[other_field_name]
                # Detach the inverse side for each removed member.
                if type(other_field_type) is set:
                    self._db.srem(other_entity.prefix+':'+value+':'+
                                  other_field_name, self.id)
                elif issubclass(other_field_type, Entity):
                    self._db.hdel(other_entity.prefix+':'+value,
                                  other_field_name)
            elif field in self.lookups:
                if not self.sismember(field, value):
                    raise ValueError(value+' is not in '+self.id+'\'s '+field)
                if self.lookups[field]:
                    # see if this field mapped to something already
                    self._db.hdel(field+':'+value, self.prefix, self.id)
                else:
                    self._db.srem(field+':'+value+':'+self.prefix, self.id)
        self._db.srem(self.prefix+':'+self._id+':'+field, *carbon_copy_values)
    @check_field
    def sadd(self, field, *values):
        """ Add values to the field. If the field expects Entities, then values
        can either be a list of strings, a list of Entities, or a mix of both.
        """
        assert type(self.fields[field]) == set
        # Extract the single declared member type out of the set wrapper.
        for key in self.fields[field]:
            derived_entity = key
        carbon_copy_values = []
        # convert all values to strings first
        for value in values:
            if isinstance(value, derived_entity) and isinstance(value, Entity):
                carbon_copy_values.append(value.id)
            elif type(value) == str:
                carbon_copy_values.append(value)
            else:
                raise TypeError('Bad sadd type')
        if field in self.relations:
            other_entity = self.relations[field][0]
            other_field_name = self.relations[field][1]
            other_field_type = other_entity.fields[other_field_name]
            for value in carbon_copy_values:
                if type(other_field_type) is set:
                    self._db.sadd(other_entity.prefix+':'+value+':'+
                                  other_field_name, self.id)
                elif issubclass(other_field_type, Entity):
                    # 1-to-1 inverse: unbind its previous link first.
                    other_entity(value, self._db).hdel(other_field_name)
                    self._db.hset(other_entity.prefix+':'+value,
                                  other_field_name, self.id)
        elif field in self.lookups:
            for value in carbon_copy_values:
                if self.lookups[field]:
                    # see if this field mapped to something already
                    reference = self.__class__.lookup(field, value, self._db)
                    if reference:
                        self._db.srem(self.prefix+':'+reference+':'+field,
                                      value)
                    self._db.hset(field+':'+value, self.prefix, self.id)
                else:
                    self._db.sadd(field+':'+value+':'+self.prefix, self.id)
        self._db.sadd(self.prefix+':'+self._id+':'+field, *carbon_copy_values)
    @check_field
    def zscore(self, field, key):
        # NOTE(review): `assert type(expr == zset)` asserts the type of a
        # bool, so it always passes; likely meant
        # `assert type(self.fields[field]) == zset`. Left unchanged here.
        assert type(self.fields[field] == zset)
        return self._db.zscore(self.prefix+':'+self.id+':'+field, key)
    @check_field
    def zrange(self, field, start, stop):
        # NOTE(review): same always-true assert pattern as zscore.
        assert type(self.fields[field] == zset)
        return self._db.zrange(self.prefix+':'+self.id+':'+field, start, stop)
    @check_field
    def zremrangebyrank(self, field, start, stop):
        # NOTE(review): same always-true assert pattern as zscore.
        assert type(self.fields[field] == zset)
        return self._db.zremrangebyrank(self.prefix+':'+self.id+':'+field,
                                        start, stop)
    @check_field
    def zadd(self, field, *args, **kwargs):
        # NOTE(review): same always-true assert pattern as zscore.
        assert type(self.fields[field] == zset)
        # Sorted-set fields may not participate in lookups or relations.
        assert not field in self.lookups
        assert not field in self.relations
        return self._db.zadd(self.prefix+':'+self.id+':'+field, *args,
                             **kwargs)
    @check_field
    def zrem(self, field, *args):
        # NOTE(review): same always-true assert pattern as zscore.
        assert type(self.fields[field] == zset)
        assert not field in self.lookups
        assert not field in self.relations
        return self._db.zrem(self.prefix+':'+self.id+':'+field, *args)
    def __init__(self, id, db):
        # Proxy constructor: the entity must already have been create()d.
        assert type(id) in (str, int)
        self._db = db
        self._id = id
        # overhead
        if not self.__class__.exists(id, db):
            raise KeyError(id, 'has not been created yet')
        self.__dict__['_id'] = id
| {
"repo_name": "proteneer/apollo",
"path": "apollo.py",
"copies": "1",
"size": "19600",
"license": "mit",
"hash": -3256364899504813000,
"line_mean": 38.595959596,
"line_max": 79,
"alpha_frac": 0.5711734694,
"autogenerated": false,
"ratio": 3.9918533604887982,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005139803140403868,
"num_lines": 495
} |
from functools import wraps
#########################################################
# _correct_type, _multi_type_fix and _single_type_fix #
# are all meant to be helper functions, they are not #
# meant to be used outside this module                  #
#########################################################
# cast an object to a type if the
# object passed in is not of the same
# type as the type passed in. This might result
# in a ValueError or TypeError.
def _correct_type(obj, _type):
if type(obj) != _type:
obj = _type(obj)
return obj
# Pairs each object and type as tuples in a list,
# then loops through this list and yields each object,
# if possible, after we have made sure the object
# has the same type as the type in its tuple.
def _multi_type_fix(obj_seq, type_seq):
    """Yield each object from *obj_seq* coerced to the type at the same
    position in *type_seq*; pairs beyond the shorter sequence are dropped."""
    for obj, _type in zip(obj_seq, type_seq):
        yield _correct_type(obj, _type)
# Loops through a sequence of objects and yields
# each object, if possible, after we have made sure
# the object has the same type as the type passed in
# as an argument.
def _single_type_fix(obj_seq, _type):
    """Yield every object from *obj_seq* coerced to the single *_type*."""
    for item in obj_seq:
        yield _correct_type(item, _type)
def type_corrector(*types):
    """Decorator factory that casts the wrapped function's arguments to
    the given *types* before every call.

    With a single type every positional and keyword argument is cast to
    it; with several types, arguments are paired with types positionally.
    """
    def wrapper(func):
        @wraps(func)
        def _wrapper(*args, **kwargs):
            if len(types) > 1:
                fixed_args = _multi_type_fix(args, types)
                fixed_values = _multi_type_fix(kwargs.values(), types)
            else:
                fixed_args = _single_type_fix(args, types[0])
                fixed_values = _single_type_fix(kwargs.values(), types[0])
            # Rebuild the keyword mapping with the coerced values.
            fixed_kwargs = dict(zip(kwargs.keys(), fixed_values))
            return func(*fixed_args, **fixed_kwargs)
        return _wrapper
    return wrapper
| {
"repo_name": "fredgj/typecorrector",
"path": "typecorrector/corrector.py",
"copies": "1",
"size": "1956",
"license": "mit",
"hash": -6063045079613811000,
"line_mean": 32.724137931,
"line_max": 87,
"alpha_frac": 0.5823108384,
"autogenerated": false,
"ratio": 3.8579881656804735,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9837917875126234,
"avg_score": 0.02047622579084789,
"num_lines": 58
} |
from functools import wraps
##################################################################
# The Monad meta design pattern.
class Monad(object):
    """Base of the monad pattern: ``bind`` applies *method* directly.

    Subclasses override ``bind`` to inject their own sequencing rules.
    """

    def bind(self, method, args, kwargs):
        # Default behaviour: no interception, just forward the call.
        return method(self, *args, **kwargs)
def bound(method):
    """Decorator for monadic methods: routes the call through
    ``self.bind`` and insists the result is again a Monad."""
    @wraps(method)
    def bound_method(self, *args, **kwargs):
        outcome = self.bind(method, args, kwargs)
        assert isinstance(outcome, Monad)
        return outcome
    return bound_method
##################################################################
# The MaybeMonad design pattern,
# an instance of the Monad meta design pattern.
class MaybeMonad(Monad):
    """Maybe semantics: once a value is 'nothing', every further bound
    call is skipped and the nothing-value is returned unchanged."""

    is_nothing = False

    def bind(self, method, args, kwargs):
        if self.is_nothing:
            return self
        return method(self, *args, **kwargs)
##################################################################
# The MathOp class,
# an instance of the MaybeMonad design pattern,
# in turn an instance of the Monad meta design pattern.
class MathOp(MaybeMonad):
    """Arithmetic on a wrapped value.  Division by zero degrades the
    chain to MathOpNaN, after which every operation is a no-op."""

    is_nothing = False

    def __init__(self, value):
        self.value = value

    def __repr__(self):
        return "<MathOp {}>".format(self.value)

    @bound
    def div(self, denum):
        # Guard clause keeps the happy path unindented.
        if denum == 0:
            return MathOpNaN()
        return MathOp(self.value / denum)

    @bound
    def mul(self, multiplicand):
        return MathOp(self.value * multiplicand)

    @bound
    def add(self, addend):
        return MathOp(self.value + addend)

    @bound
    def sub(self, subtrahend):
        return MathOp(self.value - subtrahend)
class MathOpNaN(MathOp):
    # The 'nothing' branch of MathOp: is_nothing makes MaybeMonad.bind
    # skip every further operation, so NaN absorbs the rest of the chain.
    is_nothing = True

    def __init__(self):
        # Wraps None; the value is never used because bind short-circuits.
        super(MathOpNaN, self).__init__(None)

    def __repr__(self):
        return "<MathOp NaN>"
| {
"repo_name": "jorgenschaefer/monads-for-normal-programmers",
"path": "monads/mathop/step4.py",
"copies": "1",
"size": "1786",
"license": "bsd-2-clause",
"hash": 236182415988419420,
"line_mean": 22.5,
"line_max": 66,
"alpha_frac": 0.5414333707,
"autogenerated": false,
"ratio": 4.0225225225225225,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5063955893222523,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
"""
The following example creates a Chart.js chart with id of 'mychart' width and height of 400px.
Two datasets are created for '# of apples' and '# of bananas'. Both are 'bar' charts.
At '17:51' the value for the '# of apples' are 12, and at '17:54' it is 5.
At '17:51' the value for the '# of bananas' are 0, and at '17:54' it is 11.
canvas = Canvas('mychart', 400, 400)
chart_builder = ChartBuilder('bar')
chart = Chart(canvas, 'ctx', chart_builder)
chart_builder.create_dataset('# of apples')
chart_builder.create_dataset('# of bananas')
chart_builder.add_data('17:51', 12)
chart_builder.add_data('17:54', [5, 11])
"""
def encode_unicode(f):
    """Decorator: UTF-8 encode the textual content of the wrapped
    function's return value.

    Dicts have their values encoded, lists their items, plain strings are
    encoded directly, numbers become floats, and anything else collapses
    to None.  (Python 2 code: relies on unicode/str semantics and
    ``long``.)
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        def _enc(value):
            # Encode when the value is textual (has .encode), otherwise
            # hand it back untouched.
            try:
                return value.encode('utf-8')
            except AttributeError:
                return value

        unicode_ = f(*args, **kwargs)
        if isinstance(unicode_, dict):
            return dict((k, _enc(v)) for k, v in unicode_.iteritems())
        if isinstance(unicode_, list):
            return [_enc(v) for v in unicode_]
        if isinstance(unicode_, str):
            return unicode_.encode('utf-8')
        if isinstance(unicode_, (int, long, float)):
            return float(unicode_)
        return None
    return wrapper
# Chart.js chart types accepted by ChartBuilder; anything else raises
# InvalidTypeError.
chart_types = [
    'line',
    'bar',
    'radar',
    'doughnut',
    'pie',
    'polarArea',
    'bubble',
    'scatter'
]
class InvalidTypeError(Exception):
    """Raised when a chart type is not one of the supported chart types."""

    def __init__(self, type, valid_types):
        self.type = type
        self.valid_types = valid_types

    def __str__(self):
        valid = ', '.join(self.valid_types)
        return '{0} is not a valid type. Valid types are: {1}'.format(
            self.type, valid)
class Colors(object):
    """Deterministic rgba colour generator for dataset backgrounds.

    All state is class-level, so every caller shares one rolling
    sequence of colours.  (Python 2 code: uses xrange.)
    """
    # Base RGB palette and mixing coefficients for the rolling sequence.
    _colors = [
        (108, 101, 215),
        (212, 210, 9),
        (11, 138, 73),
        (187, 63, 24),
        (135, 165, 162)
    ]
    _coeffs = [7, 11, 19, 15, 6]
    _alpha = 0.2
    _ctr = 0
    _last_color = (0, 0, 0)

    @classmethod
    def alpha(cls, alpha):
        # Set the alpha channel used for all subsequently generated colours.
        cls._alpha = alpha

    @classmethod
    def next(cls):
        """Return the next (r, g, b, alpha) tuple in the sequence."""
        _base_color = cls._colors[cls._ctr]
        _coeff = tuple(cls._coeffs[(cls._ctr + i) % 5] for i in xrange(3))
        _color = tuple(((_base_color[i] + cls._last_color[i]) * _coeff[i]) % 256 for i in xrange(3))
        cls._last_color = _color
        cls._ctr += 1
        # NOTE(review): the counter wraps at 4, so _colors[4] is never used
        # as a base colour -- confirm whether the palette size or the wrap
        # point is intended.
        if cls._ctr == 4:
            cls._ctr = 0
        return _color + (cls._alpha,)
class Canvas(object):
    """The target <canvas> element: DOM id plus pixel dimensions."""

    def __init__(self, id, width, height):
        self.id = id
        self.width = width
        self.height = height

    @encode_unicode
    def to_dict(self):
        """Serializable view of the canvas attributes."""
        return dict(id=self.id, width=self.width, height=self.height)
class Dataset(object):
    """One labelled data series; every appended point also receives an
    auto-generated background colour so the two lists stay aligned."""

    def __init__(self, label):
        self.label = label
        self.data = []
        self.background_color = []

    def add_data(self, dat):
        """Append one data point and the colour that goes with it."""
        self.data.append(dat)
        self.background_color.append(Colors.next())

    @encode_unicode
    def to_dict(self):
        """Chart.js "dataset" object for this series."""
        return {
            'label': self.label,
            'data': list(self.data),
            'backgroundColor': list(self.background_color)
        }
class Options(object):
    """y-axis scale options: a fixed min/max with a tick step equal to
    gcd(min_value, max_value)."""

    def __init__(self, min_value, max_value):
        self.min_value = min_value
        self.max_value = max_value
        # fractions.gcd was removed in Python 3.9; prefer math.gcd when it
        # exists (3.5+) and fall back to fractions.gcd on old interpreters.
        # (math.gcd always returns a non-negative value, while fractions.gcd
        # could be negative -- only relevant for negative axis bounds.)
        try:
            from math import gcd
        except ImportError:
            from fractions import gcd
        self.step = gcd(min_value, max_value)

    @encode_unicode
    def to_dict(self):
        """Chart.js "options" object configuring the y-axis ticks."""
        return {
            'scales': {
                'yAxes': [{
                    'ticks': {
                        'min': self.min_value,
                        'max': self.max_value,
                        'stepSize': self.step
                    }
                }]
            }
        }
class ChartBuilder(object):
    """Accumulates labels and datasets for one Chart.js chart.

    Raises InvalidTypeError for an unsupported chart type and TypeError
    when *options* is not an Options instance.
    """

    def __init__(self, type, options=None):
        if type not in chart_types:
            raise InvalidTypeError(type, chart_types)
        if options is not None and not isinstance(options, Options):
            raise TypeError('options must be an instance of {0}'.format(Options))
        self.type = type
        self.labels = []
        self.datasets = []
        self.options = options

    def create_dataset(self, label):
        """Open a new, empty data series."""
        self.datasets.append(Dataset(label))

    def add_data(self, label, datas):
        """Record one x-axis label plus one value per dataset; datasets
        beyond the supplied values default to 0."""
        self.labels.append(label)
        if not isinstance(datas, list):
            datas = [datas]
        for i, dataset in enumerate(self.datasets):
            dataset.add_data(datas[i] if i < len(datas) else 0)

    @encode_unicode
    def to_dict(self):
        """Complete Chart.js configuration object."""
        return {
            'type': self.type,
            'data': {
                'labels': list(self.labels),
                'datasets': [dataset.to_dict() for dataset in self.datasets]
            },
            'options': {} if self.options is None else self.options.to_dict()
        }
class Chart(object):
    """Ties a Canvas, a JS context variable name and a ChartBuilder
    together into one serializable chart description."""

    def __init__(self, canvas, context, chart_builder):
        self.canvas = canvas
        self.context = context
        self.chart_builder = chart_builder

    @encode_unicode
    def to_dict(self):
        """Full payload: canvas, context name and chart configuration."""
        return dict(
            canvas=self.canvas.to_dict(),
            context=self.context,
            chart=self.chart_builder.to_dict())
| {
"repo_name": "danielrenes/data-visualization",
"path": "data_visualization/chartjs.py",
"copies": "1",
"size": "5562",
"license": "mit",
"hash": -3323838769291802600,
"line_mean": 27.6701030928,
"line_max": 100,
"alpha_frac": 0.5251708019,
"autogenerated": false,
"ratio": 3.7581081081081082,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9770818156629022,
"avg_score": 0.0024921506758173247,
"num_lines": 194
} |
from functools import wraps
__version__ = "0.1"
class Multimple(object):
    """Descriptor that lets one attribute name dispatch to several named
    implementations, selected per-instance via ``_multimple_current``."""

    _IMPL_ATTR_NAME = "_multimple_current"

    def __init__(self, func, default):
        super(Multimple, self).__init__()
        self._impls = {default: func}
        self._default = default
        self._name = func.__name__

    def __get__(self, owner, klass=None):
        # `owner` is the instance the attribute is read from; fall back to
        # the default implementation when it has not pinned one.
        current = getattr(owner, self._IMPL_ATTR_NAME, None)
        key = current if current else self._default
        impl = self._impls.get(key)
        if not impl:
            raise NotImplementedError(
                "'{}' is not implemented for '{}'".format(
                    self._name,
                    current
                )
            )
        return impl.__get__(owner, klass)

    @classmethod
    def decorator(cls, arg):
        """Entry point: dispatch to the class or function decorator."""
        if isinstance(arg, type):
            return cls.class_decorator(arg)
        return cls.func_decorator(arg)

    @classmethod
    def func_decorator(cls, impl_name):
        """``@multimple('name')`` on a function: create the descriptor
        with that implementation as the default."""
        def wrapper(func):
            return cls(func, default=impl_name)
        return wrapper

    @classmethod
    def class_decorator(cls, klass):
        """``@multimple`` on a class: add a ``multimple(name)`` factory
        that builds instances pinned to a named implementation."""
        @classmethod
        def get_implementation(target, impl_name):
            @wraps(target.__init__)
            def init(*args, **kwargs):
                obj = target(*args, **kwargs)
                setattr(obj, cls._IMPL_ATTR_NAME, impl_name)
                return obj
            return init
        setattr(klass, 'multimple', get_implementation)
        return klass

    def multimple(self, impl_name):
        """Register an additional implementation under *impl_name*."""
        def wrapper(func):
            self._impls[impl_name] = func
            return self
        return wrapper
multimple = Multimple.decorator # pylint: disable=invalid-name
| {
"repo_name": "n9code/multimple",
"path": "multimple/__init__.py",
"copies": "1",
"size": "1886",
"license": "mit",
"hash": 5643768464516046000,
"line_mean": 22.575,
"line_max": 65,
"alpha_frac": 0.5281018028,
"autogenerated": false,
"ratio": 4.325688073394495,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 80
} |
from functools import wraps
__version__ = "0.1"
def get_version():
return __version__
def next_version():
    """Return __version__ with its final component incremented,
    e.g. "0.1" -> "0.2"."""
    parts = __version__.split('.')
    parts[-1] = str(int(parts[-1]) + 1)
    return '.'.join(parts)
def validate_request(validator):
    """Decorator: validate the incoming Flask request with *validator*
    before calling the view.

    The validation result is stored on ``flask.g`` as
    ``request_validation_result``; on failure the view is skipped and the
    validation errors are returned instead.
    """
    def _json_selector(obj, current_selector):
        # Expose the request's JSON body under the 'json' selector key.
        body = obj.get_json(force=True, silent=True) or {}
        return [(current_selector + ['json'], body)]

    def decorator(func):
        from flask import current_app, request, g

        @wraps(func)
        def inner_func(*args, **kwargs):
            g.request_validation_result = \
                current_app.extensions['gladiator'].validate(
                    validator, request, ctx={
                        'custom_selectors': {
                            'json!': _json_selector
                        }
                    })
            if g.request_validation_result.success:
                return func(*args, **kwargs)
            return g.request_validation_result.errors
        return inner_func
    return decorator
class Gladiator(object):
    """Flask extension wiring Gladiator validation into one or more
    applications."""

    def __init__(self, app=None, default_validation_ctx=None):
        self.default_validation_ctx = default_validation_ctx
        self.app = app
        if app is not None:
            self.init_app(app)

    def _default_ctx(self, ctx=None):
        # Layered merge, later layers win:
        # app config < extension default < per-call ctx.
        merged = {}
        for layer in (self.app.config.get('GLADIATOR_VALIDATION_CTX', None),
                      self.default_validation_ctx,
                      ctx):
            if layer:
                merged.update(layer)
        return merged

    def init_app(self, app):
        """Register this extension instance on *app*."""
        app.config.setdefault('GLADIATOR_VALIDATION_CTX', None)
        if not hasattr(app, 'extensions'):
            app.extensions = {}
        app.extensions['gladiator'] = self

    def validate(self, validator, obj, selector=None, ctx=None, **kw):
        """Run gladiator validation with the merged default context."""
        from gladiator.core import validate as _validate
        return _validate(validator, obj, selector, ctx=self._default_ctx(ctx),
                         **kw)
| {
"repo_name": "laco/flask-gladiator",
"path": "flask_gladiator/__init__.py",
"copies": "1",
"size": "2302",
"license": "bsd-3-clause",
"hash": -391807248353248830,
"line_mean": 29.2894736842,
"line_max": 71,
"alpha_frac": 0.5638575152,
"autogenerated": false,
"ratio": 3.9689655172413794,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5032823032441379,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
# ANSI escape codes used to colourise the log output of log()/logx().
RED = '\033[91m'
BLUE = '\033[94m'
BOLD = '\033[1m'
END = '\033[0m'
def _default_handler(e, *args, **kwargs):
    """Default exception handler: swallow the exception (returns None)."""
    pass


def silence(target_exceptions: list, exception_handler=_default_handler):
    """Decorator: catch the listed exception type(s) and delegate them to
    *exception_handler* (default: return None); anything else propagates.

    *target_exceptions* may be a single exception class (or tuple) or a
    list of classes.
    """
    def decor(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                # Normalise to a list so both call styles share one path.
                matchers = (target_exceptions
                            if isinstance(target_exceptions, list)
                            else [target_exceptions])
                for target in matchers:
                    if isinstance(e, target):
                        return exception_handler(e, *args, **kwargs)
                raise e
        return wrapper
    return decor
def silence_coroutine(target_exceptions: list, exception_handler=_default_handler):
    """Generator-coroutine counterpart of silence(): delegates to the
    wrapped coroutine with ``yield from`` and routes the listed exception
    type(s) to *exception_handler*; anything else propagates."""
    def decor(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return (yield from func(*args, **kwargs))
            except Exception as e:
                # Normalise to a list so both call styles share one path.
                matchers = (target_exceptions
                            if isinstance(target_exceptions, list)
                            else [target_exceptions])
                for target in matchers:
                    if isinstance(e, target):
                        return exception_handler(e, *args, **kwargs)
                raise e
        return wrapper
    return decor
def log(fn):
    """
    logs parameters and result - takes no arguments

    Prints a colourised ">>" line with the call's arguments and a "<<"
    line with its result, then returns the result unchanged.  See logx()
    for the configurable variant.
    """
    def func(*args, **kwargs):
        pieces = []
        for i, value in enumerate(args):
            var_name = fn.__code__.co_varnames[i]
            if var_name != "self":
                pieces.append(var_name + ":" + str(value))
        arg_string = ",".join(pieces)
        string = (RED + BOLD + '>> ' + END + 'Calling {0}({1})'.format(fn.__code__.co_name, arg_string))
        if len(kwargs):
            string = (RED + BOLD + '>> ' + END + 'Calling {0} with args {1} and kwargs {2}'.format(fn.__code__.co_name,
                                                                                                   arg_string, kwargs))
        print(string)
        result = fn(*args, **kwargs)
        string = BLUE + BOLD + '<< ' + END + 'Return {0} with result :{1}'.format(fn.__code__.co_name, result)
        print(string)
        return result
    return func
def logx(supress_args=[], supress_all_args=False, supress_result=False, receiver=None):
    """
    logs parameters and result
    takes arguments
        supress_args - list of parameter names to supress
        supress_all_args - boolean to supress all arguments
        supress_result - boolean to supress result
        receiver - custom logging function which takes a string as input; defaults to logging on stdout
    """
    def decorator(fn):
        def emit(text):
            # Route through the custom receiver when one is configured.
            if receiver:
                receiver(text)
            else:
                print(text)

        def func(*args, **kwargs):
            if not supress_all_args:
                pieces = []
                for i, value in enumerate(args):
                    var_name = fn.__code__.co_varnames[i]
                    if var_name != "self" and var_name not in supress_args:
                        pieces.append(var_name + ":" + str(value))
                arg_string = ",".join(pieces)
                string = (RED + BOLD + '>> ' + END + 'Calling {0}({1})'.format(fn.__code__.co_name, arg_string))
                if len(kwargs):
                    string = (
                        RED + BOLD + '>> ' + END + 'Calling {0} with args {1} and kwargs {2}'.format(
                            fn.__code__.co_name,
                            arg_string, kwargs))
                emit(string)
            result = fn(*args, **kwargs)
            if not supress_result:
                string = BLUE + BOLD + '<< ' + END + 'Return {0} with result :{1}'.format(fn.__code__.co_name, result)
                emit(string)
            return result
        return func
    return decorator
def value_check(arg_name, pos, allowed_values):
    """
    Decorator factory: verify at call time that the argument named
    *arg_name* (found at position *pos* when passed positionally,
    otherwise looked up in kwargs) is one of *allowed_values*.

    A missing argument is passed through untouched so Python itself
    reports partial application.

    Raises:
        ValueError: when the argument is present but not an allowed value.

    Fixes over the previous version: (1) calling with no arguments at all
    no longer silently returns None without invoking the function;
    (2) the keyword-path error message now names the offending value and
    the allowed set instead of misleadingly blaming the kwarg name;
    (3) the duplicated kwargs branch is shared.
    """
    def decorator(fn):
        def _check_kwarg(kwargs):
            # Shared kwargs path: validate only when the kwarg is present.
            if arg_name in kwargs and kwargs[arg_name] not in allowed_values:
                raise ValueError(
                    "'{0}' for '{1}' not in allowed values {2}".format(
                        kwargs[arg_name], arg_name, allowed_values))

        def logic(*args, **kwargs):
            if pos < len(args):
                # The argument was passed positionally.
                if args[pos] not in allowed_values:
                    raise ValueError(
                        "'{0}' at position {1} not in allowed values {2}".format(
                            args[pos], pos, allowed_values))
            else:
                _check_kwarg(kwargs)
            return fn(*args, **kwargs)
        return logic
    return decorator
def type_check(arg_name, pos, reqd_type):
    """
    allows type checking at runtime for args or kwargs

    Decorator factory: checks that the argument named *arg_name* (found
    at position *pos* when passed positionally, otherwise looked up in
    kwargs) is an instance of *reqd_type*, raising TypeError otherwise.
    """
    def decorator(fn):
        # brevity compromised in favour of readability
        def logic(*args, **kwargs):
            arg_count = len(args)
            if arg_count:
                if pos < arg_count:
                    # The argument was passed positionally.
                    if isinstance(args[pos], reqd_type):
                        return fn(*args, **kwargs)
                    else:
                        raise TypeError("'{0}' at position {1} not of type {2}".format(args[pos], pos, reqd_type))
                else:
                    if arg_name in kwargs:
                        value = kwargs[arg_name]
                        if isinstance(value, reqd_type):
                            return fn(*args, **kwargs)
                        else:
                            raise TypeError("'{0}' is not of type {1}".format(arg_name, reqd_type))
                    else:
                        # partially applied functions because of incomplete args, let python handle this
                        return fn(*args, **kwargs)
            else:
                # NOTE(review): when called with no positional args and the
                # kwarg absent, control falls through and returns None
                # without ever calling fn -- likely a bug (compare the
                # arg_count branch above, which forwards the call).
                if arg_name in kwargs:
                    value = kwargs[arg_name]
                    if isinstance(value, reqd_type):
                        return fn(*args, **kwargs)
                    else:
                        raise TypeError("'{0}' is not of type {1}".format(arg_name, reqd_type))
        return logic
    return decorator | {
"repo_name": "kashifrazzaqui/again",
"path": "again/decorate.py",
"copies": "1",
"size": "7286",
"license": "mit",
"hash": -8577842810936702000,
"line_mean": 35.435,
"line_max": 119,
"alpha_frac": 0.4733735932,
"autogenerated": false,
"ratio": 4.545227698066126,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5518601291266128,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
try:
import simplejson as json
except ImportError:
import json
from tornado.web import HTTPError, RequestHandler
from pycloudia.uitls.decorators import generate_list, generate_dict
from pycloudia.uitls.defer import maybe_deferred, return_value, inline_callbacks
def http_request_handler(cls):
@wraps(cls)
class RequestHandlerDecorator(RequestHandler):
subject = None
def prepare(self):
self.subject = cls()
def get(self, *args, **kwargs):
deferred = maybe_deferred(self.subject.get(*args, **kwargs))
deferred.addCallbacks(self._send_success, self._send_failure)
def _send_success(self, response):
self.finish(json.dumps({
'data': response,
'code': 0,
'message': None,
}))
def _send_failure(self, exception):
self.finish(json.dumps({
'data': None,
'code': self._get_failure_code(exception),
'message': str(exception),
}))
@staticmethod
def _get_failure_code(exception):
if isinstance(exception, HTTPError):
return exception.status_code
return getattr(exception, 'code', 500)
return RequestHandlerDecorator
def http_error(exception_cls, code):
def http_error_call(func):
@wraps(func)
def http_error_decorator(*args, **kwargs):
try:
return func(*args, **kwargs)
except exception_cls as e:
raise HTTPError(code, e)
return http_error_decorator
return http_error_call
def http_jsonify(encode_func):
def http_jsonify_call(func):
@wraps(func)
@inline_callbacks
def http_jsonify_decorator(*args, **kwargs):
obj = yield maybe_deferred(func(*args, **kwargs))
return_value(encode_func(obj))
return http_jsonify_decorator
return http_jsonify_call
def http_jsonify_list(encode_func):
@generate_list
def encode_list(obj_list):
for obj in obj_list:
yield encode_func(obj)
def http_jsonify_list_call(func):
@wraps(func)
@inline_callbacks
def http_jsonify_list_decorator(*args, **kwargs):
obj_list = yield maybe_deferred(func(*args, **kwargs))
return_value(encode_list(obj_list))
return http_jsonify_list_decorator
return http_jsonify_list_call
def http_jsonify_dict(encode_func):
@generate_dict
def encode_dict(obj_dict):
for key, obj in obj_dict.iteritems():
yield key, encode_func(obj)
def http_jsonify_dict_call(func):
@wraps(func)
@inline_callbacks
def http_jsonify_dict_decorator(*args, **kwargs):
obj_dict = yield maybe_deferred(func(*args, **kwargs))
return_value(encode_dict(obj_dict))
return http_jsonify_dict_decorator
return http_jsonify_dict_call
| {
"repo_name": "cordis/pycloudia-chat",
"path": "pyligaforex/rest/decorators.py",
"copies": "1",
"size": "3030",
"license": "mit",
"hash": -6624981193899774000,
"line_mean": 28.1346153846,
"line_max": 80,
"alpha_frac": 0.601650165,
"autogenerated": false,
"ratio": 4.04,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0020417853751187086,
"num_lines": 104
} |
from functools import wraps
try: # Python 2.*
from django.utils.encoding import force_unicode
except ImportError: # Python 3.*
from django.utils.encoding import force_text
force_unicode = force_text
try: # Django >= 1.4
from django.utils import timezone
except ImportError: # Django < 1.4
from datetime import datetime
timezone = datetime
from importlib import import_module
from .conf import settings
def add_prefix(key):
    """Prepend the configured session prefix ("<prefix>:") to *key*
    unless it already starts with it; a no-op when no prefix is set."""
    configured = settings.SESSION_REDIS_PREFIX
    if configured and not force_unicode(key).startswith('%s:' % configured):
        return '%s:%s' % (configured, key)
    return key
def remove_prefix(key):
    """Strip one leading occurrence of the configured "<prefix>:" from
    *key*; when a prefix is configured the key is also stringified."""
    configured = settings.SESSION_REDIS_PREFIX
    if configured:
        key = str(key).replace('%s:' % configured, '', 1)
    return key
def prefix(fn):
    """Decorator: run the first positional argument (the redis key)
    through add_prefix() before calling *fn*."""
    @wraps(fn)
    def wrapped(*args, **kwargs):
        args = (add_prefix(args[0]),) + tuple(args[1:])
        return fn(*args, **kwargs)
    return wrapped
def import_by_path(dotted_path):
    """Resolve a dotted path like "pkg.mod.Attr" to the attribute object.

    Raises:
        ImportError: for a malformed path, a missing module, or a missing
            attribute (all three underlying errors are normalised).
    """
    try:
        module_path, attr_name = dotted_path.rsplit('.', 1)
        attr = getattr(import_module(module_path), attr_name)
    except (ValueError, ImportError, AttributeError):
        raise ImportError('can not import %s' % dotted_path)
    return attr
| {
"repo_name": "ProDG/django-redis-sessions-fork",
"path": "redis_sessions_fork/utils.py",
"copies": "1",
"size": "1418",
"license": "bsd-3-clause",
"hash": -2217537386608763400,
"line_mean": 23.0338983051,
"line_max": 60,
"alpha_frac": 0.6142454161,
"autogenerated": false,
"ratio": 3.90633608815427,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.502058150425427,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
# Option sets where a request must supply exactly one member
# (e.g. exactly one of ledger_hash / ledger_index).
unique_options = {
    'ledger': ['ledger_hash', 'ledger_index'],
}
class RippleRPCError(Exception):
    """An error reported inside an RPC response: name, numeric code and
    human-readable message.  (Python 2 code: __str__ goes via unicode.)"""

    def __init__(self, name, code, message):
        self.name = name
        self.code = code
        self.message = message

    def __unicode__(self):
        # Text form: "<name>, <code>: <message>".
        return '{name}, {code}: {message}'.format(
            name=self.name,
            code=self.code,
            message=self.message,
        )

    def __str__(self):
        return unicode(self)
def check_result(fn):
    """Decorator: unwrap the "result" field of an RPC response dict and
    hand it to *fn*; raise RippleRPCError when the response carries an
    error.

    Raises:
        KeyError: when "result" or "result.status" is missing/empty.
        RippleRPCError: when the status is "error".
    """
    @wraps(fn)
    def wrapper(res):
        result = res.pop('result', None)
        if not result:
            raise KeyError(
                'The response did not return a "result" field'
            )
        status = result.pop('status', None)
        if not status:
            raise KeyError(
                'The response did not return a "result.status" field'
            )
        if status == 'error':
            raise RippleRPCError(
                name=result['error'],
                code=result['error_code'],
                message=result['error_message'],
            )
        return fn(result)
    return wrapper
def check_options(*options):
    """
    Raise if the request contains more than one option in an option
    set. For example, a request should only contain one of ledger_hash
    or ledger_index.

    *options* names entries of the module-level ``unique_options`` dict;
    for each named set, the wrapped call's kwargs must contain exactly
    one member of that set.  Raises ValueError otherwise.
    """
    def wrapper(fn):
        @wraps(fn)
        def wrapped(*args, **kwargs):
            # The decorator is pointless without at least one set name.
            if not options:
                raise ValueError(
                    'At least one option set is needed: '
                    '{need}'.format(
                        need=', '.join(unique_options.keys())
                    )
                )
            # Collect the member lists of the requested sets.
            check = [
                v for (k, v) in unique_options.items()
                if k in options
            ]
            # A length mismatch means some requested name is unknown.
            if len(options) != len(check):
                diff = set(options) - set(unique_options.keys())
                raise ValueError(
                    'Invalid option set: {options}'.format(
                        options=', '.join(diff)
                    )
                )
            for unique in check:
                # kwargs must mention exactly one member of each set.
                found = [
                    k for k in kwargs.keys() if k in unique
                ]
                if not found:
                    raise ValueError(
                        'At least one option is needed: {need}'.format(
                            need=', '.join(unique)
                        )
                    )
                if len(found) > 1:
                    raise ValueError(
                        'Only one option can be specified: '
                        '{need}'.format(
                            need=', '.join(unique),
                        )
                    )
            return fn(*args, **kwargs)
        return wrapped
    return wrapper
| {
"repo_name": "thelinuxkid/ripple",
"path": "ripple/jsonrpc.py",
"copies": "1",
"size": "3095",
"license": "mit",
"hash": -3510120293464666000,
"line_mean": 29.0485436893,
"line_max": 71,
"alpha_frac": 0.4584814216,
"autogenerated": false,
"ratio": 4.881703470031546,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5840184891631546,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
"""
wraps helps to remember information about wrapped function,
for example attributes like __name__, __doc__
details: http://docs.python.org/2/library/functools.html#functools.wraps
"""
def instance_cache(func):
    """Memoise *func*'s result on its instance as ``_<func name>``.

    Only the first call's result is kept: the arguments of later calls
    are ignored and the cached value is returned.  Combine with
    ``@property`` (property outermost) for cached computed attributes::

        class MyModel(models.Model):
            @property
            @instance_cache
            def first_another(self):
                return AnotherModel.objects.all()[0]
    """
    @wraps(func)
    def wrapped(instance, *args, **kwargs):
        cache_attr = "_{0}".format(func.__name__)
        if not hasattr(instance, cache_attr):
            # First call: compute once and stash on the instance.
            setattr(instance, cache_attr, func(instance, *args, **kwargs))
        return getattr(instance, cache_attr)
    return wrapped
def self_if_blank_arg(func):
    """Apply *func* only when at least one argument is truthy; otherwise
    return the instance itself, untouched.

    Handy for chainable querysets: a filter method decorated with this
    silently becomes a no-op when its argument is empty/None/0::

        @self_if_blank_arg
        def in_country(self, country):
            return self.filter(country=country)
    """
    @wraps(func)
    def wrapped(instance, *args, **kwargs):
        if not (any(args) or any(kwargs.values())):
            # Everything is falsy -> skip the call, keep the chain alive.
            return instance
        return func(instance, *args, **kwargs)
    return wrapped
| {
"repo_name": "st4lk/django-relish",
"path": "relish/decorators/generic.py",
"copies": "1",
"size": "2665",
"license": "bsd-3-clause",
"hash": 5485489625308807000,
"line_mean": 27.9673913043,
"line_max": 74,
"alpha_frac": 0.6037523452,
"autogenerated": false,
"ratio": 4.449081803005009,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00031055900621118014,
"num_lines": 92
} |
from functools import wraps
try:
from decorator.src.decorator import decorator
except:
from decorator import decorator
class NotFoundException(Exception):
    """404 response: carries the server's first error message when the
    body is JSON, else a generic "Not found: <url>" message."""

    def __init__(self, response):
        try:
            self.data = response.json()
        except ValueError:
            # Body was not JSON; fall back to the request URL.
            msg = "Not found: " + response.url
        else:
            if 'errors' in self.data:
                msg = self.data['errors'][0]['message']
            else:
                msg = str(self.data)
        super(NotFoundException, self).__init__(msg)
class GenericException(Exception):
    """Any other non-OK response: "<status>: <json body>" when the body
    parses, otherwise "Unknown error: <status>"."""

    def __init__(self, response):
        try:
            self.data = response.json()
        except ValueError:
            msg = "Unknown error: %d" % response.status_code
        else:
            msg = "%d: %s" % (response.status_code, self.data)
        super(GenericException, self).__init__(msg)
class AuthenticationException(Exception):
    """401 response: the supplied credentials were rejected."""

    def __init__(self, response):
        try:
            msg = "%d: Invalid User / Password" % response.status_code
        except ValueError:
            # Fallback when the status code will not format as an int.
            msg = "Invalid Authentication"
        super(AuthenticationException, self).__init__(msg)
def maybe_throw(response):
    """Raise the exception matching a non-OK response; return silently
    otherwise.  GenericException instances additionally receive the
    parsed JSON body in ``.data`` (or the raw bytes in ``.content``)."""
    if response.ok:
        return
    if response.status_code == 404:
        raise NotFoundException(response)
    if response.status_code == 401:
        raise AuthenticationException(response)
    e = GenericException(response)
    try:
        e.data = response.json()
    except ValueError:
        e.content = response.content
    raise e
@decorator
def ok_or_error(fn, *args, **kw):
    """Call *fn* (returning a requests-style response), raise on a
    non-OK status via maybe_throw, and return response.ok."""
    response = fn(*args, **kw)
    maybe_throw(response)
    return response.ok
@decorator
def response_or_error(fn, *args, **kw):
    """Call *fn*, raise on a non-OK status via maybe_throw, and return
    the parsed JSON body of the response."""
    response = fn(*args, **kw)
    maybe_throw(response)
    return response.json()
| {
"repo_name": "robinson96/GRAPE",
"path": "stashy/stashy/errors.py",
"copies": "1",
"size": "1853",
"license": "bsd-3-clause",
"hash": 3291324731120601000,
"line_mean": 26.6567164179,
"line_max": 70,
"alpha_frac": 0.58283864,
"autogenerated": false,
"ratio": 4.299303944315545,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5382142584315545,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
try:
from io import BytesIO
except ImportError: # pragma: no cover
from cStringIO import StringIO as BytesIO
from flask import Blueprint, abort, g, request
from werkzeug.exceptions import InternalServerError
from celery import states
from . import celery
from .utils import url_for
# Environ value types that can be copied into the Celery task payload
# verbatim (everything else is dropped when capturing the request).
text_types = (str, bytes)
try:
    text_types += (unicode,)
except NameError:
    # no unicode on Python 3
    pass

# Blueprint exposing the task-status endpoint defined below.
tasks_bp = Blueprint('tasks', __name__)
@celery.task
def run_flask_request(environ):
    """Celery task: replay a captured WSGI *environ* through the
    auxiliary Flask app and return (body, status_code, headers)."""
    from .wsgi_aux import app
    if '_wsgi.input' in environ:
        # Restore the request body as the file-like object WSGI expects.
        environ['wsgi.input'] = BytesIO(environ['_wsgi.input'])

    # Create a request context similar to that of the original request
    # so that the task can have access to flask.g, flask.request, etc.
    with app.request_context(environ):
        # Record the fact that we are running in the Celery worker now
        g.in_celery = True

        # Run the route function and record the response
        try:
            rv = app.full_dispatch_request()
        except:
            # If we are in debug mode we want to see the exception
            # Else, return a 500 error
            # (bare except is deliberate here: any failure becomes a 500)
            if app.debug:
                raise
            rv = app.make_response(InternalServerError())
        return (rv.get_data(), rv.status_code, rv.headers)
def async(f):
    """
    This decorator transforms a sync route to asynchronous by running it
    in a background thread.

    NOTE(review): `async` became a reserved keyword in Python 3.7, so
    this module fails to parse there; renaming (plus an alias for
    existing callers) is required before upgrading -- confirm the
    supported interpreter range.
    """
    @wraps(f)
    def wrapped(*args, **kwargs):
        # If we are already running the request on the celery side, then we
        # just call the wrapped function to allow the request to execute.
        if getattr(g, 'in_celery', False):
            return f(*args, **kwargs)

        # If we are on the Flask side, we need to launch the Celery task,
        # passing the request environment, which will be used to reconstruct
        # the request object. The request body has to be handled as a special
        # case, since WSGI requires it to be provided as a file-like object.
        environ = {k: v for k, v in request.environ.items()
                   if isinstance(v, text_types)}
        if 'wsgi.input' in request.environ:
            environ['_wsgi.input'] = request.get_data()
        t = run_flask_request.apply_async(args=(environ,))

        # Return a 202 response, with a link that the client can use to
        # obtain task status that is based on the Celery task id.
        if t.state == states.PENDING or t.state == states.RECEIVED or \
                t.state == states.STARTED:
            return '', 202, {'Location': url_for('tasks.get_status', id=t.id)}

        # If the task already finished, return its return value as response.
        # This would be the case when CELERY_ALWAYS_EAGER is set to True.
        return t.info
    return wrapped
@tasks_bp.route('/status/<id>', methods=['GET'])
def get_status(id):
    """
    Return status about an asynchronous task. If this request returns a 202
    status code, it means that task hasn't finished yet. Else, the response
    from the task is returned.
    """
    task = run_flask_request.AsyncResult(id)
    if task.state == states.PENDING:
        # Celery reports never-seen ids as PENDING, so treat that as 404.
        abort(404)
    if task.state == states.RECEIVED or task.state == states.STARTED:
        # Still in flight: point the client back at this status endpoint.
        return '', 202, {'Location': url_for('tasks.get_status', id=id)}
    # Finished: task.info holds the (body, status, headers) tuple.
    return task.info
| {
"repo_name": "send2zhao/boilerplate",
"path": "flack/tasks.py",
"copies": "1",
"size": "3386",
"license": "mit",
"hash": 3448926707813755000,
"line_mean": 34.6421052632,
"line_max": 78,
"alpha_frac": 0.6408741878,
"autogenerated": false,
"ratio": 4.0893719806763285,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5230246168476328,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
from again.utils import unique_hex
from ..utils.stats import Stats, Aggregator
from ..exceptions import VykedServiceException
from ..utils.common_utils import json_file_to_dict, valid_timeout
import asyncio
import logging
import socket
import setproctitle
import time
import traceback
import json
from ..config import CONFIG
_tcp_timeout = CONFIG.TCP_TIMEOUT
def publish(func=None, blocking=False):
    """
    Publish the decorated function's return value as a message emitted from
    this endpoint; the endpoint name is the function's own name.
    """
    # Support bare @publish as well as @publish(blocking=True).
    if func is None:
        return partial(publish, blocking=blocking)

    @wraps(func)
    def wrapper(self, *args, **kwargs):  # outgoing
        message = func(self, *args, **kwargs)
        # Payload dicts may carry 'self' (e.g. built from locals()); strip it
        # before the message leaves the process.
        message.pop('self', None)
        self._publish(func.__name__, message, blocking=blocking)
        return None

    wrapper.is_publish = True
    return wrapper
def subscribe(func):
    """
    Listen for publications from a specific endpoint of a service; the
    decorated function receives each publication from the remote side.
    """
    decorated = _get_subscribe_decorator(func)
    decorated.is_subscribe = True
    return decorated
def xsubscribe(func=None, strategy='DESIGNATION', blocking=False):
    """
    Listen for publications from a specific endpoint of a service. When
    multiple instances subscribe to an endpoint, only one of them receives
    each event, and the publish is retried until an acknowledgment arrives
    from the other end.

    :param func: the function to decorate; its name is the event that
        subscribers register for.
    :param strategy: delivery strategy. 'RANDOM' hands the event to an
        arbitrary interested instance; 'LEADER' hands it to the first alive
        instance registered for the endpoint.
    """
    if func is None:
        return partial(xsubscribe, strategy=strategy, blocking=blocking)

    decorated = _get_subscribe_decorator(func)
    decorated.is_xsubscribe = True
    decorated.strategy = strategy
    decorated.blocking = blocking
    return decorated
def _get_subscribe_decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
coroutine_func = func
if not asyncio.iscoroutine(func):
coroutine_func = asyncio.coroutine(func)
return (yield from coroutine_func(*args, **kwargs))
return wrapper
def request(func):
    """
    Issue an API call to a specific remote endpoint. The decorated function
    builds the parameter dict; the wrapper dispatches it and returns a future.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        params = func(self, *args, **kwargs)
        # Routing information must not travel over the wire; pop it out.
        self = params.pop('self', None)
        entity = params.pop('entity', None)
        app_name = params.pop('app_name', None)
        # Tag the call with a fresh id so the response can be correlated.
        params['request_id'] = unique_hex()
        return self._send_request(app_name, endpoint=func.__name__,
                                  entity=entity, params=params)

    wrapper.is_request = True
    return wrapper
def api(func=None, timeout=None):  # incoming
    """
    Expose a request/response api endpoint: requests are received here and
    the return value becomes the response.

    Every endpoint function must accept:
      - request_id
      - entity (partition/routing key)
    followed by its own keyword arguments.
    """
    if func is None:
        return partial(api, timeout=timeout)
    return _get_api_decorator(func=func, timeout=timeout)
def deprecated(func=None, replacement_api=None):
    """Mark an api endpoint as deprecated, optionally naming its successor."""
    if func is None:
        return partial(deprecated, replacement_api=replacement_api)
    return _get_api_decorator(func=func, old_api=func.__name__,
                              replacement_api=replacement_api)
def _get_api_decorator(func=None, old_api=None, replacement_api=None, timeout=None):
    """Wrap an endpoint ``func`` with timing, stats bookkeeping, exception
    handling and response-packet construction for the TCP api layer.

    ``old_api``/``replacement_api`` are set when the endpoint is deprecated;
    ``timeout`` overrides the configured TCP timeout when valid.
    """
    @asyncio.coroutine
    @wraps(func)
    def wrapper(*args, **kwargs):
        _logger = logging.getLogger(__name__)
        # Wall-clock and CPU time in milliseconds, for the stats loggers.
        start_time = int(time.time() * 1000)
        start_process_time = int(time.process_time() * 1000)
        # The service instance is always the first positional argument.
        self = args[0]
        # Routing metadata injected by the transport; stripped before the
        # endpoint itself is invoked.
        rid = kwargs.pop('request_id')
        entity = kwargs.pop('entity')
        from_id = kwargs.pop('from_id')
        wrapped_func = func
        result = None
        error = None
        failed = False
        api_timeout = _tcp_timeout
        # NOTE(review): 'succesful' is misspelled but may be matched verbatim
        # downstream (e.g. by Aggregator consumers) -- do not "fix" casually.
        status = 'succesful'
        success = True
        # Promote plain callables so sync and async endpoints behave alike.
        if not asyncio.iscoroutine(func):
            wrapped_func = asyncio.coroutine(func)
        if valid_timeout(timeout):
            api_timeout = timeout
        Stats.tcp_stats['total_requests'] += 1
        try:
            # shield() lets the endpoint keep running even if wait_for times
            # out; only the caller observes the TimeoutError.
            result = yield from asyncio.wait_for(asyncio.shield(wrapped_func(self, **kwargs)), api_timeout)
        except asyncio.TimeoutError as e:
            Stats.tcp_stats['timedout'] += 1
            error = str(e)
            status = 'timeout'
            success = False
            failed = True
            logging.exception("TCP request had a timeout for method %s", func.__name__)
        except VykedServiceException as e:
            # Service-level exceptions are "handled": still counted as a
            # response and not marked failed.
            Stats.tcp_stats['total_responses'] += 1
            error = str(e)
            status = 'handled_error'
            _logger.info('Handled exception %s for method %s ', e.__class__.__name__, func.__name__)
        except Exception as e:
            # Unexpected errors: record full context to the stats and
            # exceptions loggers, including the serialized call parameters.
            Stats.tcp_stats['total_errors'] += 1
            error = str(e)
            status = 'unhandled_error'
            success = False
            failed = True
            _logger.exception('Unhandled exception %s for method %s ', e.__class__.__name__, func.__name__)
            _stats_logger = logging.getLogger('stats')
            _method_param = json.dumps(kwargs)
            d = {"exception_type": e.__class__.__name__, "method_name": func.__name__, "message": str(e),
                 "method_param": _method_param, "service_name": self._service_name,
                 "hostname": socket.gethostbyname(socket.gethostname())}
            _stats_logger.info(dict(d))
            _exception_logger = logging.getLogger('exceptions')
            # Replace the short message with the full traceback for the
            # exceptions log.
            d["message"] = traceback.format_exc()
            _exception_logger.info(dict(d))
        else:
            Stats.tcp_stats['total_responses'] += 1
        end_time = int(time.time() * 1000)
        end_process_time = int(time.process_time() * 1000)
        hostname = socket.gethostname()
        # Process titles look like '<service>_<suffix>'; drop the suffix.
        service_name = '_'.join(setproctitle.getproctitle().split('_')[:-1])
        logd = {
            'endpoint': func.__name__,
            'time_taken': end_time - start_time,
            'hostname': hostname, 'service_name': service_name
        }
        logging.getLogger('stats').debug(logd)
        _logger.debug('Time taken for %s is %d milliseconds', func.__name__, end_time - start_time)
        _logger.debug('Timeout for %s is %s seconds', func.__name__, api_timeout)
        # call to update aggregator, designed to replace the stats module.
        Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,
                                server_type='tcp', time_taken=end_time - start_time,
                                process_time_taken=end_process_time - start_process_time)
        # Deprecated endpoints advertise their replacement in the packet.
        if not old_api:
            return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
                                              error=error, failed=failed, method=func.__name__,
                                              service_name=self.name)
        else:
            return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
                                              error=error, failed=failed, old_api=old_api,
                                              replacement_api=replacement_api, method=func.__name__,
                                              service_name=self.name)
    wrapper.is_api = True
    return wrapper
def task_queue(func=None, queue_name=None):
    """Register the decorated handler as a consumer of ``queue_name``."""
    if func is None:
        return partial(task_queue, queue_name=queue_name)

    @wraps(func)
    def wrapper(*args, **kwargs):
        # Promote plain callables so sync and async handlers work alike.
        if asyncio.iscoroutine(func):
            target = func
        else:
            target = asyncio.coroutine(func)
        return (yield from target(*args, **kwargs))

    wrapper.queue_name = queue_name
    wrapper.is_task_queue = True
    return wrapper
def enqueue(func=None, queue_name=None):
    """Push the decorated function's return value onto ``queue_name``."""
    if func is None:
        return partial(enqueue, queue_name=queue_name)

    @wraps(func)
    def wrapper(self, *args, **kwargs):  # outgoing
        message = func(self, *args, **kwargs)
        # Drop the service reference before the payload leaves the process.
        message.pop('self', None)
        self._enqueue(queue_name, message)
        return None

    return wrapper
| {
"repo_name": "amanwriter/vyked",
"path": "vyked/decorators/tcp.py",
"copies": "1",
"size": "8739",
"license": "mit",
"hash": 8091175935342554000,
"line_mean": 35.2614107884,
"line_max": 120,
"alpha_frac": 0.6092230232,
"autogenerated": false,
"ratio": 4.15351711026616,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5262740133466159,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
from again.utils import unique_hex
from ..utils.stats import Stats, Aggregator
from ..exceptions import VykedServiceException
from ..utils.common_utils import valid_timeout, X_REQUEST_ID, get_uuid
import asyncio
import logging
import socket
import setproctitle
import time
import traceback
import json
from ..config import CONFIG
from ..shared_context import SharedContext
_tcp_timeout = CONFIG.TCP_TIMEOUT
def publish(func=None, blocking=False):
    """
    Publish the decorated function's return value as a message from this
    endpoint; the function's own name is the endpoint name.
    """
    # Allow both bare @publish and parameterized @publish(blocking=True).
    if func is None:
        return partial(publish, blocking=blocking)

    @wraps(func)
    def wrapper(self, *args, **kwargs):  # outgoing
        outgoing_payload = func(self, *args, **kwargs)
        # Strip any service reference before the payload is serialized.
        outgoing_payload.pop('self', None)
        self._publish(func.__name__, outgoing_payload, blocking=blocking)
        return None

    wrapper.is_publish = True
    return wrapper
def subscribe(func):
    """
    Listen for publications from a specific endpoint of a service; the
    decorated function is invoked for each publication received.
    """
    decorated = _get_subscribe_decorator(func)
    decorated.is_subscribe = True
    return decorated
def xsubscribe(func=None, strategy='DESIGNATION', blocking=False):
    """
    Listen for publications from a specific endpoint of a service. When
    multiple instances subscribe, only one receives each event and delivery
    is retried until acknowledged.

    :param func: the function to decorate; its name is the event that
        subscribers register for.
    :param strategy: 'RANDOM' delivers to an arbitrary interested instance;
        'LEADER' delivers to the first alive instance that registered.
    """
    if func is None:
        return partial(xsubscribe, strategy=strategy, blocking=blocking)

    decorated = _get_subscribe_decorator(func)
    decorated.is_xsubscribe = True
    decorated.strategy = strategy
    decorated.blocking = blocking
    return decorated
def _get_subscribe_decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
coroutine_func = func
if not asyncio.iscoroutine(func):
coroutine_func = asyncio.coroutine(func)
return (yield from coroutine_func(*args, **kwargs))
return wrapper
def request(func):
    """
    Issue an API call to a specific remote endpoint; the decorated function
    builds the parameters, the wrapper dispatches them and returns a future.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        params = func(self, *args, **kwargs)
        # Pop routing information out of the payload before dispatching.
        self = params.pop('self', None)
        entity = params.pop('entity', None)
        app_name = params.pop('app_name', None)
        # Fresh id so the eventual response can be correlated to this call.
        params['request_id'] = unique_hex()
        return self._send_request(app_name, endpoint=func.__name__,
                                  entity=entity, params=params)

    wrapper.is_request = True
    return wrapper
def api(func=None, timeout=None):  # incoming
    """
    Expose a request/response api endpoint: the wrapped function receives
    requests and its return value becomes the response.

    Every endpoint function must accept:
      - request_id
      - entity (partition/routing key)
    followed by its own keyword arguments.
    """
    if func is None:
        return partial(api, timeout=timeout)
    return _get_api_decorator(func=func, timeout=timeout)
def deprecated(func=None, replacement_api=None):
    """Mark an api endpoint as deprecated, optionally naming its successor."""
    if func is None:
        return partial(deprecated, replacement_api=replacement_api)
    return _get_api_decorator(func=func, old_api=func.__name__,
                              replacement_api=replacement_api)
def _get_api_decorator(func=None, old_api=None, replacement_api=None, timeout=None):
    """Wrap an endpoint ``func`` with timing, stats bookkeeping, request
    tracking, exception handling and response-packet construction for the
    TCP api layer.

    ``old_api``/``replacement_api`` are set for deprecated endpoints;
    ``timeout`` overrides the configured TCP timeout when valid.
    """
    @asyncio.coroutine
    @wraps(func)
    def wrapper(*args, **kwargs):
        _logger = logging.getLogger(__name__)
        # Wall-clock and CPU time in milliseconds, for the stats loggers.
        start_time = int(time.time() * 1000)
        start_process_time = int(time.process_time() * 1000)
        # The service instance is always the first positional argument.
        self = args[0]
        # Routing metadata injected by the transport; stripped before the
        # endpoint itself is invoked.
        rid = kwargs.pop('request_id')
        entity = kwargs.pop('entity')
        from_id = kwargs.pop('from_id')
        wrapped_func = func
        result = None
        error = None
        failed = False
        api_timeout = _tcp_timeout
        # NOTE(review): 'succesful' is misspelled but may be matched verbatim
        # downstream (e.g. by Aggregator consumers) -- kept as-is.
        status = 'succesful'
        success = True
        # Promote plain callables so sync and async endpoints behave alike.
        if not asyncio.iscoroutine(func):
            wrapped_func = asyncio.coroutine(func)
        if valid_timeout(timeout):
            api_timeout = timeout
        Stats.tcp_stats['total_requests'] += 1
        # Propagate the caller's tracking id, or mint a new one, and expose
        # it via the shared context for downstream calls.
        tracking_id = kwargs.pop(X_REQUEST_ID, None) or get_uuid()
        SharedContext.set(X_REQUEST_ID, tracking_id)
        try:
            # shield() lets the endpoint keep running even if wait_for times
            # out; only the caller observes the TimeoutError.
            result = yield from asyncio.wait_for(asyncio.shield(wrapped_func(self, **kwargs)), api_timeout)
        except asyncio.TimeoutError as e:
            Stats.tcp_stats['timedout'] += 1
            error = str(e)
            status = 'timeout'
            success = False
            failed = True
            logging.exception("%s TCP request had a timeout for method %s", tracking_id, func.__name__)
        except VykedServiceException as e:
            # Service-level exceptions are "handled": still counted as a
            # response and not marked failed.
            Stats.tcp_stats['total_responses'] += 1
            error = str(e)
            status = 'handled_error'
            _logger.info('%s Handled exception %s for method %s ', tracking_id, e.__class__.__name__, func.__name__)
        except Exception as e:
            # Unexpected errors: record full context to the stats and
            # exceptions loggers, including the serialized call parameters.
            Stats.tcp_stats['total_errors'] += 1
            error = str(e)
            status = 'unhandled_error'
            success = False
            failed = True
            _logger.exception('%s Unhandled exception %s for method %s ', tracking_id, e.__class__.__name__, func.__name__)
            _stats_logger = logging.getLogger('stats')
            _method_param = json.dumps(kwargs)
            d = {"exception_type": e.__class__.__name__, "method_name": func.__name__, "message": str(e),
                 "method_param": _method_param, "service_name": self._service_name,
                 "hostname": socket.gethostbyname(socket.gethostname()), X_REQUEST_ID: tracking_id}
            _stats_logger.info(dict(d))
            _exception_logger = logging.getLogger('exceptions')
            # Replace the short message with the full traceback for the
            # exceptions log.
            d["message"] = traceback.format_exc()
            _exception_logger.info(dict(d))
        else:
            Stats.tcp_stats['total_responses'] += 1
        end_time = int(time.time() * 1000)
        end_process_time = int(time.process_time() * 1000)
        hostname = socket.gethostname()
        # Process titles look like '<service>_<suffix>'; drop the suffix.
        service_name = '_'.join(setproctitle.getproctitle().split('_')[:-1])
        # Fix: the original dict listed the 'endpoint' key twice; the
        # duplicate (with the identical value) has been removed.
        logd = {
            'endpoint': func.__name__,
            'time_taken': end_time - start_time,
            'hostname': hostname,
            'service_name': service_name,
            'api_execution_threshold_exceed': False,
            X_REQUEST_ID: tracking_id
        }
        method_execution_time = (end_time - start_time)
        # Slow calls are promoted to INFO level so they stand out.
        if method_execution_time > (CONFIG.SLOW_API_THRESHOLD * 1000):
            logd['api_execution_threshold_exceed'] = True
            logging.getLogger('stats').info(logd)
        else:
            logging.getLogger('stats').debug(logd)
        logging.getLogger('tcp').info('%s %d %s', func.__name__, end_time - start_time, tracking_id)
        # call to update aggregator, designed to replace the stats module.
        Aggregator.update_stats(endpoint=func.__name__, status=status, success=success,
                                server_type='tcp', time_taken=end_time - start_time,
                                process_time_taken=end_process_time - start_process_time)
        # Deprecated endpoints advertise their replacement in the packet.
        if not old_api:
            return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
                                              error=error, failed=failed, method=func.__name__,
                                              service_name=self.name)
        else:
            return self._make_response_packet(request_id=rid, from_id=from_id, entity=entity, result=result,
                                              error=error, failed=failed, old_api=old_api,
                                              replacement_api=replacement_api, method=func.__name__,
                                              service_name=self.name)
    wrapper.is_api = True
    return wrapper
def task_queue(func=None, queue_name=None):
    """Register the decorated handler as a consumer of ``queue_name``."""
    if func is None:
        return partial(task_queue, queue_name=queue_name)

    @wraps(func)
    def wrapper(*args, **kwargs):
        # Sync handlers are promoted so both styles can be awaited.
        if asyncio.iscoroutine(func):
            target = func
        else:
            target = asyncio.coroutine(func)
        return (yield from target(*args, **kwargs))

    wrapper.queue_name = queue_name
    wrapper.is_task_queue = True
    return wrapper
def enqueue(func=None, queue_name=None):
    """Push the decorated function's return value onto ``queue_name``."""
    if func is None:
        return partial(enqueue, queue_name=queue_name)

    @wraps(func)
    def wrapper(self, *args, **kwargs):  # outgoing
        outgoing_payload = func(self, *args, **kwargs)
        # Strip the service reference before the payload is serialized.
        outgoing_payload.pop('self', None)
        self._enqueue(queue_name, outgoing_payload)
        return None

    return wrapper
| {
"repo_name": "1mgOfficial/vyked",
"path": "vyked/decorators/tcp.py",
"copies": "1",
"size": "9301",
"license": "mit",
"hash": -6005088078110196000,
"line_mean": 35.4745098039,
"line_max": 123,
"alpha_frac": 0.6084292012,
"autogenerated": false,
"ratio": 4.122783687943262,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0016830099705621706,
"num_lines": 255
} |
from functools import wraps, partial
from django.core.validators import RegexValidator
from django.forms import Form, Field, CharField, TextInput, FileField
from collections import OrderedDict
from django.forms.formsets import formset_factory
class UploadStaticForm(Form):
    """Single-field form used to upload a static asset file."""
    file = FileField()
class FormFromPattern(Form):
    """
    Django form built dynamically from a report pattern description.

    Each pattern entry maps a field name to a dict with ``type``, ``caption``,
    ``hint``, ``check`` (a validation regex) and ``order``. String-typed
    entries become validated ``CharField``s; list-typed entries become nested
    formsets built recursively from the sub-pattern. (Python 2 code: uses
    ``iteritems`` and ``unicode``.)
    """

    def create_fields(self, pattern, *args, **kwargs):
        """Populate ``self.fields`` and ``self.nested_formsets`` from ``pattern``."""
        for key, value in pattern.iteritems():
            if key != 'STATIC_DIR':
                # Single isinstance with a tuple replaces the original
                # chained ``isinstance(...) or isinstance(...)``.
                if isinstance(value['type'], (unicode, str)):
                    self.fields[key] = CharField(required=True, label=value['caption'],
                                                 widget=TextInput(attrs={'placeholder': value['hint'],
                                                                         }),
                                                 validators=[RegexValidator(value['check'], 'Wrong format')])
                elif isinstance(value['type'], list):  # formset
                    # Nested patterns render as formsets of sub-forms built
                    # from the first element of the list; ``wraps`` keeps the
                    # partial looking like the class for formset_factory.
                    FormFromPatternFormSet = formset_factory(
                        wraps(FormFromPattern)(partial(FormFromPattern, pattern=value['type'][0], tags=False)),
                        extra=1, can_delete=True)
                    self.nested_formsets.append({'caption': value['caption'],
                                                 'name': key,
                                                 'formset': FormFromPatternFormSet(prefix=key, *args, **kwargs)})

    def __init__(self, pattern, tags, *args, **kwargs):
        super(FormFromPattern, self).__init__(*args, **kwargs)
        self.nested_formsets = []
        # Render fields in the order declared by each entry's 'order' key.
        pattern = OrderedDict(sorted(pattern.items(), key=lambda i: i[1]['order']))
        self.create_fields(pattern, *args, **kwargs)
        if tags:
            self.fields['tags'] = CharField(required=False, label='Tags',
                                            widget=TextInput(attrs={'placeholder': 'Tags separated with comma',
                                                                    }), )

    def is_valid(self):
        """Valid only if this form and every nested formset validate."""
        form_is_valid = super(FormFromPattern, self).is_valid()
        for nested_formset in self.nested_formsets:
            if not nested_formset['formset'].is_valid():
                form_is_valid = False
        return form_is_valid

    def process(self):
        """Return cleaned data with each nested formset's data merged in by name."""
        data = self.cleaned_data
        for nested_formset in self.nested_formsets:
            # Skip empty extra forms (falsy cleaned_data).
            formset_data = [f.cleaned_data for f in nested_formset['formset'] if f.cleaned_data]
            data[nested_formset['name']] = formset_data
        return data
class SearchForm(Form):
    """Minimal search form with a single optional query field."""
    q = CharField(required=False, label='Search')
"repo_name": "maranathaaa/templado",
"path": "templado/forms.py",
"copies": "2",
"size": "2619",
"license": "bsd-2-clause",
"hash": 8019699657351266000,
"line_mean": 46.6363636364,
"line_max": 113,
"alpha_frac": 0.5547919053,
"autogenerated": false,
"ratio": 4.761818181818182,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0027185170746435007,
"num_lines": 55
} |
from functools import wraps, partial
from .helpers import FSMLogDescriptor
def fsm_log_by(func):
    """Record the ``by`` keyword argument (if given) in the FSM log while the
    wrapped transition runs."""
    @wraps(func)
    def wrapped(instance, *args, **kwargs):
        # Without a 'by' argument there is nothing to record.
        if 'by' not in kwargs:
            return func(instance, *args, **kwargs)
        with FSMLogDescriptor(instance, 'by', kwargs['by']):
            return func(instance, *args, **kwargs)
    return wrapped
def fsm_log_description(func=None, allow_inline=False):
    """Record a transition description in the FSM log.

    The description is taken from the ``description`` keyword argument; when
    absent and ``allow_inline`` is set, the descriptor itself is passed in so
    the transition body can set it inline.
    """
    if func is None:
        return partial(fsm_log_description, allow_inline=allow_inline)

    @wraps(func)
    def wrapped(instance, *args, **kwargs):
        with FSMLogDescriptor(instance, 'description') as descriptor:
            if 'description' in kwargs:
                descriptor.set(kwargs['description'])
            elif allow_inline:
                kwargs['description'] = descriptor
            return func(instance, *args, **kwargs)
    return wrapped
| {
"repo_name": "ticosax/django-fsm-log",
"path": "django_fsm_log/decorators.py",
"copies": "2",
"size": "1035",
"license": "mit",
"hash": 7035178754776893000,
"line_mean": 29.4411764706,
"line_max": 70,
"alpha_frac": 0.5971014493,
"autogenerated": false,
"ratio": 4.330543933054393,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 34
} |
from functools import wraps, partial
from inspect import getfullargspec
from typing import Callable, Union, Any, TypeVar, Tuple, Generic, cast
# Generic type variables shared by the helpers in this module.
A = TypeVar('A')
B = TypeVar('B')
C = TypeVar('C')
def curried(func: Callable[..., B]) -> Callable[[A], Callable[..., Union[Callable, B]]]:
    """Decorator enabling automatic currying: calling with fewer positional
    arguments than the function requires returns a new curried partial."""
    @wraps(func)
    def _curried(*args: Any, **kwargs: Any) -> Union[B, Callable[..., Union[Callable, B]]]:
        # Unwind any stack of partials to count the positionals already bound
        # and to reach the underlying function's signature.
        target = func
        already_bound = 0
        while isinstance(target, partial):
            if target.args:
                already_bound += len(target.args)
            target = target.func
        spec = getfullargspec(target)
        remaining = len(spec.args) - already_bound
        if len(args) == remaining:
            # All positionals are now available: perform the real call.
            return func(*args, **kwargs)
        # Otherwise bind what we have and stay curried.
        return curried(partial(func, *args, **kwargs))
    return _curried
class Identity:
    """Callable identity: returns its single argument unchanged."""
    def __init__(self) -> None:
        # Mimic a function's __name__ for nicer debugging output.
        self.__name__ = 'identity'
    def __call__(self, a: A) -> A:
        return a
    def __str__(self) -> str:
        return '(a => a)'
# Module-level singleton exported as the canonical identity function.
I = Identity()
class Val(Generic[A]):
    """Wrap a constant value; calling the instance returns it."""
    def __init__(self, value: A) -> None:
        self.value = value
        # Mimic a function's __name__ for nicer debugging output.
        self.__name__ = self.__class__.__name__
    def __call__(self) -> A:
        return self.value
    def __str__(self) -> str:
        return '{}({})'.format(self.__class__.__name__, self.value)
class ReplaceVal(Generic[A], Val[A]):
    """Like ``Val`` but silently ignores any arguments passed at call time."""
    def __call__(self, *a: Any, **kw: Any) -> A:
        return super().__call__()
def flip(a: A, b: B) -> Tuple[B, A]:
    """Return the two arguments as a pair in swapped order."""
    swapped = (b, a)
    return swapped
# Alias for values that may be given directly or as a zero-argument thunk.
CallByName = Union[Any, Callable[[], Any]]


def call_by_name(b: Union[A, Callable[[], A]]) -> A:
    """Force a by-name value: invoke it when callable, else return it as-is."""
    if callable(b):
        return b()
    return cast(A, b)
def is_not_none(a: Any) -> bool:
    """True for every value except the ``None`` singleton."""
    return not (a is None)
def tupled2(f: Callable[[A, B], C]) -> Callable[[Tuple[A, B]], C]:
    """Adapt a binary function to accept a single 2-tuple argument."""
    def apply_pair(pair: Tuple[A, B]) -> C:
        # Index explicitly so longer sequences still supply the first two.
        return f(pair[0], pair[1])
    return apply_pair
# A trampolined step result: (keep_going, next_args_or_final_value).
TailrecResult = Tuple[bool, Union[A, tuple]]


class tailrec:
    """
    Trampoline decorator. The wrapped function returns ``(cont, payload)``:
    while ``cont`` is true, ``payload`` is the next argument tuple; the final
    ``payload`` is the overall result.
    """
    __slots__ = 'func',

    def __init__(self, func: Callable[..., Tuple[bool, Union[A, tuple]]]) -> None:
        self.func = func

    def __call__(self, *a: Any) -> A:
        state: Union[A, tuple] = a
        running = True
        while running:
            running, state = self.func(*cast(tuple, state))
        return cast(A, state)


tco = tailrec
class mtailrec:
    # NOTE(review): only the slot declaration is visible here; presumably a
    # monadic variant of ``tailrec`` -- confirm against the full source
    # before extending it.
    __slots__ = 'func',
def const(a: A) -> Callable[[B], A]:
    """Return a unary function that ignores its argument and yields ``a``."""
    return lambda b: a
# Explicit public API of this module.
__all__ = ('curried', 'I', 'flip', 'call_by_name', 'Val', 'ReplaceVal', 'is_not_none', 'tupled2', 'tailrec', 'mtailrec',
           'const', 'TailrecResult',)
| {
"repo_name": "tek/amino",
"path": "amino/func.py",
"copies": "1",
"size": "2512",
"license": "mit",
"hash": -3417239705792789500,
"line_mean": 21.6306306306,
"line_max": 120,
"alpha_frac": 0.5290605096,
"autogenerated": false,
"ratio": 3.21227621483376,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9233419989785656,
"avg_score": 0.0015833469296207758,
"num_lines": 111
} |
from functools import wraps, partial
from inspect import iscoroutine
from .loop import Loop, get_current_loop
from .timer import Timer
from .idle import Idle
from .workers import worker
from .stream import Stream
from . import fs
from . import net
from . import process
get_default_loop = Loop.get_default_loop
def sync(*func, timeout=None):
    """
    Coroutine decorator: convert a coroutine into a synchronous function::

        @sync(timeout=2)
        async def main(sleep_for):
            await uvio.sleep(sleep_for)
            return 'main returned ok!'

        print(main(1))

    ``timeout`` (seconds) stops the loop and raises if the coroutine does not
    finish in time.
    """
    # Called as @sync(timeout=...): defer until the function arrives.
    if not func:
        return partial(sync, timeout=timeout)
    func = func[0]
    @wraps(func)
    def inner(*args, **kwargs):
        # Each call runs on a fresh loop named after the function.
        loop = Loop.create(func.__name__)
        coro = func(*args, **kwargs)
        if not iscoroutine(coro):
            raise Exception("{} is not a coroutine (returned from {})".format(coro, func))
        loop.next_tick(coro)
        if timeout:
            def stop_loop():
                loop.stop()
                raise Exception("timeout")
            timer = loop.set_timeout(stop_loop, timeout)
            # Don't wait for the timeout to exit the loop
            timer.unref()
        loop.run()
        loop.close()
        if timeout:
            timer.close()
        # If the coroutine is still suspended the loop exited prematurely.
        # NOTE(review): ``inner`` never returns the coroutine's result, even
        # though the docstring example prints one -- confirm intent upstream.
        if coro.cr_await is not None:
            coro.throw(Exception('coroutine {} should not be running at the end of the loop'.format(coro)))
        # This should not happen
        assert not loop._awaiting, loop._awaiting
        assert not loop.ready, loop.ready
    return inner
def sleep(timeout):
    '''Coroutine that completes after a given time (in seconds).

    Implemented as a ``Timer`` awaitable with no callback.
    '''
    return Timer(None, timeout)
async def set_timeout(func, timeout, repeat=None):
    '''Coroutine that starts after a given time (in seconds).

    Schedules ``func`` on the current loop to run after ``timeout`` seconds;
    ``repeat`` re-arms the timer at that interval. Returns the timer handle.
    '''
    loop = await get_current_loop()
    return loop.set_timeout(func, timeout, repeat=repeat)
async def next_tick(func, *args, **kwargs):
    '''Coroutine that starts after being idle.

    Schedules ``func(*args, **kwargs)`` on the current loop's next tick and
    returns the resulting handle.
    '''
    loop = await get_current_loop()
    return loop.next_tick(func, *args, **kwargs)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| {
"repo_name": "srossross/uvio",
"path": "uvio/__init__.py",
"copies": "1",
"size": "2236",
"license": "mit",
"hash": 1386645413584365000,
"line_mean": 23.3043478261,
"line_max": 107,
"alpha_frac": 0.6176207513,
"autogenerated": false,
"ratio": 3.9575221238938054,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5075142875193805,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
from itertools import product
import numpy as np
from sympy import S, finite_diff_weights, cacheit, sympify
from devito.tools import Tag, as_tuple
class Transpose(Tag):
    """
    Utility class to change the sign of a derivative. This is only needed
    for odd order derivatives, which require a minus sign for the transpose.
    """
    pass


# The two admissible mode markers, used as singletons.
direct = Transpose('direct', 1)
transpose = Transpose('transpose', -1)
class Side(Tag):
    """
    Class encapsulating the side of the shift for derivatives.
    """

    def adjoint(self, matvec):
        """Return the side used by the adjoint (transposed) operator."""
        # Direct mode keeps the same side; transpose mode mirrors left/right
        # while centered is self-adjoint.
        if matvec is direct:
            return self
        if self is centered:
            return centered
        if self is right:
            return left
        if self is left:
            return right
        raise ValueError("Unsupported side value")


# Singleton side markers used throughout the FD machinery.
left = Side('left', -1)
right = Side('right', 1)
centered = Side('centered', 0)
def check_input(func):
    """Guard differential operators against non-Differentiable input."""
    @wraps(func)
    def wrapper(expr, *args, **kwargs):
        try:
            # Plain numbers differentiate to zero without calling through.
            if expr.is_Number:
                return S.Zero
            return func(expr, *args, **kwargs)
        except AttributeError:
            raise ValueError("'%s' must be of type Differentiable, not %s"
                             % (expr, type(expr)))
    return wrapper
def check_symbolic(func):
    """Propagate whether ``expr`` uses symbolic (user-defined) FD
    coefficients, refusing cases that would require the chain rule."""
    @wraps(func)
    def wrapper(expr, *args, **kwargs):
        if expr._uses_symbolic_coefficients:
            expr_dict = expr.as_coefficients_dict()
            # NOTE(review): this condition checks the dict's size once per
            # key and never uses ``item``; presumably it intends to detect
            # composite expressions -- confirm against upstream history.
            if any(len(expr_dict) > 1 for item in expr_dict):
                raise NotImplementedError("Applying the chain rule to functions "
                                          "with symbolic coefficients is not currently "
                                          "supported")
        # Downstream FD builders branch on this flag.
        kwargs['symbolic'] = expr._uses_symbolic_coefficients
        return func(expr, *args, **kwargs)
    return wrapper
def dim_with_order(dims, orders):
    """
    Create all possible derivative orders for each of ``dims``.

    For example ``dim_with_order((x, y), (1, 1))`` outputs:
    [(0, 1), (1, 0), (1, 1)]

    Parameters
    ----------
    dims : tuple
        Dimensions (only their count is used here).
    orders : tuple of int
        Maximum derivative order for each dimension.
    """
    ndim = len(dims)
    # Cap the expansion at order 6 to keep the combinatorics bounded.
    # (Builtin min/max replace the original np.min/np.max on scalars.)
    max_order = min(6, max(orders))
    # Get all combinations and remove the trivial all-zeros derivative
    all_comb = tuple(product(range(max_order + 1), repeat=ndim))[1:]
    # Only keep the ones within each dimension's maximum order
    all_comb = [c for c in all_comb if all(c[k] <= orders[k] for k in range(ndim))]
    return all_comb
def deriv_name(dims, orders):
    """Build the shortcut name (e.g. ``dxdy2``) for a mixed derivative."""
    def fragment(dim, order):
        # Time dimensions are always spelled 't'; others use their root name.
        base = 't' if dim.is_Time else dim.root.name
        return 'd%s%s' % (base, order) if order > 1 else 'd%s' % base

    return ''.join(fragment(d, o) for d, o in zip(dims, orders))
def generate_fd_shortcuts(dims, so, to=0):
    """Create all legal finite-difference derivatives for the given Function.

    Returns a dict mapping shortcut name (e.g. ``dx``, ``dxdy2``, ``dxl``)
    to a ``(factory, description)`` pair; ``so``/``to`` are the space and
    time discretization orders.
    """
    # Per-dimension maximum derivative order: time dims use ``to``.
    orders = tuple(to if i.is_Time else so for i in dims)
    # Imported here to avoid a circular import with the derivative module.
    from devito.finite_differences.derivative import Derivative
    def diff_f(expr, deriv_order, dims, fd_order, side=None, **kwargs):
        return Derivative(expr, *as_tuple(dims), deriv_order=deriv_order,
                          fd_order=fd_order, side=side, **kwargs)
    all_combs = dim_with_order(dims, orders)
    derivatives = {}
    # All conventional FD shortcuts
    for o in all_combs:
        # Dimensions actually differentiated (nonzero order) and their orders.
        fd_dims = tuple(d for d, o_d in zip(dims, o) if o_d > 0)
        d_orders = tuple(o_d for d, o_d in zip(dims, o) if o_d > 0)
        fd_orders = tuple(to if d.is_Time else so for d in fd_dims)
        deriv = partial(diff_f, deriv_order=d_orders, dims=fd_dims, fd_order=fd_orders)
        name_fd = deriv_name(fd_dims, d_orders)
        # NOTE(review): ``dname`` is a generator expression, so the '%s'
        # interpolation below renders a generator repr rather than the
        # dimension names -- confirm against upstream before relying on it.
        dname = (d.root.name for d in fd_dims)
        desciption = 'derivative of order %s w.r.t dimension %s' % (d_orders, dname)
        derivatives[name_fd] = (deriv, desciption)
    # Add non-conventional, non-centered first-order FDs
    for d, o in zip(dims, orders):
        name = 't' if d.is_Time else d.root.name
        # Add centered first derivatives
        deriv = partial(diff_f, deriv_order=1, dims=d, fd_order=o, side=centered)
        name_fd = 'd%sc' % name
        desciption = 'centered derivative staggered w.r.t dimension %s' % d.name
        derivatives[name_fd] = (deriv, desciption)
        # Left
        deriv = partial(diff_f, deriv_order=1, dims=d, fd_order=o, side=left)
        name_fd = 'd%sl' % name
        desciption = 'left first order derivative w.r.t dimension %s' % d.name
        derivatives[name_fd] = (deriv, desciption)
        # Right
        deriv = partial(diff_f, deriv_order=1, dims=d, fd_order=o, side=right)
        name_fd = 'd%sr' % name
        desciption = 'right first order derivative w.r.t dimension %s' % d.name
        derivatives[name_fd] = (deriv, desciption)
    return derivatives
def symbolic_weights(function, deriv_order, indices, dim):
    """Return one symbolic FD coefficient per stencil index.

    Each weight is a placeholder produced by the function's
    ``_coeff_symbol`` hook, later substituted with user-supplied values.
    """
    # Iterate the indices directly instead of via range(len(...)).
    return [function._coeff_symbol(index, deriv_order, function, dim)
            for index in indices]
@cacheit
def numeric_weights(deriv_order, indices, x0):
    # Standard Taylor-based FD weights; the last row of the last table holds
    # the weights for the full stencil at the requested derivative order.
    # NOTE(review): the cached result is a list -- callers must not mutate it.
    return finite_diff_weights(deriv_order, indices, x0)[-1][-1]
def generate_indices(func, dim, order, side=None, x0=None):
    """
    Indices for the finite-difference scheme.

    Parameters
    ----------
    func: Function
        Function that is differentiated
    dim: Dimension
        Dimensions w.r.t which the derivative is taken
    order: Int
        Order of the finite-difference scheme
    side: Side
        Side of the scheme, (centered, left, right)
    x0: Dict of {Dimension: Dimension or Expr or Number}
        Origin of the scheme, ie. `x`, `x + .5 * x.spacing`, ...

    Returns
    -------
    Tuple of (ordered indices, resolved origin for ``dim``)
    """
    # Staggered (non-time) functions need half-node aware indices.
    if func.is_Staggered and not dim.is_Time:
        x0, ind = generate_indices_staggered(func, dim, order, side=side, x0=x0)
    else:
        # Resolve the origin for this dimension, defaulting to the grid node.
        x0 = (x0 or {dim: dim}).get(dim, dim)
        # Check if called from first_derivative()
        ind = generate_indices_cartesian(dim, order, side, x0)
    return ind, x0
def generate_indices_cartesian(dim, order, side, x0):
    """
    Indices for the finite-difference scheme on a cartesian grid.

    Parameters
    ----------
    dim: Dimension
        Dimensions w.r.t which the derivative is taken
    order: Int
        Order of the finite-difference scheme
    side: Side
        Side of the scheme, (centered, left, right)
    x0: Dimension or Expr or Number
        Origin of the scheme, ie. `x`, `x + .5 * x.spacing`, ...

    Returns
    -------
    Ordered tuple of indices
    """
    shift = 0
    # Shift if x0 is not on the grid
    offset_c = 0 if sympify(x0).is_Integer else (dim - x0)/dim.spacing
    # Keep only the fractional part, preserving its sign.
    offset_c = np.sign(offset_c) * (offset_c % 1)
    # left and right max offsets for indices
    o_start = -order//2 + int(np.ceil(-offset_c))
    o_end = order//2 + 1 - int(np.ceil(offset_c))
    offset = offset_c * dim.spacing
    # Spacing
    diff = dim.spacing
    if side in [left, right]:
        # One-sided schemes shift the stencil and flip spacing for 'left'.
        shift = 1
        diff *= side.val
    # Indices
    if order < 2:
        # First order: two points, either on-grid or symmetric about x0.
        ind = [x0, x0 + diff] if offset == 0 else [x0 - offset, x0 + offset]
    else:
        ind = [(x0 + (i + shift) * diff + offset) for i in range(o_start, o_end)]
    return tuple(ind)
def generate_indices_staggered(func, dim, order, side=None, x0=None):
    """
    Indices for the finite-difference scheme on a staggered grid.

    Parameters
    ----------
    func: Function
        Function that is differentiated
    dim: Dimension
        Dimensions w.r.t which the derivative is taken
    order: Int
        Order of the finite-difference scheme
    side: Side
        Side of the scheme, (centered, left, right)
    x0: Dict of {Dimension: Dimension or Expr or Number}
        Origin of the scheme, ie. `x`, `x + .5 * x.spacing`, ...

    Returns
    -------
    Tuple of (origin, ordered indices)
    """
    diff = dim.spacing
    # Origin: explicit x0 entry if given, else the function's own reference.
    start = (x0 or {}).get(dim) or func.indices_ref[dim]
    try:
        ind0 = func.indices_ref[dim]
    except AttributeError:
        ind0 = start
    if start != ind0:
        # Origin differs from the function's reference: use half-node
        # offsets symmetric about the origin.
        ind = [start - diff/2 - i * diff for i in range(0, order//2)][::-1]
        ind += [start + diff/2 + i * diff for i in range(0, order//2)]
        if order < 2:
            ind = [start - diff/2, start + diff/2]
    else:
        # Origin coincides with the reference: whole-node symmetric stencil.
        ind = [start + i * diff for i in range(-order//2, order//2+1)]
        if order < 2:
            ind = [start, start - diff]
    return start, tuple(ind)
def make_shift_x0(shift, ndim):
    """
    Returns a callable that calculates a shifted origin for each derivative
    of an operation derivatives scheme (given by ndim) given a shift object
    which can be a None, a float or a tuple with shape equal to ndim
    """
    # No shift requested: the scheme keeps its default origin.
    if shift is None:
        def _unshifted(s, d, i, j):
            return None
        return _unshifted
    # A single float applies the same fractional-spacing shift everywhere.
    if isinstance(shift, float):
        def _uniform(s, d, i, j):
            return d + s * d.spacing
        return _uniform
    # A tuple supplies one shift per derivative (1-D) or per dimension and
    # derivative (2-D); its shape must match ndim exactly.
    if type(shift) is tuple and np.shape(shift) == ndim:
        if len(ndim) == 1:
            def _per_derivative(s, d, i, j):
                return d + s[j] * d.spacing
            return _per_derivative
        elif len(ndim) == 2:
            def _per_dim_derivative(s, d, i, j):
                return d + s[i][j] * d.spacing
            return _per_dim_derivative
        else:
            raise ValueError("ndim length must be equal to 1 or 2")
    raise ValueError("shift parameter must be one of the following options: "
                     "None, float or tuple with shape equal to %s" % (ndim,))
| {
"repo_name": "opesci/devito",
"path": "devito/finite_differences/tools.py",
"copies": "1",
"size": "9364",
"license": "mit",
"hash": 4298944745306953700,
"line_mean": 32.3238434164,
"line_max": 88,
"alpha_frac": 0.5963263563,
"autogenerated": false,
"ratio": 3.5537001897533207,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.465002654605332,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
from .methods import __methods__
from .errors import IpernityError
from .rest import call_api
def _required_params(info):
params = info.get('parameters', [])
requires = [p['name'] for p in
filter(lambda p: bool(p.get('required', 0)), params)]
# api_key would be handled in rest
try:
requires.remove('api_key')
except ValueError:
pass
return requires
def call(api_method):
    ''' decorator to wrapper api method call for instance method

    Parameters:
        api_method: Ipernity method name to be called.

    Note:
        * requirement for function:
          each function must return a tuple with (params, format_result), where
          "params" is a dict consist of parameters to ipernity api method.
          "format_result" is a function to decode json resonpse.
        * return value of decorated function:
          api json resonpse will be decoded by "format_result" function
          and return to caller.

    Raises:
        IpernityError: if api_method is unknown or required params missing.
    '''
    # use two level decorator here:
    # level 1: "call" to accept decorator paramter 'api_method'
    # level 2: "decorator" is the real decorator to accept function, this
    #          decorator will finally return the wrapper functon "wrapper".
    def decorator(func):
        try:
            info = __methods__[api_method]
        except KeyError:
            raise IpernityError('Method %s not found' % api_method)
        requires = _required_params(info)
        auth_info = info['authentication']
        # partial object for this api call
        request = partial(call_api, api_method,
                          authed=auth_info['token'],
                          http_post=auth_info['post'],
                          signed=auth_info['sign'])

        @wraps(func)
        def wrapper(self, *args, **kwargs):
            params, format_result = func(self, *args, **kwargs)
            # IpernityObject.__id__ handling: fill in the object's own id
            # under the class-declared key when the caller didn't provide it
            idname = getattr(self.__class__, '__id__', None)
            if idname and idname not in params:
                params[idname] = self.id
            # required parameters checking
            if not all([p in params for p in requires]):
                raise IpernityError('parameters missing, required: %s'
                                    % ', '.join(requires))
            resp = request(**params)
            return format_result(resp)
        # expose metadata so AutoDoc can attach generated documentation
        wrapper.ipernity_method = api_method
        wrapper.static = False
        return wrapper
    return decorator
class StaticCaller(staticmethod):
    # staticmethod subclass that also exposes the wrapped function's
    # attributes (e.g. ipernity_method) on the descriptor itself, and keeps
    # a direct reference for later mutation (e.g. setting __doc__ in AutoDoc).
    def __init__(self, func):
        staticmethod.__init__(self, func)
        # Share the function's attribute dict; attribute reads on the
        # descriptor see the function's attributes (and vice versa).
        self.__dict__ = func.__dict__
        self.inner_func = func
def static_call(api_method):
    ''' call decorator for static method

    The same as 'call' decorator, except it design for class static method:
    no `self` is available, so no automatic __id__ injection is done and the
    result is wrapped in a StaticCaller descriptor.
    '''
    def decorator(func):
        try:
            info = __methods__[api_method]
        except KeyError:
            raise IpernityError('Method %s not found' % api_method)
        requires = _required_params(info)
        auth_info = info['authentication']
        # partial object for this api call
        request = partial(call_api, api_method,
                          authed=auth_info['token'],
                          http_post=auth_info['post'],
                          signed=auth_info['sign'])

        @wraps(func)
        def wrapper(*args, **kwargs):
            params, format_result = func(*args, **kwargs)
            # required parameters checking
            if not all([p in params for p in requires]):
                raise IpernityError('parameters missing, required: %s'
                                    % ','.join(requires))
            resp = request(**params)
            return format_result(resp)
        # expose metadata so AutoDoc can attach generated documentation
        wrapper.ipernity_method = api_method
        wrapper.static = True
        return StaticCaller(wrapper)
    return decorator
def method_doc(method, ignore_params=None):
    """Render a help docstring for an Ipernity API method.

    Parameters:
        method: API method name, used as key into __methods__.
        ignore_params: parameter names to omit from the listing
            (e.g. 'api_key', or the object's own id parameter).

    Returns:
        An ascii-encoded docstring describing the method, its auth
        requirements, permissions and parameters.
    """
    # Avoid the shared-mutable-default pitfall of `ignore_params=[]`
    # (interface-compatible: callers passing a list behave identically).
    if ignore_params is None:
        ignore_params = []
    doc = '''
    API: %(method)s
    Description: %(desc)s
    Auth: %(auth)s
    Permission: %(perms)s
    %(params)s
    '''
    info = __methods__[method]
    desc = info['title']
    # resp = info['response']
    auth = 'Required' if info['authentication']['token'] else 'No Need'
    perms = ','.join(['%s:%s' % (k, v)
                      for k, v in info['permissions'].iteritems()]
                     if info['permissions'] else [])
    params_required = []
    params_optional = []
    for param in info['parameters']:
        name = param['name']
        value = param['value']
        required = param.get('required', 0)
        if name in ignore_params:
            continue
        # Bug fix: str.strip() returns a new string -- the result was
        # previously discarded, leaving surrounding whitespace in place.
        value = value.strip()
        line = '%s: %s' % (name, value)
        if required:
            params_required.append(line)
        else:
            params_optional.append(line)
    params = ''
    if params_required:
        params_required.sort()
        params += '\n Required Parameters:\n '
        params += '\n '.join(params_required)
    if params_optional:
        params_optional.sort()
        params += '\n Optional Parameters:\n '
        params += '\n '.join(params_optional)
    context = {
        'method': method,
        'desc': desc,
        'auth': auth,
        'perms': perms,
        'params': params,
    }
    text = doc % context
    # Strip the simple HTML markup used by the upstream API description
    text = text.replace('<code>', "'").replace('</code>', "'")
    text = text.replace('<ul>', "").replace('</ul>', "")
    text = text.replace('<li>', " " * 12).replace('</li>', "")
    # Removed keyword arguments 'errors=' from decode() to support versions of
    # Python < 2.7
    return text.encode('ascii', 'ignore')
class AutoDoc(type):
    # Metaclass that, at class-creation time, replaces the docstring of every
    # api-wrapped method (marked by the `ipernity_method` attribute) with
    # documentation generated from the API metadata via method_doc().
    def __new__(meta, classname, bases, classDict):
        # The class's own id parameter should not appear in instance-method
        # docs, since it is filled in automatically by the `call` wrapper.
        selfname = classDict.get('__id__', None)
        ignore_params = ['api_key']
        for k, v in classDict.items():
            if hasattr(v, 'ipernity_method'):
                method = v.ipernity_method
                if v.static:
                    # StaticCaller: set the doc on the wrapped function
                    v.inner_func.__doc__ = method_doc(method, ignore_params)
                else:
                    ignore_params.append(selfname)
                    v.__doc__ = method_doc(method, ignore_params)
        return type.__new__(meta, classname, bases, classDict)
| {
"repo_name": "oneyoung/python-ipernity-api",
"path": "ipernity_api/reflection.py",
"copies": "1",
"size": "6336",
"license": "apache-2.0",
"hash": 7087239991484043000,
"line_mean": 34.2,
"line_max": 78,
"alpha_frac": 0.5542929293,
"autogenerated": false,
"ratio": 4.258064516129032,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 180
} |
from functools import wraps, partial
from nose.tools import make_decorator as make_dec, istest
from wumpus.events import *
from wumpus.server import Server
from wumpus.client import Client
from wumpus.tests.mock import FakeServer, FakeClient
from wumpus.core import Player
from wumpus.network_node import Network_Node
def server_and_client(function):
    """Test decorator: run *function* once for the server and once per client.

    The wrapped test receives each listener (self.server first, then every
    entry of self.clients) as its second argument.
    """
    @wraps(function)
    def wrapper(self):
        listeners = [self.server] + self.clients
        for node in listeners:
            function(self, node)
    return wrapper
def requires_networking(function):
    """Test decorator: skip the test unless both server and client are running.

    Returns None (a silent no-op) when either side is down; otherwise runs
    and returns the wrapped test's result.
    """
    @wraps(function)
    def wrapper(self):
        if self.server.running and self.client.running:
            return function(self)
        return None
    return wrapper
class testEvents:
    """Event-handling tests exercised against fake server/client nodes."""

    def setup(self):
        self.server = FakeServer()
        self.client = self.client1 = FakeClient()
        self.client2 = FakeClient()
        self.clients = [self.client, self.client2]
        self.player = self.player1 = Player("IDC")
        self.player2 = Player("Still don't care")

    @server_and_client
    def test_join_event(self, listener):
        # Unmock the players attribute
        listener.game.players = []
        if listener.is_server:
            listener.clients = {}
            # NOTE(review): this early return skips the join assertion for
            # the server listener -- confirm that is intentional.
            return True
        event = Join_Event(self.player)
        old_player_count = len(listener.game.players)
        event.handle(listener)
        assert old_player_count + 1 == len(listener.game.players), (old_player_count, len(listener.game.players))

    @requires_networking
    def test_event_broadcasting(self):
        # This test can be naturalized now. It was made awkward because I was
        # trying to make it not require networking. That requirement is not
        # possible.
        self.client.game.players = []
        self.server.game.players = []
        self.client2.game.players = []
        self.server.clients = {}
        # Bug fix: only `wraps` and `partial` are imported from functools;
        # the module `functools` itself is not, so `functools.partial`
        # raised NameError at runtime. Use the imported `partial` directly.
        self.server.broadcast = partial(Server.broadcast, self.server)
        self.server.read = partial(Server.read, self.server)
        self.server.is_server = True
        self.client.is_server = False
        self.client2.is_server = False
        event = Join_Event(self.player)
        print(self.server.read, "REAd")
        self.server.read(None, bytify(event).encode("utf-8"))
        assert len(self.server.game.players) == 1, self.server.clients
        event2 = Join_Event(self.player2)
        self.server.read(None, bytify(event2).encode("utf-8"))
        assert len(self.server.game.players) == 2, self.server.game.players
        assert len(self.client2.game.players) == 2, self.client2.game.players
| {
"repo_name": "marky1991/Legend-of-Wumpus",
"path": "wumpus/tests/test_networking.py",
"copies": "1",
"size": "2809",
"license": "mit",
"hash": -6641253414738168000,
"line_mean": 38.5633802817,
"line_max": 113,
"alpha_frac": 0.6646493414,
"autogenerated": false,
"ratio": 3.7403462050599203,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.490499554645992,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
from operator import add, sub, mul
from heapq import nlargest, nsmallest
from random import randint
import lark
def unpack(m, values):
    """Return the first *m* items of *values*, padded with None up to length m."""
    padded = list(values[:m])
    padded += [None] * (m - len(padded))
    return padded
class Modifier:
    """A partially-applied binary operation.

    Modifier([op, value])(x) evaluates op(value, x).
    """

    def __init__(self, values):
        # Pad to (op, value); missing entries become None.
        fields = [values[i] if i < len(values) else None for i in range(2)]
        self.op, self.value = fields
        # Bind the fixed operand up front.
        self.op = partial(self.op, self.value)

    def __call__(self, value):
        return self.op(value)
class Roll:
    """A dice roll: sum of `times` rolls of a `size`-sided die.

    Optional `colmod` transforms the list of individual rolls (e.g. keep
    highest); optional `mod` transforms the final sum. Both default to
    identity when falsy/missing.
    """

    def __init__(self, values):
        # Pad to (times, size, colmod, mod); missing entries become None.
        fields = [values[i] if i < len(values) else None for i in range(4)]
        self.times, self.size, self.colmod, self.mod = fields
        self.mod = self.mod or (lambda x: x)
        self.colmod = self.colmod or (lambda x: x)

    def __call__(self):
        rolls = [randint(1, self.size) for _ in range(self.times)]
        return self.mod(sum(self.colmod(rolls)))
class GameTransfomer(lark.Transformer):
    # Lark transformer mapping grammar rules to Python values/callables.
    # Each attribute receives (self, children) per the lark Transformer API.
    string = lambda x, y: y[0][1:-1]  # strip surrounding quote characters
    number = lambda x, y: int(y[0])
    min_ = lambda x, y: nsmallest  # keep-lowest collection modifier
    max_ = lambda x, y: nlargest   # keep-highest collection modifier
    add_ = lambda x, y: add
    sub_ = lambda x, y: sub
    mul_ = lambda x, y: mul
    # NOTE(review): divmod returns a (quotient, remainder) tuple, unlike the
    # other operator mappings -- was operator.truediv/floordiv intended?
    div_ = lambda x, y: divmod
    mod = Modifier
    colmod = Modifier
    roll = Roll
# Build the LALR parser from the grammar file, applying GameTransfomer
# to parse-tree nodes on the fly.
with open('char.g') as fd:
    parser = lark.Lark(fd.read(), start="game", parser='lalr', transformer=GameTransfomer())

if __name__ == '__main__':
    # Demo: parse the example game description and pretty-print its tree.
    with open('example.game') as fd:
        tree = parser.parse(fd.read())
print(tree.pretty()) | {
"repo_name": "micaiahparker/diediedie",
"path": "char.py",
"copies": "1",
"size": "1425",
"license": "mit",
"hash": 8818988497614289000,
"line_mean": 25.9056603774,
"line_max": 92,
"alpha_frac": 0.6035087719,
"autogenerated": false,
"ratio": 3.1879194630872485,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.924066440711868,
"avg_score": 0.010152765573713625,
"num_lines": 53
} |
from functools import wraps, partial
from types import FunctionType
from typing import Callable, Any, Dict
from classes import MshException, MultiDecorator, Function
from util import skip_spaces, keep_leading
class RegisterError(Exception):
    """Raised when a function's name does not carry the expected prefix
    during registration."""
# Unbound methods.
parsers = {} # type: Dict[str, Function]
"""
# Before: d(f, args)
# dec_args(f) returns an argument decorator
# def dec_args(olddec:Function):
# # Return a 2-level decorator.
# @wraps(olddec)
# def args_only(*args, **kwargs):
# def func_only(func:Function):
#
# # Create our own decorator syntax
# return olddec(func, *args, **kwargs)
#
# return func_only
# return args_only
# def dec_args(olddec:Function):
# # Return a 2-level decorator.
# def args_only(*args, **kwargs):
# partial(olddec, *args, **kwargs)
# def func_only(func:Function):
#
# # Create our own decorator syntax
# return olddec(func, *args, **kwargs)
#
# return func_only
# return args_only
"""
# This doesn't modify msh directly, to avoid import cycles.
def register(fun:Function, dic:dict, prefix:str, name:str=None):
    """Register *fun* in *dic* and return it unchanged.

    With no explicit *name*, the function's own name must start with
    *prefix*, which is stripped to form the registry key; otherwise
    RegisterError is raised. An explicit *name* is used verbatim.
    """
    if name is not None:
        dic[name] = fun
        return fun
    fname = fun.__name__
    if not fname.startswith(prefix):
        raise RegisterError(fname, prefix)
    dic[fname[len(prefix):]] = fun
    return fun
# parser(in_str, args) args[0] == command, args[1] = argument
def msh(parser: Function):
    # Register *parser* in the module-level `parsers` table, stripping the
    # leading '_' from its name (e.g. `_filter` registers as 'filter').
    return register(parser, parsers, '_')
def mshname(name):
    # Like @msh, but registers the parser under the explicit *name*
    # instead of deriving it from the function's own name.
    def mshname_(parser: Function):
        return register(parser, parsers, '_', name)
    return mshname_
def optional(decorator: MultiDecorator) -> Function:
    """Make *decorator* usable both bare (@deco) and parameterized
    (@deco(key=value)); positional arguments are rejected."""
    @wraps(decorator)
    def inner(parser=None, **kwargs):
        # Parameterized use: no function yet, so defer with the kwargs bound.
        if parser is None:
            return partial(inner, **kwargs)
        # Bare use (or the deferred call arriving with the function):
        # kwargs is empty for the bare form, the partial's kwargs otherwise.
        if isinstance(parser, FunctionType):
            return decorator(parser, **kwargs)
        raise TypeError('Under NO circumstances can you pass a non-keyword argument into {}()!'
                        .format(decorator.__name__))
    return inner
def get_dedent(parser):
    """Return the parser's accumulated dedent count (0 when never set)."""
    try:
        return parser.dedent
    except AttributeError:
        return 0
# INTERCHANGEABLE WITH @msh
def dedent(parser):
    # Increment the parser's dedent counter; stacking the decorator
    # multiple times accumulates.
    parser.dedent = get_dedent(parser) + 1
    return parser
# I fucking give up.
def ignore_indent(parser):
    """Set the `exclude` flag on *parser* and return it unchanged.

    NOTE(review): presumably consulted by the indent-handling machinery to
    skip this parser -- confirm against the dispatcher.
    """
    setattr(parser, 'exclude', True)
    return parser
# ????: Non-@msh decorator for function composition
# ????: Non-@msh decorator for function concatenation
# (these lines appeared near pushpopf)
# BEGIN @msh decorators...
''' SPEC
@msh
@concatenate(offset=1, optional=True)
def _filter_instr(self, in_str, args):
return (list[tracknum])filter_instr(in_str)
----
filter_instr acoustic_grand say_hello
# pushpop(result) return(run_str)
{ filter_instr acoustic_grand
# return(result)
'''
''' PSEUDOCODE:
Evaluate parser with specified number of arguments.
Save result.
If rest, push result to rest(), and return.
Elif optional, return result.
Else raise error.
'''
@optional
def concatenate(parser=None, *, offset=0, optional=True):
    # Decorator: evaluate the parser on its first `offset` tokens, then, if
    # more input remains, push the result and run the remainder as a new
    # expression; otherwise return the result (or error if not optional).
    # ****: take $optional into account
    @wraps(parser)
    def inner(self, in_str, args):
        leading = keep_leading(in_str, offset)
        rest = skip_spaces(in_str, offset)
        result = parser(self, leading, leading.split())
        if rest:
            # Result becomes available to the trailing expression via the stack
            with self.pushpop(result):
                return self.run_str(rest)
            # ****{1}: I changed the concatenation semantics to pass along inner returns.
            # return result
        elif optional:
            return result
        else:
            raise MshException('{} concatenator requires {} parameters plus expression'
                               .format(parser.__name__, offset))
    return inner
"""@optional
def composition(parser=None, *, orpop=False, optional=True): # optional=False
# ****: take $optional into account -> orpeek
# FIXME: orpop is a bad default {a}
# $parser takes a stack, instead of in_str like usual.
# If we have string: result = run_str().
# # Else, if optional, noop, else error.
# Return parser(result)
@wraps(parser)
def inner(self, in_str, args):
if in_str:
result = self.run_str(in_str)
return parser(self, result)
elif not optional:
raise MshException('{} composition requires parameter expression'.format(parser.__name__))
elif orpop:
return parser(self, self.pop())
else:
return parser(self, self.peek())
return inner"""
@optional
def composition(parser=None, *, ornone=False, optional=True): # optional=False
    # Decorator: feed the parser the *result* of evaluating the argument
    # expression, rather than the raw input string.
    # ****: take $optional into account -> orpeek
    # $parser takes a stack, instead of in_str like usual.
    # If we have string: result = run_str().
    # # Else, if optional, noop, else error.
    # Return parser(result)
    @wraps(parser)
    def inner(self, in_str, args):
        if in_str:
            result = self.run_str(in_str)
            return parser(self, result)
        elif not optional:
            raise MshException('{} composition requires parameter expression'.format(parser.__name__))
        elif ornone:
            # No argument expression: pass None explicitly
            return parser(self, None)
        else:
            # No argument expression: fall back to the top of the stack
            return parser(self, self.peek())
    return inner
def no_args(parser):
    """Decorator: reject any input -- the wrapped parser takes no arguments."""
    @wraps(parser)
    def inner(self, in_str, args):
        if not in_str and not args:
            return parser(self)
        raise MshException('{} takes no arguments'.format(parser.__name__))
    return inner
| {
"repo_name": "jimbo1qaz/msh",
"path": "utils/decorators.py",
"copies": "1",
"size": "5832",
"license": "mit",
"hash": -6142436296086617000,
"line_mean": 24.8053097345,
"line_max": 102,
"alpha_frac": 0.6193415638,
"autogenerated": false,
"ratio": 3.814257684761282,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4933599248561282,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
from ...utils.logging import logger
from ...constants import LOG_URL_MAX_LENGTH
def logging_dispatch_middleware(reporter, dispatch):
    """Wrap an async *dispatch* callable with before/after log messages.

    Logs the reporter's intel count before dispatching and the number of
    news items found afterwards; passes the result through unchanged.
    """
    @wraps(dispatch)
    async def enhanced(*args, **kwargs):
        log = _log_factory(reporter)
        # Bug fix: the original referenced an undefined name `r`
        # (`r.meta.intel`), raising NameError -- the parameter is `reporter`.
        log('Dispatching reporter with {} intel'.format(len(reporter.meta.intel)))
        news_list = await dispatch(*args, **kwargs)
        log('Found {} news'.format(len(news_list)))
        return news_list
    return enhanced
def logging_fetch_middleware(reporter, fetch):
    """Wrap an async *fetch* callable to log its start and outcome."""
    @wraps(fetch)
    async def enhanced(*args, **kwargs):
        log = _log_factory(reporter)
        log('Fetch started')
        fetched = await fetch(*args, **kwargs)
        if not fetched:
            log('Fetch failed', tag='warning')
        else:
            log('Fetch successed')
        return fetched
    return enhanced
def _log_reporter(reporter, message, tag='info'):
    """Log *message* prefixed with the reporter's schedule id and url.

    The url is truncated to LOG_URL_MAX_LENGTH; *tag* selects the logger
    method (e.g. 'info', 'warning').
    """
    schedule_id = reporter.schedule.id
    url = reporter.url
    if len(url) > LOG_URL_MAX_LENGTH:
        url = url[:LOG_URL_MAX_LENGTH] + '...'
    title = '[Reporter of schedule {} for {}]'.format(schedule_id, url)
    emit = getattr(logger, tag)
    emit('{}: {}'.format(title, message))
def _log_factory(reporter):
    # Bind *reporter* so middleware can call log(message[, tag=...]) without
    # re-passing the reporter each time.
    return partial(_log_reporter, reporter)
| {
"repo_name": "kuc2477/news",
"path": "news/contrib/logging/middlewares.py",
"copies": "1",
"size": "1312",
"license": "mit",
"hash": -768308979139703000,
"line_mean": 30.2380952381,
"line_max": 75,
"alpha_frac": 0.6356707317,
"autogenerated": false,
"ratio": 3.9047619047619047,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5040432636461905,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
import inspect
from json import JSONEncoder
from threading import local as threadlocal
from typing import AnyStr, Tuple, Optional
import warnings
import copy
import logging
from datetime import datetime, timedelta
from urllib.parse import urlparse, urlunsplit
MAX_PAYLOAD_LENGTH = 128 * 1024
MAX_STRING_LENGTH = 1024
__all__ = [] # type: ignore
class SanitizingJSONEncoder(JSONEncoder):
    """
    A JSON encoder which handles filtering and conversion from JSON-
    incompatible types to strings.

    >>> import logging
    >>> from json import loads
    >>> logger = logging.getLogger(__name__)
    >>> encoder = SanitizingJSONEncoder(logger, keyword_filters=['bananas'])
    >>> items = loads(encoder.encode(FilterDict({'carrots': 4, 'bananas': 5})))
    >>> items['bananas']
    '[FILTERED]'
    >>> items['carrots']
    4
    """
    # Sentinel strings substituted into the encoded output
    filtered_value = '[FILTERED]'
    recursive_value = '[RECURSIVE]'
    unencodeable_value = '[BADENCODING]'

    def __init__(self, logger: logging.Logger, keyword_filters=None, **kwargs):
        # Filters match case-insensitively as substrings of dict keys;
        # a bytes copy is kept for matching bytes keys.
        self.logger = logger
        self.filters = list(map(str.lower, keyword_filters or []))
        self.bytes_filters = [x.encode('utf-8') for x in self.filters]
        super(SanitizingJSONEncoder, self).__init__(**kwargs)

    def encode(self, obj):
        # First pass without trimming; if the payload is oversized,
        # re-sanitize with string trimming enabled and encode again.
        safe_obj = self._sanitize(obj, False)
        payload = super(SanitizingJSONEncoder, self).encode(safe_obj)
        if len(payload) > MAX_PAYLOAD_LENGTH:
            safe_obj = self._sanitize(safe_obj, True)
            return super(SanitizingJSONEncoder, self).encode(safe_obj)
        else:
            return payload

    def filter_string_values(self, obj, ignored=None, seen=None):
        """
        Remove any value from the dictionary which match the key filters
        """
        if not ignored:
            ignored = set()
        # Keep track of nested objects to avoid having references garbage
        # collected (which would cause id reuse and false positive recursion)
        if seen is None:
            seen = []
        if type(ignored) is list:
            ignored = set(ignored)
        if id(obj) in ignored:
            return self.recursive_value
        if isinstance(obj, dict):
            ignored.add(id(obj))
            seen.append(obj)
            clean_dict = {}
            for key, value in obj.items():
                if self._should_filter(key):
                    clean_dict[key] = self.filtered_value
                else:
                    clean_dict[key] = self.filter_string_values(
                        value, ignored, seen)
            return clean_dict
        return obj

    def default(self, obj):
        """
        Coerce values to strings if possible, otherwise replace with
        '[BADENCODING]'
        """
        try:
            if isinstance(obj, bytes):
                return str(obj, encoding='utf-8', errors='replace')
            else:
                return str(obj)
        except Exception:
            self.logger.exception('Could not add object to payload')
            return self.unencodeable_value

    def _sanitize(self, obj, trim_strings, ignored=None, seen=None):
        """
        Replace recursive values and trim strings longer than
        MAX_STRING_LENGTH
        """
        if not ignored:
            ignored = set()
        # Keep track of nested objects to avoid having references garbage
        # collected (which would cause id reuse and false positive recursion)
        if seen is None:
            seen = []
        if type(ignored) is list:
            ignored = set(ignored)
        if id(obj) in ignored:
            return self.recursive_value
        elif isinstance(obj, dict):
            ignored.add(id(obj))
            seen.append(obj)
            return self._sanitize_dict(obj, trim_strings, ignored, seen)
        elif isinstance(obj, (set, tuple, list)):
            # Sets and tuples are converted to plain lists for JSON
            ignored.add(id(obj))
            seen.append(obj)
            items = []
            for value in obj:
                items.append(
                    self._sanitize(value, trim_strings, ignored, seen))
            return items
        elif trim_strings and isinstance(obj, str):
            return obj[:MAX_STRING_LENGTH]
        else:
            return obj

    def _sanitize_dict_key_value(self, clean_dict, key, clean_value):
        """
        Safely sets the provided key on the dictionary by coercing the key
        to a string
        """
        if isinstance(key, bytes):
            try:
                key = str(key, encoding='utf-8', errors='replace')
                clean_dict[key] = clean_value
            except Exception:
                self.logger.exception(
                    'Could not add sanitize key for dictionary, '
                    'dropping value.')
        # NOTE: a bytes key that decoded successfully is now a str, so the
        # branch below (harmlessly) assigns it a second time.
        if isinstance(key, str):
            clean_dict[key] = clean_value
        else:
            try:
                clean_dict[str(key)] = clean_value
            except Exception:
                self.logger.exception(
                    'Could not add sanitize key for dictionary, '
                    'dropping value.')

    def _sanitize_dict(self, obj, trim_strings, ignored, seen):
        """
        Trim individual values in an object, applying filtering if the object
        is a FilterDict
        """
        if isinstance(obj, FilterDict):
            obj = self.filter_string_values(obj)
        clean_dict = {}
        for key, value in obj.items():
            clean_value = self._sanitize(value, trim_strings, ignored, seen)
            self._sanitize_dict_key_value(clean_dict, key, clean_value)
        return clean_dict

    def _should_filter(self, key):
        # Substring match of any configured filter against the lowercased key
        if isinstance(key, str):
            key_lower = key.lower()
            return any(f in key_lower for f in self.filters)
        if isinstance(key, bytes):
            key_lower = key.lower()
            return any(f in key_lower for f in self.bytes_filters)
        return False
class FilterDict(dict):
    """
    A dict subclass marking its contents for key-based filtering when
    encoded by SanitizingJSONEncoder.
    """
ContentType = Tuple[str, Optional[str], Optional[str], Optional[str]]


def parse_content_type(value: str) -> ContentType:
    """
    Generate a tuple of (type, subtype, suffix, parameters) from a type based
    on RFC 6838

    >>> parse_content_type("text/plain")
    ('text', 'plain', None, None)
    >>> parse_content_type("application/hal+json")
    ('application', 'hal', 'json', None)
    >>> parse_content_type("application/json;schema=\\"ftp://example.com/a\\"")
    ('application', 'json', None, 'schema="ftp://example.com/a"')
    """
    parameters = None  # type: Optional[str]
    types = value
    if ';' in value:
        # Everything after the first ';' is the parameter section
        types, parameters = value.split(';', 1)
    if '/' not in types:
        return (types, None, None, parameters)
    maintype, subtype = types.split('/', 1)
    if '+' not in subtype:
        return (maintype, subtype, None, parameters)
    subtype, suffix = subtype.split('+', 1)
    return (maintype, subtype, suffix, parameters)
def is_json_content_type(value: str) -> bool:
    """
    Check if a content type is JSON-parseable

    >>> is_json_content_type('text/plain')
    False
    >>> is_json_content_type('application/schema+json')
    True
    >>> is_json_content_type('application/json')
    True
    """
    maintype, subtype, suffix, _ = parse_content_type(value.lower())
    # application/json, or any application/*+json structured syntax
    return maintype == 'application' and 'json' in (subtype, suffix)
def fully_qualified_class_name(obj):
module = inspect.getmodule(obj)
if module is not None and module.__name__ != "__main__":
return module.__name__ + "." + obj.__class__.__name__
else:
return obj.__class__.__name__
def package_version(package_name):
    """Return the installed version string of *package_name*, or None when
    setuptools is unavailable or the package is not installed."""
    try:
        import pkg_resources
    except ImportError:
        return None
    try:
        return pkg_resources.get_distribution(package_name).version
    except pkg_resources.DistributionNotFound:
        return None
def _validate_setter(types, func, should_error=False):
"""
Check that the first argument of a function is of a provided set of types
before calling the body of the wrapped function, printing a runtime warning
(or raising a TypeError) if the validation fails.
"""
@wraps(func)
def wrapper(obj, value):
option_name = func.__name__
if value is None or isinstance(value, types):
func(obj, value)
else:
error_format = '{0} should be {1}, got {2}'
actual = type(value).__name__
requirement = ' or '.join([t.__name__ for t in types])
message = error_format.format(option_name, requirement, actual)
if should_error:
raise TypeError(message)
else:
warnings.warn(message, RuntimeWarning)
return wrapper
# Pre-baked setter validators for the common option types; the "required"
# variant raises TypeError instead of emitting a RuntimeWarning.
validate_str_setter = partial(_validate_setter, (str,))
validate_required_str_setter = partial(_validate_setter, (str,),
                                       should_error=True)
validate_bool_setter = partial(_validate_setter, (bool,))
validate_iterable_setter = partial(_validate_setter, (list, tuple))
validate_int_setter = partial(_validate_setter, (int,))
class ThreadContextVar:
    """
    A wrapper around thread-local variables to mimic the API of contextvars
    """
    # Single threading.local shared by every instance; created lazily.
    LOCALS = None

    @classmethod
    def local_context(cls):
        if not ThreadContextVar.LOCALS:
            ThreadContextVar.LOCALS = threadlocal()
        return ThreadContextVar.LOCALS

    def __init__(self, name, **kwargs):
        self.name = name
        # Mimic ContextVar: only install a default if one was explicitly
        # passed, so get() knows when to raise LookupError.
        if 'default' in kwargs:
            self.default = kwargs['default']
            # Deep copy so this thread starts with a fresh default
            self.set(copy.deepcopy(self.default))

    def get(self):
        store = ThreadContextVar.local_context()
        if hasattr(store, self.name):
            return getattr(store, self.name)
        if not hasattr(self, 'default'):
            raise LookupError("No value for '{}'".format(self.name))
        # First access on this thread: seed it with a fresh copy of the default
        fresh = copy.deepcopy(self.default)
        self.set(fresh)
        return fresh

    def set(self, new_value):
        setattr(ThreadContextVar.local_context(), self.name, new_value)
def sanitize_url(url_to_sanitize: AnyStr) -> Optional[AnyStr]:
    """Strip the query string and fragment from a URL.

    Returns None when the URL cannot be parsed, or when none of the kept
    components (scheme, netloc, path) are present.
    """
    try:
        parts = urlparse(url_to_sanitize)
        # urlunsplit always requires a 5-tuple; query and fragment dropped
        stripped = urlunsplit(
            (parts.scheme, parts.netloc, parts.path, None, None)
        ).strip()
    except Exception:
        return None
    # Empty result => no component of interest was present => failure
    return stripped or None
# to_rfc3339: format a datetime instance to match to_rfc3339/iso8601 with
# milliseconds precision
# Python can do this natively from version 3.6, but we need to include a
# fallback implementation for Python 3.5
try:
    # this will raise if 'timespec' isn't supported (Python < 3.6)
    datetime.utcnow().isoformat(timespec='milliseconds')  # type: ignore

    def to_rfc3339(dt: datetime) -> str:
        return dt.isoformat(timespec='milliseconds')  # type: ignore
except Exception:
    def _get_timezone_offset(dt: datetime) -> str:
        # Render the tz offset as '+HH:MM' / '-HH:MM'; naive datetimes
        # (no tzinfo or no utcoffset) yield an empty string.
        if dt.tzinfo is None:
            return ''

        utc_offset = dt.tzinfo.utcoffset(dt)
        if utc_offset is None:
            return ''

        sign = '+'
        if utc_offset.days < 0:
            sign = '-'
            utc_offset = -utc_offset

        hours_offset, minutes = divmod(utc_offset, timedelta(hours=1))
        minutes_offset, seconds = divmod(minutes, timedelta(minutes=1))

        return '{:s}{:02d}:{:02d}'.format(sign, hours_offset, minutes_offset)

    def to_rfc3339(dt: datetime) -> str:
        # Manual millisecond-precision ISO 8601 formatting for Python 3.5
        return '{:04d}-{:02d}-{:02d}T{:02d}:{:02d}:{:02d}.{:03d}{:s}'.format(
            dt.year,
            dt.month,
            dt.day,
            dt.hour,
            dt.minute,
            dt.second,
            int(dt.microsecond / 1000),
            _get_timezone_offset(dt)
        )
| {
"repo_name": "bugsnag/bugsnag-python",
"path": "bugsnag/utils.py",
"copies": "1",
"size": "12587",
"license": "mit",
"hash": -3181755011718781400,
"line_mean": 30.8658227848,
"line_max": 79,
"alpha_frac": 0.5884642886,
"autogenerated": false,
"ratio": 4.223825503355704,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5312289791955704,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
import logging
def attach_wrapper(obj, func=None):
    """Attach *func* to *obj* as an attribute named after the function.

    Usable directly, or as a decorator factory when *func* is omitted.
    """
    if func is not None:
        setattr(obj, func.__name__, func)
        return func
    return partial(attach_wrapper, obj)
def logged(level, name=None, message=None):
    '''
    Add logging to a function. level is the logging
    level, name is the logger name, and message is the
    log message. If name and message aren't specified,
    they default to the function's module and name.
    '''
    def decorator(func):
        log = logging.getLogger(name or func.__module__)
        logmsg = message or func.__name__

        @wraps(func)
        def wrapper(*args, **kwargs):
            # level/logmsg are read at call time, so the setters below
            # affect subsequent calls.
            log.log(level, logmsg)
            return func(*args, **kwargs)

        # Accessor functions attached as attributes of the wrapper
        @attach_wrapper(wrapper)
        def set_level(newlevel):
            nonlocal level
            level = newlevel

        @attach_wrapper(wrapper)
        def set_message(newmsg):
            nonlocal logmsg
            logmsg = newmsg

        return wrapper
    return decorator
# Example use
# Example use: DEBUG-logged addition
@logged(logging.DEBUG)
def add(x, y):
    return x + y
# Example use: CRITICAL-level log routed to the 'example' logger
@logged(logging.CRITICAL, 'example')
def spam():
    print('Spam!')
# Example involving multiple decorators
import time
def timethis(func):
    """Decorator that prints the wrapped function's name and elapsed time."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        t0 = time.time()
        result = func(*args, **kwargs)
        print(func.__name__, time.time() - t0)
        return result
    return wrapper
# Decorator stacking example: timing wraps logging
@timethis
@logged(logging.DEBUG)
def countdown(n):
    while n > 0:
        n -= 1
# Same decorators, opposite order: logging wraps timing
@logged(logging.DEBUG)
@timethis
def countdown2(n):
    while n > 0:
        n -= 1
if __name__ == '__main__':
    import logging
    logging.basicConfig(level=logging.DEBUG)
    print(add(2, 3))

    # Change the log message at runtime via the attached accessor
    add.set_message('Add called')
    print(add(2, 3))

    # Change the log level
    add.set_level(logging.WARNING)
    print(add(2, 3))

    countdown(100000)
    countdown.set_level(logging.CRITICAL)
    countdown(100000)

    countdown2(100000)
    countdown2.set_level(logging.CRITICAL)
    countdown2(100000)
| {
"repo_name": "hyller/CodeLibrary",
"path": "python-cookbook-master/src/9/defining_a_decorator_with_user_adjustable_attributes/example1.py",
"copies": "2",
"size": "2176",
"license": "unlicense",
"hash": 6561465850107215000,
"line_mean": 21.9052631579,
"line_max": 55,
"alpha_frac": 0.6125919118,
"autogenerated": false,
"ratio": 3.7582037996545767,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5370795711454577,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
import logging
"""
topic: 写一个装饰器来包装一个函数,并且允许用户提供参数在运行时控制装饰器行为。
desc: 引入一个访问函数,使用 nonlocal 来修改内部变量。 然后这个访问函数被作为一个属性赋值给包装函数。
"""
def attach_wrapper(obj, func=None):
    """Set *func* on *obj* under the function's own name; curry when omitted."""
    if func is not None:
        setattr(obj, func.__name__, func)
        return func
    # Called with only obj: act as a decorator factory
    return partial(attach_wrapper, obj)
def logged(level, name=None, message=None):
    """Decorator adding a log call before the function runs.

    level is the logging level; name and message default to the function's
    module and name. set_level/set_message accessors are attached to the
    wrapper for runtime adjustment.
    """
    def decorator(func):
        log = logging.getLogger(name or func.__module__)
        logmsg = message or func.__name__

        @wraps(func)
        def wrapper(*args, **kwargs):
            log.log(level, logmsg)
            return func(*args, **kwargs)

        # Accessor functions, attached as wrapper attributes
        @attach_wrapper(wrapper)
        def set_level(newlevel):
            nonlocal level
            level = newlevel

        @attach_wrapper(wrapper)
        def set_message(newmsg):
            nonlocal logmsg
            logmsg = newmsg

        return wrapper
    return decorator
# Example use: DEBUG-logged addition
@logged(logging.DEBUG)
def add(x, y):
    return x + y
# Example use: CRITICAL-level log routed to the 'example' logger
@logged(logging.CRITICAL, 'example')
def spam():
    print('Spam!')
if __name__ == '__main__':
    # Demo: adjust the decorator's attributes between calls.
    logging.basicConfig(level=logging.DEBUG)
    print(add(1, 2))
    # change the log message
    add.set_message('Add called')
    print(add(1, 3))
    # change the log level
    add.set_level(logging.WARNING)
    print(add(1,4))
| {
"repo_name": "AtlantisFox/Green-Point-Challenge",
"path": "python_cookbook/c09/p05_adjust_attribute.py",
"copies": "1",
"size": "1551",
"license": "mit",
"hash": 2993371002009348600,
"line_mean": 20.4615384615,
"line_max": 61,
"alpha_frac": 0.6114695341,
"autogenerated": false,
"ratio": 2.961783439490446,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9069406819744292,
"avg_score": 0.0007692307692307692,
"num_lines": 65
} |
from functools import wraps, partial
import numpy as np
from menpo.shape import PointCloud
def pointcloud_to_points(wrapped):
    """Decorator that replaces every `menpo.shape.PointCloud` argument
    (positional or keyword) with its raw ``points`` array before calling
    the wrapped function."""
    @wraps(wrapped)
    def wrapper(*args, **kwargs):
        converted_args = [a.points if isinstance(a, PointCloud) else a
                          for a in args]
        converted_kwargs = {k: (v.points if isinstance(v, PointCloud) else v)
                            for k, v in kwargs.items()}
        return wrapped(*converted_args, **converted_kwargs)
    return wrapper
# BOUNDING BOX NORMALISERS
def bb_area(shape):
    r"""
    Computes the area of the bounding box of the provided shape, i.e.
    :math:`h w` with :math:`h`, :math:`w` the bounding box height and width.

    Parameters
    ----------
    shape : `menpo.shape.PointCloud` or `subclass`
        The input shape.

    Returns
    -------
    bb_area : `float`
        The area of the bounding box.
    """
    # np.ptp gives max - min per axis: (height, width) of the bounding box.
    height, width = np.ptp(shape, axis=0)
    return height * width
def bb_perimeter(shape):
    r"""
    Computes the perimeter of the bounding box of the provided shape, i.e.
    :math:`2(h + w)` with :math:`h`, :math:`w` the bounding box height/width.

    Parameters
    ----------
    shape : `menpo.shape.PointCloud` or `subclass`
        The input shape.

    Returns
    -------
    bb_perimeter : `float`
        The perimeter of the bounding box.
    """
    height, width = np.ptp(shape, axis=0)
    return 2 * (height + width)
def bb_avg_edge_length(shape):
    r"""
    Computes the average edge length of the bounding box of the provided
    shape, i.e. :math:`(h + w)/2` with :math:`h`, :math:`w` the bounding
    box height and width.

    Parameters
    ----------
    shape : `menpo.shape.PointCloud` or `subclass`
        The input shape.

    Returns
    -------
    bb_avg_edge_length : `float`
        The average edge length of the bounding box.
    """
    height, width = np.ptp(shape, axis=0)
    return 0.5 * (height + width)
def bb_diagonal(shape):
    r"""
    Computes the diagonal of the bounding box of the provided shape, i.e.
    :math:`\sqrt{h^2 + w^2}` with :math:`h`, :math:`w` the bounding box
    height and width.

    Parameters
    ----------
    shape : `menpo.shape.PointCloud` or `subclass`
        The input shape.

    Returns
    -------
    bb_diagonal : `float`
        The diagonal of the bounding box.
    """
    extents = np.ptp(shape, axis=0)
    return np.sqrt(np.sum(extents ** 2))
# Dispatch table mapping a ``norm_type`` string to the bounding-box measure
# used for normalisation (see ``bb_normalised_error``).
bb_norm_types = {
    'avg_edge_length': bb_avg_edge_length,
    'perimeter': bb_perimeter,
    'diagonal': bb_diagonal,
    'area': bb_area
}
# EUCLIDEAN AND ROOT MEAN SQUARE ERRORS
@pointcloud_to_points
def root_mean_square_error(shape, gt_shape):
    r"""
    Computes the root mean square error between two shapes, i.e.
    :math:`\sqrt{\frac{1}{N}\sum_{i=1}^N(s_i-s^*_i)^2}` over the :math:`N`
    point coordinates of the final shape :math:`s` and ground truth
    :math:`s^*`.

    Parameters
    ----------
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.

    Returns
    -------
    root_mean_square_error : `float`
        The root mean square error.
    """
    diff = shape.ravel() - gt_shape.ravel()
    return np.sqrt(np.mean(diff ** 2))
@pointcloud_to_points
def euclidean_error(shape, gt_shape):
    r"""
    Computes the Euclidean error between two shapes: the mean, over all
    points, of the Euclidean distance between corresponding points of the
    final shape and the ground truth shape.

    Parameters
    ----------
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.

    Returns
    -------
    root_mean_square_error : `float`
        The Euclidean error.
    """
    per_point_distances = np.sqrt(np.sum((shape - gt_shape) ** 2, axis=-1))
    return np.mean(per_point_distances)
# DISTANCE NORMALISER
def distance_two_indices(index1, index2, shape):
    r"""
    Computes the Euclidean distance between two points of a shape.

    Parameters
    ----------
    index1 : `int`
        The index of the first point.
    index2 : `int`
        The index of the second point.
    shape : `menpo.shape.PointCloud`
        The input shape.

    Returns
    -------
    distance_two_indices : `float`
        The Euclidean distance between the points.
    """
    point_a = shape[index1]
    point_b = shape[index2]
    return euclidean_error(point_a, point_b)
# GENERIC NORMALISED ERROR FUNCTIONS
@pointcloud_to_points
def bb_normalised_error(shape_error_f, shape, gt_shape,
                        norm_shape=None, norm_type='avg_edge_length'):
    r"""
    Computes ``shape_error_f(shape, gt_shape)`` normalised by a measure of
    a shape's bounding box.

    Parameters
    ----------
    shape_error_f : `callable`
        The function to be used for computing the error.
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.
    norm_shape : `menpo.shape.PointCloud` or ``None``, optional
        The shape used to compute the normaliser; ``None`` means the ground
        truth shape.
    norm_type : ``{'area', 'perimeter', 'avg_edge_length', 'diagonal'}``, optional
        The bounding-box measure used as the normaliser.

    Returns
    -------
    normalised_error : `float`
        The computed normalised error.
    """
    try:
        normaliser = bb_norm_types[norm_type]
    except KeyError:
        raise ValueError('norm_type must be one of '
                         '{avg_edge_length, perimeter, diagonal, area}.')
    reference = gt_shape if norm_shape is None else norm_shape
    return shape_error_f(shape, gt_shape) / normaliser(reference)
@pointcloud_to_points
def distance_normalised_error(shape_error_f, distance_norm_f, shape, gt_shape):
    r"""
    Computes ``shape_error_f(shape, gt_shape)`` normalised by
    ``distance_norm_f(shape, gt_shape)``.

    Parameters
    ----------
    shape_error_f : `callable`
        The function to be used for computing the error.
    distance_norm_f : `callable`
        The function to be used for computing the normalisation distance
        metric.
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.

    Returns
    -------
    normalised_error : `float`
        The computed normalised error.
    """
    error = shape_error_f(shape, gt_shape)
    normaliser = distance_norm_f(shape, gt_shape)
    return error / normaliser
@pointcloud_to_points
def distance_indexed_normalised_error(shape_error_f, index1, index2, shape,
                                      gt_shape):
    r"""
    Computes ``shape_error_f(shape, gt_shape)`` normalised by the distance
    between two points of the ground truth shape.

    Parameters
    ----------
    shape_error_f : `callable`
        The function to be used for computing the error.
    index1 : `int`
        The index of the first point.
    index2 : `int`
        The index of the second point.
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.

    Returns
    -------
    normalised_error : `float`
        The computed normalised error.
    """
    normaliser = distance_two_indices(index1, index2, gt_shape)
    return shape_error_f(shape, gt_shape) / normaliser
# EUCLIDEAN AND ROOT MEAN SQUARE NORMALISED ERRORS
def root_mean_square_bb_normalised_error(shape, gt_shape, norm_shape=None,
                                         norm_type='avg_edge_length'):
    r"""
    Root mean square error between two shapes, normalised by a measure of
    a shape's bounding box (see :map:`bb_normalised_error`).

    Parameters
    ----------
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.
    norm_shape : `menpo.shape.PointCloud` or ``None``, optional
        The shape used to compute the normaliser; ``None`` means the ground
        truth shape.
    norm_type : ``{'area', 'perimeter', 'avg_edge_length', 'diagonal'}``, optional
        The bounding-box measure used as the normaliser.

    Returns
    -------
    error : `float`
        The computed root mean square normalised error.
    """
    return bb_normalised_error(root_mean_square_error, shape, gt_shape,
                               norm_shape=norm_shape, norm_type=norm_type)
def root_mean_square_distance_normalised_error(shape, gt_shape,
                                               distance_norm_f):
    r"""
    Root mean square error between two shapes, normalised by a distance
    metric between them (see :map:`distance_normalised_error`).

    Parameters
    ----------
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.
    distance_norm_f : `callable`
        The function to be used for computing the normalisation distance
        metric.

    Returns
    -------
    error : `float`
        The computed root mean square normalised error.
    """
    return distance_normalised_error(root_mean_square_error, distance_norm_f,
                                     shape, gt_shape)
def root_mean_square_distance_indexed_normalised_error(shape, gt_shape,
                                                       index1, index2):
    r"""
    Root mean square error between two shapes, normalised by the distance
    between two points of the ground truth shape
    (see :map:`distance_indexed_normalised_error`).

    Parameters
    ----------
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.
    index1 : `int`
        The index of the first point.
    index2 : `int`
        The index of the second point.

    Returns
    -------
    error : `float`
        The computed root mean square normalised error.
    """
    return distance_indexed_normalised_error(root_mean_square_error,
                                             index1, index2, shape, gt_shape)
def euclidean_bb_normalised_error(shape, gt_shape, norm_shape=None,
                                  norm_type='avg_edge_length'):
    r"""
    Euclidean error between two shapes, normalised by a measure of a
    shape's bounding box (see :map:`bb_normalised_error`).

    Parameters
    ----------
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.
    norm_shape : `menpo.shape.PointCloud` or ``None``, optional
        The shape used to compute the normaliser; ``None`` means the ground
        truth shape.
    norm_type : ``{'area', 'perimeter', 'avg_edge_length', 'diagonal'}``, optional
        The bounding-box measure used as the normaliser.

    Returns
    -------
    error : `float`
        The computed Euclidean normalised error.
    """
    return bb_normalised_error(euclidean_error, shape, gt_shape,
                               norm_shape=norm_shape, norm_type=norm_type)
def euclidean_distance_normalised_error(shape, gt_shape, distance_norm_f):
    r"""
    Euclidean error between two shapes, normalised by a distance metric
    between them (see :map:`distance_normalised_error`).

    Parameters
    ----------
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.
    distance_norm_f : `callable`
        The function to be used for computing the normalisation distance
        metric.

    Returns
    -------
    error : `float`
        The computed Euclidean normalised error.
    """
    return distance_normalised_error(euclidean_error, distance_norm_f,
                                     shape, gt_shape)
def euclidean_distance_indexed_normalised_error(shape, gt_shape, index1,
                                                index2):
    r"""
    Euclidean error between two shapes, normalised by the distance between
    two points of the ground truth shape
    (see :map:`distance_indexed_normalised_error`).

    Parameters
    ----------
    shape : `menpo.shape.PointCloud`
        The input shape (e.g. the final shape of a fitting procedure).
    gt_shape : `menpo.shape.PointCloud`
        The ground truth shape.
    index1 : `int`
        The index of the first point.
    index2 : `int`
        The index of the second point.

    Returns
    -------
    error : `float`
        The computed Euclidean normalised error.
    """
    return distance_indexed_normalised_error(euclidean_error,
                                             index1, index2, shape, gt_shape)
| {
"repo_name": "grigorisg9gr/menpofit",
"path": "menpofit/error/base.py",
"copies": "6",
"size": "20913",
"license": "bsd-3-clause",
"hash": 6466178541834077000,
"line_mean": 33.7392026578,
"line_max": 103,
"alpha_frac": 0.5745708411,
"autogenerated": false,
"ratio": 3.5718189581554225,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7146389799255422,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
import re
from itertools import chain, repeat
class NoMatch(Exception):
    """Raised when a guarded function's arguments satisfy no pattern."""
    pass
class Any(object):
    """Composable argument matcher used as a predicate by ``guard``.

    With no arguments it matches anything; with types it matches instances
    of (subclasses of) those types. The static constructors build matchers
    from other predicates (``of``), attribute requirements (``has``),
    regular expressions (``re``) or repeating varargs matchers (``args``).
    """
    def __init__(self, *allowed_types, _matcher=None, _repeat=False):
        self._allowed_types = allowed_types
        if _matcher is None:
            # Default matcher: subclass check against the given types, or
            # match-everything when no types were supplied.
            self.matcher = lambda arg: issubclass(type(arg),
                tuple(self._allowed_types)) if self._allowed_types else True
        else:
            self.matcher = _matcher
        # When True (set by Any.args) this matcher is re-applied to any
        # surplus positional arguments.
        self._repeat = _repeat

    def __call__(self, arg):
        return self.matcher(arg)

    @staticmethod
    def of(*matches):
        """Matcher that succeeds when any of *matches* accepts the argument."""
        baked_matches = _bake_predicates(matches)

        def of_matcher(arg):
            return any(map(lambda p: p(arg), baked_matches))
        return Any(_matcher=of_matcher)

    @staticmethod
    def has(*attrs, **attr_with_check):
        """Matcher that succeeds when the argument has any attribute in
        *attrs*, or an attribute named in *attr_with_check* whose value
        passes the associated predicate."""
        def has_matcher(arg):
            def has_attr_and_predicate(key_predicate):
                key, predicate = key_predicate
                return hasattr(arg, key) and _bake_predicate(predicate)(getattr(arg, key))
            return any(map(partial(hasattr, arg), attrs)) or any(
                map(has_attr_and_predicate, attr_with_check.items()))
        matcher = Any(_matcher=has_matcher)
        return matcher

    @staticmethod
    def re(str_or_re):
        """Matcher that succeeds when the argument matches the given regex
        (string or precompiled pattern) at its start."""
        pattern = re.compile(str_or_re)

        def re_matcher(arg):
            return bool(pattern.match(arg))
        return Any(_matcher=re_matcher)

    @staticmethod
    def args(*predicates):
        """Varargs matcher: like ``Any.of`` but repeated over surplus args."""
        matcher = Any.of(*predicates)
        matcher._repeat = True
        return matcher
def _bake_predicate(p):
    """Turn *p* into a predicate callable.

    Callables (including the ``Any`` type itself) are used as-is; any other
    value becomes an equality check against that value.
    """
    usable_directly = (type(p) != type or p == Any) and callable(p)
    if usable_directly:
        return p
    return lambda candidate: candidate == p
def _bake_predicates(predicates):
    """Bake every entry of *predicates* via ``_bake_predicate``."""
    return list(map(_bake_predicate, predicates))
def _bake_kw_predicates(kw_predicates):
return {k: p for k, p in kw_predicates if callable(p)}
def Pattern(*functions, wrap_fn=None):
    """Chain guarded functions into one dispatcher.

    Calls each function in order and returns the first result whose guard
    accepts the arguments; raises ``NoMatch`` when none do. When *wrap_fn*
    is given its metadata is copied onto the dispatcher.
    """
    def dispatch(*args, **kwargs):
        for candidate in functions:
            try:
                return candidate(*args, **kwargs)
            except NoMatch:
                pass
        raise NoMatch
    if wrap_fn:
        dispatch = wraps(wrap_fn)(dispatch)
    return dispatch
def guard(*predicates, **kw_predicates):
    """Decorator factory that runs the wrapped function only when its
    arguments satisfy the given predicates.

    Positional *predicates* (callables, or plain values compared by
    equality) are matched against positional arguments in order; if the
    last one was built with ``Any.args`` it is repeated over surplus
    varargs. Keyword *kw_predicates* are matched against keyword arguments
    by name. On mismatch the decorated function raises ``NoMatch``; when a
    previous definition of the same name exists in the module, the two are
    chained with ``Pattern`` so the next candidate is tried instead.
    """
    baked_predicates = _bake_predicates(predicates)
    baked_keyword_predicates = _bake_kw_predicates(kw_predicates)
    # Predicate applied to surplus positional args; rejects everything
    # unless the last predicate is a repeating Any.args matcher.
    var_args_predicate = predicates[-1] if predicates and getattr(predicates[-1], '_repeat', False) else lambda a: False

    def test_args(args):
        return len(args) >= len(baked_predicates) and all(
            map(lambda p, x: p(x), chain(baked_predicates, repeat(var_args_predicate)), args))

    def test_kw_args(kwargs):
        # BUG FIX: the old code iterated the dict's keys and indexed into
        # the key string, so keyword predicates never worked. A keyword
        # predicate now matches only when the argument is present and the
        # predicate accepts its value; extra kwargs are not checked.
        return all(name in kwargs and predicate(kwargs[name])
                   for name, predicate in baked_keyword_predicates.items())

    def decorator(fn):
        @wraps(fn)
        def decorated(*args, **kwargs):
            if test_args(args) and test_kw_args(kwargs):
                return fn(*args, **kwargs)
            else:
                raise NoMatch
        if fn.__name__ in fn.__globals__:
            old_definition = fn.__globals__[fn.__name__]
            # FIXME we're creating more recursion, no good in python
            return Pattern(decorated, old_definition)
        return decorated
    return decorator
| {
"repo_name": "Luftzig/pypatterns",
"path": "pypatterns/patterns.py",
"copies": "1",
"size": "3428",
"license": "mit",
"hash": 4459346455901170000,
"line_mean": 28.2991452991,
"line_max": 120,
"alpha_frac": 0.5889731622,
"autogenerated": false,
"ratio": 3.882219705549264,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9969085384228458,
"avg_score": 0.00042149670416140783,
"num_lines": 117
} |
from functools import wraps, partial
import six
from django.http import HttpRequest
from django.views.generic import View
# rest_framework is an optional dependency: when it is missing, define
# minimal stand-in classes so the isinstance() checks below simply never
# match instead of crashing at import time.
try:
    from rest_framework.request import Request as RestRequest
    from rest_framework.views import APIView
except ImportError:
    """
    Fake class for rest_framework
    """
    class RestRequest(object):
        pass

    class APIView(object):
        pass
from .converters import ConverterRegistry
from .exceptions import ValidationError
from .validators import ValidatorRegistry
def _get_lookup(request, name, default, kwargs, extra_kwargs):
# Try to be compatible with older django rest framework.
if hasattr(request, 'query_params'):
return request.query_params.get(name, default)
else:
return request.GET.get(name, default)
def _post_lookup(request, name, default, kwargs, extra_kwargs):
if hasattr(request, 'data'):
return request.data.get(name, default)
elif hasattr(request, 'DATA'):
return request.DATA.get(name, default)
else:
return request.POST.get(name, default)
def _file_lookup(request, name, default, kwargs, extra_kwargs):
if hasattr(request, 'data'):
return request.data.get(name, default)
else:
return request.FILES.get(name, default)
def _post_or_get_lookup(request, name, default, kwargs, extra_kwargs):
    """Prefer the request-body value for *name*; fall back to the query
    string (with *default*) when the body holds no value."""
    from_body = _post_lookup(request, name, None, kwargs, extra_kwargs)
    if from_body is not None:
        return from_body
    return _get_lookup(request, name, default, kwargs, extra_kwargs)
def _header_lookup(request, name, default, kwargs, extra_kwargs):
if request is not None and hasattr(request, 'META'):
return request.META.get(name, default)
else:
return default
def _uri_lookup(request, name, default, kwargs, extra_kwargs):
if name in kwargs:
return kwargs.get(name)
else:
return extra_kwargs.get(name, default)
def param(name, related_name=None, verbose_name=None, default=None, type='string', lookup=_get_lookup, many=False,
          separator=',', validators=None, validator_classes=None):
    """Build a ``_Param`` decorator that extracts, converts and validates
    one request parameter (see ``_Param`` for the argument semantics)."""
    return _Param(
        name=name, related_name=related_name, verbose_name=verbose_name,
        default=default, type=type, lookup=lookup, many=many,
        separator=separator, validators=validators,
        validator_classes=validator_classes)
class _Param(object):
    """Decorator that extracts, converts and validates a single request
    parameter before the wrapped view runs.

    The converted value is injected into the view's kwargs under
    ``related_name``. Instances are normally created through ``param`` or
    the ``GET``/``POST``/``FILE``/... shortcuts, and stacked decorators
    share a single wrapper via the ``__params__`` attribute.
    """

    def __init__(self, name, related_name, verbose_name, default, type, lookup, many, separator, validators,
                 validator_classes):
        self.name = name
        # Fall back to the lookup name when no override is supplied.
        self.related_name = related_name if related_name else name
        self.verbose_name = verbose_name if verbose_name else name
        self.default = default
        self.type = type
        self.lookup = lookup
        self.many = many
        self.separator = separator
        self.validators = ValidatorRegistry.get_validators(validators)
        if validator_classes:
            # Accept either a single validator instance or an iterable.
            if hasattr(validator_classes, '__iter__'):
                self.validators.extend(validator_classes)
            else:
                self.validators.append(validator_classes)

    def __call__(self, func):
        """Decorate *func*; stacked param decorators register on the same
        wrapper instead of nesting."""
        if hasattr(func, '__params__'):
            func.__params__.append(self)
            return func

        @wraps(func)
        def _decorator(*args, **kwargs):
            if len(args) < 1:
                # No request available; call the function unchanged.
                # (Raising an error might be preferable here.)
                return func(*args, **kwargs)
            extra_kwargs = {}
            if isinstance(args[0], View):
                request = args[0].request
                # Update the kwargs from Django REST framework's APIView class
                if isinstance(args[0], APIView):
                    extra_kwargs = args[0].kwargs
            else:
                # Find the first request object
                for arg in args:
                    if isinstance(arg, (RestRequest, HttpRequest)):
                        request = arg
                        break
                else:
                    request = args[0]
            if request:
                # Checkout all the params first.
                for _param in _decorator.__params__:
                    _param._parse(request, kwargs, extra_kwargs)
                # Validate after all the params have been checked out,
                # because some validators need all the params.
                for _param in _decorator.__params__:
                    for validator in _param.validators:
                        validator(_param.related_name, kwargs, _param.verbose_name)
            return func(*args, **kwargs)
        _decorator.__params__ = [self]
        return _decorator

    def _parse(self, request, kwargs, extra_kwargs=None):
        """Extract the raw value via ``self.lookup``, convert it with the
        registered converter and store it in *kwargs* under
        ``self.related_name``.

        Raises
        ------
        ValidationError
            If the type conversion fails.
        """
        converter = ConverterRegistry.get(self.type)
        value = self.lookup(request, self.name, self.default, kwargs, extra_kwargs)
        try:
            if self.many:
                # Split a delimited string into items; None means no value.
                if isinstance(value, six.string_types):
                    values = value.split(self.separator)
                elif value is None:
                    values = []
                else:
                    values = value
                converted_value = [converter.convert(self.name, _value) for _value in values]
            else:
                converted_value = converter.convert(self.name, value)
        except ValidationError as e:
            raise e
        except Exception as e:
            # BUG FIX: ``e.message`` does not exist on Python 3 exceptions,
            # so conversion failures raised an AttributeError that masked
            # the real error; format the exception itself instead.
            raise ValidationError('Type Convert error: %s' % e)
        kwargs[self.related_name] = converted_value
# Convenience shortcuts binding ``param`` to each lookup source.
GET = partial(param, lookup=_get_lookup)
POST = partial(param, lookup=_post_lookup)
FILE = partial(param, type='file', lookup=_file_lookup)
POST_OR_GET = partial(param, lookup=_post_or_get_lookup)
HEADER = partial(param, lookup=_header_lookup)
URI = partial(param, lookup=_uri_lookup)
| {
"repo_name": "romain-li/django-validator",
"path": "django_validator/decorators.py",
"copies": "1",
"size": "5695",
"license": "mit",
"hash": -4200912720367004700,
"line_mean": 34.59375,
"line_max": 114,
"alpha_frac": 0.6087796313,
"autogenerated": false,
"ratio": 4.353975535168196,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005052698094111644,
"num_lines": 160
} |
from functools import wraps, partial
import warnings
from .list_ import iterate_items
__all__ = [
'memoized', 'memoized_property', 'memoized_method',
'assert_hashable', 'deprecated',
]
def assert_hashable(*args, **kw):
    """ Verify that each argument is hashable.

    Passes silently if successful. Raises descriptive TypeError otherwise.

    Example::

        >>> assert_hashable(1, 'foo', bar='baz')
        >>> assert_hashable(1, [], baz='baz')
        Traceback (most recent call last):
          ...
        TypeError: Argument in position 1 is not hashable: []
        >>> assert_hashable(1, 'foo', bar=[])
        Traceback (most recent call last):
          ...
        TypeError: Keyword argument 'bar' is not hashable: []
    """
    try:
        # hash() raises TypeError for unhashable values; the loop variables
        # (i, arg) still reference the offending argument in the handler.
        for i, arg in enumerate(args):
            hash(arg)
    except TypeError:
        raise TypeError('Argument in position %d is not hashable: %r' % (i, arg))
    try:
        # Same pattern for keyword values (key, val survive into the handler).
        for key, val in iterate_items(kw):
            hash(val)
    except TypeError:
        raise TypeError('Keyword argument %r is not hashable: %r' % (key, val))
def _memoized_call(fn, cache, *args, **kw):
key = (args, tuple(sorted(kw.items())))
try:
is_cached = key in cache
except TypeError as e:
# Re-raise a more descriptive error if it's a hashing problem.
assert_hashable(*args, **kw)
# If it hasn't raised by now, then something else is going on,
# raise it. (This shouldn't happen.)
raise e
if not is_cached:
cache[key] = fn(*args, **kw)
return cache[key]
def memoized(fn=None, cache=None):
    """ Memoize a function into an optionally-specificed cache container.

    If the `cache` container is not specified, then the instance container is
    accessible from the wrapped function's `memoize_cache` property.

    Example::

        >>> @memoized
        ... def foo(bar):
        ...     print("Not cached.")
        >>> foo(1)
        Not cached.
        >>> foo(1)
        >>> foo(2)
        Not cached.

    Example with a specific cache container (in this case, the
    ``RecentlyUsedContainer``, which will only store the ``maxsize`` most
    recently accessed items)::

        >>> from unstdlib.standard.collections_ import RecentlyUsedContainer
        >>> lru_container = RecentlyUsedContainer(maxsize=2)
        >>> @memoized(cache=lru_container)
        ... def baz(x):
        ...     print("Not cached.")
        >>> baz(1)
        Not cached.
        >>> baz(1)
        >>> baz(2)
        Not cached.
        >>> baz(3)
        Not cached.
        >>> baz(2)
        >>> baz(1)
        Not cached.
        >>> # Notice that the '2' key remains, but the '1' key was evicted from
        >>> # the cache.
    """
    if fn:
        # This is a hack to support both @memoize and @memoize(...)
        return memoized(cache=cache)(fn)
    if cache is None:
        cache = {}
    def decorator(fn):
        # Bind fn and its cache into _memoized_call, then copy fn's metadata.
        wrapped = wraps(fn)(partial(_memoized_call, fn, cache))
        # Expose the container so callers can inspect or clear it.
        wrapped.memoize_cache = cache
        return wrapped
    return decorator
# `memoized_property` is lovingly borrowed from @zzzeek, with permission:
# https://twitter.com/zzzeek/status/310503354268790784
class memoized_property(object):
""" A read-only @property that is only evaluated once. """
def __init__(self, fget, doc=None, name=None):
self.fget = fget
self.__doc__ = doc or fget.__doc__
self.__name__ = name or fget.__name__
def __get__(self, obj, cls):
if obj is None:
return self
obj.__dict__[self.__name__] = result = self.fget(obj)
return result
def memoized_method(method=None, cache_factory=None):
    """ Memoize a class's method.

    Arguments are similar to to `memoized`, except that the cache container is
    specified with `cache_factory`: a function called with no arguments to
    create the caching container for the instance.

    Note that, unlike `memoized`, the result cache will be stored on the
    instance, so cached results will be deallocated along with the instance.

    Example::

        >>> class Person(object):
        ...     def __init__(self, name):
        ...         self._name = name
        ...     @memoized_method
        ...     def get_name(self):
        ...         print("Calling get_name on %r" %(self._name, ))
        ...         return self._name
        >>> shazow = Person("shazow")
        >>> shazow.get_name()
        Calling get_name on 'shazow'
        'shazow'
        >>> shazow.get_name()
        'shazow'
        >>> shazow._get_name_cache
        {((), ()): 'shazow'}

    Example with a specific cache container::

        >>> from unstdlib.standard.collections_ import RecentlyUsedContainer
        >>> class Foo(object):
        ...     @memoized_method(cache_factory=lambda: RecentlyUsedContainer(maxsize=2))
        ...     def add(self, a, b):
        ...         print("Calling add with %r and %r" %(a, b))
        ...         return a + b
        >>> foo = Foo()
        >>> foo.add(1, 1)
        Calling add with 1 and 1
        2
        >>> foo.add(1, 1)
        2
        >>> foo.add(2, 2)
        Calling add with 2 and 2
        4
        >>> foo.add(3, 3)
        Calling add with 3 and 3
        6
        >>> foo.add(1, 1)
        Calling add with 1 and 1
        2
    """
    if method is None:
        # Called as @memoized_method(cache_factory=...): return a decorator.
        return lambda f: memoized_method(f, cache_factory=cache_factory)
    cache_factory = cache_factory or dict
    @wraps(method)
    def memoized_method_property(self):
        # Evaluated once per instance (via memoized_property below): build
        # the per-instance cache, expose it as self._<name>_cache, and
        # return a memoizing callable bound to this instance.
        cache = cache_factory()
        cache_attr = "_%s_cache" %(method.__name__, )
        setattr(self, cache_attr, cache)
        result = partial(
            _memoized_call,
            partial(method, self),
            cache
        )
        result.memoize_cache = cache
        return result
    return memoized_property(memoized_method_property)
def deprecated(message, exception=PendingDeprecationWarning):
    """Throw a warning when a function/method will be soon deprecated

    Supports passing a ``message`` and an ``exception`` class
    (uses ``PendingDeprecationWarning`` by default). This is useful if you
    want to alternatively pass a ``DeprecationWarning`` exception for already
    deprecated functions/methods.

    Example::

        >>> import warnings
        >>> from functools import wraps
        >>> message = "this function will be deprecated in the near future"
        >>> @deprecated(message)
        ... def foo(n):
        ...     return n+n
        >>> with warnings.catch_warnings(record=True) as w:
        ...     warnings.simplefilter("always")
        ...     foo(4)
        ...     assert len(w) == 1
        ...     assert issubclass(w[-1].category, PendingDeprecationWarning)
        ...     assert message == str(w[-1].message)
        ...     assert foo.__name__ == 'foo'
        8
    """
    def wrap_deprecated(func):
        @wraps(func)
        def warn_then_call(*args, **kwargs):
            # stacklevel=2 points the warning at the *caller* of the
            # deprecated function, not at this wrapper.
            warnings.warn(message, exception, stacklevel=2)
            return func(*args, **kwargs)
        return warn_then_call
    return wrap_deprecated
if __name__ == "__main__":
    # Run the embedded doctests when this module is executed directly.
    import doctest
    doctest.testmod(optionflags=doctest.ELLIPSIS)
| {
"repo_name": "shazow/unstdlib.py",
"path": "unstdlib/standard/functools_.py",
"copies": "1",
"size": "7235",
"license": "mit",
"hash": -6111854586342349000,
"line_mean": 29.3991596639,
"line_max": 88,
"alpha_frac": 0.5695922598,
"autogenerated": false,
"ratio": 4.064606741573034,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00017625880473074706,
"num_lines": 238
} |
from functools import wraps, partial
import warnings
# Expose the client version under several conventional aliases.
Version, version, __version__, VERSION = ('0.9.2',) * 4

# Default HTTP headers sent with every API request; the client-lib /
# version-number pair identifies this client to the server.
JSON_HEADERS = {
    'Content-type': 'application/json',
    'Accept': 'application/json',
    'client-lib': 'python',
    'version-number': VERSION
}
from indicoio.text.twitter_engagement import twitter_engagement
from indicoio.text.sentiment import political, posneg, sentiment_hq
from indicoio.text.sentiment import posneg as sentiment
from indicoio.text.lang import language
from indicoio.text.tagging import text_tags
from indicoio.text.keywords import keywords
from indicoio.text.ner import named_entities
from indicoio.images.fer import fer
from indicoio.images.features import facial_features
from indicoio.images.faciallocalization import facial_localization
from indicoio.images.features import image_features
from indicoio.images.filtering import content_filtering
from indicoio.utils.multi import analyze_image, analyze_text, intersections
from indicoio.config import API_NAMES
def deprecation_decorator(f, api):
    """Wrap *f* so every call warns that the ``batch_<api>`` alias is
    deprecated in favour of calling *api* directly."""
    # The message is invariant for a given api, so build it once.
    notice = ("'batch_" + api + "' will be deprecated in the next major "
              "update. Please call '" + api + "' instead with the same "
              "arguments.")
    @wraps(f)
    def emit_warning_and_call(*args, **kwargs):
        warnings.warn(notice, DeprecationWarning)
        return f(*args, **kwargs)
    return emit_warning_and_call
def detect_batch_decorator(f):
    """Turn a list first positional argument into an implicit
    ``batch=True`` call of *f*."""
    @wraps(f)
    def dispatch(*args, **kwargs):
        first_arg = args[0]
        if isinstance(first_arg, list):
            kwargs['batch'] = True
        return f(*args, **kwargs)
    return dispatch
# For every configured API endpoint, replace the module-level function with a
# batch-detecting wrapper and publish a deprecated ``batch_<name>`` alias that
# forces batch mode.
apis = dict((api, globals().get(api)) for api in API_NAMES)
for api in apis:
    globals()[api] = detect_batch_decorator(apis[api])
    globals()['batch_' + api] = partial(deprecation_decorator(apis[api], api), batch=True)
| {
"repo_name": "wassname/IndicoIo-python",
"path": "indicoio/__init__.py",
"copies": "1",
"size": "1775",
"license": "mit",
"hash": 2599833600839904000,
"line_mean": 33.8039215686,
"line_max": 139,
"alpha_frac": 0.7138028169,
"autogenerated": false,
"ratio": 3.6298568507157465,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4843659667615746,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
import warnings
# Expose the client version under several conventional aliases.
Version, version, __version__, VERSION = ('0.9.3',) * 4

# Default HTTP headers sent with every API request; the client-lib /
# version-number pair identifies this client to the server.
JSON_HEADERS = {
    'Content-type': 'application/json',
    'Accept': 'application/json',
    'client-lib': 'python',
    'version-number': VERSION
}
from indicoio.text.twitter_engagement import twitter_engagement
from indicoio.text.sentiment import political, posneg, sentiment_hq
from indicoio.text.sentiment import posneg as sentiment
from indicoio.text.lang import language
from indicoio.text.tagging import text_tags
from indicoio.text.keywords import keywords
from indicoio.text.ner import named_entities
from indicoio.images.fer import fer
from indicoio.images.features import facial_features, image_features
from indicoio.images.faciallocalization import facial_localization
from indicoio.images.recognition import image_recognition
from indicoio.images.filtering import content_filtering
from indicoio.utils.multi import analyze_image, analyze_text, intersections
from indicoio.config import API_NAMES
def deprecation_decorator(f, api):
    """Wrap *f* so each call warns that the ``batch_<api>`` alias is
    deprecated in favour of calling *api* directly."""
    @wraps(f)
    def warner(*args, **kwargs):
        warnings.warn(
            "'batch_%s' will be deprecated in the next major update. "
            "Please call '%s' instead with the same arguments." % (api, api),
            DeprecationWarning
        )
        return f(*args, **kwargs)
    return warner
def detect_batch_decorator(f):
@wraps(f)
def wrapper(*args, **kwargs):
if isinstance(args[0], list):
kwargs['batch'] = True
return f(*args, **kwargs)
return wrapper
# For every configured API endpoint, replace the module-level function with a
# batch-detecting wrapper and publish a deprecated ``batch_<name>`` alias that
# forces batch mode.
apis = dict((api, globals().get(api)) for api in API_NAMES)
for api in apis:
    globals()[api] = detect_batch_decorator(apis[api])
    globals()['batch_' + api] = partial(deprecation_decorator(apis[api], api), batch=True)
| {
"repo_name": "madisonmay/IndicoIo-python",
"path": "indicoio/__init__.py",
"copies": "2",
"size": "1797",
"license": "mit",
"hash": -589688423046982700,
"line_mean": 34.2352941176,
"line_max": 139,
"alpha_frac": 0.7156371731,
"autogenerated": false,
"ratio": 3.6303030303030304,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.534594020340303,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, partial
class _CtxImpl:
    """Per-run bag of input slots; ``_has_buffered`` records the names of the
    slots that still hold unconsumed data."""
    def __init__(self):
        self._has_buffered = set()
class _Context:
    """Run-step context shared by @process_slot and the run-policy decorators.

    ``_parsed`` / ``_checked`` track whether the decorator chain has been
    validated; ``_slot_policy`` and ``_slot_expr`` accumulate the run
    condition (a policy name plus groups of slot names).
    """
    def __init__(self):
        self._impl = _CtxImpl()
        self._parsed = False
        self._checked = False
        self._slot_policy = None
        self._slot_expr = []
    def reset(self):
        # Discard the per-run slot bag; policy/expression state is kept.
        self._impl = _CtxImpl()
    def __enter__(self):
        self._parsed = True
        # Entering the context is only legal once a @run_if_.../@run_always
        # decorator has validated the chain.
        if not self._checked:
            raise ValueError("mandatory @run_if_... decorator is missing!")
        return self._impl
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.reset()
def process_slot(*names, reset_if=('update', 'delete'), reset_cb=None):
    """Decorator factory for ``run_step``: manage the input slots *names*.

    Before the wrapped ``run_step`` runs, every named slot is inspected; if
    any slot saw an event listed in *reset_if* (``'update'`` and/or
    ``'delete'``), **all** named slots are reset and *reset_cb* -- either a
    callable or the name of a method on the module -- is invoked.  The slots
    are then published as attributes of ``self.context._impl`` and the names
    of slots holding buffered data are recorded in ``_has_buffered``.
    """
    # Normalise reset_if to a tuple of valid event names.
    if isinstance(reset_if, str):
        assert reset_if in ('update', 'delete')
        reset_if = (reset_if,)
    elif not reset_if:
        reset_if = tuple()
    else:
        assert set(reset_if) == set(('update', 'delete'))
    def run_step_decorator(run_step_):
        """
        run_step() decorator
        """
        @wraps(run_step_)
        def run_step_wrapper(self, run_number, step_size, howlong):
            """
            Slot bookkeeping performed around the real run_step().
            """
            if self.context is None:
                self.context = _Context()
            reset_all = False
            # Check whether at least one slot fulfils the reset condition.
            for name in names:
                slot = self.get_input_slot(name)
                if ('update' in reset_if and slot.updated.any()) or\
                   ('delete' in reset_if and slot.deleted.any()):
                    reset_all = True
                    break
            # If so, then *all* named slots are reset.
            if reset_all:
                for name in names:
                    slot = self.get_input_slot(name)
                    slot.reset()
                    slot.update(run_number)
                if isinstance(reset_cb, str):
                    getattr(self, reset_cb)()
                elif reset_cb is not None:
                    reset_cb(self)
            # All slots are added to the context.
            for name in names:
                slot = self.get_input_slot(name)
                setattr(self.context._impl, name, slot)
                if slot.has_buffered():
                    self.context._impl._has_buffered.add(name)
            # NB: "run_step_" is part of the closure.
            calc = run_step_(self, run_number, step_size, howlong)
            return calc
        return run_step_wrapper
    return run_step_decorator
_RULES = dict(run_if_all="or_if_all", run_if_any="and_if_any", run_always="run_always")
_INV_RULES = {v:k for (k, v) in _RULES.items()}
def accepted_first(s):
return s in _RULES
def _slot_policy_rule(decname, *slots_maybe):
    """Build the run-policy decorator named *decname* (closure over slots).

    Supports both ``@run_if_all`` (bare) and ``@run_if_all('a', 'b')``
    (with slot-name arguments): when the single extra argument is callable,
    the decorator was applied bare and wraps it directly.
    """
    called_with_args = (not slots_maybe) or isinstance(slots_maybe[0], str)
    slots = slots_maybe if called_with_args else tuple([])
    assert called_with_args or callable(slots_maybe[0])
    def decorator_(to_decorate):
        """
        this is the decorator. it combines the decoration
        with the function to be decorated
        """
        # _hidden_progressivis_attr marks an already-wrapped function, i.e.
        # we are above the genuine run_step somewhere in a decorator stack.
        has_hidden_attr = hasattr(to_decorate, "_hidden_progressivis_attr")
        @wraps(to_decorate)
        def decoration_(self, *args, **kwargs):
            """
            this function makes the decoration
            """
            if self.context is None:
                raise ValueError("context not found. consider processing slots before")
            if not self.context._parsed:
                # First pass: validate the decorator chain order and
                # accumulate the slot expression.
                if self.context._slot_policy is None:
                    if not accepted_first(decname):
                        raise ValueError(f"{decname} must follow {_INV_RULES[decname]}")
                    self.context._slot_policy = decname
                elif (self.context._slot_policy == "run_always" or
                      decname != _RULES[self.context._slot_policy]):  # first exists and is not compatible
                    raise ValueError(f"{decname} cannot follow {self.context._slot_policy}")
                elif self.context._slot_expr == [tuple()]:
                    raise ValueError(f"{decname} without arguments must be unique")
                elif not accepted_first(decname) and not slots:
                    raise ValueError(f"{decname} requires arguments")
                self.context._slot_expr.append(slots)
            if not has_hidden_attr:  # i.e. to_decorate is the genuine run_step
                self.context._parsed = True
                self.context._checked = True
                # Skip the step entirely when the slot policy is not met.
                if not run_step_required(self):
                    return self._return_run_step(self.state_blocked, steps_run=0)
            return to_decorate(self, *args, **kwargs)
        decoration_._hidden_progressivis_attr = True
        return decoration_
    if called_with_args:
        return decorator_
    return decorator_(slots_maybe[0])
# Public run-policy decorators: each chain must start with run_if_all,
# run_if_any or run_always; or_all / and_any continue a chain of the
# matching kind (see _RULES).
run_if_all = partial(_slot_policy_rule, "run_if_all")
or_all = partial(_slot_policy_rule, "or_if_all")
run_if_any = partial(_slot_policy_rule, "run_if_any")
and_any = partial(_slot_policy_rule, "and_if_any")
run_always = partial(_slot_policy_rule, "run_always")
def run_step_required(self_):
    """Decide whether the module's run_step should execute, given the
    accumulated slot policy and which slots currently hold buffered data."""
    ctx = self_.context
    policy = ctx._slot_policy
    slot_expr = ctx._slot_expr
    if slot_expr == [tuple()]:
        # Bare decorator: the expression covers every input slot but _params.
        slot_expr = [[k for k in self_.input_descriptors.keys() if k != '_params']]
        ctx._slot_expr = slot_expr
    buffered = ctx._impl._has_buffered
    if policy == "run_if_all":
        # Disjunction of conjunctions: some group has all its slots buffered.
        return any(all(name in buffered for name in group)
                   for group in slot_expr)
    if policy == "run_if_any":
        # Conjunction of disjunctions: every group has some slot buffered.
        return all(any(name in buffered for name in group)
                   for group in slot_expr)
    if policy == "run_always":
        return True
    raise ValueError("Unknown slot policy")
| {
"repo_name": "jdfekete/progressivis",
"path": "progressivis/core/decorators.py",
"copies": "1",
"size": "6794",
"license": "bsd-2-clause",
"hash": 2029157158506172400,
"line_mean": 38.0459770115,
"line_max": 108,
"alpha_frac": 0.5419487783,
"autogenerated": false,
"ratio": 4.005896226415095,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.004649465113794712,
"num_lines": 174
} |
from functools import wraps, partial
from seamus.exceptions import SeamusException
from seamus.seamus import Seamus
# Keyword arguments consumed by the @seamus decorator itself; every other
# entry of **dkwargs is forwarded to the Seamus runner constructor.
REFACTORED_FUNC = 'refactored_func'
FACTORY = 'factory'
DECORATOR_ARGS = [REFACTORED_FUNC, FACTORY]
def seamus(func=None, **dkwargs):
    """
    Run seamus test with the supplied arguments
    :return: The result returned by original function
    """
    if func is None:
        # Decorator applied with arguments: defer until the function arrives.
        return partial(seamus, **dkwargs)
    @wraps(func)
    def wrapper(*args, **kwargs):
        refactored = dkwargs.get(REFACTORED_FUNC, None)
        if not refactored:
            raise SeamusException(
                'Refactored function can\'t be None. Are you missing the \'{}\' argument?'.format(
                    REFACTORED_FUNC))
        factory = dkwargs.get(FACTORY, None)
        # Everything except the decorator's own keywords goes to Seamus().
        runner_kwargs = dict((k, v) for k, v in dkwargs.items()
                             if k not in DECORATOR_ARGS)
        runner = factory() if factory else Seamus(**runner_kwargs)
        runner.use(func, *args, **kwargs)
        runner.test(refactored, *args, **kwargs)
        return runner.run()
    return wrapper
| {
"repo_name": "nerandell/seamus",
"path": "seamus/decorator.py",
"copies": "1",
"size": "1204",
"license": "mit",
"hash": -231959335567915330,
"line_mean": 30.6842105263,
"line_max": 102,
"alpha_frac": 0.6013289037,
"autogenerated": false,
"ratio": 3.7275541795665634,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48288830832665636,
"avg_score": null,
"num_lines": null
} |
import json
from datetime import datetime
from functools import wraps

from django.conf import settings
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.utils.decorators import method_decorator

from app.models import Users, Tokens, Providers, Consumers, Carers, ItExperience
from app.openam import openam
def loginRequired(function):
"""
Check if the user is connected
Decorator for function-based views
"""
def wrap(request, *args, **kwargs):
# If logout
if 'id' not in request.session.keys() and 'username' not in request.session.keys():
request.session.flush()
return redirect(reverse('login_page'), permanent=True)
if settings.OPENAM_INTEGRATION:
ows = openam.OpenamAuth()
url = ows.getAuthorizeURL()
username = request.session['username']
tokenInstance = Tokens.objects.get(user_id=request.session['id'])
if not tokenInstance:
if settings.DEBUG:
print 'Access token is required'
# redirect initially to AOD inform page - 401
request.session.flush()
return redirect(reverse('login_page'), permanent=True)
accessToken = tokenInstance.access_token
refreshToken = tokenInstance.refresh_token
accessTokenStatus, response= ows.validateAccessToken(accessToken)
# if access_token has expired
if int(accessTokenStatus) not in [200]:
refreshTokenStatus, refreshTokenData = ows.refreshExpiredAccessToken(refreshToken)
# if refresh_token is still valid
if int(refreshTokenStatus) in [200]:
refreshTokenJsonData= json.loads(refreshTokenData)
accessToken = refreshTokenJsonData['access_token']
# Retrieve the fully profile
fullProfileStatus, fullProfileData = ows.retrieveFullyProfile(accessToken)
fullProfileJson = json.loads(fullProfileData)
# Retrieve the list of roles
roles = []
rolesListStatus, rolesListData = ows.retrieveRolesList(accessToken)
rolesJson = json.loads(rolesListData)
if int(rolesListStatus) == 200:
for i in rolesJson:
roles.append(i['application_role']['role'].values()[0])
# Update all
Users.objects.filter(username__exact=username).update(
name=fullProfileJson["name"],
lastname=fullProfileJson["surname"],
gender=fullProfileJson['gender'],
email=fullProfileJson['mail'],
mobile=fullProfileJson['phone'],
country=fullProfileJson["country"],
city=fullProfileJson["city"],
address=fullProfileJson["address"],
postal_code=fullProfileJson["postcode"],
experience=ItExperience.objects.get(level__iexact=fullProfileJson['skills']),
last_login=datetime.today()
)
user = Users.objects.get(username__exact=username)
rolesList = ["service_provider", "service_consumer", "carer"]
if type(roles) is list:
for i in rolesList:
if i in roles:
if i in ["service_provider"]:
provider = Providers.objects.filter(user_id=user.id).update(is_active=True, company="Not set")
if i in ["service_consumer"]:
consumer = Consumers.objects.filter(user_id=user.id).update(crowd_fund_notification=False, crowd_fund_participation=False, is_active=True)
if i in ["carer"]:
carer = Carers.objects.filter(user_id=user.id).update(is_active=True)
else:
if i in ["service_provider"]:
provider = Providers.objects.filter(user_id=user.id).update(is_active=False)
if i in ["service_consumer"]:
consumer = Consumers.objects.filter(user_id=use.id).update(is_active=False)
if i in ["carer"]:
carer = Carers.objects.filter(user_id=user.id).update(is_active=False)
Tokens.objects.filter(user_id=user.id).update(
access_token=refreshTokenJsonData['access_token'],
refresh_token=refreshTokenJsonData['refresh_token'],
expires_in=refreshTokenJsonData['expires_in'],
scope=refreshTokenJsonData['scope'],
token_type=refreshTokenJsonData['token_type']
)
request.session.flush()
request.session['id'] = user.id
request.session['username'] = username
request.session['cart'] = []
request.session['is_provider'] = Providers.objects.get(user_id=user.id).is_active
request.session['is_consumer'] = Consumers.objects.get(user_id=user.id).is_active
request.session['is_carer'] = Carers.objects.get(user_id=user.id).is_active
else:
# redirect initially to AOD inform page - 401
request.session.flush()
return redirect(reverse('login_page'), permanent=True)
return function(request, *args, **kwargs)
wrap.__doc__=function.__doc__
wrap.__name__=function.__name__
return wrap
def loginRequiredView(View):
    """
    Check if the user is connected
    Decorator for class-based views
    """
    # Guard the class's dispatch entry point with the function decorator.
    guarded_dispatch = method_decorator(loginRequired)(View.dispatch)
    View.dispatch = guarded_dispatch
    return View
"repo_name": "silop4all/aod",
"path": "AssistanceOnDemand/app/decorators.py",
"copies": "1",
"size": "6399",
"license": "apache-2.0",
"hash": 5219670525684994000,
"line_mean": 48.0078125,
"line_max": 174,
"alpha_frac": 0.5302390999,
"autogenerated": false,
"ratio": 4.829433962264151,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.585967306216415,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from flask import current_app, request, session, redirect
from werkzeug.local import LocalProxy
import requests
import urllib
__version__ = "1.0"
# Template for the external-login redirect; the querystring carries the
# URL-encoded callback back into this application.
OLINAPPS_STR = 'http://olinapps.com/external?%s'
class OlinAuth(object):
    """Flask extension wiring Olin Apps single sign-on into an application.

    Registers /olinauthlogin and /olinauthlogout routes on the app.
    """
    def __init__(self, app=None, host_name=None):
        # Support both immediate and deferred (init_app) initialisation.
        if app is not None and host_name is not None:
            self.init_app(app, host_name)
        else:
            self.app = None
    def init_app(self, app, host_name):
        """Attach this extension to *app*.

        :param host_name: public host of this app, used to generate the
            callback url handed to olinapps.com.
        """
        app.olin_auth = self
        self.host_name = host_name  # used to generate callback url
        app.add_url_rule('/olinauthlogin', view_func=OlinAuth.__login, methods=['GET', 'POST'])
        app.add_url_rule('/olinauthlogout', view_func=OlinAuth.__logout, methods=['GET', 'POST'])
    @staticmethod
    def __login():
        """Handles the POST from Olin Auth's callback, and redirects to the original
        destination specified in the :destination: querystring.
        """
        if request.method == 'POST':
            # External login.
            if load_session():
                return redirect(request.args.get("destination") or "/")
            else:
                # Invalid session id: drop it and ask the user to log in.
                session.pop('sessionid', None)
                return "Please authenticate with Olin Apps to view."
    @staticmethod
    def __logout():
        """ Provides a logout view to the application, removing the user from the
        session and removing the session id. TODO: tell olinapps to deauth user?
        """
        session.pop('sessionid', None)
        session.pop('user', None)
        return redirect('/')
def load_session():
    """Returns an OlinAuth user dict and stores the sessionid and user in
    this application's session.

    TODO: support caching? When the sessionid is specified as a url argument,
    this application will make a new web request each time, which is a waste.
    Perhaps we can check if there exists a cache on the app and use it.
    """
    # Accept the session id from either the POSTed form or the querystring.
    sessionid = request.form.get('sessionid') or request.args.get('sessionid')
    if not sessionid:
        return None
    r = requests.get('http://olinapps.com/api/me',
                     params={"sessionid": sessionid})
    # Only persist the session when olinapps confirms it and returns a user.
    if r.status_code == 200 and r.json() and 'user' in r.json():
        session['sessionid'] = sessionid
        session['user'] = r.json()['user']
        return r.json()
    return None
def get_session_user():
    """Return the logged-in user dict, loading it from the ``sessionid``
    request parameter when it is not already present in the session."""
    cached_user = session.get('user', None)
    if cached_user:
        return cached_user
    # Not in the session yet: try to establish it from the request.
    return session.get('user', None) if load_session() else None
def get_session_email():
    """Return the current user's email as ``id@domain``, or None when no
    user is logged in."""
    userinfo = get_session_user()
    if not userinfo:
        return None
    return '@'.join([str(userinfo['id']), str(userinfo['domain'])])
def logout_user():
    '''Logs out the current_user, removing their information from the session. TODO: notify olinapps?'''
    for stale_key in ('sessionid', 'user'):
        session.pop(stale_key, None)
def auth_required(fn):
"""
If you decorate a view with this, it will ensure that the current user is
logged in and authenticated before calling the actual view.
@app.route("/trees", methods=['GET'])
@auth_required
def get_trees():
return "no trees"
:param fn: The view function to decorate.
"""
@wraps(fn)
def decorated_view(*args, **kwargs):
if current_user:
return fn(*args, **kwargs)
else: # TODO: support SSL?
# callback to pass to olinapps, urlencode original url to allow
# a user to be redirected to the original view they were accessing
cbstring = "http://%s/olinauthlogin?destination=%s" % (
current_app.olin_auth.host_name,
request.url
)
return redirect(OLINAPPS_STR % urllib.urlencode({
"callback": cbstring
})
)
return decorated_view
# Request-scoped proxy that resolves lazily, on each access, to the
# currently logged-in user dict (or None).
current_user = LocalProxy(lambda: get_session_user())
| {
"repo_name": "corydolphin/flask-olinauth",
"path": "flask_olinauth.py",
"copies": "1",
"size": "4205",
"license": "mit",
"hash": -1259385816238112300,
"line_mean": 31.373015873,
"line_max": 104,
"alpha_frac": 0.594530321,
"autogenerated": false,
"ratio": 4.159248269040554,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5253778590040554,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from flask import Flask, make_response
def headers(headerDict={}, **headerskwargs):
    '''
    This function is the decorator which is used to wrap a Flask route with.
    Either pass a dictionary of headers to be set as the headerDict keyword
    argument, or pass header values as keyword arguments. Or, do both :-)

    The key and value of items in a dictionary will be converted to strings
    using the `str` method, ensure both keys and values are serializable thusly.

    :param headerDict: A dictionary of headers to be injected into the response
        headers. Note, the supplied dictionary is first copied then mutated.
    :type headerDict: dict
    :param headerskwargs: The headers to be injected into the response headers.
    :type headerskwargs: identical to the `dict` constructor.
    '''
    # Copy headerDict and overlay the keyword arguments in one step.
    merged_headers = dict(headerDict, **headerskwargs)
    def decorator(f):
        @wraps(f)
        def with_injected_headers(*args, **kwargs):
            response = make_response(f(*args, **kwargs))
            for header_name, header_value in merged_headers.items():
                response.headers[str(header_name)] = str(header_value)
            return response
        return with_injected_headers
    return decorator
"repo_name": "corydolphin/flask-headers",
"path": "flask_headers.py",
"copies": "1",
"size": "1298",
"license": "mit",
"hash": 3102774932852997600,
"line_mean": 39.935483871,
"line_max": 81,
"alpha_frac": 0.6594761171,
"autogenerated": false,
"ratio": 4.669064748201439,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.011206374629949438,
"num_lines": 31
} |
from functools import wraps
from flask import redirect,url_for,abort,current_app,request
from flask.ext.security import current_user
from datetime import datetime,date, timedelta
import time
def format_url(url):
    """Add http to the URL if not already there.

    Returns *url* unchanged when it already carries an ``http://`` or
    ``https://`` scheme, otherwise prefixes ``http://``.

    Fix: the previous condition (`not startswith('http') or not
    startswith('https')`) was true for every plain-http URL -- an
    ``https`` URL also starts with ``http``, so the second clause fired
    for all non-https input -- turning ``http://example.com`` into
    ``http://http://example.com``.
    """
    if not url.startswith(('http://', 'https://')):
        return '%s%s' % ('http://', url)
    return url
def client_required(f):
    """View decorator: allow only authenticated users with the client role.

    Anonymous users are redirected to the login page (with ``next`` set to
    the requested URL); authenticated non-clients receive a 401.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not current_user.is_authenticated():
            current_app.logger.debug("Client URL :%s called by a user not logged in" % request.url)
            return redirect(url_for('security.login', next=request.url))
        if not current_user.check_client():
            current_app.logger.debug("Client URL :%s called by a userID :%s who is not a client " % (request.url, current_user.id))
            return abort(401)
        return f(*args, **kwargs)
    return decorated_function
def admin_required(f):
    """View decorator: allow only authenticated users with the admin role.

    Anonymous users are redirected to the login page (with ``next`` set to
    the requested URL); authenticated non-admins receive a 401.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not current_user.is_authenticated():
            current_app.logger.debug("Admin URL :%s called by a user not logged in" % request.url)
            return redirect(url_for('security.login', next=request.url))
        if not current_user.check_admin():
            current_app.logger.debug("Admin URL :%s called by a userID :%s who is not an admin " % (request.url, current_user.id))
            return abort(401)
        return f(*args, **kwargs)
    return decorated_function
def register_api(bp, view, endpoint, url, security_wrap):
    ''' Register API views for a serveradmin element'''
    # One wrapped view serves the list (GET), create (POST) and item-level
    # (GET/POST/DELETE) routes; `security_wrap` applies access control.
    view_func = security_wrap(view.as_view(endpoint))
    bp.add_url_rule(url, defaults={'id': None},
                    view_func=view_func, methods=['GET', ])
    bp.add_url_rule(url, defaults={'id': None}, view_func=view_func, methods=['POST', ])
    bp.add_url_rule(url + '<int:id>', view_func=view_func, methods=['GET', 'POST', 'DELETE'])
def pretty_date(dt, default=None):
    """
    Returns string representing "time since" e.g.
    3 days ago, 5 hours ago etc.
    Ref: https://bitbucket.org/danjac/newsmeme/src/a281babb9ca3/newsmeme/

    Fix: use floor division (``//``) so the period counts stay integers
    under Python 3 as well.  With true division every count becomes a
    non-zero float (e.g. 3/365 == 0.008...), which is truthy, so even a
    seconds-old timestamp rendered as "0 years ago".  ``//`` behaves
    identically to py2's integer ``/`` here.
    """
    if default is None:
        default = 'just now'
    now = datetime.utcnow()
    diff = now - dt
    # Largest unit first: the first non-zero period wins.
    periods = (
        (diff.days // 365, 'year', 'years'),
        (diff.days // 30, 'month', 'months'),
        (diff.days // 7, 'week', 'weeks'),
        (diff.days, 'day', 'days'),
        (diff.seconds // 3600, 'hour', 'hours'),
        (diff.seconds // 60, 'minute', 'minutes'),
        (diff.seconds, 'second', 'seconds'),
    )
    for period, singular, plural in periods:
        if not period:
            continue
        if period == 1:
            return u'%d %s ago' % (period, singular)
        else:
            return u'%d %s ago' % (period, plural)
    return default
def get_dates_between(start_date, end_date):
    '''Returns dates in between given datetime objects
    (both endpoints inclusive).
    '''
    day_count = (end_date - start_date).days
    return [start_date + timedelta(days=offset) for offset in range(day_count + 1)]
| {
"repo_name": "unifispot/unifispot-free",
"path": "bluespot/base/utils/helper.py",
"copies": "1",
"size": "3208",
"license": "mit",
"hash": 2597347342394282500,
"line_mean": 31.4375,
"line_max": 128,
"alpha_frac": 0.59819202,
"autogenerated": false,
"ratio": 3.641316685584563,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9511441904497187,
"avg_score": 0.04561336021747541,
"num_lines": 96
} |
from functools import wraps
from flask import request, send_from_directory
import sqlite3
import hashlib
'''
Dirty auth implementation
'''
def auth_required(f):
    """View decorator: serve the login page unless the request carries a
    valid ``username`` / ``auth-token`` cookie pair.

    The expected token is recomputed from the stored credentials via
    AuthStore and compared with the cookie value.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        token = None
        if 'auth-token' in request.cookies:
            token = request.cookies.get("auth-token")
        username = None
        if 'username' in request.cookies:
            username = request.cookies.get("username")
        # Missing or mismatching credentials: fall back to the login form.
        if token is None or username is None or not (AuthStore().get_token(username) == token):
            return send_from_directory('.', 'login.html')
        return f(*args, **kwargs)
    return decorated
class AuthStore:
    """SQLite-backed credential store behind the 'dirty auth' scheme.

    SECURITY NOTE: passwords are stored in plain text and the auth token is
    an unsalted SHA-224 of ``<username>-<password>-HASH``.  Acceptable for a
    demo, not for production.

    Fixes: ``get_token`` previously leaked its sqlite connection on every
    call (no close on either return path), and hashed a ``str`` directly,
    which raises TypeError under Python 3; the material is now encoded to
    UTF-8 first (a no-op for the ASCII credentials used here on Python 2).
    Connections are closed via try/finally so errors cannot leak them.
    """

    def __init__(self):
        # Database lives in the working directory by default.
        self.db_file = 'app.db'

    def valid_user(self, username, password):
        """Return True when the username/password pair exists.

        Uses a parameterized query, so no SQL injection is possible.
        """
        conn = sqlite3.connect(self.db_file)
        try:
            cur = conn.cursor()
            cur.execute("select 1 from user_login where username = ? and password = ?", (username, password))
            return cur.fetchone() is not None
        finally:
            conn.close()

    def init(self):
        """Create the user_login table if needed and seed the default
        admin/admin01 account when absent."""
        conn = sqlite3.connect(self.db_file)
        try:
            cur = conn.cursor()
            sql = "create table if not exists user_login " \
                  "( " \
                  "   username text primary key, " \
                  "   password text not null, " \
                  "   user_role text not null, " \
                  "   last_login text, " \
                  "   is_active int, " \
                  "   created_ts text" \
                  ")"
            cur.execute(sql)
            if not self.valid_user('admin', 'admin01'):
                sql = "insert into user_login(username, password, user_role, is_active) values(?,?,?,?)"
                cur.execute(sql, ('admin', 'admin01', 'admin', 1))
            conn.commit()
        finally:
            conn.close()

    def get_token(self, username):
        """Return the auth token derived from the stored credentials, or
        None for an unknown user."""
        conn = sqlite3.connect(self.db_file)
        try:
            cur = conn.cursor()
            cur.execute("select password from user_login where username = ?", (username,))
            row = cur.fetchone()
        finally:
            conn.close()
        if row is not None:
            material = ("%s-%s-HASH" % (username, row[0])).encode('utf-8')
            return str(hashlib.sha224(material).hexdigest())
        return None
| {
"repo_name": "Ambalavanar/ferry",
"path": "service/security/api_auth.py",
"copies": "1",
"size": "2244",
"license": "mit",
"hash": 3564546706010002400,
"line_mean": 31,
"line_max": 105,
"alpha_frac": 0.5369875223,
"autogenerated": false,
"ratio": 4.0359712230215825,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.008594581000805804,
"num_lines": 68
} |
from functools import wraps
import importlib
def model_constructor(f):
    """ Wraps the function 'f' which returns the network. An extra field 'constructor' is added to the network returned
    by 'f'. This field contains an instance of the 'NetConstructor' class, which contains the information needed to
    re-construct the network, such as the name of the function 'f', the function arguments etc. Thus, the network can
    be easily constructed from a saved checkpoint by calling NetConstructor.get() function.
    """
    @wraps(f)
    def f_wrapper(*args, **kwds):
        recipe = NetConstructor(f.__name__, f.__module__, args, kwds)
        built = f(*args, **kwds)
        # A (net, extras...) tuple/list: the network is the first element.
        target = built[0] if isinstance(built, (tuple, list)) else built
        target.constructor = recipe
        return built
    return f_wrapper
class NetConstructor:
    """Serializable recipe for rebuilding a network: the name of the builder
    function, the module containing it, and the arguments it was called with.
    Stored alongside network weights so the network can be re-constructed
    from a checkpoint via :meth:`get`."""
    def __init__(self, fun_name, fun_module, args, kwds):
        """
        args:
            fun_name - The function which returns the network
            fun_module - the module which contains the network function
            args - arguments which are passed to the network function
            kwds - arguments which are passed to the network function
        """
        self.fun_name = fun_name
        self.fun_module = fun_module
        self.args = args
        self.kwds = kwds

    def get(self):
        """Re-import the module, look up the builder, and call it with the
        recorded arguments to rebuild the network."""
        module = importlib.import_module(self.fun_module)
        builder = getattr(module, self.fun_name)
        return builder(*self.args, **self.kwds)
| {
"repo_name": "PaddlePaddle/models",
"path": "PaddleCV/tracking/ltr/admin/model_constructor.py",
"copies": "1",
"size": "2104",
"license": "apache-2.0",
"hash": 4402343705075434500,
"line_mean": 41.8333333333,
"line_max": 119,
"alpha_frac": 0.644486692,
"autogenerated": false,
"ratio": 4.467091295116773,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0015252577300691357,
"num_lines": 48
} |
from functools import wraps
import inspect
import json
from flask import Response, current_app, request
__all__ = ('Refract', 'Prism', 'ResponseMapper')
class Refract(Response):
STATUS_OK = 200
DEFAULT_MIMETYPE = 'text/json'
PRISM_VERSION_ATTRIBUTE = 'prism_version'
PRISM_MIMETYPE_ATTRIBUTE = 'prism_mimetype'
    def __init__(self, objects, status=STATUS_OK, as_list=False, mimetype=None, version=None, headers=None):
        """Build a Response whose body is the serialized representation of
        *objects* (a single object or a list of them).

        Mimetype resolution: an explicit *mimetype* argument overrides one
        declared on a representation builder, which overrides 'text/json'.
        """
        super(Refract, self).__init__()
        # Normalise to a list of objects to represent.
        if isinstance(objects, list):
            self.data_objects = objects
        else:
            self.data_objects = [objects]
        self.as_list = as_list
        # Store headers
        if headers:
            self.headers = headers
        # Get mimetype from representations
        mimetype_model_rep = self.get_mimetype_representation()
        if mimetype_model_rep != None:
            self.mimetype = mimetype_model_rep
        else:
            self.mimetype = 'text/json'
        # If mimetype is defined in this response then override
        if mimetype != None:
            self.mimetype = mimetype
        self.data = self.build_response()
        # Accept either a numeric status code or a status string.
        if isinstance(status, (int,)):
            self.status_code = status
        else:
            self.status = status
        # NOTE(review): this final re-assignment clobbers an explicitly
        # passed `mimetype` whenever no builder declared one -- looks
        # unintended; confirm before relying on custom mimetypes here.
        self.mimetype = Refract.DEFAULT_MIMETYPE if mimetype_model_rep == None else self.mimetype
    def get_mimetype_representation(self):
        """Return the first mimetype declared (via the Prism mimetype
        attribute) on any data object's representation builder, or None."""
        for o in self.data_objects:
            func = self.get_representation_builder(o)
            if hasattr(func, Refract.PRISM_MIMETYPE_ATTRIBUTE):
                return getattr(func, Refract.PRISM_MIMETYPE_ATTRIBUTE)
        return None
    def get_representation_builder(self, prism_object, soft_fail=False):
        """Look up the registered representation function for
        *prism_object*'s class (version-aware when serving a blueprint).

        Plain dicts with no registered mapping are returned as-is.  When
        nothing is registered for the class, raise (default) or fall
        through to return None when *soft_fail* is set.
        """
        # Get class name of object
        class_name = prism_object.__class__.__name__
        # Determine if a blueprint is being used
        if request.blueprint != None:
            # Get BP from current app
            bp = current_app.blueprints[request.blueprint]
            # Get prism_version from BP if applicable
            version = getattr(bp, Refract.PRISM_VERSION_ATTRIBUTE) \
                if hasattr(bp, Refract.PRISM_VERSION_ATTRIBUTE) else None
        else:
            version = None
        # Get representation
        func = current_app.ext_prism.lookup_mappings(class_name, version=version)
        if func is None:
            if isinstance(prism_object, dict):
                return prism_object
            if not soft_fail:
                raise Exception('Issue retrieving stored function reference for PRISM mapping on %s object. '
                                'Does this object have an api_response/is the right version defined?' % class_name)
        return func
def get_representation_dict(self, prism_object):
# Get response from builder
if not isinstance(prism_object, dict):
resp = self.get_representation_builder(prism_object)(prism_object)
else:
resp = prism_object
# Look for has_ methods and evaluate
def evaluate_level(items):
# TODO:// Convert and combine, this if statement is poor.
if isinstance(items, dict):
for k, v in items.items():
if isinstance(v, (dict, list)):
# If a dict or a list pass through eval_level again for further processing
evaluate_level(v)
elif isinstance(v, ResponseEvaluator.Killer):
# If killer object, pop this key (I doubt this will ever be hit, but just for safety)
items.pop(k, 0)
elif isinstance(v, ResponseEvaluator):
# If a response evaluator, evaluate
new_val = v.evaluate_for_response(prism_object)
# If new_val is a Killer, pop this key and continue
if isinstance(new_val, ResponseEvaluator.Killer):
items.pop(k, 0)
continue
# If new_val is a list or dict, pass through eval_level again for further processing
if isinstance(new_val, (dict, list)):
evaluate_level(new_val)
items[k] = new_val
elif self.get_representation_builder(v, soft_fail=True) is not None:
new_val = self.get_representation_dict(v)
items[k] = new_val
elif isinstance(items, list):
for i, v in enumerate(items):
if isinstance(v, (dict, list)):
# If a dict or list pass through eval_level again for further processing
evaluate_level(v)
elif isinstance(v, ResponseEvaluator.Killer):
# If a killer object, remove this item from the list
items.remove(v)
elif isinstance(v, ResponseEvaluator):
# If it's a response evaluator, evaluate it
new_val = v.evaluate_for_response(prism_object)
# If new_val is a Killer, remove this value and continue
if isinstance(new_val, ResponseEvaluator.Killer):
items.remove(v)
continue
# If new_val is a list or dict, pass through eval_level again for further processing
if isinstance(new_val, (dict, list)):
evaluate_level(new_val)
items[i] = new_val
elif self.get_representation_builder(v, soft_fail=True) is not None:
new_val = self.get_representation_dict(v)
items[i] = new_val
return items
final = evaluate_level(resp)
return final
def build_response(self):
return_objects = {} if not self.as_list and self.data_objects.__len__() <= 1 else []
if self.data_objects.__len__() > 1:
for o in self.data_objects:
return_objects.append(self.get_representation_dict(o))
elif self.data_objects.__len__() == 1:
r = self.get_representation_dict(self.data_objects[0])
if self.as_list:
return_objects.append(r)
else:
return_objects = r
return json.dumps(return_objects)
class Prism(object):
    """Flask extension that registers per-class API representations and
    access-check methods, later consumed by :class:`Refract` responses."""
    def __init__(self, app=None):
        self.app = app
        self.mapper = ResponseMapper()
        if app is not None:
            app.ext_prism = self
    def init_app(self, app):
        """Deferred-initialisation hook (Flask extension pattern)."""
        self.app = app
        app.ext_prism = self
    def get_calling_class_name(self):
        """Return the class name of the caller's caller.

        NOTE(review): depends on a fixed call depth (``stack[2]``) and on the
        ``str(cls)`` format, so it only works when invoked directly from a
        method of the class of interest — confirm before refactoring.
        """
        stack = inspect.stack()
        the_class = str(stack[2][0].f_locals["self"].__class__).split('.')[1]
        return the_class
    def has_or_none(self, key, value, version=None):
        """Placeholder: ``value`` if the access check for ``key`` passes,
        otherwise ``None``."""
        r = ResponseEvaluator(self, '%s' % (self.get_calling_class_name()), key, ResponseEvaluator.MODE_NONE, value,
                              version=version)
        return r
    def has_or_exclude(self, key, value, version=None):
        """Placeholder: ``value`` if the access check passes, otherwise the
        surrounding key/element is removed from the response."""
        r = ResponseEvaluator(self, self.get_calling_class_name(), key, ResponseEvaluator.MODE_EXCLUDE, value,
                              version=version)
        return r
    def has_or_else(self, key, value, else_value, version=None):
        """Placeholder: ``value`` if the access check passes, otherwise
        ``else_value``."""
        r = ResponseEvaluator(self, self.get_calling_class_name(), key, ResponseEvaluator.MODE_ELSE, value,
                              version=version)
        r.alternative = else_value
        return r
    def api_representation(self, version=None, mimetype=None):
        """
        :param version: The version of this representation as an integer
        :param mimetype: The final mimetype of this response, default is text/json
        """
        def func_decorator(func):
            @wraps(func)
            def process(*args, **kwargs):
                return func(*args, **kwargs)
            # Determine if method was used in a class or not by inspecting the
            # source statement of the enclosing frame (depth-sensitive).
            frames = inspect.stack()
            defined_in_class = False
            first_statment = ''
            if len(frames) > 2:
                maybe_class_frame = frames[2]
                statement_list = maybe_class_frame[4]
                first_statment = statement_list[0]
                if first_statment.strip().startswith('class '):
                    defined_in_class = True
            if not defined_in_class:
                raise Exception('PRISM representation methods must be defined in a class.')
            # Get class name for use (text-parsed from the `class ...` line)
            class_name = first_statment.replace('class ', '').split('(')[0]
            # Store mimetype to function
            func.prism_mimetype = mimetype
            # Map the method to a format we know about: "Class/version/rep/name"
            self.mapper.map('%s/%s/rep/%s' % (class_name.strip('\n:'), version, func.__name__), func)
            return process
        return func_decorator
    def has_access(self, version=None):
        """Decorator registering an access-check method for a class."""
        def func_decorator(func):
            @wraps(func)
            def process(*args, **kwargs):
                return func(*args, **kwargs)
            # FIXME:// Is having two copies of this code required? It could easily be moved into a method.
            # Determine if method was used in a class or not
            frames = inspect.stack()
            defined_in_class = False
            first_statment = ''
            if len(frames) > 2:
                maybe_class_frame = frames[2]
                statement_list = maybe_class_frame[4]
                first_statment = statement_list[0]
                if first_statment.strip().startswith('class '):
                    defined_in_class = True
            if not defined_in_class:
                raise Exception('PRISM access methods must be defined in a class.')
            # Get class name for use
            class_name = first_statment.replace('class ', '').split('(')[0]
            # Map the method to a format we know about: "Class/version/acc/name".
            # NOTE(review): unlike api_representation above, class_name is not
            # .strip('\n:')-ed here, so classes declared without parentheses
            # may register under a key with a trailing ':'/newline — confirm.
            self.mapper.map('%s/%s/acc/%s' % (class_name, version, func.__name__), func)
            return process
        return func_decorator
    def check_has_access(self, instance, access_reference, access_key, version=None):
        """Run the registered access check for ``access_reference`` and return
        its boolean result; raises when no mapping or a non-bool result."""
        # Get the relevant access method
        func = self.lookup_mappings(class_name=access_reference, version=version, type='acc')
        # If result is none, raise exception
        if func is None:
            raise Exception('Mapping not found for class %s, version %s, would have used access key %s' % (
                access_reference, version, access_key))
        # Get the result from method
        result = func(instance, access_key)
        # If the user wrote a function that doesn't return a boolean, the returned value is useless to us. Fail now.
        if not isinstance(result, bool):
            raise Exception('PRISM issue checking for access, expected boolean but got %s' % type(result))
        # Return the result
        return result
    def __get_mapper(self):
        # Name-mangled accessor; kept for backward compatibility.
        return self.mapper
    def lookup_mappings(self, class_name, version=None, type='rep'):
        """Return the first registered callable for class/version/type, or None."""
        return self.mapper.return_for(class_name=class_name, version=version, type=type)
class ResponseEvaluator(object):
    """Placeholder inserted by Prism's ``has_*`` helpers.

    Instances sit inside a representation dict until the response is built,
    at which point :meth:`evaluate_for_response` replaces them with either
    the real value (access granted) or a mode-dependent alternative.
    """
    MODE_NONE = 0
    MODE_EXCLUDE = 1
    MODE_ELSE = 2
    def __init__(self, prism, access_reference, access_key, mode, value, version=None):
        self.mode = mode
        self.prism = prism
        self.access_reference = access_reference
        self.access_key = access_key
        self.value = value
        self.version = version
        self.alternative = None
    def get_alternative(self):
        """Return the substitute used when the access check fails."""
        current_mode = self.mode
        if current_mode == ResponseEvaluator.MODE_ELSE:
            return self.alternative
        if current_mode == ResponseEvaluator.MODE_EXCLUDE:
            return ResponseEvaluator.Killer()
        if current_mode == ResponseEvaluator.MODE_NONE:
            return None
        raise Exception("Unrecognised mode for Response evaluator. Expected known constant, given %s" % current_mode)
    def evaluate_for_response(self, instance):
        """Resolve this placeholder for ``instance``: the real value when the
        access check passes, otherwise the mode's alternative."""
        granted = self.prism.check_has_access(instance, self.access_reference, self.access_key,
                                              version=self.version)
        if granted:
            return self.value
        return self.get_alternative()
    # Shelf class to know if we should kill off this value or key/value
    class Killer(object):
        pass
class ResponseMapper(object):
    """Registry mapping "Class/version/type/func" keys to representation and
    access-check callables."""
    def __init__(self):
        self.maps = {}
    def map(self, key, response):
        """Register ``response`` under ``key``; duplicates are an error."""
        if key in self.maps:
            raise Exception('Map key "%s" overwrites existing mapping. Try renaming the new method and try again.' %
                            key)
        self.maps[key] = response
    def return_for(self, class_name, version, type):
        """Return the first callable whose key starts with
        ``class_name/version/type``, or ``None`` when nothing matches."""
        prefix = '%s/%s/%s' % (class_name, version, type)
        for key, function in self.maps.items():
            if key.startswith(prefix):
                return function
        return None
| {
"repo_name": "patrickmccallum/flask-prism",
"path": "flask_prism.py",
"copies": "1",
"size": "14043",
"license": "mit",
"hash": -6100292392391209000,
"line_mean": 35.6487935657,
"line_max": 119,
"alpha_frac": 0.5545111443,
"autogenerated": false,
"ratio": 4.503848620910841,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0012140825940212322,
"num_lines": 373
} |
from functools import wraps
from django.http import HttpResponse
from django.template.context import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
def render_to(template=None, mimetype=None):
    """Decorator: render the dict returned by the view with ``template``.

    Non-dict return values pass through untouched.  The dict may override
    the template by including a ``TEMPLATE`` key.
    """
    def renderer(function):
        @wraps(function)
        def wrapper(request, *args, **kwargs):
            result = function(request, *args, **kwargs)
            if not isinstance(result, dict):
                return result
            chosen_template = result.pop('TEMPLATE', template)
            return render_to_response(chosen_template, result,
                context_instance=RequestContext(request), mimetype=mimetype)
        return wrapper
    return renderer
class JsonResponse(HttpResponse):
    """HttpResponse subclass that serialises ``data`` to JSON and replies
    with the ``application/json`` mimetype."""
    def __init__(self, data):
        payload = simplejson.dumps(data)
        super(JsonResponse, self).__init__(content=payload, mimetype='application/json')
def ajax_request(func):
    """Decorator: dict results become a :class:`JsonResponse`; any other
    return value passes through unchanged."""
    @wraps(func)
    def wrapper(request, *args, **kwargs):
        result = func(request, *args, **kwargs)
        if not isinstance(result, dict):
            return result
        return JsonResponse(result)
    return wrapper
| {
"repo_name": "dbreen/games",
"path": "games/utils/decorators.py",
"copies": "1",
"size": "1299",
"license": "mit",
"hash": -8569725297775521000,
"line_mean": 32.1842105263,
"line_max": 103,
"alpha_frac": 0.6351039261,
"autogenerated": false,
"ratio": 4.775735294117647,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5910839220217647,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from django.middleware.cache import CacheMiddleware
from django.utils.cache import add_never_cache_headers, patch_cache_control
from django.utils.decorators import (
available_attrs, decorator_from_middleware_with_args,
)
def cache_page(*args, **kwargs):
    """
    View decorator that serves the response from the cache when possible and
    populates the cache otherwise.

    The cache key combines the URL with data from the headers, plus a
    ``key_prefix`` so separate cache areas can coexist in a multi-site setup
    (e.g. use ``get_current_site().domain``).  Headers named in the
    response's ``Vary`` header are honoured, exactly like the middleware.

    Usage: ``cache_page(timeout, cache=None, key_prefix=None)``.
    """
    # Explicit argument checks give clearer errors than letting the
    # middleware factory fail later.
    if len(args) != 1 or callable(args[0]):
        raise TypeError("cache_page has a single mandatory positional argument: timeout")
    timeout = args[0]
    alias = kwargs.pop('cache', None)
    prefix = kwargs.pop('key_prefix', None)
    if kwargs:
        raise TypeError("cache_page has two optional keyword arguments: cache and key_prefix")
    middleware_decorator = decorator_from_middleware_with_args(CacheMiddleware)
    return middleware_decorator(cache_timeout=timeout, cache_alias=alias, key_prefix=prefix)
def cache_control(**kwargs):
    """Decorator factory that applies the given Cache-Control directives to
    the wrapped view's response (via ``patch_cache_control``)."""
    def decorator(viewfunc):
        @wraps(viewfunc, assigned=available_attrs(viewfunc))
        def inner(request, *args, **kw):
            response = viewfunc(request, *args, **kw)
            patch_cache_control(response, **kwargs)
            return response
        return inner
    return decorator
def never_cache(view_func):
    """
    Decorator that adds cache-busting headers to the response so it is
    never cached.
    """
    @wraps(view_func, assigned=available_attrs(view_func))
    def _no_cache_view(request, *args, **kwargs):
        response = view_func(request, *args, **kwargs)
        add_never_cache_headers(response)
        return response
    return _no_cache_view
| {
"repo_name": "diego-d5000/MisValesMd",
"path": "env/lib/python2.7/site-packages/django/views/decorators/cache.py",
"copies": "2",
"size": "2364",
"license": "mit",
"hash": 3903271864025219000,
"line_mean": 37.4,
"line_max": 94,
"alpha_frac": 0.679357022,
"autogenerated": false,
"ratio": 4.198934280639431,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5878291302639431,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.utils.decorators import available_attrs, decorator_from_middleware
# Per-view counterpart of CsrfViewMiddleware, built from the middleware class.
csrf_protect = decorator_from_middleware(CsrfViewMiddleware)
csrf_protect.__name__ = "csrf_protect"
csrf_protect.__doc__ = """
This decorator adds CSRF protection in exactly the same way as
CsrfViewMiddleware, but it can be used on a per view basis. Using both, or
using the decorator multiple times, is harmless and efficient.
"""
class _EnsureCsrfToken(CsrfViewMiddleware):
    # We need this to behave just like the CsrfViewMiddleware, but not reject
    # requests or log warnings.
    def _reject(self, request, reason):
        # Overridden to a no-op: never reject, only make the token available.
        return None
# Guarantees a valid csrf_token in the template context without enforcing
# the CSRF protection itself.
requires_csrf_token = decorator_from_middleware(_EnsureCsrfToken)
requires_csrf_token.__name__ = 'requires_csrf_token'
requires_csrf_token.__doc__ = """
Use this decorator on views that need a correct csrf_token available to
RequestContext, but without the CSRF protection that csrf_protect
enforces.
"""
class _EnsureCsrfCookie(CsrfViewMiddleware):
    # Like CsrfViewMiddleware but never rejects; it only makes sure the CSRF
    # cookie is sent with the response.
    def _reject(self, request, reason):
        # No-op: this variant never blocks a request.
        return None
    def process_view(self, request, callback, callback_args, callback_kwargs):
        retval = super(_EnsureCsrfCookie, self).process_view(request, callback, callback_args, callback_kwargs)
        # Forces process_response to send the cookie
        get_token(request)
        return retval
# Decorator guaranteeing the CSRF cookie is set on the response.
ensure_csrf_cookie = decorator_from_middleware(_EnsureCsrfCookie)
ensure_csrf_cookie.__name__ = 'ensure_csrf_cookie'
ensure_csrf_cookie.__doc__ = """
Use this decorator to ensure that a view sets a CSRF cookie, whether or not it
uses the csrf_token template tag, or the CsrfViewMiddleware is used.
"""
def csrf_exempt(view_func):
    """
    Marks a view function as being exempt from the CSRF view protection.
    """
    # Wrap rather than mutate view_func so the decorator has no side effects
    # on the original callable.  The csrf_exempt flag is set before wraps()
    # copies view_func's __dict__, preserving the original update order.
    def _exempt_view(*args, **kwargs):
        return view_func(*args, **kwargs)
    _exempt_view.csrf_exempt = True
    return wraps(view_func, assigned=available_attrs(view_func))(_exempt_view)
| {
"repo_name": "yephper/django",
"path": "django/views/decorators/csrf.py",
"copies": "2",
"size": "2262",
"license": "bsd-3-clause",
"hash": 4410747656402163700,
"line_mean": 35.7,
"line_max": 111,
"alpha_frac": 0.7130857648,
"autogenerated": false,
"ratio": 3.940766550522648,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00014749262536873156,
"num_lines": 60
} |
from functools import wraps
from django.utils.decorators import available_attrs
def xframe_options_deny(view_func):
    """
    View decorator that sets ``X-Frame-Options: DENY`` on the response,
    unless that header is already present.

    e.g.

    @xframe_options_deny
    def some_view(request):
        ...
    """
    def _set_deny(*args, **kwargs):
        response = view_func(*args, **kwargs)
        if response.get('X-Frame-Options') is None:
            response['X-Frame-Options'] = 'DENY'
        return response
    return wraps(view_func, assigned=available_attrs(view_func))(_set_deny)
def xframe_options_sameorigin(view_func):
    """
    View decorator that sets ``X-Frame-Options: SAMEORIGIN`` on the
    response, unless that header is already present.

    e.g.

    @xframe_options_sameorigin
    def some_view(request):
        ...
    """
    def _set_sameorigin(*args, **kwargs):
        response = view_func(*args, **kwargs)
        if response.get('X-Frame-Options') is None:
            response['X-Frame-Options'] = 'SAMEORIGIN'
        return response
    return wraps(view_func, assigned=available_attrs(view_func))(_set_sameorigin)
def xframe_options_exempt(view_func):
    """
    View decorator that flags the response so XFrameOptionsMiddleware skips
    setting the X-Frame-Options header.

    e.g.

    @xframe_options_exempt
    def some_view(request):
        ...
    """
    def _exempt(*args, **kwargs):
        response = view_func(*args, **kwargs)
        response.xframe_options_exempt = True
        return response
    return wraps(view_func, assigned=available_attrs(view_func))(_exempt)
| {
"repo_name": "yephper/django",
"path": "django/views/decorators/clickjacking.py",
"copies": "1",
"size": "1805",
"license": "bsd-3-clause",
"hash": -8673396118064572000,
"line_mean": 27.5901639344,
"line_max": 78,
"alpha_frac": 0.6227146814,
"autogenerated": false,
"ratio": 3.8650963597430406,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.998781104114304,
"avg_score": 0,
"num_lines": 61
} |
from functools import wraps
from flask import abort
from flask import has_request_context
from flask import request
from flask_restful import fields
from flask_restful import marshal
from flask_restful import marshal_with
from flask_restful.utils import unpack
from flask_sqlalchemy import BaseQuery
from flask_sqlalchemy import Pagination
from furl import furl
def paginate(query, page=None, per_page=None, error_out=True):
    """Paginate ``query`` (clone of flask_sqlalchemy's Pagination behaviour).

    Missing ``page``/``per_page`` fall back to the request query string
    (defaults 1 and 20 outside a request).  With ``error_out`` true: invalid
    numbers, pages below 1, and empty pages past the first abort with 404.

    Returns a :class:`Pagination` object.
    """
    inside_request = has_request_context()
    if page is None:
        if inside_request:
            try:
                page = int(request.args.get('page', 1))
            except (TypeError, ValueError):
                if error_out:
                    abort(404)
                page = 1
        else:
            page = 1
    if per_page is None:
        if inside_request:
            try:
                per_page = int(request.args.get('per_page', 20))
            except (TypeError, ValueError):
                if error_out:
                    abort(404)
                per_page = 20
        else:
            per_page = 20
    if error_out and page < 1:
        abort(404)
    offset = (page - 1) * per_page
    items = query.limit(per_page).offset(offset).all()
    if error_out and page != 1 and not items:
        abort(404)
    # Skip the (potentially expensive) count when page 1 already holds fewer
    # items than a full page.
    if page == 1 and len(items) < per_page:
        total = len(items)
    else:
        total = query.order_by(None).count()
    return Pagination(query, page, per_page, total, items)
class Paginate(marshal_with):
    """Decorator that paginates a view's query result and marshals each page
    item with the given field schema.

    Example:
        @Paginate({'id': fields.Integer})
        def get():
            pass
    """
    def __init__(self, serializer_fields, envelope=None, item_builder=None):
        """decorator initialize for Paginate

        :param serializer_fields: a dict whose keys make up the serialized
            shape of each item in the page
        :param envelope: optional key used to envelop the serialized response
        :param item_builder: optional callable applied to each page item
            (e.g. to rebuild items produced by a group_by query)
        :return:
        """
        super(Paginate, self).__init__(self.rebuild_fields(serializer_fields), envelope=envelope)
        self.item_builder = item_builder
    def __call__(self, func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            resp = func(*args, **kwargs)
            # Views may return either a bare query or (query, code, headers).
            if isinstance(resp, tuple):
                query, code, headers = unpack(resp)
                data = self.make_pagination(query)
                return marshal(data, self.fields, self.envelope), code, headers
            else:
                data = self.make_pagination(resp)
                return marshal(data, self.fields, self.envelope)
        return wrapper
    @staticmethod
    def rebuild_fields(serializer_fields):
        # Wrap the item schema in the standard pagination envelope.
        return {
            'count': fields.Integer,
            'previous': fields.String,
            'next': fields.String,
            'results': fields.List(fields.Nested(serializer_fields))
        }
    def make_pagination(self, query):
        """Paginate ``query`` and build the count/previous/next/results dict."""
        # flask_sqlalchemy queries paginate themselves; other queries use the
        # local paginate() clone.
        if isinstance(query, BaseQuery):
            pagination = query.paginate()
        else:
            pagination = paginate(query)
        if self.item_builder:
            builder = self.item_builder
            pagination.items = [builder(item) for item in pagination.items]
        # Derive previous/next links from the current request URL.
        url = furl(request.url)
        url.args['per_page'] = pagination.per_page
        previous_url = None
        if pagination.has_prev:
            url.args['page'] = pagination.prev_num
            previous_url = url.url
        next_url = None
        if pagination.has_next:
            url.args['page'] = pagination.next_num
            next_url = url.url
        return {
            'count': pagination.total,
            'previous': previous_url,
            'next': next_url,
            'results': pagination.items
        }
| {
"repo_name": "by46/coffee",
"path": "flask_kits1/restful/pagination.py",
"copies": "1",
"size": "4683",
"license": "mit",
"hash": -5970442448781456000,
"line_mean": 30.0753424658,
"line_max": 97,
"alpha_frac": 0.5673713432,
"autogenerated": false,
"ratio": 4.364398881640261,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00022781036974798324,
"num_lines": 146
} |
from functools import wraps
from flask import abort
from flask import redirect
from flask import request
from flask import url_for
from flask_login import current_user
from furl import furl
def permission_required(permission):
    """Decorator factory: abort with 401 unless the current user holds
    ``permission``."""
    def decorator(f):
        @wraps(f)
        def guarded(*args, **kwargs):
            if not current_user.has_permission(permission):
                abort(401)
            return f(*args, **kwargs)
        return guarded
    return decorator
def admin_required(f):
    # NOTE(review): permission_required() is called without its required
    # positional ``permission`` argument, so applying @admin_required raises
    # TypeError at decoration time.  The admin permission constant needs to be
    # passed here — confirm the intended value.
    return permission_required()(f)
def common_required(validate_permission):
    """
    Common required decorator, used for simple permission validation.

    :param validate_permission: callable taking the current user and
        returning a truthy value when access is allowed
    :return: decorator that aborts with 401 when the check fails
    """
    def decorator(func):
        @wraps(func)
        def guarded(*args, **kwargs):
            if not validate_permission(current_user):
                abort(401)
            return func(*args, **kwargs)
        return guarded
    return decorator
def wx_required(func):
    """
    Require a WeChat-authenticated login for ``func`` (401 otherwise).
    """
    is_wx = lambda user: user.is_wx_user
    return common_required(is_wx)(func)
def manager_required(func):
    """
    Require a portal (manager) login for ``func`` (401 otherwise).
    """
    is_manager = lambda user: user.is_manager_user
    return common_required(is_manager)(func)
def h5_required(func):
    """Require a WeChat-authenticated user for an H5 view.

    Non-WeChat users are redirected to the H5 router with a
    ``redirect_uri`` pointing back at the view they requested.
    """
    @wraps(func)
    def decorated_function(*args, **kwargs):
        if not current_user.is_wx_user:
            location = url_for('h5.router')
            url = furl(location)
            end_point = request.endpoint
            # Endpoint name without its blueprint prefix, e.g. 'h5.detail' -> 'detail'.
            view = end_point.split('.')[-1]
            redirect_url = furl(request.full_path)
            # Rewrite the path to the bare view name while keeping the query
            # string, then hand it to the router as redirect_uri.
            redirect_url.path = view
            url.query.params['redirect_uri'] = redirect_url.url
            return redirect(url.url)
        return func(*args, **kwargs)
    return decorated_function
def portal_required(func):
    """Redirect non-manager users to the portal login page."""
    @wraps(func)
    def guarded(*args, **kwargs):
        if current_user.is_manager_user:
            return func(*args, **kwargs)
        return redirect(url_for('auth.login'))
    return guarded
| {
"repo_name": "by46/flask-kits",
"path": "flask_kits/decorators/permission.py",
"copies": "1",
"size": "2270",
"license": "mit",
"hash": -9020362648406861000,
"line_mean": 23.2222222222,
"line_max": 67,
"alpha_frac": 0.5938325991,
"autogenerated": false,
"ratio": 4.274952919020715,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5368785518120716,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from google.appengine.ext import db
from google.appengine.api import users
from db_helper import IdUrlField, generate_sorted_query, update_model
from flask import abort, redirect, request
from flask.ext.restful import Resource, reqparse, fields, marshal
from auth import requires_auth, hash_password
def login_required(func):
    """Redirect to the App Engine login page when no user is signed in."""
    @wraps(func)
    def decorated_view(*args, **kwargs):
        if users.get_current_user():
            return func(*args, **kwargs)
        return redirect(users.create_login_url(request.url))
    return decorated_view
class User(db.Model):
    """Datastore model for an application user."""
    user_name = db.StringProperty(required=True)
    first_name = db.StringProperty(required=True)
    last_name = db.StringProperty(required=True)
    password = db.StringProperty(required=True)  # stored hashed (see hash_password usage in the APIs)
    last_beer_add_date = db.DateTimeProperty(required=False)
    @property
    def id(self):
        # Numeric datastore id of this entity's key.
        return self.key().id()
# Marshalling schema shared by the user endpoints; the password is
# deliberately excluded from API output.
user_fields = {
    'user_name': fields.String,
    'first_name': fields.String,
    'last_name': fields.String,
    'last_beer_add_date': fields.DateTime,
    'uri': IdUrlField('user', absolute=True),
}
class UserListApi(Resource):
    """REST resource for listing and creating users."""
    def __init__(self):
        # All fields are mandatory when creating a user.
        self.reqparse = reqparse.RequestParser()
        self.reqparse.add_argument("user_name", type=str, required=True, help='User Name Is Required')
        self.reqparse.add_argument("first_name", type=str, required=True, help='First Name Is Required')
        self.reqparse.add_argument("last_name", type=str, required=True, help='Last Name Is Required')
        self.reqparse.add_argument("password", type=str, required=True, help='Password Is Required')
        super(UserListApi, self).__init__()
    @requires_auth
    def get(self):
        """Return all users, marshalled with user_fields."""
        user_list = []
        for u in generate_sorted_query(User):
            user_list.append(u)
        # NOTE(review): relies on map() returning a list (Python 2); on
        # Python 3 this would serialise a lazy map object — confirm runtime.
        return {'users': map(lambda u: marshal(u, user_fields), user_list)}
    @requires_auth
    def post(self):
        """Create a user; 409 when the user_name is already taken."""
        args = self.reqparse.parse_args()
        u = User.all(keys_only=True).filter('user_name', args['user_name']).get()
        if u:
            abort(409, message="User with user_name '%s' already exists" % args['user_name'])
        # Hash the password before persisting; never store it raw.
        u = User(user_name=args.user_name,
                 first_name=args.first_name,
                 last_name=args.last_name,
                 password=hash_password(args.password))
        u.put()
        return {'user': marshal(u, user_fields)}
class UserApi(Resource):
    """REST resource for a single user (get/update/delete by datastore id)."""
    def __init__(self):
        # All fields optional: put() performs a partial update.
        self.reqparse = reqparse.RequestParser()
        self.reqparse.add_argument("user_name", type=str)
        self.reqparse.add_argument("first_name", type=str)
        self.reqparse.add_argument("last_name", type=str)
        self.reqparse.add_argument("password", type=str)
        self.reqparse.add_argument("last_beer_add_date", type=str)
        super(UserApi, self).__init__()
    @requires_auth
    def get(self, id):
        """Return the user with the given id, or 404."""
        user = User.get_by_id(id)
        if user is None:
            abort(404)
        return {'user': marshal(user, user_fields)}
    @requires_auth
    def put(self, id):
        """Partially update the user: only fields the client sent change."""
        user = User.get_by_id(id)
        if user is None:
            abort(404)
        args = self.reqparse.parse_args()
        # Keep only the fields that were actually supplied.  (Dict
        # comprehension replaces the old tuple-unpacking lambda, which is
        # Python-2-only syntax and a SyntaxError on Python 3.)
        u = {k: v for k, v in args.items() if v is not None}
        if u.get('password') is not None:
            # Hash before persisting; never store the raw password.
            u['password'] = hash_password(u['password'])
        update_model(user, u)
        user.put()
        return {'user': marshal(user, user_fields)}
    @requires_auth
    def delete(self, id):
        """Delete the user and echo its last marshalled state."""
        user = User.get_by_id(id)
        if user is None:
            abort(404)
        user.delete()
        return {'user': marshal(user, user_fields), 'action': 'deleted'}
| {
"repo_name": "pwojt/beer_app_414",
"path": "user_api.py",
"copies": "1",
"size": "3831",
"license": "apache-2.0",
"hash": -8069848243445657000,
"line_mean": 31.6052631579,
"line_max": 104,
"alpha_frac": 0.6042808666,
"autogenerated": false,
"ratio": 3.694310511089682,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9795914046039502,
"avg_score": 0.0005354663300360144,
"num_lines": 114
} |
from functools import wraps
class Log:
    """
    Decorator class that logs a function's arguments and result through the
    logger supplied at construction time.
    """
    def __init__(self, logger):
        # Keep a reference so different Log instances can use different loggers.
        self.logger = logger
    @staticmethod
    def _create_message(result=None, *args, **kwargs):
        """
        Build the log message.

        :param result: return value of the wrapped function (None = omitted)
        :param args: positional arguments the function was called with
        :param kwargs: keyword arguments the function was called with
        :return: the formatted message string
        """
        message = ''
        if args:
            message += 'args: {} '.format(args)
        if kwargs:
            message += 'kwargs: {} '.format(kwargs)
        # Compare against None so falsy results (0, '', False) still appear
        # in the message; the old truthiness test silently dropped them.
        if result is not None:
            message += '= {}'.format(result)
        # Return the final message
        return message
    def __call__(self, func):
        """
        Defines __call__ so an instance can be used as a decorator.

        :param func: the function to decorate
        :return: the wrapped function, which logs every call
        """
        @wraps(func)
        def decorated(*args, **kwargs):
            # Run the function and capture its result
            result = func(*args, **kwargs)
            # Build the log message from the call arguments and result
            message = Log._create_message(result, *args, **kwargs)
            # wraps() copied __name__/__module__ from func onto `decorated`,
            # so read them from the wrapper rather than relying on the logger
            # record, which would report this module instead.
            self.logger.info('{} - {} - {}'.format(message, decorated.__name__, decorated.__module__))
            return result
return decorated | {
"repo_name": "OOPSA45/Python-learn-",
"path": "my_package/log/decorators.py",
"copies": "1",
"size": "2217",
"license": "apache-2.0",
"hash": 7405134841538525000,
"line_mean": 31.9607843137,
"line_max": 102,
"alpha_frac": 0.5595375723,
"autogenerated": false,
"ratio": 2.3569482288828336,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.840936367494306,
"avg_score": 0.0014244252479546596,
"num_lines": 51
} |
from functools import wraps
from django.middleware.cache import CacheMiddleware
from django.utils.cache import add_never_cache_headers, patch_cache_control
from django.utils.decorators import (
available_attrs, decorator_from_middleware_with_args,
)
def cache_page(*args, **kwargs):
    """
    View decorator that serves the response from the cache when possible and
    populates the cache otherwise.

    The cache key combines the URL with data from the headers, plus a
    ``key_prefix`` so separate cache areas can coexist in a multi-site setup
    (e.g. use ``get_current_site().domain``).  Headers named in the
    response's ``Vary`` header are honoured, exactly like the middleware.

    Usage: ``cache_page(timeout, cache=None, key_prefix=None)``.
    """
    # Explicit argument checks give clearer errors than letting the
    # middleware factory fail later.
    if len(args) != 1 or callable(args[0]):
        raise TypeError("cache_page has a single mandatory positional argument: timeout")
    timeout = args[0]
    alias = kwargs.pop('cache', None)
    prefix = kwargs.pop('key_prefix', None)
    if kwargs:
        raise TypeError("cache_page has two optional keyword arguments: cache and key_prefix")
    middleware_decorator = decorator_from_middleware_with_args(CacheMiddleware)
    return middleware_decorator(cache_timeout=timeout, cache_alias=alias, key_prefix=prefix)
def cache_control(**kwargs):
    """Decorator factory that applies the given Cache-Control directives to
    the wrapped view's response (via ``patch_cache_control``)."""
    def decorator(viewfunc):
        @wraps(viewfunc, assigned=available_attrs(viewfunc))
        def inner(request, *args, **kw):
            response = viewfunc(request, *args, **kw)
            patch_cache_control(response, **kwargs)
            return response
        return inner
    return decorator
def never_cache(view_func):
    """
    Decorator that adds cache-busting headers to the response so it is
    never cached.
    """
    @wraps(view_func, assigned=available_attrs(view_func))
    def _no_cache_view(request, *args, **kwargs):
        response = view_func(request, *args, **kwargs)
        add_never_cache_headers(response)
        return response
    return _no_cache_view
| {
"repo_name": "letouriste001/SmartForest_2.0",
"path": "python3.4Smartforest/lib/python3.4/site-packages/django/views/decorators/cache.py",
"copies": "1",
"size": "2304",
"license": "mit",
"hash": -5729212962489686000,
"line_mean": 37.4,
"line_max": 94,
"alpha_frac": 0.6970486111,
"autogenerated": false,
"ratio": 3.789473684210526,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.992141812864386,
"avg_score": 0.013020833333333334,
"num_lines": 1
} |
from functools import wraps
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.utils.decorators import available_attrs, decorator_from_middleware
# Per-view counterpart of CsrfViewMiddleware, built from the middleware class.
csrf_protect = decorator_from_middleware(CsrfViewMiddleware)
csrf_protect.__name__ = "csrf_protect"
csrf_protect.__doc__ = """
This decorator adds CSRF protection in exactly the same way as
CsrfViewMiddleware, but it can be used on a per view basis. Using both, or
using the decorator multiple times, is harmless and efficient.
"""
class _EnsureCsrfToken(CsrfViewMiddleware):
    """CsrfViewMiddleware variant that behaves identically except that it
    never rejects a request or logs a warning."""

    def _reject(self, request, reason):
        # Swallow the rejection so the request is always allowed through.
        return None
# Decorator that makes a valid csrf_token available to the template context
# without enforcing CSRF checks (built from the non-rejecting middleware).
requires_csrf_token = decorator_from_middleware(_EnsureCsrfToken)
requires_csrf_token.__name__ = 'requires_csrf_token'
requires_csrf_token.__doc__ = """
Use this decorator on views that need a correct csrf_token available to
RequestContext, but without the CSRF protection that csrf_protect
enforces.
"""
class _EnsureCsrfCookie(CsrfViewMiddleware):
    """CsrfViewMiddleware variant that never rejects a request but always
    forces the CSRF cookie to be sent."""

    def _reject(self, request, reason):
        # Never block the request.
        return None

    def process_view(self, request, callback, callback_args, callback_kwargs):
        retval = super(_EnsureCsrfCookie, self).process_view(
            request, callback, callback_args, callback_kwargs)
        # Calling get_token() marks the request so that process_response
        # will send the CSRF cookie.
        get_token(request)
        return retval
# Decorator guaranteeing the CSRF cookie is set on the response, even when
# the template never uses the csrf_token tag (built from _EnsureCsrfCookie).
ensure_csrf_cookie = decorator_from_middleware(_EnsureCsrfCookie)
ensure_csrf_cookie.__name__ = 'ensure_csrf_cookie'
ensure_csrf_cookie.__doc__ = """
Use this decorator to ensure that a view sets a CSRF cookie, whether or not it
uses the csrf_token template tag, or the CsrfViewMiddleware is used.
"""
def csrf_exempt(view_func):
    """
    Marks a view function as being exempt from the CSRF view protection.
    """
    def _exempt_view(*args, **kwargs):
        return view_func(*args, **kwargs)
    # A fresh wrapper is returned (rather than setting csrf_exempt on
    # view_func directly) so the decorator has no side effects on the
    # original function.
    _exempt_view.csrf_exempt = True
    return wraps(view_func, assigned=available_attrs(view_func))(_exempt_view)
| {
"repo_name": "letouriste001/SmartForest_2.0",
"path": "python3.4Smartforest/lib/python3.4/site-packages/django/views/decorators/csrf.py",
"copies": "1",
"size": "2202",
"license": "mit",
"hash": 346350312581909600,
"line_mean": 35.7,
"line_max": 111,
"alpha_frac": 0.7325158946,
"autogenerated": false,
"ratio": 3.7770154373927958,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5009531331992796,
"avg_score": null,
"num_lines": null
} |
from functools import wraps
from django.utils.decorators import available_attrs
def xframe_options_deny(view_func):
    """
    Modifies a view function so its response has the X-Frame-Options HTTP
    header set to 'DENY' as long as the response doesn't already have that
    header set.

    e.g.

    @xframe_options_deny
    def some_view(request):
        ...
    """
    def _deny_framing(*args, **kwargs):
        response = view_func(*args, **kwargs)
        # Only fill in the header when the view did not set one itself.
        if response.get('X-Frame-Options') is None:
            response['X-Frame-Options'] = 'DENY'
        return response
    return wraps(view_func, assigned=available_attrs(view_func))(_deny_framing)
def xframe_options_sameorigin(view_func):
    """
    Modifies a view function so its response has the X-Frame-Options HTTP
    header set to 'SAMEORIGIN' as long as the response doesn't already have
    that header set.

    e.g.

    @xframe_options_sameorigin
    def some_view(request):
        ...
    """
    def _sameorigin_framing(*args, **kwargs):
        response = view_func(*args, **kwargs)
        # Only fill in the header when the view did not set one itself.
        if response.get('X-Frame-Options') is None:
            response['X-Frame-Options'] = 'SAMEORIGIN'
        return response
    return wraps(view_func, assigned=available_attrs(view_func))(_sameorigin_framing)
def xframe_options_exempt(view_func):
    """
    Modifies a view function by setting a response variable that instructs
    XFrameOptionsMiddleware to NOT set the X-Frame-Options HTTP header.

    e.g.

    @xframe_options_exempt
    def some_view(request):
        ...
    """
    def _exempt_framing(*args, **kwargs):
        response = view_func(*args, **kwargs)
        # Flag read by XFrameOptionsMiddleware to skip this response.
        response.xframe_options_exempt = True
        return response
    return wraps(view_func, assigned=available_attrs(view_func))(_exempt_framing)
| {
"repo_name": "letouriste001/SmartForest_2.0",
"path": "python3.4Smartforest/lib/python3.4/site-packages/django/views/decorators/clickjacking.py",
"copies": "1",
"size": "1744",
"license": "mit",
"hash": -2041274687794030600,
"line_mean": 27.5901639344,
"line_max": 78,
"alpha_frac": 0.6444954128,
"autogenerated": false,
"ratio": 3.3603082851637764,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4504803697963777,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, update_wrapper
from flask import Flask, render_template, request, Response, jsonify, make_response
#----------------------------------------------
# decorator for turning off browser caching
#----------------------------------------------
def nocache(f):
    """Stop caching for pages wrapped in nocache decorator."""
    def _no_cache_view(*args, **kwargs):
        response = make_response(f(*args, **kwargs))
        # Sets Cache-Control: no-cache on the outgoing response.
        response.cache_control.no_cache = True
        return response
    return update_wrapper(_no_cache_view, f)
#----------------------------------------------
# class for adding for authentication decorator
#----------------------------------------------
class PsiTurkAuthorization():
    """HTTP Basic-Auth helper whose credentials come from the psiTurk
    config file ('Server Parameters' section)."""

    def __init__(self, config):
        # Credentials are read once from the config object.
        self.queryname = config.get('Server Parameters', 'login_username')
        self.querypw = config.get('Server Parameters', 'login_pw')

    @staticmethod
    def wrapper(func, args):
        # BUG FIX: this was declared as an instance method without `self`,
        # so calling it on an instance bound the instance into the `func`
        # slot and broke the call.  As a staticmethod, class-level calls
        # keep working and instance-level calls are fixed.
        return func(*args)

    def check_auth(self, username, password):
        """Return True if the username / password combination matches the
        configured login credentials."""
        return username == self.queryname and password == self.querypw

    def authenticate(self):
        """Sends a 401 response that enables basic auth"""
        return Response(
            'Could not verify your access level for that URL.\n'
            'You have to login with proper credentials', 401,
            {'WWW-Authenticate': 'Basic realm="Login Required"'})

    def requires_auth(self, f):
        """
        Decorator to prompt for user name and password. Useful for data dumps, etc.
        that you don't want to be public.
        """
        @wraps(f)
        def decorated(*args, **kwargs):
            auth = request.authorization
            if not auth or not self.check_auth(auth.username, auth.password):
                return self.authenticate()
            return f(*args, **kwargs)
        return decorated
"repo_name": "johnmcdonnell/psiTurk",
"path": "psiturk/user_utils.py",
"copies": "1",
"size": "1930",
"license": "mit",
"hash": -7014919257699078000,
"line_mean": 36.1346153846,
"line_max": 83,
"alpha_frac": 0.5740932642,
"autogenerated": false,
"ratio": 4.7073170731707314,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0044597444487349475,
"num_lines": 52
} |
from functools import wraps,update_wrapper
from datetime import timedelta
from flask import request,make_response,abort,current_app
from models import Survey,User,Response
import pymongo
import settings
from settings import logger
import uuid
import urlparse
from werkzeug.routing import BaseConverter
class RegexConverter(BaseConverter):
    """URL converter whose matching pattern is the caller-supplied regular
    expression (the first extra argument of the route rule)."""

    def __init__(self, url_map, *items):
        super(RegexConverter, self).__init__(url_map)
        # The route rule passes the regex as the first converter argument.
        self.regex = items[0]
def request_wants_json():
    """Return True when the client's Accept header prefers JSON strictly
    over text/html."""
    preferred = request.accept_mimetypes.best_match(
        ['application/json', 'text/html'])
    if preferred != 'application/json':
        return False
    # Only report JSON when it is strictly preferred over text/html.
    return (request.accept_mimetypes[preferred] >
            request.accept_mimetypes['text/html'])
def crossdomain(origin=None, methods=None, headers=None,
                max_age=21600, attach_to_all=True,
                automatic_options=True,auto_origin = False):
    # Decorator factory adding CORS response headers to a view.
    #
    # origin:            allowed origin(s); a string, or an iterable that is
    #                    joined with ", " (Python 2 `basestring` check).
    # methods/headers:   allowed methods/headers; iterables are upper-cased
    #                    and joined.
    # max_age:           Access-Control-Max-Age, seconds or a timedelta.
    # automatic_options: answer OPTIONS pre-flight requests directly.
    # auto_origin:       echo back the origin derived from the Referer
    #                    header instead of a fixed origin (404 if absent).
    # NOTE(review): attach_to_all is accepted but never used in this
    # variant of the (well-known) crossdomain snippet.
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, basestring):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, basestring) and not auto_origin:
        origin = ', '.join(origin)
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()
    def get_methods():
        # Explicit methods win; otherwise mirror what a default OPTIONS
        # response would advertise in its Allow header.
        if methods is not None:
            return methods
        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']
    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = f(*args, **kwargs)
                # Plain-string view results are promoted to Response
                # objects so headers can be attached.
                if isinstance(resp,str):
                    resp = make_response(resp)
            h = resp.headers
            if auto_origin:
                # Reconstruct scheme://host[:port] from the Referer.
                if not 'Referer' in request.headers:
                    abort(404)
                o = urlparse.urlparse(request.headers['Referer'])
                origin_str = o.scheme+'://'+o.hostname+(':'+str(o.port) if o.port else '')
            else:
                origin_str = origin
            h['Access-Control-Allow-Origin'] = origin_str
            h['Access-Control-Allow-Credentials'] = 'true'
            h['Access-Control-Allow-Methods'] = get_methods()
            h['Access-Control-Max-Age'] = str(max_age)
            h['P3P'] = 'CP="CURa ADMa DEVa PSAo PSDo OUR BUS UNI PUR INT DEM STA PRE COM NAV OTC NOI DSP COR"'
            if headers is not None:
                h['Access-Control-Allow-Headers'] = headers
            return resp
        f.provide_automatic_options = False
        return update_wrapper(wrapped_function, f)
    return decorator
def _get_session():
    """Return the session id from the request cookie, minting a fresh
    random id when the cookie is absent or empty."""
    token = request.cookies.get('session')
    return token if token else uuid.uuid4().hex
def _get_survey(survey_key):
    """Look up a survey document by key, aborting with 404 when absent."""
    found = Survey.collection.find_one({'key': survey_key})
    if not found:
        abort(404)
    return found
def with_session():
    """Decorator factory: attach the session id to ``request.session`` and
    echo it back to the client in the 'session' cookie."""
    def decorator(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            request.session = _get_session()
            resp = f(*args, **kwargs)
            # Promote plain-string results so a cookie can be attached.
            if isinstance(resp, str):
                resp = make_response(resp)
            resp.set_cookie('session', request.session)
            return resp
        return wrapped
    return decorator
def jsonp():
    """Decorator factory: wrap the response body in the JSONP callback named
    by the 'callback' query argument (404 when it is missing)."""
    def decorator(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            if 'callback' not in request.args:
                abort(404)
            resp = f(*args, **kwargs)
            if isinstance(resp, str):
                resp = make_response(resp)
            # callback(<original body>) served as JavaScript.
            resp.data = "%s(%s)" % (request.args['callback'], resp.data)
            resp.mimetype = 'application/javascript'
            return resp
        return wrapped
    return decorator
def with_user():
    """Decorator factory: load (or create) the User for the current session
    and expose it as ``request.user``."""
    def decorator(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            found = User.collection.find_one({'session': request.session})
            # Fall back to a fresh (unsaved) User for unknown sessions.
            request.user = found if found else User(session=request.session)
            return f(*args, **kwargs)
        return wrapped
    return decorator
def with_survey():
    """Decorator factory: resolve the 'survey_key' URL argument into
    ``request.survey``, aborting with 404 when missing or unknown."""
    def decorator(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            if 'survey_key' not in kwargs:
                logger.debug("with_survey: No survey key given!")
                abort(404)
            request.survey = Survey.collection.find_one(
                {'key': kwargs['survey_key']})
            if not request.survey:
                logger.debug("with_survey: Survey not found!")
                abort(404)
            return f(*args, **kwargs)
        return wrapped
    return decorator
def with_field():
    """Decorator factory: validate the field_type/field_id URL arguments
    against request.survey and expose the field as ``request.field``.

    Must run after with_survey() so that ``request.survey`` exists.
    """
    def decorator(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            complete = ('field_type' in kwargs and 'field_id' in kwargs
                        and hasattr(request, 'survey'))
            if not complete:
                abort(404)
            field_type = kwargs['field_type']
            field_id = kwargs['field_id']
            # 403 for an unknown type, 404 for a type/id pair the survey
            # does not contain.
            if field_type not in settings.field_types:
                abort(403)
            if not request.survey.has_field(field_type, field_id):
                abort(404)
            request.field = request.survey.get_field(field_type, field_id)
            return f(*args, **kwargs)
        return wrapped
    return decorator
def with_admin():
    """Decorator factory: require that the current user owns the current
    survey (403 otherwise; 404 when survey/user were not loaded first).

    Must run after with_survey() and with_user() so that ``request.survey``
    and ``request.user`` are populated.
    """
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            if not hasattr(request, 'survey') or not hasattr(request, 'user'):
                logger.debug("with_admin: survey or user not loaded!")
                abort(404)
            # FIX: was a leftover bare `print` to stdout; route it through
            # the module logger like the other decorators in this file.
            logger.debug("with_admin: user=%s survey_owner=%s",
                         request.user.document_id,
                         str(request.survey['user']))
            if not request.survey['user'] == request.user:
                logger.debug("with_admin: not an admin!")
                abort(403)
            return f(*args, **kwargs)
        return decorated_function
    return decorator
def with_response():
    # Decorator factory: locate (or create) the Response document for the
    # current survey/session and expose it as ``request.response``.
    #
    # Lookup strategy:
    #   * If the client supplied a non-empty 'response_key' query argument,
    #     fetch the newest response with that key for this survey and
    #     re-bind it to the current session.
    #   * Otherwise mint a fresh key and fall back to the newest response
    #     recorded for this session.
    # When nothing is found, a brand-new Response is created.
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            if 'response_key' in request.args and request.args['response_key']:
                response_key = request.args['response_key']
                response = Response.collection.find_one({'survey_key' : request.survey['key'],'response_key':response_key},sort = [('_updated_at',pymongo.DESCENDING)])
                if response:
                    # Re-bind the keyed response to the current session.
                    response['session'] = request.session
                    response.save()
            else:
                response_key = uuid.uuid4().hex
                response = Response.collection.find_one({'survey_key' : request.survey['key'],'session':request.session},sort = [('_updated_at',pymongo.DESCENDING)])
            if not response:
                # Nothing on file: start a fresh response document.
                response = Response(**{'survey_key': request.survey['key'],'session':request.session,'response_key':response_key})
            request.response = response
            return f(*args, **kwargs)
        return decorated_function
    return decorator
| {
"repo_name": "adewes/instant-feedback",
"path": "get_feedback/utils.py",
"copies": "1",
"size": "7436",
"license": "mit",
"hash": -804023756826648600,
"line_mean": 32.3452914798,
"line_max": 167,
"alpha_frac": 0.5687197418,
"autogenerated": false,
"ratio": 4.366412213740458,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.024370889486153784,
"num_lines": 223
} |
from functools import wraps, update_wrapper
# You can't trivially replace this `functools.partial` because this binds to
# classes and returns bound instances, whereas functools.partial (on CPython)
# is a type and its instances don't bind.
def curry(_curried_func, *args, **kwargs):
    """Return a callable with the given positional and keyword arguments
    pre-applied; later positionals are appended and later keywords are
    merged on top of the pre-applied ones."""
    def _curried(*moreargs, **morekwargs):
        merged_kw = dict(kwargs, **morekwargs)
        return _curried_func(*(args + moreargs), **merged_kw)
    return _curried
def memoize(func, cache, num_args):
    """
    Wrap a function so that results for any argument tuple are stored in
    'cache'. Note that the args to the function must be usable as dictionary
    keys.

    Only the first num_args are considered when creating the key.
    """
    @wraps(func)
    def wrapper(*args):
        key = args[:num_args]
        # Compute-and-store only on a miss; later arguments beyond
        # num_args do not participate in the key.
        if key not in cache:
            cache[key] = func(*args)
        return cache[key]
    return wrapper
class Promise(object):
    """
    Base class for the proxy class created in the closure of the lazy
    function.  It exists purely so that lazily-evaluated promises can be
    recognised elsewhere with isinstance() checks.
    """
    pass
def lazy(func, *resultclasses):
    """
    Turns any callable into a lazy evaluated callable. You need to give result
    classes or types -- at least one is needed so that the automatic forcing of
    the lazy evaluation code is triggered. Results are not memoized; the
    function is evaluated on every access.
    """
    # Python 2 era implementation: relies on ``unicode`` and ``cmp``.
    class __proxy__(Promise):
        """
        Encapsulate a function call and act as a proxy for methods that are
        called on the result of that function. The function is not evaluated
        until one of the methods on the result is called.
        """
        # Class-level table: {resultclass: {method_name: unbound method}}.
        # Populated once, on first instantiation.
        __dispatch = None
        def __init__(self, args, kw):
            self.__func = func
            self.__args = args
            self.__kw = kw
            if self.__dispatch is None:
                self.__prepare_class__()
        def __reduce__(self):
            # Pickle as a call to the module-level rebuild helper.
            return (
                _lazy_proxy_unpickle,
                (self.__func, self.__args, self.__kw) + resultclasses
            )
        def __prepare_class__(cls):
            # Copy every method of every result class onto the proxy so
            # calls on the proxy trigger evaluation and dispatch.
            cls.__dispatch = {}
            for resultclass in resultclasses:
                cls.__dispatch[resultclass] = {}
                for (k, v) in resultclass.__dict__.items():
                    # All __promise__ return the same wrapper method, but they
                    # also do setup, inserting the method into the dispatch
                    # dict.
                    meth = cls.__promise__(resultclass, k, v)
                    if hasattr(cls, k):
                        continue
                    setattr(cls, k, meth)
            cls._delegate_str = str in resultclasses
            cls._delegate_unicode = unicode in resultclasses
            assert not (cls._delegate_str and cls._delegate_unicode), "Cannot call lazy() with both str and unicode return types."
            if cls._delegate_unicode:
                cls.__unicode__ = cls.__unicode_cast
            elif cls._delegate_str:
                cls.__str__ = cls.__str_cast
        __prepare_class__ = classmethod(__prepare_class__)
        def __promise__(cls, klass, funcname, func):
            # Builds a wrapper around some magic method and registers that magic
            # method for the given type and method name.
            def __wrapper__(self, *args, **kw):
                # Automatically triggers the evaluation of a lazy value and
                # applies the given magic method of the result type.
                res = self.__func(*self.__args, **self.__kw)
                # Walk the MRO so subclasses of a registered result class
                # still dispatch correctly.
                for t in type(res).mro():
                    if t in self.__dispatch:
                        return self.__dispatch[t][funcname](res, *args, **kw)
                raise TypeError("Lazy object returned unexpected type.")
            if klass not in cls.__dispatch:
                cls.__dispatch[klass] = {}
            cls.__dispatch[klass][funcname] = func
            return __wrapper__
        __promise__ = classmethod(__promise__)
        def __unicode_cast(self):
            # Installed as __unicode__ when unicode is a result class.
            return self.__func(*self.__args, **self.__kw)
        def __str_cast(self):
            # Installed as __str__ when str is a result class.
            return str(self.__func(*self.__args, **self.__kw))
        def __cmp__(self, rhs):
            # Force evaluation (coercing to the declared string type) and
            # compare; comparing two promises recurses with sides swapped.
            if self._delegate_str:
                s = str(self.__func(*self.__args, **self.__kw))
            elif self._delegate_unicode:
                s = unicode(self.__func(*self.__args, **self.__kw))
            else:
                s = self.__func(*self.__args, **self.__kw)
            if isinstance(rhs, Promise):
                return -cmp(rhs, s)
            else:
                return cmp(s, rhs)
        def __mod__(self, rhs):
            # Support "%"-formatting only for string-typed promises.
            if self._delegate_str:
                return str(self) % rhs
            elif self._delegate_unicode:
                return unicode(self) % rhs
            else:
                raise AssertionError('__mod__ not supported for non-string types')
        def __deepcopy__(self, memo):
            # Instances of this class are effectively immutable. It's just a
            # collection of functions. So we don't need to do anything
            # complicated for copying.
            memo[id(self)] = self
            return self
    def __wrapper__(*args, **kw):
        # Creates the proxy object, instead of the actual value.
        return __proxy__(args, kw)
    return wraps(func)(__wrapper__)
def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses):
    """Module-level rebuild helper referenced by __proxy__.__reduce__;
    reconstructs a lazy proxy during unpickling."""
    rebuilt = lazy(func, *resultclasses)
    return rebuilt(*args, **kwargs)
def allow_lazy(func, *resultclasses):
    """
    A decorator that allows a function to be called with one or more lazy
    arguments. If none of the args are lazy, the function is evaluated
    immediately, otherwise a __proxy__ is returned that will evaluate the
    function when needed.
    """
    def wrapper(*args, **kwargs):
        has_promise = any(isinstance(arg, Promise)
                          for arg in list(args) + kwargs.values())
        if not has_promise:
            return func(*args, **kwargs)
        return lazy(func, *resultclasses)(*args, **kwargs)
    return wraps(func)(wrapper)
class LazyObject(object):
    """
    A wrapper for another class that can be used to delay instantiation of
    the wrapped class.

    By subclassing, you have the opportunity to intercept and alter the
    instantiation. If you don't need to do that, use SimpleLazyObject.
    """
    def __init__(self):
        self._wrapped = None

    def __getattr__(self, name):
        # Materialise on first attribute access, then delegate.
        if self._wrapped is None:
            self._setup()
        return getattr(self._wrapped, name)

    def __setattr__(self, name, value):
        if name == "_wrapped":
            # Write straight into __dict__ so this assignment does not
            # re-enter __setattr__ forever.
            self.__dict__["_wrapped"] = value
            return
        if self._wrapped is None:
            self._setup()
        setattr(self._wrapped, name, value)

    def __delattr__(self, name):
        if name == "_wrapped":
            raise TypeError("can't delete _wrapped.")
        if self._wrapped is None:
            self._setup()
        delattr(self._wrapped, name)

    def _setup(self):
        """
        Must be implemented by subclasses to initialise the wrapped object.
        """
        raise NotImplementedError

    # introspection support: expose the wrapped object's attribute names.
    __members__ = property(lambda self: self.__dir__())

    def __dir__(self):
        if self._wrapped is None:
            self._setup()
        return dir(self._wrapped)
class SimpleLazyObject(LazyObject):
    """
    A lazy object initialised from any function.

    Designed for compound objects of unknown type. For builtins or objects of
    known type, use django.utils.functional.lazy.
    """
    def __init__(self, func):
        """
        Pass in a callable that returns the object to be wrapped.

        If copies are made of the resulting SimpleLazyObject, which can happen
        in various circumstances within Django, then you must ensure that the
        callable can be safely run more than once and will return the same
        value.
        """
        # Stored via __dict__ to bypass LazyObject.__setattr__, which would
        # otherwise force _setup() / delegate to the wrapped object.
        self.__dict__['_setupfunc'] = func
        # For some reason, we have to inline LazyObject.__init__ here to avoid
        # recursion
        self._wrapped = None
    def __str__(self):
        if self._wrapped is None: self._setup()
        return str(self._wrapped)
    def __unicode__(self):
        # Python 2 only: unicode() does not exist on Python 3.
        if self._wrapped is None: self._setup()
        return unicode(self._wrapped)
    def __deepcopy__(self, memo):
        if self._wrapped is None:
            # We have to use SimpleLazyObject, not self.__class__, because the
            # latter is proxied.
            result = SimpleLazyObject(self._setupfunc)
            memo[id(self)] = result
            return result
        else:
            import copy
            return copy.deepcopy(self._wrapped, memo)
    # Need to pretend to be the wrapped class, for the sake of objects that care
    # about this (especially in equality tests)
    def __get_class(self):
        if self._wrapped is None: self._setup()
        return self._wrapped.__class__
    __class__ = property(__get_class)
    def __eq__(self, other):
        if self._wrapped is None: self._setup()
        return self._wrapped == other
    def __hash__(self):
        if self._wrapped is None: self._setup()
        return hash(self._wrapped)
    def _setup(self):
        # Materialise the wrapped object on first real use.
        self._wrapped = self._setupfunc()
| {
"repo_name": "skevy/django",
"path": "django/utils/functional.py",
"copies": "2",
"size": "9520",
"license": "bsd-3-clause",
"hash": 8645507386163942000,
"line_mean": 34.9245283019,
"line_max": 130,
"alpha_frac": 0.5698529412,
"autogenerated": false,
"ratio": 4.488448844884489,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6058301786084488,
"avg_score": null,
"num_lines": null
} |
from functools import wraps, WRAPPER_ASSIGNMENTS
from django.http.response import HttpResponse
from rest_framework_extensions.settings import extensions_api_settings
def get_cache(alias):
    """Return the Django cache backend registered under *alias*."""
    # Imported lazily so this module can load before Django's cache
    # machinery is configured.
    from django.core.cache import caches
    return caches[alias]
class CacheResponse:
    """
    Store/Receive and return cached `HttpResponse` based on DRF response.

    .. note::
        This decorator will render and discard the original DRF response in
        favor of Django's `HttpResponse`. The allows the cache to retain a
        smaller memory footprint and eliminates the need to re-render
        responses on each request. Furthermore it eliminates the risk for users
        to unknowingly cache whole Serializers and QuerySets.
    """
    def __init__(self,
                 timeout=None,
                 key_func=None,
                 cache=None,
                 cache_errors=None):
        # Each option falls back to the drf-extensions settings default.
        if timeout is None:
            self.timeout = extensions_api_settings.DEFAULT_CACHE_RESPONSE_TIMEOUT
        else:
            self.timeout = timeout

        if key_func is None:
            self.key_func = extensions_api_settings.DEFAULT_CACHE_KEY_FUNC
        else:
            self.key_func = key_func

        if cache_errors is None:
            self.cache_errors = extensions_api_settings.DEFAULT_CACHE_ERRORS
        else:
            self.cache_errors = cache_errors

        self.cache = get_cache(cache or extensions_api_settings.DEFAULT_USE_CACHE)

    def __call__(self, func):
        # Used as a decorator on a view method; `this` closes over the
        # configured CacheResponse instance while `self` inside `inner`
        # is the view instance.
        this = self

        @wraps(func, assigned=WRAPPER_ASSIGNMENTS)
        def inner(self, request, *args, **kwargs):
            return this.process_cache_response(
                view_instance=self,
                view_method=func,
                request=request,
                args=args,
                kwargs=kwargs,
            )
        return inner

    def process_cache_response(self,
                               view_instance,
                               view_method,
                               request,
                               args,
                               kwargs):
        """Serve the cached (content, status, headers) triple when present;
        otherwise render the view, cache the triple, and return it."""
        key = self.calculate_key(
            view_instance=view_instance,
            view_method=view_method,
            request=request,
            args=args,
            kwargs=kwargs
        )

        timeout = self.calculate_timeout(view_instance=view_instance)

        response_triple = self.cache.get(key)
        if not response_triple:
            # render response to create and cache the content byte string
            response = view_method(view_instance, request, *args, **kwargs)
            response = view_instance.finalize_response(request, response, *args, **kwargs)
            response.render()

            # Error responses are cached only when cache_errors is set.
            if not response.status_code >= 400 or self.cache_errors:
                # django 3.0 has not .items() method, django 3.2 has not ._headers
                if hasattr(response, '_headers'):
                    headers = response._headers.copy()
                else:
                    headers = {k: (k, v) for k, v in response.items()}

                response_triple = (
                    response.rendered_content,
                    response.status_code,
                    headers
                )
                self.cache.set(key, response_triple, timeout)
        else:
            # build smaller Django HttpResponse
            content, status, headers = response_triple
            response = HttpResponse(content=content, status=status)
            for k, v in headers.values():
                response[k] = v

        if not hasattr(response, '_closable_objects'):
            response._closable_objects = []

        return response

    def calculate_key(self,
                      view_instance,
                      view_method,
                      request,
                      args,
                      kwargs):
        """Resolve key_func (a callable, or the name of a method on the view
        instance) and use it to build the cache key."""
        if isinstance(self.key_func, str):
            key_func = getattr(view_instance, self.key_func)
        else:
            key_func = self.key_func
        return key_func(
            view_instance=view_instance,
            view_method=view_method,
            request=request,
            args=args,
            kwargs=kwargs,
        )

    def calculate_timeout(self, view_instance, **_):
        # A string timeout names an attribute on the view instance.
        if isinstance(self.timeout, str):
            self.timeout = getattr(view_instance, self.timeout)
        return self.timeout


# Lower-case alias used as the decorator entry point.
cache_response = CacheResponse
| {
"repo_name": "chibisov/drf-extensions",
"path": "rest_framework_extensions/cache/decorators.py",
"copies": "1",
"size": "4464",
"license": "mit",
"hash": -3111288558497384000,
"line_mean": 32.3134328358,
"line_max": 90,
"alpha_frac": 0.5488351254,
"autogenerated": false,
"ratio": 4.764140875133404,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5812976000533404,
"avg_score": null,
"num_lines": null
} |
from FunctorApplicativeMonad import Functor, Applicative, Monad
from abc import ABCMeta, abstractmethod
from ParseResult import *
from Utils import *
class Parser(Functor, Applicative, Monad):
    """A monadic parser, similar to Haskell's Parsec. In contrast to Parsec,
    this parser backtracks by default: this behaviour can be avoided with the
    `commit` combinator."""
    def __init__(self, run):
        # run :: (list of byte lines terminated by None, (line, col))
        #        -> ParseResult (Success or Failure)
        self._run = run
    def fmap(self, mapper):
        # Functor map over the parsed value; position/failure pass through.
        return Parser(lambda t,l: self._run(t,l).fmap(mapper))
    def apply(self, something):
        # Applicative <*>: run self to get a function, then map it over
        # `something` starting from where self stopped.
        def run(text, loc):
            result = self._run(text, loc)
            if not result: return result
            return something.fmap(result._val)._run(text, result._loc)
        return Parser(run)
    def bind(self, func):
        # Monadic bind: feed the parsed value to func, run the parser it
        # returns from the updated location.
        def run(text, loc):
            result = self._run(text, loc)
            if not result: return result
            return func(result._val)._run(text, result._loc)
        return Parser(run)
    @staticmethod
    def pure(val):
        # Succeed with val without consuming input.
        return Parser(lambda _,l: Success(val,l))
    def __call__(self, string):
        # Entry point: encode to byte lines (None marks end of input) and
        # run from the origin; finish() converts the result for callers.
        return self._run(string.encode().splitlines() + [None], (0,0)).finish()
    def __or__(lhs, rhs):
        # Alternation: try lhs; on an uncommitted failure try rhs, merging
        # failure information with `|` on the two results.
        def run(t, l):
            res = lhs._run(t,l)
            if res or res._com: return res
            return res | rhs._run(t,l)
        return Parser(run)
    def __xor__(self, dsc):
        """Corresponds to Parsec's `<?>` operator. Used to give a parser a
        description"""
        def run(text, loc):
            result = self._run(text, loc)
            # On failure, replace the expected-set with the description.
            if not result: result._exp = {dsc}
            return result
        return Parser(run)
def quote(string: str) -> str:
    """Wrap *string* in double quotes (used when building error messages)."""
    return '"{}"'.format(string)
def commit(p: Parser) -> Parser:
    """Turn off backtracking for *p*: any failure it produces is flagged as
    committed, so alternatives joined with `|` are not tried and the error
    is reported from inside p.

    This gives better error messages when parsing is ambiguous at some
    stage — e.g. committing after a binary operator in an expression
    parser, since what follows the operator is unambiguous."""
    def run(text, loc):
        outcome = p._run(text, loc)
        if not outcome:
            # Mark the failure so `|` will not retry with an alternative.
            outcome._com = True
        return outcome
    return Parser(run)
def advance(loc, by, over):
    """Advance the (line, column) position *loc* by *by* bytes over *over*,
    a list of byte lines terminated by a None sentinel.

    Wraps onto following lines when the column runs past the end of the
    current line, and returns (line, 0) as soon as the sentinel line is
    reached.
    """
    lin, col = loc[0], loc[1] + by
    cur = len(over[lin])
    while col >= cur:
        lin += 1
        col -= cur
        if over[lin] is None:
            return (lin, 0)
        # BUG FIX: refresh the current line's length.  Previously the
        # length of the starting line was reused for every iteration,
        # mis-positioning any advance that crossed more than one line
        # boundary when the lines had different lengths.
        cur = len(over[lin])
    return (lin, col)
def err(text, loc, dsc, length=None):
    """Build a Failure at *loc* with expected-set *dsc*.

    At end of input the found-message is 'eof' and the location is clamped
    to the end of the last real line; otherwise up to *length* bytes of the
    offending input are quoted."""
    line, col = loc
    if text[line] is None:
        return Failure((line - 1, len(text[line - 1])), dsc, 'eof')
    # length=None quotes to end of line (slice end stays None).
    end = length and min(length, len(text[line]) - col)
    found = quote(text[line][col:][:end].decode())
    return Failure(loc, dsc, found)
def match(string):
    """Parser matching the literal *string* at the current position and
    yielding it on success."""
    encoded = str.encode(string)
    def run(text, loc):
        line, col = loc
        current = text[line]
        if current is not None and current.startswith(encoded, col):
            return Success(string, advance(loc, len(encoded), text))
        return err(text, loc, {quote(string)}, len(encoded))
    return Parser(run)
def many(p):
    """Parser applying *p* zero or more times, collecting the results in a
    list (never fails unless a committed failure occurs)."""
    def run(text, loc):
        collected = []
        attempt = p._run(text, loc)
        while attempt:
            collected.append(attempt._val)
            loc = attempt._loc
            attempt = p._run(text, loc)
        # `attempt | Success(...)` keeps a committed failure; otherwise it
        # succeeds with whatever was collected.
        return attempt | Success(collected, loc)
    return Parser(run)
def some(p):
    # One-or-more: parse p once, then many(p), consing the first result
    # onto the rest.  `*` here is applicative apply inherited from the
    # Applicative base class (presumably wired to Parser.apply in
    # FunctorApplicativeMonad — confirm).
    return p.fmap(lambda x: lambda xs: [x] + xs) * many(p)
def choice(f, *p):
    """Parser trying each alternative in order, left to right (equivalent
    to folding the alternatives together with `|`)."""
    combined = f
    for alternative in p:
        combined = combined | alternative
    return combined
def bsatisfies(pred, dsc=set()):
    """Parser consuming one byte whose integer value satisfies *pred*,
    yielding it as a one-character string.  *dsc* is the expected-set used
    in error messages."""
    def run(text, loc):
        line, col = loc
        if text[line]:
            byte = text[line][col]
            if pred(byte):
                return Success(chr(byte), advance(loc, 1, text))
        return err(text, loc, dsc, 1)
    return Parser(run)
def satisfies(pred, dsc=set()):
    # Character-level variant of bsatisfies: the raw byte is passed through
    # chr() first, so *pred* sees a 1-character string.  Relies on
    # compose() from Utils applying right-to-left, i.e. pred(chr(c)) —
    # confirm against Utils.compose.
    return bsatisfies(compose(pred, chr), dsc)
def oneof(chars):
    """Parser matching any single character from *chars*."""
    allowed = set(str.encode(chars))
    return bsatisfies(allowed.__contains__, {quote(c) for c in chars})
def noneof(chars):
    """Parser matching any single character NOT present in *chars*."""
    forbidden = set(str.encode(chars))
    return bsatisfies(lambda b: b not in forbidden, {quote(c) for c in chars})
# Parser accepting any single character.
anychar = satisfies(const(True), {'any character'})
# Parser succeeding (with None) only at end of input, i.e. when the current
# line is the None sentinel; otherwise fails expecting 'eof'.
eof = Parser(lambda t,l: Success(None, l) if t[l[0]] == None else err(t,l,{'eof'}))
def chainl1(p,op):
    # Left-associative operator chain: parses `p (op p)*`, folding the
    # results with the binary functions yielded by *op*.  Note that *p* is
    # a thunk (invoked as p()) so recursive grammars can be built lazily.
    def run(t,l):
        x = p()._run(t,l)
        while x:
            o = op._run(t,x._loc)
            # No further operator: return the accumulated result.
            if not o: break
            y = p()._run(t,o._loc)
            # Operator without a right operand is a hard failure.
            if not y: return y
            # Fold: apply the operator function to (acc, new operand).
            x = Success(o._val(x._val,y._val), y._loc)
        return x
    return Parser(run)
| {
"repo_name": "oisdk/PyParse",
"path": "Parser.py",
"copies": "1",
"size": "4717",
"license": "mit",
"hash": -8202000146180460000,
"line_mean": 29.6298701299,
"line_max": 85,
"alpha_frac": 0.5743057028,
"autogenerated": false,
"ratio": 3.383787661406026,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4458093364206026,
"avg_score": null,
"num_lines": null
} |
from FunctorApplicativeMonad import Functor, Applicative, Monad
from typing import Callable, Any
from functools import partial
class Maybe(Functor, Applicative, Monad):
    """Optional value: either a present value (Just) or absent (Nothing),
    tracked by the _is_just flag."""

    def __init__(self, value):
        self._is_just = True
        self._value = value

    def fmap(self, mapper: Callable[[Any], Any]) -> 'Maybe':
        if not self._is_just:
            empty = Maybe(None)
            empty._is_just = False
            return empty
        value = self._value
        try:
            return Maybe(mapper(value))
        except TypeError:
            # Arity mismatch is treated as partial application
            # (auto-currying of the mapped function).
            return Maybe(partial(mapper, value))

    def apply(self, something: 'Maybe') -> 'Maybe':
        # Just(f).apply(x) maps f over x; Nothing.apply(x) stays Nothing.
        return something.fmap(self._value) if self._is_just else self

    def bind(self, func: Callable[[Any], 'Maybe']) -> 'Maybe':
        if self._is_just:
            return func(self._value)
        empty = Maybe(None)
        empty._is_just = False
        return empty

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Maybe):
            return False
        return (self._is_just == other._is_just
                and self._value == other._value)

    def __repr__(self) -> str:
        return 'Just %s' % self._value if self._is_just else 'Nothing'

    @staticmethod
    def pure(val) -> 'Maybe':
        return Maybe(val)
# Module-level singleton representing the absent value.
Nothing = Maybe(None)
Nothing._is_just = False
def Just(val):
    # Constructor alias for a present value.
    return Maybe(val)
| {
"repo_name": "oisdk/PyParse",
"path": "Maybe.py",
"copies": "1",
"size": "1471",
"license": "mit",
"hash": 7412349542677714000,
"line_mean": 27.8431372549,
"line_max": 82,
"alpha_frac": 0.5730795377,
"autogenerated": false,
"ratio": 3.9122340425531914,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4985313580253191,
"avg_score": null,
"num_lines": null
} |
from FunctorApplicativeMonad import Functor, Applicative, Monad
from typing import Callable, Any, TypeVar, Tuple, Generic, cast, Union
from abc import ABCMeta, abstractmethod
# Generic type parameters used throughout this module.
A = TypeVar('A')
B = TypeVar('B')
C = TypeVar('C')
# The state type threaded through State computations.
S = TypeVar('S')
class State(Functor, Applicative, Monad, Generic[S,A]):
    """State monad: wraps a function ``S -> (S, A)`` that threads a state
    value through a computation."""

    def __init__(self, fn: Callable[[S], Tuple[S, A]]):
        # BUG FIX: the function used to be stored as an instance attribute
        # named __call__, but special methods are looked up on the type,
        # not the instance, so `self(state)` raised TypeError.  Store it
        # under a private name and forward through a real __call__ method.
        self._fn = fn

    def __call__(self, state):
        """Run the computation on *state*, returning (new_state, value)."""
        return self._fn(state)

    def fmap(self, fn: Callable[[A], Any]) -> 'State':
        def run(state):
            # Map fn over the produced value, keeping the new state.
            return second(self(state), fn)
        return State(run)

    def apply(self, something: 'State') -> 'State':
        def run(state):
            new, val = self(state)
            # NOTE(review): this applies val to the whole (state, value)
            # pair produced by `something`; kept as-is to preserve the
            # original behaviour, but it looks suspect — verify intent.
            return val(something(new))
        return State(run)

    def bind(self, fn: Callable[[A], 'State']) -> 'State':
        def run(state):
            new, val = self(state)
            # Build the next computation from the value, run it on the
            # updated state.
            return fn(val)(new)
        return State(run)

    @staticmethod
    def pure(val) -> 'State':
        # BUG FIX: the lambda previously closed over an undefined name `x`,
        # raising NameError whenever pure() was used.
        return State(lambda s: (s, val))
def second(tup: Tuple[A, B], fn: Callable[[B], C]) -> Tuple[A, C]:
    """Apply *fn* to the second element of a pair, leaving the first alone."""
    # BUG FIX: the annotation was Callable[B, C], which is invalid typing
    # syntax and raises TypeError when the module is imported; argument
    # lists must be bracketed: Callable[[B], C].
    return (tup[0], fn(tup[1]))
# State computation that yields the current state as its value.
get = State(lambda s: (s,s))
def put(val):
    # Build a computation that replaces the state with *val*, yielding None.
    return State(lambda _: (val, None))
| {
"repo_name": "oisdk/PyParse",
"path": "State.py",
"copies": "1",
"size": "1182",
"license": "mit",
"hash": 3867814527406225000,
"line_mean": 26.488372093,
"line_max": 70,
"alpha_frac": 0.5727580372,
"autogenerated": false,
"ratio": 3.2472527472527473,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43200107844527474,
"avg_score": null,
"num_lines": null
} |
from funcy import compose, partial, walk, mapcat, first,\
str_join
from firestone import get_user, get_f, merge_id
from django.core.urlresolvers import reverse
import date_converter
# NOTE: Python 2 tuple-parameter lambdas below (invalid syntax on Python 3).
# Walk a {id: doc} mapping, merging each key into its document via merge_id.
w_ids = partial(walk, lambda (k, v): merge_id(k, v))
# Take a mapping's values as a plain list.
as_arr = partial(lambda v: v.values())
# {id: doc} mapping -> list of docs that carry their ids.
mids = compose(as_arr, w_ids)
p_collection = compose(as_arr, w_ids)
# Pull the 'questions' entry out of each level document.
extract_qs = partial(map, lambda a: a['questions'])
class JobStatus:
    # String constants describing a job's lifecycle state, as stored in
    # the job document's 'status' field.
    drafting = "drafting"
    drafted = "drafted"
    selected = "selected"
def job_status(job):
    """Return the raw status string stored on the *job* document."""
    status = job['status']
    return status
def user_selected(job, user_id):
    """True when *job* is in the 'selected' state and the job owner's
    chosen expert is *user_id*."""
    # BUG FIX: the status check used `is`, comparing string identity
    # rather than equality; equal strings loaded from the database are not
    # guaranteed to be the same object, so the check could be False even
    # for a selected job.  Compare with == instead.
    return (job['status'] == JobStatus.selected) and \
           (get_user(job['owner'])['expert'] == user_id)
def picked_expert(job):
    """True when the job's owner has already chosen an expert."""
    owner = get_user(job['owner'])
    return 'expert' in owner
def job_applied_status(job, user_id):
    """Summarise the job's state from the point of view of expert *user_id*.

    Returns a dict of four mutually-derived booleans:
    in_progress / done_selected / done_not_selected / idle.
    """
    in_progress = job_status(job) == JobStatus.drafting
    selected = (not in_progress) and user_selected(job, user_id)
    return {
        'in_progress': in_progress,
        'done_selected': selected,
        'done_not_selected': (not in_progress) and (not selected),
        'idle': (not in_progress) and (not picked_expert(job)),
    }
def base(rest):
    """Absolute URL on the public jonnyibiza.com site for path *rest*."""
    url = "https://jonnyibiza.com/%s" % rest
    return url
def fe_expert(dest):
    """Front-end URL under the expert section of the site."""
    path = "expert/%s" % dest
    return base(path)
def fe_expert_client(client):
    """Expert-facing URL for one particular client."""
    path = "client/%s" % client
    return fe_expert(path)
def fe_user_pick():
    # URL of the pick-an-expert page in the user-facing app.
    return base('app/pick-expert')
def full_url(rest):
    """Absolute URL on the backend (heroku) host for path *rest*."""
    prefix = "https://jonnyinc.herokuapp.com"
    return prefix + "%s" % rest
def apply_for_job_url(job_id, user_id):
    """Absolute backend URL for the apply-for-job endpoint."""
    path = reverse('apply_for_job', args=(job_id, user_id,))
    return full_url(path)
def get_details(user_id):
    # Pair the global question list with this user's recorded answers.
    return zipthem(get_questions(), get_answers(user_id))
def zipthem(questions, answers):
    """Pair each answer with its question and a printable answer string.

    Each result entry carries the question text, the raw answer dict, and a
    display string built by answer_as_str.
    """
    def question_for(qid):
        # First question whose id matches, or None if absent.
        for question in questions:
            if question['id'] == qid:
                return question
        return None

    def entry(ans):
        question = question_for(ans['id'])
        return {
            'answer_str': answer_as_str(ans['value'], question['type']),
            'question': question['text'],
            'answer': ans,
        }

    return map(entry, answers)
def get_answers(user_id):
    """All answers recorded under the user's anonymous profile."""
    anon_id = get_anon(user_id)
    return get_anons_answers(anon_id)
def get_anon(user_id):
    # A user record points to its anonymous profile via the 'anon' key.
    return get_user(user_id)['anon']
def get_anons_answers(anon_id):
    """Answers stored under *anon_id* as a list with ids merged in.

    Returns an empty list when the profile has no answers yet.
    """
    answers = get_f('answers')(anon_id)
    return mids(answers) if answers is not None else []
def get_questions():
    """Flatten every level's questions into a single list (ids merged in)."""
    levels = p_collection(get_f('levels')())
    return mapcat(mids, extract_qs(levels))
def answer_as_str(answer, qtype):
    """Render an answer value as a display string for question type *qtype*.

    Unknown question types fall back to the value's str() representation.
    """
    def j(v): return str_join(', ', v)
    # 'check-list' and 'bingo' answers are both plain lists; the original
    # duplicated identical branches for them.
    if qtype in ('check-list', 'bingo'):
        return j(answer)
    elif qtype == 'about':
        return j(answer.values())
    elif qtype == 'company-details':
        return j(about_company(answer))
    elif qtype == 'dates':
        return j(dates_answer(answer))
    elif qtype == 'rolling':
        return budget_names(answer)
    # Idiom fix: use built-in str() rather than calling __str__ directly.
    return str(answer)
def about_company(cd):
    """Turn a company-details answer dict into a list of display strings.

    Every field is optional; missing fields are skipped.
    """
    r = []
    # Fix: use [] (not '') in the else branch. ``r += ''`` only "worked"
    # because iterating an empty string adds nothing.
    r += ['With partner'] if 'partner' in cd and cd['partner'] else []
    if 'malefriends' in cd:
        r += ['Male friends: %s' % cd['malefriends']['count']]
    if 'femalefriends' in cd:
        r += ['Female friends: %s' % cd['femalefriends']['count']]
    if 'kids' in cd:
        a = ['Kids:']
        a += ['%s Boys' % cd['kids']['boys']] if 'boys' in cd['kids'] else []
        a += ['%s Girls' % cd['kids']['girls']] if 'girls' in cd['kids'] else []
        r += [str_join(' ', a)]
    # Robustness: guard 'details' like every other field instead of letting
    # a missing key raise KeyError.
    if 'details' in cd:
        r += [cd['details']]
    return r
def dates_answer(dates):
    """Format a dates answer ({'start', 'end', 'flexible'}) into display strings."""
    date_format = '%B %d, %Y'
    d = []
    if 'start' in dates:
        # Timestamps arrive in milliseconds; sanitize_date converts to seconds.
        start = sanitize_date(dates['start'])
        d += ['From %s' % date_converter.timestamp_to_string(start, date_format)]
    if 'end' in dates:
        end = sanitize_date(dates['end'])
        d += ['To %s' % date_converter.timestamp_to_string(end, date_format)]
    if 'flexible' in dates:
        if dates['flexible']:
            d += ["Dates are flexible!"]
    return d
def sanitize_date(date):
    """Convert a millisecond timestamp (possibly a string) to whole seconds.

    Fix: use floor division so the result stays an int on both Python 2 and
    Python 3 (true division would yield a float on Python 3 and break
    downstream timestamp formatting).
    """
    return int(date) // 1000
def budget_names(val):
    """Human-readable label for a 1-5 budget level (None when out of range)."""
    labels = {
        1: 'Backpacker',
        2: 'Cosmopolitan',
        3: 'Jetsetter',
        4: 'Rock star!',
        5: 'MOTHERFUCKING SULTAN!',
    }
    return labels.get(val)
| {
"repo_name": "popara/jonny-api",
"path": "matching/models.py",
"copies": "1",
"size": "4572",
"license": "mit",
"hash": -7200669232063566000,
"line_mean": 21.9748743719,
"line_max": 81,
"alpha_frac": 0.5839895013,
"autogenerated": false,
"ratio": 3.1038696537678208,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.918464708244459,
"avg_score": 0.000642414524646025,
"num_lines": 199
} |
from funcy import ContextDecorator
from django.db.models import Manager
from django.db.models.query import QuerySet
# query
def cached_as(*samples, **kwargs):
    """No-op stand-in for cacheops' cached_as: returns the function unchanged."""
    def decorator(func):
        return func
    return decorator

cached_view_as = cached_as
def install_cacheops():
    """Monkeypatch no-op cacheops stubs onto Django's Manager and QuerySet."""
    if not hasattr(Manager, 'get_queryset'):
        # Old-Django compatibility: alias the legacy get_query_set name.
        Manager.get_queryset = lambda self: self.get_query_set()

    # query: every caching entry point becomes an identity / None stub.
    QuerySet._cache_key = lambda self, extra=None: None
    QuerySet.nocache = lambda self: self
    QuerySet.cache = lambda self: self
    QuerySet.inplace = lambda self: self

    Manager.nocache = lambda self: self.get_queryset().nocache()
    Manager.cache = lambda self: self.get_queryset().cache()
    Manager.inplace = lambda self: self.get_queryset().inplace()
# invalidation
def invalidate_obj(obj):
    """No-op: the fake backend caches nothing, so nothing to invalidate."""

def invalidate_model(model):
    """No-op: the fake backend caches nothing, so nothing to invalidate."""

def invalidate_all():
    """No-op: the fake backend caches nothing, so nothing to invalidate."""
# simple
from cacheops.simple import BaseCache, CacheMiss
class DummyCache(BaseCache):
    """Cache backend that stores nothing: every lookup is a miss."""

    def get(self, cache_key):
        # Always behave as if the key is absent.
        raise CacheMiss

    def set(self, cache_key, data, timeout=None):
        return None

    def delete(self, cache_key):
        return None
# Shared no-op cache instances; ``cached`` / ``cached_view`` are decorator
# factories presumably provided by BaseCache -- confirm in cacheops.simple.
cache = DummyCache()
cached = cache.cached
cached_view = cache.cached_view
file_cache = DummyCache()
# templates
def invalidate_fragment(fragment_name, *extra):
    """No-op: the fake backend keeps no cached template fragments."""
    return None
class _no_invalidation(ContextDecorator):
    """Context manager / decorator that disables nothing (fake backend)."""

    def __enter__(self):
        return None

    def __exit__(self, type, value, traceback):
        # Returning None (falsy) lets exceptions propagate, same as the
        # real implementation's contract.
        return None

no_invalidation = _no_invalidation()
| {
"repo_name": "andwun/django-cacheops",
"path": "cacheops/fake.py",
"copies": "2",
"size": "1465",
"license": "bsd-3-clause",
"hash": 631760818426064300,
"line_mean": 20.8656716418,
"line_max": 64,
"alpha_frac": 0.6887372014,
"autogenerated": false,
"ratio": 3.690176322418136,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5378913523818136,
"avg_score": null,
"num_lines": null
} |
from funcy import first
from pygtrie import Trie
from dvc.exceptions import OutputDuplicationError, OverlappingOutputPathsError
def build_outs_trie(stages):
    """Build a prefix trie mapping output-path parts to outputs across *stages*.

    Raises:
        OutputDuplicationError: two stages declare the exact same output path.
        OverlappingOutputPathsError: one output path lies inside another
            output's directory.
    """
    outs = Trie()

    for stage in stages:
        for out in stage.outs:
            out_key = out.path_info.parts

            # Check for dup outs
            if out_key in outs:
                dup_stages = [stage, outs[out_key].stage]
                raise OutputDuplicationError(str(out), dup_stages)

            # Check for overlapping outs
            if outs.has_subtrie(out_key):
                # The new out is an ancestor of an already-registered out.
                parent = out
                overlapping = first(outs.values(prefix=out_key))
            else:
                # Otherwise the new out may be a descendant of a registered
                # out; shortest_prefix finds such an ancestor (value is None
                # when there is none).
                parent = outs.shortest_prefix(out_key).value
                overlapping = out
            if parent and overlapping:
                msg = (
                    "The output paths:\n'{}'('{}')\n'{}'('{}')\n"
                    "overlap and are thus in the same tracked directory.\n"
                    "To keep reproducibility, outputs should be in separate "
                    "tracked directories or tracked individually."
                ).format(
                    str(parent),
                    parent.stage.addressing,
                    str(overlapping),
                    overlapping.stage.addressing,
                )
                raise OverlappingOutputPathsError(parent, overlapping, msg)

            outs[out_key] = out

    return outs
| {
"repo_name": "efiop/dvc",
"path": "dvc/repo/trie.py",
"copies": "1",
"size": "1445",
"license": "apache-2.0",
"hash": -3374930792752370000,
"line_mean": 33.4047619048,
"line_max": 78,
"alpha_frac": 0.5287197232,
"autogenerated": false,
"ratio": 4.631410256410256,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 42
} |
from funcy import group_by
from dvc.scm.tree import WorkingTree
def brancher(  # noqa: E302
    self, revs=None, all_branches=False, all_tags=False, all_commits=False
):
    """Generator that iterates over specified revisions.

    Args:
        revs (list): a list of revisions to iterate over.
        all_branches (bool): iterate over all available branches.
        all_commits (bool): iterate over all commits.
        all_tags (bool): iterate over all available tags.

    Yields:
        str: the display name for the currently selected tree, it could be:
            - a git revision identifier
            - empty string it there is no branches to iterate over
            - "Working Tree" if there are uncommitted changes in the SCM repo
    """
    if not any([revs, all_branches, all_tags, all_commits]):
        yield ""
        return

    saved_tree = self.tree
    # Fix: copy the caller's list before extending it below. The original
    # ``revs.extend(...)`` mutated the argument in place, leaking branch/tag
    # names back to the caller between invocations.
    revs = list(revs) if revs else []

    scm = self.scm

    self.tree = WorkingTree(self.root_dir)
    yield "working tree"

    if all_commits:
        revs = scm.list_all_commits()
    else:
        if all_branches:
            revs.extend(scm.list_branches())

        if all_tags:
            revs.extend(scm.list_tags())

    try:
        if revs:
            # Multiple names may resolve to the same SHA (e.g. a tag and a
            # branch); visit each tree once and report all its names.
            for sha, names in group_by(scm.resolve_rev, revs).items():
                self.tree = scm.get_tree(sha)
                yield ", ".join(names)
    finally:
        # Always restore the original tree, even if the consumer bails out.
        self.tree = saved_tree
| {
"repo_name": "dmpetrov/dataversioncontrol",
"path": "dvc/repo/brancher.py",
"copies": "1",
"size": "1420",
"license": "apache-2.0",
"hash": 6698374027437437000,
"line_mean": 27.4,
"line_max": 77,
"alpha_frac": 0.6014084507,
"autogenerated": false,
"ratio": 3.9664804469273744,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5067888897627375,
"avg_score": null,
"num_lines": null
} |
from funcy import group_by
def brancher(  # noqa: E302
    self,
    revs=None,
    all_branches=False,
    all_tags=False,
    all_commits=False,
    all_experiments=False,
    sha_only=False,
):
    """Generator that iterates over specified revisions.

    Args:
        revs (list): a list of revisions to iterate over.
        all_branches (bool): iterate over all available branches.
        all_commits (bool): iterate over all commits.
        all_tags (bool): iterate over all available tags.
        sha_only (bool): only return git SHA for a revision.

    Yields:
        str: the display name for the currently selected fs, it could be:
            - a git revision identifier
            - empty string it there is no branches to iterate over
            - "workspace" if there are uncommitted changes in the SCM repo
    """
    if not any([revs, all_branches, all_tags, all_commits, all_experiments]):
        yield ""
        return

    from dvc.fs.local import LocalFileSystem

    saved_fs = self.fs
    # Copy so that the extend() calls below never mutate the caller's list.
    revs = revs.copy() if revs else []

    scm = self.scm

    self.fs = LocalFileSystem(url=self.root_dir)
    yield "workspace"

    # "workspace" was already yielded above; drop it from the rev list so it
    # is not resolved as a git revision.
    if revs and "workspace" in revs:
        revs.remove("workspace")

    if all_commits:
        revs = scm.list_all_commits()
    else:
        if all_branches:
            revs.extend(scm.list_branches())

        if all_tags:
            revs.extend(scm.list_tags())

        if all_experiments:
            from dvc.repo.experiments.utils import exp_commits

            revs.extend(exp_commits(scm))

    try:
        if revs:
            # Several names can resolve to the same SHA; visit each fs once.
            for sha, names in group_by(scm.resolve_rev, revs).items():
                self.fs = scm.get_fs(sha)
                # ignore revs that don't contain repo root
                # (i.e. revs from before a subdir=True repo was init'ed)
                if self.fs.exists(self.root_dir):
                    if sha_only:
                        yield sha
                    else:
                        yield ", ".join(names)
    finally:
        self.fs = saved_fs
| {
"repo_name": "efiop/dvc",
"path": "dvc/repo/brancher.py",
"copies": "1",
"size": "2050",
"license": "apache-2.0",
"hash": -2657371672896400400,
"line_mean": 27.8732394366,
"line_max": 77,
"alpha_frac": 0.5780487805,
"autogenerated": false,
"ratio": 4.051383399209486,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5129432179709486,
"avg_score": null,
"num_lines": null
} |
from funcy import identity
from jinja2 import Markup
from dxr.filters import QualifiedNameFilterBase, Filter, negatable
class _QualifiedNameFilter(QualifiedNameFilterBase):
    """Common base for all Rust search filters: pins the plugin language."""
    lang = "rust"
# One filter class per search keyword (e.g. ``function:foo``). ``name`` is the
# query keyword and ``description`` the UI help text. The ``is_identifier`` /
# ``is_reference`` flags are interpreted by QualifiedNameFilterBase --
# presumably definition-site vs. reference-site search; confirm in dxr.filters.
class FunctionFilter(_QualifiedNameFilter):
    name = 'function'
    is_identifier = True
    description = Markup('Function or method definition: <code>function:foo</code>')

class FunctionRefFilter(_QualifiedNameFilter):
    name = 'function-ref'
    is_reference = True
    description = 'Function or method references'

class CallersFilter(_QualifiedNameFilter):
    name = 'callers'
    is_reference = True
    description = 'Function callers'

class CalledByFilter(_QualifiedNameFilter):
    name = 'called-by'
    description = 'Functions called by this function'

class FnImplsFilter(_QualifiedNameFilter):
    name = 'fn-impls'
    is_identifier = True
    description = 'Function implementations'

class DerivedFilter(_QualifiedNameFilter):
    name = 'derived'
    description = 'Sub-traits'

class BasesFilter(_QualifiedNameFilter):
    name = 'bases'
    description = 'Super-traits'

class ImplFilter(_QualifiedNameFilter):
    name = 'impl'
    is_reference = True
    description = 'Implementations'

class ModuleFilter(_QualifiedNameFilter):
    name = 'module'
    is_identifier = True
    description = 'Module defintions'

class ModuleUseFilter(_QualifiedNameFilter):
    name = 'module-use'
    is_reference = True
    description = 'Module imports'

class VarFilter(_QualifiedNameFilter):
    name = 'var'
    is_identifier = True
    description = 'Variable definitions'

class VarRefFilter(_QualifiedNameFilter):
    name = 'var-ref'
    is_reference = True
    description = 'Variable references'

class TypeFilter(_QualifiedNameFilter):
    name = 'type'
    is_identifier = True
    description = 'Type (struct, enum, type, trait) definition'

class TypeRefFilter(_QualifiedNameFilter):
    name = 'type-ref'
    is_reference = True
    description = 'Type references'

class ModuleRefFilter(_QualifiedNameFilter):
    name = 'module-ref'
    is_reference = True
    description = 'Module references'

class ModuleAliasRefFilter(_QualifiedNameFilter):
    name = 'module-alias-ref'
    description = 'Module alias references'
    is_reference = True

class ExternRefFilter(_QualifiedNameFilter):
    name = 'extern-ref'
    is_reference = True
    description = 'References to items in external crate'
| {
"repo_name": "gartung/dxr",
"path": "dxr/plugins/rust/filters.py",
"copies": "1",
"size": "2424",
"license": "mit",
"hash": 3471569374878914000,
"line_mean": 26.2359550562,
"line_max": 84,
"alpha_frac": 0.720709571,
"autogenerated": false,
"ratio": 4.060301507537688,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5281011078537687,
"avg_score": null,
"num_lines": null
} |
from funcy import *
import requests
from django.conf import settings
"""
TODO:
- threads to reduce processing time
- permissive include directive syntax support (virtual vs file, spaces everywhere)
- timeouts
- error handling
"""
class SsiMiddleware(object):
    """
    Response-phase middleware that processes SSI include directives in the text/html responses content.
    Define SSI_BASE_URL = 'http://www.your.main.host.accessible.from.this.machine' in your settings.py.
    """
    def __init__(self):
        self.base_url = settings.SSI_BASE_URL
        # Fallback response encoding when the upstream omits a charset.
        self.default_encoding = getattr(settings, 'SSI_DEFAULT_ENCODING', 'utf-8')

    def process_response(self, request, response):
        # Only rewrite HTML responses; everything else passes through untouched.
        if response['Content-Type'].startswith('text/html'):
            # Collect each distinct include directive once (set dedupes).
            includes = set(re_all(r'<!--# include virtual=".*?" -->', response.content))
            responses = {}
            for include in includes:
                include_url = re_find(r'virtual="(.*?)"', include)
                url = '%s%s' % (self.base_url, include_url)
                # NOTE(review): no timeout or error handling here (see the
                # module-level TODO); a slow or failing include blocks the
                # whole response.
                r = requests.get(url)
                if 'charset' not in r.headers.get('content-type', ''):
                    r.encoding = self.default_encoding
                # Encode to bytes so it splices into response.content (py2 str).
                responses[include] = r.text.encode('utf-8')
            for include, replacement in responses.items():
                response.content = response.content.replace(include, replacement)
        return response
| {
"repo_name": "furagu/django-ssi",
"path": "middleware.py",
"copies": "1",
"size": "1427",
"license": "mit",
"hash": 869036808728355800,
"line_mean": 36.5526315789,
"line_max": 103,
"alpha_frac": 0.6194814296,
"autogenerated": false,
"ratio": 4.124277456647399,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5243758886247399,
"avg_score": null,
"num_lines": null
} |
from funcy import project
from flask import render_template, url_for
from flask_login import login_required
from flask_restful import abort
from redash import models, settings
from redash.wsgi import app
from redash.utils import json_dumps
from redash.handlers import org_scoped_rule
from redash.authentication.org_resolving import current_org
@app.route(org_scoped_rule('/embed/query/<query_id>/visualization/<visualization_id>'), methods=['GET'])
@login_required
def embed(query_id, visualization_id, org_slug=None):
    """Render a single visualization with its latest query result for embedding."""
    # TODO: add event for embed access
    query = models.Query.get_by_id_and_org(query_id, current_org)
    vis = query.visualizations.where(models.Visualization.id == visualization_id).first()
    qr = {}

    if vis is not None:
        vis = vis.to_dict()
        qr = query.latest_query_data
        if qr is None:
            # The query has never been executed; nothing to embed.
            abort(400, message="No Results for this query")
        else:
            qr = qr.to_dict()
    else:
        abort(404, message="Visualization not found.")

    client_config = {}
    client_config.update(settings.COMMON_CLIENT_CONFIG)

    # Strip both payloads down to the fields the embed template needs.
    qr = project(qr, ('data', 'id', 'retrieved_at'))
    vis = project(vis, ('description', 'name', 'id', 'options', 'query', 'type', 'updated_at'))
    # NOTE(review): this projects ``vis`` itself, not ``vis['query']`` --
    # looks like it was meant to slim the nested query dict; confirm.
    vis['query'] = project(vis, ('created_at', 'description', 'name', 'id', 'latest_query_data_id', 'name', 'updated_at'))

    if settings.MULTI_ORG:
        base_href = url_for('index', _external=True, org_slug=current_org.slug)
    else:
        base_href = url_for('index', _external=True)

    return render_template("embed.html",
                           name=settings.NAME,
                           base_href=base_href,
                           client_config=json_dumps(client_config),
                           visualization=json_dumps(vis),
                           query_result=json_dumps(qr),
                           analytics=settings.ANALYTICS)
| {
"repo_name": "olivetree123/redash-x",
"path": "redash/handlers/embed.py",
"copies": "1",
"size": "1911",
"license": "bsd-2-clause",
"hash": -812700462589554300,
"line_mean": 38,
"line_max": 122,
"alpha_frac": 0.6248037677,
"autogenerated": false,
"ratio": 3.732421875,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48572256427,
"avg_score": null,
"num_lines": null
} |
from funcy import project
from flask import url_for
from flask_login import current_user
from mock import patch
from redash import models, settings
from tests import BaseTestCase
from tests import authenticated_user
class AuthenticationTestMixin(object):
    """Shared checks run against every endpoint listed in ``self.paths``."""
    def test_returns_404_when_not_unauthenticated(self):
        for path in self.paths:
            rv = self.client.get(path)
            self.assertEquals(404, rv.status_code)

    def test_returns_content_when_authenticated(self):
        for path in self.paths:
            rv = self.make_request('get', path, is_json=False)
            self.assertEquals(200, rv.status_code)
class TestAuthentication(BaseTestCase):
    """Anonymous requests to the app root must redirect (to login)."""
    def test_redirects_for_nonsigned_in_user(self):
        rv = self.client.get("/default/")
        self.assertEquals(302, rv.status_code)
class PingTest(BaseTestCase):
    """The unauthenticated health-check endpoint."""
    def test_ping(self):
        rv = self.client.get('/ping')
        self.assertEquals(200, rv.status_code)
        self.assertEquals('PONG.', rv.data)
class IndexTest(BaseTestCase):
    """App pages require login: anonymous -> redirect, authenticated -> 200."""
    def setUp(self):
        self.paths = ['/default/', '/default/dashboard/example', '/default/queries/1', '/default/admin/status']
        super(IndexTest, self).setUp()

    def test_redirect_to_login_when_not_authenticated(self):
        for path in self.paths:
            rv = self.client.get(path)
            self.assertEquals(302, rv.status_code)

    def test_returns_content_when_authenticated(self):
        for path in self.paths:
            rv = self.make_request('get', path, org=False, is_json=False)
            self.assertEquals(200, rv.status_code)
class StatusTest(BaseTestCase):
    """/status.json is super-admin only: 200 / 403 / redirect by role."""
    def test_returns_data_for_super_admin(self):
        admin = self.factory.create_admin()
        models.db.session.commit()
        rv = self.make_request('get', '/status.json', org=False, user=admin, is_json=False)
        self.assertEqual(rv.status_code, 200)

    def test_returns_403_for_non_admin(self):
        rv = self.make_request('get', '/status.json', org=False, is_json=False)
        self.assertEqual(rv.status_code, 403)

    def test_redirects_non_authenticated_user(self):
        rv = self.client.get('/status.json')
        self.assertEqual(rv.status_code, 302)
class VisualizationResourceTest(BaseTestCase):
    """CRUD and permission checks for the /api/visualizations endpoints.

    Permission matrix exercised below: owner/admin -> allowed (200),
    unrelated user -> forbidden (403), admin of another org -> not found (404).
    """
    def test_create_visualization(self):
        query = self.factory.create_query()
        models.db.session.commit()
        data = {
            'query_id': query.id,
            'name': 'Chart',
            'description': '',
            'options': {},
            'type': 'CHART'
        }

        rv = self.make_request('post', '/api/visualizations', data=data)

        self.assertEquals(rv.status_code, 200)
        data.pop('query_id')
        self.assertDictContainsSubset(data, rv.json)

    def test_delete_visualization(self):
        visualization = self.factory.create_visualization()
        models.db.session.commit()
        rv = self.make_request('delete', '/api/visualizations/{}'.format(visualization.id))

        self.assertEquals(rv.status_code, 200)
        self.assertEquals(models.db.session.query(models.Visualization).count(), 0)

    def test_update_visualization(self):
        visualization = self.factory.create_visualization()
        models.db.session.commit()
        rv = self.make_request('post', '/api/visualizations/{0}'.format(visualization.id), data={'name': 'After Update'})

        self.assertEquals(rv.status_code, 200)
        self.assertEquals(rv.json['name'], 'After Update')

    def test_only_owner_or_admin_can_create_visualization(self):
        query = self.factory.create_query()
        other_user = self.factory.create_user()
        admin = self.factory.create_admin()
        admin_from_diff_org = self.factory.create_admin(org=self.factory.create_org())
        models.db.session.commit()
        # Refresh: the commit above expires the ORM instances.
        models.db.session.refresh(admin)
        models.db.session.refresh(other_user)
        models.db.session.refresh(admin_from_diff_org)
        data = {
            'query_id': query.id,
            'name': 'Chart',
            'description': '',
            'options': {},
            'type': 'CHART'
        }

        rv = self.make_request('post', '/api/visualizations', data=data, user=admin)
        self.assertEquals(rv.status_code, 200)

        rv = self.make_request('post', '/api/visualizations', data=data, user=other_user)
        self.assertEquals(rv.status_code, 403)

        rv = self.make_request('post', '/api/visualizations', data=data, user=admin_from_diff_org)
        self.assertEquals(rv.status_code, 404)

    def test_only_owner_or_admin_can_edit_visualization(self):
        vis = self.factory.create_visualization()
        models.db.session.flush()
        path = '/api/visualizations/{}'.format(vis.id)
        data = {'name': 'After Update'}

        other_user = self.factory.create_user()
        admin = self.factory.create_admin()
        admin_from_diff_org = self.factory.create_admin(org=self.factory.create_org())
        models.db.session.commit()
        models.db.session.refresh(admin)
        models.db.session.refresh(other_user)
        models.db.session.refresh(admin_from_diff_org)

        rv = self.make_request('post', path, user=admin, data=data)
        self.assertEquals(rv.status_code, 200)

        rv = self.make_request('post', path, user=other_user, data=data)
        self.assertEquals(rv.status_code, 403)

        rv = self.make_request('post', path, user=admin_from_diff_org, data=data)
        self.assertEquals(rv.status_code, 404)

    def test_only_owner_or_admin_can_delete_visualization(self):
        vis = self.factory.create_visualization()
        models.db.session.flush()
        path = '/api/visualizations/{}'.format(vis.id)
        other_user = self.factory.create_user()
        admin = self.factory.create_admin()
        admin_from_diff_org = self.factory.create_admin(org=self.factory.create_org())
        models.db.session.commit()
        models.db.session.refresh(admin)
        models.db.session.refresh(other_user)
        models.db.session.refresh(admin_from_diff_org)

        rv = self.make_request('delete', path, user=admin)
        self.assertEquals(rv.status_code, 200)

        # A fresh visualization per attempt: the admin case above deleted it.
        vis = self.factory.create_visualization()
        models.db.session.commit()
        path = '/api/visualizations/{}'.format(vis.id)

        rv = self.make_request('delete', path, user=other_user)
        self.assertEquals(rv.status_code, 403)

        vis = self.factory.create_visualization()
        models.db.session.commit()
        path = '/api/visualizations/{}'.format(vis.id)

        rv = self.make_request('delete', path, user=admin_from_diff_org)
        self.assertEquals(rv.status_code, 404)
class JobAPITest(BaseTestCase, AuthenticationTestMixin):
    """Placeholder: no job endpoints are exercised (empty path list)."""
    def setUp(self):
        self.paths = []
        super(JobAPITest, self).setUp()
class TestLogin(BaseTestCase):
    """Password login form: rendering, credential checks, remember-me, next-url.

    ``login_user`` is patched throughout so assertions can check whether a
    login actually happened without touching the session machinery.
    """
    def setUp(self):
        settings.PASSWORD_LOGIN_ENABLED = True
        super(TestLogin, self).setUp()

    @classmethod
    def setUpClass(cls):
        # These tests assume single-org URL resolution; restored below.
        settings.ORG_RESOLVING = "single_org"

    @classmethod
    def tearDownClass(cls):
        settings.ORG_RESOLVING = "multi_org"

    def test_redirects_to_google_login_if_password_disabled(self):
        with patch.object(settings, 'PASSWORD_LOGIN_ENABLED', False), self.app.test_request_context('/default/login'):
            rv = self.client.get('/default/login')
            self.assertEquals(rv.status_code, 302)
            self.assertTrue(rv.location.endswith(url_for('google_oauth.authorize', next='/default/')))

    def test_get_login_form(self):
        rv = self.client.get('/default/login')
        self.assertEquals(rv.status_code, 200)

    def test_submit_non_existing_user(self):
        with patch('redash.handlers.authentication.login_user') as login_user_mock:
            rv = self.client.post('/default/login', data={'email': 'arik', 'password': 'password'})
            self.assertEquals(rv.status_code, 200)
            self.assertFalse(login_user_mock.called)

    def test_submit_correct_user_and_password(self):
        user = self.factory.user
        user.hash_password('password')

        self.db.session.add(user)
        self.db.session.commit()

        with patch('redash.handlers.authentication.login_user') as login_user_mock:
            rv = self.client.post('/default/login', data={'email': user.email, 'password': 'password'})
            self.assertEquals(rv.status_code, 302)
            login_user_mock.assert_called_with(user, remember=False)

    def test_submit_correct_user_and_password_and_remember_me(self):
        user = self.factory.user
        user.hash_password('password')

        self.db.session.add(user)
        self.db.session.commit()

        with patch('redash.handlers.authentication.login_user') as login_user_mock:
            rv = self.client.post('/default/login', data={'email': user.email, 'password': 'password', 'remember': True})
            self.assertEquals(rv.status_code, 302)
            login_user_mock.assert_called_with(user, remember=True)

    def test_submit_correct_user_and_password_with_next(self):
        user = self.factory.user
        user.hash_password('password')

        self.db.session.add(user)
        self.db.session.commit()

        with patch('redash.handlers.authentication.login_user') as login_user_mock:
            rv = self.client.post('/default/login?next=/test',
                                  data={'email': user.email, 'password': 'password'})
            self.assertEquals(rv.status_code, 302)
            self.assertEquals(rv.location, 'http://localhost/test')
            login_user_mock.assert_called_with(user, remember=False)

    def test_submit_incorrect_user(self):
        with patch('redash.handlers.authentication.login_user') as login_user_mock:
            rv = self.client.post('/default/login', data={'email': 'non-existing', 'password': 'password'})
            self.assertEquals(rv.status_code, 200)
            self.assertFalse(login_user_mock.called)

    def test_submit_incorrect_password(self):
        user = self.factory.user
        user.hash_password('password')

        self.db.session.add(user)
        self.db.session.commit()

        with patch('redash.handlers.authentication.login_user') as login_user_mock:
            rv = self.client.post('/default/login', data={
                'email': user.email, 'password': 'badbadpassword'})
            self.assertEquals(rv.status_code, 200)
            self.assertFalse(login_user_mock.called)

    def test_submit_empty_password(self):
        user = self.factory.user
        with patch('redash.handlers.authentication.login_user') as login_user_mock:
            rv = self.client.post('/default/login', data={'email': user.email, 'password': ''})
            self.assertEquals(rv.status_code, 200)
            self.assertFalse(login_user_mock.called)

    def test_user_already_loggedin(self):
        with authenticated_user(self.client), patch('redash.handlers.authentication.login_user') as login_user_mock:
            rv = self.client.get('/default/login')
            self.assertEquals(rv.status_code, 302)
            self.assertFalse(login_user_mock.called)
class TestLogout(BaseTestCase):
    """Logout redirects and clears authentication, signed in or not."""
    def test_logout_when_not_loggedin(self):
        with self.app.test_client() as c:
            rv = c.get('/default/logout')
            self.assertEquals(rv.status_code, 302)
            self.assertFalse(current_user.is_authenticated)

    def test_logout_when_loggedin(self):
        with self.app.test_client() as c, authenticated_user(c, user=self.factory.user):
            rv = c.get('/default/')
            self.assertTrue(current_user.is_authenticated)

            rv = c.get('/default/logout')
            self.assertEquals(rv.status_code, 302)
            self.assertFalse(current_user.is_authenticated)
class TestQuerySnippet(BaseTestCase):
    """CRUD coverage for the /api/query_snippets endpoints."""
    def test_create(self):
        res = self.make_request(
            'post',
            '/api/query_snippets',
            data={'trigger': 'x', 'description': 'y', 'snippet': 'z'},
            user=self.factory.user)
        self.assertEqual(
            project(res.json, ['id', 'trigger', 'description', 'snippet']), {
                'id': 1,
                'trigger': 'x',
                'description': 'y',
                'snippet': 'z',
            })
        qs = models.QuerySnippet.query.one()
        self.assertEqual(qs.trigger, 'x')
        self.assertEqual(qs.description, 'y')
        self.assertEqual(qs.snippet, 'z')

    def test_edit(self):
        qs = models.QuerySnippet(
            trigger='a',
            description='b',
            snippet='c',
            user=self.factory.user,
            org=self.factory.org
        )
        models.db.session.add(qs)
        models.db.session.commit()
        res = self.make_request(
            'post',
            '/api/query_snippets/1',
            data={'trigger': 'x', 'description': 'y', 'snippet': 'z'},
            user=self.factory.user)
        self.assertEqual(
            project(res.json, ['id', 'trigger', 'description', 'snippet']), {
                'id': 1,
                'trigger': 'x',
                'description': 'y',
                'snippet': 'z',
            })
        # The ORM instance reflects the update made through the API.
        self.assertEqual(qs.trigger, 'x')
        self.assertEqual(qs.description, 'y')
        self.assertEqual(qs.snippet, 'z')

    def test_list(self):
        qs = models.QuerySnippet(
            trigger='x',
            description='y',
            snippet='z',
            user=self.factory.user,
            org=self.factory.org
        )
        models.db.session.add(qs)
        models.db.session.commit()
        res = self.make_request(
            'get',
            '/api/query_snippets',
            user=self.factory.user)
        self.assertEqual(res.status_code, 200)
        data = res.json
        self.assertEqual(len(data), 1)
        self.assertEqual(
            project(data[0], ['id', 'trigger', 'description', 'snippet']), {
                'id': 1,
                'trigger': 'x',
                'description': 'y',
                'snippet': 'z',
            })
        self.assertEqual(qs.trigger, 'x')
        self.assertEqual(qs.description, 'y')
        self.assertEqual(qs.snippet, 'z')

    def test_delete(self):
        qs = models.QuerySnippet(
            trigger='a',
            description='b',
            snippet='c',
            user=self.factory.user,
            org=self.factory.org
        )
        models.db.session.add(qs)
        models.db.session.commit()
        self.make_request(
            'delete',
            '/api/query_snippets/1',
            user=self.factory.user)
        self.assertEqual(models.QuerySnippet.query.count(), 0)
| {
"repo_name": "luozhanxin/redash-docker",
"path": "tests/test_handlers.py",
"copies": "7",
"size": "14778",
"license": "bsd-2-clause",
"hash": -7152801648980840000,
"line_mean": 36.6030534351,
"line_max": 121,
"alpha_frac": 0.6067803492,
"autogenerated": false,
"ratio": 3.78437900128041,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.789115935048041,
"avg_score": null,
"num_lines": null
} |
from Fund import Fund
from TAA import TAA
# Read the fund list file: one fund per line as "<id> <name words...>";
# blank lines and '#' comments are skipped.
fundListPath = input( "Enter path to fundlist: " )

with open( fundListPath, 'r' ) as handle:
    fundList = []
    print( "Fetching data", end='', flush=True )
    for line in handle:
        if line == '\n' or line.startswith( '#' ):
            continue
        parts = line.split()
        id = parts[ 0 ]
        name = " ".join( parts[ 1: ] )
        # Fund(...) fetches the fund's data (one dot printed per fund).
        fundList.append( Fund( id, name ) )
        print('.', end='', flush=True)

# Best average returns first.
fundList = sorted( fundList, key = lambda fund: fund.getAverageReturns(), reverse = True )
print( "\n\n" + Fund.getFormattedHeader() )
for fund in fundList:
    print( fund.getFormattedData() )
print()

# Ask how many of the top funds to display; out-of-range input means "all".
topN = int( input( "Choose top n: " ) )
if topN < 1 or topN > len( fundList ):
    topN = len( fundList )
print( "\n" + Fund.getFormattedHeader() )
for fund in fundList[ :topN ]:
    print( fund.getFormattedData() )

# TAA signal: current US unemployment rate vs. its 12-month moving average.
unRateData = TAA.getUnRateData()
print( "\n\n=== US Unemployment rate ===" )
print( "Current: " + str( unRateData[ 0 ] ) )
print( "MA12: " + str( '{:.3f}'.format( unRateData[ 1 ] ) ) )
input( "\nDone" )
| {
"repo_name": "Swassie/FundInfo-Python",
"path": "main.py",
"copies": "1",
"size": "1116",
"license": "mit",
"hash": 5562403688211334000,
"line_mean": 24.3636363636,
"line_max": 90,
"alpha_frac": 0.5869175627,
"autogenerated": false,
"ratio": 3.032608695652174,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.41195262583521736,
"avg_score": null,
"num_lines": null
} |
# Site settings layered on top of funfactory's (Mozilla Playdoh) base
# settings; the star import provides the defaults extended below.
from funfactory.settings_base import *
from funfactory.manage import path
# Preprocess .less stylesheets through the lessc binary at the given path.
LESS_PREPROCESS = True
LESS_BIN = '/usr/local/bin/lessc'
# Preprocess .coffee scripts through the coffee binary (found on PATH).
COFFEE_PREPROCESS = True
COFFEE_BIN = 'coffee'
# Media/static roots resolved relative to the project root via path().
MEDIA_ROOT = path('m')
STATIC_ROOT = path('s')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    path('static'),
)
# For integration with staticfiles, this should be the same as STATIC_URL
# followed by 'admin/'.
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
# Bundles is a dictionary of two dictionaries, css and js, which list css files
# and js files that can be bundled together by the minify app.
# Entries order matters: files are concatenated in the order listed.
MINIFY_BUNDLES = {
    'css': {
        'common': (
            'css/location-picker.css',
            'less/datetimepicker.less',
            'css/annotation.css',
            'less/bootstrap/bootstrap.less',
            'less/bootstrap-extension.less',
            'less/common.less',
            'less/layout.less',
            'less/home.less',
            'less/store.less',
            'less/view-item.less',
            'less/accounts.less',
            'less/profiles.less',
            'less/friends.less',
            'less/messages.less',
            'less/cart.less',
        ),
        'manage_store': (
            'less/manage-store.less',
            'less/autocomplite.less'
        ),
        'example_mobile_css': (
            'css/examples/mobile.css',
        ),
        'colorpicker_css':(
            'css/colorpicker.css',
        )
    },
    'js': {
        'modernizr': ('js/libs/modernizr-2.5.2.js',),
        'jquery': ('js/libs/jquery-1.7.1.js',),
        # Main site-wide bundle: vendored libs first, then app code.
        'public': (
            'js/libs/underscore-1.3.1.js',
            'js/libs/backbone-0.5.3.js',
            'js/libs/backbone-relational-0.4.0.js',
            'js/libs/backbone-tastypie-0.1.js',
            'js/libs/moment-1.4.0.js',
            'js/libs/accounting-0.3.2.js',
            'js/accounting.jquery.js',
            'js/location-picker.js',
            'js/libs/jquery.mousewheel.js',
            'js/libs/bootstrap-transition.js',
            'js/libs/bootstrap-alert.js',
            'js/libs/bootstrap-dropdown.js',
            'js/libs/bootstrap-scrollspy.js',
            'js/libs/bootstrap-tab.js',
            'js/libs/bootstrap-tooltip.js',
            'js/libs/bootstrap-popover.js',
            'js/libs/bootstrap-button.js',
            'js/libs/bootstrap-collapse.js',
            'js/libs/bootstrap-carousel.js',
            'js/libs/bootstrap-typeahead.js',
            'js/libs/bootstrap-modal-custom-old.js',
            'js/libs/jquery.timePicker.js',
            'js/libs/bootstrap-datepicker-custom.js',
            'coffee/datetimepicker.coffee',
            'js/utils.js',
            'js/backbone/base.js',
            'js/backbone/views.js',
            'js/backbone/collections.js',
            'coffee/backbone/models.coffee',
            'js/pages/models.js',
            'js/pages/views.js',
            'js/pages/routers.js',
            'js/stores/models.js',
            'js/stores/views.js',
            'coffee/stores/views.coffee',
            'js/stores/routers.js',
            'coffee/cart/models.coffee',
            'coffee/cart/views.coffee',
            'coffee/cart/routers.coffee',
            # 'js/libs/jquery-ui-1.8.17.js', Alexei, it breaks (overrides) $.datepicker from bootstrap-datepicker-custom.js!
            'js/libs/jquery.annotate.js',
            'js/init.js', # must go last
            'js/example.coffee',
        ),
        # Store-management pages only.
        'manage': (
            'js/libs/fileuploader.js',
            'js/libs/ajax-file-form.js',
            'js/stores/manage-views.js',
            'js/stores/manage-routers.js',
            'coffee/stores/models.coffee',
            'coffee/stores/manage-views.coffee',
            'coffee/stores/manage-routers.coffee',
        ),
        'geo' : (
            'js/libs/geo.js',
        ),
        'jqueryui' : (
            'js/libs/jquery-ui-1.8.17.custom.min.js',
        ),
        'colorpicker_js' : (
            'js/colorpicker_init.js',
            'js/libs/colorpicker.js',
        ),
    }
}
# Defines the views served for root URLs.
ROOT_URLCONF = 'urls'
TEMPLATE_LOADERS = (
    'utils.jinja2_for_django.Loader', # must go first
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader'
)
# https://github.com/mozilla/funfactory/blob/master/funfactory/middleware.py
# bug with DELETE HTTP request
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
# NOTE(review): pop(0) drops the first middleware inherited from funfactory
# as a workaround for the DELETE bug linked above — confirm it is still
# required when funfactory is upgraded.
MIDDLEWARE_CLASSES.pop(0) # FIXME
MIDDLEWARE_CLASSES = MIDDLEWARE_CLASSES + [
    'cart.middleware.TryCompletePendingTransactionMiddleware',
    'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
]
# Extra context processors layered on funfactory's defaults.
TEMPLATE_CONTEXT_PROCESSORS += (
    'django.core.context_processors.static',
    'stores.context_processors.has_store',
    'accounts.context_processors.auth_forms',
    'django_messages.context_processors.inbox',
    'friends.context_processors.friendship_requests_counter',
    'utils.context_processors.settings',
)
# Apps added on top of funfactory's INSTALLED_APPS.
INSTALLED_APPS = list(INSTALLED_APPS) + [
    'django.contrib.sites',
    'django.contrib.flatpages',
    'django.contrib.messages',
    'django.contrib.admin',
    'django.contrib.admindocs',
    'django.contrib.staticfiles',
    'django.contrib.gis',
    'django_extensions',
    'lettuce.django',
    'utils',
    'pages',
    'accounts',
    'profiles',
    'friends',
    'stores',
    'cart',
    'messages',
    'djkombu',
    'sorl.thumbnail',
    'django_messages',
    'facebook',
    'south', # must go last
]
# User profile model used by django.contrib.auth's get_profile().
AUTH_PROFILE_MODULE = 'profiles.Profile'
AUTHENTICATION_BACKENDS = [
    'accounts.backends.EmailOrUsernameModelBackend', # must go first
    'facebook.backend.FacebookBackend',
]
# Tells the extract script what files to look for L10n in and what function
# handles the extraction. The Tower library expects this.
# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['lhtml'] = [
#     ('**/templates/**.lhtml',
#         'tower.management.commands.extract.extract_tower_template'),
# ]
# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['javascript'] = [
#     # Make sure that this won't pull in strings from external libraries you
#     # may use.
#     ('media/js/**.js', 'javascript'),
# ]
# `logging` is provided by funfactory.settings_base via the star import.
LOGGING = dict(loggers=dict(playdoh = {'level': logging.DEBUG}))
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
# Minimum time interval between confirmation-like emails resends (in minutes)
CONFIRMATION_RESEND_TIMEOUT = 5
# django_facebook_oauth settings
FACEBOOK_APP_ID = ''
FACEBOOK_APP_SECRET = ''
FACEBOOK_SCOPE = 'email,user_birthday'
FACEBOOK_FORCE_VERIFICATION = True
# PayPal Adaptive Payments API credentials (sandbox environment).
# NOTE(review): credentials are hard-coded in a tracked settings file;
# consider moving them to local settings or environment variables.
PP_API_ENVIRONMENT = 'sandbox'
PP_API_EMAIL = 'sllr1_1319977604_biz@gmail.com'
PP_API_USERID = 'sllr1_1319977604_biz_api1.gmail.com'
PP_API_PASSWORD = '1319977630'
PP_API_SIGNATURE = 'AIIafItLgz5fj4SuyH4SPcjKJp-pAyNi8Piqc-yfBieFP7FY0X0WF.z5'
PP_API_APPLICATION_ID = 'APP-80W284485P519543T' # common ID for sandbox applications
# for development purposes:
# ./manage.py createcachetable cache
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': 'cache',
    }
}
# Celery task queue (django-celery) using a database-backed transport.
import djcelery
djcelery.setup_loader()
BROKER_BACKEND = 'djkombu.transport.DatabaseTransport'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
# Use these settings with RabbitMQ:
# BROKER_HOST = 'localhost'
# BROKER_PORT = 5672
# BROKER_USER = ''
# BROKER_PASSWORD = ''
# BROKER_VHOST = '/'
CELERY_ALWAYS_EAGER = False
CELERY_IGNORE_RESULT = True
CELERY_RESULT_BACKEND = 'amqp'
# Modules containing task definitions to register at worker start-up.
CELERY_IMPORTS = ('cart.tasks',)
EMAIL_RESEND_INTERVAL = 5
import decimal
# 95/100 fraction — presumably the seller's share of a sale price;
# TODO confirm against the cart code that consumes SD_FEE.
SD_FEE = decimal.Decimal(95) / decimal.Decimal(100)
from datetime import timedelta
# Time windows for the shipping/payment workflow.
SHIPPING_PRICE_REQUEST_PROCESSING_PERIOD = timedelta(hours=1)
SHIPPING_PAY_PERIOD = timedelta(hours=10)
| {
"repo_name": "softak/webfaction_demo",
"path": "settings.py",
"copies": "1",
"size": "7846",
"license": "bsd-3-clause",
"hash": 892264076341193000,
"line_mean": 26.1487889273,
"line_max": 124,
"alpha_frac": 0.6145806781,
"autogenerated": false,
"ratio": 3.2814721873693014,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43960528654693015,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.